This article collects typical usage examples of the Java method javax.sound.sampled.SourceDataLine.start. If you have been wondering what SourceDataLine.start does, how to call it, or what it looks like in real code, the curated method examples here may help. You can also explore further usage examples of the enclosing class, javax.sound.sampled.SourceDataLine.
The sections below show 15 code examples of the SourceDataLine.start method, sorted by popularity by default.
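Before the individual examples, here is a minimal, self-contained sketch of the typical SourceDataLine lifecycle (get the line, open, start, write, drain, close). The format, frequency, and amplitude below are illustrative choices, not taken from any of the projects quoted later.

import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioSystem;
import javax.sound.sampled.LineUnavailableException;
import javax.sound.sampled.SourceDataLine;

public class StartDemo {
    public static void main(String[] args) throws LineUnavailableException {
        float sampleRate = 44100f;                 // assumed format: 8-bit signed mono PCM, big endian
        AudioFormat format = new AudioFormat(sampleRate, 8, 1, true, true);
        byte[] tone = new byte[(int) sampleRate];  // one second of a 440 Hz sine wave
        for (int i = 0; i < tone.length; i++) {
            tone[i] = (byte) (100 * Math.sin(2 * Math.PI * 440 * i / sampleRate));
        }
        SourceDataLine line = AudioSystem.getSourceDataLine(format);
        line.open(format);
        line.start();                              // nothing is audible before start()
        line.write(tone, 0, tone.length);          // blocks until all data has been queued
        line.drain();                              // wait for the queued data to finish playing
        line.close();
    }
}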
Example 1: init
import javax.sound.sampled.SourceDataLine; // import the package/class this method depends on
private static void init() {
    try {
        // 44,100 samples per second, 16-bit audio, mono, signed PCM, little endian
        AudioFormat format = new AudioFormat((float) SAMPLE_RATE, BITS_PER_SAMPLE, 1, true, false);
        DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
        line = (SourceDataLine) AudioSystem.getLine(info);
        line.open(format, SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE);

        // The internal buffer is a fraction of the actual buffer size; this choice is arbitrary.
        // It gets divided because we can't expect the buffered data to line up exactly with when
        // the sound card decides to push out its samples.
        buffer = new byte[SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE / 3];
    } catch (LineUnavailableException e) {
        System.out.println(e.getMessage());
    }
    // no sound gets made before this call
    line.start();
}
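This example references fields (SAMPLE_RATE, BITS_PER_SAMPLE, BYTES_PER_SAMPLE, SAMPLE_BUFFER_SIZE, line, buffer) that the snippet does not show. Plausible companion declarations, modeled on StdAudio-style defaults, would look like the following; the concrete values are assumptions, not taken from the quoted project.

// Assumed companion declarations for the fields used in init() above (values are illustrative).
private static final int SAMPLE_RATE = 44100;        // samples per second
private static final int BITS_PER_SAMPLE = 16;
private static final int BYTES_PER_SAMPLE = 2;       // 16-bit audio
private static final int SAMPLE_BUFFER_SIZE = 4096;  // samples held in the application-side buffer
private static SourceDataLine line;                  // the line that init() opens and starts
private static byte[] buffer;                        // staging buffer filled before each write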
Example 2: playGradient
import javax.sound.sampled.SourceDataLine; // import the package/class this method depends on
public static void playGradient(double fstart,double fend,double duration,double volume,byte fadeend,byte wave) {
byte[] freqdata = new byte[(int)(duration * SAMPLE_RATE)];
// Generate the sound
for(int i = 0; i < freqdata.length; i++) {
freqdata[i] = (byte)generateValue(i, duration, fstart + (fend-fstart) * (i/(double)freqdata.length), volume, fadeend, wave);
}
// Play it
try {
final AudioFormat af = new AudioFormat(SAMPLE_RATE, 8, 1, true, true);
SourceDataLine line = AudioSystem.getSourceDataLine(af);
line.open(af, SAMPLE_RATE);
line.start();
line.write(freqdata, 0, freqdata.length);
line.drain();
line.close();
}catch(LineUnavailableException e) {
e.printStackTrace();
}
}
Example 3: playSound
import javax.sound.sampled.SourceDataLine; // import the package/class this method depends on
/**
* Play a sound at a given frequency freq for a given duration (in seconds), with volume as its strength.
* <br/><br/>
* <code>SoundGenerator.playSound(440.0,1.0,0.5,SoundGenerator.FADE_LINEAR,SoundGenerator.WAVE_SIN);</code><br/>
* Available fades : FADE_NONE, FADE_LINEAR, FADE_QUADRATIC<br/>
* Available waves : WAVE_SIN, WAVE_SQUARE, WAVE_TRIANGLE, WAVE_SAWTOOTH<br/>
*/
public static void playSound(double freq,double duration,double volume,byte fade,byte wave){
double[] soundData = generateSoundData(freq,duration,volume,fade,wave);
byte[] freqdata = new byte[soundData.length];
for(int i = 0;i < soundData.length;i++) {
freqdata[i] = (byte)soundData[i];
}
// Play it
try {
final AudioFormat af = new AudioFormat(SAMPLE_RATE, 8, 1, true, true);
SourceDataLine line = AudioSystem.getSourceDataLine(af);
line.open(af, SAMPLE_RATE);
line.start();
line.write(freqdata, 0, freqdata.length);
line.drain();
line.close();
}catch(LineUnavailableException e) {
e.printStackTrace();
}
}
Example 4: playRecorded
import javax.sound.sampled.SourceDataLine; // import the package/class this method depends on
void playRecorded(AudioFormat format, byte[] data) throws Exception {
//SourceDataLine line = AudioSystem.getSourceDataLine(format);
DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
SourceDataLine line = (SourceDataLine)AudioSystem.getLine(info);
line.open();
line.start();
int remaining = data.length;
while (remaining > 0) {
int avail = line.available();
if (avail > 0) {
if (avail > remaining)
avail = remaining;
int written = line.write(data, data.length - remaining, avail);
remaining -= written;
log("Playing: " + written + " bytes written");
} else {
delay(100);
}
}
line.drain();
line.stop();
}
Example 5: main
import javax.sound.sampled.SourceDataLine; // import the package/class this method depends on
public static void main(String[] args) throws Exception {
AbstractRcomArgs a=new AbstractRcomArgs();
UtilCli.parse(a, args, true);
File folder=new File("/home/rizsi/tmp/video");
byte[] data=UtilFile.loadFile(new File(folder, "remote.sw"));
AudioFormat format=ManualTestEchoCancel.getFormat();
final Mixer mixer = AudioSystem.getMixer(null);
DataLine.Info info2= new DataLine.Info(SourceDataLine.class, format);
SourceDataLine s=(SourceDataLine) mixer.getLine(info2);
s.open(format, framesamples*2);
s.start();
try(LoopInputStream lis=new LoopInputStream(data))
{
try(JitterResampler rs=new JitterResampler(a, 8000, framesamples, 2))
{
new FeedThread(lis, rs).start();
final byte[] buffer=new byte[framesamples*2];
while(true)
{
rs.readOutput(buffer);
s.write(buffer, 0, buffer.length);
}
}
}
}
Example 6: playAudioStream
import javax.sound.sampled.SourceDataLine; // import the package/class this method depends on
/** Plays audio from the given audio input stream.
*
* @param stream the AudioInputStream to play.
* @param startTime the time to skip to when playing starts.
* A value of zero means this plays from the beginning, 1 means it skips one second, etc.
* @param listener an optional Listener to update.
* @param cancellable an optional Cancellable to consult.
* @param blocking whether this call is blocking or not.
* @throws LineUnavailableException if a line is unavailable.
* @throws UnsupportedOperationException if this static method doesn't support playing the stream argument
**/
public static SourceDataLine playAudioStream(AudioInputStream stream,StartTime startTime,Listener listener,Cancellable cancellable,boolean blocking) throws UnsupportedOperationException, LineUnavailableException {
AudioFormat audioFormat = stream.getFormat();
DataLine.Info info = new DataLine.Info( SourceDataLine.class, audioFormat );
if ( !AudioSystem.isLineSupported( info ) ) {
throw new UnsupportedOperationException("AudioPlayback.playAudioStream: info="+info );
}
final SourceDataLine dataLine = (SourceDataLine) AudioSystem.getLine( info );
dataLine.open( audioFormat );
dataLine.start();
PlayAudioThread thread = new PlayAudioThread(stream, startTime, dataLine, listener, cancellable);
if(blocking) {
thread.run();
} else {
thread.start();
}
return dataLine;
}
Example 7: init
import javax.sound.sampled.SourceDataLine; // import the package/class this method depends on
private static void init() {
try {
// 44,100 samples per second, 16-bit audio, mono, signed PCM, little Endian
AudioFormat format = new AudioFormat((float) SAMPLE_RATE, BITS_PER_SAMPLE, 1, true, false);
DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
line = (SourceDataLine) AudioSystem.getLine(info);
line.open(format, SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE);
// the internal buffer is a fraction of the actual buffer size, this choice is arbitrary
// it gets divided because we can't expect the buffered data to line up exactly with when
// the sound card decides to push out its samples.
buffer = new byte[SAMPLE_BUFFER_SIZE * BYTES_PER_SAMPLE/3];
}
catch (LineUnavailableException e) {
System.out.println(e.getMessage());
}
// no sound gets made before this call
line.start();
}
Example 8: main
import javax.sound.sampled.SourceDataLine; // import the package/class this method depends on
public static void main(String[] args) throws LineUnavailableException, IOException {
AudioPlayerManager manager = new DefaultAudioPlayerManager();
AudioSourceManagers.registerRemoteSources(manager);
manager.getConfiguration().setOutputFormat(new AudioDataFormat(2, 44100, 960, AudioDataFormat.Codec.PCM_S16_BE));
AudioPlayer player = manager.createPlayer();
manager.loadItem("ytsearch: epic soundtracks", new FunctionalResultHandler(null, playlist -> {
player.playTrack(playlist.getTracks().get(0));
}, null, null));
AudioDataFormat format = manager.getConfiguration().getOutputFormat();
AudioInputStream stream = AudioPlayerInputStream.createStream(player, format, 10000L, false);
SourceDataLine.Info info = new DataLine.Info(SourceDataLine.class, stream.getFormat());
SourceDataLine line = (SourceDataLine) AudioSystem.getLine(info);
line.open(stream.getFormat());
line.start();
byte[] buffer = new byte[format.bufferSize(2)];
int chunkSize;
while ((chunkSize = stream.read(buffer)) >= 0) {
line.write(buffer, 0, chunkSize);
}
}
Example 9: rawplay
import javax.sound.sampled.SourceDataLine; // import the package/class this method depends on
private void rawplay(AudioFormat targetFormat, AudioInputStream din) throws IOException, LineUnavailableException
{
byte[] data = new byte[4096];
SourceDataLine line = getLine(targetFormat);
if (line != null)
{
// Start
line.start();
int nBytesRead = 0, nBytesWritten = 0;
while (nBytesRead != -1)
{
nBytesRead = din.read(data, 0, data.length);
if (nBytesRead != -1) nBytesWritten = line.write(data, 0, nBytesRead);
}
// Stop
line.drain();
line.stop();
line.close();
din.close();
}
}
Example 10: reproduce
import javax.sound.sampled.SourceDataLine; // import the package/class this method depends on
private static void reproduce(byte[] soundbytes) {
try {
DataLine.Info dataLineInfo = new DataLine.Info(SourceDataLine.class, AudioFormatHelper.getAudioFormat());
// The source data line is used to write the sound data out
SourceDataLine sourceDataLine = (SourceDataLine) AudioSystem.getLine(dataLineInfo);
sourceDataLine.open(AudioFormatHelper.getAudioFormat());
sourceDataLine.start();
sourceDataLine.write(soundbytes, 0, soundbytes.length);
sourceDataLine.drain();
sourceDataLine.close();
} catch (Exception e) {
// Log and Handle exception
e.printStackTrace();
}
}
Example 11: initializeOutput
import javax.sound.sampled.SourceDataLine; // import the package/class this method depends on
/**
* FIXME:
* specify the buffer size in the open(AudioFormat, int) method. A delay of 10ms-100ms is acceptable for realtime audio. Very low latencies will
* not work on all computer systems, and 100ms or more will probably be annoying for your users. A good tradeoff is about 50ms. For this audio format,
* 8-bit mono at 44100Hz, a good buffer size is 2200 bytes, which is almost 50ms.
*/
void initializeOutput() {
DataLine.Info dataLineInfo = new DataLine.Info( SourceDataLine.class, audioFormat);
//line = (TargetDataLine) AudioSystem.getLine(info);
//Mixer m = AudioSystem.getMixer(null);
try {
//sourceDataLine = (SourceDataLine)m.getLine(dataLineInfo);
sourceDataLine = (SourceDataLine)AudioSystem.getLine(dataLineInfo);
sourceDataLine.open(audioFormat);
sourceDataLine.start();
} catch (LineUnavailableException e) {
// TODO Auto-generated catch block
e.printStackTrace(Log.getWriter());
}
}
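Following up on the FIXME note, the buffer size for a target latency can be derived from the format's frame rate and frame size. The helper below only illustrates that arithmetic; the method name and the 50 ms target are assumptions, not part of the quoted project.

// Sketch: compute an open(AudioFormat, int) buffer size from a target latency.
static int bufferSizeForLatency(AudioFormat format, double latencySeconds) {
    // frames per second * bytes per frame * seconds = bytes
    return (int) Math.round(format.getFrameRate() * format.getFrameSize() * latencySeconds);
}

// For 8-bit mono at 44100 Hz and a 50 ms target: 44100 * 1 * 0.05 ≈ 2205 bytes,
// which matches the "almost 50ms" figure of roughly 2200 bytes mentioned above.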
Example 12: rawplay
import javax.sound.sampled.SourceDataLine; // import the package/class this method depends on
private void rawplay(AudioFormat targetFormat, AudioInputStream din) throws IOException, LineUnavailableException
{
byte[] data = new byte[4096];
SourceDataLine line = getLine(targetFormat);
if (line != null)
{
// Start
line.start();
int nBytesRead = 0, nBytesWritten = 0;
while (nBytesRead != -1)
{
nBytesRead = din.read(data, 0, data.length);
if (nBytesRead != -1) nBytesWritten = line.write(data, 0, nBytesRead);
}
// Stop
line.drain();
line.stop();
line.close();
din.close();
}
}
Example 13: rawplay
import javax.sound.sampled.SourceDataLine; // import the package/class this method depends on
private void rawplay(AudioFormat targetFormat, AudioInputStream din) throws IOException, LineUnavailableException
{
byte[] data = new byte[4096];
SourceDataLine line = getLine(targetFormat);
if (line != null)
{
// Start
line.start();
int nBytesRead = 0, nBytesWritten = 0;
while (nBytesRead != -1)
{
nBytesRead = din.read(data, 0, data.length);
if (nBytesRead != -1) nBytesWritten = line.write(data, 0, nBytesRead);
}
// Stop
line.drain();
line.stop();
line.close();
din.close();
}
}
Example 14: start
import javax.sound.sampled.SourceDataLine; // import the package/class this method depends on
@Override
public void start() {
DataLine.Info info = new DataLine.Info(SourceDataLine.class, format);
if (!AudioSystem.isLineSupported(info)) {
// Handle the error.
logger.severe("JavaSoundOutputStream - not supported." + format);
} else {
try {
line = (SourceDataLine) getDataLine(info);
int bufferSize = calculateBufferSize(suggestedOutputLatency);
line.open(format, bufferSize);
logger.fine("Output buffer size = " + bufferSize + " bytes.");
line.start();
} catch (Exception e) {
e.printStackTrace();
line = null;
}
}
}
Example 15: rawplay
import javax.sound.sampled.SourceDataLine; // import the package/class this method depends on
private static void rawplay(AudioFormat targetFormat,
AudioInputStream din) throws IOException, LineUnavailableException
{
byte[] data = new byte[4096];
SourceDataLine line = getLine(targetFormat);
if (line != null)
{
// Start
line.start();
int nBytesRead = 0, nBytesWritten = 0;
while (nBytesRead != -1)
{
nBytesRead = din.read(data, 0, data.length);
if (nBytesRead != -1) nBytesWritten = line.write(data, 0, nBytesRead);
}
// Stop
line.drain();
line.stop();
line.close();
din.close();
}
}