當前位置: 首頁>>代碼示例>>Java>>正文


Java AudioTrack.play方法代碼示例

本文整理匯總了Java中android.media.AudioTrack.play方法的典型用法代碼示例。如果您正苦於以下問題:Java AudioTrack.play方法的具體用法?Java AudioTrack.play怎麽用?Java AudioTrack.play使用的例子?那麽, 這裏精選的方法代碼示例或許可以為您提供幫助。您也可以進一步了解該方法所在android.media.AudioTrack的用法示例。


在下文中一共展示了AudioTrack.play方法的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於系統推薦出更棒的Java代碼示例。

示例1: playSound

import android.media.AudioTrack; //導入方法依賴的package包/類
/**
 * Plays the given sound data through a freshly created one-shot static AudioTrack.
 *
 * @param buffer specifies the sound data buffer.
 */
public void playSound(short[] buffer)
{
    final String funcName = "playSound";

    if (debugEnabled)
    {
        dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
        dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API);
    }

    // Each 16-bit PCM sample occupies two bytes, so the track's byte capacity
    // is twice the sample count.
    final int bufferLengthInBytes = buffer.length*2;
    audioTrack = new AudioTrack(
            AudioManager.STREAM_MUSIC,
            sampleRate,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferLengthInBytes,
            AudioTrack.MODE_STATIC);
    // Load the whole clip, then request a completion callback at the last frame
    // before starting playback.
    audioTrack.write(buffer, 0, buffer.length);
    audioTrack.setNotificationMarkerPosition(buffer.length);
    audioTrack.setPlaybackPositionUpdateListener(this);
    audioTrack.play();
    playing = true;
}
 
開發者ID:trc492,項目名稱:Ftc2018RelicRecovery,代碼行數:29,代碼來源:FtcAndroidTone.java

示例2: onCreate

import android.media.AudioTrack; //導入方法依賴的package包/類
@Override
public void onCreate() {
    super.onCreate();
    mHandler = new Handler();
    fetchAccessToken();

    // BUG FIX: the minimum buffer size must be queried with the same output
    // configuration the track is created with. The original used
    // CHANNEL_IN_STEREO (an *input* channel mask) while the track plays
    // CHANNEL_OUT_MONO, so the computed size did not match the track.
    int outputBufferSize = AudioTrack.getMinBufferSize(16000,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT);

    try {
        // BUG FIX: AudioManager.USE_DEFAULT_STREAM_TYPE is not a legal stream
        // type for the AudioTrack constructor (it throws
        // IllegalArgumentException, which the catch below silently swallowed,
        // leaving mAudioTrack null). Use STREAM_MUSIC explicitly.
        mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, 16000,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
                outputBufferSize, AudioTrack.MODE_STREAM);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            // setVolume(float) is only available from API 21 on.
            mAudioTrack.setVolume(DEFAULT_VOLUME);
        }
        mAudioTrack.play();
    } catch (Exception e) {
        e.printStackTrace();
    }
}
 
開發者ID:hsavaliya,項目名稱:GoogleAssistantSDK,代碼行數:21,代碼來源:SpeechService.java

示例3: run

import android.media.AudioTrack; //導入方法依賴的package包/類
@Override
public void run() {
    super.run();
    isRunning = true;
    int buffsize = AudioTrack.getMinBufferSize(sr,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    // Create a streaming mono 16-bit PCM track at sample rate sr.
    AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            sr, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, buffsize,
            AudioTrack.MODE_STREAM);

    short[] samples = new short[buffsize];  // idiomatic array declaration (was C-style "short samples[]")
    final int amp = 10000;                  // peak amplitude of the sine wave
    final double twopi = 2.0 * Math.PI;     // clearer than the original 8*atan(1)
    double ph = 0.0;                        // running phase in radians

    // start audio
    audioTrack.play();

    // Synthesis loop: fill a buffer with sine samples at the current tuning
    // frequency and push it to the track until stopped.
    while (isRunning) {
        double fr = tuneFreq;   // re-read each pass so frequency changes take effect
        for (int i = 0; i < buffsize; i++) {
            samples[i] = (short) (amp * Math.sin(ph));
            ph += twopi * fr / sr;
        }
        // BUG FIX: keep the phase accumulator bounded. Without this, ph grows
        // without limit and Math.sin() gradually loses precision on long runs.
        // Reducing modulo 2*pi is mathematically a no-op for sin().
        ph %= twopi;
        audioTrack.write(samples, 0, buffsize);
    }
    audioTrack.stop();
    audioTrack.release();
}
 
開發者ID:karlotoy,項目名稱:perfectTune,代碼行數:33,代碼來源:TuneThread.java

示例4: PWave

import android.media.AudioTrack; //導入方法依賴的package包/類
public PWave(AppRunner appRunner) {
    super(appRunner);
    appRunner.whatIsRunning.add(this);

    // Query the smallest buffer the platform accepts for mono 16-bit PCM
    // at the configured sample rate.
    buffsize = AudioTrack.getMinBufferSize(mSampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    samples = new short[buffsize];

    // Streaming track on the music stream, matching the buffer format above.
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            mSampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, buffsize,
            AudioTrack.MODE_STREAM);

    // Start playback right away.
    audioTrack.play();
}
 
開發者ID:victordiaz,項目名稱:phonk,代碼行數:20,代碼來源:PWave.java

示例5: start

import android.media.AudioTrack; //導入方法依賴的package包/類
/**
 * Starts playing a tone at the given frequency.
 *
 * @param rate the tone frequency in Hz; non-positive values are ignored.
 */
@SuppressWarnings("deprecation")
public void start(int rate){
	stop();
	// Guard clause: nothing to play for a non-positive frequency
	// (replaces the original inverted if/else with a dead "return" branch).
	if(rate<=0){
		return;
	}
	Hz=rate;
	waveLen = RATE / Hz;    // samples per wave period
	length = waveLen * Hz;  // total sample count for the generated wave
	audioTrack=new AudioTrack(AudioManager.STREAM_MUSIC, RATE,
			AudioFormat.CHANNEL_CONFIGURATION_STEREO, // deprecated constant, kept for behavior compatibility
			AudioFormat.ENCODING_PCM_8BIT, length, AudioTrack.MODE_STREAM);
	// Generate the sine wave to stream.
	wave=SinWave.sin(wave, waveLen, length);
	// The AudioTrack constructor never returns null, so the original
	// "if(audioTrack!=null)" check was dead code and is removed.
	audioTrack.play();
}
 
開發者ID:Becavalier,項目名稱:QRDataTransfer-Android,代碼行數:25,代碼來源:AudioTrackManager.java

示例6: createAudioTrack

import android.media.AudioTrack; //導入方法依賴的package包/類
/**
 * Creates and starts a streaming AudioTrack for 16-bit PCM at the given
 * sample rate, sized to the platform's minimum buffer requirement.
 */
private AudioTrack createAudioTrack(int sampleRate) {
    final int encoding = AudioFormat.ENCODING_PCM_16BIT;
    // Ask the platform for the smallest buffer that works for this configuration.
    final int minBufferSize = getMinBufferSize(sampleRate, mChannelConfig, encoding);
    Log.d(TAG, "Decoder-AudioTrack-minBufferSize=" + minBufferSize);
    final AudioTrack track = new AudioTrack(mStreamType,
            sampleRate,
            mChannelConfig,
            encoding,
            minBufferSize,
            AudioTrack.MODE_STREAM);
    track.play();
    return track;
}
 
開發者ID:dueros,項目名稱:dcs-sdk-java,代碼行數:16,代碼來源:AudioTrackPlayerImpl.java

示例7: AndroidAudioPlayer

import android.media.AudioTrack; //導入方法依賴的package包/類
/**
 * Constructs a new AndroidAudioPlayer from an audio format, default buffer size and stream type.
 *
 * @param audioFormat The audio format of the stream that this AndroidAudioPlayer will process.
 *                    This can only be 1 channel, PCM 16 bit.
 * @param bufferSizeInSamples  The requested buffer size in samples.
 * @param streamType  The type of audio stream that the internal AudioTrack should use. For
 *                    example, {@link AudioManager#STREAM_MUSIC}.
 * @throws IllegalArgumentException if audioFormat is not valid or if the requested buffer size is invalid.
 * @see AudioTrack
 */
public AndroidAudioPlayer(TarsosDSPAudioFormat audioFormat, int bufferSizeInSamples, int streamType) {
    // Only mono input is supported.
    if (audioFormat.getChannels() != 1) {
        throw new IllegalArgumentException("TarsosDSP only supports mono audio channel count: " + audioFormat.getChannels());
    }

    int sampleRate = (int) audioFormat.getSampleRate();

    // Convert the requested size from samples to bytes (2 bytes per 16-bit sample).
    int bytesPerSample = audioFormat.getSampleSizeInBits() / 8;
    int bufferSizeInBytes = bufferSizeInSamples * bytesPerSample;

    // AudioTrack refuses buffers smaller than getMinBufferSize(): in
    // MODE_STREAM a too-small internal buffer causes an initialization
    // failure, so reject the request up front and report the minimum
    // expressed in samples.
    int minBufferSizeInBytes = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO,  AudioFormat.ENCODING_PCM_16BIT);
    if (minBufferSizeInBytes > bufferSizeInBytes) {
        throw new IllegalArgumentException("The buffer size should be at least " + (minBufferSizeInBytes/(audioFormat.getSampleSizeInBits()/8)) + " (samples) according to  AudioTrack.getMinBufferSize().");
    }

    //http://developer.android.com/reference/android/media/AudioTrack.html#AudioTrack(int, int, int, int, int, int)
    audioTrack = new AudioTrack(streamType, sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);

    audioTrack.play();
}
 
開發者ID:gstraube,項目名稱:cythara,代碼行數:40,代碼來源:AndroidAudioPlayer.java

示例8: audioDecoderTest

import android.media.AudioTrack; //導入方法依賴的package包/類
/**
 * Prepares an AudioDecoder for the given file, wires up a stereo streaming
 * AudioTrack at the file's sample rate, and starts decoding.
 */
public void audioDecoderTest(String filePath) throws IOException {
  AudioDecoder audioDecoderThread = new AudioDecoder(this, this);
  audioDecoderThread.initExtractor(filePath);
  audioDecoderThread.prepareAudio();

  // Size the track at the platform minimum for the decoded stream's rate.
  int sampleRate = audioDecoderThread.getSampleRate();
  int buffsize = AudioTrack.getMinBufferSize(sampleRate,
      AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
  audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
      AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, buffsize,
      AudioTrack.MODE_STREAM);
  audioTrack.play();
  audioDecoderThread.start();
}
 
開發者ID:pedroSG94,項目名稱:rtmp-rtsp-stream-client-java,代碼行數:14,代碼來源:DecodersTest.java

示例9: init

import android.media.AudioTrack; //導入方法依賴的package包/類
@Override
public void init(int samples) {
    // NOTE(review): the `samples` parameter is unused here; presumably the
    // interface requires it — confirm against the declaring type.
    // Allocate 2.5 seconds worth of 16-bit samples; the track's byte
    // capacity below is length * 2 (two bytes per sample).
    mAudioBuffer = new short[(5 * (int) mSampleRate) / 2];
    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            (int) mSampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, mAudioBuffer.length * 2,
            AudioTrack.MODE_STREAM);
    mAudioTrack.play();
}
 
開發者ID:olgamiller,項目名稱:SSTVEncoder2,代碼行數:10,代碼來源:AudioOutput.java

示例10: createCodec

import android.media.AudioTrack; //導入方法依賴的package包/類
@Override
protected MediaCodec createCodec(final MediaExtractor media_extractor, final int track_index, final MediaFormat format)
	throws IOException, IllegalArgumentException {

	// Delegate codec creation to the superclass, then attach an AudioTrack
	// for playing the decoded PCM output.
	final MediaCodec codec = super.createCodec(media_extractor, track_index, format);
	if (codec != null) {
		// Size the temporary output byte buffer from the codec's first output
		// buffer; fall back to the configured input buffer size when the
		// reported capacity is not positive.
		final ByteBuffer[] buffers = codec.getOutputBuffers();
		int sz = buffers[0].capacity();
		if (sz <= 0)
			sz = mAudioInputBufSize;
		if (DEBUG) Log.v(TAG, "AudioOutputBufSize:" + sz);
		mAudioOutTempBuf = new byte[sz];
		try {
			// Streaming 16-bit PCM track matching the stream's sample rate and
			// channel count (mono or stereo).
			mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
				mAudioSampleRate,
				(mAudioChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO),
				AudioFormat.ENCODING_PCM_16BIT,
				mAudioInputBufSize,
				AudioTrack.MODE_STREAM);
			mAudioTrack.play();
		} catch (final Exception e) {
			// If the track cannot be created/started, release any partially
			// constructed instance before rethrowing to the caller.
			Log.e(TAG, "failed to start audio track playing", e);
			if (mAudioTrack != null) {
				mAudioTrack.release();
				mAudioTrack = null;
			}
			throw e;
		}
	}
	return codec;
}
 
開發者ID:saki4510t,項目名稱:libcommon,代碼行數:32,代碼來源:MediaAudioDecoder.java

示例11: start

import android.media.AudioTrack; //導入方法依賴的package包/類
/**
 * Queries the minimum stereo 16-bit buffer size for the frame rate, inflates
 * it by 1.5x, and starts a streaming AudioTrack with that buffer.
 */
@Override
public void start()    {
    minBufferSize = AudioTrack.getMinBufferSize(frameRate,
            AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT);
    System.out.println("Audio minBufferSize = " + minBufferSize);
    // 1.5x the minimum, rounded down to a multiple of 4 bytes
    // (one stereo 16-bit frame).
    bufferSize = (3 * (minBufferSize / 2)) & ~3;
    System.out.println("Audio bufferSize = " + bufferSize);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            frameRate,
            AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferSize,
            AudioTrack.MODE_STREAM);
    audioTrack.play();
}
 
開發者ID:google,項目名稱:science-journal,代碼行數:16,代碼來源:AndroidAudioForJSyn.java

示例12: encodeMessage

import android.media.AudioTrack; //導入方法依賴的package包/類
/**
 * Encodes the given value with error detection, FSK-modulates it into audio
 * samples, and plays the resulting tone through a streaming AudioTrack.
 *
 * @param value the message value to transmit.
 */
private void encodeMessage(int value) {
	// audio initialization
	int AUDIO_BUFFER_SIZE = 4096;
	int minBufferSize = AudioTrack.getMinBufferSize(AUDIO_SAMPLE_FREQ,
			AudioFormat.CHANNEL_CONFIGURATION_MONO,
			AudioFormat.ENCODING_PCM_16BIT);
	// Never go below the platform's minimum buffer size.
	if (AUDIO_BUFFER_SIZE < minBufferSize)
		AUDIO_BUFFER_SIZE = minBufferSize;
	AudioTrack aT = new AudioTrack(AudioManager.STREAM_MUSIC,
			AUDIO_SAMPLE_FREQ, AudioFormat.CHANNEL_CONFIGURATION_MONO,
			AudioFormat.ENCODING_PCM_16BIT, AUDIO_BUFFER_SIZE,
			AudioTrack.MODE_STREAM);
	aT.play();

	// error detection encoding
	Log.i("TAG", "encodeMessage() value=" + value);
	value = ErrorDetection.createMessage(value);
	Log.i("TAG", "encodeMessage() message=" + value);
	// sound encoding
	double[] sound = FSKModule.encode(value);

	// BUG FIX: the track is ENCODING_PCM_16BIT, so each sample must be
	// written as a 2-byte little-endian short. The original allocated
	// 4 bytes per sample and wrote 32-bit ints (putInt), which the track
	// then misinterpreted as pairs of 16-bit samples.
	ByteBuffer buf = ByteBuffer.allocate(2 * sound.length);
	buf.order(ByteOrder.LITTLE_ENDIAN);
	for (int i = 0; i < sound.length; i++) {
		buf.putShort((short) sound[i]);
	}
	byte[] tone = buf.array();
	// play message (the unused write-count local was removed)
	aT.write(tone, 0, tone.length);
	aT.stop();
	aT.release();
}
 
開發者ID:quake0day,項目名稱:Jigglypuff,代碼行數:34,代碼來源:SenderActivity.java

示例13: playSound

import android.media.AudioTrack; //導入方法依賴的package包/類
// Plays the pre-generated tone held in generatedSnd through a one-shot
// static AudioTrack.
// NOTE(review): the buffer-size argument here is numSamples, but the
// AudioTrack constructor expects a size in BYTES; for 16-bit PCM that is
// numSamples * 2. If generatedSnd is a byte[] of length 2 * numSamples,
// the write below exceeds the track's capacity — confirm against the
// field definitions elsewhere in this class.
void playSound() {
    AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            sampleRate, AudioFormat.CHANNEL_CONFIGURATION_MONO,
            AudioFormat.ENCODING_PCM_16BIT, numSamples,
            AudioTrack.MODE_STATIC);
    audioTrack.write(generatedSnd, 0, generatedSnd.length);
    audioTrack.play();
}
 
開發者ID:zh-h,項目名稱:IoTApp,代碼行數:9,代碼來源:PlaySoundActivity.java

示例14: initPlayer

import android.media.AudioTrack; //導入方法依賴的package包/類
/**
 * Creates the streaming AudioTrack sized to the platform minimum buffer and
 * starts playback. Synchronized so only one track is set up at a time.
 */
private void initPlayer() {
  synchronized (this) {
    final int minSize = AudioTrack.getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
        minSize, AudioTrack.MODE_STREAM);
    if (audioTrack != null) {
      audioTrack.play();
    }
  }
}
 
開發者ID:watson-developer-cloud,項目名稱:android-sdk,代碼行數:13,代碼來源:StreamPlayer.java

示例15: initPlayer

import android.media.AudioTrack; //導入方法依賴的package包/類
/**
 * Sets up a mono 16-bit streaming AudioTrack at the minimum platform buffer
 * size and begins playing. Guarded by this instance's monitor.
 */
private void initPlayer() {
  synchronized (this) {
    final int minBytes = AudioTrack.getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
        sampleRate,
        AudioFormat.CHANNEL_OUT_MONO,
        AudioFormat.ENCODING_PCM_16BIT,
        minBytes,
        AudioTrack.MODE_STREAM);
    if (audioTrack != null) {
      audioTrack.play();
    }
  }
}
 
開發者ID:blumareks,項目名稱:2016EduHackathon_SanFrancisco,代碼行數:17,代碼來源:StreamPlayer.java


注:本文中的android.media.AudioTrack.play方法示例由純淨天空整理自Github/MSDocs等開源代碼及文檔管理平台,相關代碼片段篩選自各路編程大神貢獻的開源項目,源碼版權歸原作者所有,傳播和使用請參考對應項目的License;未經允許,請勿轉載。