

Java AudioTrack.play Method Code Examples

This article collects typical usage examples of the AudioTrack.play method from the Java class android.media.AudioTrack. If you are wondering what AudioTrack.play does, how to call it, or what it looks like in real code, the hand-picked examples below should help. You can also explore further usage examples of the containing class, android.media.AudioTrack.


The 15 code examples of AudioTrack.play shown below are sorted by popularity by default.
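All of the examples follow the same basic pattern: query AudioTrack.getMinBufferSize(), construct the track, call play(), and feed it PCM data with write(). As a point of reference, here is a minimal, self-contained sketch of that pattern for a mono 16-bit sine tone in streaming mode; the class name, sample rate, and tone parameters are illustrative assumptions and are not taken from any of the projects below.

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

public class TonePlayerSketch {
    // Assumed sample rate for illustration only.
    private static final int SAMPLE_RATE = 44100;

    public static void playTone(double freqHz, double seconds) {
        // minimum buffer size is reported in bytes
        int minBufferSize = AudioTrack.getMinBufferSize(SAMPLE_RATE,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
        AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, SAMPLE_RATE,
                AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
                minBufferSize, AudioTrack.MODE_STREAM);
        track.play();                                   // in MODE_STREAM, playback starts once data arrives

        int totalSamples = (int) (seconds * SAMPLE_RATE);
        short[] buffer = new short[minBufferSize / 2];  // 16-bit samples: 2 bytes each
        double phase = 0.0;
        double step = 2.0 * Math.PI * freqHz / SAMPLE_RATE;
        int written = 0;
        while (written < totalSamples) {
            int n = Math.min(buffer.length, totalSamples - written);
            for (int i = 0; i < n; i++) {
                buffer[i] = (short) (Short.MAX_VALUE * 0.5 * Math.sin(phase));
                phase += step;
            }
            track.write(buffer, 0, n);                  // blocks until the chunk is queued
            written += n;
        }
        track.stop();
        track.release();                                // always release the native resources
    }
}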

Example 1: playSound

import android.media.AudioTrack; // import the package/class this method depends on
/**
 * This method plays the sound data in the specified buffer.
 *
 * @param buffer specifies the sound data buffer.
 */
public void playSound(short[] buffer)
{
    final String funcName = "playSound";

    if (debugEnabled)
    {
        dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
        dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API);
    }

    audioTrack = new AudioTrack(
            AudioManager.STREAM_MUSIC,
            sampleRate,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            buffer.length*2,    //buffer length in bytes
            AudioTrack.MODE_STATIC);
    audioTrack.write(buffer, 0, buffer.length);
    audioTrack.setNotificationMarkerPosition(buffer.length);
    audioTrack.setPlaybackPositionUpdateListener(this);
    audioTrack.play();
    playing = true;
}
 
Developer: trc492, Project: Ftc2018RelicRecovery, Lines: 29, Source: FtcAndroidTone.java

Example 2: onCreate

import android.media.AudioTrack; // import the package/class this method depends on
@Override
public void onCreate() {
    super.onCreate();
    mHandler = new Handler();
    fetchAccessToken();

    int outputBufferSize = AudioTrack.getMinBufferSize(16000,
            AudioFormat.CHANNEL_IN_STEREO,
            AudioFormat.ENCODING_PCM_16BIT);

    try {
        mAudioTrack = new AudioTrack(AudioManager.USE_DEFAULT_STREAM_TYPE, 16000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, outputBufferSize, AudioTrack.MODE_STREAM);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            mAudioTrack.setVolume(DEFAULT_VOLUME);
        }
        mAudioTrack.play();
    }catch (Exception e){
        e.printStackTrace();
    }
}
 
Developer: hsavaliya, Project: GoogleAssistantSDK, Lines: 21, Source: SpeechService.java

Example 3: run

import android.media.AudioTrack; // import the package/class this method depends on
@Override
public void run() {
    super.run();
    isRunning = true;
    int buffsize = AudioTrack.getMinBufferSize(sr,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    // create an audiotrack object
    AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            sr, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, buffsize,
            AudioTrack.MODE_STREAM);

    short[] samples = new short[buffsize];
    int amp = 10000;
    double twopi = 8.*Math.atan(1.);
    double ph = 0.0;

    // start audio
    audioTrack.play();

    // synthesis loop
    while(isRunning){
        double fr = tuneFreq;
        for(int i=0; i < buffsize; i++){
            samples[i] = (short) (amp*Math.sin(ph));
            ph += twopi*fr/sr;
        }
        audioTrack.write(samples, 0, buffsize);
    }
    audioTrack.stop();
    audioTrack.release();
}
 
Developer: karlotoy, Project: perfectTune, Lines: 33, Source: TuneThread.java

Example 4: PWave

import android.media.AudioTrack; // import the package/class this method depends on
public PWave(AppRunner appRunner) {
    super(appRunner);
    appRunner.whatIsRunning.add(this);

    // set the buffer size
    buffsize = AudioTrack.getMinBufferSize(mSampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

    samples = new short[buffsize];

    // create an audiotrack object
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            mSampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, buffsize,
            AudioTrack.MODE_STREAM);

    // start audio
    audioTrack.play();
}
 
Developer: victordiaz, Project: phonk, Lines: 20, Source: PWave.java

Example 5: start

import android.media.AudioTrack; // import the package/class this method depends on
/**
 * Set the playback frequency.
 * @param rate frequency in Hz
 */
@SuppressWarnings("deprecation")
public void start(int rate){
	stop();
	if(rate>0){
		Hz=rate;
		waveLen = RATE / Hz;
		length = waveLen * Hz;
		audioTrack=new AudioTrack(AudioManager.STREAM_MUSIC, RATE,
				AudioFormat.CHANNEL_CONFIGURATION_STEREO, // CHANNEL_CONFIGURATION_MONO,
				AudioFormat.ENCODING_PCM_8BIT, length, AudioTrack.MODE_STREAM);
		// generate the sine wave
		wave=SinWave.sin(wave, waveLen, length);
		if(audioTrack!=null){
			audioTrack.play();
		}
	}else{
		return;
	}
	
}
 
Developer: Becavalier, Project: QRDataTransfer-Android, Lines: 25, Source: AudioTrackManager.java

Example 6: createAudioTrack

import android.media.AudioTrack; // import the package/class this method depends on
private AudioTrack createAudioTrack(int sampleRate) {
    int encoding = AudioFormat.ENCODING_PCM_16BIT;
    // get a buffer size that satisfies the minimum requirement
    int minBufferSize = getMinBufferSize(sampleRate, mChannelConfig, encoding);
    Log.d(TAG, "Decoder-AudioTrack-minBufferSize=" + minBufferSize);
    AudioTrack audioTrack =
            new AudioTrack(mStreamType,
                    sampleRate,
                    mChannelConfig,
                    encoding,
                    minBufferSize,
                    AudioTrack.MODE_STREAM);
    audioTrack.play();
    return audioTrack;
}
 
Developer: dueros, Project: dcs-sdk-java, Lines: 16, Source: AudioTrackPlayerImpl.java

Example 7: AndroidAudioPlayer

import android.media.AudioTrack; // import the package/class this method depends on
/**
 * Constructs a new AndroidAudioPlayer from an audio format, default buffer size and stream type.
 *
 * @param audioFormat The audio format of the stream that this AndroidAudioPlayer will process.
 *                    This can only be 1 channel, PCM 16 bit.
 * @param bufferSizeInSamples  The requested buffer size in samples.
 * @param streamType  The type of audio stream that the internal AudioTrack should use. For
 *                    example, {@link AudioManager#STREAM_MUSIC}.
 * @throws IllegalArgumentException if audioFormat is not valid or if the requested buffer size is invalid.
 * @see AudioTrack
 */
public AndroidAudioPlayer(TarsosDSPAudioFormat audioFormat, int bufferSizeInSamples, int streamType) {
    if (audioFormat.getChannels() != 1) {
        throw new IllegalArgumentException("TarsosDSP only supports mono audio channel count: " + audioFormat.getChannels());
    }

    // The requested sample rate
    int sampleRate = (int) audioFormat.getSampleRate();

    //The buffer size in bytes is twice the buffer size expressed in samples if 16bit samples are used:
    int bufferSizeInBytes = bufferSizeInSamples * audioFormat.getSampleSizeInBits()/8;

    // From the Android API about getMinBufferSize():
    // The total size (in bytes) of the internal buffer where audio data is read from for playback.
    // If track's creation mode is MODE_STREAM, you can write data into this buffer in chunks less than or equal to this size,
    // and it is typical to use chunks of 1/2 of the total size to permit double-buffering. If the track's creation mode is MODE_STATIC,
    // this is the maximum length sample, or audio clip, that can be played by this instance. See getMinBufferSize(int, int, int) to determine
    // the minimum required buffer size for the successful creation of an AudioTrack instance in streaming mode. Using values smaller
    // than getMinBufferSize() will result in an initialization failure.
    int minBufferSizeInBytes = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO,  AudioFormat.ENCODING_PCM_16BIT);
    if(minBufferSizeInBytes > bufferSizeInBytes){
        throw new IllegalArgumentException("The buffer size should be at least " + (minBufferSizeInBytes/(audioFormat.getSampleSizeInBits()/8)) + " (samples) according to  AudioTrack.getMinBufferSize().");
    }

    //http://developer.android.com/reference/android/media/AudioTrack.html#AudioTrack(int, int, int, int, int, int)
    audioTrack = new AudioTrack(streamType, sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes,AudioTrack.MODE_STREAM);

    audioTrack.play();
}
 
Developer: gstraube, Project: cythara, Lines: 40, Source: AndroidAudioPlayer.java

Example 8: audioDecoderTest

import android.media.AudioTrack; // import the package/class this method depends on
public void audioDecoderTest(String filePath) throws IOException {
  AudioDecoder audioDecoderThread = new AudioDecoder(this, this);
  audioDecoderThread.initExtractor(filePath);
  audioDecoderThread.prepareAudio();

  int buffsize = AudioTrack.getMinBufferSize(audioDecoderThread.getSampleRate(),
      AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
  audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, audioDecoderThread.getSampleRate(),
      AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT, buffsize,
      AudioTrack.MODE_STREAM);
  audioTrack.play();
  audioDecoderThread.start();
}
 
Developer: pedroSG94, Project: rtmp-rtsp-stream-client-java, Lines: 14, Source: DecodersTest.java

Example 9: init

import android.media.AudioTrack; // import the package/class this method depends on
@Override
public void init(int samples) {
    mAudioBuffer = new short[(5 * (int) mSampleRate) / 2]; // 2.5 seconds of buffer
    mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            (int) mSampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, mAudioBuffer.length * 2,
            AudioTrack.MODE_STREAM);
    mAudioTrack.play();
}
 
Developer: olgamiller, Project: SSTVEncoder2, Lines: 10, Source: AudioOutput.java

Example 10: createCodec

import android.media.AudioTrack; // import the package/class this method depends on
@Override
protected MediaCodec createCodec(final MediaExtractor media_extractor, final int track_index, final MediaFormat format)
	throws IOException, IllegalArgumentException {

	final MediaCodec codec = super.createCodec(media_extractor, track_index, format);
	if (codec != null) {
		final ByteBuffer[] buffers = codec.getOutputBuffers();
		int sz = buffers[0].capacity();
		if (sz <= 0)
			sz = mAudioInputBufSize;
		if (DEBUG) Log.v(TAG, "AudioOutputBufSize:" + sz);
		mAudioOutTempBuf = new byte[sz];
		try {
			mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
				mAudioSampleRate,
				(mAudioChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO),
				AudioFormat.ENCODING_PCM_16BIT,
				mAudioInputBufSize,
				AudioTrack.MODE_STREAM);
			mAudioTrack.play();
		} catch (final Exception e) {
			Log.e(TAG, "failed to start audio track playing", e);
			if (mAudioTrack != null) {
				mAudioTrack.release();
				mAudioTrack = null;
			}
			throw e;
		}
	}
	return codec;
}
 
Developer: saki4510t, Project: libcommon, Lines: 32, Source: MediaAudioDecoder.java

Example 11: start

import android.media.AudioTrack; // import the package/class this method depends on
@Override
public void start()    {
    minBufferSize = AudioTrack.getMinBufferSize(frameRate,
            AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT);
    System.out.println("Audio minBufferSize = " + minBufferSize);
    bufferSize = (3 * (minBufferSize / 2)) & ~3;
    System.out.println("Audio bufferSize = " + bufferSize);
    audioTrack = new AudioTrack(
            AudioManager.STREAM_MUSIC, frameRate,
            AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_16BIT, bufferSize,
            AudioTrack.MODE_STREAM);
    audioTrack.play();
}
 
Developer: google, Project: science-journal, Lines: 16, Source: AndroidAudioForJSyn.java

Example 12: encodeMessage

import android.media.AudioTrack; // import the package/class this method depends on
private void encodeMessage(int value) {
	// audio initialization
	int AUDIO_BUFFER_SIZE = 4096;
	int minBufferSize = AudioTrack.getMinBufferSize(AUDIO_SAMPLE_FREQ,
			AudioFormat.CHANNEL_CONFIGURATION_MONO,
			AudioFormat.ENCODING_PCM_16BIT);
	if (AUDIO_BUFFER_SIZE < minBufferSize)
		AUDIO_BUFFER_SIZE = minBufferSize;
	AudioTrack aT = new AudioTrack(AudioManager.STREAM_MUSIC,
			AUDIO_SAMPLE_FREQ, AudioFormat.CHANNEL_CONFIGURATION_MONO,
			AudioFormat.ENCODING_PCM_16BIT, AUDIO_BUFFER_SIZE,
			AudioTrack.MODE_STREAM);
	aT.play();

	// error detection encoding
	Log.i("TAG", "encodeMessage() value=" + value);
	value = ErrorDetection.createMessage(value);
	Log.i("TAG", "encodeMessage() message=" + value);
	// sound encoding
	double[] sound = FSKModule.encode(value);

	// pack each sample as a 16-bit little-endian value to match ENCODING_PCM_16BIT (2 bytes per sample)
	ByteBuffer buf = ByteBuffer.allocate(2 * sound.length);
	buf.order(ByteOrder.LITTLE_ENDIAN);
	for (int i = 0; i < sound.length; i++) {
		buf.putShort((short) sound[i]);
	}
	byte[] tone = buf.array();
	// play message
	int nBytes = aT.write(tone, 0, tone.length);
	aT.stop();
	aT.release();
}
 
Developer: quake0day, Project: Jigglypuff, Lines: 34, Source: SenderActivity.java

Example 13: playSound

import android.media.AudioTrack; // import the package/class this method depends on
void playSound() {
    AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            sampleRate, AudioFormat.CHANNEL_CONFIGURATION_MONO,
            AudioFormat.ENCODING_PCM_16BIT, numSamples,
            AudioTrack.MODE_STATIC);
    audioTrack.write(generatedSnd, 0, generatedSnd.length);
    audioTrack.play();
}
 
Developer: zh-h, Project: IoTApp, Lines: 9, Source: PlaySoundActivity.java

Example 14: initPlayer

import android.media.AudioTrack; // import the package/class this method depends on
/**
 * Initialize AudioTrack by getting buffersize
 */
private void initPlayer() {
  synchronized (this) {
    int bs = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, bs, AudioTrack.MODE_STREAM);
    if (audioTrack != null)
      audioTrack.play();
  }
}
 
Developer: watson-developer-cloud, Project: android-sdk, Lines: 13, Source: StreamPlayer.java

Example 15: initPlayer

import android.media.AudioTrack; // import the package/class this method depends on
/**
 * Initialize AudioTrack by getting buffersize
 */
private void initPlayer() {
  synchronized (this) {
    int bs = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
        sampleRate,
        AudioFormat.CHANNEL_OUT_MONO,
        AudioFormat.ENCODING_PCM_16BIT,
        bs,
        AudioTrack.MODE_STREAM);
    if (audioTrack != null)
      audioTrack.play();
  }
}
 
Developer: blumareks, Project: 2016EduHackathon_SanFrancisco, Lines: 17, Source: StreamPlayer.java


Note: The android.media.AudioTrack.play examples in this article were compiled by 纯净天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets are selected from open-source projects contributed by their respective authors, and copyright of the source code remains with the original authors. Please follow each project's license when distributing or using the code; do not reproduce without permission.