

Java AudioFormat Class Code Examples

This article collects typical usage examples of the Java class android.media.AudioFormat. If you are wondering how the AudioFormat class is used in practice, or are looking for concrete examples of it, the selected code samples below should help.


The AudioFormat class belongs to the android.media package. Fifteen code examples of the class are shown below, sorted by popularity by default.
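Across these examples, AudioFormat mostly supplies the channel-configuration and PCM-encoding constants that are passed to AudioTrack and AudioRecord. As a quick orientation, here is a minimal self-contained sketch of that common pattern; the 44.1 kHz rate, class name, and STREAM_MUSIC stream type are illustrative assumptions, not taken from any example below.

import android.media.AudioFormat;
import android.media.AudioManager;
import android.media.AudioTrack;

public class MonoPcmTrackFactory {
    /** Creates a streaming 16-bit mono PCM AudioTrack at the given sample rate. */
    public static AudioTrack create(int sampleRate) {
        // AudioFormat provides the channel-layout and sample-encoding constants.
        int minBufferSize = AudioTrack.getMinBufferSize(
                sampleRate,
                AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
        return new AudioTrack(
                AudioManager.STREAM_MUSIC,
                sampleRate,
                AudioFormat.CHANNEL_OUT_MONO,
                AudioFormat.ENCODING_PCM_16BIT,
                minBufferSize,
                AudioTrack.MODE_STREAM);
    }
}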

Example 1: playSound

import android.media.AudioFormat; // import the required package/class
/**
 * This method plays the sound data in the specified buffer.
 *
 * @param buffer specifies the sound data buffer.
 */
public void playSound(short[] buffer)
{
    final String funcName = "playSound";

    if (debugEnabled)
    {
        dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
        dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API);
    }

    audioTrack = new AudioTrack(
            AudioManager.STREAM_MUSIC,
            sampleRate,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            buffer.length*2,    //buffer length in bytes
            AudioTrack.MODE_STATIC);
    audioTrack.write(buffer, 0, buffer.length);
    audioTrack.setNotificationMarkerPosition(buffer.length);
    audioTrack.setPlaybackPositionUpdateListener(this);
    audioTrack.play();
    playing = true;
}
 
Developer ID: trc492, Project: Ftc2018RelicRecovery, Lines: 29, Source: FtcAndroidTone.java
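For context, a caller might fill the buffer with a short sine-wave tone before handing it to playSound. The snippet below is a hypothetical sketch only: the androidTone instance, the 44100 Hz rate, and the 440 Hz frequency are assumptions, not part of the original project.

// Hypothetical caller: synthesize half a second of a 440 Hz tone as 16-bit PCM
// samples and hand it to playSound(). 'androidTone' stands for an instance of the class above.
int sampleRate = 44100;   // assumed to match the tone object's configured sample rate
short[] buffer = new short[sampleRate / 2];
for (int i = 0; i < buffer.length; i++) {
    buffer[i] = (short) (0.8 * Short.MAX_VALUE * Math.sin(2.0 * Math.PI * 440.0 * i / sampleRate));
}
androidTone.playSound(buffer);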

Example 2: onCreate

import android.media.AudioFormat; // import the required package/class
@Override
public void onCreate() {
    super.onCreate();
    mHandler = new Handler();
    fetchAccessToken();

    int outputBufferSize = AudioTrack.getMinBufferSize(16000,
            AudioFormat.CHANNEL_IN_STEREO,
            AudioFormat.ENCODING_PCM_16BIT);

    try {
        mAudioTrack = new AudioTrack(AudioManager.USE_DEFAULT_STREAM_TYPE, 16000, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, outputBufferSize, AudioTrack.MODE_STREAM);
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
            mAudioTrack.setVolume(DEFAULT_VOLUME);
        }
        mAudioTrack.play();
    }catch (Exception e){
        e.printStackTrace();
    }
}
 
Developer ID: hsavaliya, Project: GoogleAssistantSDK, Lines: 21, Source: SpeechService.java

Example 3: onCreate

import android.media.AudioFormat; // import the required package/class
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);

    bufferSize = AudioRecord.getMinBufferSize(16000,
            AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT);

    setButtonHandlers();
    enableButtons(false);

    inferenceInterface = new TensorFlowInferenceInterface();
    inferenceInterface.initializeTensorFlow(getAssets(), MODEL_FILE);
    // tensorFlowSample();
}
 
Developer ID: ranatrk, Project: AudioGenderIdentifier, Lines: 18, Source: MainActivity.java

Example 4: AudioSink

import android.media.AudioFormat; // import the required package/class
/**
 * Constructor. Will create a new AudioSink.
 *
 * @param packetSize	size of the incoming packets
 * @param sampleRate	sample rate of the audio signal
 */
public AudioSink (int packetSize, int sampleRate) {
	this.packetSize = packetSize;
	this.sampleRate = sampleRate;

	// Create the queues and fill the output queue with empty sample packets
	this.inputQueue = new ArrayBlockingQueue<SamplePacket>(QUEUE_SIZE);
	this.outputQueue = new ArrayBlockingQueue<SamplePacket>(QUEUE_SIZE);
	for (int i = 0; i < QUEUE_SIZE; i++)
		this.outputQueue.offer(new SamplePacket(packetSize));

	// Create an instance of the AudioTrack class:
	int bufferSize = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
	this.audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, AudioFormat.CHANNEL_OUT_MONO,
								AudioFormat.ENCODING_PCM_16BIT, bufferSize, AudioTrack.MODE_STREAM);

	// Create the audio filters:
	this.audioFilter1 = FirFilter.createLowPass(2, 1, 1, 0.1f, 0.15f, 30);
	Log.d(LOGTAG,"constructor: created audio filter 1 with " + audioFilter1.getNumberOfTaps() + " Taps.");
	this.audioFilter2 = FirFilter.createLowPass(4, 1, 1, 0.1f, 0.1f, 30);
	Log.d(LOGTAG,"constructor: created audio filter 2 with " + audioFilter2.getNumberOfTaps() + " Taps.");
	this.tmpAudioSamples = new SamplePacket(packetSize);
}
 
Developer ID: takyonxxx, Project: AndroidSdrRtlTuner, Lines: 29, Source: AudioSink.java

Example 5: init_

import android.media.AudioFormat; // import the required package/class
private void init_(boolean eccEnabled) {
    mEccEncoder = EccInstanceProvider.getEncoder(eccEnabled);
    int minBufferSizeInBytes = AudioTrack.getMinBufferSize(
            RATE,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    // 44.1kHz mono 16bit
    mAudioTrack = new AudioTrack(
            AudioManager.STREAM_MUSIC,
            RATE,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            minBufferSizeInBytes,
            AudioTrack.MODE_STREAM);
    mExecutorService = Executors.newSingleThreadExecutor();
}
 
Developer ID: egglang, Project: sonicky, Lines: 17, Source: Encoder.java

Example 6: initAudioTrack

import android.media.AudioFormat; // import the required package/class
private void initAudioTrack(int sampleRate, int channels) {
    if (sampleRate <= 0) {
        sampleRate = AUDIO_FORMAT_PCM8K;
    }
    if (channels <= 0) {
        channels = 1;
    }
    if (channels == 1) {
        mChannelConfig = AudioFormat.CHANNEL_OUT_MONO;
    } else if (channels == 2) {
        mChannelConfig = AudioFormat.CHANNEL_OUT_STEREO;
    }
    if (iCurrentQueueAudioFormat == sampleRate) {
        if (mAudioTrack == null) {
            mAudioTrack = createAudioTrack(iCurrentQueueAudioFormat);
        }
    } else {
        Log.d(TAG, "Decoder-initAudioTrack-sampleRate=" + sampleRate);
        Log.d(TAG, "Decoder-initAudioTrack-channels=" + channels);
        mAudioTrack = createAudioTrack(sampleRate);
        iCurrentQueueAudioFormat = sampleRate;
    }
}
 
Developer ID: dueros, Project: dcs-sdk-java, Lines: 24, Source: AudioTrackPlayerImpl.java
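The createAudioTrack helper referenced above is not part of this excerpt. Assuming it wires the resolved mChannelConfig into a streaming 16-bit PCM track, it might look roughly like the following sketch; the STREAM_MUSIC stream type is an assumption.

// Hypothetical sketch of the createAudioTrack() helper (not from the original source).
private AudioTrack createAudioTrack(int sampleRate) {
    int minBufferSize = AudioTrack.getMinBufferSize(
            sampleRate, mChannelConfig, AudioFormat.ENCODING_PCM_16BIT);
    return new AudioTrack(
            AudioManager.STREAM_MUSIC,        // assumed stream type
            sampleRate,
            mChannelConfig,                   // resolved by initAudioTrack() above
            AudioFormat.ENCODING_PCM_16BIT,
            minBufferSize,
            AudioTrack.MODE_STREAM);
}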

Example 7: getMinBufferSize

import android.media.AudioFormat; // import the required package/class
private int getMinBufferSize(int sampleRate, int channelConfig, int audioFormat) {
    minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);
    // Guard against IllegalArgumentException: Invalid audio buffer size
    int channelCount = 1;
    switch (channelConfig) {
        // AudioFormat.CHANNEL_CONFIGURATION_DEFAULT
        case AudioFormat.CHANNEL_OUT_DEFAULT:
        case AudioFormat.CHANNEL_OUT_MONO:
        case AudioFormat.CHANNEL_CONFIGURATION_MONO:
            channelCount = 1;
            break;
        case AudioFormat.CHANNEL_OUT_STEREO:
        case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
            channelCount = 2;
            break;
        default:
            channelCount = Integer.bitCount(channelConfig);
    }
    // Check that minBufferSize is positive and a whole number of frames; otherwise fall back to a default of 1152
    int frameSizeInBytes = channelCount * (audioFormat == AudioFormat.ENCODING_PCM_8BIT ? 1 : 2);
    if ((minBufferSize % frameSizeInBytes != 0) || (minBufferSize < 1)) {
        minBufferSize = 1152;
    }
    return minBufferSize;
}
 
Developer ID: dueros, Project: dcs-sdk-java, Lines: 26, Source: AudioTrackPlayerImpl.java

Example 8: PcmPlayer

import android.media.AudioFormat; // import the required package/class
public PcmPlayer(Context context, Handler handler) {
    this.mContext = context;
    this.audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, wBufferSize, AudioTrack.MODE_STREAM);
    this.handler = handler;
    audioTrack.setPlaybackPositionUpdateListener(this, handler);
    cacheDir = context.getExternalFilesDir(Environment.DIRECTORY_MUSIC);
}
 
Developer ID: LingjuAI, Project: AssistantBySDK, Lines: 8, Source: PcmPlayer.java

Example 9: checkMediaCodecAudioEncoderSupport

import android.media.AudioFormat; // import the required package/class
@TargetApi(MIN_API_LEVEL_AUDIO)
public static int checkMediaCodecAudioEncoderSupport(){
    if(getApiLevel()<MIN_API_LEVEL_AUDIO){
        Log.d(TAG, "checkMediaCodecAudioEncoderSupport: Min API is 16");
        return CODEC_REQ_API_NOT_SATISFIED;
    }
    final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE_AUDIO, TEST_SAMPLE_RATE, 1);
    audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, TEST_AUDIO_BIT_RATE);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    MediaCodec mediaCodec;
    try {
        mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE_AUDIO);
        mediaCodec.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        mediaCodec.start();
        mediaCodec.stop();
        mediaCodec.release();
        mediaCodec = null;
    } catch (Exception ex) {
        Log.e(TAG, "Failed on creation of codec #", ex);
        return CODEC_ERROR;
    }
    return CODEC_SUPPORTED;
}
 
Developer ID: zhangyaqiang, Project: Fatigue-Detection, Lines: 26, Source: MediaCodecUtils.java
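A caller would typically branch on the returned status constant before enabling audio capture. The brief sketch below is hypothetical: the method and constant names come from the class above, but the surrounding logic and their accessibility from the call site are assumptions.

// Hypothetical usage of the support check above.
int status = MediaCodecUtils.checkMediaCodecAudioEncoderSupport();
if (status == MediaCodecUtils.CODEC_SUPPORTED) {
    // Safe to configure the AAC audio encoder.
} else if (status == MediaCodecUtils.CODEC_REQ_API_NOT_SATISFIED) {
    // Below API 16: disable audio encoding or use another path.
} else {
    // MediaCodecUtils.CODEC_ERROR: encoder creation failed on this device.
}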

Example 10: AudioEncoderCore

import android.media.AudioFormat; // import the required package/class
public AudioEncoderCore(MMediaMuxer MMediaMuxer) throws IOException {
    super(MMediaMuxer);
    final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
    audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
    // audioFormat.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
    // audioFormat.setLong(MediaFormat.KEY_DURATION, (long) durationInMs);
    if (VERBOSE) Log.i(TAG, "format: " + audioFormat);
    mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
    mEncoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mEncoder.start();
    if (mAudioThread == null) {
        mAudioThread = new AudioThread();
        mAudioThread.start();
        capturing = true;
        stopped = false;
    }
}
 
Developer ID: zhangyaqiang, Project: Fatigue-Detection, Lines: 21, Source: AudioEncoderCore.java

Example 11: findAudioRecord

import android.media.AudioFormat; // import the required package/class
public AudioRecord findAudioRecord() {

    for (int rate : mSampleRates) {
        for (short audioFormat : new short[] { AudioFormat.ENCODING_PCM_16BIT }) {
            for (short channelConfig : new short[] { AudioFormat.CHANNEL_IN_MONO }) {
                try {
                    Log.d("C.TAG", "Attempting rate " + rate + "Hz, bits: " + audioFormat
                            + ", channel: " + channelConfig);
                    int bufferSize = AudioRecord.getMinBufferSize(rate,
                            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);

                    if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
                        // check if we can instantiate and have a success
                        AudioRecord recorder = new AudioRecord(AudioSource.MIC, DEFAULT_RATE,
                                channelConfig, audioFormat, bufferSize);

                        if (recorder.getState() == AudioRecord.STATE_INITIALIZED)
                            return recorder;
                    }
                } catch (Exception e) {
                    Log.e("C.TAG", rate + "Exception, keep trying.", e);
                }
            }
        }
    }
    return null;
}
 
Developer ID: n8fr8, Project: LittleBitLouder, Lines: 26, Source: TOne.java
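Once a working AudioRecord has been found, recording follows the usual start/read/stop cycle. The sketch below is a hypothetical caller; the buffer size and the keepRecording stop flag are assumptions, not part of the original project.

// Hypothetical caller of findAudioRecord() above.
AudioRecord recorder = findAudioRecord();
if (recorder != null) {
    short[] pcm = new short[1024];
    recorder.startRecording();
    while (keepRecording) {                       // assumed stop flag
        int read = recorder.read(pcm, 0, pcm.length);
        // ... process 'read' samples of 16-bit mono PCM ...
    }
    recorder.stop();
    recorder.release();
}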

Example 12: createAudioTrack

import android.media.AudioFormat; // import the required package/class
public AudioTrack createAudioTrack(int frameRate) {
    int minBufferSizeBytes = AudioTrack.getMinBufferSize(frameRate,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_FLOAT);
    Log.i(TAG, "AudioTrack.minBufferSize = " + minBufferSizeBytes
            + " bytes = " + (minBufferSizeBytes / BYTES_PER_FRAME)
            + " frames");
    int bufferSize = 8 * minBufferSizeBytes / 8;
    int outputBufferSizeFrames = bufferSize / BYTES_PER_FRAME;
    Log.i(TAG, "actual bufferSize = " + bufferSize + " bytes = "
            + outputBufferSizeFrames + " frames");

    AudioTrack player = new AudioTrack(AudioManager.STREAM_MUSIC,
            mFrameRate, AudioFormat.CHANNEL_OUT_STEREO,
            AudioFormat.ENCODING_PCM_FLOAT, bufferSize,
            AudioTrack.MODE_STREAM);
    Log.i(TAG, "created AudioTrack");
    return player;
}
 
Developer ID: sdrausty, Project: buildAPKsSamples, Lines: 19, Source: SimpleAudioOutput.java

Example 13: getInstanse

import android.media.AudioFormat; // import the required package/class
@SuppressWarnings("deprecation")
public static ExtAudioRecorder getInstanse(Boolean recordingCompressed) {
	ExtAudioRecorder result = null;

	if (recordingCompressed) {
		result = new ExtAudioRecorder(false, AudioSource.MIC,
				sampleRates[3], AudioFormat.CHANNEL_CONFIGURATION_MONO,
				AudioFormat.ENCODING_PCM_16BIT);
	} else {
		int i = 0;
		do {
			result = new ExtAudioRecorder(true, AudioSource.MIC,
					sampleRates[3], AudioFormat.CHANNEL_CONFIGURATION_MONO,
					AudioFormat.ENCODING_PCM_16BIT);

		} while ((++i < sampleRates.length)
				& !(result.getState() == ExtAudioRecorder.State.INITIALIZING));
	}
	return result;
}
 
Developer ID: fengdongfei, Project: CXJPadProject, Lines: 21, Source: ExtAudioRecorder.java

Example 14: run

import android.media.AudioFormat; // import the required package/class
@Override
public void run() {
    super.run();
    isRunning = true;
    int buffsize = AudioTrack.getMinBufferSize(sr,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    // create an audiotrack object
    AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            sr, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, buffsize,
            AudioTrack.MODE_STREAM);

    short samples[] = new short[buffsize];
    int amp = 10000;
    double twopi = 8.*Math.atan(1.);
    double ph = 0.0;

    // start audio
    audioTrack.play();

    // synthesis loop
    while(isRunning){
        double fr = tuneFreq;
        for(int i=0; i < buffsize; i++){
            samples[i] = (short) (amp*Math.sin(ph));
            ph += twopi*fr/sr;
        }
        audioTrack.write(samples, 0, buffsize);
    }
    audioTrack.stop();
    audioTrack.release();
}
 
Developer ID: karlotoy, Project: perfectTune, Lines: 33, Source: TuneThread.java

Example 15: PWave

import android.media.AudioFormat; // import the required package/class
public PWave(AppRunner appRunner) {
    super(appRunner);
    appRunner.whatIsRunning.add(this);

    // set the buffer size
    buffsize = AudioTrack.getMinBufferSize(mSampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

    samples = new short[buffsize];

    // create an audiotrack object
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            mSampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, buffsize,
            AudioTrack.MODE_STREAM);

    // start audio
    audioTrack.play();
}
 
Developer ID: victordiaz, Project: phonk, Lines: 20, Source: PWave.java


Note: The android.media.AudioFormat class examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are taken from open-source projects contributed by their respective authors, and copyright of the source code remains with those authors. Please consult each project's license before using or redistributing the code; do not republish without permission.