

Java AudioFormat.CHANNEL_CONFIGURATION_MONO Field Code Examples

This article compiles typical usage examples of the android.media.AudioFormat.CHANNEL_CONFIGURATION_MONO field in Java. If you are wondering what AudioFormat.CHANNEL_CONFIGURATION_MONO is for, how to use it, or what real-world code that uses it looks like, the selected examples below should help. You can also explore further usage examples of its containing class, android.media.AudioFormat.


The following presents 15 code examples of the AudioFormat.CHANNEL_CONFIGURATION_MONO field, sorted by popularity by default.
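Before the individual examples, here is a minimal orientation sketch (not taken from any of the projects below): AudioFormat.CHANNEL_CONFIGURATION_MONO is the deprecated mono channel constant, and on API level 5 and above AudioFormat.CHANNEL_IN_MONO (recording) and AudioFormat.CHANNEL_OUT_MONO (playback) are the preferred replacements. The 16 kHz sample rate, MIC source, and class name are illustrative assumptions.

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder;

public class MonoRecorderSketch {
    // Illustrative configuration; any rate/encoding the device supports will do.
    private static final int SAMPLE_RATE_HZ = 16000;

    // Requires the RECORD_AUDIO permission at runtime.
    @SuppressWarnings("deprecation")
    public static AudioRecord createMonoRecorder(boolean useDeprecatedConstant) {
        // Both constants request a single input channel; the deprecated one is
        // kept only for very old API levels.
        int channelConfig = useDeprecatedConstant
                ? AudioFormat.CHANNEL_CONFIGURATION_MONO
                : AudioFormat.CHANNEL_IN_MONO;
        int minBufferSize = AudioRecord.getMinBufferSize(
                SAMPLE_RATE_HZ, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
        return new AudioRecord(MediaRecorder.AudioSource.MIC, SAMPLE_RATE_HZ,
                channelConfig, AudioFormat.ENCODING_PCM_16BIT, minBufferSize);
    }
}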

Example 1: getMinBufferSize

private int getMinBufferSize(int sampleRate, int channelConfig, int audioFormat) {
    minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);
    // Work around IllegalArgumentException: Invalid audio buffer size
    int channelCount = 1;
    switch (channelConfig) {
        // AudioFormat.CHANNEL_CONFIGURATION_DEFAULT
        case AudioFormat.CHANNEL_OUT_DEFAULT:
        case AudioFormat.CHANNEL_OUT_MONO:
        case AudioFormat.CHANNEL_CONFIGURATION_MONO:
            channelCount = 1;
            break;
        case AudioFormat.CHANNEL_OUT_STEREO:
        case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
            channelCount = 2;
            break;
        default:
            channelCount = Integer.bitCount(channelConfig);
    }
    // Validate minBufferSize (must be a positive multiple of the frame size); otherwise fall back to a default of 1152
    int frameSizeInBytes = channelCount * (audioFormat == AudioFormat.ENCODING_PCM_8BIT ? 1 : 2);
    if ((minBufferSize % frameSizeInBytes != 0) || (minBufferSize < 1)) {
        minBufferSize = 1152;
    }
    return minBufferSize;
}
 
Developer ID: dueros, Project: dcs-sdk-java, Lines: 25, Source: AudioTrackPlayerImpl.java
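As a usage note, the following is a hedged sketch (not part of the dcs-sdk-java source) of how the buffer size computed above would typically be passed on to an AudioTrack; the 44.1 kHz rate, stream type, and method placement are assumptions for illustration.

// Hypothetical caller, assumed to live in the same class as getMinBufferSize().
private AudioTrack createMonoTrack() {
    int sampleRate = 44100;                           // assumed rate
    int channelConfig = AudioFormat.CHANNEL_OUT_MONO; // non-deprecated mono output constant
    int encoding = AudioFormat.ENCODING_PCM_16BIT;
    int bufferSize = getMinBufferSize(sampleRate, channelConfig, encoding);
    return new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
            channelConfig, encoding, bufferSize, AudioTrack.MODE_STREAM);
}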

Example 2: getInstanse

@SuppressWarnings("deprecation")
public static ExtAudioRecorder getInstanse(Boolean recordingCompressed) {
	ExtAudioRecorder result = null;

	if (recordingCompressed) {
		result = new ExtAudioRecorder(false, AudioSource.MIC,
				sampleRates[3], AudioFormat.CHANNEL_CONFIGURATION_MONO,
				AudioFormat.ENCODING_PCM_16BIT);
	} else {
		// Try each configured sample rate in turn until the recorder
		// reports the INITIALIZING state.
		int i = 0;
		do {
			result = new ExtAudioRecorder(true, AudioSource.MIC,
					sampleRates[i], AudioFormat.CHANNEL_CONFIGURATION_MONO,
					AudioFormat.ENCODING_PCM_16BIT);
		} while ((++i < sampleRates.length)
				&& !(result.getState() == ExtAudioRecorder.State.INITIALIZING));
	}
	return result;
}
 
Developer ID: fengdongfei, Project: CXJPadProject, Lines: 20, Source: ExtAudioRecorder.java

Example 3: requestDevice

private void requestDevice() {
	int bufferSize = (_sampleRate / _ioBaseFrequency / 2);

	// The stereo buffer should be large enough to ensure
	// that scheduling doesn't mess it up.
	_playBuffer = new short[bufferSize * _bitsInBuffer];

	// Open Audio-Player
	_audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, _sampleRate,
			AudioFormat.CHANNEL_CONFIGURATION_MONO,
			AudioFormat.ENCODING_PCM_16BIT, _bufferSizeInBytes,
			AudioTrack.MODE_STREAM);

	int recBufferSize = AudioRecord.getMinBufferSize(_sampleRate,
			AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);

	_recBuffer = new short[recBufferSize * 10];

	// Open Audio-Recorder
	_audioRecord = new AudioRecord(MediaRecorder.AudioSource.DEFAULT,
			_sampleRate, AudioFormat.CHANNEL_IN_MONO,
			AudioFormat.ENCODING_PCM_16BIT, recBufferSize);

}
 
Developer ID: quake0day, Project: Jigglypuff, Lines: 24, Source: AndroidAudio.java

Example 4: AudioThread

public AudioThread(int sampleRateInHz, int channel, long streamId, long decoderId, Media media)
{
	if (channel == 1)
	{
		channel_configuration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
	} else
	{
		channel_configuration = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
	}
	this.mediaStreamId = streamId;
	this.decoderId = decoderId;
	this.media = media;
	int minBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channel_configuration, AudioFormat.ENCODING_PCM_16BIT);
	if (minBufferSize > audioLength)
	{
		audioLength = minBufferSize;
	}
	mAudioBuffer = new byte[audioLength];
	mAudio = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channel_configuration, AudioFormat.ENCODING_PCM_16BIT, audioLength, AudioTrack.MODE_STREAM);
}
 
Developer ID: OpenIchano, Project: Viewer, Lines: 20, Source: AudioThread.java

Example 5: getInstance

public static ExtAudioRecorder getInstance(Boolean recordingCompressed, VoiceCallback callback) {
	if (recordingCompressed) {
		result = new ExtAudioRecorder(false, AudioSource.MIC,
				sampleRates[3], AudioFormat.CHANNEL_CONFIGURATION_MONO,
				AudioFormat.ENCODING_PCM_16BIT, callback);
	} else {
		int i = 3;
		do {
			result = new ExtAudioRecorder(true, AudioSource.MIC,
					sampleRates[i], AudioFormat.CHANNEL_CONFIGURATION_MONO,
					AudioFormat.ENCODING_PCM_16BIT, callback);

		} while ((--i >= 0)
				&& !(result.getState() == ExtAudioRecorder.State.INITIALIZING));
	}
	return result;
}
 
Developer ID: entboost, Project: EntboostIM, Lines: 17, Source: ExtAudioRecorder.java

Example 6: init

/**
 * @param audioSource the audio source to record from; see {@link MediaRecorder.AudioSource}
 * @param desFile     destination file for the recorded PCM data
 */
@Override
public void init(int audioSource, File desFile) throws IOException {
	File dir = desFile.getParentFile();
	if (!dir.exists()) {
		dir.mkdirs();
	}
	
	isRecording = new AtomicBoolean(false);
	
	int sampleRateInHz = 16000;
	int channelConfig = AudioFormat.CHANNEL_CONFIGURATION_MONO;
	int audioFormat = AudioFormat.ENCODING_PCM_16BIT;
	bufferSizeInBytes = AudioRecord.getMinBufferSize(sampleRateInHz, channelConfig, audioFormat);
	audioRecord = new AudioRecord(audioSource, sampleRateInHz, channelConfig, audioFormat, bufferSizeInBytes);
	
	dos = new DataOutputStream(new BufferedOutputStream(new FileOutputStream(desFile)));
}
 
Developer ID: leleliu008, Project: Newton_for_Android_AS, Lines: 21, Source: RecordWithAudioRecord.java

Example 7: getRecordBufferSize

public static int getRecordBufferSize() {
    int frequency = Options.getInstance().audio.frequency;
    int audioEncoding = Options.getInstance().audio.encoding;
    int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
    if(Options.getInstance().audio.channelCount == 2) {
        channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    }
    return AudioRecord.getMinBufferSize(frequency, channelConfiguration, audioEncoding);
}
 
Developer ID: wuyisheng, Project: libRtmp, Lines: 9, Source: AndroidUntil.java

Example 8: getAudioRecord

@TargetApi(18)
public static AudioRecord getAudioRecord() {
    int frequency = Options.getInstance().audio.frequency;
    int audioEncoding = Options.getInstance().audio.encoding;
    int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
    if(Options.getInstance().audio.channelCount == 2) {
        channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    }
    int audioSource = MediaRecorder.AudioSource.MIC;
    if(Options.getInstance().audio.aec) {
        audioSource = MediaRecorder.AudioSource.VOICE_COMMUNICATION;
    }
    return new AudioRecord(audioSource, frequency,
            channelConfiguration, audioEncoding, getRecordBufferSize());
}
 
Developer ID: wuyisheng, Project: libRtmp, Lines: 15, Source: AndroidUntil.java

Example 9: createAudioRecord

private void createAudioRecord() throws InitializationException {
    // The AudioRecord configurations parameters used here, are guaranteed
    // to be supported on all devices.

    // AudioFormat.CHANNEL_IN_MONO should be used in place of deprecated
    // AudioFormat.CHANNEL_CONFIGURATION_MONO, but it is not available for
    // API level 3.

    // Unlike AudioTrack buffer, AudioRecord buffer could be larger than
    // minimum without causing any problems. But minimum works well.
    final int audioRecordBufferSizeInBytes = AudioRecord.getMinBufferSize(
            SpeechTrainerConfig.SAMPLE_RATE_HZ, AudioFormat.CHANNEL_CONFIGURATION_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    if (audioRecordBufferSizeInBytes <= 0) {
        throw new InitializationException("Failed to initialize recording.");
    }

    // CHANNEL_IN_MONO is guaranteed to work on all devices.
    // ENCODING_PCM_16BIT is guaranteed to work on all devices.
    audioRecord = new AudioRecord(AudioSource.MIC, SpeechTrainerConfig.SAMPLE_RATE_HZ,
            AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT,
            audioRecordBufferSizeInBytes);
    if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
        audioRecord = null;
        throw new InitializationException("Failed to initialize recording.");
    }
}
 
Developer ID: sdrausty, Project: buildAPKsApps, Lines: 27, Source: ControllerFactory.java
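The comments in this example note that AudioFormat.CHANNEL_IN_MONO is the non-deprecated replacement but was unavailable at API level 3. For comparison, here is a minimal sketch of the same construction when targeting API level 5 or later; it is not part of the buildAPKsApps project, and the 16 kHz constant stands in for SpeechTrainerConfig.SAMPLE_RATE_HZ as an assumption.

private AudioRecord createModernAudioRecord() throws InitializationException {
    final int sampleRateHz = 16000; // assumed; the project reads this from SpeechTrainerConfig
    final int bufferSizeInBytes = AudioRecord.getMinBufferSize(
            sampleRateHz, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
    if (bufferSizeInBytes <= 0) {
        throw new InitializationException("Failed to initialize recording.");
    }
    AudioRecord record = new AudioRecord(AudioSource.MIC, sampleRateHz,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT,
            bufferSizeInBytes);
    if (record.getState() != AudioRecord.STATE_INITIALIZED) {
        record.release();
        throw new InitializationException("Failed to initialize recording.");
    }
    return record;
}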

Example 10: createAudioTrack

private void createAudioTrack() throws InitializationException {
    // The AudioTrack configurations parameters used here, are guaranteed to
    // be supported on all devices.

    // AudioFormat.CHANNEL_OUT_MONO should be used in place of deprecated
    // AudioFormat.CHANNEL_CONFIGURATION_MONO, but it is not available for
    // API level 3.

    // Output buffer for playing should be as short as possible, so
    // AudioBufferPlayed events are not invoked long before audio buffer is
    // actually played. Also, when AudioTrack is stopped, it is filled with
    // silence of length audioTrackBufferSizeInBytes. If the silence is too
    // long, it causes a delay before the next recorded data starts playing.
    audioTrackBufferSizeInBytes = AudioTrack.getMinBufferSize(
            SpeechTrainerConfig.SAMPLE_RATE_HZ,
            AudioFormat.CHANNEL_CONFIGURATION_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    if (audioTrackBufferSizeInBytes <= 0) {
        throw new InitializationException("Failed to initialize playback.");
    }

    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            SpeechTrainerConfig.SAMPLE_RATE_HZ,
            AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT,
            audioTrackBufferSizeInBytes,
            AudioTrack.MODE_STREAM);
    if (audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        audioTrack = null;
        throw new InitializationException("Failed to initialize playback.");
    }
}
 
Developer ID: sdrausty, Project: buildAPKsApps, Lines: 31, Source: ControllerFactory.java
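Similarly, a hedged sketch of the playback side using the non-deprecated AudioFormat.CHANNEL_OUT_MONO on API level 5 or later (again not from the project above; the sample rate is an assumption):

private AudioTrack createModernAudioTrack() throws InitializationException {
    final int sampleRateHz = 16000; // assumed; the project reads this from SpeechTrainerConfig
    final int bufferSizeInBytes = AudioTrack.getMinBufferSize(
            sampleRateHz, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    if (bufferSizeInBytes <= 0) {
        throw new InitializationException("Failed to initialize playback.");
    }
    AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateHz,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            bufferSizeInBytes, AudioTrack.MODE_STREAM);
    if (track.getState() != AudioTrack.STATE_INITIALIZED) {
        track.release();
        throw new InitializationException("Failed to initialize playback.");
    }
    return track;
}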

Example 11: startRecord

private void startRecord() {
    if (Global.sVoiceDir == null) {
        try {
            Global.sVoiceDir = FileUtil.getDestinationInExternalFilesDir(activity, Environment.DIRECTORY_MUSIC, FileUtil.DOWNLOAD_FOLDER).getAbsolutePath();
        } catch (Exception e) {
            Global.errorLog(e);
        } finally {
            if (Global.sVoiceDir == null) {
                showToast(R.string.record_failed_no_enough_storage_space);
                stopRecord();
                return;
            }
        }

    }

    voiceRecrodAnimtion.selectDrawable(1);
    tips_hold_to_talk.setVisibility(View.GONE);
    soundWaveLayout.setVisibility(View.VISIBLE);
    recordTime.setText("00:00");
    soundWaveLeft.reSet();
    soundWaveRight.reSet();
    out = Global.sVoiceDir + File.separator + "coding_voice_" + UUID.randomUUID().toString() + ".amr";
    mAmrAudioRecorder = new AmrAudioRecorder(MediaRecorder.AudioSource.MIC, AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT, out);
    mAmrAudioRecorder.setVoiceRecordingCallBack(mVoiceRecordingCallBack);
    mAmrAudioRecorder.prepare();
    mAmrAudioRecorder.start();
    if (AmrAudioRecorder.State.ERROR == mAmrAudioRecorder.getState()) {
        showToast(R.string.record_failed);
    } else {
        isRecoding = true;
    }
}
 
Developer ID: huang303513, Project: Coding-Android, Lines: 33, Source: VoiceView.java

Example 12: encodeMessage

private void encodeMessage(int value) {
	// audio initialization
	int AUDIO_BUFFER_SIZE = 4096;
	int minBufferSize = AudioTrack.getMinBufferSize(AUDIO_SAMPLE_FREQ,
			AudioFormat.CHANNEL_CONFIGURATION_MONO,
			AudioFormat.ENCODING_PCM_16BIT);
	if (AUDIO_BUFFER_SIZE < minBufferSize)
		AUDIO_BUFFER_SIZE = minBufferSize;
	AudioTrack aT = new AudioTrack(AudioManager.STREAM_MUSIC,
			AUDIO_SAMPLE_FREQ, AudioFormat.CHANNEL_CONFIGURATION_MONO,
			AudioFormat.ENCODING_PCM_16BIT, AUDIO_BUFFER_SIZE,
			AudioTrack.MODE_STREAM);
	aT.play();

	// error detection encoding
	Log.i("TAG", "encodeMessage() value=" + value);
	value = ErrorDetection.createMessage(value);
	Log.i("TAG", "encodeMessage() message=" + value);
	// sound encoding
	double[] sound = FSKModule.encode(value);

	// ENCODING_PCM_16BIT expects two bytes per sample, so pack each value
	// as a little-endian 16-bit short rather than a 4-byte int.
	ByteBuffer buf = ByteBuffer.allocate(2 * sound.length);
	buf.order(ByteOrder.LITTLE_ENDIAN);
	for (int i = 0; i < sound.length; i++) {
		buf.putShort((short) sound[i]);
	}
	byte[] tone = buf.array();
	// play message
	int nBytes = aT.write(tone, 0, tone.length);
	aT.stop();
	aT.release();
}
 
Developer ID: quake0day, Project: Jigglypuff, Lines: 33, Source: SenderActivity.java

Example 13: analyze

private  void analyze(){
    for(int i=0;i<samplingRates.length;i++){
        int minSize= AudioRecord.getMinBufferSize(samplingRates[i],
                AudioFormat.CHANNEL_CONFIGURATION_MONO,
                AudioFormat.ENCODING_PCM_16BIT); // get the minimum allowed buffer size
        AudioRecord ar=new AudioRecord(MediaRecorder.AudioSource.MIC,
                samplingRates[i],
                AudioFormat.CHANNEL_CONFIGURATION_MONO,
                AudioFormat.ENCODING_PCM_16BIT,minSize);
        if(ar.getState()==AudioRecord.STATE_INITIALIZED){
            short[] buff=new short[minSize];
            ar.startRecording();
            while (recording){
                ar.read(buff, 0, minSize); // read audio data from the hardware into the buffer
                for(short s:buff){
                    if (Math.abs(s) > minVolume) { // when the amplitude exceeds the threshold, send a message to the handler
                        handler.sendEmptyMessage(0);
                    }
                }
            }
            ar.stop();
            i = samplingRates.length; // a working sampling rate was found; stop trying the rest
        }
        ar.release();
        ar=null;
    }
}
 
Developer ID: JianxunRao, Project: FangYanShuo, Lines: 27, Source: BlowActivity.java

Example 14: playSound

void playSound() {
    AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            sampleRate, AudioFormat.CHANNEL_CONFIGURATION_MONO,
            AudioFormat.ENCODING_PCM_16BIT, numSamples,
            AudioTrack.MODE_STATIC);
    audioTrack.write(generatedSnd, 0, generatedSnd.length);
    audioTrack.play();
}
 
Developer ID: zh-h, Project: IoTApp, Lines: 8, Source: PlaySoundActivity.java

Example 15: createAudioTrack

private AudioTrack createAudioTrack(GeneratedSound generatedSound) {
    AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            generatedSound.getSampleRate(), AudioFormat.CHANNEL_CONFIGURATION_MONO,
            AudioFormat.ENCODING_PCM_16BIT, generatedSound.getNumSamples(),
            AudioTrack.MODE_STREAM);

    return audioTrack;
}
 
Developer ID: zh-h, Project: IoTApp, Lines: 9, Source: SocketIOService.java


Note: The android.media.AudioFormat.CHANNEL_CONFIGURATION_MONO examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets are drawn from open-source projects contributed by their respective authors; copyright remains with the original authors, and distribution and use are subject to each project's license. Do not republish without permission.