

Java AudioFormat.CHANNEL_CONFIGURATION_STEREO Field Code Examples

This article collects typical usage examples of the android.media.AudioFormat.CHANNEL_CONFIGURATION_STEREO field in Java. If you are wondering what AudioFormat.CHANNEL_CONFIGURATION_STEREO is for, or how to use it in practice, the curated examples below should help. You can also explore further usage examples of android.media.AudioFormat, the class this field belongs to.


The following presents 6 code examples of the AudioFormat.CHANNEL_CONFIGURATION_STEREO field, sorted by popularity by default.
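Note that CHANNEL_CONFIGURATION_STEREO has long been deprecated in the Android SDK; new code should use AudioFormat.CHANNEL_OUT_STEREO for playback (AudioTrack) and AudioFormat.CHANNEL_IN_STEREO for recording (AudioRecord). A minimal sketch of the modern equivalent (the sample rate is an assumed value for illustration):

// Sketch: the non-deprecated way to request stereo output,
// replacing the legacy AudioFormat.CHANNEL_CONFIGURATION_STEREO constant.
int sampleRate = 44100; // assumed sample rate for illustration
int bufferSize = AudioTrack.getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
        bufferSize, AudioTrack.MODE_STREAM);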

Example 1: getMinBufferSize

private int getMinBufferSize(int sampleRate, int channelConfig, int audioFormat) {
    minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);
    // Work around IllegalArgumentException: Invalid audio buffer size
    int channelCount = 1;
    switch (channelConfig) {
        // AudioFormat.CHANNEL_CONFIGURATION_DEFAULT
        case AudioFormat.CHANNEL_OUT_DEFAULT:
        case AudioFormat.CHANNEL_OUT_MONO:
        case AudioFormat.CHANNEL_CONFIGURATION_MONO:
            channelCount = 1;
            break;
        case AudioFormat.CHANNEL_OUT_STEREO:
        case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
            channelCount = 2;
            break;
        default:
            channelCount = Integer.bitCount(channelConfig);
    }
    // Check that minBufferSize is a valid whole number of frames; fall back to 1152 if not
    int frameSizeInBytes = channelCount * (audioFormat == AudioFormat.ENCODING_PCM_8BIT ? 1 : 2);
    if ((minBufferSize % frameSizeInBytes != 0) || (minBufferSize < 1)) {
        minBufferSize = 1152;
    }
    return minBufferSize;
}
 
Developer ID: dueros, Project: dcs-sdk-java, Lines of code: 25, Source: AudioTrackPlayerImpl.java
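A hypothetical call site for this helper (the sample rate and surrounding code are assumptions for illustration, not part of dcs-sdk-java): the returned size is passed straight to the AudioTrack constructor.

// Hypothetical usage sketch; the values are illustrative.
int sampleRate = 16000;
int bufferSize = getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT);
AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        AudioFormat.CHANNEL_CONFIGURATION_STEREO, AudioFormat.ENCODING_PCM_16BIT,
        bufferSize, AudioTrack.MODE_STREAM);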

Example 2: start

/**
 * Set the tone frequency and start playback.
 * @param rate frequency in Hz
 */
@SuppressWarnings("deprecation")
public void start(int rate) {
    stop();
    if (rate <= 0) {
        return;
    }
    Hz = rate;
    waveLen = RATE / Hz;
    length = waveLen * Hz;
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, RATE,
            AudioFormat.CHANNEL_CONFIGURATION_STEREO, // or CHANNEL_CONFIGURATION_MONO
            AudioFormat.ENCODING_PCM_8BIT, length, AudioTrack.MODE_STREAM);
    // Generate the sine wave
    wave = SinWave.sin(wave, waveLen, length);
    if (audioTrack != null) {
        audioTrack.play();
    }
}
 
Developer ID: Becavalier, Project: QRDataTransfer-Android, Lines of code: 24, Source: AudioTrackManager.java
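SinWave.sin is a project-specific helper whose source is not shown here. A plausible minimal implementation, assuming it fills one wavelength of unsigned 8-bit PCM samples and tiles it across the buffer (the exact semantics are an assumption, and the real helper in QRDataTransfer-Android may differ):

// Hypothetical sketch of SinWave.sin. Android's ENCODING_PCM_8BIT is
// unsigned, centered on 128, so samples swing between 1 and 255.
public static byte[] sin(byte[] wave, int waveLen, int length) {
    if (wave == null || wave.length < length) {
        wave = new byte[length];
    }
    for (int i = 0; i < length; i++) {
        double angle = 2 * Math.PI * (i % waveLen) / waveLen;
        wave[i] = (byte) (128 + 127 * Math.sin(angle));
    }
    return wave;
}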

Example 3: AudioThread

public AudioThread(int sampleRateInHz, int channel, long streamId, long decoderId, Media media)
{
	// Map the decoder's channel count onto the legacy channel-configuration constants
	if (channel == 1)
	{
		channel_configuration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
	} else
	{
		channel_configuration = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
	}
	this.mediaStreamId = streamId;
	this.decoderId = decoderId;
	this.media = media;
	// Never use a buffer smaller than the platform minimum for this configuration
	int minBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channel_configuration, AudioFormat.ENCODING_PCM_16BIT);
	if (minBufferSize > audioLength)
	{
		audioLength = minBufferSize;
	}
	mAudioBuffer = new byte[audioLength];
	mAudio = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channel_configuration, AudioFormat.ENCODING_PCM_16BIT, audioLength, AudioTrack.MODE_STREAM);
}
 
Developer ID: OpenIchano, Project: Viewer, Lines of code: 20, Source: AudioThread.java
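The thread's run loop is not shown in this excerpt. A typical pattern, assuming AudioThread extends Thread and a decoder fills mAudioBuffer (decodeAudio below is a hypothetical stand-in, not the Viewer project's actual call), would be:

// Hypothetical run() sketch; decodeAudio(...) is an assumed helper.
@Override
public void run() {
    mAudio.play();
    while (!isInterrupted()) {
        int len = decodeAudio(decoderId, mAudioBuffer); // assumed decoder call
        if (len > 0) {
            mAudio.write(mAudioBuffer, 0, len);
        }
    }
    mAudio.stop();
    mAudio.release();
}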

Example 4: getRecordBufferSize

public static int getRecordBufferSize() {
    int frequency = Options.getInstance().audio.frequency;
    int audioEncoding = Options.getInstance().audio.encoding;
    int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
    if(Options.getInstance().audio.channelCount == 2) {
        channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    }
    return AudioRecord.getMinBufferSize(frequency, channelConfiguration, audioEncoding);
}
 
Developer ID: wuyisheng, Project: libRtmp, Lines of code: 9, Source: AndroidUntil.java

Example 5: getAudioRecord

@TargetApi(18)
public static AudioRecord getAudioRecord() {
    int frequency = Options.getInstance().audio.frequency;
    int audioEncoding = Options.getInstance().audio.encoding;
    int channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_MONO;
    if(Options.getInstance().audio.channelCount == 2) {
        channelConfiguration = AudioFormat.CHANNEL_CONFIGURATION_STEREO;
    }
    int audioSource = MediaRecorder.AudioSource.MIC;
    if(Options.getInstance().audio.aec) {
        audioSource = MediaRecorder.AudioSource.VOICE_COMMUNICATION;
    }
    return new AudioRecord(audioSource, frequency,
            channelConfiguration, audioEncoding, getRecordBufferSize());
}
 
Developer ID: wuyisheng, Project: libRtmp, Lines of code: 15, Source: AndroidUntil.java
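A hypothetical capture loop wiring Examples 4 and 5 together (the loop itself is an assumption, not part of libRtmp):

// Hypothetical usage sketch combining getAudioRecord() and
// getRecordBufferSize(); the loop body is an assumption.
AudioRecord record = AndroidUntil.getAudioRecord();
byte[] buffer = new byte[AndroidUntil.getRecordBufferSize()];
record.startRecording();
boolean recording = true; // toggled from elsewhere in a real app
while (recording) {
    int read = record.read(buffer, 0, buffer.length);
    if (read > 0) {
        // hand 'read' bytes of PCM to the RTMP audio encoder here
    }
}
record.stop();
record.release();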

Example 6: AudioOutputQueue

public AudioOutputQueue(final AudioStreamInformationProvider streamInfoProvider) {
	convertUnsignedToSigned = true;
	
	//setup the Audio Format options
	streamType = AudioManager.STREAM_MUSIC;
	
	sampleRateInHz = streamInfoProvider.getSampleRate();
	channelConfig = streamInfoProvider.getChannels();
	audioFormat = streamInfoProvider.getAudioFormat();
	
	sampleRate = streamInfoProvider.getSampleRate();
	
	/* Audio format-dependent stuff */
	packetSizeFrames = streamInfoProvider.getFramesPerPacket();
	bytesPerFrame = streamInfoProvider.getChannels() * streamInfoProvider.getSampleSizeInBits() / 8;
	
	//calculate the buffer size in bytes
	bufferSizeInBytes = (int)Math.pow(2, Math.ceil(Math.log(BUFFER_SIZE_SECONDS * sampleRate * bytesPerFrame) / Math.log(2.0)));
	
	mode = AudioTrack.MODE_STREAM;
	
	//create the AudioTrack
	//audioTrack = new AudioTrack(streamType, sampleRateInHz, channelConfig, audioFormat, bufferSizeInBytes, mode);
	audioTrack = new AudioTrack(streamType, sampleRateInHz, AudioFormat.CHANNEL_CONFIGURATION_STEREO, audioFormat, bufferSizeInBytes, mode);//FIXME

	LOG.info("AudioTrack created successfully with a buffer of " + bufferSizeInBytes + " bytes and " + bufferSizeInBytes / bytesPerFrame + " frames.");
		
	//create initial array of "filler" bytes ....
	lineLastFrame = new byte[bytesPerFrame];
	for(int b=0; b < lineLastFrame.length; ++b){
		lineLastFrame[b] = (b % 2 == 0) ? (byte)-128 : (byte)0;
	}

	/* Create enqueuer thread and wait for the line to start.
	 * The wait guarantees that the AudioClock functions return
	 * sensible values right after construction
	 */
	queueThread.setDaemon(true);
	queueThread.setName("Audio Enqueuer");
	queueThread.setPriority(Thread.MAX_PRIORITY);
	
	/*
	queueThread.start();
	
	//while ( queueThread.isAlive() && ! m_line.isActive() ){
	while ( queueThread.isAlive() && audioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING){
		Thread.yield();
	}
	*/

	/* Initialize the seconds time offset now that the line is running. */
	secondsTimeOffset = 2208988800.0 +  System.currentTimeMillis() * 1e-3;
}
 
Developer ID: SergioChan, Project: Android-Airplay-Server, Lines of code: 53, Source: AudioOutputQueue.java
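To make the buffer-size rounding in Example 6 concrete (the constants are assumed for illustration): a 44100 Hz stereo 16-bit stream has bytesPerFrame = 2 * 16 / 8 = 4, so with BUFFER_SIZE_SECONDS = 2 the raw size is 2 * 44100 * 4 = 352800 bytes, which rounds up to the next power of two, 2^19 = 524288 bytes:

// Worked example of the rounding above; the constants are assumptions.
int bytes = (int) Math.pow(2, Math.ceil(Math.log(2 * 44100 * 4) / Math.log(2.0)));
// bytes == 524288, i.e. 2^19, the smallest power of two >= 352800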


Note: The android.media.AudioFormat.CHANNEL_CONFIGURATION_STEREO examples above were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects; copyright remains with the original authors, and redistribution and use should follow each project's license. Do not reproduce without permission.