This article collects typical usage examples of the Java constant android.media.AudioFormat.CHANNEL_OUT_MONO. If you are wondering what AudioFormat.CHANNEL_OUT_MONO is for and how to use it, the curated examples below should help. You can also explore the enclosing class, android.media.AudioFormat, for more detail.
The 15 code examples that follow all use AudioFormat.CHANNEL_OUT_MONO and are ordered by popularity by default.
Example 1: playSound
/**
* This method plays the sound data in the specified buffer.
*
* @param buffer specifies the sound data buffer.
*/
public void playSound(short[] buffer)
{
    final String funcName = "playSound";

    if (debugEnabled)
    {
        dbgTrace.traceEnter(funcName, TrcDbgTrace.TraceLevel.API);
        dbgTrace.traceExit(funcName, TrcDbgTrace.TraceLevel.API);
    }

    audioTrack = new AudioTrack(
            AudioManager.STREAM_MUSIC,
            sampleRate,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            buffer.length*2,    //buffer length in bytes
            AudioTrack.MODE_STATIC);
    audioTrack.write(buffer, 0, buffer.length);
    audioTrack.setNotificationMarkerPosition(buffer.length);
    audioTrack.setPlaybackPositionUpdateListener(this);
    audioTrack.play();
    playing = true;
}
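Since playSound() takes raw 16-bit PCM, a caller could synthesize a tone and hand it over. A rough sketch, assuming sampleRate is the same field that playSound() passes to the AudioTrack:

// One second of a half-amplitude 440 Hz sine tone, mono 16-bit PCM.
short[] tone = new short[sampleRate];
for (int i = 0; i < tone.length; i++)
{
    tone[i] = (short) (0.5 * Short.MAX_VALUE * Math.sin(2.0 * Math.PI * 440.0 * i / sampleRate));
}
playSound(tone);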
Example 2: PWave
public PWave(AppRunner appRunner) {
    super(appRunner);
    appRunner.whatIsRunning.add(this);

    // set the buffer size
    buffsize = AudioTrack.getMinBufferSize(mSampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    samples = new short[buffsize];

    // create an audiotrack object
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            mSampleRate, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, buffsize,
            AudioTrack.MODE_STREAM);

    // start audio
    audioTrack.play();
}
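Because the track is created in MODE_STREAM, playback continues only while data keeps arriving. A typical follow-up, sketched here with an assumed per-sample generator nextSample() and an assumed running flag, is a loop that refills samples and writes it back to the track:

// Hypothetical streaming loop: write() blocks until there is room
// in the track's buffer, which naturally paces the synthesis.
while (running) {
    for (int i = 0; i < samples.length; i++) {
        samples[i] = nextSample();    // assumed generator callback
    }
    audioTrack.write(samples, 0, samples.length);
}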
Example 3: init_
private void init_(boolean eccEnabled) {
    mEccEncoder = EccInstanceProvider.getEncoder(eccEnabled);

    int minBufferSizeInBytes = AudioTrack.getMinBufferSize(
            RATE,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT);

    // 44.1kHz mono 16bit
    mAudioTrack = new AudioTrack(
            AudioManager.STREAM_MUSIC,
            RATE,
            AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT,
            minBufferSizeInBytes,
            AudioTrack.MODE_STREAM);

    mExecutorService = Executors.newSingleThreadExecutor();
}
Example 4: initAudioTrack
private void initAudioTrack(int sampleRate, int channels) {
    if (sampleRate <= 0) {
        sampleRate = AUDIO_FORMAT_PCM8K;
    }
    if (channels <= 0) {
        channels = 1;
    }

    if (channels == 1) {
        mChannelConfig = AudioFormat.CHANNEL_OUT_MONO;
    } else if (channels == 2) {
        mChannelConfig = AudioFormat.CHANNEL_OUT_STEREO;
    }

    if (iCurrentQueueAudioFormat == sampleRate) {
        if (mAudioTrack == null) {
            mAudioTrack = createAudioTrack(iCurrentQueueAudioFormat);
        }
    } else {
        Log.d(TAG, "Decoder-initAudioTrack-sampleRate=" + sampleRate);
        Log.d(TAG, "Decoder-initAudioTrack-channels=" + channels);
        mAudioTrack = createAudioTrack(sampleRate);
        iCurrentQueueAudioFormat = sampleRate;
    }
}
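createAudioTrack() itself is not shown in this snippet. A minimal reconstruction consistent with the fields above (hypothetical, not the original implementation; assumes 16-bit PCM and the mChannelConfig set by this method):

private AudioTrack createAudioTrack(int sampleRate) {
    // Hypothetical sketch: size the track from the device minimum.
    int bufferSize = AudioTrack.getMinBufferSize(
            sampleRate, mChannelConfig, AudioFormat.ENCODING_PCM_16BIT);
    return new AudioTrack(
            AudioManager.STREAM_MUSIC,
            sampleRate,
            mChannelConfig,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferSize,
            AudioTrack.MODE_STREAM);
}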
Example 5: getMinBufferSize
private int getMinBufferSize(int sampleRate, int channelConfig, int audioFormat) {
    minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);
    // Work around "IllegalArgumentException: Invalid audio buffer size"
    int channelCount = 1;
    switch (channelConfig) {
        // AudioFormat.CHANNEL_CONFIGURATION_DEFAULT
        case AudioFormat.CHANNEL_OUT_DEFAULT:
        case AudioFormat.CHANNEL_OUT_MONO:
        case AudioFormat.CHANNEL_CONFIGURATION_MONO:
            channelCount = 1;
            break;
        case AudioFormat.CHANNEL_OUT_STEREO:
        case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
            channelCount = 2;
            break;
        default:
            channelCount = Integer.bitCount(channelConfig);
    }
    // Check whether minBufferSize is in a valid range; if not, fall back to a default of 1152.
    int frameSizeInBytes = channelCount * (audioFormat == AudioFormat.ENCODING_PCM_8BIT ? 1 : 2);
    if ((minBufferSize % frameSizeInBytes != 0) || (minBufferSize < 1)) {
        minBufferSize = 1152;
    }
    return minBufferSize;
}
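As a quick illustration of the fallback (values chosen here for illustration, not from the source): getMinBufferSize() can return an error code such as AudioTrack.ERROR_BAD_VALUE (-2), which trips the minBufferSize < 1 check and triggers the 1152-byte default:

// Illustrative check: 44.1 kHz stereo 16-bit PCM.
// frameSizeInBytes = 2 channels * 2 bytes = 4, so a valid size must be
// a positive multiple of 4; an error code like -2 falls back to 1152.
int size = getMinBufferSize(44100, AudioFormat.CHANNEL_OUT_STEREO,
        AudioFormat.ENCODING_PCM_16BIT);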
Example 6: PcmPlayer
public PcmPlayer(Context context, Handler handler) {
    this.mContext = context;
    this.audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            wBufferSize, AudioTrack.MODE_STREAM);
    this.handler = handler;
    audioTrack.setPlaybackPositionUpdateListener(this, handler);
    cacheDir = context.getExternalFilesDir(Environment.DIRECTORY_MUSIC);
}
Example 7: run
@Override
public void run() {
    super.run();
    isRunning = true;

    int buffsize = AudioTrack.getMinBufferSize(sr,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

    // create an audiotrack object
    AudioTrack audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            sr, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, buffsize,
            AudioTrack.MODE_STREAM);

    short[] samples = new short[buffsize];
    int amp = 10000;
    double twopi = 8. * Math.atan(1.);
    double ph = 0.0;

    // start audio
    audioTrack.play();

    // synthesis loop
    while (isRunning) {
        double fr = tuneFreq;
        for (int i = 0; i < buffsize; i++) {
            samples[i] = (short) (amp * Math.sin(ph));
            ph += twopi * fr / sr;
        }
        audioTrack.write(samples, 0, buffsize);
    }
    audioTrack.stop();
    audioTrack.release();
}
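One refinement not present in the original: ph grows without bound, and Math.sin() loses precision for very large arguments, so long-running synthesis loops usually wrap the phase accumulator:

ph += twopi * fr / sr;
if (ph > twopi) {
    ph -= twopi;    // keep the accumulator in [0, 2*pi) to preserve precision
}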
Example 8: audioTrackInit
@SuppressLint("NewApi")
private int audioTrackInit(int sampleRateInHz, int channels) {
    // this.sampleRateInHz = sampleRateInHz;
    // this.channels = channels;
    // return 0;
    audioTrackRelease();
    int channelConfig = channels >= 2 ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    try {
        mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig,
                AudioFormat.ENCODING_PCM_16BIT);
        mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig,
                AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize, AudioTrack.MODE_STREAM);
    } catch (Exception e) {
        mAudioTrackBufferSize = 0;
        Log.e("audioTrackInit", e);
    }
    return mAudioTrackBufferSize;
}
Example 9: prepare
@Override
protected void prepare() throws IOException {
    if (mState < STATE_PREPARED) {
        MediaFormat format;
        if (mState == STATE_UNINITIALIZED) {
            mTrackIndex = selectTrack();
            if (mTrackIndex < 0) {
                setState(STATE_NO_TRACK_FOUND);
                return;
            }
            mExtractor.selectTrack(mTrackIndex);
            format = mExtractor.getTrackFormat(mTrackIndex);
            mSampleRate = format.getInteger(MediaFormat.KEY_SAMPLE_RATE);
            int audioChannels = format.getInteger(MediaFormat.KEY_CHANNEL_COUNT);
            mAudioTrack = new AudioTrack(
                    AudioManager.STREAM_MUSIC,
                    mSampleRate,
                    (audioChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO),
                    AudioFormat.ENCODING_PCM_16BIT,
                    AudioTrack.getMinBufferSize(
                            mSampleRate,
                            (audioChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO),
                            AudioFormat.ENCODING_PCM_16BIT
                    ),
                    AudioTrack.MODE_STREAM
            );
            mState = STATE_INITIALIZED;
        } else {
            format = mExtractor.getTrackFormat(mTrackIndex);
        }
        String mime = format.getString(MediaFormat.KEY_MIME);
        Log.d(TAG, mime);
        mMediaCodec = MediaCodec.createDecoderByType(mime);
        // mMediaCodec.setCallback(mCallback);
        mMediaCodec.configure(format, null, null, 0);
        setState(STATE_PREPARED);
    }
    super.prepare();
}
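prepare() stops short of decoding. A typical next step, sketched here under stated assumptions (this is not the original class's code; it needs java.nio.ByteBuffer and the API 21+ MediaCodec buffer accessors, and it omits error handling and end-of-stream draining), pulls encoded samples from mExtractor, decodes them with mMediaCodec, and streams the PCM into mAudioTrack:

MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
mMediaCodec.start();
mAudioTrack.play();
boolean inputDone = false;
while (!inputDone) {
    // Feed one encoded sample to the decoder.
    int inIndex = mMediaCodec.dequeueInputBuffer(10000);
    if (inIndex >= 0) {
        ByteBuffer inBuf = mMediaCodec.getInputBuffer(inIndex);
        int size = mExtractor.readSampleData(inBuf, 0);
        if (size < 0) {
            mMediaCodec.queueInputBuffer(inIndex, 0, 0, 0,
                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            inputDone = true;
        } else {
            mMediaCodec.queueInputBuffer(inIndex, 0, size,
                    mExtractor.getSampleTime(), 0);
            mExtractor.advance();
        }
    }
    // Drain one decoded buffer, if available, into the AudioTrack.
    int outIndex = mMediaCodec.dequeueOutputBuffer(info, 10000);
    if (outIndex >= 0) {
        ByteBuffer outBuf = mMediaCodec.getOutputBuffer(outIndex);
        byte[] pcm = new byte[info.size];
        outBuf.get(pcm);
        mAudioTrack.write(pcm, 0, pcm.length);
        mMediaCodec.releaseOutputBuffer(outIndex, false);
    }
}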
Example 10: AndroidAudioPlayer
/**
* Constructs a new AndroidAudioPlayer from an audio format, default buffer size and stream type.
*
* @param audioFormat The audio format of the stream that this AndroidAudioPlayer will process.
* This can only be 1 channel, PCM 16 bit.
* @param bufferSizeInSamples The requested buffer size in samples.
* @param streamType The type of audio stream that the internal AudioTrack should use. For
* example, {@link AudioManager#STREAM_MUSIC}.
* @throws IllegalArgumentException if audioFormat is not valid or if the requested buffer size is invalid.
* @see AudioTrack
*/
public AndroidAudioPlayer(TarsosDSPAudioFormat audioFormat, int bufferSizeInSamples, int streamType) {
    if (audioFormat.getChannels() != 1) {
        throw new IllegalArgumentException("TarsosDSP only supports mono audio channel count: " + audioFormat.getChannels());
    }
    // The requested sample rate
    int sampleRate = (int) audioFormat.getSampleRate();
    // The buffer size in bytes is twice the buffer size expressed in samples if 16bit samples are used:
    int bufferSizeInBytes = bufferSizeInSamples * audioFormat.getSampleSizeInBits() / 8;
    // From the Android API about getMinBufferSize():
    //   The total size (in bytes) of the internal buffer where audio data is read from for playback.
    //   If track's creation mode is MODE_STREAM, you can write data into this buffer in chunks less than
    //   or equal to this size, and it is typical to use chunks of 1/2 of the total size to permit
    //   double-buffering. If the track's creation mode is MODE_STATIC, this is the maximum length sample,
    //   or audio clip, that can be played by this instance. See getMinBufferSize(int, int, int) to
    //   determine the minimum required buffer size for the successful creation of an AudioTrack instance
    //   in streaming mode. Using values smaller than getMinBufferSize() will result in an initialization
    //   failure.
    int minBufferSizeInBytes = AudioTrack.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
    if (minBufferSizeInBytes > bufferSizeInBytes) {
        throw new IllegalArgumentException("The buffer size should be at least " + (minBufferSizeInBytes / (audioFormat.getSampleSizeInBits() / 8)) + " (samples) according to AudioTrack.getMinBufferSize().");
    }
    // http://developer.android.com/reference/android/media/AudioTrack.html#AudioTrack(int, int, int, int, int, int)
    audioTrack = new AudioTrack(streamType, sampleRate, AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSizeInBytes, AudioTrack.MODE_STREAM);
    audioTrack.play();
}
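A plausible construction call, as a sketch (the TarsosDSPAudioFormat arguments are sample rate, bits per sample, channels, signed, big-endian; the 4096-sample buffer still has to pass the getMinBufferSize() check on the target device):

// 44.1 kHz, 16-bit, mono, signed, little-endian PCM.
TarsosDSPAudioFormat format = new TarsosDSPAudioFormat(44100, 16, 1, true, false);
AndroidAudioPlayer player = new AndroidAudioPlayer(format, 4096, AudioManager.STREAM_MUSIC);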
Example 11: audioTrackInit
public int audioTrackInit() {
    // Log.e(" ffff mediaplayer audiotrackinit start . sampleRateInHz:=" + sampleRateInHz + " channels:=" + channels );
    audioTrackRelease();
    int channelConfig = channels >= 2 ? AudioFormat.CHANNEL_OUT_STEREO : AudioFormat.CHANNEL_OUT_MONO;
    try {
        mAudioTrackBufferSize = AudioTrack.getMinBufferSize(sampleRateInHz, channelConfig,
                AudioFormat.ENCODING_PCM_16BIT);
        mAudioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRateInHz, channelConfig,
                AudioFormat.ENCODING_PCM_16BIT, mAudioTrackBufferSize, AudioTrack.MODE_STREAM);
    } catch (Exception e) {
        mAudioTrackBufferSize = 0;
        Log.e("audioTrackInit", e);
    }
    return mAudioTrackBufferSize;
}
Example 12: SoundGenerator
public SoundGenerator() {
    // Create the track in streaming mode.
    this.audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            SAMPLE_RATE, AudioFormat.CHANNEL_OUT_MONO,
            AudioFormat.ENCODING_PCM_16BIT, NUM_SAMPLES,
            AudioTrack.MODE_STREAM);
    // Call play so the track will start playing when data is written.
    this.audioTrack.play();
}
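Worth noting (an observation, not from the source): the sixth AudioTrack constructor argument is a size in bytes, so NUM_SAMPLES is presumably a byte count here. A streaming write loop would then look roughly like this sketch, where generating and nextChunk() are assumptions:

short[] chunk = new short[NUM_SAMPLES / 2];   // 2 bytes per 16-bit sample
while (generating) {
    nextChunk(chunk);                          // fill with synthesized samples
    audioTrack.write(chunk, 0, chunk.length);  // blocks until buffer space frees up
}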
Example 13: createAudioTrack
private AudioTrack createAudioTrack(int sampleRate, int channelCount) {
    int channelConfig = channelCount == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO;
    int bufferSize = ((sampleRate * 2) * channelCount / 100) * 8;    // buffer at most 80 ms of audio
    return new AudioTrack(
            AudioManager.STREAM_MUSIC,
            sampleRate,
            channelConfig,
            AudioFormat.ENCODING_PCM_16BIT,
            bufferSize,
            AudioTrack.MODE_STREAM);
}
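To make the 80 ms claim concrete (an illustrative calculation, not from the source):

// Illustrative: 44.1 kHz stereo 16-bit PCM.
// bytes per second per channel: 44100 * 2    = 88200
// bytes per second, 2 channels: 88200 * 2    = 176400
// bytes per 10 ms:              176400 / 100 = 1764
// buffer for 80 ms:             1764 * 8     = 14112 bytes
AudioTrack track = createAudioTrack(44100, 2);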
Example 14: initializeAndroidAudio
private void initializeAndroidAudio(int sampleRate) throws Exception {
    int minBufferSize = AudioTrack.getMinBufferSize(sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);

    if (minBufferSize < 0) {
        throw new Exception("Failed to get minimum buffer size: "
                + Integer.toString(minBufferSize));
    }

    track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
            AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
            minBufferSize, AudioTrack.MODE_STREAM);
}
Example 15: getMinOutputFrameSize
private static int getMinOutputFrameSize(int sampleRateInHz, int numChannels) {
    final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
    final int channelConfig =
            (numChannels == 1 ? AudioFormat.CHANNEL_OUT_MONO : AudioFormat.CHANNEL_OUT_STEREO);
    return AudioTrack.getMinBufferSize(
            sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT)
            / bytesPerFrame;
}
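For instance (illustrative numbers, assuming BITS_PER_SAMPLE is 16): at 48 kHz mono, bytesPerFrame is 2, so the result is the device's minimum buffer size expressed in frames rather than bytes:

// If the device reported a 3840-byte minimum, this would return 1920 frames.
int minFrames = getMinOutputFrameSize(48000, 1);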
}