本文整理汇总了Java中android.media.AudioFormat.CHANNEL_IN_MONO属性的典型用法代码示例。如果您正苦于以下问题：Java AudioFormat.CHANNEL_IN_MONO属性的具体用法？Java AudioFormat.CHANNEL_IN_MONO怎么用？Java AudioFormat.CHANNEL_IN_MONO使用的例子？那么恭喜您，这里精选的属性代码示例或许可以为您提供帮助。您也可以进一步了解该属性所在类android.media.AudioFormat的用法示例。
在下文中一共展示了AudioFormat.CHANNEL_IN_MONO属性的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: updateBufferSize
/**
 * Recomputes {@code bufferSize} under {@code bufferSizeLock}.
 *
 * @param pause when {@code true}, size the buffer to a whole multiple of
 *              {@code pitch.getPitchTime()} (~100 ms) so partial pitch blocks
 *              are never shown; otherwise use the configured samples-per-update.
 */
void updateBufferSize(boolean pause) {
    synchronized (bufferSizeLock) {
        final int update;
        if (pause) {
            // Round one second down to a whole number of pitch windows.
            // A file may not contain a multiple of 'samplesUpdate' samples
            // (~100 ms); the pitch view cannot show a span shorter than that,
            // and we cannot keep a partial sample because resumeRecording
            // would have to replay the remainder or reload everything from
            // the file. Cutting to a whole window is less confusing.
            long ms = 1000;
            ms = ms / pitch.getPitchTime() * pitch.getPitchTime();
            update = (int) (ms * sampleRate / 1000.0);
        } else {
            update = this.samplesUpdate;
        }
        // Stereo interleaves two samples per frame, so it needs twice the room.
        bufferSize = RawSamples.CHANNEL_CONFIG == AudioFormat.CHANNEL_IN_MONO
                ? update
                : update * 2;
    }
}
示例2: getMinBufferSize
/**
 * Computes the recording buffer size in bytes for the given configuration,
 * clamped so it is never smaller than the platform minimum.
 *
 * @param sampleRate    sample rate in Hz
 * @param channelConfig {@code AudioFormat.CHANNEL_IN_MONO} or stereo
 * @param audioFormat   {@code AudioFormat.ENCODING_PCM_16BIT} or 8-bit PCM
 * @return the buffer size stored in {@code mMinBufferSize}
 */
private int getMinBufferSize(int sampleRate, int channelConfig, int audioFormat) {
    final int numOfChannels = (channelConfig == AudioFormat.CHANNEL_IN_MONO) ? 1 : 2;
    final int bitsPerSample = (AudioFormat.ENCODING_PCM_16BIT == audioFormat) ? 16 : 8;
    // Frames per timer period; frames-per-second equals the sample rate.
    final int periodInFrames = sampleRate * TIMER_INTERVAL / 1000;
    // refer to android/4.1.1/frameworks/av/media/libmedia/AudioRecord.cpp,
    // AudioRecord::getMinFrameCount method.
    // Doubled for ping-pong use of the record buffer.
    mMinBufferSize = periodInFrames * 2 * numOfChannels * bitsPerSample / 8;
    final int platformMin = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
    if (mMinBufferSize < platformMin) {
        // Never go below the smallest size the platform allows.
        mMinBufferSize = platformMin;
        // Frame period and timer interval could be re-derived here:
        // periodInFrames = mMinBufferSize / ( 2 * bitsPerSample * numOfChannels / 8 );
    }
    return mMinBufferSize;
}
示例3: generateTrack
/**
 * Builds a streaming {@link AudioTrack} preloaded with {@code len} samples
 * from {@code buf} and a notification marker placed at the end of the data.
 *
 * @param sampleRate playback rate in Hz
 * @param buf        16-bit PCM samples
 * @param len        number of samples to play
 * @return the prepared track
 * @throws RuntimeException if the end-of-playback marker cannot be set
 */
public AudioTrack generateTrack(int sampleRate, short[] buf, int len) {
    final int markerPos = len;
    // Map the recording channel layout onto the matching output layout.
    int channelOut = 0;
    if (RawSamples.CHANNEL_CONFIG == AudioFormat.CHANNEL_IN_MONO) {
        channelOut = AudioFormat.CHANNEL_OUT_MONO;
    } else if (RawSamples.CHANNEL_CONFIG == AudioFormat.CHANNEL_IN_STEREO) {
        channelOut = AudioFormat.CHANNEL_OUT_STEREO;
    }
    // old phones bug.
    // http://stackoverflow.com/questions/27602492
    //
    // with MODE_STATIC setNotificationMarkerPosition not called
    AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
            channelOut, RawSamples.AUDIO_FORMAT,
            len * (Short.SIZE / 8), AudioTrack.MODE_STREAM);
    track.write(buf, 0, len);
    if (track.setNotificationMarkerPosition(markerPos) != AudioTrack.SUCCESS)
        throw new RuntimeException("unable to set marker");
    return track;
}
示例4: RawAudioRecorder
/**
* <p>Instantiates a new recorder and sets the state to INITIALIZING.
* In case of errors, no exception is thrown, but the state is set to ERROR.</p>
*
* <p>Android docs say: 44100Hz is currently the only rate that is guaranteed to work on all devices,
* but other rates such as 22050, 16000, and 11025 may work on some devices.</p>
*
* @param audioSource Identifier of the audio source (e.g. microphone)
* @param sampleRate Sample rate (e.g. 16000)
*/
public RawAudioRecorder(int audioSource, int sampleRate) {
mSampleRate = sampleRate;
// E.g. 1 second of 16kHz 16-bit mono audio takes 32000 bytes.
mOneSec = RESOLUTION_IN_BYTES * CHANNELS * mSampleRate;
mRecording = new byte[mOneSec * MAX_RECORDING_TIME_IN_SECS];
try {
setBufferSizeAndFramePeriod();
mRecorder = new AudioRecord(audioSource, mSampleRate, AudioFormat.CHANNEL_IN_MONO, RESOLUTION, mBufferSize);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
boolean agcAvailable = AutomaticGainControl.isAvailable();
if(agcAvailable) {
AutomaticGainControl.create(mRecorder.getAudioSessionId());
}
}
if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
throw new Exception("AudioRecord initialization failed");
}
mBuffer = new byte[mFramePeriod * RESOLUTION_IN_BYTES * CHANNELS];
setState(State.READY);
} catch (Exception e) {
release();
setState(State.ERROR);
if (e.getMessage() == null) {
Log.e(LOG_TAG, "Unknown error occured while initializing recording");
} else {
Log.e(LOG_TAG, e.getMessage());
}
}
}
示例5: getAudioChannelConfig
/**
 * Get the normal supported audio channel setting.
 *
 * <p>Only mono capture is supported: every value of
 * {@code SpeechRecognizer.AUDIO_CHANNELS} maps to the same configuration.</p>
 *
 * @return Audio channels ({@link AudioFormat#CHANNEL_IN_MONO})
 */
public static int getAudioChannelConfig() {
    // The original switch returned CHANNEL_IN_MONO for case 1 and fell
    // through to the same value otherwise, so a single return is equivalent.
    return AudioFormat.CHANNEL_IN_MONO;
}
示例6: onCreate
@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_main);
    mImageButton = (ImageButton) findViewById(R.id.action_image);
    // Toggle recording (and swap the button icon) on each tap.
    mImageButton.setOnClickListener(new View.OnClickListener() {
        @Override
        public void onClick(View v) {
            if (mRecorder == null) {
                return;
            }
            ImageButton button = (ImageButton) v;
            if (mRecorder.isRecording()) {
                button.setImageResource(R.drawable.record);
                mRecorder.stop();
            } else {
                button.setImageResource(R.drawable.pause);
                mRecorder.startRecording();
            }
        }
    });
    if (!createOutputFile()) {
        Toast.makeText(this, "创建文件失败~", Toast.LENGTH_SHORT).show();
    }
    mRecorder = new Recorder(44100,
            AudioFormat.CHANNEL_IN_MONO,      // channel config (mono)
            AudioFormat.ENCODING_PCM_16BIT,   // sample format
            MediaRecorder.AudioSource.MIC,    // audio source
            NUM_SAMPLES,                      // period
            this);                            // onDataChangeListener
    output = new byte[NUM_SAMPLES * 2];
}
示例7: SaiyRecorder
/**
 * Constructor.
 * <p>
 * Uses the most common application defaults: the voice-recognition audio
 * source, 8 kHz mono 16-bit PCM, a buffer sized by
 * {@code calculateBufferSize()}, and audio enhancement enabled.
 */
public SaiyRecorder() {
    // VOICE_RECOGNITION source: tuned by the platform for speech capture.
    this.audioSource = MediaRecorder.AudioSource.VOICE_RECOGNITION;
    this.sampleRateInHz = 8000;
    this.channelConfig = AudioFormat.CHANNEL_IN_MONO;
    this.audioFormat = AudioFormat.ENCODING_PCM_16BIT;
    // NOTE(review): presumably calculateBufferSize() reads the fields set
    // above, so this assignment must stay after them — confirm before reordering.
    this.bufferSizeInBytes = calculateBufferSize();
    this.enhance = true;
}
示例8: AudioRecordRunnable
private AudioRecordRunnable() {
bufferSize = AudioRecord.getMinBufferSize(sampleRate,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, sampleRate,
AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
audioData = new short[bufferSize];
}
示例9: SpeechRecord
/**
 * Convenience constructor: delegates to the full constructor using the
 * voice-recognition audio source and mono 16-bit PCM.
 *
 * @param sampleRateInHz    sample rate in Hz
 * @param bufferSizeInBytes recording buffer size in bytes
 * @param noise             enable noise suppression
 * @param gain              enable automatic gain control
 * @param echo              enable acoustic echo cancellation
 * @throws IllegalArgumentException if the delegated constructor rejects the arguments
 */
public SpeechRecord(int sampleRateInHz, int bufferSizeInBytes, boolean noise, boolean gain, boolean echo)
throws IllegalArgumentException {
    this(
        MediaRecorder.AudioSource.VOICE_RECOGNITION,
        sampleRateInHz,
        AudioFormat.CHANNEL_IN_MONO,
        AudioFormat.ENCODING_PCM_16BIT,
        bufferSizeInBytes,
        noise,
        gain,
        echo
    );
}
示例10: getMinInputFrameSize
/**
 * Returns the platform's minimum AudioRecord buffer size expressed in
 * frames, where one frame holds one 16-bit sample per channel.
 *
 * @param sampleRateInHz sample rate in Hz
 * @param numChannels    1 for mono, otherwise treated as stereo
 * @return minimum buffer size in frames
 */
private static int getMinInputFrameSize(int sampleRateInHz, int numChannels) {
    final int channelConfig;
    if (numChannels == 1) {
        channelConfig = AudioFormat.CHANNEL_IN_MONO;
    } else {
        channelConfig = AudioFormat.CHANNEL_IN_STEREO;
    }
    final int bytesPerFrame = numChannels * (BITS_PER_SAMPLE / 8);
    final int minBytes = AudioRecord.getMinBufferSize(
            sampleRateInHz, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    return minBytes / bytesPerFrame;
}
示例11: startRecording
/**
 * The trigger to open a new AudioRecord and start recording with the intention of sending the audio to the AVS server using the stopRecord(). This will have permissions
 * issues in Marshmallow that need to be handled at the Activity level (checking for permission to record audio, and requesting it if we don't already have permissions).
 * @param url our POST url
 * @param accessToken our user's access token
 * @param buffer a byte[] that allows us to prepend whatever audio is recorded by the user with either generated ore pre-recorded audio, this needs
 * to be in the same format as the audio being recorded
 * @param callback our callback to notify us when we change states
 * @throws IOException
 *
 * @deprecated Manage this state on the application side, instead, and send the audio using {@link SpeechSendAudio}
 */
@Deprecated
public void startRecording(final String url, final String accessToken, @Nullable byte[] buffer,
    @Nullable final AsyncCallback<Void, Exception> callback) throws IOException {
    // Create the recorder under the lock so concurrent start/stop calls
    // never race on mAudioRecord.
    synchronized(mLock) {
        mAudioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC, AUDIO_RATE, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, BUFFER_SIZE);
    }
    if(callback != null){
        callback.start();
    }
    mCallback = callback;
    mIsRecording = true;
    // Open the HTTP connection off the main thread; the same lock serializes
    // it against the AudioRecord creation above.
    new AsyncTask<Void, Void, Void>() {
        @Override
        protected Void doInBackground(Void... params) {
            synchronized(mLock) {
                prepareConnection(url, accessToken);
            }
            return null;
        }
    }.executeOnExecutor(AsyncTask.THREAD_POOL_EXECUTOR);
    // Optionally prepend caller-supplied audio (must match the recording format).
    if(buffer != null){
        mOutputStream.write(buffer);
    }
    //record our audio
    recordAudio(mAudioRecord, mOutputStream);
}
示例12: SpeechRecorder
SpeechRecorder(Conversation conversation) {
mConversation = conversation;
// audio should be 16-bit mono linear PCM at 16 kHz sample rate
mRecorder = new AudioRecord(
MediaRecorder.AudioSource.MIC,
SAMPLE_RATE,
AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT,
BUFFER_SIZE * BYTES_PER_SAMPLE
);
}
示例13: findAudioRecord
/**
 * Probes sample rates, encodings, channel layouts, and audio sources until an
 * AudioRecord initializes successfully.
 *
 * @return an initialized recorder, or {@code null} if no combination works
 */
private AudioRecord findAudioRecord() {
    int[] samplingRates = new int[]{44100, 22050, 11025, 8000};
    int[] audioFormats = new int[]{
            AudioFormat.ENCODING_PCM_16BIT,
            AudioFormat.ENCODING_PCM_8BIT};
    int[] channelConfigs = new int[]{
            AudioFormat.CHANNEL_IN_STEREO,
            AudioFormat.CHANNEL_IN_MONO};
    for (int rate : samplingRates) {
        for (int format : audioFormats) {
            for (int config : channelConfigs) {
                try {
                    int bufferSize = AudioRecord.getMinBufferSize(rate, config, format);
                    if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
                        for (int source : AUDIO_SOURCES) {
                            AudioRecord recorder = new AudioRecord(source, rate, config, format, bufferSize * 4);
                            if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                                return recorder;
                            }
                            // Fix: release recorders that failed to initialize.
                            // Each AudioRecord holds native resources even in the
                            // UNINITIALIZED state; the probing loop previously
                            // leaked one per failed combination.
                            recorder.release();
                        }
                    }
                } catch (Exception e) {
                    Log.e(TAG, "Init AudioRecord Error." + Log.getStackTraceString(e));
                }
            }
        }
    }
    return null;
}
示例14: Looper
public Looper() {
minBytes = AudioRecord.getMinBufferSize(sampleRate, AudioFormat.CHANNEL_IN_MONO,
AudioFormat.ENCODING_PCM_16BIT);
minBytes = Math.max(minBytes, fftBins);
// VOICE_RECOGNITION: use the mic with AGC turned off!
record = new AudioRecord(AGC_OFF, sampleRate,
AudioFormat.CHANNEL_IN_MONO,AudioFormat.ENCODING_PCM_16BIT, minBytes);
Log.d(TAG, "Buffer size: " + minBytes + " (" + record.getSampleRate() + "=" + sampleRate + ")");
}
示例15: setupAudioRecord
/**
 * Opens the microphone for 16-bit PCM capture, mono or stereo depending on
 * the {@code stereo} flag, with a buffer four times the platform minimum.
 */
private void setupAudioRecord() {
    final int channelConfig;
    if (stereo) {
        channelConfig = AudioFormat.CHANNEL_IN_STEREO;
    } else {
        channelConfig = AudioFormat.CHANNEL_IN_MONO;
    }
    final int minBufferSize = AudioRecord.getMinBufferSize(
            sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    // Oversize the buffer to reduce the risk of overrun on slow devices.
    audioRecord = new AudioRecord(
            MediaRecorder.AudioSource.MIC,
            sampleRate,
            channelConfig,
            AudioFormat.ENCODING_PCM_16BIT,
            4 * minBufferSize);
}