This article collects typical usage examples of the Java android.media.AudioFormat.ENCODING_PCM_8BIT constant. If you are wondering what AudioFormat.ENCODING_PCM_8BIT is for and how to use it, the curated examples below may help; you can also read more about its enclosing class, android.media.AudioFormat.
The following 8 code examples show AudioFormat.ENCODING_PCM_8BIT in use, sorted by popularity by default.
Example 1: getMinBufferSize
private int getMinBufferSize(int sampleRate, int channelConfig, int audioFormat) {
    minBufferSize = AudioTrack.getMinBufferSize(sampleRate, channelConfig, audioFormat);
    // Works around "IllegalArgumentException: Invalid audio buffer size"
    int channelCount = 1;
    switch (channelConfig) {
        // AudioFormat.CHANNEL_CONFIGURATION_DEFAULT
        case AudioFormat.CHANNEL_OUT_DEFAULT:
        case AudioFormat.CHANNEL_OUT_MONO:
        case AudioFormat.CHANNEL_CONFIGURATION_MONO:
            channelCount = 1;
            break;
        case AudioFormat.CHANNEL_OUT_STEREO:
        case AudioFormat.CHANNEL_CONFIGURATION_STEREO:
            channelCount = 2;
            break;
        default:
            channelCount = Integer.bitCount(channelConfig);
    }
    // Check whether minBufferSize is valid; if not, fall back to a default of 1152
    int frameSizeInBytes = channelCount * (audioFormat == AudioFormat.ENCODING_PCM_8BIT ? 1 : 2);
    if ((minBufferSize % frameSizeInBytes != 0) || (minBufferSize < 1)) {
        minBufferSize = 1152;
    }
    return minBufferSize;
}
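As a usage sketch (not part of the original example), the helper above can feed directly into an AudioTrack constructor. The sample rate, channel configuration, and the fact that the caller lives in the same class as getMinBufferSize are assumptions for illustration:
// Hypothetical caller: builds an 8-bit PCM AudioTrack using the validated buffer size.
// Sample rate and channel config are illustrative assumptions.
int sampleRate = 44100;
int channelConfig = AudioFormat.CHANNEL_OUT_MONO;
int encoding = AudioFormat.ENCODING_PCM_8BIT;
int bufferSize = getMinBufferSize(sampleRate, channelConfig, encoding);
AudioTrack track = new AudioTrack(
        AudioManager.STREAM_MUSIC,   // deprecated stream type, matching the other examples here
        sampleRate,
        channelConfig,
        encoding,
        bufferSize,
        AudioTrack.MODE_STREAM);
track.play();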
Example 2: audioFormatSampleBytes
// Returns the size in bytes of one sample for the given AudioFormat encoding
// (ENCODING_DEFAULT is treated as 16-bit PCM; unknown encodings yield 0).
public static int audioFormatSampleBytes(int f)
{
    switch (f)
    {
        case AudioFormat.ENCODING_PCM_8BIT:
            return 1;
        case AudioFormat.ENCODING_PCM_16BIT:
        case AudioFormat.ENCODING_DEFAULT:
            return 2;
        case AudioFormat.ENCODING_PCM_FLOAT:
            return 4;
        case AudioFormat.ENCODING_INVALID:
        default:
            return 0;
    }
}
Example 3: audioFormatSampleType
// Maps an AudioFormat encoding to the matching Cons.Type sample type
// (ENCODING_DEFAULT is treated as 16-bit PCM; unknown encodings map to UNDEF).
public static Cons.Type audioFormatSampleType(int f)
{
    switch (f)
    {
        case AudioFormat.ENCODING_PCM_8BIT:
            return Cons.Type.CHAR;
        case AudioFormat.ENCODING_PCM_16BIT:
        case AudioFormat.ENCODING_DEFAULT:
            return Cons.Type.SHORT;
        case AudioFormat.ENCODING_PCM_FLOAT:
            return Cons.Type.FLOAT;
        case AudioFormat.ENCODING_INVALID:
        default:
            return Cons.Type.UNDEF;
    }
}
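A brief usage sketch for the two helpers in Examples 2 and 3 (the buffer size and channel count below are illustrative assumptions): converting a raw buffer size in bytes into a frame count for a given encoding.
// Hypothetical usage: derive the number of frames held by a byte buffer.
// bufferSizeInBytes and channelCount are assumed inputs.
int bufferSizeInBytes = 4096;
int channelCount = 2;
int bytesPerSample = audioFormatSampleBytes(AudioFormat.ENCODING_PCM_8BIT); // 1 for 8-bit PCM
int frames = bufferSizeInBytes / (bytesPerSample * channelCount);
Cons.Type sampleType = audioFormatSampleType(AudioFormat.ENCODING_PCM_8BIT); // Cons.Type.CHAR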
Example 4: start
/**
 * Sets the tone frequency (in Hz) and starts playback.
 *
 * @param rate frequency in Hz
 */
@SuppressWarnings("deprecation")
public void start(int rate) {
    stop();
    if (rate > 0) {
        Hz = rate;
        waveLen = RATE / Hz;
        length = waveLen * Hz;
        audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, RATE,
                AudioFormat.CHANNEL_CONFIGURATION_STEREO, // CHANNEL_CONFIGURATION_MONO,
                AudioFormat.ENCODING_PCM_8BIT, length, AudioTrack.MODE_STREAM);
        // Generate the sine wave
        wave = SinWave.sin(wave, waveLen, length);
        if (audioTrack != null) {
            audioTrack.play();
        }
    } else {
        return;
    }
}
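The SinWave.sin helper used above is not included in this excerpt. A minimal sketch of what an 8-bit PCM sine generator with that signature might look like follows; the class name and the assumption that wave is a byte[] are reconstructed from the call site, not confirmed by the original code. Android's 8-bit PCM is unsigned, so the wave is centered at 128.
// Hypothetical reconstruction of SinWave: fills (or allocates) a byte buffer with a
// repeating sine period of waveLen samples, for a total of length samples.
public class SinWave {
    public static byte[] sin(byte[] wave, int waveLen, int length) {
        if (wave == null || wave.length < length) {
            wave = new byte[length];
        }
        for (int i = 0; i < length; i++) {
            double angle = 2 * Math.PI * (i % waveLen) / waveLen;
            // The narrowing cast keeps the low 8 bits, which AudioTrack reads as unsigned.
            wave[i] = (byte) (128 + 127 * Math.sin(angle));
        }
        return wave;
    }
}
Note that with MODE_STREAM the generated buffer still has to be pushed to the track, e.g. audioTrack.write(wave, 0, length) in a playback loop; that part is presumably handled elsewhere in the original class.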
Example 5: writeWavHeader
/**
 * Writes the proper 44-byte RIFF/WAVE header to the given stream.
 * Two size fields are left empty (zero) since we do not yet know the final stream size.
 *
 * @param out         The stream to write the header to
 * @param channelMask An AudioFormat.CHANNEL_* mask
 * @param sampleRate  The sample rate in hertz
 * @param encoding    An AudioFormat.ENCODING_PCM_* value
 * @throws IOException
 */
private void writeWavHeader(OutputStream out, int channelMask, int sampleRate, int encoding)
        throws IOException {
    short channels;
    switch (channelMask) {
        case AudioFormat.CHANNEL_IN_MONO:
            channels = 1;
            break;
        case AudioFormat.CHANNEL_IN_STEREO:
            channels = 2;
            break;
        default:
            throw new IllegalArgumentException("Unacceptable channel mask");
    }
    short bitDepth;
    switch (encoding) {
        case AudioFormat.ENCODING_PCM_8BIT:
            bitDepth = 8;
            break;
        case AudioFormat.ENCODING_PCM_16BIT:
            bitDepth = 16;
            break;
        case AudioFormat.ENCODING_PCM_FLOAT:
            bitDepth = 32;
            break;
        default:
            throw new IllegalArgumentException("Unacceptable encoding");
    }
    writeWavHeader(out, channels, sampleRate, bitDepth);
}
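The four-argument overload called on the last line is not part of this excerpt. A minimal sketch of what it might look like, assuming integer PCM (WAV format code 1; ENCODING_PCM_FLOAT would strictly need format code 3) and little-endian field order, with the two unknown size fields written as zeros so they can be patched once recording ends:
// Hypothetical companion overload (uses java.nio.ByteBuffer and java.nio.ByteOrder).
private void writeWavHeader(OutputStream out, short channels, int sampleRate, short bitDepth)
        throws IOException {
    ByteBuffer header = ByteBuffer.allocate(44).order(ByteOrder.LITTLE_ENDIAN);
    header.put(new byte[]{'R', 'I', 'F', 'F'});
    header.putInt(0);                                      // ChunkSize: unknown, patched later
    header.put(new byte[]{'W', 'A', 'V', 'E'});
    header.put(new byte[]{'f', 'm', 't', ' '});
    header.putInt(16);                                     // Subchunk1Size: 16 for PCM
    header.putShort((short) 1);                            // AudioFormat: 1 = integer PCM (assumption)
    header.putShort(channels);                             // NumChannels
    header.putInt(sampleRate);                             // SampleRate
    header.putInt(sampleRate * channels * (bitDepth / 8)); // ByteRate
    header.putShort((short) (channels * (bitDepth / 8)));  // BlockAlign
    header.putShort(bitDepth);                             // BitsPerSample
    header.put(new byte[]{'d', 'a', 't', 'a'});
    header.putInt(0);                                      // Subchunk2Size: unknown, patched later
    out.write(header.array());
}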
Example 6: findAudioRecord
/**
 * Finds a usable AudioRecord by probing sample rates, encodings,
 * channel configurations and audio sources.
 *
 * @return an initialized AudioRecord, or null if no combination works
 */
private AudioRecord findAudioRecord() {
    int[] samplingRates = new int[]{44100, 22050, 11025, 8000};
    int[] audioFormats = new int[]{
            AudioFormat.ENCODING_PCM_16BIT,
            AudioFormat.ENCODING_PCM_8BIT};
    int[] channelConfigs = new int[]{
            AudioFormat.CHANNEL_IN_STEREO,
            AudioFormat.CHANNEL_IN_MONO};
    for (int rate : samplingRates) {
        for (int format : audioFormats) {
            for (int config : channelConfigs) {
                try {
                    int bufferSize = AudioRecord.getMinBufferSize(rate, config, format);
                    if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
                        for (int source : AUDIO_SOURCES) {
                            AudioRecord recorder = new AudioRecord(source, rate, config, format, bufferSize * 4);
                            if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                                return recorder;
                            }
                        }
                    }
                } catch (Exception e) {
                    Log.e(TAG, "Init AudioRecord Error." + Log.getStackTraceString(e));
                }
            }
        }
    }
    return null;
}
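A usage sketch for the probe above (the buffer size is an assumption, and the calling app must hold the RECORD_AUDIO permission); remember to release the instance when done:
// Hypothetical caller of findAudioRecord(): records one buffer and releases the instance.
AudioRecord recorder = findAudioRecord();
if (recorder != null) {
    byte[] buffer = new byte[4096];
    recorder.startRecording();
    int read = recorder.read(buffer, 0, buffer.length);
    // ... hand the "read" bytes to an encoder or file writer ...
    recorder.stop();
    recorder.release();
}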
Example 7: bitsPerSample
// Reports the sample bit depth for the configured encoding; anything other
// than 8-bit PCM is treated as 16-bit.
@Override public byte bitsPerSample() {
    if (audioEncoding == AudioFormat.ENCODING_PCM_16BIT) {
        return 16;
    } else if (audioEncoding == AudioFormat.ENCODING_PCM_8BIT) {
        return 8;
    } else {
        return 16;
    }
}
Example 8: start
public void start(HashMap<String, String> options, ModuleResultListener listener) {
    int audioChannel = AudioFormat.CHANNEL_IN_STEREO;
    // Constant-first equals avoids an NPE when the "channel" option is missing
    if ("mono".equals(options.get("channel"))) audioChannel = AudioFormat.CHANNEL_IN_MONO;
    int sampleRate = 22050;
    int audioBit = AudioFormat.ENCODING_PCM_16BIT;
    switch (options.get("quality")) {
        case "low":
            sampleRate = 8000;
            audioBit = AudioFormat.ENCODING_PCM_8BIT;
            break;
        case "high":
            sampleRate = 44100;
            audioBit = AudioFormat.ENCODING_PCM_16BIT;
            break;
    }
    if (mIsRecording) {
        if (mIsPausing) {
            if (mRecorder != null) mRecorder.resumeRecording();
            mIsPausing = false;
            listener.onResult(null);
        } else {
            listener.onResult(Util.getError(Constant.RECORDER_BUSY, Constant.RECORDER_BUSY_CODE));
        }
    } else {
        String time_str = new Date().getTime() + "";
        try {
            mFile = Util.getFile(time_str + ".wav");
        } catch (IOException e) {
            e.printStackTrace();
            listener.onResult(Util.getError(Constant.MEDIA_INTERNAL_ERROR, Constant.MEDIA_INTERNAL_ERROR_CODE));
            return; // abort: without an output file the recorder cannot be created
        }
        mRecorder = OmRecorder.wav(
                new PullTransport.Default(getMic(audioBit, audioChannel, sampleRate), new PullTransport.OnAudioChunkPulledListener() {
                    @Override
                    public void onAudioChunkPulled(AudioChunk audioChunk) {
                    }
                }), mFile);
        mRecorder.startRecording();
        mIsRecording = true;
        listener.onResult(null);
    }
}
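The getMic helper passed to PullTransport.Default is not part of this excerpt. Assuming it wraps the omrecorder library's PullableSource/AudioRecordConfig pairing in the way that library's README describes, it might look roughly like the sketch below; treat the exact class names and constructor signatures as an assumption rather than a confirmed API.
// Hypothetical reconstruction of getMic(): builds a PullableSource from the chosen
// encoding, channel mask, and sample rate. Names follow the omrecorder library, but
// the signatures are assumptions, not verified against a specific library version.
private PullableSource getMic(int audioBit, int audioChannel, int sampleRate) {
    return new PullableSource.Default(
            new AudioRecordConfig.Default(
                    MediaRecorder.AudioSource.MIC,  // record from the device microphone
                    audioBit,                       // e.g. AudioFormat.ENCODING_PCM_8BIT for "low"
                    audioChannel,
                    sampleRate));
}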