This article collects typical usage examples of the android.media.MediaRecorder.AudioSource.MIC property in Java. If you are wondering what exactly the AudioSource.MIC property does, how it is used, or what working code looks like, the selected examples below may help. You can also explore further usage examples of the enclosing class, android.media.MediaRecorder.AudioSource.
The following presents 15 code examples of the AudioSource.MIC property, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
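Before the examples, here is a minimal sketch (not taken from any of the projects below) of the typical way AudioSource.MIC is combined with AudioRecord; the class and method names are illustrative only. Recording from the microphone additionally requires the RECORD_AUDIO permission, declared in the manifest and, on API 23+, granted at runtime.

import android.media.AudioFormat;
import android.media.AudioRecord;
import android.media.MediaRecorder.AudioSource;

public class MicCaptureSketch {
    // 44100 Hz, mono, 16-bit PCM is documented as supported on all devices.
    private static final int SAMPLE_RATE = 44100;

    public AudioRecord createMicRecorder() {
        int bufferSize = AudioRecord.getMinBufferSize(SAMPLE_RATE,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
        AudioRecord recorder = new AudioRecord(AudioSource.MIC, SAMPLE_RATE,
                AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, bufferSize);
        if (recorder.getState() != AudioRecord.STATE_INITIALIZED) {
            recorder.release();   // free the native resources on failure
            return null;
        }
        return recorder;          // caller handles startRecording()/read()/stop()/release()
    }
}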
Example 1: findAudioRecord
public AudioRecord findAudioRecord() {
    for (int rate : mSampleRates) {
        for (short audioFormat : new short[] { AudioFormat.ENCODING_PCM_16BIT }) {
            for (short channelConfig : new short[] { AudioFormat.CHANNEL_IN_MONO }) {
                try {
                    Log.d("C.TAG", "Attempting rate " + rate + "Hz, bits: " + audioFormat
                            + ", channel: " + channelConfig);
                    int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
                    if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
                        // Check whether this combination can actually be instantiated.
                        AudioRecord recorder = new AudioRecord(AudioSource.MIC, rate,
                                channelConfig, audioFormat, bufferSize);
                        if (recorder.getState() == AudioRecord.STATE_INITIALIZED)
                            return recorder;
                    }
                } catch (Exception e) {
                    Log.e("C.TAG", rate + " Exception, keep trying.", e);
                }
            }
        }
    }
    return null;
}
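A hypothetical caller for a helper like findAudioRecord() might look as follows; the buffer size and variable names here are assumptions for illustration, not part of the example above.

AudioRecord recorder = findAudioRecord();
if (recorder != null) {
    recorder.startRecording();
    short[] buffer = new short[2048];                     // arbitrary read buffer
    int read = recorder.read(buffer, 0, buffer.length);   // blocking PCM read
    // ... process the first "read" samples ...
    recorder.stop();
    recorder.release();                                   // always release the hardware
}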
Example 2: getInstanse
@SuppressWarnings("deprecation")
public static ExtAudioRecorder getInstanse(Boolean recordingCompressed) {
    ExtAudioRecorder result = null;
    if (recordingCompressed) {
        result = new ExtAudioRecorder(false, AudioSource.MIC,
                sampleRates[3], AudioFormat.CHANNEL_CONFIGURATION_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
    } else {
        int i = 0;
        do {
            // Try each sample rate until one of them initializes successfully.
            result = new ExtAudioRecorder(true, AudioSource.MIC,
                    sampleRates[i], AudioFormat.CHANNEL_CONFIGURATION_MONO,
                    AudioFormat.ENCODING_PCM_16BIT);
        } while ((++i < sampleRates.length)
                & !(result.getState() == ExtAudioRecorder.State.INITIALIZING));
    }
    return result;
}
Example 3: AudioRecorder
public AudioRecorder(LoudnessSensor sensor) {
    this.mSensor = sensor;

    int channel = AudioFormat.CHANNEL_IN_MONO;
    int mic = AudioSource.MIC;

    // Compute the buffer size
    int minAudioBuffer = AudioRecord.getMinBufferSize(
            COMMON_AUDIO_FREQUENCY,
            channel,
            AudioFormat.ENCODING_PCM_16BIT);
    int audioBuffer = minAudioBuffer * 6;

    // Create the recorder
    audioInput = new AudioRecord(
            mic,
            COMMON_AUDIO_FREQUENCY,
            channel,
            AudioFormat.ENCODING_PCM_16BIT,
            audioBuffer);
}
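The LoudnessSensor class itself is not shown above. As a rough sketch of how the captured 16-bit PCM data is commonly turned into a loudness value (RMS amplitude, then dB relative to full scale), something like the method below could sit in the same class; it is an assumption for illustration, not part of the original project.

// Hypothetical helper: read one buffer from audioInput and return its level in dBFS.
private double readLoudnessDb(short[] buffer) {
    int read = audioInput.read(buffer, 0, buffer.length);
    if (read <= 0) {
        return Double.NEGATIVE_INFINITY;         // nothing captured
    }
    double sumSquares = 0;
    for (int i = 0; i < read; i++) {
        double sample = buffer[i] / 32768.0;     // normalize 16-bit PCM to [-1, 1)
        sumSquares += sample * sample;
    }
    double rms = Math.sqrt(sumSquares / read);
    return 20 * Math.log10(rms + 1e-12);         // dB relative to full scale
}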
Example 4: getInstance
public static ExtAudioRecorder getInstance(Boolean recordingCompressed, VoiceCallback callback) {
    if (recordingCompressed) {
        result = new ExtAudioRecorder(false, AudioSource.MIC,
                sampleRates[3], AudioFormat.CHANNEL_CONFIGURATION_MONO,
                AudioFormat.ENCODING_PCM_16BIT, callback);
    } else {
        int i = 3;
        do {
            result = new ExtAudioRecorder(true, AudioSource.MIC,
                    sampleRates[i], AudioFormat.CHANNEL_CONFIGURATION_MONO,
                    AudioFormat.ENCODING_PCM_16BIT, callback);
        } while ((--i >= 0)
                && !(result.getState() == ExtAudioRecorder.State.INITIALIZING));
    }
    return result;
}
Example 5: startRecordingPCM
public void startRecordingPCM() throws IOException {
    String methodTAG = "startRecordingPCM";

    // Create record object
    recWAV = new RecordingWAV(AudioSource.MIC, sampleFreq, AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    log.logD(TAG, "recWAV.State after constructor is: " + recWAV.getState());
    if (recWAV.state == RecordingWAV.State.ERROR) {
        log.logD(methodTAG, "recWAV.State after constructor is ERROR, thus shutting down. Writing a log.");
        log.logCriticalError(TAG, methodTAG, "recWAV.State after constructor is ERROR, thus shutting down.");
    }

    recWAV.setOutputFile(fPath);
    log.logI(TAG, "recWAV.State after setOutputFile() is: " + recWAV.getState());

    recWAV.prepare();
    log.logI(TAG, "recWAV.State after prepare() is: " + recWAV.getState());

    tPromptString.setTextColor(getResources().getColor(R.color.hltGreen));

    recWAV.start();
    log.logI(TAG, "recWAV.State after start() is: " + recWAV.getState());
}
Example 6: getInstanse
@SuppressWarnings("deprecation")
public static ExtAudioRecorder getInstanse(Boolean recordingCompressed) {
    ExtAudioRecorder result = null;
    if (recordingCompressed) {
        result = new ExtAudioRecorder(false,
                AudioSource.MIC,
                sampleRates[3],
                AudioFormat.CHANNEL_CONFIGURATION_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
    } else {
        int i = 2;
        do {
            result = new ExtAudioRecorder(true,
                    AudioSource.MIC,
                    sampleRates[i],
                    AudioFormat.CHANNEL_CONFIGURATION_MONO,
                    AudioFormat.ENCODING_PCM_16BIT);
        } while ((++i < sampleRates.length) & !(result.getState() == ExtAudioRecorder.State.INITIALIZING));
    }
    return result;
}
Example 7: run
@Override
public void run() {
    int minBufferSize = AudioRecord.getMinBufferSize(sampleRate, CHANNEL, ENCODING);
    AudioRecord recorder = new AudioRecord(AudioSource.MIC, sampleRate,
            CHANNEL, ENCODING, minBufferSize);
    recorder.startRecording();

    PcmAudioRecordReader in = new PcmAudioRecordReader(recorder);
    PcmDftFilter dft = new PcmDftFilter(sampleRate, 12000, 22000, 100);
    data = dft.getData();
    PcmFilterReader fin = new PcmFilterReader(in, dft);
    try {
        while (!stopped) {
            double read = fin.read();
        }
    } catch (IOException e) {
        e.printStackTrace();
    } finally {
        recorder.stop();
        recorder.release();
    }
}
Example 8: getInstanse
@SuppressWarnings("deprecation")
public static AudioRecorder getInstanse(Boolean recordingCompressed) {
    AudioRecorder result = null;
    if (recordingCompressed) {
        result = new AudioRecorder(false,
                AudioSource.MIC,
                sampleRates[2],
                AudioFormat.CHANNEL_CONFIGURATION_MONO,
                AudioFormat.ENCODING_PCM_16BIT);
    }
    // wav format
    else {
        int i = 0;
        do {
            result = new AudioRecorder(true,
                    AudioSource.MIC,
                    sampleRates[i],
                    AudioFormat.CHANNEL_CONFIGURATION_MONO,
                    AudioFormat.ENCODING_PCM_16BIT);
        } while ((++i < sampleRates.length) & !(result.getState() == AudioRecorder.State.INITIALIZING));
    }
    return result;
}
Example 9: getInstance
public static ExtAudioRecorder getInstance(WAVRecorder handler, String id, int sampleRate, int channels, int encoding) {
    ExtAudioRecorder result = null;
    int[] processedSampleRates = sampleRates;
    if (0 != sampleRate) {
        processedSampleRates = new int[1];
        processedSampleRates[0] = sampleRate;
    }
    int i = 0;
    do {
        result = new ExtAudioRecorder(handler,
                id,
                AudioSource.MIC,
                processedSampleRates[i],
                channels,
                encoding);
    } while ((++i < processedSampleRates.length) & !(result.getState() == ExtAudioRecorder.State.INITIALIZING));
    return result;
}
Example 10: run
@Override
public void run() {
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);

    AudioRecord record = new AudioRecord(AudioSource.MIC, SAMPLING_RATE,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, mBufferSize);
    record.startRecording();

    while (shouldContinue()) {
        record.read(mAudioBuffer, 0, mBufferSize / 2);
        mWaveformView.updateAudioData(mAudioBuffer);
        updateDecibelLevel();
    }

    record.stop();
    record.release();
}
Example 11: fillBuffer
private byte[] fillBuffer(byte[] audioData, int bufferSize) {
    // Instantiate the AudioRecord for the microphone at 8 kHz mono, 16-bit PCM.
    AudioRecord recorder = new AudioRecord(AudioSource.MIC, 8000, AudioFormat.CHANNEL_IN_MONO,
            AudioFormat.ENCODING_PCM_16BIT, bufferSize);

    // If the recorder is stopped or not yet recording, start it.
    if (recorder.getRecordingState() == android.media.AudioRecord.RECORDSTATE_STOPPED)
        recorder.startRecording();

    // Read the raw PCM audio data into the audioData array.
    recorder.read(audioData, 0, bufferSize);

    // Stop the recorder once the buffer has been filled.
    if (recorder.getRecordingState() == android.media.AudioRecord.RECORDSTATE_RECORDING)
        recorder.stop();

    return audioData;
}
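A hypothetical caller for fillBuffer() might size the array from AudioRecord.getMinBufferSize() with the same 8000 Hz mono 16-bit parameters used inside the method; the snippet below is an assumption for illustration. Note that fillBuffer() creates a new AudioRecord on every call and never calls release(), so repeated calls can leak native recorder resources.

// Assumed usage, not from the original project.
int bufferSize = AudioRecord.getMinBufferSize(8000,
        AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
byte[] audioData = new byte[bufferSize];
audioData = fillBuffer(audioData, bufferSize);   // one buffer of raw 16-bit PCM bytes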
Example 12: run
@Override
public void run() {
    android.os.Process.setThreadPriority(android.os.Process.THREAD_PRIORITY_AUDIO);

    AudioRecord record = new AudioRecord(AudioSource.MIC, SAMPLING_RATE,
            AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, mBufferSize);
    record.startRecording();

    while (shouldContinue()) {
        record.read(mAudioBuffer, 0, mBufferSize / 2);
        mWaveformView.updateAudioData(mAudioBuffer);
        updateDecibelLevel();
    }

    record.stop();
    record.release();
}
Example 13: findAudioRecord
public AudioRecord findAudioRecord() {
    try {
        int bufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfiguration,
                AudioFormat.ENCODING_PCM_16BIT);
        if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
            // Check whether we can instantiate the recorder successfully.
            AudioRecord recorder = new AudioRecord(AudioSource.MIC,
                    sampleRate, channelConfiguration,
                    AudioFormat.ENCODING_PCM_16BIT, bufferSize);
            Log.d("tag", "done1");
            if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                Log.d("tag", "done1.6");
                return recorder;
            }
        }
    } catch (Exception e) {
        Log.d("tag", "done2");
    }
    return null;
}
Example 14: createAudioRecord
private void createAudioRecord() throws InitializationException {
    // The AudioRecord configuration parameters used here are guaranteed to be
    // supported on all devices.
    // AudioFormat.CHANNEL_IN_MONO should be used in place of the deprecated
    // AudioFormat.CHANNEL_CONFIGURATION_MONO, but it is not available at API level 3.
    // Unlike the AudioTrack buffer, the AudioRecord buffer can be larger than the
    // minimum without causing any problems, but the minimum works well.
    final int audioRecordBufferSizeInBytes = AudioRecord.getMinBufferSize(
            SpeechTrainerConfig.SAMPLE_RATE_HZ, AudioFormat.CHANNEL_CONFIGURATION_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    if (audioRecordBufferSizeInBytes <= 0) {
        throw new InitializationException("Failed to initialize recording.");
    }

    // CHANNEL_IN_MONO is guaranteed to work on all devices.
    // ENCODING_PCM_16BIT is guaranteed to work on all devices.
    audioRecord = new AudioRecord(AudioSource.MIC, SpeechTrainerConfig.SAMPLE_RATE_HZ,
            AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT,
            audioRecordBufferSizeInBytes);
    if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
        audioRecord = null;
        throw new InitializationException("Failed to initialize recording.");
    }
}
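The class's read path is not shown above. As a rough sketch of how such an audioRecord field is usually drained after createAudioRecord() succeeds; the recording flag and the audioRecordBufferSizeInBytes field used below are assumptions for illustration, not names taken from the original class.

// Hypothetical read loop, not part of the original class.
audioRecord.startRecording();
short[] samples = new short[audioRecordBufferSizeInBytes / 2];  // 2 bytes per 16-bit sample
while (recording) {                                             // assumed stop flag
    int read = audioRecord.read(samples, 0, samples.length);
    // ... hand the first "read" samples to the trainer ...
}
audioRecord.stop();
audioRecord.release();
audioRecord = null;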
Example 15: getAudioRecorder
private AudioRecord getAudioRecorder() {
    for (int rate : sampleRates) {
        for (short audioFormat : new short[] { AudioFormat.ENCODING_PCM_16BIT, AudioFormat.ENCODING_PCM_8BIT }) {
            for (short channelConfig : new short[] { AudioFormat.CHANNEL_IN_MONO }) {
                try {
                    bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
                    if (bufferSize > 0) {
                        // check if we can instantiate and have a success
                        AudioRecord recorder = new AudioRecord(AudioSource.MIC, rate, channelConfig, audioFormat, bufferSize);
                        if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
                            selectedRate = rate;
                            selectedChannel = channelConfig == AudioFormat.CHANNEL_IN_STEREO ? (short) 2 : (short) 1;
                            selectedBPP = audioFormat == AudioFormat.ENCODING_PCM_16BIT ? (short) 16 : (short) 8;
                            String format = audioFormat == AudioFormat.ENCODING_PCM_16BIT ? "PCM 16 Bit" : "PCM 8 Bit";
                            String channels = channelConfig == AudioFormat.CHANNEL_IN_STEREO ? "Stereo" : "Mono";
                            String diags = "Audio recorded using following settings: Rate: " + String.valueOf(rate) + " " +
                                    "Audio Format: " + format + " " +
                                    "Channel Config: " + channels;
                            JTApp.logMessage(TAG, JTApp.LOG_SEVERITY_INFO, diags);
                            return recorder;
                        }
                    }
                } catch (Exception ignored) {
                }
            }
        }
    }
    return null;
}