This page collects typical usage examples of the Java field android.media.AudioRecord.STATE_INITIALIZED. If you are wondering what AudioRecord.STATE_INITIALIZED is for, how to use it, or what real code that relies on it looks like, the curated examples below should help. You can also look further into the enclosing class, android.media.AudioRecord.
The following 15 code examples show AudioRecord.STATE_INITIALIZED in use, ordered roughly by popularity.
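All of the examples below follow the same basic pattern: construct an AudioRecord, then compare getState() against AudioRecord.STATE_INITIALIZED before recording, because an AudioRecord object can be constructed and still be unusable; getState() is the documented way to verify initialization. A minimal sketch of that pattern (the 16000 Hz sample rate is an illustrative choice, not taken from the examples):

int sampleRate = 16000; // illustrative value
int minBufferSize = AudioRecord.getMinBufferSize(
        sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT);
AudioRecord record = new AudioRecord(MediaRecorder.AudioSource.MIC,
        sampleRate, AudioFormat.CHANNEL_IN_MONO, AudioFormat.ENCODING_PCM_16BIT, minBufferSize);
if (record.getState() != AudioRecord.STATE_INITIALIZED) {
    // Unsupported configuration, or the RECORD_AUDIO permission is missing.
    record.release();
    record = null;
}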
Example 1: start
/**
* <p>Starts the recording, and sets the state to RECORDING.</p>
*/
public void start() {
if (mRecorder.getState() == AudioRecord.STATE_INITIALIZED) {
mRecorder.startRecording();
if (mRecorder.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
setState(State.RECORDING);
new Thread() {
public void run() {
while (mRecorder != null && mRecorder.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
int status = read(mRecorder);
if (status < 0) {
break;
}
}
}
}.start();
} else {
Log.e(LOG_TAG, "startRecording() failed");
setState(State.ERROR);
}
} else {
Log.e(LOG_TAG, "start() called on illegal state");
setState(State.ERROR);
}
}
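The read(AudioRecord) helper called from the worker thread is not part of this excerpt. A minimal sketch of what such a helper might do, assuming the class keeps a byte[] mBuffer field (as the related constructor in Example 9 does):

// Hypothetical helper: pull one buffer of PCM data and return the byte count,
// or a negative AudioRecord error code (e.g. ERROR_INVALID_OPERATION).
private int read(AudioRecord recorder) {
    int numBytes = recorder.read(mBuffer, 0, mBuffer.length);
    if (numBytes < 0) {
        Log.e(LOG_TAG, "AudioRecord.read() failed: " + numBytes);
    }
    // A real implementation would copy mBuffer[0..numBytes) into the recording here.
    return numBytes;
}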
Example 2: createAudioRecord
/**
* Creates a new {@link AudioRecord}.
*
* @return A newly created {@link AudioRecord}, or null if it cannot be created (missing
* permissions?).
*/
private AudioRecord createAudioRecord() {
for (int sampleRate : SAMPLE_RATE_CANDIDATES) {
final int sizeInBytes = AudioRecord.getMinBufferSize(sampleRate, CHANNEL, ENCODING);
if (sizeInBytes == AudioRecord.ERROR_BAD_VALUE) {
continue;
}
final AudioRecord audioRecord = new AudioRecord(MediaRecorder.AudioSource.MIC,
sampleRate, CHANNEL, ENCODING, sizeInBytes);
if (audioRecord.getState() == AudioRecord.STATE_INITIALIZED) {
mBuffer = new byte[sizeInBytes];
return audioRecord;
} else {
audioRecord.release();
}
}
return null;
}
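A possible call site for this factory method; the fallback handling is an assumption, not part of the original project:

AudioRecord audioRecord = createAudioRecord();
if (audioRecord == null) {
    // No candidate sample rate worked, or the RECORD_AUDIO permission was not granted.
    Log.e(TAG, "Unable to create AudioRecord");
    return;
}
audioRecord.startRecording();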
Example 3: stop
/**
* <p>Stops the recording, and sets the state to STOPPED.
* If stopping fails then sets the state to ERROR.</p>
*/
public void stop() {
// We check the underlying AudioRecord state trying to avoid IllegalStateException.
// If it still occurs then we catch it.
if (mRecorder.getState() == AudioRecord.STATE_INITIALIZED &&
mRecorder.getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
try {
mRecorder.stop();
mRecorder.release();
setState(State.STOPPED);
} catch (IllegalStateException e) {
Log.e(LOG_TAG, "native stop() called in illegal state: " + e.getMessage());
setState(State.ERROR);
}
} else {
Log.e(LOG_TAG, "stop() called in illegal state");
setState(State.ERROR);
}
}
Example 4: stopRecord
/**
* stop record
*
* @throws IOException
* @throws InterruptedException
*/
public void stopRecord() throws IOException, InterruptedException {
// especially for OPPO, XIAOMI, MEIZU, HUAWEI and similar devices
Thread.sleep(250);
destroyThread();
if (mRecorder != null) {
if (mRecorder.getState() == AudioRecord.STATE_INITIALIZED) {
mRecorder.stop();
}
if (mRecorder != null) {
mRecorder.release();
}
}
if (dos != null) {
dos.flush();
dos.close();
}
length = file.length();
deleteFile();
}
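Because stopRecord() declares checked exceptions, callers have to handle them. A hypothetical call site (recordWrapper stands in for an instance of the class above):

try {
    recordWrapper.stopRecord();
} catch (IOException | InterruptedException e) {
    Log.e(TAG, "Failed to stop recording cleanly", e);
}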
Example 5: stopListening
public void stopListening() {
synchronized (API_LOCK) {
// stops the recording activity
isListening = false;
isRecording = false;
if (recordingThread != null) {
try {
recordingThread.join();
} catch (InterruptedException e) {
Log.e(TAG, "Failed to join recordingThread", e);
}
recordingThread = null;
}
if (null != recorder) {
if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
recorder.stop();
}
recorder.release();
}
}
}
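The matching start path is not shown. A hypothetical startListening() counterpart, sketched under the assumption that bufferSize is a field and the recording thread drains recorder.read() while isRecording is true:

public void startListening() {
    synchronized (API_LOCK) {
        if (isListening) {
            return;
        }
        isListening = true;
        isRecording = true;
        recorder.startRecording();
        recordingThread = new Thread(new Runnable() {
            @Override
            public void run() {
                byte[] buffer = new byte[bufferSize]; // bufferSize is an assumed field
                while (isRecording) {
                    int read = recorder.read(buffer, 0, buffer.length);
                    if (read > 0) {
                        // hand buffer[0..read) to a consumer
                    }
                }
            }
        });
        recordingThread.start();
    }
}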
Example 6: findAudioRecord
public AudioRecord findAudioRecord() {
for (int rate : mSampleRates) {
for (short audioFormat : new short[] { AudioFormat.ENCODING_PCM_16BIT }) {
for (short channelConfig : new short[] { AudioFormat.CHANNEL_IN_MONO }) {
try {
Log.d("C.TAG", "Attempting rate " + rate + "Hz, bits: " + audioFormat + ", channel: "
+ channelConfig);
int bufferSize = AudioRecord.getMinBufferSize(rate, channelConfig, audioFormat);
if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
// check if we can instantiate and have a success
AudioRecord recorder = new AudioRecord(AudioSource.MIC, rate, channelConfig, audioFormat, bufferSize);
if (recorder.getState() == AudioRecord.STATE_INITIALIZED)
return recorder;
}
} catch (Exception e) {
Log.e("C.TAG", rate + "Exception, keep trying.",e);
}
}
}
}
return null;
}
Example 7: start
public void start()
{
if(getState()==AudioRecord.STATE_INITIALIZED)
{
startRecording();
if(getRecordingState()==AudioRecord.RECORDSTATE_RECORDING)
{
new Thread() {
public void run() {
recorderLoop();
}
}.start();
} else {
Log.e("startRecording()"," failed");
}
} else {
Log.e("start()"," called on illegal state");
}
}
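The recorderLoop() method is not included in this excerpt. The unqualified getState()/startRecording() calls suggest the class extends AudioRecord, so a sketch of the loop might look like this (the 4096-byte buffer is an arbitrary choice):

private void recorderLoop() {
    byte[] buffer = new byte[4096];
    while (getRecordingState() == AudioRecord.RECORDSTATE_RECORDING) {
        int read = read(buffer, 0, buffer.length); // inherited AudioRecord.read()
        if (read < 0) {
            break; // read error, e.g. ERROR_INVALID_OPERATION
        }
        // hand buffer[0..read) to a consumer
    }
}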
Example 8: run
@Override
public void run() {
byte[] buffer = new byte[audioBufferSize];
recorder = new AudioRecord(audioSource, sampleRate, channel, audioFormat, audioBufferSize);
try {
while (recorder.getState() != AudioRecord.STATE_INITIALIZED)
Thread.sleep(100, 0);
} catch (InterruptedException e) {
recorder.release();
return;
}
recorder.startRecording();
for (; ; ) {
int length = recorder.read(buffer, 0, buffer.length);
if (length < 0)
Log.e("Record", "error: " + Integer.toString(length));
else {
for (WebSocket ws : wss)
ws.sendBinary(buffer);
}
if (Thread.interrupted()) {
recorder.stop();
return;
}
}
}
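The snippet above waits for STATE_INITIALIZED and then streams each captured buffer to every connected WebSocket. How the class is declared is not shown; assuming it implements Runnable, a hypothetical way to drive it would be:

// AudioStreamTask is an assumed name for the class that contains run() above.
Thread captureThread = new Thread(new AudioStreamTask());
captureThread.start();
// run() checks Thread.interrupted() and stops the recorder, so stopping is just:
captureThread.interrupt();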
Example 9: RawAudioRecorder
/**
* <p>Instantiates a new recorder and sets the state to INITIALIZING.
* In case of errors, no exception is thrown, but the state is set to ERROR.</p>
*
* <p>Android docs say: 44100Hz is currently the only rate that is guaranteed to work on all devices,
* but other rates such as 22050, 16000, and 11025 may work on some devices.</p>
*
* @param audioSource Identifier of the audio source (e.g. microphone)
* @param sampleRate Sample rate (e.g. 16000)
*/
public RawAudioRecorder(int audioSource, int sampleRate) {
mSampleRate = sampleRate;
// E.g. 1 second of 16kHz 16-bit mono audio takes 32000 bytes.
mOneSec = RESOLUTION_IN_BYTES * CHANNELS * mSampleRate;
mRecording = new byte[mOneSec * MAX_RECORDING_TIME_IN_SECS];
try {
setBufferSizeAndFramePeriod();
mRecorder = new AudioRecord(audioSource, mSampleRate, AudioFormat.CHANNEL_IN_MONO, RESOLUTION, mBufferSize);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
boolean agcAvailable = AutomaticGainControl.isAvailable();
if(agcAvailable) {
AutomaticGainControl.create(mRecorder.getAudioSessionId());
}
}
if (mRecorder.getState() != AudioRecord.STATE_INITIALIZED) {
throw new Exception("AudioRecord initialization failed");
}
mBuffer = new byte[mFramePeriod * RESOLUTION_IN_BYTES * CHANNELS];
setState(State.READY);
} catch (Exception e) {
release();
setState(State.ERROR);
if (e.getMessage() == null) {
Log.e(LOG_TAG, "Unknown error occured while initializing recording");
} else {
Log.e(LOG_TAG, e.getMessage());
}
}
}
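The constructor relies on setBufferSizeAndFramePeriod(), which is not part of this excerpt. A sketch of a typical implementation, assuming a TIMER_INTERVAL constant in milliseconds (the doubling keeps the buffer large enough for two notification periods):

private void setBufferSizeAndFramePeriod() {
    mFramePeriod = mSampleRate * TIMER_INTERVAL / 1000; // frames per callback period
    mBufferSize = mFramePeriod * 2 * RESOLUTION_IN_BYTES * CHANNELS;
    int minBufferSize = AudioRecord.getMinBufferSize(
            mSampleRate, AudioFormat.CHANNEL_IN_MONO, RESOLUTION);
    if (mBufferSize < minBufferSize) {
        mBufferSize = minBufferSize;
        mFramePeriod = mBufferSize / (2 * RESOLUTION_IN_BYTES * CHANNELS);
    }
}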
Example 10: getAudioRecord
/**
* Get AudioRecord.
*
* @return AudioRecord object instance.
*/
public AudioRecord getAudioRecord() {
if (mAudioRecord != null && mAudioRecord.getState() == AudioRecord.STATE_INITIALIZED) {
return mAudioRecord;
} else {
return null;
}
}
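A possible call site; returning null lets callers treat an uninitialized recorder and a missing one the same way (audioHandler is a hypothetical owner of this getter):

AudioRecord record = audioHandler.getAudioRecord();
if (record != null && record.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
    record.startRecording();
}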
Example 11: createAudioRecord
private void createAudioRecord() throws InitializationException {
// The AudioRecord configurations parameters used here, are guaranteed
// to be supported on all devices.
// AudioFormat.CHANNEL_IN_MONO should be used in place of deprecated
// AudioFormat.CHANNEL_CONFIGURATION_MONO, but it is not available for
// API level 3.
// Unlike AudioTrack buffer, AudioRecord buffer could be larger than
// minimum without causing any problems. But minimum works well.
final int audioRecordBufferSizeInBytes = AudioRecord.getMinBufferSize(
SpeechTrainerConfig.SAMPLE_RATE_HZ, AudioFormat.CHANNEL_CONFIGURATION_MONO,
AudioFormat.ENCODING_PCM_16BIT);
if (audioRecordBufferSizeInBytes <= 0) {
throw new InitializationException("Failed to initialize recording.");
}
// CHANNEL_IN_MONO is guaranteed to work on all devices.
// ENCODING_PCM_16BIT is guaranteed to work on all devices.
audioRecord = new AudioRecord(AudioSource.MIC, SpeechTrainerConfig.SAMPLE_RATE_HZ,
AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT,
audioRecordBufferSizeInBytes);
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED) {
audioRecord = null;
throw new InitializationException("Failed to initialize recording.");
}
}
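As the comments note, AudioFormat.CHANNEL_CONFIGURATION_MONO is only used here for API level 3 compatibility. On projects with a higher minimum API, the same configuration can be expressed with the non-deprecated constant; a sketch:

int bufferSize = AudioRecord.getMinBufferSize(
        SpeechTrainerConfig.SAMPLE_RATE_HZ,
        AudioFormat.CHANNEL_IN_MONO,
        AudioFormat.ENCODING_PCM_16BIT);
AudioRecord audioRecord = new AudioRecord(AudioSource.MIC,
        SpeechTrainerConfig.SAMPLE_RATE_HZ,
        AudioFormat.CHANNEL_IN_MONO,
        AudioFormat.ENCODING_PCM_16BIT,
        bufferSize);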
Example 12: findAudioRecord
/**
 * Find a usable audio recorder configuration.
 *
 * @return a working AudioRecord instance, or null if none of the tried configurations initializes
 */
private AudioRecord findAudioRecord() {
int[] samplingRates = new int[]{44100, 22050, 11025, 8000};
int[] audioFormats = new int[]{
AudioFormat.ENCODING_PCM_16BIT,
AudioFormat.ENCODING_PCM_8BIT};
int[] channelConfigs = new int[]{
AudioFormat.CHANNEL_IN_STEREO,
AudioFormat.CHANNEL_IN_MONO};
for (int rate : samplingRates) {
for (int format : audioFormats) {
for (int config : channelConfigs) {
try {
int bufferSize = AudioRecord.getMinBufferSize(rate, config, format);
if (bufferSize != AudioRecord.ERROR_BAD_VALUE) {
for (int source : AUDIO_SOURCES) {
AudioRecord recorder = new AudioRecord(source, rate, config, format, bufferSize * 4);
if (recorder.getState() == AudioRecord.STATE_INITIALIZED) {
return recorder;
}
}
}
} catch (Exception e) {
Log.e(TAG, "Init AudioRecord Error." + Log.getStackTraceString(e));
}
}
}
}
return null;
}
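The AUDIO_SOURCES array iterated above is not shown in this excerpt. A plausible definition, given purely as an assumption:

// Hypothetical field; the actual project may try different or fewer sources.
private static final int[] AUDIO_SOURCES = new int[]{
        MediaRecorder.AudioSource.MIC,
        MediaRecorder.AudioSource.DEFAULT,
        MediaRecorder.AudioSource.CAMCORDER
};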
Example 13: initialise
/**
* Initialise the Voice Recorder
*
* @return The audio record initialisation state.
*/
public int initialise() {
int count = 0;
while (count < 4) {
count++;
saiyAudio = new SaiyAudio(audioSource, sampleRateInHz, channelConfig, audioFormat,
bufferSizeInBytes, enhance);
if (saiyAudio.getState() == AudioRecord.STATE_INITIALIZED) {
return AudioRecord.STATE_INITIALIZED;
} else {
if (DEBUG) {
MyLog.w(CLS_NAME, "SaiyAudio reinitialisation attempt ~ " + count);
}
if (Looper.myLooper() != null && Looper.myLooper() != Looper.getMainLooper()) {
// Give the audio object a small chance to sort itself out
try {
Thread.sleep(250);
} catch (InterruptedException e) {
if (DEBUG) {
MyLog.w(CLS_NAME, "SaiyAudio InterruptedException");
e.printStackTrace();
}
}
}
}
}
if (DEBUG) {
MyLog.w(CLS_NAME, "SaiyAudio initialisation failed");
}
return AudioRecord.STATE_UNINITIALIZED;
}
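A hypothetical call site: only proceed with recording once initialise() reports STATE_INITIALIZED (the startListening() name on the wrapper is an assumption):

if (voiceRecorder.initialise() == AudioRecord.STATE_INITIALIZED) {
    voiceRecorder.startListening();
} else {
    // Recording is unavailable; notify the caller or fall back.
}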
Example 14: initAudioRecord
private boolean initAudioRecord(Context context, int audioSource, int sampleRate, int channelConfig, int chanelNumber, int audioFormat, int nbBitsPerSample, int bufferSize)
{
witchRecorder = 1;
isAudioRecordRunning = false;
try
{
audioRecordSampleRate = sampleRate;
int timerInterval = 120;
audioRecordPeriodInFrames = sampleRate * timerInterval / 1000;
bufferSize = audioRecordPeriodInFrames * 2 * chanelNumber * nbBitsPerSample / 8;
if(bufferSize < AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat))
{
bufferSize = AudioRecord.getMinBufferSize(sampleRate, channelConfig, audioFormat);
audioRecordPeriodInFrames = bufferSize / ( 2 * nbBitsPerSample * chanelNumber / 8 );
}
audioRecord = new AudioRecord(audioSource, sampleRate, channelConfig, audioFormat, bufferSize);
if (audioRecord.getState() != AudioRecord.STATE_INITIALIZED)
{
Log.w("RecordFileWriter", "initAudioRecord : " + context.getString(R.string.log_record_file_writer_error_audiorecord_init));
databaseManager.insertLog(context, "" + context.getString(R.string.log_record_file_writer_error_audiorecord_init), new Date().getTime(), 2, false);
return false;
}
audioRecord.setRecordPositionUpdateListener(onAudioRecordPositionUpdateListener);
audioRecord.setPositionNotificationPeriod(audioRecordPeriodInFrames);
}
catch (Exception e)
{
Log.w("RecordFileWriter", "initAudioRecord : " + context.getString(R.string.log_record_file_writer_error_audiorecord_init) + " : " + e);
databaseManager.insertLog(context, "" + context.getString(R.string.log_record_file_writer_error_audiorecord_init), new Date().getTime(), 2, false);
return false;
}
return true;
}
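To make the buffer arithmetic concrete, here is a worked example with illustrative values (not output from the project):

// sampleRate = 44100 Hz, timerInterval = 120 ms, mono (chanelNumber = 1), 16-bit
// audioRecordPeriodInFrames = 44100 * 120 / 1000     = 5292 frames per notification period
// bufferSize                = 5292 * 2 * 1 * 16 / 8  = 21168 bytes (room for two periods)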
Example 15: prepareAudioRecord
private boolean prepareAudioRecord(Context context, File recordFile, int audioRecordChannelConfig, int chanelNumber, int audioRecordAudioFormat, int nbBitsPerSample)
{
try
{
if (audioRecord.getState() == AudioRecord.STATE_INITIALIZED)
{
audioRecordRandomAccessFile = new RandomAccessFile(recordFile.getAbsolutePath(), "rw");
audioRecordRandomAccessFile.setLength(0);
audioRecordRandomAccessFile.writeBytes("RIFF"); // 00 - Marks the file as a riff file
audioRecordRandomAccessFile.writeInt(0); // 04 - Size of the overall file
audioRecordRandomAccessFile.writeBytes("WAVE"); // 08 - File Type Header
audioRecordRandomAccessFile.writeBytes("fmt "); // 12 - Format chunk marker
audioRecordRandomAccessFile.writeInt(Integer.reverseBytes(16)); // 16 - Length of format data as listed above
audioRecordRandomAccessFile.writeShort(Short.reverseBytes((short) 1)); // 20 - Type of format
audioRecordRandomAccessFile.writeShort(Short.reverseBytes((short) chanelNumber)); // 22 - Number of Channels
audioRecordRandomAccessFile.writeInt(Integer.reverseBytes(audioRecordSampleRate)); // 24 - Sample Rate
audioRecordRandomAccessFile.writeInt(Integer.reverseBytes(audioRecordSampleRate * chanelNumber * (short)nbBitsPerSample / 8)); // 28 - ByteRate
audioRecordRandomAccessFile.writeShort(Short.reverseBytes((short)(chanelNumber * nbBitsPerSample / 8))); // 32 - Alignment
audioRecordRandomAccessFile.writeShort(Short.reverseBytes((short) nbBitsPerSample)); // 34 - Bits per sample
audioRecordRandomAccessFile.writeBytes("data"); // 36 - "data" chunk header
audioRecordRandomAccessFile.writeInt(0); // 40 - Size of the data section
audioRecordBuffer = new byte[audioRecordPeriodInFrames * (short)nbBitsPerSample / 8 * chanelNumber];
}
else
{
Log.w("RecordFileWriter", "prepareAudioRecord : " + context.getString(R.string.log_record_file_writer_error_audiorecord_prepare));
databaseManager.insertLog(context, "" + context.getString(R.string.log_record_file_writer_error_audiorecord_prepare), new Date().getTime(), 2, false);
return false;
}
}
catch (Exception e)
{
Log.w("RecordFileWriter", "prepareAudioRecord : " + context.getString(R.string.log_record_file_writer_error_audiorecord_prepare) + " : " + e);
databaseManager.insertLog(context, "" + context.getString(R.string.log_record_file_writer_error_audiorecord_prepare), new Date().getTime(), 2, false);
return false;
}
return true;
}
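The header above writes 0 as a placeholder for the overall RIFF size (offset 4) and the data chunk size (offset 40). A hypothetical sketch of how those fields might be patched once recording finishes and the payload length is known, using the same little-endian Integer.reverseBytes() convention:

private void finaliseWavHeader(RandomAccessFile raf, int payloadLengthInBytes) throws IOException {
    raf.seek(4);
    raf.writeInt(Integer.reverseBytes(36 + payloadLengthInBytes)); // RIFF chunk size = file size - 8
    raf.seek(40);
    raf.writeInt(Integer.reverseBytes(payloadLengthInBytes));      // "data" chunk size
    raf.close();
}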