This article collects typical usage examples of the Java field android.media.AudioTrack.STATE_INITIALIZED. If you are wondering what AudioTrack.STATE_INITIALIZED does and how to use it, the curated field examples below may help. You can also explore further examples of the enclosing class, android.media.AudioTrack.
The following shows 12 code examples of AudioTrack.STATE_INITIALIZED, sorted by popularity by default. You can vote up the examples you like or find useful; your ratings help the system recommend better Java code examples.
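Before the examples, the core pattern is worth stating once: AudioTrack.getState() returns STATE_INITIALIZED only when the native layer set the track up successfully, so consumers must check it after construction. A minimal sketch of that check follows; the sample rate and channel values are illustrative assumptions, not taken from any example below.

int sampleRate = 44100;  // assumed rate for illustration
int minBufSize = AudioTrack.getMinBufferSize(sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT);
AudioTrack track = new AudioTrack(AudioManager.STREAM_MUSIC, sampleRate,
        AudioFormat.CHANNEL_OUT_MONO, AudioFormat.ENCODING_PCM_16BIT,
        minBufSize, AudioTrack.MODE_STREAM);
if (track.getState() != AudioTrack.STATE_INITIALIZED) {
    // Construction returned an object, but the native layer failed to set up
    // the track (bad parameters, too many tracks, etc.): release and bail out.
    track.release();
    track = null;
}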
Example 1: createAudioTrack
private void createAudioTrack() throws InitializationException {
    // The AudioTrack configuration parameters used here are guaranteed to
    // be supported on all devices.
    // AudioFormat.CHANNEL_OUT_MONO should be used in place of the deprecated
    // AudioFormat.CHANNEL_CONFIGURATION_MONO, but it is not available at
    // API level 3.
    // The output buffer for playback should be as short as possible, so that
    // AudioBufferPlayed events are not invoked long before the audio buffer
    // is actually played. Also, when the AudioTrack is stopped, it is filled
    // with silence of length audioTrackBufferSizeInBytes. If the silence is
    // too long, it causes a delay before the next recorded data starts
    // playing.
    audioTrackBufferSizeInBytes = AudioTrack.getMinBufferSize(
            SpeechTrainerConfig.SAMPLE_RATE_HZ,
            AudioFormat.CHANNEL_CONFIGURATION_MONO,
            AudioFormat.ENCODING_PCM_16BIT);
    if (audioTrackBufferSizeInBytes <= 0) {
        throw new InitializationException("Failed to initialize playback.");
    }
    audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC,
            SpeechTrainerConfig.SAMPLE_RATE_HZ,
            AudioFormat.CHANNEL_CONFIGURATION_MONO, AudioFormat.ENCODING_PCM_16BIT,
            audioTrackBufferSizeInBytes,
            AudioTrack.MODE_STREAM);
    if (audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        // Release the half-constructed track's native resources before
        // dropping the reference.
        audioTrack.release();
        audioTrack = null;
        throw new InitializationException("Failed to initialize playback.");
    }
}
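A caller-side sketch of how createAudioTrack() might be used (hypothetical; the pcm buffer and the surrounding control flow are assumptions, not part of the original snippet):

short[] pcm = ...;  // hypothetical buffer of recorded samples
try {
    createAudioTrack();
} catch (InitializationException e) {
    return;  // playback unavailable; surface the error to the user
}
audioTrack.play();
// In MODE_STREAM, write() blocks until the data has been queued for playback.
audioTrack.write(pcm, 0, pcm.length);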
Example 2: startPlayout
private boolean startPlayout() {
    Logging.d(TAG, "startPlayout");
    assertTrue(audioTrack != null);
    assertTrue(audioThread == null);
    if (audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        reportWebRtcAudioTrackStartError("AudioTrack instance is not successfully initialized.");
        return false;
    }
    audioThread = new AudioTrackThread("AudioTrackJavaThread");
    audioThread.start();
    return true;
}
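The AudioTrackThread referenced here is not shown in the snippet; in the WebRTC sources it is a worker thread that pumps PCM data into the track. A rough, hypothetical sketch of that pattern (the buffer handling is illustrative, not the actual WebRTC implementation):

private class AudioTrackThread extends Thread {
    private volatile boolean keepAlive = true;

    public AudioTrackThread(String name) {
        super(name);
    }

    @Override
    public void run() {
        // Assumes audioTrack already reached STATE_INITIALIZED, as checked
        // by startPlayout() above.
        audioTrack.play();
        final byte[] chunk = new byte[byteBuffer.capacity()];
        while (keepAlive) {
            // In the real code a native callback refills byteBuffer; here we
            // simply copy its current contents into the track.
            byteBuffer.rewind();
            byteBuffer.get(chunk);
            audioTrack.write(chunk, 0, chunk.length);
        }
        audioTrack.stop();
    }

    public void stopThread() {
        keepAlive = false;
    }
}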
Example 3: startPlayout
private boolean startPlayout() {
    Logging.d(TAG, "startPlayout");
    assertTrue(audioTrack != null);
    assertTrue(audioThread == null);
    if (audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        Logging.e(TAG, "AudioTrack instance is not successfully initialized.");
        return false;
    }
    audioThread = new AudioTrackThread("AudioTrackJavaThread");
    audioThread.start();
    return true;
}
Example 4: audioTrackRelease
private void audioTrackRelease() {
    if (mAudioTrack != null) {
        if (mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
            mAudioTrack.stop();
        }
        mAudioTrack.release();
    }
    mAudioTrack = null;
}
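The state check here matters: AudioTrack.stop() throws an IllegalStateException when called on a track that never reached STATE_INITIALIZED, whereas release() is safe to call on a track in any state, which is why only stop() is guarded.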
Example 5: getAudioTrack
/**
 * Helper method to double-check that the returned {@link SaiyAudioTrack} object hasn't been
 * released elsewhere.
 *
 * @return the {@link SaiyAudioTrack} object, or null if the creation process failed.
 */
private SaiyAudioTrack getAudioTrack() {
    if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        audioTrack = SaiyAudioTrack.getSaiyAudioTrack();
        // Guard against the documented null return, which the original code
        // would dereference.
        if (audioTrack != null) {
            audioTrack.setListener(listener);
        }
    }
    return audioTrack;
}
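Since getSaiyAudioTrack() can return null on failure, callers still need to handle the failure path. A hypothetical usage sketch:

SaiyAudioTrack track = getAudioTrack();
if (track != null) {
    // track is non-null here; the helper re-created it if it had been released.
    track.play();
}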
Example 6: isSpeaking
@Override
public boolean isSpeaking() {
    if (audioTrack != null && audioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
        if (DEBUG) {
            MyLog.i(CLS_NAME, "isSpeaking: audioTrack STATE_INITIALIZED");
        }
        if (audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PLAYING
                || audioTrack.getPlayState() == AudioTrack.PLAYSTATE_PAUSED) {
            if (DEBUG) {
                MyLog.i(CLS_NAME, "isSpeaking: audioTrack PLAYSTATE_PLAYING/PLAYSTATE_PAUSED");
            }
            return true;
        } else {
            if (DEBUG) {
                MyLog.i(CLS_NAME, "isSpeaking: audioTrack not playing");
            }
        }
    }
    final boolean speakingSuper = super.isSpeaking();
    if (DEBUG) {
        MyLog.i(CLS_NAME, "isSpeaking: speakingSuper " + speakingSuper);
    }
    return speakingSuper;
}
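Note the two distinct state machines used in this example: getState() reports the one-off initialization status (STATE_UNINITIALIZED, STATE_INITIALIZED, STATE_NO_STATIC_DATA), while getPlayState() reports the transport status (PLAYSTATE_STOPPED, PLAYSTATE_PAUSED, PLAYSTATE_PLAYING). A track only counts as speaking when it is both initialized and playing or paused.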
Example 7: audioTrackRelease
private void audioTrackRelease() {
    if (mAudioTrack != null) {
        if (mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
            mAudioTrack.stop();
        }
        mAudioTrack.release();
    }
    mAudioTrack = null;
}
Example 8: initPlayout
private boolean initPlayout(int sampleRate, int channels) {
    Logging.d(TAG, "initPlayout(sampleRate=" + sampleRate + ", channels=" + channels + ")");
    final int bytesPerFrame = channels * (BITS_PER_SAMPLE / 8);
    // allocateDirect is static; call it on the class rather than the field.
    byteBuffer = ByteBuffer.allocateDirect(bytesPerFrame * (sampleRate / BUFFERS_PER_SECOND));
    Logging.d(TAG, "byteBuffer.capacity: " + byteBuffer.capacity());
    emptyBytes = new byte[byteBuffer.capacity()];
    // Rather than passing the ByteBuffer with every callback (requiring
    // the potentially expensive GetDirectBufferAddress) we simply have the
    // native class cache the address to the memory once.
    nativeCacheDirectBufferAddress(byteBuffer, nativeAudioTrack);
    // Get the minimum buffer size required for the successful creation of an
    // AudioTrack object in MODE_STREAM.
    // Note that this size doesn't guarantee a smooth playback under load.
    // TODO(henrika): should we extend the buffer size to avoid glitches?
    final int channelConfig = channelCountToConfiguration(channels);
    final int minBufferSizeInBytes =
            AudioTrack.getMinBufferSize(sampleRate, channelConfig, AudioFormat.ENCODING_PCM_16BIT);
    Logging.d(TAG, "AudioTrack.getMinBufferSize: " + minBufferSizeInBytes);
    // For the streaming mode, data must be written to the audio sink in
    // chunks of size (given by byteBuffer.capacity()) less than or equal
    // to the total buffer size |minBufferSizeInBytes|. But, we have seen
    // reports of "getMinBufferSize(): error querying hardware". Hence, it
    // can happen that |minBufferSizeInBytes| contains an invalid value.
    if (minBufferSizeInBytes < byteBuffer.capacity()) {
        reportWebRtcAudioTrackInitError("AudioTrack.getMinBufferSize returns an invalid value.");
        return false;
    }
    // Ensure that the previous audio session was stopped correctly before
    // trying to create a new AudioTrack.
    if (audioTrack != null) {
        reportWebRtcAudioTrackInitError("Conflict with existing AudioTrack.");
        return false;
    }
    try {
        // Create an AudioTrack object and initialize its associated audio buffer.
        // The size of this buffer determines how long an AudioTrack can play
        // before running out of data.
        if (WebRtcAudioUtils.runningOnLollipopOrHigher()) {
            // On API level 21 or higher, a special AudioTrack constructor takes
            // AudioAttributes and AudioFormat as input. It allows us to
            // supersede the notion of stream types for defining the behavior
            // of audio playback, and lets certain platforms or routing
            // policies use this information for more refined volume or
            // routing decisions.
            audioTrack = createAudioTrackOnLollipopOrHigher(
                    sampleRate, channelConfig, minBufferSizeInBytes);
        } else {
            // Use the default constructor for API levels below 21.
            audioTrack = createAudioTrackOnLowerThanLollipop(
                    sampleRate, channelConfig, minBufferSizeInBytes);
        }
    } catch (IllegalArgumentException e) {
        reportWebRtcAudioTrackInitError(e.getMessage());
        releaseAudioResources();
        return false;
    }
    // It can happen that an AudioTrack is created but was not successfully
    // initialized upon creation. This seems to be the case e.g. when the
    // maximum number of globally available audio tracks is exceeded.
    if (audioTrack == null || audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        reportWebRtcAudioTrackInitError("Initialization of audio track failed.");
        releaseAudioResources();
        return false;
    }
    logMainParameters();
    logMainParametersExtended();
    return true;
}
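createAudioTrackOnLollipopOrHigher is not included in this snippet. A sketch of what such a factory typically looks like on API 21+, using the AudioAttributes/AudioFormat constructor described in the comments above (the method name comes from the snippet; the body here is an assumption, not the actual WebRTC source):

@TargetApi(21)
private static AudioTrack createAudioTrackOnLollipopOrHigher(
        int sampleRateInHz, int channelConfig, int bufferSizeInBytes) {
    // Attributes replace the legacy STREAM_* types on API 21+.
    AudioAttributes attributes = new AudioAttributes.Builder()
            .setUsage(AudioAttributes.USAGE_VOICE_COMMUNICATION)
            .setContentType(AudioAttributes.CONTENT_TYPE_SPEECH)
            .build();
    AudioFormat format = new AudioFormat.Builder()
            .setEncoding(AudioFormat.ENCODING_PCM_16BIT)
            .setSampleRate(sampleRateInHz)
            .setChannelMask(channelConfig)
            .build();
    return new AudioTrack(attributes, format, bufferSizeInBytes,
            AudioTrack.MODE_STREAM, AudioManager.AUDIO_SESSION_ID_GENERATE);
}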
Example 9: audioTrackStart
private void audioTrackStart() {
    if (mAudioTrack != null && mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED
            && mAudioTrack.getPlayState() != AudioTrack.PLAYSTATE_PLAYING) {
        mAudioTrack.play();
    }
}
Example 10: audioTrackPause
private void audioTrackPause() {
    if (mAudioTrack != null && mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
        mAudioTrack.pause();
    }
}
Example 11: audioTrackPause
private void audioTrackPause() {
    if (mAudioTrack != null && mAudioTrack.getState() == AudioTrack.STATE_INITIALIZED) {
        mAudioTrack.pause();
    }
}
Example 12: processMidi
private void processMidi() {
    int status = 0;
    int size = 0;
    // Initialize the midi engine.
    if ((size = this.init()) == 0) {
        return;
    }
    short[] buffer = new short[size];
    // Create the audio track.
    this.audioTrack = new AudioTrack(AudioManager.STREAM_MUSIC, SAMPLE_RATE,
            AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT,
            BUFFER_SIZE, AudioTrack.MODE_STREAM);
    // Check that the native layer initialized the track. The constructor
    // never returns null (it throws on bad arguments), so getState() is the
    // authoritative check.
    if (this.audioTrack.getState() != AudioTrack.STATE_INITIALIZED) {
        this.audioTrack.release();
        this.shutdown();
        return;
    }
    // Start playback.
    this.audioTrack.play();
    // Keep running until stopped.
    while (this.thread != null) {
        // Write the queued midi events.
        synchronized (this.mutex) {
            for (byte[] queuedEvent : this.queuedEvents) {
                this.write(queuedEvent);
            }
            this.queuedEvents.clear();
        }
        // Render the audio.
        if (this.render(buffer) == 0) {
            break;
        }
        // Write the audio to the AudioTrack.
        status = this.audioTrack.write(buffer, 0, buffer.length);
        if (status < 0) {
            break;
        }
    }
    // Render and write the last bit of audio.
    if (status > 0) {
        if (this.render(buffer) > 0) {
            this.audioTrack.write(buffer, 0, buffer.length);
        }
    }
    // Shut down audio.
    this.shutdown();
    this.audioTrack.release();
}
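One caveat about this example (an observation about the Android API, not from the original source): the fixed BUFFER_SIZE constant passed to the constructor must be at least AudioTrack.getMinBufferSize(...) for these parameters, otherwise construction fails. A defensive variant would clamp the size at runtime:

int minSize = AudioTrack.getMinBufferSize(SAMPLE_RATE,
        AudioFormat.CHANNEL_OUT_STEREO, AudioFormat.ENCODING_PCM_16BIT);
int bufferSize = Math.max(BUFFER_SIZE, minSize);  // never below the hardware minimum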