本文整理汇总了Java中com.google.android.exoplayer2.C.AUDIO_SESSION_ID_UNSET属性的典型用法代码示例。如果您正苦于以下问题:Java C.AUDIO_SESSION_ID_UNSET属性的具体用法?Java C.AUDIO_SESSION_ID_UNSET怎么用?Java C.AUDIO_SESSION_ID_UNSET使用的例子?那么,这里精选的属性代码示例或许可以为您提供帮助。您也可以进一步了解该属性所在类com.google.android.exoplayer2.C的用法示例。
在下文中一共展示了C.AUDIO_SESSION_ID_UNSET属性的10个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: getMediaFormat
@SuppressLint("InlinedApi")
private static MediaFormat getMediaFormat(Format format, CodecMaxValues codecMaxValues,
    boolean deviceNeedsAutoFrcWorkaround, int tunnelingAudioSessionId) {
  // Start from the framework representation of the track format.
  MediaFormat mediaFormat = format.getFrameworkMediaFormatV16();
  // Advertise the largest dimensions the codec may need to adapt to.
  mediaFormat.setInteger(MediaFormat.KEY_MAX_WIDTH, codecMaxValues.width);
  mediaFormat.setInteger(MediaFormat.KEY_MAX_HEIGHT, codecMaxValues.height);
  boolean hasMaxInputSize = codecMaxValues.inputSize != Format.NO_VALUE;
  if (hasMaxInputSize) {
    // Bound the size of a single input buffer.
    mediaFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, codecMaxValues.inputSize);
  }
  if (deviceNeedsAutoFrcWorkaround) {
    // Disable automatic frame-rate conversion on devices that mishandle it.
    mediaFormat.setInteger("auto-frc", 0);
  }
  if (tunnelingAudioSessionId != C.AUDIO_SESSION_ID_UNSET) {
    // A set audio session id signals that tunneled playback should be configured.
    configureTunnelingV21(mediaFormat, tunnelingAudioSessionId);
  }
  return mediaFormat;
}
示例2: onEnabled
@Override
protected void onEnabled(boolean joining) throws ExoPlaybackException {
  // Fresh decoder counters for this enable cycle, reported to listeners.
  decoderCounters = new DecoderCounters();
  eventDispatcher.enabled(decoderCounters);
  int sessionId = getConfiguration().tunnelingAudioSessionId;
  boolean tunnelingRequested = sessionId != C.AUDIO_SESSION_ID_UNSET;
  if (tunnelingRequested) {
    audioTrack.enableTunnelingV21(sessionId);
  } else {
    audioTrack.disableTunneling();
  }
}
示例3: onEnabled
@Override
protected void onEnabled(boolean joining) throws ExoPlaybackException {
  super.onEnabled(joining);
  eventDispatcher.enabled(decoderCounters);
  // Enable tunneled playback only when the configuration carries a session id.
  int sessionId = getConfiguration().tunnelingAudioSessionId;
  if (sessionId == C.AUDIO_SESSION_ID_UNSET) {
    audioTrack.disableTunneling();
  } else {
    audioTrack.enableTunnelingV21(sessionId);
  }
}
示例4: AudioTrack
/**
 * Creates a new audio track.
 *
 * @param audioCapabilities The audio capabilities for playback on this device. May be null if the
 *     default capabilities (no encoded audio passthrough support) should be assumed.
 * @param audioProcessors An array of {@link AudioProcessor}s that will process PCM audio before
 *     output. May be empty.
 * @param listener Listener for audio track events.
 */
public AudioTrack(AudioCapabilities audioCapabilities, AudioProcessor[] audioProcessors,
    Listener listener) {
  this.audioCapabilities = audioCapabilities;
  this.listener = listener;
  releasingConditionVariable = new ConditionVariable(true);
  if (Util.SDK_INT >= 18) {
    // getLatency is a hidden framework method; look it up reflectively if present.
    try {
      getLatencyMethod =
          android.media.AudioTrack.class.getMethod("getLatency", (Class<?>[]) null);
    } catch (NoSuchMethodException e) {
      // There's no guarantee this method exists. Do nothing.
    }
  }
  // Prefer the timestamp-based utility where the platform supports it (API 19+).
  audioTrackUtil = Util.SDK_INT >= 19 ? new AudioTrackUtilV19() : new AudioTrackUtil();
  channelMappingAudioProcessor = new ChannelMappingAudioProcessor();
  sonicAudioProcessor = new SonicAudioProcessor();
  // Processing chain: resampling, channel mapping, caller-supplied processors, then sonic.
  availableAudioProcessors = new AudioProcessor[audioProcessors.length + 3];
  availableAudioProcessors[0] = new ResamplingAudioProcessor();
  availableAudioProcessors[1] = channelMappingAudioProcessor;
  System.arraycopy(audioProcessors, 0, availableAudioProcessors, 2, audioProcessors.length);
  availableAudioProcessors[availableAudioProcessors.length - 1] = sonicAudioProcessor;
  playheadOffsets = new long[MAX_PLAYHEAD_OFFSET_COUNT];
  this.audioProcessors = new AudioProcessor[0];
  outputBuffers = new ByteBuffer[0];
  playbackParametersCheckpoints = new LinkedList<>();
  // Scalar state defaults.
  volume = 1f;
  startMediaTimeState = START_NOT_SET;
  streamType = C.STREAM_TYPE_DEFAULT;
  audioSessionId = C.AUDIO_SESSION_ID_UNSET;
  playbackParameters = PlaybackParameters.DEFAULT;
  drainingAudioProcessorIndex = C.INDEX_UNSET;
}
示例5: setStreamType
/**
 * Sets the stream type for audio track. If the stream type has changed and if the audio track
 * is not configured for use with tunneling, then the audio track is reset and the audio session
 * id is cleared.
 * <p>
 * If the audio track is configured for use with tunneling then the stream type is ignored, the
 * audio track is not reset and the audio session id is not cleared. The passed stream type will
 * be used if the audio track is later re-configured into non-tunneled mode.
 *
 * @param streamType The {@link C.StreamType} to use for audio output.
 */
public void setStreamType(@C.StreamType int streamType) {
  if (this.streamType != streamType) {
    this.streamType = streamType;
    // In tunneling mode the stream type is ignored, so the track is left untouched.
    if (!tunneling) {
      reset();
      audioSessionId = C.AUDIO_SESSION_ID_UNSET;
    }
  }
}
示例6: disableTunneling
/**
 * Disables tunneling. If tunneling was previously enabled then the audio track is reset and the
 * audio session id is cleared.
 */
public void disableTunneling() {
  if (!tunneling) {
    // Already disabled; nothing to tear down.
    return;
  }
  tunneling = false;
  audioSessionId = C.AUDIO_SESSION_ID_UNSET;
  reset();
}
示例7: release
/**
 * Releases all resources associated with this instance.
 */
public void release() {
  reset();
  releaseKeepSessionIdAudioTrack();
  // Reset every processor in the chain, including any that are currently inactive.
  for (int i = 0; i < availableAudioProcessors.length; i++) {
    availableAudioProcessors[i].reset();
  }
  audioSessionId = C.AUDIO_SESSION_ID_UNSET;
  playing = false;
}
示例8: onEnabled
@Override
protected void onEnabled(boolean joining) throws ExoPlaybackException {
  super.onEnabled(joining);
  // Record the tunneling session id from the renderer configuration; an unset id
  // means tunneled playback is not in use for this enable cycle.
  int sessionId = getConfiguration().tunnelingAudioSessionId;
  tunnelingAudioSessionId = sessionId;
  tunneling = sessionId != C.AUDIO_SESSION_ID_UNSET;
  eventDispatcher.enabled(decoderCounters);
  frameReleaseTimeHelper.enable();
}
示例9: MappingTrackSelector
public MappingTrackSelector() {
  // No per-renderer disabled flags or selection overrides until the caller sets them.
  rendererDisabledFlags = new SparseBooleanArray();
  selectionOverrides = new SparseArray<>();
  // Tunneling stays off until an audio session id is supplied.
  tunnelingAudioSessionId = C.AUDIO_SESSION_ID_UNSET;
}
示例10: maybeConfigureRenderersForTunneling
/**
 * Determines whether tunneling should be enabled, replacing {@link RendererConfiguration}s in
 * {@code rendererConfigurations} with configurations that enable tunneling on the appropriate
 * renderers if so.
 *
 * @param rendererCapabilities The {@link RendererCapabilities} of the renderers for which
 *     {@link TrackSelection}s are to be generated.
 * @param rendererTrackGroupArrays An array of {@link TrackGroupArray}s where each entry
 *     corresponds to the renderer of equal index in {@code renderers}.
 * @param rendererFormatSupports Maps every available track to a specific level of support as
 *     defined by the renderer {@code FORMAT_*} constants.
 * @param rendererConfigurations The renderer configurations. Configurations may be replaced with
 *     ones that enable tunneling as a result of this call.
 * @param trackSelections The renderer track selections.
 * @param tunnelingAudioSessionId The audio session id to use when tunneling, or
 *     {@link C#AUDIO_SESSION_ID_UNSET} if tunneling should not be enabled.
 */
private static void maybeConfigureRenderersForTunneling(
    RendererCapabilities[] rendererCapabilities, TrackGroupArray[] rendererTrackGroupArrays,
    int[][][] rendererFormatSupports, RendererConfiguration[] rendererConfigurations,
    TrackSelection[] trackSelections, int tunnelingAudioSessionId) {
  if (tunnelingAudioSessionId == C.AUDIO_SESSION_ID_UNSET) {
    // Tunneling not requested.
    return;
  }
  // Tunneling requires exactly one audio renderer and exactly one video renderer, each of
  // which supports tunneling and has a track selection.
  int audioRendererIndex = -1;
  int videoRendererIndex = -1;
  boolean enableTunneling = true;
  for (int i = 0; i < rendererCapabilities.length; i++) {
    int trackType = rendererCapabilities[i].getTrackType();
    TrackSelection selection = trackSelections[i];
    boolean isAudioOrVideo =
        trackType == C.TRACK_TYPE_AUDIO || trackType == C.TRACK_TYPE_VIDEO;
    if (!isAudioOrVideo || selection == null
        || !rendererSupportsTunneling(
            rendererFormatSupports[i], rendererTrackGroupArrays[i], selection)) {
      continue;
    }
    if (trackType == C.TRACK_TYPE_AUDIO) {
      if (audioRendererIndex != -1) {
        // More than one eligible audio renderer; tunneling cannot be enabled.
        enableTunneling = false;
        break;
      }
      audioRendererIndex = i;
    } else {
      if (videoRendererIndex != -1) {
        // More than one eligible video renderer; tunneling cannot be enabled.
        enableTunneling = false;
        break;
      }
      videoRendererIndex = i;
    }
  }
  enableTunneling &= audioRendererIndex != -1 && videoRendererIndex != -1;
  if (enableTunneling) {
    // Replace both renderers' configurations with one that carries the session id.
    RendererConfiguration tunnelingConfiguration =
        new RendererConfiguration(tunnelingAudioSessionId);
    rendererConfigurations[audioRendererIndex] = tunnelingConfiguration;
    rendererConfigurations[videoRendererIndex] = tunnelingConfiguration;
  }
}