本文整理汇总了Java中com.google.android.exoplayer.MediaCodecUtil.maxH264DecodableFrameSize方法的典型用法代码示例。如果您正苦于以下问题:Java MediaCodecUtil.maxH264DecodableFrameSize方法的具体用法?Java MediaCodecUtil.maxH264DecodableFrameSize怎么用?Java MediaCodecUtil.maxH264DecodableFrameSize使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类com.google.android.exoplayer.MediaCodecUtil
的用法示例。
在下文中一共展示了MediaCodecUtil.maxH264DecodableFrameSize方法的2个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: isFormatPlayable
import com.google.android.exoplayer.MediaCodecUtil; //导入方法依赖的package包/类
/**
 * Determines whether an individual format is playable, given an array of allowed container types,
 * whether HD formats should be filtered and a maximum decodable frame size in pixels.
 */
private static boolean isFormatPlayable(Format format, String[] allowedContainerMimeTypes,
    boolean filterHdFormats) throws DecoderQueryException {
  // Reject the format if its container mime type is not in the allowed list.
  if (allowedContainerMimeTypes != null
      && !Util.contains(allowedContainerMimeTypes, format.mimeType)) {
    return false;
  }
  // Reject HD formats when they are being filtered out.
  boolean isHd = format.width >= 1280 || format.height >= 720;
  if (filterHdFormats && isHd) {
    return false;
  }
  // Formats without declared dimensions cannot be size-checked; treat them as playable.
  if (format.width <= 0 || format.height <= 0) {
    return true;
  }
  if (Util.SDK_INT < 21) {
    // Pre-Lollipop there is no per-mime capability query; assume the video is H.264 and
    // compare against the maximum decodable H.264 frame size.
    return format.width * format.height <= MediaCodecUtil.maxH264DecodableFrameSize();
  }
  String videoMediaMimeType = MimeTypes.getVideoMediaMimeType(format.codecs);
  if (MimeTypes.VIDEO_UNKNOWN.equals(videoMediaMimeType)) {
    // Assume the video is H.264.
    videoMediaMimeType = MimeTypes.VIDEO_H264;
  }
  // Query the platform decoder capabilities, including frame rate when one is declared.
  return format.frameRate > 0
      ? MediaCodecUtil.isSizeAndRateSupportedV21(videoMediaMimeType, false, format.width,
          format.height, format.frameRate)
      : MediaCodecUtil.isSizeSupportedV21(videoMediaMimeType, false, format.width,
          format.height);
}
示例2: onManifest
import com.google.android.exoplayer.MediaCodecUtil; //导入方法依赖的package包/类
@Override
public void onManifest(String contentId, SmoothStreamingManifest manifest) {
  Handler mainHandler = playerActivity.getMainHandler();
  LoadControl loadControl = new DefaultLoadControl(new BufferPool(BUFFER_SEGMENT_SIZE));
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter();
  // Select the first audio and first video stream elements from the manifest, keeping only
  // those video tracks whose frame size the device's H.264 decoder can handle.
  int maxDecodableFrameSize = MediaCodecUtil.maxH264DecodableFrameSize();
  int audioStreamElementIndex = -1;
  int videoStreamElementIndex = -1;
  ArrayList<Integer> videoTrackIndexList = new ArrayList<Integer>();
  for (int elementIndex = 0; elementIndex < manifest.streamElements.length; elementIndex++) {
    StreamElement element = manifest.streamElements[elementIndex];
    if (audioStreamElementIndex == -1 && element.type == StreamElement.TYPE_AUDIO) {
      audioStreamElementIndex = elementIndex;
    } else if (videoStreamElementIndex == -1 && element.type == StreamElement.TYPE_VIDEO) {
      videoStreamElementIndex = elementIndex;
      for (int trackIndex = 0; trackIndex < element.tracks.length; trackIndex++) {
        TrackElement track = element.tracks[trackIndex];
        boolean decodable = track.maxWidth * track.maxHeight <= maxDecodableFrameSize;
        if (decodable) {
          videoTrackIndexList.add(trackIndex);
        }
        // Tracks exceeding the maximum decodable frame size are silently skipped.
      }
    }
  }
  int[] videoTrackIndices = Util.toArray(videoTrackIndexList);
  // Video pipeline: data source -> adaptive chunk source -> sample source -> renderer.
  DataSource videoDataSource = new UriDataSource(userAgent, bandwidthMeter);
  ChunkSource videoChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      videoStreamElementIndex, videoTrackIndices, videoDataSource,
      new AdaptiveEvaluator(bandwidthMeter), LIVE_EDGE_LATENCY_MS);
  ChunkSampleSource videoSampleSource = new ChunkSampleSource(videoChunkSource, loadControl,
      VIDEO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(videoSampleSource,
      MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 0, mainHandler, playerActivity, 50);
  // Audio pipeline mirrors the video one, but uses a fixed evaluator on the single track 0.
  DataSource audioDataSource = new UriDataSource(userAgent, bandwidthMeter);
  ChunkSource audioChunkSource = new SmoothStreamingChunkSource(manifestFetcher,
      audioStreamElementIndex, new int[] {0}, audioDataSource,
      new FormatEvaluator.FixedEvaluator(), LIVE_EDGE_LATENCY_MS);
  SampleSource audioSampleSource = new ChunkSampleSource(audioChunkSource, loadControl,
      AUDIO_BUFFER_SEGMENTS * BUFFER_SEGMENT_SIZE, true);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(
      audioSampleSource);
  // Hand the constructed renderers back to the player.
  callback.onRenderers(videoRenderer, audioRenderer);
}