本文整理匯總了Java中android.media.MediaCodec.INFO_TRY_AGAIN_LATER屬性的典型用法代碼示例。如果您正苦於以下問題:Java MediaCodec.INFO_TRY_AGAIN_LATER屬性的具體用法?Java MediaCodec.INFO_TRY_AGAIN_LATER怎麽用?Java MediaCodec.INFO_TRY_AGAIN_LATER使用的例子?那麽, 這裏精選的屬性代碼示例或許可以為您提供幫助。您也可以進一步了解該屬性所在類android.media.MediaCodec
的用法示例。
在下文中一共展示了MediaCodec.INFO_TRY_AGAIN_LATER屬性的15個代碼示例,這些例子默認根據受歡迎程度排序。您可以為喜歡或者感覺有用的代碼點讚,您的評價將有助於係統推薦出更棒的Java代碼示例。
示例1: videoEncodeStep
/**
 * Drains pending output from the video encoder and writes it to the muxer.
 *
 * @param isEnd true to signal end-of-stream to the surface-input encoder
 *              before draining
 * @return true once the encoder has emitted its end-of-stream buffer
 */
private boolean videoEncodeStep(boolean isEnd){
    if(isEnd){
        // No more frames will be submitted to the encoder's input surface.
        mVideoEncoder.signalEndOfInputStream();
    }
    while (true){
        int outputIndex=mVideoEncoder.dequeueOutputBuffer(mVideoEncodeBufferInfo,TIME_OUT);
        if(outputIndex>=0){
            // Skip empty/config buffers and the zero-timestamp first frame.
            if(isMuxStarted&&mVideoEncodeBufferInfo.size>0&&mVideoEncodeBufferInfo.presentationTimeUs>0){
                mMuxer.writeSampleData(mVideoTrack,getOutputBuffer(mVideoEncoder,outputIndex),mVideoEncodeBufferInfo);
            }
            mVideoEncoder.releaseOutputBuffer(outputIndex,false);
            // FIX: BufferInfo.flags is a bit field; test the EOS bit instead of
            // comparing the whole field, which misses EOS combined with other
            // flags (e.g. BUFFER_FLAG_KEY_FRAME).
            if((mVideoEncodeBufferInfo.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
                AvLog.d("CameraRecorder get video encode end of stream");
                return true;
            }
        }else if(outputIndex==MediaCodec.INFO_TRY_AGAIN_LATER){
            // No output available right now; stop draining.
            break;
        }else if(outputIndex==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
            // The real output format is only known now; add the track and start muxing.
            AvLog.d("get video output format changed ->"+mVideoEncoder.getOutputFormat().toString());
            mVideoTrack=mMuxer.addTrack(mVideoEncoder.getOutputFormat());
            mMuxer.start();
            isMuxStarted=true;
        }
    }
    return false;
}
示例2: handleCodecOutput
/**
 * Drains every currently available output buffer from the codec and streams
 * the encoded payloads, each prefixed with an ADTS header, to the given
 * output stream. Codec-config buffers are released without being written.
 *
 * @param mediaCodec         the encoder to drain
 * @param codecOutputBuffers the codec's output buffer array (refreshed on
 *                           INFO_OUTPUT_BUFFERS_CHANGED)
 * @param bufferInfo         reusable buffer metadata holder
 * @param outputStream       destination for the ADTS-framed data
 * @throws IOException if writing to the stream fails
 */
private void handleCodecOutput(MediaCodec mediaCodec,
                               ByteBuffer[] codecOutputBuffers,
                               MediaCodec.BufferInfo bufferInfo,
                               OutputStream outputStream)
    throws IOException
{
    // Keep dequeuing until the codec reports no more output for now.
    for (int index = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
         index != MediaCodec.INFO_TRY_AGAIN_LATER;
         index = mediaCodec.dequeueOutputBuffer(bufferInfo, 0)) {
        if (index >= 0) {
            ByteBuffer outBuffer = codecOutputBuffers[index];
            outBuffer.position(bufferInfo.offset);
            outBuffer.limit(bufferInfo.offset + bufferInfo.size);
            boolean isConfigData =
                (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == MediaCodec.BUFFER_FLAG_CODEC_CONFIG;
            if (!isConfigData) {
                // Each AAC frame gets its own ADTS header before the payload.
                byte[] header = createAdtsHeader(bufferInfo.size - bufferInfo.offset);
                outputStream.write(header);
                byte[] payload = new byte[outBuffer.remaining()];
                outBuffer.get(payload);
                outputStream.write(payload);
            }
            outBuffer.clear();
            mediaCodec.releaseOutputBuffer(index, false);
        } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            // Buffer array was invalidated; fetch the new one.
            codecOutputBuffers = mediaCodec.getOutputBuffers();
        }
    }
}
示例3: recordVirtualDisplay
/**
 * Pump loop for the virtual-display screen recorder: drains encoded frames
 * from the encoder and writes them to the muxer until {@code isStart} is
 * cleared.
 *
 * @throws IllegalStateException if output arrives before the muxer track
 *                               was added (format change never observed)
 */
private void recordVirtualDisplay() {
    while (isStart) {
        int index = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);
        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // Actual output format known now; register the track / start the muxer.
            resetOutputFormat();
        } else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            try {
                // No output yet; back off 10 ms before polling again.
                Thread.sleep(10);
            } catch (InterruptedException e) {
                // FIX: don't swallow the interrupt — restore the thread's
                // interrupt status so the owner of this thread can observe it.
                Thread.currentThread().interrupt();
            }
        } else if (index >= 0) {
            if (!mMuxerStarted) {
                // FIX: corrected "dose" typo in the message.
                throw new IllegalStateException("MediaMuxer does not call addTrack(format) ");
            }
            encodeToVideoTrack(index);
            mEncoder.releaseOutputBuffer(index, false);
        }
    }
}
示例4: checkDecoderStatus
/**
 * Classifies the return value of MediaCodec#dequeueOutputBuffer().
 *
 * @param decoderStatus return value of MediaCodec#dequeueOutputBuffer()
 * @return true if a decoded output buffer was actually produced
 */
private boolean checkDecoderStatus(int decoderStatus) {
    switch (decoderStatus) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            // dequeueOutputBuffer timed out with nothing available.
            if (mInputDone) {
                Log.d(TAG, "no output from mDecoder available BUT the input is done.");
            }
            return false;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            Log.d(TAG, "mDecoder output buffers changed");
            return false;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            Log.d(TAG, "mDecoder output format changed");
            return false;
        default:
            if (decoderStatus < 0) {
                // Any other negative value is unexpected.
                Log.d(TAG, "unexpected result from encoder.dequeueOutputBuffer: "
                    + decoderStatus);
                return false;
            }
            // Non-negative index: a real output buffer was dequeued.
            return true;
    }
}
示例5: videoDecodeStep
/**
 * Performs one decode step for the video track: feeds at most one sample
 * from the shared extractor into the decoder, then drains every output
 * frame currently available, rendering each to the decoder's output surface.
 *
 * @return true when the extractor is exhausted or the user requested stop
 */
private boolean videoDecodeStep(){
    // Try to obtain a free input buffer and feed one encoded sample.
    int mInputIndex=mVideoDecoder.dequeueInputBuffer(TIME_OUT);
    if(mInputIndex>=0){
        ByteBuffer buffer=getInputBuffer(mVideoDecoder,mInputIndex);
        buffer.clear();
        // The extractor is shared with other tracks; serialize access.
        synchronized (Extractor_LOCK) {
            mExtractor.selectTrack(mVideoDecoderTrack);
            int ret = mExtractor.readSampleData(buffer, 0);
            if (ret != -1) {
                // Remember the timestamp of the last sample that was queued.
                mVideoStopTimeStamp=mExtractor.getSampleTime();
                AvLog.d("mVideoStopTimeStamp:"+mVideoStopTimeStamp);
                mVideoDecoder.queueInputBuffer(mInputIndex, 0, ret, mVideoStopTimeStamp, mExtractor.getSampleFlags());
            }
            // advance() returns false once no more samples remain.
            isVideoExtractorEnd = !mExtractor.advance();
        }
    }
    // Drain all decoded frames that are ready right now.
    while (true){
        int mOutputIndex=mVideoDecoder.dequeueOutputBuffer(mVideoDecoderBufferInfo,TIME_OUT);
        if(mOutputIndex>=0){
            try {
                AvLog.d(" mDecodeSem.acquire ");
                // NOTE(review): presumably paces frame release against a
                // consumer holding mDecodeSem — confirm with the caller.
                if(!isUserWantToStop){
                    mDecodeSem.acquire();
                }
                AvLog.d(" mDecodeSem.acquire end ");
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            codecNum++;
            // render=true: the frame goes to the configured output surface.
            mVideoDecoder.releaseOutputBuffer(mOutputIndex,true);
            mSem.release();
        }else if(mOutputIndex== MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
            //MediaFormat format=mVideoDecoder.getOutputFormat();
        }else if(mOutputIndex== MediaCodec.INFO_TRY_AGAIN_LATER){
            // Nothing more to drain for now.
            break;
        }
    }
    return isVideoExtractorEnd||isUserWantToStop;
}
示例6: audioEncodeStep
/**
 * Performs one encode step for the audio track: reads PCM data from the
 * AudioRecord into the encoder, then drains any encoded output to the muxer.
 *
 * @param isEnd true to queue the current input buffer with the
 *              end-of-stream flag set
 * @return true once the encoder has emitted its end-of-stream buffer
 */
private boolean audioEncodeStep(boolean isEnd){
    if(isRecordAudioStarted){
        int inputIndex=mAudioEncoder.dequeueInputBuffer(TIME_OUT);
        if(inputIndex>=0){
            ByteBuffer buffer=getInputBuffer(mAudioEncoder,inputIndex);
            buffer.clear();
            // Presentation time in microseconds relative to recording start.
            long time=(System.currentTimeMillis()-BASE_TIME)*1000;
            int length=mAudioRecord.read(buffer,mRecordBufferSize);
            if(length>=0){
                mAudioEncoder.queueInputBuffer(inputIndex,0,length,time,
                    isEnd?MediaCodec.BUFFER_FLAG_END_OF_STREAM:0);
            }
        }
        while (true){
            int outputIndex=mAudioEncoder.dequeueOutputBuffer(mAudioEncodeBufferInfo,TIME_OUT);
            if(outputIndex>=0){
                // TODO: the first audio frame has presentationTimeUs == 0 and
                // is therefore skipped by the check below.
                if(isMuxStarted&&mAudioEncodeBufferInfo.size>0&&mAudioEncodeBufferInfo.presentationTimeUs>0){
                    mMuxer.writeSampleData(mAudioTrack,getOutputBuffer(mAudioEncoder,outputIndex),mAudioEncodeBufferInfo);
                }
                mAudioEncoder.releaseOutputBuffer(outputIndex,false);
                // FIX: BufferInfo.flags is a bit field; test the EOS bit instead
                // of comparing the whole field, which misses EOS combined with
                // other flags.
                if((mAudioEncodeBufferInfo.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
                    AvLog.d("CameraRecorder get audio encode end of stream");
                    isTryStopAudio=false;
                    isRecordAudioStarted=false;
                    return true;
                }
            }else if(outputIndex==MediaCodec.INFO_TRY_AGAIN_LATER){
                break;
            }else if(outputIndex==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
                AvLog.d("get audio output format changed ->"+mAudioEncoder.getOutputFormat().toString());
                // Adding the audio track also unblocks the video recording path.
                synchronized (VIDEO_LOCK){
                    mAudioTrack=mMuxer.addTrack(mAudioEncoder.getOutputFormat());
                    isRecordVideoStarted=true;
                }
            }
        }
    }
    return false;
}
示例7: videoDecodeStep
/**
 * Performs one decode step for the video track: feeds at most one sample
 * from the shared extractor into the decoder, then drains every available
 * output frame, rendering each to the decoder's output surface.
 *
 * @return true when the extractor is exhausted or the user requested stop
 */
private boolean videoDecodeStep(){
    // Try to obtain a free input buffer and feed one encoded sample.
    int mInputIndex=mVideoDecoder.dequeueInputBuffer(TIME_OUT);
    if(mInputIndex>=0){
        ByteBuffer buffer= CodecUtil.getInputBuffer(mVideoDecoder,mInputIndex);
        buffer.clear();
        // The extractor is shared with other tracks; serialize access.
        synchronized (Extractor_LOCK) {
            mExtractor.selectTrack(mVideoDecodeTrack);
            int ret = mExtractor.readSampleData(buffer, 0);
            if (ret != -1) {
                mVideoStopTimeStamp=mExtractor.getSampleTime();
                mVideoDecoder.queueInputBuffer(mInputIndex, 0, ret, mVideoStopTimeStamp, mExtractor.getSampleFlags());
                isVideoExtractorEnd = false;
            }else{
                // Could use !mExtractor.advance() instead, but that appears to
                // lag by one frame; readSampleData returning -1 also means
                // there is no more data.
                isVideoExtractorEnd = true;
            }
            mExtractor.advance();
        }
    }
    // Drain all decoded frames that are ready right now.
    while (true){
        int mOutputIndex=mVideoDecoder.dequeueOutputBuffer(videoDecodeBufferInfo,TIME_OUT);
        if(mOutputIndex>=0){
            try {
                // NOTE(review): presumably paces frame release against a
                // consumer holding mDecodeSem — confirm with the caller.
                if(!isUserWantToStop){
                    mDecodeSem.acquire();
                }
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            nowTimeStamp=videoDecodeBufferInfo.presentationTimeUs;
            // render=true: the frame goes to the configured output surface.
            mVideoDecoder.releaseOutputBuffer(mOutputIndex,true);
            mFrameSem.release();
        }else if(mOutputIndex== MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
        }else if(mOutputIndex== MediaCodec.INFO_TRY_AGAIN_LATER){
            // Nothing more to drain for now.
            break;
        }
    }
    return isVideoExtractorEnd||isUserWantToStop;
}
示例8: videoEncodeStep
/**
 * Drains pending output from the video encoder and hands each encoded
 * buffer to the store. When {@code isEnd} is set, keeps draining until the
 * encoder emits its end-of-stream buffer, then closes the encoder.
 *
 * @param isEnd true to signal end-of-stream and drain to completion
 * @return always false (the EOS case breaks out of the loop instead)
 */
private synchronized boolean videoEncodeStep(boolean isEnd){
    AvLog.d(TAG,"videoEncodeStep:"+isEncodeStarted+"/"+isEnd);
    if(isEncodeStarted){
        if(isEnd){
            // No more frames will be submitted to the encoder's input surface.
            mVideoEncoder.signalEndOfInputStream();
        }
        MediaCodec.BufferInfo info=new MediaCodec.BufferInfo();
        while (true){
            int mOutputIndex=mVideoEncoder.dequeueOutputBuffer(info,TIME_OUT);
            AvLog.i(TAG,"videoEncodeStep:mOutputIndex="+mOutputIndex);
            if(mOutputIndex>=0){
                // Codec-config data (SPS/PPS) is carried by the MediaFormat;
                // zero the size so the store ignores this buffer's payload.
                if((info.flags&MediaCodec.BUFFER_FLAG_CODEC_CONFIG)!=0){
                    info.size=0;
                }
                ByteBuffer buffer= CodecUtil.getOutputBuffer(mVideoEncoder,mOutputIndex);
                if(mStore!=null){
                    mStore.addData(mVideoTrack,new HardMediaData(buffer,info));
                }
                mVideoEncoder.releaseOutputBuffer(mOutputIndex,false);
                if((info.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
                    closeVideoEncoder();
                    isEncodeStarted=false;
                    AvLog.i(TAG,"videoEncodeStep: MediaCodec.BUFFER_FLAG_END_OF_STREAM ");
                    break;
                }
            }else if(mOutputIndex== MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
                // Register the now-known output format with the store.
                MediaFormat format=mVideoEncoder.getOutputFormat();
                if(mStore!=null){
                    mVideoTrack=mStore.addTrack(format);
                }
            }else if(mOutputIndex== MediaCodec.INFO_TRY_AGAIN_LATER&&!isEnd){
                // Only stop on TRY_AGAIN while not finishing; when isEnd is
                // set we keep polling until the EOS buffer arrives.
                break;
            }
        }
    }
    return false;
}
示例9: audioEncodeStep
/**
 * Performs one encode step for the audio track: reads PCM data from the
 * recorder into the encoder, then drains any encoded output to the store.
 *
 * @param isEnd true to queue the current input buffer with the
 *              end-of-stream flag set
 * @return true once the encoder has emitted its end-of-stream buffer
 */
private synchronized boolean audioEncodeStep(boolean isEnd){
    if(isStarted){
        AvLog.d("audioEncodeStep");
        int inputIndex=mAudioEncoder.dequeueInputBuffer(TIME_OUT);
        if(inputIndex>=0){
            ByteBuffer buffer= CodecUtil.getInputBuffer(mAudioEncoder,inputIndex);
            buffer.clear();
            // Presentation time in microseconds relative to recording start.
            long time= (SystemClock.elapsedRealtimeNanos()-startTime)/1000;
            int length=mRecord.read(buffer,mRecordBufferSize);
            if(length>=0){
                mAudioEncoder.queueInputBuffer(inputIndex,0,length,time,
                    isEnd?MediaCodec.BUFFER_FLAG_END_OF_STREAM:0);
            }
        }
        MediaCodec.BufferInfo info=new MediaCodec.BufferInfo();
        while (true){
            int outputIndex=mAudioEncoder.dequeueOutputBuffer(info,TIME_OUT);
            if(outputIndex>=0){
                if(mStore!=null){
                    mStore.addData(mAudioTrack,new HardMediaData(CodecUtil.getOutputBuffer(mAudioEncoder,outputIndex),info));
                }
                mAudioEncoder.releaseOutputBuffer(outputIndex,false);
                // FIX: BufferInfo.flags is a bit field; test the EOS bit instead
                // of comparing the whole field, which misses EOS combined with
                // other flags.
                if((info.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
                    AvLog.d("CameraRecorder get audio encode end of stream");
                    stop();
                    return true;
                }
            }else if(outputIndex==MediaCodec.INFO_TRY_AGAIN_LATER){
                break;
            }else if(outputIndex==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
                AvLog.d("get audio output format changed ->"+mAudioEncoder.getOutputFormat().toString());
                // Register the now-known output format with the store.
                mAudioTrack=mStore.addTrack(mAudioEncoder.getOutputFormat());
            }
        }
    }
    return false;
}
示例10: drainDecoder
/**
 * Drains one output buffer from the decoder and, when it holds a renderable
 * frame, draws it through the OpenGL surface pipeline into the encoder's
 * input surface.
 *
 * @param timeoutUs timeout for dequeueOutputBuffer, in microseconds
 * @return DRAIN_STATE_NONE when nothing was consumed,
 *         DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY on format/buffer changes,
 *         DRAIN_STATE_CONSUMED when a buffer was processed
 */
private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;
    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        // Propagate EOS downstream and suppress rendering of this buffer.
        mEncoder.signalEndOfInputStream();
        mIsDecoderEOS = true;
        mBufferInfo.size = 0;
    }
    boolean doRender = (mBufferInfo.size > 0);
    // NOTE: doRender will block if buffer (of encoder) is full.
    // Refer: http://bigflake.com/mediacodec/CameraToMpegTest.java.txt
    mDecoder.releaseOutputBuffer(result, doRender);
    if (doRender) {
        // Wait for the frame to land on the decoder output surface, draw it
        // with the frame's timestamp, then hand it to the encoder surface.
        mDecoderOutputSurfaceWrapper.awaitNewImage();
        mDecoderOutputSurfaceWrapper.drawImage(mBufferInfo.presentationTimeUs * 1000);
        mEncoderInputSurfaceWrapper.setPresentationTime(mBufferInfo.presentationTimeUs * 1000);
        // After swapBuffers, the preceding OpenGL operations are rendered
        // into the Surface bound to the InputSurface.
        mEncoderInputSurfaceWrapper.swapBuffers();
    }
    return DRAIN_STATE_CONSUMED;
}
示例11: drainEncoder
/**
 * Drains one output buffer from the encoder and writes it to the muxer.
 *
 * @param timeoutUs timeout for dequeueOutputBuffer, in microseconds
 * @return DRAIN_STATE_NONE when nothing was consumed,
 *         DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY on format/buffer changes or
 *         codec-config buffers, DRAIN_STATE_CONSUMED when a sample was written
 */
private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;
    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            // The format may only change once (to the actual encoded format).
            if (mActualOutputFormat != null) {
                throw new RuntimeException("Audio output format changed twice.");
            }
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(SAMPLE_TYPE, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            // Buffer array invalidated; re-wrap the codec's buffers.
            mEncoderBuffers = new MediaCodecBufferCompatWrapper(mEncoder);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        // Zero out the info so no payload is written for the EOS buffer.
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(SAMPLE_TYPE, mEncoderBuffers.getOutputBuffer(result), mBufferInfo);
    mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
示例12: drainEncoder
/**
 * Drains one output buffer from the audio encoder and writes it to the
 * queued muxer's audio track.
 *
 * @param timeoutUs timeout for dequeueOutputBuffer, in microseconds
 * @return DRAIN_STATE_NONE when nothing was consumed,
 *         DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY on format/buffer changes or
 *         codec-config buffers, DRAIN_STATE_CONSUMED when a sample was written
 */
private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;
    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            // The format may only change once (to the actual encoded format).
            if (mActualOutputFormat != null) {
                throw new RuntimeException("Audio output format changed twice.");
            }
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(QueuedMuxer.SampleType.AUDIO, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        // Zero out the info so no payload is written for the EOS buffer.
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(QueuedMuxer.SampleType.AUDIO, MediaUtil.getOutputBuffer(mEncoder, result), mBufferInfo);
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
示例13: encode
/**
 * Feed a byte buffer into the MediaCodec encoder's input queue, retrying
 * until an input buffer becomes available or capturing stops.
 *
 * @param buffer source data; may be null (e.g. when only signalling EOS)
 * @param length length of the data in bytes; zero (or less) signals EOS
 * @param presentationTimeUs presentation timestamp in microseconds
 */
protected void encode(final ByteBuffer buffer, final int length, final long presentationTimeUs) {
    Log.d(TAG, "encode: "+this.getClass().getSimpleName()+" "+mIsCapturing);
    if (!mIsCapturing) return;
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    while (mIsCapturing) {
        final int inputBufferIndex = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
        Log.d(TAG, "encode: "+this.getClass().getSimpleName()+" "+inputBufferIndex);
        if (inputBufferIndex >= 0) {
            final ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
            inputBuffer.clear();
            if (buffer != null) {
                inputBuffer.put(buffer);
            }
            // if (DEBUG) Log.v(TAG, "encode:queueInputBuffer");
            if (length <= 0) {
                // send EOS
                mIsEOS = true;
                if (DEBUG) Log.i(TAG, "send BUFFER_FLAG_END_OF_STREAM");
                mMediaCodec.queueInputBuffer(inputBufferIndex, 0, 0,
                    presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                break;
            } else {
                mMediaCodec.queueInputBuffer(inputBufferIndex, 0, length,
                    presentationTimeUs, 0);
            }
            // Data queued; one call feeds at most one buffer.
            break;
        } else if (inputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // wait for MediaCodec encoder is ready to encode
            // nothing to do here because MediaCodec#dequeueInputBuffer(TIMEOUT_USEC)
            // will wait for maximum TIMEOUT_USEC(10msec) on each call
        }
    }
}
示例14: encode
/**
 * Feed a byte buffer into the MediaCodec encoder's input queue, retrying
 * until an input buffer becomes available or capturing stops.
 *
 * @param buffer source data; may be null (e.g. when only signalling EOS)
 * @param length length of the data in bytes; zero (or less) signals EOS
 * @param presentationTimeUs presentation timestamp in microseconds
 */
protected void encode(final ByteBuffer buffer, final int length, final long presentationTimeUs) {
    if (!mIsCapturing) return;
    final ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
    while (mIsCapturing) {
        final int index = mMediaCodec.dequeueInputBuffer(TIMEOUT_USEC);
        if (index < 0) {
            // INFO_TRY_AGAIN_LATER: no free input buffer yet. Nothing to do —
            // dequeueInputBuffer already blocked up to TIMEOUT_USEC (10 msec),
            // so just retry on the next iteration.
            continue;
        }
        final ByteBuffer in = inputBuffers[index];
        in.clear();
        if (buffer != null) {
            in.put(buffer);
        }
        if (length <= 0) {
            // Zero-length submission means end-of-stream.
            mIsEOS = true;
            if (DEBUG) Log.i(TAG, "send BUFFER_FLAG_END_OF_STREAM");
            mMediaCodec.queueInputBuffer(index, 0, 0,
                presentationTimeUs, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        } else {
            mMediaCodec.queueInputBuffer(index, 0, length,
                presentationTimeUs, 0);
        }
        // One call feeds at most one buffer.
        break;
    }
}
示例15: dequeueOutputBuffer
/**
 * Drains the decoder until a decoded buffer is produced or it reports
 * MediaCodec.INFO_TRY_AGAIN_LATER, tracking format/buffer changes and
 * per-frame decode timing along the way.
 *
 * @param dequeueTimeoutMs per-dequeue timeout, in milliseconds
 * @return the decoded buffer with timing metadata, or null if no decode is
 *         pending or the codec has nothing ready
 */
private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
    checkOnMediaCodecThread();
    // No frame was submitted for decoding; nothing can come out.
    if (decodeStartTimeMs.isEmpty()) {
        return null;
    }
    // Drain the decoder until receiving a decoded buffer or hitting
    // MediaCodec.INFO_TRY_AGAIN_LATER.
    final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    while (true) {
        final int result =
            mediaCodec.dequeueOutputBuffer(info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
        switch (result) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                outputBuffers = mediaCodec.getOutputBuffers();
                Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
                // A buffer change after the first decoded frame is unexpected.
                if (hasDecodedFirstFrame) {
                    throw new RuntimeException("Unexpected output buffer change event.");
                }
                break;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                MediaFormat format = mediaCodec.getOutputFormat();
                Logging.d(TAG, "Decoder format changed: " + format.toString());
                int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
                int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
                // A size change mid-stream cannot be handled; only reject it
                // after the first frame has been decoded.
                if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
                    throw new RuntimeException("Unexpected size change. Configured " + width + "*" + height
                        + ". New " + new_width + "*" + new_height);
                }
                width = format.getInteger(MediaFormat.KEY_WIDTH);
                height = format.getInteger(MediaFormat.KEY_HEIGHT);
                // For byte-buffer output, validate the reported color format.
                if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
                    colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
                    Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
                    if (!supportedColorList.contains(colorFormat)) {
                        throw new IllegalStateException("Non supported color format: " + colorFormat);
                    }
                }
                // Some codecs report stride/slice-height via these vendor keys.
                if (format.containsKey("stride")) {
                    stride = format.getInteger("stride");
                }
                if (format.containsKey("slice-height")) {
                    sliceHeight = format.getInteger("slice-height");
                }
                Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
                // Stride/slice-height can never be smaller than the frame itself.
                stride = Math.max(width, stride);
                sliceHeight = Math.max(height, sliceHeight);
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                // Nothing decoded within the timeout.
                return null;
            default:
                // Non-negative index: a decoded frame is available.
                hasDecodedFirstFrame = true;
                TimeStamps timeStamps = decodeStartTimeMs.remove();
                long decodeTimeMs = SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs;
                // Clamp pathological decode times (e.g. after a pause) so
                // downstream statistics are not skewed.
                if (decodeTimeMs > MAX_DECODE_TIME_MS) {
                    Logging.e(TAG, "Very high decode time: " + decodeTimeMs + "ms"
                        + ". Q size: " + decodeStartTimeMs.size()
                        + ". Might be caused by resuming H264 decoding after a pause.");
                    decodeTimeMs = MAX_DECODE_TIME_MS;
                }
                return new DecodedOutputBuffer(result, info.offset, info.size,
                    TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs), timeStamps.timeStampMs,
                    timeStamps.ntpTimeStampMs, decodeTimeMs, SystemClock.elapsedRealtime());
        }
    }
}