This article compiles typical usage examples of the Java constant android.media.MediaCodec.BUFFER_FLAG_CODEC_CONFIG. If you are wondering how MediaCodec.BUFFER_FLAG_CODEC_CONFIG is used in practice, the curated code examples below may help. You can also read further about its enclosing class, android.media.MediaCodec.
The sections below present 15 code examples that use MediaCodec.BUFFER_FLAG_CODEC_CONFIG, sorted by popularity by default.
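As a quick orientation before the examples: BUFFER_FLAG_CODEC_CONFIG marks output buffers that carry codec-specific data (for example H.264 SPS/PPS or the AAC AudioSpecificConfig) rather than regular media samples. A minimal sketch of the typical check, assuming an already configured encoder named encoder and a BufferInfo named info (both names are placeholders, not taken from any example below):
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int index = encoder.dequeueOutputBuffer(info, 10000 /* timeout in microseconds */);
if (index >= 0) {
    if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // Codec-specific data (e.g. SPS/PPS). It is usually delivered to the muxer via the
        // MediaFormat returned after INFO_OUTPUT_FORMAT_CHANGED, so most code skips it here
        // rather than writing it as a sample.
        info.size = 0;
    }
    encoder.releaseOutputBuffer(index, false);
}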
Example 1: handleCodecOutput
private void handleCodecOutput(MediaCodec mediaCodec,
ByteBuffer[] codecOutputBuffers,
MediaCodec.BufferInfo bufferInfo,
OutputStream outputStream)
throws IOException
{
int codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
while (codecOutputBufferIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
if (codecOutputBufferIndex >= 0) {
ByteBuffer encoderOutputBuffer = codecOutputBuffers[codecOutputBufferIndex];
encoderOutputBuffer.position(bufferInfo.offset);
encoderOutputBuffer.limit(bufferInfo.offset + bufferInfo.size);
if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
byte[] header = createAdtsHeader(bufferInfo.size - bufferInfo.offset);
outputStream.write(header);
byte[] data = new byte[encoderOutputBuffer.remaining()];
encoderOutputBuffer.get(data);
outputStream.write(data);
}
encoderOutputBuffer.clear();
mediaCodec.releaseOutputBuffer(codecOutputBufferIndex, false);
} else if (codecOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
codecOutputBuffers = mediaCodec.getOutputBuffers();
}
codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
}
}
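Example 1 relies on a createAdtsHeader helper that is not shown on this page. As a rough illustration only (not the original project's code), a 7-byte ADTS header for AAC-LC could be built roughly as below; the profile, sampling-frequency index and channel configuration are assumptions and must match the encoder's actual MediaFormat:
// Hypothetical sketch of an ADTS header builder (AAC-LC, 44.1 kHz, mono assumed).
private byte[] createAdtsHeader(int packetLength) {
    int frameLength = packetLength + 7;   // AAC payload plus the 7-byte header
    int profile = 2;                      // AAC-LC (assumed)
    int freqIndex = 4;                    // 44.1 kHz (assumed)
    int channelConfig = 1;                // mono (assumed)
    byte[] header = new byte[7];
    header[0] = (byte) 0xFF;              // syncword, high bits
    header[1] = (byte) 0xF1;              // syncword low bits, MPEG-4, no CRC
    header[2] = (byte) (((profile - 1) << 6) + (freqIndex << 2) + (channelConfig >> 2));
    header[3] = (byte) (((channelConfig & 3) << 6) + (frameLength >> 11));
    header[4] = (byte) ((frameLength & 0x7FF) >> 3);
    header[5] = (byte) (((frameLength & 7) << 5) + 0x1F);
    header[6] = (byte) 0xFC;
    return header;
}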
Example 2: reportEncodedImage
private void reportEncodedImage(final MediaCodec.BufferInfo info, final ByteBuffer buffer) {
buffer.position(info.offset);
buffer.limit(info.size);
if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
Log.i(TAG, "reportEncodedImage %d %d %d",
info.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME, info.size,
info.presentationTimeUs);
if (System.currentTimeMillis() - mLastResetBitsTime > 1000) {
mNotifier.reportBr(mOutputBits);
mOutputBits = 0;
mLastResetBitsTime = System.currentTimeMillis();
}
mOutputBits += info.size * 8;
}
}
Example 3: encodeToVideoTrack
private void encodeToVideoTrack(int index) {
ByteBuffer encodedData = mEncoder.getOutputBuffer(index);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status.
// Ignore it.
mBufferInfo.size = 0;
}
if (mBufferInfo.size == 0) {
encodedData = null;
}
if (encodedData != null) {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mWeakMuxer.writeMediaData(mVideoTrackIndex, encodedData, mBufferInfo);
}
}
Example 4: videoEncodeStep
private synchronized boolean videoEncodeStep(boolean isEnd) {
AvLog.d(TAG, "videoEncodeStep:" + isEncodeStarted + "/" + isEnd);
if (isEncodeStarted) {
if (isEnd) {
mVideoEncoder.signalEndOfInputStream();
}
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
while (true) {
int mOutputIndex = mVideoEncoder.dequeueOutputBuffer(info, TIME_OUT);
AvLog.i(TAG, "videoEncodeStep:mOutputIndex=" + mOutputIndex);
if (mOutputIndex >= 0) {
if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
info.size = 0;
}
ByteBuffer buffer = CodecUtil.getOutputBuffer(mVideoEncoder, mOutputIndex);
if (mStore != null) {
mStore.addData(mVideoTrack, new HardMediaData(buffer, info));
}
mVideoEncoder.releaseOutputBuffer(mOutputIndex, false);
if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
closeVideoEncoder();
isEncodeStarted = false;
AvLog.i(TAG, "videoEncodeStep: MediaCodec.BUFFER_FLAG_END_OF_STREAM ");
break;
}
} else if (mOutputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat format = mVideoEncoder.getOutputFormat();
if (mStore != null) {
mVideoTrack = mStore.addTrack(format);
}
} else if (mOutputIndex == MediaCodec.INFO_TRY_AGAIN_LATER && !isEnd) {
break;
}
}
}
return false;
}
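Example 4 calls CodecUtil.getOutputBuffer, a project-specific helper that is not shown here. A plausible sketch of such a compatibility helper, assuming it only has to bridge the pre- and post-API-21 ways of obtaining an output buffer (the class and method names come from the example, the body is an assumption):
// Hypothetical compatibility helper: MediaCodec#getOutputBuffer(int) exists only on API 21+,
// while older releases expose the deprecated getOutputBuffers() array instead.
@SuppressWarnings("deprecation")
public static ByteBuffer getOutputBuffer(MediaCodec codec, int index) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.LOLLIPOP) {
        return codec.getOutputBuffer(index);
    }
    return codec.getOutputBuffers()[index];
}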
Example 5: encodeToVideoTrack
@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private void encodeToVideoTrack(int index) {
ByteBuffer encodedData = mEncoder.getOutputBuffer(index);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status.
// Ignore it.
Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size == 0) {
Log.d(TAG, "info.size == 0, drop it.");
encodedData = null;
} else {
Log.d(TAG, "got buffer, info: size=" + mBufferInfo.size
+ ", presentationTimeUs=" + mBufferInfo.presentationTimeUs
+ ", offset=" + mBufferInfo.offset);
}
if (encodedData != null) {
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mMuxer.writeSampleData(mVideoTrackIndex, encodedData, mBufferInfo);
Log.i(TAG, "sent mBufferInfo.offset" + mBufferInfo.offset + " bytes to muxer...");
Log.i(TAG, "sent mBufferInfo.size" + mBufferInfo.size + " bytes to muxer...");
}
}
Example 6: drainEncoder
private int drainEncoder(long timeoutUs) {
if (mIsEncoderEOS) return DRAIN_STATE_NONE;
int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
switch (result) {
case MediaCodec.INFO_TRY_AGAIN_LATER:
return DRAIN_STATE_NONE;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
if (mActualOutputFormat != null) {
throw new RuntimeException("Audio output format changed twice.");
}
mActualOutputFormat = mEncoder.getOutputFormat();
mMuxer.setOutputFormat(SAMPLE_TYPE, mActualOutputFormat);
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
mEncoderBuffers = new MediaCodecBufferCompatWrapper(mEncoder);
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
}
if (mActualOutputFormat == null) {
throw new RuntimeException("Could not determine actual output format.");
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
mIsEncoderEOS = true;
mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// SPS or PPS, which should be passed by MediaFormat.
mEncoder.releaseOutputBuffer(result, false);
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
}
mMuxer.writeSampleData(SAMPLE_TYPE, mEncoderBuffers.getOutputBuffer(result), mBufferInfo);
mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
mEncoder.releaseOutputBuffer(result, false);
return DRAIN_STATE_CONSUMED;
}
Example 7: onEncodedFrame
@Override
public void onEncodedFrame(final MediaCodecVideoEncoder.OutputBufferInfo frame,
final MediaCodec.BufferInfo bufferInfo) {
boolean configFrame = (bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
if (!configFrame) {
mMediaMuxer.writeSampleData(mTrackIndex, frame.buffer(), bufferInfo);
}
}
Example 8: decodeVideoBuffer
/**
* Decode (play back) the video.
*/
private void decodeVideoBuffer() {
// Receive the decode result from MediaCodec
int decodeStatus = mDecoder.dequeueOutputBuffer(mBufferinfo, BUFFER_TIMEOUT_USEC);
if (checkDecoderStatus(decodeStatus)) {
if ((mBufferinfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)
!= 0) {
// Read the codec config section (nothing has been decoded yet)
Log.d(TAG, "mDecoder configured (" + mBufferinfo.size + " bytes)");
} else if ((mBufferinfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)
!= 0) {
// Decoded through to the end of the stream
Log.d(TAG, "Decoder gets BUFFER_FLAG_END_OF_STREAM. ");
mDecodeDone = true;
} else if (mBufferinfo.presentationTimeUs > 0) {
// Wait while the video timestamp is still ahead of the actual elapsed time
while (mBufferinfo.presentationTimeUs / 1000 >
System.currentTimeMillis() - mStartMs) {
try {
Thread.sleep(10);
} catch (InterruptedException e) {
e.printStackTrace();
}
}
// Send the decoded buffer to the surface (renders the video)
mDecoder.releaseOutputBuffer(decodeStatus, true);
}
}
}
Example 9: getDataFromSurface
private void getDataFromSurface() {
thread = new Thread(new Runnable() {
@Override
public void run() {
while (!Thread.interrupted()) {
ByteBuffer[] outputBuffers = videoEncoder.getOutputBuffers();
for (; ; ) {
int outBufferIndex = videoEncoder.dequeueOutputBuffer(videoInfo, 0);
if (outBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat mediaFormat = videoEncoder.getOutputFormat();
getH264Data.onVideoFormat(mediaFormat);
getH264Data.onSPSandPPS(mediaFormat.getByteBuffer("csd-0"),
mediaFormat.getByteBuffer("csd-1"));
spsPpsSetted = true;
} else if (outBufferIndex >= 0) {
//This ByteBuffer is H264
ByteBuffer bb = outputBuffers[outBufferIndex];
if ((videoInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
if (!spsPpsSetted) {
Pair<ByteBuffer, ByteBuffer> buffers =
decodeSpsPpsFromBuffer(bb.duplicate(), videoInfo.size);
if (buffers != null) {
getH264Data.onSPSandPPS(buffers.first, buffers.second);
spsPpsSetted = true;
}
}
}
videoInfo.presentationTimeUs = System.nanoTime() / 1000 - mPresentTimeUs;
getH264Data.getH264Data(bb, videoInfo);
videoEncoder.releaseOutputBuffer(outBufferIndex, false);
} else {
break;
}
}
}
}
});
thread.start();
}
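Example 9's decodeSpsPpsFromBuffer is not reproduced on this page. As a hedged sketch of the general technique only: for an H.264 encoder the BUFFER_FLAG_CODEC_CONFIG buffer is an Annex-B blob containing the SPS and PPS NAL units separated by 0x00 0x00 0x00 0x01 start codes, so splitting at the second start code is often enough. This is an illustration, not the original implementation:
// Hypothetical sketch: split an Annex-B codec-config buffer into SPS and PPS.
private Pair<ByteBuffer, ByteBuffer> decodeSpsPpsFromBuffer(ByteBuffer csd, int length) {
    byte[] data = new byte[length];
    csd.get(data, 0, length);
    // Look for the second 0x00 0x00 0x00 0x01 start code; the SPS ends right before it.
    for (int i = 4; i + 3 < length; i++) {
        if (data[i] == 0 && data[i + 1] == 0 && data[i + 2] == 0 && data[i + 3] == 1) {
            byte[] sps = Arrays.copyOfRange(data, 0, i);
            byte[] pps = Arrays.copyOfRange(data, i, length);
            return new Pair<>(ByteBuffer.wrap(sps), ByteBuffer.wrap(pps));
        }
    }
    return null; // no second start code found; the caller falls back to the output format
}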
Example 10: processOutputBuffer
@Override
protected boolean processOutputBuffer(long positionUs, long elapsedRealtimeUs, MediaCodec codec,
ByteBuffer buffer, int bufferIndex, int bufferFlags, long bufferPresentationTimeUs,
boolean shouldSkip) throws ExoPlaybackException {
if (passthroughEnabled && (bufferFlags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// Discard output buffers from the passthrough (raw) decoder containing codec specific data.
codec.releaseOutputBuffer(bufferIndex, false);
return true;
}
if (shouldSkip) {
codec.releaseOutputBuffer(bufferIndex, false);
decoderCounters.skippedOutputBufferCount++;
audioTrack.handleDiscontinuity();
return true;
}
try {
if (audioTrack.handleBuffer(buffer, bufferPresentationTimeUs)) {
codec.releaseOutputBuffer(bufferIndex, false);
decoderCounters.renderedOutputBufferCount++;
return true;
}
} catch (AudioTrack.InitializationException | AudioTrack.WriteException e) {
throw ExoPlaybackException.createForRenderer(e, getIndex());
}
return false;
}
Example 11: drainEncoder
private int drainEncoder(long timeoutUs) {
if (mIsEncoderEOS) return DRAIN_STATE_NONE;
int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
switch (result) {
case MediaCodec.INFO_TRY_AGAIN_LATER:
return DRAIN_STATE_NONE;
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
if (mActualOutputFormat != null) {
throw new RuntimeException("Video output format changed twice.");
}
mActualOutputFormat = mEncoder.getOutputFormat();
mMuxer.setOutputFormat(QueuedMuxer.SampleType.VIDEO, mActualOutputFormat);
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
mEncoderOutputBuffers = mEncoder.getOutputBuffers();
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
}
if (mActualOutputFormat == null) {
throw new RuntimeException("Could not determine actual output format.");
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
mIsEncoderEOS = true;
mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// SPS or PPS, which should be passed by MediaFormat.
mEncoder.releaseOutputBuffer(result, false);
return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
}
mMuxer.writeSampleData(QueuedMuxer.SampleType.VIDEO, mEncoderOutputBuffers[result], mBufferInfo);
mEncoder.releaseOutputBuffer(result, false);
return DRAIN_STATE_CONSUMED;
}
Example 12: sendToMediaMuxer
private void sendToMediaMuxer() {
if (mAudioEncoder == null) {
return;
}
final ByteBuffer[] outputBuffers = mAudioEncoder.getOutputBuffers();
final int outputBufferIndex = mAudioEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);
if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
mAudioTrackIndex = mMuxer.addMediaTrack(mAudioEncoder.getOutputFormat());
if (mAudioTrackIndex == -1) {
return;
}
}
if (outputBufferIndex >= 0) {
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// You should set the output format on the muxer here when targeting Android 4.3 or lower,
// but MediaCodec#getOutputFormat cannot be called at this point (INFO_OUTPUT_FORMAT_CHANGED
// has not arrived yet), so the output format would have to be reconstructed from the buffer data.
// This sample is for API >= 18 (Android 4.3+), so the flag is simply ignored here.
mBufferInfo.size = 0;
}
final ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
mMuxer.writeMediaData(mAudioTrackIndex, outputBuffer, mBufferInfo);
mAudioEncoder.releaseOutputBuffer(outputBufferIndex, false);
}
}
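The comment in Example 12 mentions that on older API levels the output format would have to be reconstructed from the codec-config buffer itself. A hedged sketch of that idea for an AAC encoder, assuming 44.1 kHz stereo and treating the BUFFER_FLAG_CODEC_CONFIG payload as the csd-0 blob (the names and parameters here are illustrative, not from the example):
// Hypothetical: rebuild a MediaFormat from the codec-config buffer.
MediaFormat format = MediaFormat.createAudioFormat("audio/mp4a-latm", 44100, 2);
ByteBuffer csd0 = ByteBuffer.allocate(mBufferInfo.size);
csd0.put(outputBuffer);   // outputBuffer is assumed to be positioned at the codec-config data
csd0.flip();
format.setByteBuffer("csd-0", csd0);
// The resulting format could then be handed to the muxer's addTrack().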
Example 13: drainEncoder
/**
* Extracts all pending data from the encoder.
* <p>
* If endOfStream is not set, this returns when there is no more data to drain. If it
* is set, we send EOS to the encoder, and then iterate until we see EOS on the output.
* Calling this with endOfStream set should be done once, right before stopping the muxer.
*/
@SuppressWarnings("deprecation")
private void drainEncoder(boolean eos) {
checkState();
//logv("Drain encoder: " + eos);
if (eos) {
//logv("Sending EOS to encoder");
mEncoder.signalEndOfInputStream();
}
int encoderStatus;
while ((encoderStatus = getEncoderStatus(eos)) != MediaCodec.INFO_TRY_AGAIN_LATER) {
switch (encoderStatus) {
case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
throw new RuntimeException("Output buffers changed twice");
case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
if (mTrackId != -1) {
throw new RuntimeException("Format changed twice");
}
// Now that we have the Magic Goodies, start the muxer
final MediaFormat format = mEncoder.getOutputFormat();
//logv("Encoder output format changed: " + format);
mTrackId = mMuxer.addTrack(format);
mMuxer.start();
break;
default:
if (encoderStatus >= 0) {
final ByteBuffer encodedData = mOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException (
"EncoderOutputBuffer " + encoderStatus + " was null"
);
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out and fed to the muxer when we got
// the INFO_OUTPUT_FORMAT_CHANGED status. Ignore it.
//logv("ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
if (mTrackId == -1) {
throw new RuntimeException("Muxer hasn't started");
}
// Adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mMuxer.writeSampleData(mTrackId, encodedData, mBufferInfo);
//logv("Sent " + mBufferInfo.size + " bytes to muxer");
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
} else {
//noinspection StatementWithEmptyBody
if (encoderStatus != INFO_NO_OUTPUT_AVAILABLE_YET) {
logw("unexpected encoder status: " + encoderStatus);
}
}
break;
}
}
}
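For context, a drain method like the one in Example 13 is typically driven from the recording loop: it is called with eos == false after every frame is submitted, and once with eos == true right before the muxer is stopped. A minimal hedged sketch of such a caller (the isRecording flag is a placeholder; encoder and muxer setup are omitted):
// Hypothetical recording loop around drainEncoder(boolean).
while (isRecording) {
    // ... render a frame into the encoder's input surface ...
    drainEncoder(false);   // pull whatever output is ready without waiting for EOS
}
drainEncoder(true);        // signal EOS and drain the remaining buffers
mMuxer.stop();
mMuxer.release();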
Example 14: drainEncoder
/**
* Drains all pending output from the decoder, and adds it to the circular buffer.
*/
public void drainEncoder() {
final int TIMEOUT_USEC = 0; // no timeout -- check for buffers, bail if none
ByteBuffer[] encoderOutputBuffers = mEncoder.getOutputBuffers();
while (true) {
int encoderStatus = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
// no output available yet
break;
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
// not expected for an encoder
encoderOutputBuffers = mEncoder.getOutputBuffers();
} else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
// Should happen before receiving buffers, and should only happen once.
// The MediaFormat contains the csd-0 and csd-1 keys, which we'll need
// for MediaMuxer. It's unclear what else MediaMuxer might want, so
// rather than extract the codec-specific data and reconstruct a new
// MediaFormat later, we just grab it here and keep it around.
mEncodedFormat = mEncoder.getOutputFormat();
Log.d(TAG, "encoder output format changed: " + mEncodedFormat);
} else if (encoderStatus < 0) {
Log.w(TAG, "unexpected result from encoder.dequeueOutputBuffer: " +
encoderStatus);
// let's ignore it
} else {
ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
if (encodedData == null) {
throw new RuntimeException("encoderOutputBuffer " + encoderStatus +
" was null");
}
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
// The codec config data was pulled out when we got the
// INFO_OUTPUT_FORMAT_CHANGED status. The MediaMuxer won't accept
// a single big blob -- it wants separate csd-0/csd-1 chunks --
// so simply saving this off won't work.
if (VERBOSE) Log.d(TAG, "ignoring BUFFER_FLAG_CODEC_CONFIG");
mBufferInfo.size = 0;
}
if (mBufferInfo.size != 0) {
// adjust the ByteBuffer values to match BufferInfo (not needed?)
encodedData.position(mBufferInfo.offset);
encodedData.limit(mBufferInfo.offset + mBufferInfo.size);
mEncBuffer.add(encodedData, mBufferInfo.flags,
mBufferInfo.presentationTimeUs);
if (VERBOSE) {
Log.d(TAG, "sent " + mBufferInfo.size + " bytes to muxer, ts=" +
mBufferInfo.presentationTimeUs);
}
}
mEncoder.releaseOutputBuffer(encoderStatus, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.w(TAG, "reached end of stream unexpectedly");
break; // out of while
}
}
}
}
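Example 14 deliberately drops the BUFFER_FLAG_CODEC_CONFIG blob and keeps the MediaFormat from INFO_OUTPUT_FORMAT_CHANGED instead, because MediaMuxer wants separate csd-0/csd-1 buffers rather than a single combined blob. A hedged sketch of how the saved format might later be used when the circular buffer is flushed to a file (the output path and method name are illustrative):
// Hypothetical: start a muxer from the format captured in drainEncoder().
private void startMuxerFromSavedFormat() throws IOException {
    MediaMuxer muxer = new MediaMuxer("/sdcard/capture.mp4",
            MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
    int track = muxer.addTrack(mEncodedFormat);   // csd-0/csd-1 come from the saved MediaFormat
    muxer.start();
    // ... write the buffered samples with muxer.writeSampleData(track, data, info) ...
    muxer.stop();
    muxer.release();
}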
Example 15: writeMuxerDataFromEncoding
/**
* Write the encoded data into the muxer.
*/
private void writeMuxerDataFromEncoding(boolean isEOS) {
if (!isEncoding) {
return;
}
Log.d(TAG, "writeMuxerDataFromEncoding isEOS:" + isEOS);
ByteBuffer[] outputBuffers = mMediaCodec.getOutputBuffers();
while (isEncoding) {
int outputBufferIndex = mMediaCodec.dequeueOutputBuffer(mBufferInfo, 10);
Log.d(TAG, "outputBufferIndex=" + outputBufferIndex + " flags:" + mBufferInfo.flags);
if (outputBufferIndex >= 0) {
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
mBufferInfo.size = 0;
}
ByteBuffer encodedData = outputBuffers[outputBufferIndex];
if (mBufferInfo.size != 0) {
mMediaMuxer.writeSampleData(mTrackIndex, encodedData, mBufferInfo);
}
mMediaCodec.releaseOutputBuffer(outputBufferIndex, false);
if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
Log.e(TAG, "Encoding end of stream");
isEncoding = false;
break;
}
} else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
outputBuffers = mMediaCodec.getOutputBuffers();
} else if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
MediaFormat mediaFormat = mMediaCodec.getOutputFormat();
mTrackIndex = mMediaMuxer.addAudioTrack(mediaFormat);
mMediaMuxer.start();
Log.d(TAG, "audio mediaMuxer start");
} else if (outputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
if (!isEOS) {
break;
}
}
}
}
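Example 15 only shows the output/drain side. For completeness, here is a hedged sketch of the matching input side for an AAC audio encoder, feeding raw PCM into the codec; the pcm array, pcmLength, presentationTimeUs and endOfStream variables are assumptions, not part of the original example:
// Hypothetical input side: queue PCM data into the audio encoder.
int inputIndex = mMediaCodec.dequeueInputBuffer(10000 /* timeout in microseconds */);
if (inputIndex >= 0) {
    ByteBuffer inputBuffer = mMediaCodec.getInputBuffers()[inputIndex];
    inputBuffer.clear();
    inputBuffer.put(pcm, 0, pcmLength);   // raw PCM, e.g. read from AudioRecord
    if (endOfStream) {
        mMediaCodec.queueInputBuffer(inputIndex, 0, pcmLength, presentationTimeUs,
                MediaCodec.BUFFER_FLAG_END_OF_STREAM);
    } else {
        mMediaCodec.queueInputBuffer(inputIndex, 0, pcmLength, presentationTimeUs, 0);
    }
}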