

Java MediaCodec.INFO_OUTPUT_FORMAT_CHANGED Field Code Examples

This article collects typical usage examples of the Java field android.media.MediaCodec.INFO_OUTPUT_FORMAT_CHANGED. If you are wondering what MediaCodec.INFO_OUTPUT_FORMAT_CHANGED is for or how to use it, the code examples selected below should help. You can also read further about its enclosing class, android.media.MediaCodec.


The 15 code examples below show how MediaCodec.INFO_OUTPUT_FORMAT_CHANGED is used in practice; by default they are ordered by popularity.
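
Before the individual examples, here is a minimal sketch of the pattern almost all of them share: a synchronous drain loop in which dequeueOutputBuffer() returns MediaCodec.INFO_OUTPUT_FORMAT_CHANGED exactly once, before the first data buffer, and the MediaFormat obtained from getOutputFormat() at that moment is used to add a track to a MediaMuxer. This is an illustrative sketch, not code from any of the projects below; the class and field names (OutputDrainer, codec, muxer, trackIndex) are placeholders, and it assumes API 21+ for getOutputBuffer(int).

import android.media.MediaCodec;
import android.media.MediaFormat;
import android.media.MediaMuxer;

import java.nio.ByteBuffer;

/** Illustrative helper: drains one encoder output buffer into a muxer (sketch only). */
final class OutputDrainer {
    private final MediaCodec codec;   // a started encoder
    private final MediaMuxer muxer;   // configured but not yet started
    private final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    private int trackIndex = -1;
    private boolean muxerStarted = false;

    OutputDrainer(MediaCodec codec, MediaMuxer muxer) {
        this.codec = codec;
        this.muxer = muxer;
    }

    /** Returns false once the end-of-stream buffer has been consumed. */
    boolean drainOnce() {
        int index = codec.dequeueOutputBuffer(info, 10_000 /* timeout in microseconds */);
        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // Reported once, before any data buffer: only now is the real output format
            // (including csd-0/csd-1 for AVC/AAC) known, so this is the place to add the
            // track and start the muxer.
            MediaFormat format = codec.getOutputFormat();
            trackIndex = muxer.addTrack(format);
            muxer.start();
            muxerStarted = true;
        } else if (index >= 0) {
            ByteBuffer buffer = codec.getOutputBuffer(index); // API 21+
            if (muxerStarted && info.size > 0
                    && (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                muxer.writeSampleData(trackIndex, buffer, info);
            }
            codec.releaseOutputBuffer(index, false);
            if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                return false; // end of stream reached
            }
        }
        // INFO_TRY_AGAIN_LATER (and INFO_OUTPUT_BUFFERS_CHANGED on older APIs) simply
        // mean there is nothing to write this round.
        return true;
    }
}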

Example 1: drainDecoder

private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;

    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            mAudioChannel.setActualDecodedFormat(mDecoder.getOutputFormat());
            // deliberate fall-through: a format change is also treated as "retry immediately"
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsDecoderEOS = true;
        mAudioChannel.drainDecoderBufferAndQueue(AudioChannel.BUFFER_INDEX_END_OF_STREAM, 0);
    } else if (mBufferInfo.size > 0) {
        mAudioChannel.drainDecoderBufferAndQueue(result, mBufferInfo.presentationTimeUs);
    }

    return DRAIN_STATE_CONSUMED;
}
 
Developer: SavorGit; Project: Hotspot-master-devp; Lines: 22; Source: AudioTrackTranscoder.java

Example 2: recorderEncoderLoop

/**
 * Reads bytes from the given recorder and encodes them with the given encoder.
 * Uses the (deprecated) synchronous processing mode with buffer arrays.
 * <p/>
 * Encoders (codecs that generate compressed data) create and return their codec-specific
 * data before any valid output buffer, in output buffers marked with the codec-config flag.
 * Buffers containing codec-specific data carry no meaningful timestamps.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void recorderEncoderLoop(MediaCodec codec, SpeechRecord speechRecord) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        codec.start();
        // Getting some buffers (e.g. 4 of each) to communicate with the codec
        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        Log.i("input buffers " + codecInputBuffers.length + "; output buffers: " + codecOutputBuffers.length);
        boolean doneSubmittingInput = false;
        int numRetriesDequeueOutputBuffer = 0;
        int index;
        while (true) {
            if (!doneSubmittingInput) {
                index = codec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
                if (index >= 0) {
                    int size = queueInputBuffer(codec, codecInputBuffers, index, speechRecord);
                    if (size == -1) {
                        codec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        Log.i("enc: in: EOS");
                        doneSubmittingInput = true;
                    } else {
                        Log.i("enc: in: " + size);
                        mNumBytesSubmitted += size;
                    }
                } else {
                    Log.i("enc: in: timeout, will try again");
                }
            }
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            index = codec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
            Log.i("enc: out: flags/index: " + info.flags + "/" + index);
            if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.i("enc: out: INFO_TRY_AGAIN_LATER: " + numRetriesDequeueOutputBuffer);
                if (++numRetriesDequeueOutputBuffer > MAX_NUM_RETRIES_DEQUEUE_OUTPUT_BUFFER) {
                    break;
                }
            } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat format = codec.getOutputFormat();
                Log.i("enc: out: INFO_OUTPUT_FORMAT_CHANGED: " + format.toString());
            } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                codecOutputBuffers = codec.getOutputBuffers();
                Log.i("enc: out: INFO_OUTPUT_BUFFERS_CHANGED");
            } else {
                dequeueOutputBuffer(codec, codecOutputBuffers, index, info);
                mNumBytesDequeued += info.size;
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.i("enc: out: EOS");
                    break;
                }
            }
        }
        codec.stop();
        codec.release();
    }
}
 
Developer: vaibhavs4424; Project: AI-Powered-Intelligent-Banking-Platform; Lines: 63; Source: EncodedAudioRecorder.java
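
Example 2 deliberately uses the deprecated synchronous buffer-array API, which is why it has to handle both INFO_OUTPUT_FORMAT_CHANGED and INFO_OUTPUT_BUFFERS_CHANGED. For comparison, here is a hedged sketch of the asynchronous callback API available since API 21: the codec is driven by a MediaCodec.Callback, dequeueOutputBuffer() is never called, and the same format change arrives through onOutputFormatChanged() instead of the INFO_OUTPUT_FORMAT_CHANGED return code. The class name and comment placeholders are assumptions, not code from this project.

import android.media.MediaCodec;
import android.media.MediaFormat;

// Sketch: with the asynchronous API the INFO_* return codes never appear; a format
// change is delivered as a callback instead.
final class AsyncEncoderCallback extends MediaCodec.Callback {
    @Override
    public void onInputBufferAvailable(MediaCodec codec, int index) {
        // fill codec.getInputBuffer(index), then call codec.queueInputBuffer(...)
    }

    @Override
    public void onOutputBufferAvailable(MediaCodec codec, int index, MediaCodec.BufferInfo info) {
        // consume codec.getOutputBuffer(index), then codec.releaseOutputBuffer(index, false)
    }

    @Override
    public void onOutputFormatChanged(MediaCodec codec, MediaFormat format) {
        // the equivalent of the INFO_OUTPUT_FORMAT_CHANGED branch in the synchronous examples
    }

    @Override
    public void onError(MediaCodec codec, MediaCodec.CodecException e) {
        // handle codec failure
    }
}
// Usage: codec.setCallback(new AsyncEncoderCallback()) before configure()/start().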

Example 3: recordVirtualDisplay

private void recordVirtualDisplay() {
    while (isStart) {
        int index = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);
        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            resetOutputFormat();

        } else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            try {
                // wait 10ms
                Thread.sleep(10);
            } catch (InterruptedException e) {
            }
        } else if (index >= 0) {
            if (!mMuxerStarted) {
                throw new IllegalStateException("MediaMuxer dose not call addTrack(format) ");
            }
            encodeToVideoTrack(index);

            mEncoder.releaseOutputBuffer(index, false);
        }
    }
}
 
Developer: sunshinecoast; Project: ScreenRecordCaptureMaster; Lines: 22; Source: YixiaScreenEncoder.java
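
resetOutputFormat() is not part of this snippet. In screen-recorder code of this shape it typically just registers the encoder's new output format with the MediaMuxer and starts it, which is what makes the mMuxerStarted check above safe. A hedged reconstruction of such a helper follows; the field names mMuxer, mVideoTrackIndex and mMuxerStarted are assumptions mirroring the flags used in the loop, not the project's actual code.

// Hypothetical reconstruction of resetOutputFormat(); not taken from the project.
private void resetOutputFormat() {
    if (mMuxerStarted) {
        // INFO_OUTPUT_FORMAT_CHANGED should arrive at most once, before any data buffer
        throw new IllegalStateException("output format already changed!");
    }
    MediaFormat newFormat = mEncoder.getOutputFormat();
    mVideoTrackIndex = mMuxer.addTrack(newFormat); // register the video track
    mMuxer.start();                                // safe to start once the track exists
    mMuxerStarted = true;                          // checked before writing samples
}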

Example 4: recordVirtualDisplay

@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private void recordVirtualDisplay() {
    while (!mQuit.get()) {
        int index = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);
        Log.i(TAG, "dequeue output buffer index=" + index);
        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            resetOutputFormat();

        } else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            Log.d(TAG, "retrieving buffers time out!");
            try {
                // wait 10ms
                Thread.sleep(10);
            } catch (InterruptedException e) {
            }
        } else if (index >= 0) {

            if (!mMuxerStarted) {
                throw new IllegalStateException("MediaMuxer dose not call addTrack(format) ");
            }
            encodeToVideoTrack(index);

            mEncoder.releaseOutputBuffer(index, false);
        }
    }
}
 
Developer: SavorGit; Project: Hotspot-master-devp; Lines: 26; Source: ScreenRecorder.java

Example 5: checkDecoderStatus

/**
 * Checks the return value of MediaCodec#dequeueOutputBuffer().
 *
 * @param decoderStatus return value of MediaCodec#dequeueOutputBuffer()
 * @return true if a decoded output buffer was processed
 */
private boolean checkDecoderStatus(int decoderStatus) {
    if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
        // the dequeueOutputBuffer() call timed out
        if (mInputDone) {
            Log.d(TAG, "no output from mDecoder available BUT the input is done.");
        }
    } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        Log.d(TAG, "mDecoder output buffers changed");
    } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        Log.d(TAG, "mDecoder output format changed");
    } else if (decoderStatus < 0) {
        Log.d(TAG, "unexpected result from encoder.dequeueOutputBuffer: "
                + decoderStatus);
    } else {
        return true;
    }

    return false;
}
 
Developer: ficklerobot; Project: grid-video-viewer; Lines: 25; Source: DecodeThread.java

Example 6: videoDecodeStep

private boolean videoDecodeStep(){
    int mInputIndex=mVideoDecoder.dequeueInputBuffer(TIME_OUT);
    if(mInputIndex>=0){
        ByteBuffer buffer=getInputBuffer(mVideoDecoder,mInputIndex);
        buffer.clear();
        synchronized (Extractor_LOCK) {
            mExtractor.selectTrack(mVideoDecoderTrack);
            int ret = mExtractor.readSampleData(buffer, 0);
            if (ret != -1) {
                mVideoStopTimeStamp=mExtractor.getSampleTime();
                AvLog.d("mVideoStopTimeStamp:"+mVideoStopTimeStamp);
                mVideoDecoder.queueInputBuffer(mInputIndex, 0, ret, mVideoStopTimeStamp, mExtractor.getSampleFlags());
            }
            isVideoExtractorEnd = !mExtractor.advance();
        }
    }
    while (true){
        int mOutputIndex=mVideoDecoder.dequeueOutputBuffer(mVideoDecoderBufferInfo,TIME_OUT);
        if(mOutputIndex>=0){
            try {
                AvLog.d(" mDecodeSem.acquire ");
                if(!isUserWantToStop){
                    mDecodeSem.acquire();
                }
                AvLog.d(" mDecodeSem.acquire end ");
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            codecNum++;
            mVideoDecoder.releaseOutputBuffer(mOutputIndex,true);
            mSem.release();

        }else if(mOutputIndex== MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
            //MediaFormat format=mVideoDecoder.getOutputFormat();
        }else if(mOutputIndex== MediaCodec.INFO_TRY_AGAIN_LATER){
            break;
        }
    }
    return isVideoExtractorEnd||isUserWantToStop;
}
 
Developer: aiyaapp; Project: AAVT; Lines: 40; Source: Mp4Processor.java

Example 7: videoEncodeStep

private boolean videoEncodeStep(boolean isEnd){
    if(isEnd){
        mVideoEncoder.signalEndOfInputStream();
    }
    while (true){
        int mOutputIndex=mVideoEncoder.dequeueOutputBuffer(mVideoEncoderBufferInfo,TIME_OUT);
        AvLog.d("videoEncodeStep-------------------mOutputIndex="+mOutputIndex+"/"+mVideoEncoderBufferInfo.presentationTimeUs);
        if(mOutputIndex>=0){
            ByteBuffer buffer=getOutputBuffer(mVideoEncoder,mOutputIndex);
            if(mVideoEncoderBufferInfo.size>0){
                mMuxer.writeSampleData(mVideoEncoderTrack,buffer,mVideoEncoderBufferInfo);
            }
            mVideoEncoder.releaseOutputBuffer(mOutputIndex,false);
        }else if(mOutputIndex== MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
            MediaFormat format=mVideoEncoder.getOutputFormat();
            AvLog.d("video format -->"+format.toString());
            mVideoEncoderTrack=mMuxer.addTrack(format);
            mMuxer.start();
            synchronized (MUX_LOCK){
                MUX_LOCK.notifyAll();
            }
        }else if(mOutputIndex== MediaCodec.INFO_TRY_AGAIN_LATER){
            break;
        }
    }
    return false;
}
 
Developer: aiyaapp; Project: AAVT; Lines: 27; Source: Mp4Processor.java

Example 8: deliverDecodedFrame

private void deliverDecodedFrame() {
  outputThreadChecker.checkIsOnValidThread();
  try {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    // Block until an output buffer is available (up to 100 milliseconds).  If the timeout is
    // exceeded, deliverDecodedFrame() will be called again on the next iteration of the output
    // thread's loop.  Blocking here prevents the output thread from busy-waiting while the codec
    // is idle.
    int result = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
    if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
      reformat(codec.getOutputFormat());
      return;
    }

    if (result < 0) {
      Logging.v(TAG, "dequeueOutputBuffer returned " + result);
      return;
    }

    FrameInfo frameInfo = frameInfos.poll();
    Integer decodeTimeMs = null;
    int rotation = 0;
    if (frameInfo != null) {
      decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeStartTimeMs);
      rotation = frameInfo.rotation;
    }

    hasDecodedFirstFrame = true;

    if (surfaceTextureHelper != null) {
      deliverTextureFrame(result, info, rotation, decodeTimeMs);
    } else {
      deliverByteFrame(result, info, rotation, decodeTimeMs);
    }

  } catch (IllegalStateException e) {
    Logging.e(TAG, "deliverDecodedFrame failed", e);
  }
}
 
Developer: Piasy; Project: AppRTC-Android; Lines: 39; Source: HardwareVideoDecoder.java

Example 9: videoDecodeStep

private boolean videoDecodeStep(){
    int mInputIndex=mVideoDecoder.dequeueInputBuffer(TIME_OUT);
    if(mInputIndex>=0){
        ByteBuffer buffer= CodecUtil.getInputBuffer(mVideoDecoder,mInputIndex);
        buffer.clear();
        synchronized (Extractor_LOCK) {
            mExtractor.selectTrack(mVideoDecodeTrack);
            int ret = mExtractor.readSampleData(buffer, 0);
            if (ret != -1) {
                mVideoStopTimeStamp=mExtractor.getSampleTime();
                mVideoDecoder.queueInputBuffer(mInputIndex, 0, ret, mVideoStopTimeStamp, mExtractor.getSampleFlags());
                isVideoExtractorEnd = false;
            }else{
                // Could also use !mExtractor.advance(), but that seems to lag one frame behind; readSampleData() returning -1 likewise means there is no more data.
                isVideoExtractorEnd = true;
            }
            mExtractor.advance();
        }
    }
    while (true){
        int mOutputIndex=mVideoDecoder.dequeueOutputBuffer(videoDecodeBufferInfo,TIME_OUT);
        if(mOutputIndex>=0){
            try {
                if(!isUserWantToStop){
                    mDecodeSem.acquire();
                }
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            nowTimeStamp=videoDecodeBufferInfo.presentationTimeUs;
            mVideoDecoder.releaseOutputBuffer(mOutputIndex,true);
            mFrameSem.release();
        }else if(mOutputIndex== MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
            // output format changed; nothing to do here because frames are rendered to a Surface
        }else if(mOutputIndex== MediaCodec.INFO_TRY_AGAIN_LATER){
            break;
        }
    }
    return isVideoExtractorEnd||isUserWantToStop;
}
 
Developer: aiyaapp; Project: AAVT; Lines: 40; Source: Mp4Provider.java

Example 10: audioEncodeStep

private synchronized boolean audioEncodeStep(boolean isEnd){
    if(isStarted){
        AvLog.d("audioEncodeStep");
        int inputIndex=mAudioEncoder.dequeueInputBuffer(TIME_OUT);
        if(inputIndex>=0){
            ByteBuffer buffer= CodecUtil.getInputBuffer(mAudioEncoder,inputIndex);
            buffer.clear();
            long time= (SystemClock.elapsedRealtimeNanos()-startTime)/1000;
            int length=mRecord.read(buffer,mRecordBufferSize);
            if(length>=0){
                mAudioEncoder.queueInputBuffer(inputIndex,0,length,time,
                        isEnd?MediaCodec.BUFFER_FLAG_END_OF_STREAM:0);
            }
        }
        MediaCodec.BufferInfo info=new MediaCodec.BufferInfo();
        while (true){
            int outputIndex=mAudioEncoder.dequeueOutputBuffer(info,TIME_OUT);
            if(outputIndex>=0){
                if(mStore!=null){
                    mStore.addData(mAudioTrack,new HardMediaData(CodecUtil.getOutputBuffer(mAudioEncoder,outputIndex),info));
                }
                mAudioEncoder.releaseOutputBuffer(outputIndex,false);
                // check the EOS flag bitwise: other flags may be set in the same buffer
                if((info.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
                    AvLog.d("CameraRecorder get audio encode end of stream");
                    stop();
                    return true;
                }
            }else if(outputIndex==MediaCodec.INFO_TRY_AGAIN_LATER){
                break;
            }else if(outputIndex==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
                AvLog.d("get audio output format changed ->"+mAudioEncoder.getOutputFormat().toString());
                mAudioTrack=mStore.addTrack(mAudioEncoder.getOutputFormat());
            }
        }
    }
    return false;
}
 
Developer: aiyaapp; Project: AAVT; Lines: 37; Source: SoundRecorder.java

Example 11: drainEncoder

private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;

    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            if (mActualOutputFormat != null) {
                throw new RuntimeException("Audio output format changed twice.");
            }
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(SAMPLE_TYPE, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            mEncoderBuffers = new MediaCodecBufferCompatWrapper(mEncoder);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(SAMPLE_TYPE, mEncoderBuffers.getOutputBuffer(result), mBufferInfo);
    mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
 
Developer: SavorGit; Project: Hotspot-master-devp; Lines: 37; Source: AudioTrackTranscoder.java

Example 12: drainEncoder

private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;
    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            if (mActualOutputFormat != null)
                throw new RuntimeException("Video output format changed twice.");
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(QueuedMuxer.SampleType.VIDEO, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            mEncoderOutputBuffers = mEncoder.getOutputBuffers();
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(QueuedMuxer.SampleType.VIDEO, mEncoderOutputBuffers[result], mBufferInfo);
    mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
 
Developer: SavorGit; Project: Hotspot-master-devp; Lines: 34; Source: VideoTrackTranscoder.java

Example 13: sendToMediaMuxer

private void sendToMediaMuxer() {
    if (mAudioEncoder == null) {
        return;
    }

    final ByteBuffer[] outputBuffers = mAudioEncoder.getOutputBuffers();
    final int outputBufferIndex = mAudioEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);
    if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        mAudioTrackIndex = mMuxer.addMediaTrack(mAudioEncoder.getOutputFormat());
        if (mAudioTrackIndex == -1) {
            return;
        }
    }
    if (outputBufferIndex >= 0) {
        if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            // If you target Android 4.3 or lower you would have to set the muxer's output
            // format here, but MediaCodec#getOutputFormat cannot be called at this point
            // (INFO_OUTPUT_FORMAT_CHANGED has not arrived yet), so the format would have to
            // be reconstructed from the buffer data. This sample targets API >= 18
            // (Android 4.3+), so the codec-config buffer is simply dropped.
            mBufferInfo.size = 0;
        }
        final ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
        mMuxer.writeMediaData(mAudioTrackIndex, outputBuffer, mBufferInfo);

        mAudioEncoder.releaseOutputBuffer(outputBufferIndex, false);
    }
}
 
Developer: sunshinecoast; Project: ScreenRecordCaptureMaster; Lines: 27; Source: YXAudioEncoder.java

Example 14: drainEncoder

private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;

    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            if (mActualOutputFormat != null) {
                throw new RuntimeException("Audio output format changed twice.");
            }
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(QueuedMuxer.SampleType.AUDIO, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(QueuedMuxer.SampleType.AUDIO, MediaUtil.getOutputBuffer(mEncoder, result), mBufferInfo);
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
 
Developer: uestccokey; Project: EZFilter; Lines: 35; Source: AudioTrackTranscoder.java

Example 15: getDataFromSurface

private void getDataFromSurface() {
  thread = new Thread(new Runnable() {
    @Override
    public void run() {
      while (!Thread.interrupted()) {
        ByteBuffer[] outputBuffers = videoEncoder.getOutputBuffers();
        for (; ; ) {
          int outBufferIndex = videoEncoder.dequeueOutputBuffer(videoInfo, 0);
          if (outBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat mediaFormat = videoEncoder.getOutputFormat();
            getH264Data.onVideoFormat(mediaFormat);
            getH264Data.onSPSandPPS(mediaFormat.getByteBuffer("csd-0"),
                mediaFormat.getByteBuffer("csd-1"));
            spsPpsSetted = true;
          } else if (outBufferIndex >= 0) {
            //This ByteBuffer is H264
            ByteBuffer bb = outputBuffers[outBufferIndex];
            if ((videoInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
              if (!spsPpsSetted) {
                Pair<ByteBuffer, ByteBuffer> buffers =
                    decodeSpsPpsFromBuffer(bb.duplicate(), videoInfo.size);
                if (buffers != null) {
                  getH264Data.onSPSandPPS(buffers.first, buffers.second);
                  spsPpsSetted = true;
                }
              }
            }
            videoInfo.presentationTimeUs = System.nanoTime() / 1000 - mPresentTimeUs;
            getH264Data.getH264Data(bb, videoInfo);
            videoEncoder.releaseOutputBuffer(outBufferIndex, false);
          } else {
            break;
          }
        }
      }
    }
  });
  thread.start();
}
 
Developer: pedroSG94; Project: rtmp-rtsp-stream-client-java; Lines: 39; Source: VideoEncoder.java


Note: The android.media.MediaCodec.INFO_OUTPUT_FORMAT_CHANGED examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by their respective developers; copyright remains with the original authors. Refer to each project's license before distributing or using the code; do not reproduce this article without permission.