Java MediaCodec.INFO_OUTPUT_FORMAT_CHANGED Field Code Examples

This article collects typical usage examples of the Java field android.media.MediaCodec.INFO_OUTPUT_FORMAT_CHANGED. If you are wondering what MediaCodec.INFO_OUTPUT_FORMAT_CHANGED does, how to use it, or want to see it in real code, the selected examples below should help. You can also explore further usage examples of android.media.MediaCodec, the class this field belongs to.


The 15 code examples of MediaCodec.INFO_OUTPUT_FORMAT_CHANGED below are sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
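Before the collected examples, here is a minimal sketch of the pattern they all share (it is not taken from any of the projects below, and the helpers onFormatChanged and consumeBuffer are placeholders): MediaCodec#dequeueOutputBuffer() returns either a real output-buffer index (>= 0) or one of several negative status codes. MediaCodec.INFO_OUTPUT_FORMAT_CHANGED is the status code signaling that the output MediaFormat has changed (or has just become known), so the caller should re-read it with getOutputFormat(), typically to configure a MediaMuxer track.

import android.media.MediaCodec;
import android.media.MediaFormat;

import java.nio.ByteBuffer;

// Minimal sketch of one output-drain step; onFormatChanged/consumeBuffer are placeholders.
final class DrainSketch {
    private static final long TIMEOUT_US = 10_000;

    /** Drains one output buffer; returns true once end-of-stream has been reached. */
    static boolean drainOnce(MediaCodec codec, MediaCodec.BufferInfo info) {
        int index = codec.dequeueOutputBuffer(info, TIMEOUT_US);
        if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            return false;                                     // no output yet, try again later
        } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat format = codec.getOutputFormat();     // re-read the (new) output format
            onFormatChanged(format);                          // e.g. MediaMuxer#addTrack(format), exactly once
            return false;
        } else if (index >= 0) {
            ByteBuffer buffer = codec.getOutputBuffer(index); // API 21+; older code uses getOutputBuffers()[index]
            consumeBuffer(buffer, info);
            codec.releaseOutputBuffer(index, false);
            return (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
        }
        return false;                                         // other negative codes (e.g. buffers changed on old APIs)
    }

    private static void onFormatChanged(MediaFormat format) { /* placeholder */ }

    private static void consumeBuffer(ByteBuffer buffer, MediaCodec.BufferInfo info) { /* placeholder */ }
}

On API levels below 21 the same status codes apply, but buffers come from getOutputBuffers() and INFO_OUTPUT_BUFFERS_CHANGED must also be handled, as several of the examples below do.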

Example 1: drainDecoder

private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;

    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            mAudioChannel.setActualDecodedFormat(mDecoder.getOutputFormat());
            // intentional fall-through: a format change should also trigger an immediate retry
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsDecoderEOS = true;
        mAudioChannel.drainDecoderBufferAndQueue(AudioChannel.BUFFER_INDEX_END_OF_STREAM, 0);
    } else if (mBufferInfo.size > 0) {
        mAudioChannel.drainDecoderBufferAndQueue(result, mBufferInfo.presentationTimeUs);
    }

    return DRAIN_STATE_CONSUMED;
}
 
Developer ID: SavorGit, Project: Hotspot-master-devp, Lines: 22, Source file: AudioTrackTranscoder.java

Example 2: recorderEncoderLoop

/**
 * Reads bytes from the given recorder and encodes them with the given encoder.
 * Uses the (deprecated) synchronous processing with buffer arrays.
 * <p/>
 * Encoders (or codecs that generate compressed data) create and return the codec-specific
 * data before any valid output, in output buffers marked with the codec-config flag.
 * Buffers containing codec-specific data have no meaningful timestamps.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void recorderEncoderLoop(MediaCodec codec, SpeechRecord speechRecord) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        codec.start();
        // Getting some buffers (e.g. 4 of each) to communicate with the codec
        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        Log.i("input buffers " + codecInputBuffers.length + "; output buffers: " + codecOutputBuffers.length);
        boolean doneSubmittingInput = false;
        int numRetriesDequeueOutputBuffer = 0;
        int index;
        while (true) {
            if (!doneSubmittingInput) {
                index = codec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
                if (index >= 0) {
                    int size = queueInputBuffer(codec, codecInputBuffers, index, speechRecord);
                    if (size == -1) {
                        codec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        Log.i("enc: in: EOS");
                        doneSubmittingInput = true;
                    } else {
                        Log.i("enc: in: " + size);
                        mNumBytesSubmitted += size;
                    }
                } else {
                    Log.i("enc: in: timeout, will try again");
                }
            }
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            index = codec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
            Log.i("enc: out: flags/index: " + info.flags + "/" + index);
            if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.i("enc: out: INFO_TRY_AGAIN_LATER: " + numRetriesDequeueOutputBuffer);
                if (++numRetriesDequeueOutputBuffer > MAX_NUM_RETRIES_DEQUEUE_OUTPUT_BUFFER) {
                    break;
                }
            } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat format = codec.getOutputFormat();
                Log.i("enc: out: INFO_OUTPUT_FORMAT_CHANGED: " + format.toString());
            } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                codecOutputBuffers = codec.getOutputBuffers();
                Log.i("enc: out: INFO_OUTPUT_BUFFERS_CHANGED");
            } else {
                dequeueOutputBuffer(codec, codecOutputBuffers, index, info);
                mNumBytesDequeued += info.size;
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.i("enc: out: EOS");
                    break;
                }
            }
        }
        codec.stop();
        codec.release();
    }
}
 
Developer ID: vaibhavs4424, Project: AI-Powered-Intelligent-Banking-Platform, Lines: 63, Source file: EncodedAudioRecorder.java

Example 3: recordVirtualDisplay

private void recordVirtualDisplay() {
    while (isStart) {
        int index = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);
        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            resetOutputFormat();

        } else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            try {
                // wait 10ms
                Thread.sleep(10);
            } catch (InterruptedException e) {
            }
        } else if (index >= 0) {
            if (!mMuxerStarted) {
                throw new IllegalStateException("MediaMuxer has not started: addTrack(format) has not been called");
            }
            encodeToVideoTrack(index);

            mEncoder.releaseOutputBuffer(index, false);
        }
    }
}
 
Developer ID: sunshinecoast, Project: ScreenRecordCaptureMaster, Lines: 22, Source file: YixiaScreenEncoder.java

Example 4: recordVirtualDisplay

@RequiresApi(api = Build.VERSION_CODES.LOLLIPOP)
private void recordVirtualDisplay() {
    while (!mQuit.get()) {
        int index = mEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);
        Log.i(TAG, "dequeue output buffer index=" + index);
        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            resetOutputFormat();

        } else if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
            Log.d(TAG, "retrieving buffers time out!");
            try {
                // wait 10ms
                Thread.sleep(10);
            } catch (InterruptedException e) {
            }
        } else if (index >= 0) {

            if (!mMuxerStarted) {
                throw new IllegalStateException("MediaMuxer has not started: addTrack(format) has not been called");
            }
            encodeToVideoTrack(index);

            mEncoder.releaseOutputBuffer(index, false);
        }
    }
}
 
Developer ID: SavorGit, Project: Hotspot-master-devp, Lines: 26, Source file: ScreenRecorder.java

Example 5: checkDecoderStatus

/**
 * Checks the return value of MediaCodec#dequeueOutputBuffer().
 *
 * @param decoderStatus return value of MediaCodec#dequeueOutputBuffer()
 * @return true if a frame was decoded
 */
private boolean checkDecoderStatus(int decoderStatus) {
    if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
        // the dequeueOutputBuffer call timed out
        if (mInputDone) {
            Log.d(TAG, "no output from mDecoder available BUT the input is done.");
        }
    } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        Log.d(TAG, "mDecoder output buffers changed");
    } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        Log.d(TAG, "mDecoder output format changed");
    } else if (decoderStatus < 0) {
        Log.d(TAG, "unexpected result from encoder.dequeueOutputBuffer: "
                + decoderStatus);
    } else {
        return true;
    }

    return false;
}
 
Developer ID: ficklerobot, Project: grid-video-viewer, Lines: 25, Source file: DecodeThread.java

Example 6: videoDecodeStep

private boolean videoDecodeStep(){
    int mInputIndex=mVideoDecoder.dequeueInputBuffer(TIME_OUT);
    if(mInputIndex>=0){
        ByteBuffer buffer=getInputBuffer(mVideoDecoder,mInputIndex);
        buffer.clear();
        synchronized (Extractor_LOCK) {
            mExtractor.selectTrack(mVideoDecoderTrack);
            int ret = mExtractor.readSampleData(buffer, 0);
            if (ret != -1) {
                mVideoStopTimeStamp=mExtractor.getSampleTime();
                AvLog.d("mVideoStopTimeStamp:"+mVideoStopTimeStamp);
                mVideoDecoder.queueInputBuffer(mInputIndex, 0, ret, mVideoStopTimeStamp, mExtractor.getSampleFlags());
            }
            isVideoExtractorEnd = !mExtractor.advance();
        }
    }
    while (true){
        int mOutputIndex=mVideoDecoder.dequeueOutputBuffer(mVideoDecoderBufferInfo,TIME_OUT);
        if(mOutputIndex>=0){
            try {
                AvLog.d(" mDecodeSem.acquire ");
                if(!isUserWantToStop){
                    mDecodeSem.acquire();
                }
                AvLog.d(" mDecodeSem.acquire end ");
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            codecNum++;
            mVideoDecoder.releaseOutputBuffer(mOutputIndex,true);
            mSem.release();

        }else if(mOutputIndex== MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
            //MediaFormat format=mVideoDecoder.getOutputFormat();
        }else if(mOutputIndex== MediaCodec.INFO_TRY_AGAIN_LATER){
            break;
        }
    }
    return isVideoExtractorEnd||isUserWantToStop;
}
 
Developer ID: aiyaapp, Project: AAVT, Lines: 40, Source file: Mp4Processor.java

Example 7: videoEncodeStep

private boolean videoEncodeStep(boolean isEnd){
    if(isEnd){
        mVideoEncoder.signalEndOfInputStream();
    }
    while (true){
        int mOutputIndex=mVideoEncoder.dequeueOutputBuffer(mVideoEncoderBufferInfo,TIME_OUT);
        AvLog.d("videoEncodeStep-------------------mOutputIndex="+mOutputIndex+"/"+mVideoEncoderBufferInfo.presentationTimeUs);
        if(mOutputIndex>=0){
            ByteBuffer buffer=getOutputBuffer(mVideoEncoder,mOutputIndex);
            if(mVideoEncoderBufferInfo.size>0){
                mMuxer.writeSampleData(mVideoEncoderTrack,buffer,mVideoEncoderBufferInfo);
            }
            mVideoEncoder.releaseOutputBuffer(mOutputIndex,false);
        }else if(mOutputIndex== MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
            MediaFormat format=mVideoEncoder.getOutputFormat();
            AvLog.d("video format -->"+format.toString());
            mVideoEncoderTrack=mMuxer.addTrack(format);
            mMuxer.start();
            synchronized (MUX_LOCK){
                MUX_LOCK.notifyAll();
            }
        }else if(mOutputIndex== MediaCodec.INFO_TRY_AGAIN_LATER){
            break;
        }
    }
    return false;
}
 
Developer ID: aiyaapp, Project: AAVT, Lines: 27, Source file: Mp4Processor.java

Example 8: deliverDecodedFrame

private void deliverDecodedFrame() {
  outputThreadChecker.checkIsOnValidThread();
  try {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    // Block until an output buffer is available (up to 100 milliseconds).  If the timeout is
    // exceeded, deliverDecodedFrame() will be called again on the next iteration of the output
    // thread's loop.  Blocking here prevents the output thread from busy-waiting while the codec
    // is idle.
    int result = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
    if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
      reformat(codec.getOutputFormat());
      return;
    }

    if (result < 0) {
      Logging.v(TAG, "dequeueOutputBuffer returned " + result);
      return;
    }

    FrameInfo frameInfo = frameInfos.poll();
    Integer decodeTimeMs = null;
    int rotation = 0;
    if (frameInfo != null) {
      decodeTimeMs = (int) (SystemClock.elapsedRealtime() - frameInfo.decodeStartTimeMs);
      rotation = frameInfo.rotation;
    }

    hasDecodedFirstFrame = true;

    if (surfaceTextureHelper != null) {
      deliverTextureFrame(result, info, rotation, decodeTimeMs);
    } else {
      deliverByteFrame(result, info, rotation, decodeTimeMs);
    }

  } catch (IllegalStateException e) {
    Logging.e(TAG, "deliverDecodedFrame failed", e);
  }
}
 
Developer ID: Piasy, Project: AppRTC-Android, Lines: 39, Source file: HardwareVideoDecoder.java

Example 9: videoDecodeStep

private boolean videoDecodeStep(){
    int mInputIndex=mVideoDecoder.dequeueInputBuffer(TIME_OUT);
    if(mInputIndex>=0){
        ByteBuffer buffer= CodecUtil.getInputBuffer(mVideoDecoder,mInputIndex);
        buffer.clear();
        synchronized (Extractor_LOCK) {
            mExtractor.selectTrack(mVideoDecodeTrack);
            int ret = mExtractor.readSampleData(buffer, 0);
            if (ret != -1) {
                mVideoStopTimeStamp=mExtractor.getSampleTime();
                mVideoDecoder.queueInputBuffer(mInputIndex, 0, ret, mVideoStopTimeStamp, mExtractor.getSampleFlags());
                isVideoExtractorEnd = false;
            }else{
                // We could check !mExtractor.advance() instead, but that seems to lag by one frame; readSampleData() returning -1 also means there is no more data.
                isVideoExtractorEnd = true;
            }
            mExtractor.advance();
        }
    }
    while (true){
        int mOutputIndex=mVideoDecoder.dequeueOutputBuffer(videoDecodeBufferInfo,TIME_OUT);
        if(mOutputIndex>=0){
            try {
                if(!isUserWantToStop){
                    mDecodeSem.acquire();
                }
            } catch (InterruptedException e) {
                e.printStackTrace();
            }
            nowTimeStamp=videoDecodeBufferInfo.presentationTimeUs;
            mVideoDecoder.releaseOutputBuffer(mOutputIndex,true);
            mFrameSem.release();
        }else if(mOutputIndex== MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){

        }else if(mOutputIndex== MediaCodec.INFO_TRY_AGAIN_LATER){
            break;
        }
    }
    return isVideoExtractorEnd||isUserWantToStop;
}
 
Developer ID: aiyaapp, Project: AAVT, Lines: 40, Source file: Mp4Provider.java

Example 10: audioEncodeStep

private synchronized boolean audioEncodeStep(boolean isEnd){
    if(isStarted){
        AvLog.d("audioEncodeStep");
        int inputIndex=mAudioEncoder.dequeueInputBuffer(TIME_OUT);
        if(inputIndex>=0){
            ByteBuffer buffer= CodecUtil.getInputBuffer(mAudioEncoder,inputIndex);
            buffer.clear();
            long time= (SystemClock.elapsedRealtimeNanos()-startTime)/1000;
            int length=mRecord.read(buffer,mRecordBufferSize);
            if(length>=0){
                mAudioEncoder.queueInputBuffer(inputIndex,0,length,time,
                        isEnd?MediaCodec.BUFFER_FLAG_END_OF_STREAM:0);
            }
        }
        MediaCodec.BufferInfo info=new MediaCodec.BufferInfo();
        while (true){
            int outputIndex=mAudioEncoder.dequeueOutputBuffer(info,TIME_OUT);
            if(outputIndex>=0){
                if(mStore!=null){
                    mStore.addData(mAudioTrack,new HardMediaData(CodecUtil.getOutputBuffer(mAudioEncoder,outputIndex),info));
                }
                mAudioEncoder.releaseOutputBuffer(outputIndex,false);
                if((info.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){ // test the bit: flags may carry more than EOS
                    AvLog.d("CameraRecorder get audio encode end of stream");
                    stop();
                    return true;
                }
            }else if(outputIndex==MediaCodec.INFO_TRY_AGAIN_LATER){
                break;
            }else if(outputIndex==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
                AvLog.d("get audio output format changed ->"+mAudioEncoder.getOutputFormat().toString());
                mAudioTrack=mStore.addTrack(mAudioEncoder.getOutputFormat());
            }
        }
    }
    return false;
}
 
Developer ID: aiyaapp, Project: AAVT, Lines: 37, Source file: SoundRecorder.java

Example 11: drainEncoder

private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;

    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            if (mActualOutputFormat != null) {
                throw new RuntimeException("Audio output format changed twice.");
            }
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(SAMPLE_TYPE, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            mEncoderBuffers = new MediaCodecBufferCompatWrapper(mEncoder);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(SAMPLE_TYPE, mEncoderBuffers.getOutputBuffer(result), mBufferInfo);
    mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
 
Developer ID: SavorGit, Project: Hotspot-master-devp, Lines: 37, Source file: AudioTrackTranscoder.java

Example 12: drainEncoder

private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;
    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            if (mActualOutputFormat != null)
                throw new RuntimeException("Video output format changed twice.");
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(QueuedMuxer.SampleType.VIDEO, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            mEncoderOutputBuffers = mEncoder.getOutputBuffers();
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(QueuedMuxer.SampleType.VIDEO, mEncoderOutputBuffers[result], mBufferInfo);
    mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
 
Developer ID: SavorGit, Project: Hotspot-master-devp, Lines: 34, Source file: VideoTrackTranscoder.java

Example 13: sendToMediaMuxer

private void sendToMediaMuxer() {
    if (mAudioEncoder == null) {
        return;
    }

    final ByteBuffer[] outputBuffers = mAudioEncoder.getOutputBuffers();
    final int outputBufferIndex = mAudioEncoder.dequeueOutputBuffer(mBufferInfo, TIMEOUT_US);
    if (outputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        mAudioTrackIndex = mMuxer.addMediaTrack(mAudioEncoder.getOutputFormat());
        if (mAudioTrackIndex == -1) {
            return;
        }
    }
    if (outputBufferIndex >= 0) {
        if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
            // You should set the output format on the muxer here when targeting Android 4.3 or lower,
            // but MediaCodec#getOutputFormat cannot be called here (INFO_OUTPUT_FORMAT_CHANGED has not arrived yet),
            // so the output format would have to be extracted from the buffer data instead.
            // This sample targets API >= 18 (Android 4.3+), so the flag is simply ignored here.
            mBufferInfo.size = 0;
        }
        final ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
        mMuxer.writeMediaData(mAudioTrackIndex, outputBuffer, mBufferInfo);

        mAudioEncoder.releaseOutputBuffer(outputBufferIndex, false);
    }
}
 
Developer ID: sunshinecoast, Project: ScreenRecordCaptureMaster, Lines: 27, Source file: YXAudioEncoder.java

Example 14: drainEncoder

private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;

    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            if (mActualOutputFormat != null) {
                throw new RuntimeException("Audio output format changed twice.");
            }
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(QueuedMuxer.SampleType.AUDIO, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(QueuedMuxer.SampleType.AUDIO, MediaUtil.getOutputBuffer(mEncoder, result), mBufferInfo);
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
 
Developer ID: uestccokey, Project: EZFilter, Lines: 35, Source file: AudioTrackTranscoder.java

Example 15: getDataFromSurface

private void getDataFromSurface() {
  thread = new Thread(new Runnable() {
    @Override
    public void run() {
      while (!Thread.interrupted()) {
        ByteBuffer[] outputBuffers = videoEncoder.getOutputBuffers();
        for (; ; ) {
          int outBufferIndex = videoEncoder.dequeueOutputBuffer(videoInfo, 0);
          if (outBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            MediaFormat mediaFormat = videoEncoder.getOutputFormat();
            getH264Data.onVideoFormat(mediaFormat);
            getH264Data.onSPSandPPS(mediaFormat.getByteBuffer("csd-0"),
                mediaFormat.getByteBuffer("csd-1"));
            spsPpsSetted = true;
          } else if (outBufferIndex >= 0) {
            //This ByteBuffer is H264
            ByteBuffer bb = outputBuffers[outBufferIndex];
            if ((videoInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
              if (!spsPpsSetted) {
                Pair<ByteBuffer, ByteBuffer> buffers =
                    decodeSpsPpsFromBuffer(bb.duplicate(), videoInfo.size);
                if (buffers != null) {
                  getH264Data.onSPSandPPS(buffers.first, buffers.second);
                  spsPpsSetted = true;
                }
              }
            }
            videoInfo.presentationTimeUs = System.nanoTime() / 1000 - mPresentTimeUs;
            getH264Data.getH264Data(bb, videoInfo);
            videoEncoder.releaseOutputBuffer(outBufferIndex, false);
          } else {
            break;
          }
        }
      }
    }
  });
  thread.start();
}
 
Developer ID: pedroSG94, Project: rtmp-rtsp-stream-client-java, Lines: 39, Source file: VideoEncoder.java


Note: The android.media.MediaCodec.INFO_OUTPUT_FORMAT_CHANGED examples in this article were compiled by 純淨天空 from GitHub, MSDocs, and other open-source code and documentation platforms. The snippets were selected from open-source projects contributed by various developers; copyright in the source code remains with the original authors. For distribution and use, please follow the license of the corresponding project; do not reproduce without permission.