

Java MediaCodec.BUFFER_FLAG_END_OF_STREAM Code Examples

This article collects typical usage examples of the android.media.MediaCodec.BUFFER_FLAG_END_OF_STREAM constant in Java. If you have been wondering what MediaCodec.BUFFER_FLAG_END_OF_STREAM is for and how it is used in practice, the curated code examples below may help. You can also explore further usage examples of android.media.MediaCodec itself.


The following 15 code examples show how MediaCodec.BUFFER_FLAG_END_OF_STREAM is used; by default they are sorted by popularity.
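Before the examples, a minimal sketch of the two places this constant typically appears may be useful: it is passed to queueInputBuffer() to mark the end of input, and it is tested as a bit in BufferInfo.flags when draining output. The encoder parameter, the timeout value, and the omitted muxer handling below are illustrative assumptions, not code taken from any of the examples.

import android.media.MediaCodec;

final class EndOfStreamSketch {

    private static final long TIMEOUT_US = 10_000L; // assumed timeout, not taken from the examples

    /** Signals EOS on the encoder's input and drains output until EOS is echoed back. */
    static void finishEncoding(MediaCodec encoder) {
        // 1) End of input (ByteBuffer input path): queue an empty buffer carrying the flag.
        int inIndex = encoder.dequeueInputBuffer(TIMEOUT_US);
        if (inIndex >= 0) {
            encoder.queueInputBuffer(inIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        }
        // (With a Surface as input, call encoder.signalEndOfInputStream() instead.)

        // 2) Drain output until the codec reports end of stream.
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        boolean outputDone = false;
        while (!outputDone) {
            int outIndex = encoder.dequeueOutputBuffer(info, TIMEOUT_US);
            if (outIndex >= 0) {
                // ... write info.size bytes from the output buffer to a muxer or file here ...
                encoder.releaseOutputBuffer(outIndex, false);
                // Test the flag as a bit; other flags may be set on the same buffer.
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    outputDone = true;
                }
            }
            // Handling of INFO_TRY_AGAIN_LATER / INFO_OUTPUT_FORMAT_CHANGED is omitted here.
        }
    }
}

Note that the flag is a bit mask: several of the examples below compare BufferInfo.flags with == instead, which only works when no other flag happens to be set on the same buffer.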

Example 1: videoEncodeStep

private boolean videoEncodeStep(boolean isEnd){
    if(isEnd){
        mVideoEncoder.signalEndOfInputStream();
    }
    while (true){
        int outputIndex=mVideoEncoder.dequeueOutputBuffer(mVideoEncodeBufferInfo,TIME_OUT);
        if(outputIndex>=0){
            if(isMuxStarted&&mVideoEncodeBufferInfo.size>0&&mVideoEncodeBufferInfo.presentationTimeUs>0){
                mMuxer.writeSampleData(mVideoTrack,getOutputBuffer(mVideoEncoder,outputIndex),mVideoEncodeBufferInfo);
            }
            mVideoEncoder.releaseOutputBuffer(outputIndex,false);
            if(mVideoEncodeBufferInfo.flags==MediaCodec.BUFFER_FLAG_END_OF_STREAM){
                AvLog.d("CameraRecorder get video encode end of stream");
                return true;
            }
        }else if(outputIndex==MediaCodec.INFO_TRY_AGAIN_LATER){
            break;
        }else if(outputIndex==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
            AvLog.d("get video output format changed ->"+mVideoEncoder.getOutputFormat().toString());
            mVideoTrack=mMuxer.addTrack(mVideoEncoder.getOutputFormat());
            mMuxer.start();
            isMuxStarted=true;
        }
    }
    return false;
}
 
Developer: aiyaapp, Project: AAVT, Lines: 26, Source: CameraRecorder.java

Example 2: recorderEncoderLoop

/**
 * Reads bytes from the given recorder and encodes them with the given encoder.
 * Uses the (deprecated) synchronous processing mode with buffer arrays.
 * <p/>
 * Encoders (codecs that generate compressed data) create and return the codec-specific
 * data before any valid output buffer, in output buffers marked with the codec-config flag.
 * Buffers containing codec-specific data have no meaningful timestamps.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void recorderEncoderLoop(MediaCodec codec, SpeechRecord speechRecord) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        codec.start();
        // Getting some buffers (e.g. 4 of each) to communicate with the codec
        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        Log.i("input buffers " + codecInputBuffers.length + "; output buffers: " + codecOutputBuffers.length);
        boolean doneSubmittingInput = false;
        int numRetriesDequeueOutputBuffer = 0;
        int index;
        while (true) {
            if (!doneSubmittingInput) {
                index = codec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
                if (index >= 0) {
                    int size = queueInputBuffer(codec, codecInputBuffers, index, speechRecord);
                    if (size == -1) {
                        codec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        Log.i("enc: in: EOS");
                        doneSubmittingInput = true;
                    } else {
                        Log.i("enc: in: " + size);
                        mNumBytesSubmitted += size;
                    }
                } else {
                    Log.i("enc: in: timeout, will try again");
                }
            }
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            index = codec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
            Log.i("enc: out: flags/index: " + info.flags + "/" + index);
            if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.i("enc: out: INFO_TRY_AGAIN_LATER: " + numRetriesDequeueOutputBuffer);
                if (++numRetriesDequeueOutputBuffer > MAX_NUM_RETRIES_DEQUEUE_OUTPUT_BUFFER) {
                    break;
                }
            } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat format = codec.getOutputFormat();
                Log.i("enc: out: INFO_OUTPUT_FORMAT_CHANGED: " + format.toString());
            } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                codecOutputBuffers = codec.getOutputBuffers();
                Log.i("enc: out: INFO_OUTPUT_BUFFERS_CHANGED");
            } else {
                dequeueOutputBuffer(codec, codecOutputBuffers, index, info);
                mNumBytesDequeued += info.size;
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.i("enc: out: EOS");
                    break;
                }
            }
        }
        codec.stop();
        codec.release();
    }
}
 
Developer: vaibhavs4424, Project: AI-Powered-Intelligent-Banking-Platform, Lines: 63, Source: EncodedAudioRecorder.java

Example 3: drainDecoder

private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;

    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            mAudioChannel.setActualDecodedFormat(mDecoder.getOutputFormat());
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsDecoderEOS = true;
        mAudioChannel.drainDecoderBufferAndQueue(AudioChannel.BUFFER_INDEX_END_OF_STREAM, 0);
    } else if (mBufferInfo.size > 0) {
        mAudioChannel.drainDecoderBufferAndQueue(result, mBufferInfo.presentationTimeUs);
    }

    return DRAIN_STATE_CONSUMED;
}
 
Developer: uestccokey, Project: EZFilter, Lines: 22, Source: AudioTrackTranscoder.java

Example 4: audioEncodeStep

private boolean audioEncodeStep(boolean isEnd){
    if(isRecordAudioStarted){
        int inputIndex=mAudioEncoder.dequeueInputBuffer(TIME_OUT);
        if(inputIndex>=0){
            ByteBuffer buffer=getInputBuffer(mAudioEncoder,inputIndex);
            buffer.clear();
            long time=(System.currentTimeMillis()-BASE_TIME)*1000;
            int length=mAudioRecord.read(buffer,mRecordBufferSize);
            if(length>=0){
                mAudioEncoder.queueInputBuffer(inputIndex,0,length,time,
                        isEnd?MediaCodec.BUFFER_FLAG_END_OF_STREAM:0);
            }
        }
        while (true){
            int outputIndex=mAudioEncoder.dequeueOutputBuffer(mAudioEncodeBufferInfo,TIME_OUT);
            if(outputIndex>=0){
                // TODO: handle the issue of the first audio frame having a timestamp of 0
                if(isMuxStarted&&mAudioEncodeBufferInfo.size>0&&mAudioEncodeBufferInfo.presentationTimeUs>0){
                    mMuxer.writeSampleData(mAudioTrack,getOutputBuffer(mAudioEncoder,outputIndex),mAudioEncodeBufferInfo);
                }
                mAudioEncoder.releaseOutputBuffer(outputIndex,false);
                if(mAudioEncodeBufferInfo.flags==MediaCodec.BUFFER_FLAG_END_OF_STREAM){
                    AvLog.d("CameraRecorder get audio encode end of stream");
                    isTryStopAudio=false;
                    isRecordAudioStarted=false;
                    return true;
                }
            }else if(outputIndex==MediaCodec.INFO_TRY_AGAIN_LATER){
                break;
            }else if(outputIndex==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
                AvLog.d("get audio output format changed ->"+mAudioEncoder.getOutputFormat().toString());
                synchronized (VIDEO_LOCK){
                    mAudioTrack=mMuxer.addTrack(mAudioEncoder.getOutputFormat());
                    isRecordVideoStarted=true;
                }
            }
        }
    }
    return false;
}
 
Developer: aiyaapp, Project: AAVT, Lines: 40, Source: CameraRecorder.java

Example 5: audioDecodeStep

private boolean audioDecodeStep(){
    ByteBuffer buffer=ByteBuffer.allocate(1024*64);
    boolean isTimeEnd=false;
    if(isOpenAudio){
        buffer.clear();
        mExtractor.selectTrack(mAudioDecodeTrack);
        MediaCodec.BufferInfo info=new MediaCodec.BufferInfo();
        while (true){
            int length=mExtractor.readSampleData(buffer,0);
            if(length!=-1){
                int flags=mExtractor.getSampleFlags();
                boolean isAudioEnd=mExtractor.getSampleTime()>mVideoStopTimeStamp;
                info.size=length;
                info.flags=isAudioEnd?MediaCodec.BUFFER_FLAG_END_OF_STREAM:flags;
                info.presentationTimeUs=mExtractor.getSampleTime();
                info.offset=0;
                AvLog.d(tag,"audio sampleTime= "+info.presentationTimeUs+"/"+mVideoStopTimeStamp);
                isTimeEnd=mExtractor.getSampleTime()>mVideoStopTimeStamp;
                AvLog.d(tag,"is End= "+isAudioEnd );
                mStore.addData(mAudioEncodeTrack,new HardMediaData(buffer,info));
                if(isAudioEnd){
                    break;
                }
            }else{
                AvLog.d(tag,"is End= "+true );
                info.size=0;
                info.flags=MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                mStore.addData(mAudioEncodeTrack,new HardMediaData(buffer,info));
                isTimeEnd=true;
                break;
            }
            mExtractor.advance();
        }
    }
    return isTimeEnd;
}
 
Developer: aiyaapp, Project: AAVT, Lines: 36, Source: Mp4Provider.java

Example 6: videoEncodeStep

private synchronized boolean videoEncodeStep(boolean isEnd){
    AvLog.d(TAG,"videoEncodeStep:"+isEncodeStarted+"/"+isEnd);
    if(isEncodeStarted){
        if(isEnd){
            mVideoEncoder.signalEndOfInputStream();
        }
        MediaCodec.BufferInfo info=new MediaCodec.BufferInfo();
        while (true){
            int mOutputIndex=mVideoEncoder.dequeueOutputBuffer(info,TIME_OUT);
            AvLog.i(TAG,"videoEncodeStep:mOutputIndex="+mOutputIndex);
            if(mOutputIndex>=0){
                if((info.flags&MediaCodec.BUFFER_FLAG_CODEC_CONFIG)!=0){
                    info.size=0;
                }
                ByteBuffer buffer= CodecUtil.getOutputBuffer(mVideoEncoder,mOutputIndex);
                if(mStore!=null){
                    mStore.addData(mVideoTrack,new HardMediaData(buffer,info));
                }
                mVideoEncoder.releaseOutputBuffer(mOutputIndex,false);
                if((info.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
                    closeVideoEncoder();
                    isEncodeStarted=false;
                    AvLog.i(TAG,"videoEncodeStep: MediaCodec.BUFFER_FLAG_END_OF_STREAM ");
                    break;
                }
            }else if(mOutputIndex== MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
                MediaFormat format=mVideoEncoder.getOutputFormat();
                if(mStore!=null){
                    mVideoTrack=mStore.addTrack(format);
                }
            }else if(mOutputIndex== MediaCodec.INFO_TRY_AGAIN_LATER&&!isEnd){
                break;
            }
        }
    }
    return false;
}
 
Developer: aiyaapp, Project: AAVT, Lines: 37, Source: SurfaceEncoder.java

Example 7: audioEncodeStep

private synchronized boolean audioEncodeStep(boolean isEnd){
    if(isStarted){
        AvLog.d("audioEncodeStep");
        int inputIndex=mAudioEncoder.dequeueInputBuffer(TIME_OUT);
        if(inputIndex>=0){
            ByteBuffer buffer= CodecUtil.getInputBuffer(mAudioEncoder,inputIndex);
            buffer.clear();
            long time= (SystemClock.elapsedRealtimeNanos()-startTime)/1000;
            int length=mRecord.read(buffer,mRecordBufferSize);
            if(length>=0){
                mAudioEncoder.queueInputBuffer(inputIndex,0,length,time,
                        isEnd?MediaCodec.BUFFER_FLAG_END_OF_STREAM:0);
            }
        }
        MediaCodec.BufferInfo info=new MediaCodec.BufferInfo();
        while (true){
            int outputIndex=mAudioEncoder.dequeueOutputBuffer(info,TIME_OUT);
            if(outputIndex>=0){
                if(mStore!=null){
                    mStore.addData(mAudioTrack,new HardMediaData(CodecUtil.getOutputBuffer(mAudioEncoder,outputIndex),info));
                }
                mAudioEncoder.releaseOutputBuffer(outputIndex,false);
                if(info.flags==MediaCodec.BUFFER_FLAG_END_OF_STREAM){
                    AvLog.d("CameraRecorder get audio encode end of stream");
                    stop();
                    return true;
                }
            }else if(outputIndex==MediaCodec.INFO_TRY_AGAIN_LATER){
                break;
            }else if(outputIndex==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
                AvLog.d("get audio output format changed ->"+mAudioEncoder.getOutputFormat().toString());
                mAudioTrack=mStore.addTrack(mAudioEncoder.getOutputFormat());
            }
        }
    }
    return false;
}
 
Developer: aiyaapp, Project: AAVT, Lines: 37, Source: SoundRecorder.java

Example 8: drainEncoder

private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;

    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            if (mActualOutputFormat != null) {
                throw new RuntimeException("Audio output format changed twice.");
            }
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(SAMPLE_TYPE, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            mEncoderBuffers = new MediaCodecBufferCompatWrapper(mEncoder);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(SAMPLE_TYPE, mEncoderBuffers.getOutputBuffer(result), mBufferInfo);
    mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
 
Developer: SavorGit, Project: Hotspot-master-devp, Lines: 37, Source: AudioTrackTranscoder.java

Example 9: drainDecoder

private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;
    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mEncoder.signalEndOfInputStream();
        mIsDecoderEOS = true;
        mBufferInfo.size = 0;
    }
    boolean doRender = (mBufferInfo.size > 0);
    // NOTE: doRender will block if buffer (of encoder) is full.
    // Refer: http://bigflake.com/mediacodec/CameraToMpegTest.java.txt
    mDecoder.releaseOutputBuffer(result, doRender);
    if (doRender) {
        mDecoderOutputSurfaceWrapper.awaitNewImage();
        mDecoderOutputSurfaceWrapper.drawImage();
        mEncoderInputSurfaceWrapper.setPresentationTime(mBufferInfo.presentationTimeUs * 1000);
        mEncoderInputSurfaceWrapper.swapBuffers();
    }
    return DRAIN_STATE_CONSUMED;
}
 
Developer: SavorGit, Project: Hotspot-master-devp, Lines: 27, Source: VideoTrackTranscoder.java

Example 10: drainEncoder

private int drainEncoder(long timeoutUs) {
    if (mIsEncoderEOS) return DRAIN_STATE_NONE;
    int result = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            if (mActualOutputFormat != null)
                throw new RuntimeException("Video output format changed twice.");
            mActualOutputFormat = mEncoder.getOutputFormat();
            mMuxer.setOutputFormat(QueuedMuxer.SampleType.VIDEO, mActualOutputFormat);
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            mEncoderOutputBuffers = mEncoder.getOutputBuffers();
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // SPS or PPS, which should be passed by MediaFormat.
        mEncoder.releaseOutputBuffer(result, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(QueuedMuxer.SampleType.VIDEO, mEncoderOutputBuffers[result], mBufferInfo);
    mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
    mEncoder.releaseOutputBuffer(result, false);
    return DRAIN_STATE_CONSUMED;
}
 
Developer: SavorGit, Project: Hotspot-master-devp, Lines: 34, Source: VideoTrackTranscoder.java

Example 11: handleOutput

protected void handleOutput() {
    Log.d(TAG, TRACK_TYPE + " handle output ");
    if ((mState == STATE_PLAYING || mState == STATE_SEEKING) && !mOutputDone) {
        final int decoderStatus;
        try {
            decoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        } catch (IllegalStateException e) {
            Log.d(TAG, "can't dequeue output buffer: " + e.getMessage());
            return;
        }
        Log.d(TAG, TRACK_TYPE + " decoder status: " + decoderStatus);
        if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
        } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            final MediaFormat newFormat = mMediaCodec.getOutputFormat();
            if (DEBUG) Log.d(TAG, TRACK_TYPE + " decoder output format changed: " + newFormat);
        } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            if (DEBUG) Log.d(TAG, TRACK_TYPE + " decoder output buffer changed: ");
        } else if (decoderStatus < 0) {
            throw new RuntimeException(
                    "unexpected result from " + TRACK_TYPE + " decoder.dequeueOutputBuffer: " + decoderStatus);
        } else {
            Log.d(TAG, TRACK_TYPE + " Out put");
            output(decoderStatus, mBufferInfo);
        }
        if ((mBufferInfo.flags == MediaCodec.BUFFER_FLAG_END_OF_STREAM) && mState != STATE_SEEKING) {
            Log.d(TAG, TRACK_TYPE + ":output EOS");
            mBufferInfo = new MediaCodec.BufferInfo();
            mOutputDone = true;
            synchronized (mWeakPlayer.get().getSync()) {
                mWeakPlayer.get().getSync().notify();
            }
        }
    }
}
 
Developer: Tai-Kimura, Project: VideoApplication, Lines: 34, Source: MediaDecoder.java

Example 12: drainDecoder

private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;

    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mEncoder.signalEndOfInputStream();
        mIsDecoderEOS = true;
        mBufferInfo.size = 0;
    }
    boolean doRender = (mBufferInfo.size > 0);
    // NOTE: doRender will block if buffer (of encoder) is full.
    // Refer: http://bigflake.com/mediacodec/CameraToMpegTest.java.txt
    mDecoder.releaseOutputBuffer(result, doRender);
    if (doRender) {
        mDecoderOutputSurfaceWrapper.awaitNewImage();
        mDecoderOutputSurfaceWrapper.drawImage(mBufferInfo.presentationTimeUs * 1000);
        mEncoderInputSurfaceWrapper.setPresentationTime(mBufferInfo.presentationTimeUs * 1000);
        // After swapBuffers, the preceding OpenGL calls are rendered into the Surface bound to the input surface
        mEncoderInputSurfaceWrapper.swapBuffers();
    }

    return DRAIN_STATE_CONSUMED;
}
 
Developer: uestccokey, Project: EZFilter, Lines: 31, Source: VideoTrackTranscoder.java

Example 13: decodeVideoBuffer

/**
 * Decodes (plays back) the video.
 */
private void decodeVideoBuffer() {
    // Receive a decoded result from the MediaCodec
    int decodeStatus = mDecoder.dequeueOutputBuffer(mBufferinfo, BUFFER_TIMEOUT_USEC);

    if (checkDecoderStatus(decodeStatus)) {
        if ((mBufferinfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)
                != 0) {
            // Read the codec config section (no decoding has happened yet)
            Log.d(TAG, "mDecoder configured (" + mBufferinfo.size + " bytes)");
        } else if ((mBufferinfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)
                != 0) {
            // Decoded all the way to the end of the stream
            Log.d(TAG, "Decoder gets BUFFER_FLAG_END_OF_STREAM. ");
            mDecodeDone = true;
        } else if (mBufferinfo.presentationTimeUs > 0) {
            // Wait until the actual elapsed time catches up with the frame's timestamp
            while (mBufferinfo.presentationTimeUs / 1000 >
                    System.currentTimeMillis() - mStartMs) {
                try {
                    Thread.sleep(10);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }

            // Send the decoded buffer to the surface (renders the video frame)
            mDecoder.releaseOutputBuffer(decodeStatus, true);
        }
    }
}
 
Developer: ficklerobot, Project: grid-video-viewer, Lines: 33, Source: DecodeThread.java

Example 14: decodeVideo

private void decodeVideo() {
  ByteBuffer[] inputBuffers = videoDecoder.getInputBuffers();
  long startMs = System.currentTimeMillis();
  while (decoding) {
    int inIndex = videoDecoder.dequeueInputBuffer(10000);
    if (inIndex >= 0) {
      ByteBuffer buffer = inputBuffers[inIndex];
      int sampleSize = videoExtractor.readSampleData(buffer, 0);
      if (sampleSize < 0) {
        videoDecoder.queueInputBuffer(inIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        Log.i(TAG, "end of file in");
      } else {
        videoDecoder.queueInputBuffer(inIndex, 0, sampleSize, videoExtractor.getSampleTime(), 0);
        videoExtractor.advance();
      }
    }
    int outIndex = videoDecoder.dequeueOutputBuffer(videoInfo, 10000);
    if (outIndex >= 0) {
      //needed for fix decode speed
      while (videoInfo.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
        try {
          Thread.sleep(10);
        } catch (InterruptedException e) {
          thread.interrupt();
          break;
        }
      }
      videoDecoder.releaseOutputBuffer(outIndex, videoInfo.size != 0);
    }
    if ((videoInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
      Log.i(TAG, "end of file out");
      if (loopMode) {
        Log.i(TAG, "loop mode, restreaming file");
        videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        videoDecoder.flush();
      } else {
        videoDecoderInterface.onVideoDecoderFinished();
      }
    }
  }
}
 
Developer: pedroSG94, Project: rtmp-rtsp-stream-client-java, Lines: 41, Source: VideoDecoder.java

Example 15: drainOutputBuffer

/**
 * @return True if it may be possible to drain more output data. False otherwise.
 * @throws ExoPlaybackException If an error occurs draining the output buffer.
 */
@SuppressWarnings("deprecation")
private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs)
    throws ExoPlaybackException {
  if (outputStreamEnded) {
    return false;
  }

  if (outputIndex < 0) {
    outputIndex = codec.dequeueOutputBuffer(outputBufferInfo, getDequeueOutputBufferTimeoutUs());
  }

  if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    processOutputFormat();
    return true;
  } else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
    outputBuffers = codec.getOutputBuffers();
    codecCounters.outputBuffersChangedCount++;
    return true;
  } else if (outputIndex < 0) {
    if (codecNeedsEosPropagationWorkaround && (inputStreamEnded
        || codecReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM)) {
      processEndOfStream();
      return true;
    }
    return false;
  }

  if ((outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
    processEndOfStream();
    return false;
  }

  int decodeOnlyIndex = getDecodeOnlyIndex(outputBufferInfo.presentationTimeUs);
  if (processOutputBuffer(positionUs, elapsedRealtimeUs, codec, outputBuffers[outputIndex],
      outputBufferInfo, outputIndex, decodeOnlyIndex != -1)) {
    onProcessedOutputBuffer(outputBufferInfo.presentationTimeUs);
    if (decodeOnlyIndex != -1) {
      decodeOnlyPresentationTimestamps.remove(decodeOnlyIndex);
    }
    outputIndex = -1;
    return true;
  }

  return false;
}
 
Developer: MLNO, Project: airgram, Lines: 49, Source: MediaCodecTrackRenderer.java


Note: The android.media.MediaCodec.BUFFER_FLAG_END_OF_STREAM examples in this article were compiled by 純淨天空 from open-source code and documentation platforms such as GitHub and MSDocs. The code snippets were selected from open-source projects contributed by their respective authors; copyright remains with the original authors, and distribution and use are subject to each project's license. Do not reproduce without permission.