当前位置: 首页>>代码示例>>Java>>正文


Java MediaCodec.BUFFER_FLAG_END_OF_STREAM属性代码示例

本文整理汇总了Java中android.media.MediaCodec.BUFFER_FLAG_END_OF_STREAM属性的典型用法代码示例。如果您正苦于以下问题:Java MediaCodec.BUFFER_FLAG_END_OF_STREAM属性的具体用法?Java MediaCodec.BUFFER_FLAG_END_OF_STREAM怎么用?Java MediaCodec.BUFFER_FLAG_END_OF_STREAM使用的例子?那么, 这里精选的属性代码示例或许可以为您提供帮助。您也可以进一步了解该属性所在android.media.MediaCodec的用法示例。


在下文中一共展示了MediaCodec.BUFFER_FLAG_END_OF_STREAM属性的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: videoEncodeStep

/**
 * Drains any pending output from the video encoder and writes it to the muxer.
 *
 * @param isEnd whether the input side is finished; if so the encoder is told
 *              to stop accepting frames so it can flush its remaining output
 * @return true once the encoder has emitted its end-of-stream buffer
 */
private boolean videoEncodeStep(boolean isEnd){
    if(isEnd){
        // Surface-input encoder: signal that no more frames will be drawn.
        mVideoEncoder.signalEndOfInputStream();
    }
    while (true){
        int outputIndex=mVideoEncoder.dequeueOutputBuffer(mVideoEncodeBufferInfo,TIME_OUT);
        if(outputIndex>=0){
            // Skip empty buffers and zero-timestamp output; only write once the muxer is running.
            if(isMuxStarted&&mVideoEncodeBufferInfo.size>0&&mVideoEncodeBufferInfo.presentationTimeUs>0){
                mMuxer.writeSampleData(mVideoTrack,getOutputBuffer(mVideoEncoder,outputIndex),mVideoEncodeBufferInfo);
            }
            mVideoEncoder.releaseOutputBuffer(outputIndex,false);
            // FIX: test the EOS bit with '&' instead of '=='; BUFFER_FLAG_END_OF_STREAM
            // may be combined with other flags in the same buffer.
            if((mVideoEncodeBufferInfo.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
                AvLog.d("CameraRecorder get video encode end of stream");
                return true;
            }
        }else if(outputIndex==MediaCodec.INFO_TRY_AGAIN_LATER){
            break;
        }else if(outputIndex==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
            AvLog.d("get video output format changed ->"+mVideoEncoder.getOutputFormat().toString());
            mVideoTrack=mMuxer.addTrack(mVideoEncoder.getOutputFormat());
            mMuxer.start();
            isMuxStarted=true;
        }
    }
    return false;
}
 
开发者ID:aiyaapp,项目名称:AAVT,代码行数:26,代码来源:CameraRecorder.java

示例2: recorderEncoderLoop

/**
 * Reads bytes from the given recorder and encodes them with the given encoder.
 * Uses the (deprecated) Synchronous Processing using Buffer Arrays.
 * <p/>
 * Encoders (or codecs that generate compressed data) will create and return the codec specific
 * data before any valid output buffer in output buffers marked with the codec-config flag.
 * Buffers containing codec-specific-data have no meaningful timestamps.
 *
 * @param codec        configured audio encoder (started and released by this method)
 * @param speechRecord source of raw PCM audio
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void recorderEncoderLoop(MediaCodec codec, SpeechRecord speechRecord) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        codec.start();
        // Getting some buffers (e.g. 4 of each) to communicate with the codec
        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        Log.i("input buffers " + codecInputBuffers.length + "; output buffers: " + codecOutputBuffers.length);
        boolean doneSubmittingInput = false;
        int numRetriesDequeueOutputBuffer = 0;
        int index;
        // FIX: allocate the BufferInfo once instead of on every loop iteration;
        // dequeueOutputBuffer refills it in place.
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        while (true) {
            if (!doneSubmittingInput) {
                index = codec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
                if (index >= 0) {
                    int size = queueInputBuffer(codec, codecInputBuffers, index, speechRecord);
                    if (size == -1) {
                        // Recorder exhausted: submit an empty buffer flagged EOS.
                        codec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        Log.i("enc: in: EOS");
                        doneSubmittingInput = true;
                    } else {
                        Log.i("enc: in: " + size);
                        mNumBytesSubmitted += size;
                    }
                } else {
                    Log.i("enc: in: timeout, will try again");
                }
            }
            index = codec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
            Log.i("enc: out: flags/index: " + info.flags + "/" + index);
            if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.i("enc: out: INFO_TRY_AGAIN_LATER: " + numRetriesDequeueOutputBuffer);
                if (++numRetriesDequeueOutputBuffer > MAX_NUM_RETRIES_DEQUEUE_OUTPUT_BUFFER) {
                    break;
                }
            } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat format = codec.getOutputFormat();
                Log.i("enc: out: INFO_OUTPUT_FORMAT_CHANGED: " + format.toString());
            } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                codecOutputBuffers = codec.getOutputBuffers();
                Log.i("enc: out: INFO_OUTPUT_BUFFERS_CHANGED");
            } else if (index >= 0) {
                // FIX: only treat non-negative results as real buffer indices; the
                // original bare 'else' would pass any unknown negative status code
                // to dequeueOutputBuffer as if it were a buffer index.
                dequeueOutputBuffer(codec, codecOutputBuffers, index, info);
                mNumBytesDequeued += info.size;
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.i("enc: out: EOS");
                    break;
                }
            }
        }
        codec.stop();
        codec.release();
    }
}
 
开发者ID:vaibhavs4424,项目名称:AI-Powered-Intelligent-Banking-Platform,代码行数:63,代码来源:EncodedAudioRecorder.java

示例3: drainDecoder

/**
 * Pulls one decoded audio buffer (or status code) from the decoder and hands
 * it to the audio channel.
 *
 * @param timeoutUs how long to wait for output, in microseconds
 * @return a DRAIN_STATE_* constant describing whether the caller should retry
 */
private int drainDecoder(long timeoutUs) {
    // Nothing more to do once the decoder has emitted end-of-stream.
    if (mIsDecoderEOS) {
        return DRAIN_STATE_NONE;
    }

    final int bufferIndex = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    if (bufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
        return DRAIN_STATE_NONE;
    }
    if (bufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // Propagate the decoder's actual output format, then retry at once.
        mAudioChannel.setActualDecodedFormat(mDecoder.getOutputFormat());
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if (bufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsDecoderEOS = true;
        mAudioChannel.drainDecoderBufferAndQueue(AudioChannel.BUFFER_INDEX_END_OF_STREAM, 0);
    } else if (mBufferInfo.size > 0) {
        mAudioChannel.drainDecoderBufferAndQueue(bufferIndex, mBufferInfo.presentationTimeUs);
    }

    return DRAIN_STATE_CONSUMED;
}
 
开发者ID:uestccokey,项目名称:EZFilter,代码行数:22,代码来源:AudioTrackTranscoder.java

示例4: audioEncodeStep

/**
 * Feeds one chunk of recorded PCM into the audio encoder and drains any
 * pending encoded output to the muxer.
 *
 * @param isEnd whether this is the last chunk; flags the input buffer EOS
 * @return true once the encoder has emitted its end-of-stream buffer
 */
private boolean audioEncodeStep(boolean isEnd){
    if(isRecordAudioStarted){
        int inputIndex=mAudioEncoder.dequeueInputBuffer(TIME_OUT);
        if(inputIndex>=0){
            ByteBuffer buffer=getInputBuffer(mAudioEncoder,inputIndex);
            buffer.clear();
            long time=(System.currentTimeMillis()-BASE_TIME)*1000;
            int length=mAudioRecord.read(buffer,mRecordBufferSize);
            if(length>=0){
                mAudioEncoder.queueInputBuffer(inputIndex,0,length,time,
                        isEnd?MediaCodec.BUFFER_FLAG_END_OF_STREAM:0);
            }
        }
        while (true){
            int outputIndex=mAudioEncoder.dequeueOutputBuffer(mAudioEncodeBufferInfo,TIME_OUT);
            if(outputIndex>=0){
                // TODO: the first audio frame has presentationTimeUs == 0 and is
                // skipped by the check below.
                if(isMuxStarted&&mAudioEncodeBufferInfo.size>0&&mAudioEncodeBufferInfo.presentationTimeUs>0){
                    mMuxer.writeSampleData(mAudioTrack,getOutputBuffer(mAudioEncoder,outputIndex),mAudioEncodeBufferInfo);
                }
                mAudioEncoder.releaseOutputBuffer(outputIndex,false);
                // FIX: test the EOS bit with '&' instead of '=='; BUFFER_FLAG_END_OF_STREAM
                // may be combined with other flags in the same buffer.
                if((mAudioEncodeBufferInfo.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
                    AvLog.d("CameraRecorder get audio encode end of stream");
                    isTryStopAudio=false;
                    isRecordAudioStarted=false;
                    return true;
                }
            }else if(outputIndex==MediaCodec.INFO_TRY_AGAIN_LATER){
                break;
            }else if(outputIndex==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
                AvLog.d("get audio output format changed ->"+mAudioEncoder.getOutputFormat().toString());
                // The audio track is added first; the video side waits on VIDEO_LOCK.
                synchronized (VIDEO_LOCK){
                    mAudioTrack=mMuxer.addTrack(mAudioEncoder.getOutputFormat());
                    isRecordVideoStarted=true;
                }
            }
        }
    }
    return false;
}
 
开发者ID:aiyaapp,项目名称:AAVT,代码行数:40,代码来源:CameraRecorder.java

示例5: audioDecodeStep

/**
 * Pumps compressed audio samples from the extractor into mStore until the
 * audio timestamp passes mVideoStopTimeStamp or the track is exhausted.
 *
 * @return true when the audio stream reached (or passed) the video stop time
 */
private boolean audioDecodeStep(){
    // NOTE(review): the same 64 KiB buffer instance is handed to every
    // HardMediaData added below; each readSampleData overwrites the previous
    // sample's bytes — confirm mStore copies the data before the next read.
    ByteBuffer buffer=ByteBuffer.allocate(1024*64);
    boolean isTimeEnd=false;
    if(isOpenAudio){
        buffer.clear();
        mExtractor.selectTrack(mAudioDecodeTrack);
        MediaCodec.BufferInfo info=new MediaCodec.BufferInfo();
        while (true){
            int length=mExtractor.readSampleData(buffer,0);
            if(length!=-1){
                int flags=mExtractor.getSampleFlags();
                // Past the video's stop time: mark this sample as end-of-stream.
                boolean isAudioEnd=mExtractor.getSampleTime()>mVideoStopTimeStamp;
                info.size=length;
                info.flags=isAudioEnd?MediaCodec.BUFFER_FLAG_END_OF_STREAM:flags;
                info.presentationTimeUs=mExtractor.getSampleTime();
                info.offset=0;
                AvLog.d(tag,"audio sampleTime= "+info.presentationTimeUs+"/"+mVideoStopTimeStamp);
                // Same comparison as isAudioEnd; the extractor position has not moved.
                isTimeEnd=mExtractor.getSampleTime()>mVideoStopTimeStamp;
                AvLog.d(tag,"is End= "+isAudioEnd );
                mStore.addData(mAudioEncodeTrack,new HardMediaData(buffer,info));
                if(isAudioEnd){
                    break;
                }
            }else{
                // Track exhausted (readSampleData returned -1): emit an empty EOS sample.
                AvLog.d(tag,"is End= "+true );
                info.size=0;
                info.flags=MediaCodec.BUFFER_FLAG_END_OF_STREAM;
                mStore.addData(mAudioEncodeTrack,new HardMediaData(buffer,info));
                isTimeEnd=true;
                break;
            }
            // Only advance when the current sample was consumed without hitting EOS.
            mExtractor.advance();
        }
    }
    return isTimeEnd;
}
 
开发者ID:aiyaapp,项目名称:AAVT,代码行数:36,代码来源:Mp4Provider.java

示例6: videoEncodeStep

/**
 * Drains pending output from the video encoder into mStore.
 *
 * @param isEnd whether the input side is finished; triggers EOS signalling and
 *              a blocking drain until the encoder emits its EOS buffer
 * @return always false — NOTE(review): returns false even after EOS was
 *         consumed; confirm callers expect this.
 */
private synchronized boolean videoEncodeStep(boolean isEnd){
    AvLog.d(TAG,"videoEncodeStep:"+isEncodeStarted+"/"+isEnd);
    if(isEncodeStarted){
        if(isEnd){
            // Surface-input encoder: no more frames will be drawn.
            mVideoEncoder.signalEndOfInputStream();
        }
        MediaCodec.BufferInfo info=new MediaCodec.BufferInfo();
        while (true){
            int mOutputIndex=mVideoEncoder.dequeueOutputBuffer(info,TIME_OUT);
            AvLog.i(TAG,"videoEncodeStep:mOutputIndex="+mOutputIndex);
            if(mOutputIndex>=0){
                if((info.flags&MediaCodec.BUFFER_FLAG_CODEC_CONFIG)!=0){
                    // Codec config (CSD) travels in the MediaFormat; zero the size
                    // so downstream consumers ignore this buffer's payload.
                    info.size=0;
                }
                ByteBuffer buffer= CodecUtil.getOutputBuffer(mVideoEncoder,mOutputIndex);
                if(mStore!=null){
                    mStore.addData(mVideoTrack,new HardMediaData(buffer,info));
                }
                mVideoEncoder.releaseOutputBuffer(mOutputIndex,false);
                if((info.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
                    closeVideoEncoder();
                    isEncodeStarted=false;
                    AvLog.i(TAG,"videoEncodeStep: MediaCodec.BUFFER_FLAG_END_OF_STREAM ");
                    break;
                }
            }else if(mOutputIndex== MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
                MediaFormat format=mVideoEncoder.getOutputFormat();
                if(mStore!=null){
                    mVideoTrack=mStore.addTrack(format);
                }
            }else if(mOutputIndex== MediaCodec.INFO_TRY_AGAIN_LATER&&!isEnd){
                // Yield to the caller when no output is ready — unless we are
                // finishing (isEnd), in which case keep polling until EOS arrives.
                break;
            }
        }
    }
    return false;
}
 
开发者ID:aiyaapp,项目名称:AAVT,代码行数:37,代码来源:SurfaceEncoder.java

示例7: audioEncodeStep

/**
 * Feeds one chunk of recorded audio into the encoder and drains any pending
 * encoded output into mStore.
 *
 * @param isEnd whether this is the last chunk; flags the input buffer EOS
 * @return true once the encoder has emitted its end-of-stream buffer
 */
private synchronized boolean audioEncodeStep(boolean isEnd){
    if(isStarted){
        AvLog.d("audioEncodeStep");
        int inputIndex=mAudioEncoder.dequeueInputBuffer(TIME_OUT);
        if(inputIndex>=0){
            ByteBuffer buffer= CodecUtil.getInputBuffer(mAudioEncoder,inputIndex);
            buffer.clear();
            // Timestamp in microseconds relative to the recording start.
            long time= (SystemClock.elapsedRealtimeNanos()-startTime)/1000;
            int length=mRecord.read(buffer,mRecordBufferSize);
            if(length>=0){
                mAudioEncoder.queueInputBuffer(inputIndex,0,length,time,
                        isEnd?MediaCodec.BUFFER_FLAG_END_OF_STREAM:0);
            }
        }
        MediaCodec.BufferInfo info=new MediaCodec.BufferInfo();
        while (true){
            int outputIndex=mAudioEncoder.dequeueOutputBuffer(info,TIME_OUT);
            if(outputIndex>=0){
                if(mStore!=null){
                    mStore.addData(mAudioTrack,new HardMediaData(CodecUtil.getOutputBuffer(mAudioEncoder,outputIndex),info));
                }
                mAudioEncoder.releaseOutputBuffer(outputIndex,false);
                // FIX: test the EOS bit with '&' instead of '=='; BUFFER_FLAG_END_OF_STREAM
                // may be combined with other flags in the same buffer.
                if((info.flags&MediaCodec.BUFFER_FLAG_END_OF_STREAM)!=0){
                    AvLog.d("CameraRecorder get audio encode end of stream");
                    stop();
                    return true;
                }
            }else if(outputIndex==MediaCodec.INFO_TRY_AGAIN_LATER){
                break;
            }else if(outputIndex==MediaCodec.INFO_OUTPUT_FORMAT_CHANGED){
                AvLog.d("get audio output format changed ->"+mAudioEncoder.getOutputFormat().toString());
                // FIX: guard mStore against null, consistent with the addData call above.
                if(mStore!=null){
                    mAudioTrack=mStore.addTrack(mAudioEncoder.getOutputFormat());
                }
            }
        }
    }
    return false;
}
 
开发者ID:aiyaapp,项目名称:AAVT,代码行数:37,代码来源:SoundRecorder.java

示例8: drainEncoder

/**
 * Pulls one encoded audio buffer (or status code) from the encoder and
 * writes it to the muxer.
 *
 * @param timeoutUs how long to wait for output, in microseconds
 * @return a DRAIN_STATE_* constant describing whether the caller should retry
 */
private int drainEncoder(long timeoutUs) {
    // Nothing left to drain once the encoder has reported end-of-stream.
    if (mIsEncoderEOS) {
        return DRAIN_STATE_NONE;
    }

    final int bufferIndex = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);

    if (bufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
        return DRAIN_STATE_NONE;
    } else if (bufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // The audio output format may only be determined once.
        if (mActualOutputFormat != null) {
            throw new RuntimeException("Audio output format changed twice.");
        }
        mActualOutputFormat = mEncoder.getOutputFormat();
        mMuxer.setOutputFormat(SAMPLE_TYPE, mActualOutputFormat);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    } else if (bufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        mEncoderBuffers = new MediaCodecBufferCompatWrapper(mEncoder);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // Codec-specific data (SPS/PPS) travels via MediaFormat; skip the buffer.
        mEncoder.releaseOutputBuffer(bufferIndex, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(SAMPLE_TYPE, mEncoderBuffers.getOutputBuffer(bufferIndex), mBufferInfo);
    mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
    mEncoder.releaseOutputBuffer(bufferIndex, false);
    return DRAIN_STATE_CONSUMED;
}
 
开发者ID:SavorGit,项目名称:Hotspot-master-devp,代码行数:37,代码来源:AudioTrackTranscoder.java

示例9: drainDecoder

/**
 * Pulls one decoded video frame (or status code) from the decoder and,
 * when appropriate, renders it through to the encoder's input surface.
 *
 * @param timeoutUs how long to wait for output, in microseconds
 * @return a DRAIN_STATE_* constant describing whether the caller should retry
 */
private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) {
        return DRAIN_STATE_NONE;
    }
    final int bufferIndex = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    if (bufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
        return DRAIN_STATE_NONE;
    }
    if (bufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED
            || bufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        // Forward EOS to the encoder and drop this (empty) buffer.
        mEncoder.signalEndOfInputStream();
        mIsDecoderEOS = true;
        mBufferInfo.size = 0;
    }
    final boolean render = mBufferInfo.size > 0;
    // NOTE: rendering may block while the encoder's input surface is full.
    // Refer: http://bigflake.com/mediacodec/CameraToMpegTest.java.txt
    mDecoder.releaseOutputBuffer(bufferIndex, render);
    if (render) {
        mDecoderOutputSurfaceWrapper.awaitNewImage();
        mDecoderOutputSurfaceWrapper.drawImage();
        mEncoderInputSurfaceWrapper.setPresentationTime(mBufferInfo.presentationTimeUs * 1000);
        mEncoderInputSurfaceWrapper.swapBuffers();
    }
    return DRAIN_STATE_CONSUMED;
}
 
开发者ID:SavorGit,项目名称:Hotspot-master-devp,代码行数:27,代码来源:VideoTrackTranscoder.java

示例10: drainEncoder

/**
 * Pulls one encoded video buffer (or status code) from the encoder and
 * writes it to the muxer.
 *
 * @param timeoutUs how long to wait for output, in microseconds
 * @return a DRAIN_STATE_* constant describing whether the caller should retry
 */
private int drainEncoder(long timeoutUs) {
    // Nothing left to drain once the encoder has reported end-of-stream.
    if (mIsEncoderEOS) {
        return DRAIN_STATE_NONE;
    }

    final int bufferIndex = mEncoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);

    if (bufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
        return DRAIN_STATE_NONE;
    } else if (bufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        // The video output format may only be determined once.
        if (mActualOutputFormat != null) {
            throw new RuntimeException("Video output format changed twice.");
        }
        mActualOutputFormat = mEncoder.getOutputFormat();
        mMuxer.setOutputFormat(QueuedMuxer.SampleType.VIDEO, mActualOutputFormat);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    } else if (bufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        mEncoderOutputBuffers = mEncoder.getOutputBuffers();
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if (mActualOutputFormat == null) {
        throw new RuntimeException("Could not determine actual output format.");
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsEncoderEOS = true;
        mBufferInfo.set(0, 0, 0, mBufferInfo.flags);
    }
    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
        // Codec-specific data (SPS/PPS) travels via MediaFormat; skip the buffer.
        mEncoder.releaseOutputBuffer(bufferIndex, false);
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }
    mMuxer.writeSampleData(QueuedMuxer.SampleType.VIDEO, mEncoderOutputBuffers[bufferIndex], mBufferInfo);
    mWrittenPresentationTimeUs = mBufferInfo.presentationTimeUs;
    mEncoder.releaseOutputBuffer(bufferIndex, false);
    return DRAIN_STATE_CONSUMED;
}
 
开发者ID:SavorGit,项目名称:Hotspot-master-devp,代码行数:34,代码来源:VideoTrackTranscoder.java

示例11: handleOutput

/**
 * Processes one decoder output event while playing or seeking: hands real
 * buffers to {@code output(...)} and, on end-of-stream, marks output done and
 * wakes the player's sync object.
 */
protected void handleOutput() {
    Log.d(TAG, TRACK_TYPE + " handle output ");
    if ((mState == STATE_PLAYING || mState == STATE_SEEKING) && !mOutputDone) {
        final int decoderStatus;
        try {
            decoderStatus = mMediaCodec.dequeueOutputBuffer(mBufferInfo, TIMEOUT_USEC);
        } catch (IllegalStateException e) {
            Log.d(TAG, "can't dequeue output buffer: " + e.getMessage());
            return;
        }
        Log.d(TAG, TRACK_TYPE + " decoder status: " + decoderStatus);
        if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
            // No output available yet; still fall through to the EOS check below.
        } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            final MediaFormat newFormat = mMediaCodec.getOutputFormat();
            if (DEBUG) Log.d(TAG, TRACK_TYPE + " decoder output format changed: " + newFormat);
        } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            if (DEBUG) Log.d(TAG, TRACK_TYPE + " decoder output buffer changed: ");
        } else if (decoderStatus < 0) {
            throw new RuntimeException(
                    "unexpected result from " + TRACK_TYPE + " decoder.dequeueOutputBuffer: " + decoderStatus);
        } else {
            Log.d(TAG, TRACK_TYPE + " Out put");
            output(decoderStatus, mBufferInfo);
        }
        // FIX: test the EOS bit with '&' instead of '=='; BUFFER_FLAG_END_OF_STREAM
        // may be combined with other flags in the same buffer.
        if (((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) && mState != STATE_SEEKING) {
            Log.d(TAG, TRACK_TYPE + ":output EOS");
            mBufferInfo = new MediaCodec.BufferInfo();
            mOutputDone = true;
            // FIX: the player is held through a WeakReference and may have been
            // collected; guard against a null referent before notifying.
            final Object playerSync = (mWeakPlayer.get() != null) ? mWeakPlayer.get().getSync() : null;
            if (playerSync != null) {
                synchronized (playerSync) {
                    playerSync.notify();
                }
            }
        }
    }
}
 
开发者ID:Tai-Kimura,项目名称:VideoApplication,代码行数:34,代码来源:MediaDecoder.java

示例12: drainDecoder

/**
 * Pulls one decoded video frame (or status code) from the decoder and,
 * when appropriate, renders it through to the encoder's input surface.
 *
 * @param timeoutUs how long to wait for output, in microseconds
 * @return a DRAIN_STATE_* constant describing whether the caller should retry
 */
private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) {
        return DRAIN_STATE_NONE;
    }

    final int bufferIndex = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    if (bufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
        return DRAIN_STATE_NONE;
    }
    if (bufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED
            || bufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
        return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        // Forward EOS to the encoder and drop this (empty) buffer.
        mEncoder.signalEndOfInputStream();
        mIsDecoderEOS = true;
        mBufferInfo.size = 0;
    }
    final boolean render = mBufferInfo.size > 0;
    // NOTE: rendering may block while the encoder's input surface is full.
    // Refer: http://bigflake.com/mediacodec/CameraToMpegTest.java.txt
    mDecoder.releaseOutputBuffer(bufferIndex, render);
    if (render) {
        mDecoderOutputSurfaceWrapper.awaitNewImage();
        mDecoderOutputSurfaceWrapper.drawImage(mBufferInfo.presentationTimeUs * 1000);
        mEncoderInputSurfaceWrapper.setPresentationTime(mBufferInfo.presentationTimeUs * 1000);
        // GL commands issued before swapBuffers render into the encoder's input surface.
        mEncoderInputSurfaceWrapper.swapBuffers();
    }

    return DRAIN_STATE_CONSUMED;
}
 
开发者ID:uestccokey,项目名称:EZFilter,代码行数:31,代码来源:VideoTrackTranscoder.java

示例13: decodeVideoBuffer

/**
 * Decodes (plays back) one step of the video.
 */
private void decodeVideoBuffer() {
    // Receive a decode result from the MediaCodec.
    int decodeStatus = mDecoder.dequeueOutputBuffer(mBufferinfo, BUFFER_TIMEOUT_USEC);

    if (checkDecoderStatus(decodeStatus)) {
        if ((mBufferinfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)
                != 0) {
            // Codec-config data was read (nothing has actually been decoded yet).
            Log.d(TAG, "mDecoder configured (" + mBufferinfo.size + " bytes)");
        } else if ((mBufferinfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)
                != 0) {
            // Decoded to the end of the stream.
            Log.d(TAG, "Decoder gets BUFFER_FLAG_END_OF_STREAM. ");
            mDecodeDone = true;
        } else if (mBufferinfo.presentationTimeUs > 0) {
            // NOTE(review): frames with presentationTimeUs == 0 (often the very
            // first frame) are neither rendered nor released here — confirm intended.
            // Wait until (frame timestamp > actual elapsed time) before rendering.
            while (mBufferinfo.presentationTimeUs / 1000 >
                    System.currentTimeMillis() - mStartMs) {
                try {
                    Thread.sleep(10);
                } catch (InterruptedException e) {
                    e.printStackTrace();
                }
            }

            // Send the decoded buffer to the surface (renders the frame).
            mDecoder.releaseOutputBuffer(decodeStatus, true);
        }
    }
}
 
开发者ID:ficklerobot,项目名称:grid-video-viewer,代码行数:33,代码来源:DecodeThread.java

示例14: decodeVideo

/**
 * Synchronous decode loop: feeds samples from the extractor into the decoder,
 * renders decoded frames paced against wall-clock time, and either loops the
 * file or notifies the listener at end-of-stream.
 */
private void decodeVideo() {
  ByteBuffer[] inputBuffers = videoDecoder.getInputBuffers();
  long startMs = System.currentTimeMillis();
  while (decoding) {
    int inIndex = videoDecoder.dequeueInputBuffer(10000);
    if (inIndex >= 0) {
      ByteBuffer buffer = inputBuffers[inIndex];
      int sampleSize = videoExtractor.readSampleData(buffer, 0);
      if (sampleSize < 0) {
        // No more samples: push an empty buffer flagged EOS into the decoder.
        videoDecoder.queueInputBuffer(inIndex, 0, 0, 0L, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        Log.i(TAG, "end of file in");
      } else {
        videoDecoder.queueInputBuffer(inIndex, 0, sampleSize, videoExtractor.getSampleTime(), 0);
        videoExtractor.advance();
      }
    }
    int outIndex = videoDecoder.dequeueOutputBuffer(videoInfo, 10000);
    if (outIndex >= 0) {
      // Pace playback: wait until wall-clock time catches up with the frame's PTS.
      while (videoInfo.presentationTimeUs / 1000 > System.currentTimeMillis() - startMs) {
        try {
          Thread.sleep(10);
        } catch (InterruptedException e) {
          thread.interrupt();
          break;
        }
      }
      videoDecoder.releaseOutputBuffer(outIndex, videoInfo.size != 0);
    }
    if ((videoInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
      Log.i(TAG, "end of file out");
      // FIX: clear the EOS flag once handled. videoInfo is only refreshed when
      // dequeueOutputBuffer returns a real buffer, so a stale EOS flag would
      // otherwise re-trigger this branch (re-seeking / re-notifying the
      // listener) on every subsequent loop iteration.
      videoInfo.flags = 0;
      if (loopMode) {
        Log.i(TAG, "loop mode, restreaming file");
        videoExtractor.seekTo(0, MediaExtractor.SEEK_TO_CLOSEST_SYNC);
        videoDecoder.flush();
        // FIX: reset the pacing origin, otherwise the restarted file (whose PTS
        // begins at 0 again) plays back as fast as possible with no waiting.
        startMs = System.currentTimeMillis();
      } else {
        videoDecoderInterface.onVideoDecoderFinished();
      }
    }
  }
}
 
开发者ID:pedroSG94,项目名称:rtmp-rtsp-stream-client-java,代码行数:41,代码来源:VideoDecoder.java

示例15: drainOutputBuffer

/**
 * @return True if it may be possible to drain more output data. False otherwise.
 * @throws ExoPlaybackException If an error occurs draining the output buffer.
 */
@SuppressWarnings("deprecation")
private boolean drainOutputBuffer(long positionUs, long elapsedRealtimeUs)
    throws ExoPlaybackException {
  if (outputStreamEnded) {
    return false;
  }

  // Only dequeue when no output buffer is still held from a previous call.
  if (outputIndex < 0) {
    outputIndex = codec.dequeueOutputBuffer(outputBufferInfo, getDequeueOutputBufferTimeoutUs());
  }

  if (outputIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    processOutputFormat();
    return true;
  } else if (outputIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
    outputBuffers = codec.getOutputBuffers();
    codecCounters.outputBuffersChangedCount++;
    return true;
  } else if (outputIndex < 0) {
    // Some codecs never emit an EOS output buffer; when the workaround applies
    // and input has ended (or reinitialization awaits EOS), propagate EOS manually.
    if (codecNeedsEosPropagationWorkaround && (inputStreamEnded
        || codecReinitializationState == REINITIALIZATION_STATE_WAIT_END_OF_STREAM)) {
      processEndOfStream();
      return true;
    }
    return false;
  }

  if ((outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
    processEndOfStream();
    return false;
  }

  // decodeOnlyIndex != -1 means this timestamp was flagged decode-only: the
  // buffer is consumed without being rendered.
  int decodeOnlyIndex = getDecodeOnlyIndex(outputBufferInfo.presentationTimeUs);
  if (processOutputBuffer(positionUs, elapsedRealtimeUs, codec, outputBuffers[outputIndex],
      outputBufferInfo, outputIndex, decodeOnlyIndex != -1)) {
    onProcessedOutputBuffer(outputBufferInfo.presentationTimeUs);
    if (decodeOnlyIndex != -1) {
      decodeOnlyPresentationTimestamps.remove(decodeOnlyIndex);
    }
    // Buffer fully handled; a fresh one will be dequeued on the next call.
    outputIndex = -1;
    return true;
  }

  return false;
}
 
开发者ID:MLNO,项目名称:airgram,代码行数:49,代码来源:MediaCodecTrackRenderer.java


注:本文中的android.media.MediaCodec.BUFFER_FLAG_END_OF_STREAM属性示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。