

Java Code Examples for MediaCodec.BUFFER_FLAG_SYNC_FRAME

This article collects typical usage examples of the Java field android.media.MediaCodec.BUFFER_FLAG_SYNC_FRAME. If you are unsure what MediaCodec.BUFFER_FLAG_SYNC_FRAME means, how it is used, or what real code using it looks like, the curated examples below should help. You can also explore further usage examples of android.media.MediaCodec, the class that declares this field.


Seven code examples using MediaCodec.BUFFER_FLAG_SYNC_FRAME are shown below, sorted by popularity by default.
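
BUFFER_FLAG_SYNC_FRAME is set in MediaCodec.BufferInfo.flags when an output buffer contains a sync frame (key frame): a frame a decoder can start from without any earlier frames. All of the examples below test this flag after dequeuing an encoder output buffer. A minimal sketch of that common pattern (encoder and handleFrame are hypothetical placeholders; getOutputBuffer requires API 21+):

    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int index = encoder.dequeueOutputBuffer(info, 10000 /* timeout in µs */);
    if (index >= 0) {
        ByteBuffer encoded = encoder.getOutputBuffer(index); // API 21+
        boolean isSyncFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
        handleFrame(encoded, info, isSyncFrame); // hypothetical consumer
        encoder.releaseOutputBuffer(index, false);
    }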

Example 1: addSample

public void addSample(long offset, MediaCodec.BufferInfo bufferInfo) {
    long delta = bufferInfo.presentationTimeUs - lastPresentationTimeUs;
    if (delta < 0) {
        // Ignore samples whose timestamps go backwards.
        return;
    }
    // Audio tracks have no sync-sample table; for video, record key frames.
    boolean isSyncFrame = !isAudio && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    samples.add(new Sample(offset, bufferInfo.size));
    if (syncSamples != null && isSyncFrame) {
        // Sample numbers in the MP4 sync-sample (stss) table are 1-based.
        syncSamples.add(samples.size());
    }

    // Rescale the microsecond delta to the track timescale, rounding to nearest.
    delta = (delta * timeScale + 500000L) / 1000000L;
    lastPresentationTimeUs = bufferInfo.presentationTimeUs;
    if (!first) {
        sampleDurations.add(sampleDurations.size() - 1, delta);
        duration += delta;
    }
    first = false;
}
 
Author: MLNO | Project: airgram | Lines: 19 | Source: Track.java
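
The rescaling step `delta = (delta * timeScale + 500000L) / 1000000L` converts a microsecond interval into track-timescale ticks; adding 500000 (half the divisor) before the integer division makes it round to nearest rather than truncate. A worked example with an assumed 90 kHz timescale:

    long deltaUs = 33333L;      // ~one frame at 30 fps, in microseconds
    long timeScale = 90000L;    // a common video track timescale
    long ticks = (deltaUs * timeScale + 500000L) / 1000000L;
    // (33333 * 90000 + 500000) / 1000000 = 3000 ticks, i.e. 1/30 s at 90 kHz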

Example 2: getFirstIndex

/**
 * Returns the index of the oldest sync frame.  Valid until the next add().
 * <p>
 * When sending output to a MediaMuxer, start here.
 */
public int getFirstIndex() {
    final int metaLen = mPacketStart.length;

    int index = mMetaTail;
    while (index != mMetaHead) {
        if ((mPacketFlags[index] & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0) {
            break;
        }
        index = (index + 1) % metaLen;
    }

    if (index == mMetaHead) {
        Log.w(TAG, "HEY: could not find sync frame in buffer");
        index = -1;
    }
    return index;
}
 
Author: AndyZhu1991 | Project: grafika | Lines: 22 | Source: CircularEncoderBuffer.java
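
To save the buffered stream, a caller would start at getFirstIndex() and walk forward to the head, writing each packet to a MediaMuxer. A minimal sketch, assuming companion accessors in the style of getChunk(index, info) and getNextIndex(index) (these names are illustrative, not confirmed from the source):

    int index = circularBuffer.getFirstIndex();
    if (index < 0) {
        throw new RuntimeException("no sync frame available in buffer");
    }
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    do {
        // Illustrative accessors: fetch the packet at index, then advance.
        ByteBuffer chunk = circularBuffer.getChunk(index, info);
        muxer.writeSampleData(videoTrack, chunk, info);
        index = circularBuffer.getNextIndex(index);
    } while (index >= 0);

Starting at a sync frame matters because the decoder cannot reconstruct the delta frames that precede it; anything older than the oldest buffered key frame is unusable and is skipped.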

Example 3: addSample

public void addSample(long offset, MediaCodec.BufferInfo bufferInfo) {
    boolean isSyncFrame = !isAudio && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    samples.add(new Sample(offset, bufferInfo.size));
    if (syncSamples != null && isSyncFrame) {
        syncSamples.add(samples.size());
    }

    long delta = bufferInfo.presentationTimeUs - lastPresentationTimeUs;
    lastPresentationTimeUs = bufferInfo.presentationTimeUs;
    delta = (delta * timeScale + 500000L) / 1000000L;
    if (!first) {
        sampleDurations.add(sampleDurations.size() - 1, delta);
        duration += delta;
    }
    first = false;
}
 
Author: fishwjy | Project: VideoCompressor | Lines: 16 | Source: Track.java

Example 4: dequeueOutputBuffer

OutputBufferInfo dequeueOutputBuffer() {
  checkOnMediaCodecThread();
  try {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
    // Check if this is a config frame and, if so, save the configuration data.
    if (result >= 0) {
      boolean isConfigFrame = (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
      if (isConfigFrame) {
        Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
        configData = ByteBuffer.allocateDirect(info.size);
        outputBuffers[result].position(info.offset);
        outputBuffers[result].limit(info.offset + info.size);
        configData.put(outputBuffers[result]);
        // Log a few SPS header bytes to check the profile and level.
        String spsData = "";
        for (int i = 0; i < (info.size < 8 ? info.size : 8); i++) {
          spsData += Integer.toHexString(configData.get(i) & 0xff) + " ";
        }
        Logging.d(TAG, spsData);
        // Release buffer back.
        mediaCodec.releaseOutputBuffer(result, false);
        // Query next output.
        result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
      }
    }
    if (result >= 0) {
      // MediaCodec doesn't care about Buffer position/remaining/etc so we can
      // mess with them to get a slice and avoid having to pass extra
      // (BufferInfo-related) parameters back to C++.
      ByteBuffer outputBuffer = outputBuffers[result].duplicate();
      outputBuffer.position(info.offset);
      outputBuffer.limit(info.offset + info.size);
      reportEncodedFrame(info.size);

      // Check key frame flag.
      boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
      if (isKeyFrame) {
        Logging.d(TAG, "Sync frame generated");
      }
      if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
        Logging.d(TAG, "Appending config frame of size " + configData.capacity()
                + " to output buffer with offset " + info.offset + ", size " + info.size);
        // For an H.264 key frame, prepend the SPS and PPS NALs to the frame data.
        ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(configData.capacity() + info.size);
        configData.rewind();
        keyFrameBuffer.put(configData);
        keyFrameBuffer.put(outputBuffer);
        keyFrameBuffer.position(0);
        return new OutputBufferInfo(result, keyFrameBuffer, isKeyFrame, info.presentationTimeUs);
      } else {
        return new OutputBufferInfo(
            result, outputBuffer.slice(), isKeyFrame, info.presentationTimeUs);
      }
    } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
      outputBuffers = mediaCodec.getOutputBuffers();
      return dequeueOutputBuffer();
    } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
      return dequeueOutputBuffer();
    } else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
      return null;
    }
    throw new RuntimeException("dequeueOutputBuffer: " + result);
  } catch (IllegalStateException e) {
    Logging.e(TAG, "dequeueOutputBuffer failed", e);
    return new OutputBufferInfo(-1, null, false, -1);
  }
}
 
Author: Piasy | Project: AppRTC-Android | Lines: 68 | Source: MediaCodecVideoEncoder.java
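
WebRTC-style encoders like this one also need to force a key frame on demand, for example after packet loss. On API 19+ this is done through MediaCodec.setParameters with PARAMETER_KEY_REQUEST_SYNC_FRAME; a minimal sketch:

    Bundle params = new Bundle();
    params.putInt(MediaCodec.PARAMETER_KEY_REQUEST_SYNC_FRAME, 0);
    mediaCodec.setParameters(params);
    // A subsequent output buffer should then carry BUFFER_FLAG_SYNC_FRAME.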

Example 5: deliverEncodedImage

private void deliverEncodedImage() {
  outputThreadChecker.checkIsOnValidThread();
  try {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int index = codec.dequeueOutputBuffer(info, DEQUEUE_OUTPUT_BUFFER_TIMEOUT_US);
    if (index < 0) {
      return;
    }

    ByteBuffer codecOutputBuffer = codec.getOutputBuffers()[index];
    codecOutputBuffer.position(info.offset);
    codecOutputBuffer.limit(info.offset + info.size);

    if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
      Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
      configBuffer = ByteBuffer.allocateDirect(info.size);
      configBuffer.put(codecOutputBuffer);
    } else {
      bitrateAdjuster.reportEncodedFrame(info.size);
      if (adjustedBitrate != bitrateAdjuster.getAdjustedBitrateBps()) {
        updateBitrate();
      }

      final boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
      if (isKeyFrame) {
        Logging.d(TAG, "Sync frame generated");
      }

      final ByteBuffer frameBuffer;
      if (isKeyFrame && codecType == VideoCodecType.H264) {
        Logging.d(TAG,
            "Prepending config frame of size " + configBuffer.capacity()
                + " to output buffer with offset " + info.offset + ", size " + info.size);
        // For an H.264 key frame, prepend the SPS and PPS NALs to the frame data.
        frameBuffer = ByteBuffer.allocateDirect(info.size + configBuffer.capacity());
        configBuffer.rewind();
        frameBuffer.put(configBuffer);
      } else {
        frameBuffer = ByteBuffer.allocateDirect(info.size);
      }
      frameBuffer.put(codecOutputBuffer);
      frameBuffer.rewind();

      final EncodedImage.FrameType frameType = isKeyFrame
          ? EncodedImage.FrameType.VideoFrameKey
          : EncodedImage.FrameType.VideoFrameDelta;

      EncodedImage.Builder builder = outputBuilders.poll();
      builder.setBuffer(frameBuffer).setFrameType(frameType);
      // TODO(mellem):  Set codec-specific info.
      callback.onEncodedFrame(builder.createEncodedImage(), new CodecSpecificInfo());
    }
    codec.releaseOutputBuffer(index, false);
  } catch (IllegalStateException e) {
    Logging.e(TAG, "deliverOutput failed", e);
  }
}
 
Author: Piasy | Project: AppRTC-Android | Lines: 57 | Source: HardwareVideoEncoder.java
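
How often sync frames occur in the first place is governed at configure time by MediaFormat.KEY_I_FRAME_INTERVAL, given in seconds. A configuration sketch with assumed resolution and bitrate values:

    MediaFormat format = MediaFormat.createVideoFormat(MediaFormat.MIMETYPE_VIDEO_AVC, 1280, 720);
    format.setInteger(MediaFormat.KEY_BIT_RATE, 2000000);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 2); // a sync frame roughly every 2 s
    encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);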

Example 6: dequeueOutputBuffer

OutputBufferInfo dequeueOutputBuffer() {
  checkOnMediaCodecThread();
  try {
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
    // Check if this is a config frame and, if so, save the configuration data.
    if (result >= 0) {
      boolean isConfigFrame = (info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0;
      if (isConfigFrame) {
        Logging.d(TAG, "Config frame generated. Offset: " + info.offset + ". Size: " + info.size);
        configData = ByteBuffer.allocateDirect(info.size);
        outputBuffers[result].position(info.offset);
        outputBuffers[result].limit(info.offset + info.size);
        configData.put(outputBuffers[result]);
        // Release buffer back.
        mediaCodec.releaseOutputBuffer(result, false);
        // Query next output.
        result = mediaCodec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
      }
    }
    if (result >= 0) {
      // MediaCodec doesn't care about Buffer position/remaining/etc so we can
      // mess with them to get a slice and avoid having to pass extra
      // (BufferInfo-related) parameters back to C++.
      ByteBuffer outputBuffer = outputBuffers[result].duplicate();
      outputBuffer.position(info.offset);
      outputBuffer.limit(info.offset + info.size);
      reportEncodedFrame(info.size);

      // Check key frame flag.
      boolean isKeyFrame = (info.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
      if (isKeyFrame) {
        Logging.d(TAG, "Sync frame generated");
      }
      if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
        Logging.d(TAG, "Appending config frame of size " + configData.capacity()
                + " to output buffer with offset " + info.offset + ", size " + info.size);
        // For an H.264 key frame, prepend the SPS and PPS NALs to the frame data.
        ByteBuffer keyFrameBuffer = ByteBuffer.allocateDirect(configData.capacity() + info.size);
        configData.rewind();
        keyFrameBuffer.put(configData);
        keyFrameBuffer.put(outputBuffer);
        keyFrameBuffer.position(0);
        return new OutputBufferInfo(result, keyFrameBuffer, isKeyFrame, info.presentationTimeUs);
      } else {
        return new OutputBufferInfo(
            result, outputBuffer.slice(), isKeyFrame, info.presentationTimeUs);
      }
    } else if (result == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
      outputBuffers = mediaCodec.getOutputBuffers();
      return dequeueOutputBuffer();
    } else if (result == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
      return dequeueOutputBuffer();
    } else if (result == MediaCodec.INFO_TRY_AGAIN_LATER) {
      return null;
    }
    throw new RuntimeException("dequeueOutputBuffer: " + result);
  } catch (IllegalStateException e) {
    Logging.e(TAG, "dequeueOutputBuffer failed", e);
    return new OutputBufferInfo(-1, null, false, -1);
  }
}
 
Author: lgyjg | Project: AndroidRTC | Lines: 62 | Source: MediaCodecVideoEncoder.java
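
Note that BUFFER_FLAG_SYNC_FRAME was deprecated in API level 21 in favor of MediaCodec.BUFFER_FLAG_KEY_FRAME; both constants are defined as 1, so the checks in these examples match exactly the same frames. A small helper using the newer name:

    private static boolean isKeyFrame(MediaCodec.BufferInfo info) {
        // BUFFER_FLAG_KEY_FRAME (API 21) replaced the deprecated
        // BUFFER_FLAG_SYNC_FRAME; both have the value 1.
        return (info.flags & MediaCodec.BUFFER_FLAG_KEY_FRAME) != 0;
    }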

Example 7: deliverEncodedImage

private void deliverEncodedImage() {
  try {
    int index = mediaCodec.dequeueOutputBuffer(outputBufferInfo,
            OUTPUT_THREAD_DEQUEUE_TIMEOUT_US);
    if (index < 0) {
      if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
        callback.onOutputFormatChanged(mediaCodec, mediaCodec.getOutputFormat());
      }
      return;
    }

    ByteBuffer codecOutputBuffer = mediaCodec.getOutputBuffers()[index];
    codecOutputBuffer.position(outputBufferInfo.offset);
    codecOutputBuffer.limit(outputBufferInfo.offset + outputBufferInfo.size);

    if ((outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
      Logging.d(TAG, "Config frame generated. Offset: " + outputBufferInfo.offset
                     + ". Size: " + outputBufferInfo.size);
      configData = ByteBuffer.allocateDirect(outputBufferInfo.size);
      configData.put(codecOutputBuffer);
      // Log a few SPS header bytes to check the profile and level.
      String spsData = "";
      for (int i = 0; i < (outputBufferInfo.size < 8 ? outputBufferInfo.size : 8); i++) {
        spsData += Integer.toHexString(configData.get(i) & 0xff) + " ";
      }
      Logging.d(TAG, spsData);
    } else {
      reportEncodedFrame(outputBufferInfo.size);

      // Check key frame flag.
      boolean isKeyFrame = (outputBufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
      if (isKeyFrame && type == VideoCodecType.VIDEO_CODEC_H264) {
        // For an H.264 key frame, prepend the SPS and PPS NALs to the frame data.
        if (keyFrameData.capacity() < configData.capacity() + outputBufferInfo.size) {
          // Grow the scratch buffer, at least doubling it to amortize reallocations.
          int newSize = Math.max(keyFrameData.capacity() * 2,
                  configData.capacity() + outputBufferInfo.size);
          keyFrameData = ByteBuffer.allocateDirect(newSize);
        }
        keyFrameData.position(0);
        configData.rewind();
        keyFrameData.put(configData);
        keyFrameData.put(codecOutputBuffer);
        keyFrameData.position(0);
        outputFrame.fill(index, keyFrameData, configData.capacity() + outputBufferInfo.size,
                isKeyFrame, outputBufferInfo.presentationTimeUs);
        callback.onEncodedFrame(outputFrame, outputBufferInfo);
        releaseOutputBuffer(index);
      } else {
        outputFrame.fill(index, codecOutputBuffer, outputBufferInfo.size, isKeyFrame,
                outputBufferInfo.presentationTimeUs);
        callback.onEncodedFrame(outputFrame, outputBufferInfo);
        releaseOutputBuffer(index);
      }
    }
  } catch (IllegalStateException e) {
    Logging.e(TAG, "deliverOutput failed", e);
  }
}
 
Author: Piasy | Project: VideoCRE | Lines: 59 | Source: MediaCodecVideoEncoder.java
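
The flag also matters on the way out: when encoded output is written to an MP4 with MediaMuxer, writeSampleData reads BufferInfo.flags, and samples carrying BUFFER_FLAG_SYNC_FRAME become the file's seekable sync samples. A minimal sketch, assuming the video track has already been added and the muxer started:

    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int index = encoder.dequeueOutputBuffer(info, 10000);
    if (index >= 0) {
        if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
            ByteBuffer encoded = encoder.getOutputBuffer(index); // API 21+
            // info.flags, including BUFFER_FLAG_SYNC_FRAME, is recorded by the muxer.
            muxer.writeSampleData(videoTrackIndex, encoded, info);
        }
        encoder.releaseOutputBuffer(index, false);
    }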


Note: The android.media.MediaCodec.BUFFER_FLAG_SYNC_FRAME examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from community open-source projects; copyright remains with the original authors, and any use or redistribution must follow the corresponding project's license. Please do not repost without permission.