This article collects typical usage examples of the Java nested class android.media.MediaCodec.BufferInfo. If you are wondering what MediaCodec.BufferInfo is for, how to use it, or want to see it in real code, the curated examples below may help. You can also read further about its enclosing class, android.media.MediaCodec.
The following presents 15 code examples that use MediaCodec.BufferInfo, sorted by popularity by default.
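Before the examples, it may help to see where MediaCodec.BufferInfo fits in the standard output-drain loop. The sketch below is a minimal illustration, not taken from any example on this page; the 10 ms timeout and the helper name drainOutput are assumptions.

import android.media.MediaCodec;
import java.nio.ByteBuffer;

// Minimal sketch: the codec fills `info` with the offset, size,
// presentationTimeUs and flags of every output buffer it hands back.
static void drainOutput(MediaCodec codec) { // assumes codec is configured and started
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    int index = codec.dequeueOutputBuffer(info, 10_000); // timeout in microseconds
    while (index >= 0) {
        ByteBuffer output = codec.getOutputBuffer(index);
        output.position(info.offset);
        output.limit(info.offset + info.size); // only this slice holds valid data
        // ... consume `output` here (muxer, socket, file, ...) ...
        codec.releaseOutputBuffer(index, false);
        if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
            break; // codec signalled end of stream
        }
        index = codec.dequeueOutputBuffer(info, 10_000);
    }
}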
Example 1: handleCodecOutput
import android.media.MediaCodec; // import the package/class this method depends on
private void handleCodecOutput(MediaCodec mediaCodec,
                               ByteBuffer[] codecOutputBuffers,
                               MediaCodec.BufferInfo bufferInfo,
                               OutputStream outputStream) throws IOException {
    int codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
    while (codecOutputBufferIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
        if (codecOutputBufferIndex >= 0) {
            ByteBuffer encoderOutputBuffer = codecOutputBuffers[codecOutputBufferIndex];
            encoderOutputBuffer.position(bufferInfo.offset);
            encoderOutputBuffer.limit(bufferInfo.offset + bufferInfo.size);
            if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
                byte[] header = createAdtsHeader(bufferInfo.size - bufferInfo.offset);
                outputStream.write(header);
                byte[] data = new byte[encoderOutputBuffer.remaining()];
                encoderOutputBuffer.get(data);
                outputStream.write(data);
            }
            encoderOutputBuffer.clear();
            mediaCodec.releaseOutputBuffer(codecOutputBufferIndex, false);
        } else if (codecOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
            codecOutputBuffers = mediaCodec.getOutputBuffers();
        }
        codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
    }
}
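The snippet above writes raw AAC frames to a stream, prefixing each with an ADTS header built by createAdtsHeader, which is not shown on this page. Below is a hedged sketch of what such a helper conventionally looks like; the AAC-LC profile, 44100 Hz frequency index, and mono channel configuration are assumptions, not values taken from the original project.

// Hypothetical companion to the snippet above: builds the 7-byte ADTS
// header that precedes each raw AAC frame. Profile, sample rate and
// channel count are assumed (AAC-LC, 44100 Hz, mono).
private byte[] createAdtsHeader(int packetLength) {
    int frameLength = packetLength + 7; // frame length includes the 7-byte header
    int profile = 2;  // assumption: AAC LC (ADTS stores profile - 1)
    int freqIdx = 4;  // assumption: 44100 Hz
    int chanCfg = 1;  // assumption: mono

    byte[] header = new byte[7];
    header[0] = (byte) 0xFF;                              // syncword 0xFFF, high byte
    header[1] = (byte) 0xF1;                              // syncword low bits, MPEG-4, no CRC
    header[2] = (byte) (((profile - 1) << 6) | (freqIdx << 2) | (chanCfg >> 2));
    header[3] = (byte) (((chanCfg & 3) << 6) | (frameLength >> 11));
    header[4] = (byte) ((frameLength >> 3) & 0xFF);
    header[5] = (byte) (((frameLength & 7) << 5) | 0x1F); // buffer fullness, high bits
    header[6] = (byte) 0xFC;                              // buffer fullness, low bits
    return header;
}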
Example 2: writeFrameByte
import android.media.MediaCodec; // import the package/class this method depends on
private void writeFrameByte(int track, ByteBuffer bb, MediaCodec.BufferInfo bi, boolean isKeyFrame) {
    SrsEsFrame frame = new SrsEsFrame();
    frame.bb = bb;
    frame.bi = bi;
    frame.isKeyFrame = isKeyFrame;
    frame.track = track;

    if (bRecording && !bPaused) {
        if (needToFindKeyFrame) {
            if (frame.isKeyFrame) {
                needToFindKeyFrame = false;
                frameCache.add(frame);
                synchronized (writeLock) {
                    writeLock.notifyAll();
                }
            }
        } else {
            frameCache.add(frame);
            synchronized (writeLock) {
                writeLock.notifyAll();
            }
        }
    }
}
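writeFrameByte only enqueues frames and wakes a writer thread blocked on writeLock; the consuming side does not appear on this page. Here is a speculative sketch of what that writer loop might look like, reusing the field names from the snippet above (the polling structure and the 500 ms wait are assumptions):

// Speculative consumer counterpart: drains frameCache whenever the
// producer calls writeLock.notifyAll(). The loop shape is an assumption.
private void writeLoop() {
    while (bRecording) {
        while (!frameCache.isEmpty()) {
            SrsEsFrame frame = frameCache.poll();
            // ... mux or send the frame according to frame.track ...
        }
        synchronized (writeLock) {
            try {
                writeLock.wait(500); // woken early by writeFrameByte()
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
                return;
            }
        }
    }
}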
Example 3: srs_avc_startswith_annexb
import android.media.MediaCodec; // import the package/class this method depends on
public SrsAnnexbSearch srs_avc_startswith_annexb(ByteBuffer bb, MediaCodec.BufferInfo bi) {
    SrsAnnexbSearch as = new SrsAnnexbSearch();
    as.match = false;

    int pos = bb.position();
    while (pos < bi.size - 3) {
        // not a match
        if (bb.get(pos) != 0x00 || bb.get(pos + 1) != 0x00) {
            break;
        }
        // match N[00] 00 00 01, where N >= 0
        if (bb.get(pos + 2) == 0x01) {
            as.match = true;
            as.nb_start_code = pos + 3 - bb.position();
            break;
        }
        pos++;
    }
    return as;
}
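A caller typically uses the returned SrsAnnexbSearch to skip the annex-B start code and position the buffer at the NAL unit itself. A brief hedged usage sketch follows; the surrounding loop that produced bb and bi is assumed:

// Hedged usage sketch: strip the leading 00 00 01 / 00 00 00 01 start
// code before packaging the NAL unit that follows it.
SrsAnnexbSearch search = srs_avc_startswith_annexb(bb, bi);
if (search.match) {
    bb.position(bb.position() + search.nb_start_code);
    // ... bb now points at the NAL unit header byte ...
}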
Example 4: CameraRecorder
import android.media.MediaCodec; // import the package/class this method depends on
public CameraRecorder() {
    mShowEGLHelper = new EglHelper();
    // mEncodeEGLHelper = new EGLHelper();
    mSem = new Semaphore(0);
    mAudioEncodeBufferInfo = new MediaCodec.BufferInfo();
    mVideoEncodeBufferInfo = new MediaCodec.BufferInfo();
}
Example 5: writeSampleData
import android.media.MediaCodec; // import the package/class this method depends on
public void writeSampleData(SampleType sampleType, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo) {
    if (mStarted) {
        mMuxer.writeSampleData(getTrackIndexForSampleType(sampleType), byteBuf, bufferInfo);
        return;
    }
    byteBuf.limit(bufferInfo.offset + bufferInfo.size);
    byteBuf.position(bufferInfo.offset);
    if (mByteBuffer == null) {
        mByteBuffer = ByteBuffer.allocateDirect(BUFFER_SIZE).order(ByteOrder.nativeOrder());
    }
    mByteBuffer.put(byteBuf);
    mSampleInfoList.add(new SampleInfo(sampleType, bufferInfo.size, bufferInfo));
}
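Until the muxer has started, samples are copied into mByteBuffer and described by SampleInfo entries (Examples 9 and 11 below show the same class's SampleInfo helpers). A hedged sketch of how those buffered samples are typically replayed once the muxer does start; the method name and exact loop are assumptions:

// Hedged sketch: replay everything writeSampleData() buffered before
// the muxer started. writeToBufferInfo is the helper from Example 9;
// the method name flushBufferedSamples is an assumption.
private void flushBufferedSamples() {
    if (mByteBuffer == null) {
        return;
    }
    mByteBuffer.flip();
    MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
    int offset = 0;
    for (SampleInfo sampleInfo : mSampleInfoList) {
        sampleInfo.writeToBufferInfo(bufferInfo, offset);
        mMuxer.writeSampleData(getTrackIndexForSampleType(sampleInfo.mSampleType),
                mByteBuffer, bufferInfo);
        offset += bufferInfo.size;
    }
    mSampleInfoList.clear();
    mByteBuffer = null;
}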
Example 6: writeMediaData
import android.media.MediaCodec; // import the package/class this method depends on
public synchronized void writeMediaData(int trackIndex, ByteBuffer byteBuf, MediaCodec.BufferInfo bufferInfo) {
    if (mMuxerStarted) {
        RecordScreenLogUtil.e(TAG, "current thread::::" + trackIndex);
        if (mMuxer != null) {
            mMuxer.writeSampleData(trackIndex, byteBuf, bufferInfo);
        }
    }
}
Example 7: EncoderCore
import android.media.MediaCodec; // import the package/class this method depends on
public EncoderCore(MMediaMuxer MMediaMuxer) {
    mMuxerStarted = false;
    mBufferInfo = new MediaCodec.BufferInfo();
    mTrackIndex = -1;
    this.MMediaMuxer = MMediaMuxer;
    this.mMuxer = MMediaMuxer.getMuxer();
}
Example 8: prepare
import android.media.MediaCodec; // import the package/class this method depends on
public void prepare(YXMuxerWrapper muxerWrapper) throws IOException {
    mMuxer = muxerWrapper;
    mBufferInfo = new MediaCodec.BufferInfo();

    mAudioEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
    mAudioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, CHANNEL);
    mAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectHE);
    mAudioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_STEREO);
    mAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
    mAudioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, CHANNEL);
    mAudioFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, 8192);
    mAudioEncoder.configure(mAudioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
}
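After prepare(), the encoder still has to be started and drained with the same mBufferInfo instance. A minimal hedged sketch of that follow-up is shown below (the 10 ms timeout is an assumption; note also that AACObjectHE may not be supported by every device's encoder, with AACObjectLC being the more conservative profile):

// Hedged follow-up sketch: start the codec prepared above and drain
// encoded AAC frames through mBufferInfo.
mAudioEncoder.start();
int index = mAudioEncoder.dequeueOutputBuffer(mBufferInfo, 10_000);
while (index >= 0) {
    ByteBuffer encoded = mAudioEncoder.getOutputBuffer(index);
    // ... hand `encoded` plus mBufferInfo to the muxer wrapper ...
    mAudioEncoder.releaseOutputBuffer(index, false);
    index = mAudioEncoder.dequeueOutputBuffer(mBufferInfo, 10_000);
}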
Example 9: writeToBufferInfo
import android.media.MediaCodec; // import the package/class this method depends on
private void writeToBufferInfo(MediaCodec.BufferInfo bufferInfo, int offset) {
    bufferInfo.set(offset, mSize, mPresentationTimeUs, mFlags);
}
Example 10: getAacDataRtp
import android.media.MediaCodec; // import the package/class this method depends on
@Override
protected void getAacDataRtp(ByteBuffer aacBuffer, MediaCodec.BufferInfo info) {
    rtspClient.sendAudio(aacBuffer, info);
}
Example 11: SampleInfo
import android.media.MediaCodec; // import the package/class this method depends on
private SampleInfo(SampleType sampleType, int size, MediaCodec.BufferInfo bufferInfo) {
    mSampleType = sampleType;
    mSize = size;
    mPresentationTimeUs = bufferInfo.presentationTimeUs;
    mFlags = bufferInfo.flags;
}
Example 12: onAudioEncode
import android.media.MediaCodec; // import the package/class this method depends on
@Override
public void onAudioEncode(ByteBuffer bb, MediaCodec.BufferInfo bi) {
    if (mPacker != null) {
        mPacker.onAudioData(bb, bi);
    }
}
Example 13: getH264DataRtp
import android.media.MediaCodec; // import the package/class this method depends on
@Override
protected void getH264DataRtp(ByteBuffer h264Buffer, MediaCodec.BufferInfo info) {
    srsFlvMuxer.sendVideo(h264Buffer, info);
}
Example 14: dequeueOutputBuffer
import android.media.MediaCodec; // import the package/class this method depends on
private DecodedOutputBuffer dequeueOutputBuffer(int dequeueTimeoutMs) {
    checkOnMediaCodecThread();
    if (decodeStartTimeMs.isEmpty()) {
        return null;
    }
    // Drain the decoder until receiving a decoded buffer or hitting
    // MediaCodec.INFO_TRY_AGAIN_LATER.
    final MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    while (true) {
        final int result =
                mediaCodec.dequeueOutputBuffer(info, TimeUnit.MILLISECONDS.toMicros(dequeueTimeoutMs));
        switch (result) {
            case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
                outputBuffers = mediaCodec.getOutputBuffers();
                Logging.d(TAG, "Decoder output buffers changed: " + outputBuffers.length);
                if (hasDecodedFirstFrame) {
                    throw new RuntimeException("Unexpected output buffer change event.");
                }
                break;
            case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
                MediaFormat format = mediaCodec.getOutputFormat();
                Logging.d(TAG, "Decoder format changed: " + format.toString());
                int new_width = format.getInteger(MediaFormat.KEY_WIDTH);
                int new_height = format.getInteger(MediaFormat.KEY_HEIGHT);
                if (hasDecodedFirstFrame && (new_width != width || new_height != height)) {
                    throw new RuntimeException("Unexpected size change. Configured " + width + "*"
                            + height + ". New " + new_width + "*" + new_height);
                }
                width = format.getInteger(MediaFormat.KEY_WIDTH);
                height = format.getInteger(MediaFormat.KEY_HEIGHT);
                if (!useSurface && format.containsKey(MediaFormat.KEY_COLOR_FORMAT)) {
                    colorFormat = format.getInteger(MediaFormat.KEY_COLOR_FORMAT);
                    Logging.d(TAG, "Color: 0x" + Integer.toHexString(colorFormat));
                    if (!supportedColorList.contains(colorFormat)) {
                        throw new IllegalStateException("Non supported color format: " + colorFormat);
                    }
                }
                if (format.containsKey("stride")) {
                    stride = format.getInteger("stride");
                }
                if (format.containsKey("slice-height")) {
                    sliceHeight = format.getInteger("slice-height");
                }
                Logging.d(TAG, "Frame stride and slice height: " + stride + " x " + sliceHeight);
                stride = Math.max(width, stride);
                sliceHeight = Math.max(height, sliceHeight);
                break;
            case MediaCodec.INFO_TRY_AGAIN_LATER:
                return null;
            default:
                hasDecodedFirstFrame = true;
                TimeStamps timeStamps = decodeStartTimeMs.remove();
                long decodeTimeMs = SystemClock.elapsedRealtime() - timeStamps.decodeStartTimeMs;
                if (decodeTimeMs > MAX_DECODE_TIME_MS) {
                    Logging.e(TAG, "Very high decode time: " + decodeTimeMs + "ms"
                            + ". Q size: " + decodeStartTimeMs.size()
                            + ". Might be caused by resuming H264 decoding after a pause.");
                    decodeTimeMs = MAX_DECODE_TIME_MS;
                }
                return new DecodedOutputBuffer(result, info.offset, info.size,
                        TimeUnit.MICROSECONDS.toMillis(info.presentationTimeUs), timeStamps.timeStampMs,
                        timeStamps.ntpTimeStampMs, decodeTimeMs, SystemClock.elapsedRealtime());
        }
    }
}
Example 15: writeAudioSample
import android.media.MediaCodec; // import the package/class this method depends on
public void writeAudioSample(final ByteBuffer bb, MediaCodec.BufferInfo bi) {
    int dts = (int) (bi.presentationTimeUs / 1000);

    audio_tag = mAudioAllocator.allocate(bi.size + 2);
    byte aac_packet_type = 1; // 1 = AAC raw
    if (!aac_specific_config_got) {
        // @see aac-mp4a-format-ISO_IEC_14496-3+2001.pdf
        // AudioSpecificConfig(), page 33
        // 1.6.2.1 AudioSpecificConfig
        // audioObjectType; 5 bslbf
        byte ch = (byte) (bb.get(0) & 0xf8);
        // 3 bits left.
        // samplingFrequencyIndex; 4 bslbf
        byte samplingFrequencyIndex = 0x04; // 44100
        if (sampleRate == SrsCodecAudioSampleRate.R22050) {
            samplingFrequencyIndex = 0x07;
        } else if (sampleRate == SrsCodecAudioSampleRate.R11025) {
            samplingFrequencyIndex = 0x0a;
        } else if (sampleRate == SrsCodecAudioSampleRate.R32000) {
            samplingFrequencyIndex = 0x05;
        } else if (sampleRate == SrsCodecAudioSampleRate.R16000) {
            samplingFrequencyIndex = 0x08;
        }
        ch |= (samplingFrequencyIndex >> 1) & 0x07;
        audio_tag.put(ch, 2);

        ch = (byte) ((samplingFrequencyIndex << 7) & 0x80);
        // 7 bits left.
        // channelConfiguration; 4 bslbf
        byte channelConfiguration = 1;
        if (achannel == 2) {
            channelConfiguration = 2;
        }
        ch |= (channelConfiguration << 3) & 0x78;
        // 3 bits left.
        // GASpecificConfig(), page 451
        // 4.4.1 Decoder configuration (GASpecificConfig)
        // frameLengthFlag; 1 bslbf
        // dependsOnCoreCoder; 1 bslbf
        // extensionFlag; 1 bslbf
        audio_tag.put(ch, 3);

        aac_specific_config_got = true;
        aac_packet_type = 0; // 0 = AAC sequence header
        writeAdtsHeader(audio_tag.array(), 4);
        audio_tag.appendOffset(7);
    } else {
        bb.get(audio_tag.array(), 2, bi.size);
        audio_tag.appendOffset(bi.size + 2);
    }

    byte sound_format = 10; // AAC
    byte sound_type = 0; // 0 = Mono sound
    if (achannel == 2) {
        sound_type = 1; // 1 = Stereo sound
    }
    byte sound_size = 1; // 1 = 16-bit samples
    byte sound_rate = 3; // 44100, 22050, 11025
    if (sampleRate == 22050) {
        sound_rate = 2;
    } else if (sampleRate == 11025) {
        sound_rate = 1;
    }

    // For the audio frame there is a 1- or 2-byte header:
    //   1 byte: SoundFormat|SoundRate|SoundSize|SoundType
    //   1 byte: AACPacketType (only for SoundFormat == 10); 0 is sequence header.
    byte audio_header = (byte) (sound_type & 0x01);
    audio_header |= (sound_size << 1) & 0x02;
    audio_header |= (sound_rate << 2) & 0x0c;
    audio_header |= (sound_format << 4) & 0xf0;

    audio_tag.put(audio_header, 0);
    audio_tag.put(aac_packet_type, 1);

    writeRtmpPacket(SrsCodecFlvTag.Audio, dts, 0, aac_packet_type, audio_tag);
}
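As a worked example of the bit packing in the !aac_specific_config_got branch: for AAC-LC (audioObjectType 2), 44100 Hz (samplingFrequencyIndex 4) and stereo (channelConfiguration 2), the two AudioSpecificConfig bytes come out as 00010 0100 | 0010 000 in binary, i.e. 0x12 0x10. A hedged standalone sketch of the same packing, written without the SRS allocator types:

// Standalone illustration of the AudioSpecificConfig layout used above.
// audioSpecificConfig(2, 4, 2) returns { 0x12, 0x10 } (AAC-LC, 44100 Hz, stereo).
static byte[] audioSpecificConfig(int audioObjectType, int freqIndex, int channelConfig) {
    byte[] asc = new byte[2];
    // 5 bits audioObjectType | 4 bits samplingFrequencyIndex | 4 bits channelConfiguration
    asc[0] = (byte) ((audioObjectType << 3) | (freqIndex >> 1));
    asc[1] = (byte) (((freqIndex & 0x01) << 7) | (channelConfig << 3));
    // the trailing frameLengthFlag, dependsOnCoreCoder, extensionFlag bits stay 0
    return asc;
}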