

Java MediaCodec Class Code Examples

This article collects typical usage examples of the Java class android.media.MediaCodec. If you are looking for concrete answers to questions such as what the MediaCodec class is for, how to use it, and what real-world code that uses it looks like, the curated examples below should help.


The MediaCodec class belongs to the android.media package. Fifteen code examples of the class are shown below, sorted by popularity by default.

Example 1: buildRenderers

import android.media.MediaCodec; // import the required package/class
@Override
public void buildRenderers(DemoPlayer player) {
  Allocator allocator = new DefaultAllocator(BUFFER_SEGMENT_SIZE);
  Handler mainHandler = player.getMainHandler();

  // Build the video and audio renderers.
  DefaultBandwidthMeter bandwidthMeter = new DefaultBandwidthMeter(mainHandler, null);
  DataSource dataSource = new DefaultUriDataSource(context, bandwidthMeter, userAgent);
  ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource, allocator,
      BUFFER_SEGMENT_COUNT * BUFFER_SEGMENT_SIZE, mainHandler, player, 0);
  MediaCodecVideoTrackRenderer videoRenderer = new MediaCodecVideoTrackRenderer(context,
      sampleSource, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000,
      mainHandler, player, 50);
  MediaCodecAudioTrackRenderer audioRenderer = new MediaCodecAudioTrackRenderer(sampleSource,
      MediaCodecSelector.DEFAULT, null, true, mainHandler, player,
      AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
  TrackRenderer textRenderer = new TextTrackRenderer(sampleSource, player,
      mainHandler.getLooper());

  // Invoke the callback.
  TrackRenderer[] renderers = new TrackRenderer[DemoPlayer.RENDERER_COUNT];
  renderers[DemoPlayer.TYPE_VIDEO] = videoRenderer;
  renderers[DemoPlayer.TYPE_AUDIO] = audioRenderer;
  renderers[DemoPlayer.TYPE_TEXT] = textRenderer;
  player.onRenderers(renderers, bandwidthMeter);
}
 
Author: Dreamxiaoxuan, Project: AndroidTvDemo, Lines: 27, Source: ExtractorRendererBuilder.java

Example 2: drainDecoder

import android.media.MediaCodec; // import the required package/class
private int drainDecoder(long timeoutUs) {
    if (mIsDecoderEOS) return DRAIN_STATE_NONE;

    int result = mDecoder.dequeueOutputBuffer(mBufferInfo, timeoutUs);
    switch (result) {
        case MediaCodec.INFO_TRY_AGAIN_LATER:
            return DRAIN_STATE_NONE;
        case MediaCodec.INFO_OUTPUT_FORMAT_CHANGED:
            mAudioChannel.setActualDecodedFormat(mDecoder.getOutputFormat());
            // intentional fall-through: a format change is also retried immediately
        case MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED:
            return DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY;
    }

    if ((mBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
        mIsDecoderEOS = true;
        mAudioChannel.drainDecoderBufferAndQueue(AudioChannel.BUFFER_INDEX_END_OF_STREAM, 0);
    } else if (mBufferInfo.size > 0) {
        mAudioChannel.drainDecoderBufferAndQueue(result, mBufferInfo.presentationTimeUs);
    }

    return DRAIN_STATE_CONSUMED;
}
 
Author: SavorGit, Project: Hotspot-master-devp, Lines: 23, Source: AudioTrackTranscoder.java
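
For context, here is a minimal sketch of how a drain method like the one above is typically driven from a transcoding step loop. The constant values and the stepPipeline() wrapper are assumptions added for illustration; they are not taken from the Hotspot-master-devp project.

// Hypothetical constants and step loop, mirroring the pattern used by the snippet above.
// The real project defines these constants itself; the values here are placeholders.
private static final int DRAIN_STATE_NONE = 0;
private static final int DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY = 1;
private static final int DRAIN_STATE_CONSUMED = 2;

/** Runs one pipeline step; returns true if the decoder made any progress. */
private boolean stepPipeline() {
    boolean busy = false;
    int status;
    do {
        status = drainDecoder(0);
        if (status != DRAIN_STATE_NONE) busy = true;
        // SHOULD_RETRY_IMMEDIATELY means the output format or buffers changed,
        // so poll the decoder again without waiting.
    } while (status == DRAIN_STATE_SHOULD_RETRY_IMMEDIATELY);
    return busy;
}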

Example 3: open

import android.media.MediaCodec; // import the required package/class
@Override
public Point open(SurfaceTexture surface) {
    try {
        if (!extractMedia()) {
            return new Point(0, 0);
        }
        mFrameSem = new Semaphore(0);
        mDecodeSem = new Semaphore(1);
        videoProvideEndFlag = false;
        isUserWantToStop = false;
        mAudioEncodeTrack = mStore.addTrack(mExtractor.getTrackFormat(mAudioDecodeTrack));
        MediaFormat format = mExtractor.getTrackFormat(mVideoDecodeTrack);
        mVideoDecoder = MediaCodec.createDecoderByType(format.getString(MediaFormat.KEY_MIME));
        mVideoDecoder.configure(format, new Surface(surface), null, 0);
        mVideoDecoder.start();
        startDecodeThread();
    } catch (IOException e) {
        e.printStackTrace();
    }
    return mVideoSize;
}
 
Author: aiyaapp, Project: AAVT, Lines: 22, Source: Mp4Provider.java

Example 4: openVideoEncoder

import android.media.MediaCodec; // import the required package/class
private void openVideoEncoder() {
    AvLog.d(TAG, "openVideoEncoder startTime-->");
    if (mVideoEncoder == null) {
        try {
            MediaFormat format = convertVideoConfigToFormat(mConfig.mVideo);
            mVideoEncoder = MediaCodec.createEncoderByType(mConfig.mVideo.mime);
            mVideoEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            // createInputSurface() must be called between configure() and start()
            super.setSurface(mVideoEncoder.createInputSurface());
            super.setOutputSize(mConfig.mVideo.width, mConfig.mVideo.height);
            mVideoEncoder.start();
            isEncodeStarted = true;
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
    AvLog.d(TAG,"openVideoEncoder endTime-->");
}
 
Author: aiyaapp, Project: AAVT, Lines: 18, Source: SurfaceEncoder.java
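
convertVideoConfigToFormat() is referenced above but not shown. As a rough, hypothetical sketch, a Surface-input encoder format is usually assembled along the following lines; the VideoConfig type and its bitrate/frameRate/iFrameInterval fields are placeholders, not names taken from the AAVT project.

// Hypothetical helper: builds a MediaFormat for a Surface-input video encoder.
// Only mime/width/height appear in the snippet above; the other field names are assumptions.
private MediaFormat convertVideoConfigToFormat(VideoConfig video) {
    MediaFormat format = MediaFormat.createVideoFormat(video.mime, video.width, video.height);
    // Surface input: the encoder pulls frames from the Surface returned by createInputSurface().
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, video.bitrate);
    format.setInteger(MediaFormat.KEY_FRAME_RATE, video.frameRate);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, video.iFrameInterval);
    return format;
}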

Example 5: writeFrameByte

import android.media.MediaCodec; // import the required package/class
private void writeFrameByte(int track, ByteBuffer bb, MediaCodec.BufferInfo bi, boolean isKeyFrame) {
    SrsEsFrame frame = new SrsEsFrame();
    frame.bb = bb;
    frame.bi = bi;
    frame.isKeyFrame = isKeyFrame;
    frame.track = track;

    if (bRecording && !bPaused) {
        if (needToFindKeyFrame) {
            if (frame.isKeyFrame) {
                needToFindKeyFrame = false;
                frameCache.add(frame);
                synchronized (writeLock) {
                    writeLock.notifyAll();
                }
            }
        } else {
            frameCache.add(frame);
            synchronized (writeLock) {
                writeLock.notifyAll();
            }
        }
    }
}
 
Author: lisnstatic, Project: live_master, Lines: 25, Source: SrsMp4Muxer.java

Example 6: getVideoMediaCodec

import android.media.MediaCodec; // import the required package/class
@TargetApi(21)
public static MediaCodec getVideoMediaCodec() {
    int videoWidth = getVideoSize(Options.getInstance().video.width);
    int videoHeight = getVideoSize(Options.getInstance().video.height);
    MediaFormat format = MediaFormat.createVideoFormat(Options.getInstance().video.mime, videoWidth, videoHeight);
    format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
            MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
    format.setInteger(MediaFormat.KEY_BIT_RATE, Options.getInstance().video.maxBps * 1024);
    int fps = Options.getInstance().video.fps;
    // set the camera preview frame rate
//    if (BlackListHelper.deviceInFpsBlacklisted()) {
//        SopCastLog.d(SopCastConstant.TAG, "Device in fps setting black list, so set mediacodec fps 15");
//        fps = 15;
//    }
    format.setInteger(MediaFormat.KEY_FRAME_RATE, fps);
    format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, Options.getInstance().video.ifi);
    format.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR);
    // NOTE: the original project passes a bitrate-mode constant as the complexity value here,
    // which looks like a mistake; KEY_COMPLEXITY expects a codec-specific complexity level.
    format.setInteger(MediaFormat.KEY_COMPLEXITY, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);
    MediaCodec mediaCodec = null;

    try {
        mediaCodec = MediaCodec.createEncoderByType(Options.getInstance().video.mime);
        mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    } catch (Exception e) {
        e.printStackTrace();
        if (mediaCodec != null) {
            // if configure() failed the codec was never started; on some API levels stop()
            // may throw here, and release() alone would be the safer cleanup
            mediaCodec.stop();
            mediaCodec.release();
            mediaCodec = null;
        }
    }
    return mediaCodec;
}
 
Author: wuyisheng, Project: libRtmp, Lines: 34, Source: AndroidUntil.java
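
A brief usage sketch for an encoder created this way: because the format requests COLOR_FormatSurface, frames are normally delivered through an input Surface rather than through input buffers. The calling code below is illustrative only and is not part of the libRtmp project.

// Hypothetical caller: wire the returned encoder to an input Surface and start encoding.
MediaCodec encoder = AndroidUntil.getVideoMediaCodec();
if (encoder != null) {
    // Must be called after configure() and before start().
    Surface inputSurface = encoder.createInputSurface();
    encoder.start();
    // ... render camera/GL frames into inputSurface and drain the encoder output ...
    encoder.stop();
    encoder.release();
}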

Example 7: queueInputBuffer

import android.media.MediaCodec; // import the required package/class
/**
 * Copy audio from the recorder into the encoder.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private int queueInputBuffer(MediaCodec codec, ByteBuffer[] inputBuffers, int index, SpeechRecord speechRecord) {
    if (speechRecord == null || speechRecord.getRecordingState() != SpeechRecord.RECORDSTATE_RECORDING) {
        return -1;
    }

    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        ByteBuffer inputBuffer = inputBuffers[index];
        inputBuffer.clear();
        int size = inputBuffer.limit();
        byte[] buffer = new byte[size];
        int status = read(speechRecord, buffer);
        if (status < 0) {
            handleError("status = " + status);
            return -1;
        }
        inputBuffer.put(buffer);
        codec.queueInputBuffer(index, 0, size, 0, 0);
        return size;
    }
    return -1;
}
 
Author: vaibhavs4424, Project: AI-Powered-Intelligent-Banking-Platform, Lines: 26, Source: EncodedAudioRecorder.java

Example 8: MediaEncoder

import android.media.MediaCodec; // import the required package/class
public MediaEncoder(final MediaMuxerWrapper muxer, final MediaEncoderListener listener) {
    if (listener == null) throw new NullPointerException("MediaEncoderListener is null");
    if (muxer == null) throw new NullPointerException("MediaMuxerWrapper is null");
    mWeakMuxer = new WeakReference<MediaMuxerWrapper>(muxer);
    muxer.addEncoder(this);
    mListener = listener;
    synchronized (mSync) {
        // create BufferInfo here for effectiveness (to reduce GC)
        mBufferInfo = new MediaCodec.BufferInfo();
        // start the worker thread and wait until it is ready
        new Thread(this, getClass().getSimpleName()).start();
        try {
            mSync.wait();
        } catch (final InterruptedException e) {
            // ignored: construction simply proceeds if the wait is interrupted
        }
    }
}
 
Author: zhangyaqiang, Project: Fatigue-Detection, Lines: 18, Source: MediaEncoder.java

Example 9: recorderEncoderLoop

import android.media.MediaCodec; // import the required package/class
/**
 * Reads bytes from the given recorder and encodes them with the given encoder.
 * Uses the (deprecated) Synchronous Processing using Buffer Arrays.
 * <p/>
 * Encoders (or codecs that generate compressed data) will create and return the codec specific
 * data before any valid output buffer in output buffers marked with the codec-config flag.
 * Buffers containing codec-specific-data have no meaningful timestamps.
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void recorderEncoderLoop(MediaCodec codec, SpeechRecord speechRecord) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        codec.start();
        // Getting some buffers (e.g. 4 of each) to communicate with the codec
        ByteBuffer[] codecInputBuffers = codec.getInputBuffers();
        ByteBuffer[] codecOutputBuffers = codec.getOutputBuffers();
        Log.i("input buffers " + codecInputBuffers.length + "; output buffers: " + codecOutputBuffers.length);
        boolean doneSubmittingInput = false;
        int numRetriesDequeueOutputBuffer = 0;
        int index;
        while (true) {
            if (!doneSubmittingInput) {
                index = codec.dequeueInputBuffer(DEQUEUE_TIMEOUT);
                if (index >= 0) {
                    int size = queueInputBuffer(codec, codecInputBuffers, index, speechRecord);
                    if (size == -1) {
                        codec.queueInputBuffer(index, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                        Log.i("enc: in: EOS");
                        doneSubmittingInput = true;
                    } else {
                        Log.i("enc: in: " + size);
                        mNumBytesSubmitted += size;
                    }
                } else {
                    Log.i("enc: in: timeout, will try again");
                }
            }
            MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
            index = codec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
            Log.i("enc: out: flags/index: " + info.flags + "/" + index);
            if (index == MediaCodec.INFO_TRY_AGAIN_LATER) {
                Log.i("enc: out: INFO_TRY_AGAIN_LATER: " + numRetriesDequeueOutputBuffer);
                if (++numRetriesDequeueOutputBuffer > MAX_NUM_RETRIES_DEQUEUE_OUTPUT_BUFFER) {
                    break;
                }
            } else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat format = codec.getOutputFormat();
                Log.i("enc: out: INFO_OUTPUT_FORMAT_CHANGED: " + format.toString());
            } else if (index == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                codecOutputBuffers = codec.getOutputBuffers();
                Log.i("enc: out: INFO_OUTPUT_BUFFERS_CHANGED");
            } else {
                dequeueOutputBuffer(codec, codecOutputBuffers, index, info);
                mNumBytesDequeued += info.size;
                if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                    Log.i("enc: out: EOS");
                    break;
                }
            }
        }
        codec.stop();
        codec.release();
    }
}
 
Author: vaibhavs4424, Project: AI-Powered-Intelligent-Banking-Platform, Lines: 64, Source: EncodedAudioRecorder.java
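
The loop above uses the buffer-array API that was deprecated in API 21. For comparison, here is a minimal sketch of the same output-drain step using the newer per-index accessors; the codec and DEQUEUE_TIMEOUT names are taken from the snippet above, everything else is illustrative.

// API 21+ drain step: no cached buffer arrays, ask the codec for the buffer by index.
MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
int index = codec.dequeueOutputBuffer(info, DEQUEUE_TIMEOUT);
if (index >= 0) {
    ByteBuffer output = codec.getOutputBuffer(index);     // replaces codecOutputBuffers[index]
    byte[] encoded = new byte[info.size];
    output.position(info.offset);
    output.get(encoded, 0, info.size);
    codec.releaseOutputBuffer(index, false);
    // ... append `encoded` to the recording, check BUFFER_FLAG_END_OF_STREAM ...
} else if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
    MediaFormat newFormat = codec.getOutputFormat();       // INFO_OUTPUT_BUFFERS_CHANGED no longer occurs on API 21+
}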

Example 10: addSample

import android.media.MediaCodec; // import the required package/class
public void addSample(long offset, MediaCodec.BufferInfo bufferInfo) {
    long delta = bufferInfo.presentationTimeUs - lastPresentationTimeUs;
    if (delta < 0) {
        return;
    }
    boolean isSyncFrame = !isAudio && (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
    samples.add(new Sample(offset, bufferInfo.size));
    if (syncSamples != null && isSyncFrame) {
        syncSamples.add(samples.size());
    }

    delta = (delta * timeScale + 500000L) / 1000000L;
    lastPresentationTimeUs = bufferInfo.presentationTimeUs;
    if (!first) {
        sampleDurations.add(sampleDurations.size() - 1, delta);
        duration += delta;
    }
    first = false;
}
 
Author: pooyafaroka, Project: PlusGram, Lines: 20, Source: Track.java

Example 11: MediaCodecRenderer

import android.media.MediaCodec; // import the required package/class
/**
 * @param trackType The track type that the renderer handles. One of the {@code C.TRACK_TYPE_*}
 *     constants defined in {@link C}.
 * @param mediaCodecSelector A decoder selector.
 * @param drmSessionManager For use with encrypted media. May be null if support for encrypted
 *     media is not required.
 * @param playClearSamplesWithoutKeys Encrypted media may contain clear (un-encrypted) regions.
 *     For example a media file may start with a short clear region so as to allow playback to
 *     begin in parallel with key acquisition. This parameter specifies whether the renderer is
 *     permitted to play clear regions of encrypted media files before {@code drmSessionManager}
 *     has obtained the keys necessary to decrypt encrypted regions of the media.
 */
public MediaCodecRenderer(int trackType, MediaCodecSelector mediaCodecSelector,
    DrmSessionManager<FrameworkMediaCrypto> drmSessionManager,
    boolean playClearSamplesWithoutKeys) {
  super(trackType);
  Assertions.checkState(Util.SDK_INT >= 16);
  this.mediaCodecSelector = Assertions.checkNotNull(mediaCodecSelector);
  this.drmSessionManager = drmSessionManager;
  this.playClearSamplesWithoutKeys = playClearSamplesWithoutKeys;
  buffer = new DecoderInputBuffer(DecoderInputBuffer.BUFFER_REPLACEMENT_MODE_DISABLED);
  flagsOnlyBuffer = DecoderInputBuffer.newFlagsOnlyInstance();
  formatHolder = new FormatHolder();
  decodeOnlyPresentationTimestamps = new ArrayList<>();
  outputBufferInfo = new MediaCodec.BufferInfo();
  codecReconfigurationState = RECONFIGURATION_STATE_NONE;
  codecReinitializationState = REINITIALIZATION_STATE_NONE;
}
 
Author: sanjaysingh1990, Project: Exoplayer2Radio, Lines: 29, Source: MediaCodecRenderer.java

Example 12: handleCodecOutput

import android.media.MediaCodec; // import the required package/class
private void handleCodecOutput(MediaCodec mediaCodec,
                               ByteBuffer[] codecOutputBuffers,
                               MediaCodec.BufferInfo bufferInfo,
                               OutputStream outputStream)
    throws IOException
{
  int codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);

  while (codecOutputBufferIndex != MediaCodec.INFO_TRY_AGAIN_LATER) {
    if (codecOutputBufferIndex >= 0) {
      ByteBuffer encoderOutputBuffer = codecOutputBuffers[codecOutputBufferIndex];

      encoderOutputBuffer.position(bufferInfo.offset);
      encoderOutputBuffer.limit(bufferInfo.offset + bufferInfo.size);

      if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != MediaCodec.BUFFER_FLAG_CODEC_CONFIG) {
        byte[] header = createAdtsHeader(bufferInfo.size - bufferInfo.offset);


        outputStream.write(header);

        byte[] data = new byte[encoderOutputBuffer.remaining()];
        encoderOutputBuffer.get(data);
        outputStream.write(data);
      }

      encoderOutputBuffer.clear();

      mediaCodec.releaseOutputBuffer(codecOutputBufferIndex, false);
    } else if (codecOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
      codecOutputBuffers = mediaCodec.getOutputBuffers();
    }

    codecOutputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
  }

}
 
Author: XecureIT, Project: PeSanKita-android, Lines: 38, Source: AudioCodec.java
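
createAdtsHeader() is referenced above but not included in the snippet. When raw AAC frames are written to a stream, each frame needs a 7-byte ADTS header; the sketch below shows a commonly used construction, assuming AAC-LC, 44.1 kHz, mono, so the actual project may use different parameter values.

// Sketch of a 7-byte ADTS header for an AAC-LC frame (no CRC).
// profile/freqIdx/chanCfg are assumptions; adjust them to match the encoder configuration.
private byte[] createAdtsHeader(int packetLength) {
    int frameLength = packetLength + 7;   // payload plus 7-byte header
    int profile = 2;                      // 2 = AAC LC audio object type (stored below as profile - 1)
    int freqIdx = 4;                      // 4 = 44100 Hz in the ADTS sampling-frequency table
    int chanCfg = 1;                      // 1 = mono

    byte[] header = new byte[7];
    header[0] = (byte) 0xFF;                                               // syncword (high bits)
    header[1] = (byte) 0xF1;                                               // syncword low bits, MPEG-4, no CRC
    header[2] = (byte) (((profile - 1) << 6) + (freqIdx << 2) + (chanCfg >> 2));
    header[3] = (byte) (((chanCfg & 3) << 6) + (frameLength >> 11));
    header[4] = (byte) ((frameLength & 0x7FF) >> 3);
    header[5] = (byte) (((frameLength & 7) << 5) + 0x1F);                  // buffer fullness (all ones)
    header[6] = (byte) 0xFC;
    return header;
}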

Example 13: dequeueOutputBuffer

import android.media.MediaCodec; // import the required package/class
/**
 * Save the encoded (output) buffer into the complete encoded recording.
 * TODO: copy directly (without the intermediate byte array)
 */
@TargetApi(Build.VERSION_CODES.JELLY_BEAN)
private void dequeueOutputBuffer(MediaCodec codec, ByteBuffer[] outputBuffers, int index, MediaCodec.BufferInfo info) {
    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.JELLY_BEAN) {
        ByteBuffer buffer = outputBuffers[index];
        Log.i("size/remaining: " + info.size + "/" + buffer.remaining());
        if (info.size <= buffer.remaining()) {
            final byte[] bufferCopied = new byte[info.size];
            buffer.get(bufferCopied); // TODO: catch BufferUnderflow
            // TODO: do we need to clear?
            // on N5: always size == remaining(), clearing is not needed
            // on SGS2: remaining decreases until it becomes less than size, which results in BufferUnderflow
            // (but SGS2 records only zeros anyway)
            //buffer.clear();
            codec.releaseOutputBuffer(index, false);
            addEncoded(bufferCopied);
            if (Log.DEBUG) {
                AudioUtils.showSomeBytes("out", bufferCopied);
            }
        } else {
            Log.e("size > remaining");
            codec.releaseOutputBuffer(index, false);
        }
    }
}
 
Author: vaibhavs4424, Project: AI-Powered-Intelligent-Banking-Platform, Lines: 29, Source: EncodedAudioRecorder.java

Example 14: AudioEncoderCore

import android.media.MediaCodec; // import the required package/class
public AudioEncoderCore(MMediaMuxer MMediaMuxer) throws IOException {
    super(MMediaMuxer);
    final MediaFormat audioFormat = MediaFormat.createAudioFormat(MIME_TYPE, SAMPLE_RATE, 1);
    audioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_MASK, AudioFormat.CHANNEL_IN_MONO);
    audioFormat.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
    audioFormat.setInteger(MediaFormat.KEY_CHANNEL_COUNT, 1);
//  audioFormat.setLong(MediaFormat.KEY_MAX_INPUT_SIZE, inputFile.length());
//  audioFormat.setLong(MediaFormat.KEY_DURATION, (long) durationInMs);
    if (VERBOSE) Log.i(TAG, "format: " + audioFormat);
    mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
    mEncoder.configure(audioFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    mEncoder.start();
    if (mAudioThread == null) {
        mAudioThread = new AudioThread();
        mAudioThread.start();
        capturing = true;
        stopped = false;
    }
}
 
Author: zhangyaqiang, Project: Fatigue-Detection, Lines: 21, Source: AudioEncoderCore.java
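
The AudioThread started above is not part of the snippet. Below is a minimal sketch of how such a thread commonly feeds PCM from an AudioRecord into the encoder's input buffers on API 21+; mAudioRecord, the buffer size, and the timestamp scheme are assumptions for illustration, not code from the Fatigue-Detection project.

// Hypothetical capture loop: read PCM from an AudioRecord and queue it into mEncoder.
private class AudioThread extends Thread {
    @Override
    public void run() {
        byte[] pcm = new byte[2048];
        while (capturing) {
            int read = mAudioRecord.read(pcm, 0, pcm.length);   // mAudioRecord: a started AudioRecord (assumed)
            if (read <= 0) continue;
            int index = mEncoder.dequeueInputBuffer(10_000);    // 10 ms timeout
            if (index < 0) continue;
            ByteBuffer input = mEncoder.getInputBuffer(index);  // API 21+ accessor
            input.clear();
            input.put(pcm, 0, read);
            long ptsUs = System.nanoTime() / 1000;              // simple wall-clock presentation time
            mEncoder.queueInputBuffer(index, 0, read, ptsUs, 0);
        }
        // Signal end of stream so the drain loop can finish.
        int eosIndex = mEncoder.dequeueInputBuffer(10_000);
        if (eosIndex >= 0) {
            mEncoder.queueInputBuffer(eosIndex, 0, 0, System.nanoTime() / 1000,
                    MediaCodec.BUFFER_FLAG_END_OF_STREAM);
        }
    }
}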

Example 15: buildRenderers

import android.media.MediaCodec; // import the required package/class
@Override
public void buildRenderers(VideoPlayer player) {
    Allocator allocator = new DefaultAllocator(BUFFER_SEGMENT_SIZE);
    Handler mainHandler = player.getMainHandler();

    TrackRenderer[] renderers = new TrackRenderer[RENDERER_COUNT];
    DataSource dataSource = new DefaultUriDataSource(context, userAgent);
    ExtractorSampleSource sampleSource = new ExtractorSampleSource(uri, dataSource, allocator, BUFFER_SEGMENT_COUNT * BUFFER_SEGMENT_SIZE, mainHandler, null, 0);
    renderers[TYPE_VIDEO] = new MediaCodecVideoTrackRenderer(context, sampleSource, MediaCodecSelector.DEFAULT, MediaCodec.VIDEO_SCALING_MODE_SCALE_TO_FIT, 5000, mainHandler, player, 50) {
        @Override
        protected void doSomeWork(long positionUs, long elapsedRealtimeUs, boolean sourceIsReady) throws ExoPlaybackException {
            super.doSomeWork(positionUs, elapsedRealtimeUs, sourceIsReady);
        }
    };
    renderers[TYPE_AUDIO] = new MediaCodecAudioTrackRenderer(sampleSource, MediaCodecSelector.DEFAULT, null, true, mainHandler, null, AudioCapabilities.getCapabilities(context), AudioManager.STREAM_MUSIC);
    player.onRenderers(renderers);
}
 
Author: pooyafaroka, Project: PlusGram, Lines: 18, Source: VideoPlayer.java


Note: The android.media.MediaCodec examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are taken from open-source projects contributed by their respective authors; copyright of the source code remains with the original authors, and any distribution or use should follow the corresponding project's license. Please do not reproduce this article without permission.