This article collects typical usage examples of the Java method android.media.MediaFormat.createVideoFormat. If you are unsure what MediaFormat.createVideoFormat does or how to call it, the curated method examples below may help. You can also read further about the enclosing class android.media.MediaFormat.
Listed below are 15 code examples of MediaFormat.createVideoFormat, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
Example 1: VideoEncoderCore
import android.media.MediaFormat; // import the package/class this method depends on
/**
* Configures encoder and muxer state, and prepares the input Surface.
*/
public VideoEncoderCore(int width, int height, int bitRate, MMediaMuxer MMediaMuxer)
throws IOException {
super(MMediaMuxer);
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, calcBitRate(width,height));
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
if (VERBOSE) Log.d(TAG, "format: " + format);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = mEncoder.createInputSurface();
mEncoder.start();
}
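Example 1 hard-codes COLOR_FormatSurface and whatever resolution the caller passes in. Before committing to such a format it can be worth asking the platform whether any encoder actually accepts it. A minimal sketch, assuming API 21+ and the same "video/avc" MIME type as MIME_TYPE above (the class and method names here are made up for illustration):

import android.media.MediaCodecList;
import android.media.MediaFormat;

public final class EncoderProbe {
    // Returns the name of an encoder that accepts the requested resolution, or null
    // if none does. KEY_FRAME_RATE is deliberately left unset because, per the
    // platform docs, the format passed to findEncoderForFormat() must not contain a
    // frame rate on API 21.
    public static String findAvcEncoder(int width, int height) {
        MediaFormat probe = MediaFormat.createVideoFormat("video/avc", width, height);
        MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        return list.findEncoderForFormat(probe);
    }
}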
Example 2: getVideoMediaCodec
import android.media.MediaFormat; // import the package/class this method depends on
@TargetApi(21)
public static MediaCodec getVideoMediaCodec() {
int videoWidth = getVideoSize(Options.getInstance().video.width);
int videoHeight = getVideoSize(Options.getInstance().video.height);
MediaFormat format = MediaFormat.createVideoFormat(Options.getInstance().video.mime, videoWidth, videoHeight);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, Options.getInstance().video.maxBps * 1024);
int fps = Options.getInstance().video.fps;
//Set the camera preview frame rate
// if(BlackListHelper.deviceInFpsBlacklisted()) {
// SopCastLog.d(SopCastConstant.TAG, "Device in fps setting black list, so set mediacodec fps 15");
// fps = 15;
// }
format.setInteger(MediaFormat.KEY_FRAME_RATE, fps);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, Options.getInstance().video.ifi);
format.setInteger(MediaFormat.KEY_BITRATE_MODE, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_VBR);
// Note: the original source passes a bitrate-mode constant (BITRATE_MODE_CBR) as the
// KEY_COMPLEXITY value; this looks like a copy-paste mistake, since KEY_COMPLEXITY
// expects a value from the encoder's complexity range.
format.setInteger(MediaFormat.KEY_COMPLEXITY, MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);
MediaCodec mediaCodec = null;
try {
mediaCodec = MediaCodec.createEncoderByType(Options.getInstance().video.mime);
mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
} catch (Exception e) {
e.printStackTrace();
if (mediaCodec != null) {
mediaCodec.stop();
mediaCodec.release();
mediaCodec = null;
}
}
return mediaCodec;
}
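The last two setInteger calls above request a rate-control mode directly; before relying on VBR or CBR it is safer to check whether the encoder actually advertises support for it. A sketch under the assumption of API 21+ (the helper class, method name, and the "mime" parameter are illustrative, not part of the original project):

import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaFormat;
import java.io.IOException;

public final class BitrateModeHelper {
    // Requests CBR only after confirming the encoder supports it; otherwise the
    // format is left untouched and the encoder falls back to its default mode.
    public static void applyCbrIfSupported(MediaFormat format, String mime) throws IOException {
        MediaCodec codec = MediaCodec.createEncoderByType(mime);
        MediaCodecInfo.EncoderCapabilities caps =
                codec.getCodecInfo().getCapabilitiesForType(mime).getEncoderCapabilities();
        if (caps.isBitrateModeSupported(MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR)) {
            format.setInteger(MediaFormat.KEY_BITRATE_MODE,
                    MediaCodecInfo.EncoderCapabilities.BITRATE_MODE_CBR);
        }
        codec.release();
    }
}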
Example 3: configureDecoder
import android.media.MediaFormat; // import the package/class this method depends on
private static MediaCodec configureDecoder(ImageInfo info, int maxInputSize, Surface surface) {
if (mDecoderSupportedSize.getWidth() < info.size.getWidth() || mDecoderSupportedSize.getHeight() < info.size.getHeight()) {
Log.w(TAG, "HEVC image may exceed decoder capability");
}
try {
MediaCodec decoder = MediaCodec.createByCodecName(mDecoderName);
MediaFormat inputFormat = MediaFormat.createVideoFormat(
MediaFormat.MIMETYPE_VIDEO_HEVC, info.size.getWidth(), info.size.getHeight());
inputFormat.setInteger(MediaFormat.KEY_MAX_INPUT_SIZE, maxInputSize);
inputFormat.setByteBuffer("csd-0", info.paramset);
Log.d(TAG, "HEVC input-format=" + inputFormat);
decoder.configure(inputFormat, surface, null, 0);
return decoder;
} catch (IOException ex) {
throw new RuntimeException("no HEVC decoding support");
}
}
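mDecoderName and mDecoderSupportedSize are fields populated elsewhere in that class. A sketch of one way such values can be derived, assuming API 21+ (the class and method names below are hypothetical):

import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.util.Pair;
import android.util.Size;

public final class HevcDecoderProbe {
    // Picks the first HEVC decoder reported by the platform and reads the largest
    // frame size it advertises; returns null if no HEVC decoder is present.
    public static Pair<String, Size> findHevcDecoder() {
        MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (MediaCodecInfo info : list.getCodecInfos()) {
            if (info.isEncoder()) continue;
            for (String type : info.getSupportedTypes()) {
                if (!MediaFormat.MIMETYPE_VIDEO_HEVC.equalsIgnoreCase(type)) continue;
                MediaCodecInfo.VideoCapabilities caps =
                        info.getCapabilitiesForType(type).getVideoCapabilities();
                return Pair.create(info.getName(), new Size(
                        caps.getSupportedWidths().getUpper(),
                        caps.getSupportedHeights().getUpper()));
            }
        }
        return null;
    }
}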
Example 4: to
import android.media.MediaFormat; // import the package/class this method depends on
/** Create a {@link MpegEncoder} from this {@link Builder}. */
@NonNull
public final MpegEncoder to(@NonNull String path, int width, int height) {
final MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE,
calcBitRate(width * height, mFPS, mMotion));
format.setInteger(MediaFormat.KEY_FRAME_RATE, mFPS);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, mIFrame);
try {
return new MpegEncoder(this, format, path);
} catch (IOException exception) {
throw new RuntimeException(exception);
}
}
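calcBitRate is not shown in this example. A hypothetical stand-in following the common "pixels x fps x motion x 0.07" rule of thumb could look like the sketch below; the 0.07 constant is just a widely used heuristic, not something the framework mandates.

    // Hypothetical version of the calcBitRate(pixels, fps, motion) helper used above.
    static int calcBitRate(int pixels, int fps, int motionFactor) {
        // Promote to long before multiplying so large resolutions do not overflow int.
        return (int) (pixels * (long) fps * motionFactor * 0.07);
    }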
Example 5: Configuration
import android.media.MediaFormat; // import the package/class this method depends on
public Configuration(int width,int height){
mAudioFormat=MediaFormat.createAudioFormat("audio/mp4a-latm",48000,2);
mAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE,128000);
mAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, MediaCodecInfo.CodecProfileLevel.AACObjectLC);
mVideoFormat=MediaFormat.createVideoFormat("video/avc",width,height);
mVideoFormat.setInteger(MediaFormat.KEY_FRAME_RATE,24);
mVideoFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,1);
mVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE,width*height*5);
mVideoFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
}
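A sketch of how such a Configuration might be consumed, one encoder per stored format; the method below is an assumption added for illustration, not part of the original class:

    public MediaCodec createVideoEncoder() throws IOException {
        // The MIME string was baked into mVideoFormat by createVideoFormat above.
        MediaCodec codec = MediaCodec.createEncoderByType(mVideoFormat.getString(MediaFormat.KEY_MIME));
        codec.configure(mVideoFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        return codec;
    }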
Example 6: convertVideoConfigToFormat
import android.media.MediaFormat; // import the package/class this method depends on
protected MediaFormat convertVideoConfigToFormat(MediaConfig.Video config){
MediaFormat format=MediaFormat.createVideoFormat(config.mime,config.width,config.height);
format.setInteger(MediaFormat.KEY_BIT_RATE,config.bitrate);
format.setInteger(MediaFormat.KEY_FRAME_RATE,config.frameRate);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL,config.iframe);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
return format;
}
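MediaConfig.Video itself is not shown in this example. A minimal holder with the fields this converter reads might look like the following sketch; the default values are only placeholders:

    public static class Video {
        public String mime = "video/avc";
        public int width = 1280;
        public int height = 720;
        public int bitrate = 4_000_000;   // bits per second
        public int frameRate = 30;
        public int iframe = 1;            // key-frame interval in seconds
    }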
Example 7: VideoEncoderCore
import android.media.MediaFormat; // import the package/class this method depends on
/**
* Configures encoder and muxer state, and prepares the input Surface.
*/
public VideoEncoderCore(int width, int height, int bitRate, File outputFile)
throws IOException {
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
if (VERBOSE) Log.d(TAG, "format: " + format);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = mEncoder.createInputSurface();
mEncoder.start();
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
//
// We're not actually interested in multiplexing audio. We just want to convert
// the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
mMuxer = new MediaMuxer(outputFile.toString(), MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
mTrackIndex = -1;
mMuxerStarted = false;
}
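The comment above refers to the "Magic Goodies" that only become available once the encoder is running. A member-method sketch of that drain step (assuming API 21+ for MediaCodec.getOutputBuffer) shows where addTrack() and the muxer's start() actually happen:

    void drainEncoderOnce() {
        int index = mEncoder.dequeueOutputBuffer(mBufferInfo, 10_000 /* microseconds */);
        if (index == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
            // The encoder's output format now carries csd-0/csd-1; the muxer can start.
            mTrackIndex = mMuxer.addTrack(mEncoder.getOutputFormat());
            mMuxer.start();
            mMuxerStarted = true;
        } else if (index >= 0) {
            java.nio.ByteBuffer encoded = mEncoder.getOutputBuffer(index);
            if (mMuxerStarted && mBufferInfo.size > 0
                    && (mBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0) {
                mMuxer.writeSampleData(mTrackIndex, encoded, mBufferInfo);
            }
            mEncoder.releaseOutputBuffer(index, false);
        }
    }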
Example 8: getFormat
import android.media.MediaFormat; // import the package/class this method depends on
public static MediaFormat getFormat () {
MediaFormat mMediaFormat = MediaFormat.createVideoFormat(AccessConstants.FORMAT,
AccessConstants.PHONE_WIDTH, AccessConstants.PHONE_HEIGHT);
mMediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, AccessConstants.BITRATE);
mMediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, AccessConstants.FPS);
mMediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
mMediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
return mMediaFormat;
}
Example 9: prepareEncoder
import android.media.MediaFormat; // import the package/class this method depends on
private void prepareEncoder() throws IOException {
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
RecordScreenLogUtil.i(TAG, "created video format: " + format);
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mSurface = mEncoder.createInputSurface();
mEncoder.start();
}
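In a screen-recording setup the Surface created here is usually handed to a MediaProjection. The projection instance and dpi come from elsewhere, so the following is only a member-method sketch (API 21+; android.hardware.display and android.media.projection imports assumed):

    private VirtualDisplay createVirtualDisplay(MediaProjection projection, int dpi) {
        // Mirror the screen into the encoder's input Surface created in prepareEncoder().
        return projection.createVirtualDisplay("screen-record", mWidth, mHeight, dpi,
                DisplayManager.VIRTUAL_DISPLAY_FLAG_AUTO_MIRROR,
                mSurface, null /* callback */, null /* handler */);
    }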
Example 10: createVideoOutputFormat
import android.media.MediaFormat; // import the package/class this method depends on
@SuppressLint("LongLogTag")
@Override
public MediaFormat createVideoOutputFormat(MediaFormat inputFormat) {
int width = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
int height = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
int targetLonger = mScale * 16 * 16;
int targetShorter = mScale * 16 * 9;
int longer, shorter, outWidth, outHeight;
if (width >= height) {
longer = width;
shorter = height;
outWidth = targetLonger;
outHeight = targetShorter;
} else {
shorter = width;
longer = height;
outWidth = targetShorter;
outHeight = targetLonger;
}
if (longer * 9 != shorter * 16) {
throw new OutputFormatUnavailableException("This video is not 16:9, and is not able to transcode. (" + width + "x" + height + ")");
}
if (shorter <= targetShorter) {
Log.d(TAG, "This video's height is less or equal to " + targetShorter + ", pass-through. (" + width + "x" + height + ")");
return null;
}
MediaFormat format = MediaFormat.createVideoFormat("video/avc", outWidth, outHeight);
// From Nexus 4 Camera in 720p
format.setInteger(MediaFormat.KEY_BIT_RATE, mVideoBitrate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 3);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
return format;
}
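The transcoder above deliberately rejects anything that is not 16:9. Not part of the original code, but for reference, a sketch of scaling an arbitrary aspect ratio down to a target shorter edge while keeping both dimensions multiples of 16 (a common encoder alignment constraint):

    static int[] scaleTo16Aligned(int width, int height, int targetShorter) {
        int shorter = Math.min(width, height);
        if (shorter <= targetShorter) {
            return new int[] {width, height};   // already small enough: pass through
        }
        double scale = targetShorter / (double) shorter;
        int outWidth  = Math.max(16, (int) Math.round(width  * scale / 16.0) * 16);
        int outHeight = Math.max(16, (int) Math.round(height * scale / 16.0) * 16);
        return new int[] {outWidth, outHeight};
    }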
Example 11: getExportPreset960x540
import android.media.MediaFormat; // import the package/class this method depends on
@Deprecated
public static MediaFormat getExportPreset960x540() {
MediaFormat format = MediaFormat.createVideoFormat("video/avc", 960, 540);
format.setInteger(MediaFormat.KEY_BIT_RATE, 5500 * 1000);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT, MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_FRAME_RATE, 30);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
return format;
}
Example 12: checkMediaCodecVideoEncoderSupport
import android.media.MediaFormat; // import the package/class this method depends on
@TargetApi(MIN_API_LEVEL_VIDEO)
public static int checkMediaCodecVideoEncoderSupport(){
if(getApiLevel()<MIN_API_LEVEL_VIDEO){
Log.d(TAG, "checkMediaCodecVideoEncoderSupport: Min API is 18");
return CODEC_REQ_API_NOT_SATISFIED;
}
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE_VIDEO, TEST_WIDTH, TEST_HEIGHT);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, TEST_VIDEO_BIT_RATE);
format.setInteger(MediaFormat.KEY_FRAME_RATE, TEST_FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, TEST_IFRAME_INTERVAL);
MediaCodec mediaCodec = null; // initialized so the catch block can release it on failure
try {
mediaCodec = MediaCodec.createEncoderByType(MIME_TYPE_VIDEO);
mediaCodec.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mediaCodec.createInputSurface();
mediaCodec.start();
mediaCodec.stop();
mediaCodec.release();
mediaCodec = null;
} catch (Exception ex) {
Log.e(TAG, "Failed on creation of codec", ex);
// Release the partially initialized codec so the capability probe does not leak it.
if (mediaCodec != null) {
mediaCodec.release();
}
return CODEC_ERROR;
}
return CODEC_SUPPORTED;
}
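getApiLevel() is not shown in this example; presumably it just reads the SDK level, roughly like this:

    private static int getApiLevel() {
        return android.os.Build.VERSION.SDK_INT;
    }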
Example 13: clickStart
import android.media.MediaFormat; // import the package/class this method depends on
/**
* onClick handler for "start" button.
*
* We create as many codecs as we can and return without releasing them. The codecs
* will remain in use until the next GC.
*/
public void clickStart(@SuppressWarnings("unused") View unused) {
final String MIME_TYPE = "video/avc";
final int WIDTH = 320;
final int HEIGHT = 240;
final int BIT_RATE = 1000000;
final int FRAME_RATE = 15;
final int IFRAME_INTERVAL = 1;
final boolean START_CODEC = true;
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT);
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
Log.d(TAG, "format: " + format);
MediaCodec[] codecs = new MediaCodec[MAX_OPEN];
int i;
for (i = 0; i < MAX_OPEN; i++) {
try {
codecs[i] = MediaCodec.createEncoderByType(MIME_TYPE);
codecs[i].configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
if (START_CODEC) {
codecs[i].createInputSurface();
codecs[i].start();
}
} catch (Exception ex) {
Log.i(TAG, "Failed on creation of codec #" + i, ex);
break;
}
}
showCountDialog(i);
}
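If the codecs were released explicitly instead of being left for the GC (the test above leaks them on purpose), a cleanup pass over the same array would look like this sketch:

    for (int j = 0; j < i; j++) {
        if (codecs[j] != null) {
            codecs[j].release();   // release() is legal in any codec state
            codecs[j] = null;
        }
    }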
Example 14: prepareEncoder
import android.media.MediaFormat; // import the package/class this method depends on
/**
* Prepares the video encoder, muxer, and an input surface.
*/
private void prepareEncoder(File outputFile) throws IOException {
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, WIDTH, HEIGHT);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, BIT_RATE);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAMES_PER_SECOND);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
if (VERBOSE) Log.d(TAG, "format: " + format);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = mEncoder.createInputSurface();
mEncoder.start();
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
//
// We're not actually interested in multiplexing audio. We just want to convert
// the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
if (VERBOSE) Log.d(TAG, "output will go to " + outputFile);
mMuxer = new MediaMuxer(outputFile.toString(),
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
mTrackIndex = -1;
mMuxerStarted = false;
}
Example 15: VideoEncoderCore
import android.media.MediaFormat; // import the package/class this method depends on
/**
* Configures encoder and muxer state, and prepares the input Surface.
*/
public VideoEncoderCore(int width, int height, int bitRate, File outputFile)
throws IOException {
mBufferInfo = new MediaCodec.BufferInfo();
MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, width, height);
// Set some properties. Failing to specify some of these can cause the MediaCodec
// configure() call to throw an unhelpful exception.
format.setInteger(MediaFormat.KEY_COLOR_FORMAT,
MediaCodecInfo.CodecCapabilities.COLOR_FormatSurface);
format.setInteger(MediaFormat.KEY_BIT_RATE, bitRate);
format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
if (VERBOSE) Log.d(TAG, "format: " + format);
// Create a MediaCodec encoder, and configure it with our format. Get a Surface
// we can use for input and wrap it with a class that handles the EGL work.
mEncoder = MediaCodec.createEncoderByType(MIME_TYPE);
mEncoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
mInputSurface = mEncoder.createInputSurface();
mEncoder.start();
// Create a MediaMuxer. We can't add the video track and start() the muxer here,
// because our MediaFormat doesn't have the Magic Goodies. These can only be
// obtained from the encoder after it has started processing data.
//
// We're not actually interested in multiplexing audio. We just want to convert
// the raw H.264 elementary stream we get from MediaCodec into a .mp4 file.
mMuxer = new MediaMuxer(outputFile.toString(),
MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
mTrackIndex = -1;
mMuxerStarted = false;
}
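A matching teardown for a VideoEncoderCore like this one is sketched below (the original class most likely has its own release method that is simply not shown here): stop and release the encoder first, and only stop the muxer if it was actually started.

    public void release() {
        if (mEncoder != null) {
            mEncoder.stop();
            mEncoder.release();
            mEncoder = null;
        }
        if (mMuxer != null) {
            if (mMuxerStarted) {
                mMuxer.stop();   // stop() throws if the muxer never started, hence the guard
            }
            mMuxer.release();
            mMuxer = null;
        }
    }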