Java NV21Convertor.convert Method Code Examples

This article collects typical usage examples of the Java method net.majorkernelpanic.streaming.hw.NV21Convertor.convert. If you are wondering what NV21Convertor.convert does, how to call it, or what real-world uses look like, the selected examples below should help. You can also explore further usage examples of net.majorkernelpanic.streaming.hw.NV21Convertor, the class this method belongs to.


Below are 4 code examples of the NV21Convertor.convert method, sorted by popularity by default.
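All four examples share the same core pattern: an NV21Convertor obtained from EncoderDebugger rewrites an NV21 camera preview frame into the color format expected by the hardware H.264 encoder, writing directly into a MediaCodec input buffer. The following is a minimal sketch of that pattern, distilled from the examples below; the helper method name and its parameters are illustrative assumptions, while convert(byte[], ByteBuffer), getBufferSize() and the MediaCodec calls are taken from the examples themselves.

import java.nio.ByteBuffer;
import android.media.MediaCodec;
import net.majorkernelpanic.streaming.hw.NV21Convertor;

// Illustrative helper (not part of the library): feeds one NV21 preview frame
// into an already-configured and started MediaCodec encoder.
void encodeFrame(NV21Convertor convertor, MediaCodec codec, byte[] nv21Frame, long ptsUs) {
	ByteBuffer[] inputBuffers = codec.getInputBuffers(); // deprecated API, but what the examples use
	int bufferIndex = codec.dequeueInputBuffer(500000);  // wait up to 500 ms for a free input buffer
	if (bufferIndex >= 0) {
		inputBuffers[bufferIndex].clear();
		// Rewrites the NV21 data into the encoder's color format, advancing the buffer position
		convertor.convert(nv21Frame, inputBuffers[bufferIndex]);
		codec.queueInputBuffer(bufferIndex, 0, inputBuffers[bufferIndex].position(), ptsUs, 0);
	}
}

Note that the preview callback buffers handed to the camera must be large enough for a full frame, which is why every example pre-allocates its callback buffers with convertor.getBufferSize() bytes.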

Example 1: encodeWithMediaCodecMethod1

import net.majorkernelpanic.streaming.hw.NV21Convertor; // import of the package/class this method depends on
/**
 * Video encoding is done by a MediaCodec.
 */
@SuppressLint("NewApi")
protected void encodeWithMediaCodecMethod1() throws RuntimeException, IOException {

	Log.d(TAG,"Video encoded using the MediaCodec API with a buffer");

	// Updates the parameters of the camera if needed
	createCamera();
	updateCamera();

	// Estimates the framerate of the camera
	measureFramerate();

	// Starts the preview if needed
	if (!mPreviewStarted) {
		try {
			mCamera.startPreview();
			mPreviewStarted = true;
		} catch (RuntimeException e) {
			destroyCamera();
			throw e;
		}
	}

	EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
	final NV21Convertor convertor = debugger.getNV21Convertor();

	mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
	MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
	mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
	mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);	
	mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,debugger.getEncoderColorFormat());
	mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
	mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
	mMediaCodec.start();

	Camera.PreviewCallback callback = new Camera.PreviewCallback() {
		long now = System.nanoTime()/1000, oldnow = now, i=0;
		ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
		@Override
		public void onPreviewFrame(byte[] data, Camera camera) {
			oldnow = now;
			now = System.nanoTime()/1000;
			if (i++>3) {
				i = 0;
				//Log.d(TAG,"Measured: "+1000000L/(now-oldnow)+" fps.");
			}
			try {
				int bufferIndex = mMediaCodec.dequeueInputBuffer(500000);
				if (bufferIndex>=0) {
					inputBuffers[bufferIndex].clear();
					convertor.convert(data, inputBuffers[bufferIndex]);
					mMediaCodec.queueInputBuffer(bufferIndex, 0, inputBuffers[bufferIndex].position(), now, 0);
				} else {
					Log.e(TAG,"No buffer available !");
				}
			} finally {
				mCamera.addCallbackBuffer(data);
			}				
		}
	};

	for (int i=0;i<10;i++) mCamera.addCallbackBuffer(new byte[convertor.getBufferSize()]);
	mCamera.setPreviewCallbackWithBuffer(callback);

	// The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
	mPacketizer.setDestination(mDestination, mRtpPort, mRtcpPort);
	mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
	mPacketizer.start();

	mStreaming = true;

}
 
Developer: ghazi94, Project: Android_CCTV, Lines: 76, Source: VideoStream.java

Example 2: encodeWithMediaCodecMethod1

import net.majorkernelpanic.streaming.hw.NV21Convertor; // import of the package/class this method depends on
/**
 * Video encoding is done by a MediaCodec.
 */
@SuppressLint("NewApi")
protected void encodeWithMediaCodecMethod1() throws RuntimeException, IOException {

	Log.d(TAG,"Video encoded using the MediaCodec API with a buffer");

	// Updates the parameters of the camera if needed
	createCamera();
	updateCamera();

	// Estimates the frame rate of the camera
	measureFramerate();

	// Starts the preview if needed
	if (!mPreviewStarted) {
		try {
			mCamera.startPreview();
			mPreviewStarted = true;
		} catch (RuntimeException e) {
			destroyCamera();
			throw e;
		}
	}

	EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
	final NV21Convertor convertor = debugger.getNV21Convertor();

	mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
	MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
	mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
	mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);	
	mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,debugger.getEncoderColorFormat());
	mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
	mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
	mMediaCodec.start();

	Camera.PreviewCallback callback = new Camera.PreviewCallback() {
		long now = System.nanoTime()/1000, oldnow = now, i=0;
		ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
		@Override
		public void onPreviewFrame(byte[] data, Camera camera) {
			oldnow = now;
			now = System.nanoTime()/1000;
			if (i++>3) {
				i = 0;
				//Log.d(TAG,"Measured: "+1000000L/(now-oldnow)+" fps.");
			}
			try {
				int bufferIndex = mMediaCodec.dequeueInputBuffer(500000);
				if (bufferIndex>=0) {
					inputBuffers[bufferIndex].clear();
					if (data == null) Log.e(TAG,"Symptom of the \"Callback buffer was to small\" problem...");
					else convertor.convert(data, inputBuffers[bufferIndex]);
					mMediaCodec.queueInputBuffer(bufferIndex, 0, inputBuffers[bufferIndex].position(), now, 0);
				} else {
					Log.e(TAG,"No buffer available !");
				}
			} finally {
				mCamera.addCallbackBuffer(data);
			}				
		}
	};
	
	for (int i=0;i<10;i++) mCamera.addCallbackBuffer(new byte[convertor.getBufferSize()]);
	mCamera.setPreviewCallbackWithBuffer(callback);

	// The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
	mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
	mPacketizer.start();

	mStreaming = true;

}
 
Developer: hypeapps, Project: Endoscope, Lines: 76, Source: VideoStream.java

Example 3: encodeWithMediaCodecMethod1

import net.majorkernelpanic.streaming.hw.NV21Convertor; // import of the package/class this method depends on
/**
 * Video encoding is done by a MediaCodec.
 */
@SuppressLint("NewApi")
protected void encodeWithMediaCodecMethod1() throws RuntimeException, IOException {

	Log.d(TAG,"Video encoded using the MediaCodec API with a buffer");

	// Updates the parameters of the camera if needed
	createCamera();
	updateCamera();

	// Estimates the framerate of the camera
	measureFramerate();

	// Starts the preview if needed
	if (!mPreviewStarted) {
		try {
			mCamera.startPreview();
			mPreviewStarted = true;
		} catch (RuntimeException e) {
			destroyCamera();
			throw e;
		}
	}

	EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
	final NV21Convertor convertor = debugger.getNV21Convertor();

	mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
	MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
	mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
	mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);	
	mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,debugger.getEncoderColorFormat());
	mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
	mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
	mMediaCodec.start();

	Camera.PreviewCallback callback = new Camera.PreviewCallback() {
		long now = System.nanoTime()/1000, oldnow = now, i=0;
		ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
		@Override
		public void onPreviewFrame(byte[] data, Camera camera) {
			oldnow = now;
			now = System.nanoTime()/1000;
			if (i++>3) {
				i = 0;
				//Log.d(TAG,"Measured: "+1000000L/(now-oldnow)+" fps.");
			}
			try {
				int bufferIndex = mMediaCodec.dequeueInputBuffer(500000);
				if (bufferIndex>=0) {
					inputBuffers[bufferIndex].clear();
					if (data == null) Log.e(TAG,"Symptom of the \"Callback buffer was to small\" problem...");
					else convertor.convert(data, inputBuffers[bufferIndex]);
					mMediaCodec.queueInputBuffer(bufferIndex, 0, inputBuffers[bufferIndex].position(), now, 0);
				} else {
					Log.e(TAG,"No buffer available !");
				}
			} finally {
				mCamera.addCallbackBuffer(data);
			}				
		}
	};
	
	for (int i=0;i<10;i++) mCamera.addCallbackBuffer(new byte[convertor.getBufferSize()]);
	mCamera.setPreviewCallbackWithBuffer(callback);

	// The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
	mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
	mPacketizer.start();

	mStreaming = true;

}
 
Developer: Oo-Dev, Project: OoDroid2, Lines: 76, Source: VideoStream.java

Example 4: encodeWithMediaCodecMethod1

import net.majorkernelpanic.streaming.hw.NV21Convertor; // import of the package/class this method depends on
/**
 * Video encoding is done by a MediaCodec.
 */
@SuppressLint("NewApi")
protected void encodeWithMediaCodecMethod1() throws RuntimeException, IOException {

	Log.d(TAG,"Video encoded using the MediaCodec API with a buffer");

	// Updates the parameters of the camera if needed
	createCamera();
	updateCamera();

	// Estimates the framerate of the camera
	measureFramerate();

	// Starts the preview if needed
	if (!mPreviewStarted) {
		try {
			mCamera.startPreview();
			mPreviewStarted = true;
		} catch (RuntimeException e) {
			destroyCamera();
			throw e;
		}
	}

	EncoderDebugger debugger = EncoderDebugger.debug(mSettings, mQuality.resX, mQuality.resY);
	final NV21Convertor convertor = debugger.getNV21Convertor();

	mMediaCodec = MediaCodec.createByCodecName(debugger.getEncoderName());
	MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", mQuality.resX, mQuality.resY);
	mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, mQuality.bitrate);
	mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, mQuality.framerate);	
	mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,debugger.getEncoderColorFormat());
	mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
	mMediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
	mMediaCodec.start();

	Camera.PreviewCallback callback = new Camera.PreviewCallback() {
		long now = System.nanoTime()/1000, oldnow = now, i=0;
		ByteBuffer[] inputBuffers = mMediaCodec.getInputBuffers();
		@Override
		public void onPreviewFrame(byte[] data, Camera camera) {
			oldnow = now;
			now = System.nanoTime()/1000;
			if (i++>3) {
				i = 0;
				//Log.d(TAG,"Measured: "+1000000L/(now-oldnow)+" fps.");
			}
			try {
				int bufferIndex = mMediaCodec.dequeueInputBuffer(500000);
				if (bufferIndex>=0) {
					inputBuffers[bufferIndex].clear();
					convertor.convert(data, inputBuffers[bufferIndex]);
					mMediaCodec.queueInputBuffer(bufferIndex, 0, inputBuffers[bufferIndex].position(), now, 0);
				} else {
					Log.e(TAG,"No buffer available !");
				}
			} finally {
				mCamera.addCallbackBuffer(data);
			}				
		}
	};

	for (int i=0;i<10;i++) mCamera.addCallbackBuffer(new byte[convertor.getBufferSize()]);
	mCamera.setPreviewCallbackWithBuffer(callback);

	// The packetizer encapsulates the bit stream in an RTP stream and sends it over the network
	mPacketizer.setInputStream(new MediaCodecInputStream(mMediaCodec));
	mPacketizer.start();

	mStreaming = true;

}
 
Developer: mnhkahn, Project: cInterphone, Lines: 75, Source: VideoStream.java
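None of the four examples show how the encoding pipeline is shut down. As a hedged sketch, not taken from the projects above, a matching teardown could look like the following; the field names mirror the examples, and the mPacketizer.stop() call assumes the packetizer exposes such a method.

// Hedged sketch: one possible way to stop the pipeline started by encodeWithMediaCodecMethod1()
protected void stopMediaCodecEncoding() {
	mStreaming = false;
	if (mPacketizer != null) mPacketizer.stop();    // assumed: stop reading the encoder output
	if (mCamera != null) {
		mCamera.setPreviewCallbackWithBuffer(null); // detach the preview callback
		mCamera.stopPreview();
		mPreviewStarted = false;
	}
	if (mMediaCodec != null) {
		mMediaCodec.stop();                         // flush and stop the encoder
		mMediaCodec.release();
		mMediaCodec = null;
	}
}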


Note: The net.majorkernelpanic.streaming.hw.NV21Convertor.convert examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; copyright in the source code remains with the original authors. Please consult the corresponding project's license before distributing or using the code, and do not reproduce this article without permission.