本文整理汇总了Java中org.jcodec.codecs.h264.H264Utils.encodeMOVPacket方法的典型用法代码示例。如果您正苦于以下问题:Java H264Utils.encodeMOVPacket方法的具体用法?Java H264Utils.encodeMOVPacket怎么用?Java H264Utils.encodeMOVPacket使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.jcodec.codecs.h264.H264Utils
的用法示例。
在下文中一共展示了H264Utils.encodeMOVPacket方法的11个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: encodeNativeFrame
import org.jcodec.codecs.h264.H264Utils; //导入方法依赖的package包/类
/**
 * Converts the given picture to the encoder's colour space, encodes it as an
 * H.264 frame and appends the packet to the MP4 video track.
 *
 * @param pic source picture in its native colour space
 * @throws IOException if the packet cannot be written to the track
 */
public void encodeNativeFrame(Picture pic) throws IOException {
    // Lazily allocate the conversion target in the encoder's preferred colour space.
    if (toEncode == null) {
        toEncode = Picture.create(pic.getWidth(), pic.getHeight(), encoder.getSupportedColorSpaces()[0]);
    }
    // Colour-space conversion; a frame that fails to convert is dropped
    // silently (preserves the original best-effort behaviour).
    try {
        transform.transform(pic, toEncode);
    } catch (Exception e) {
        return;
    }
    // Encode into the reusable output buffer.
    _out.clear();
    ByteBuffer frame = encoder.encodeFrame(toEncode, _out);
    // Strip SPS/PPS NAL units, then switch to MP4 (length-prefixed) framing.
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(frame, spsList, ppsList);
    H264Utils.encodeMOVPacket(frame);
    // One packet per frame: time scale 5, duration 1, every frame a sync sample.
    outTrack.addFrame(new MP4Packet(frame, frameNo, 5, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}
示例2: encodeNativeFrame
import org.jcodec.codecs.h264.H264Utils; //导入方法依赖的package包/类
/**
 * Converts a native-colour-space picture, encodes it with H.264 and appends
 * the resulting packet to the MP4 video track.
 *
 * @param pic source picture
 * @throws IOException if the packet cannot be written to the track
 */
public void encodeNativeFrame(Picture pic) throws IOException {
    // Allocate the conversion target once, sized to the incoming picture.
    if (toEncode == null) {
        toEncode = Picture.create(pic.getWidth(), pic.getHeight(), encoder.getSupportedColorSpaces()[0]);
    }
    // Convert into the encoder's colour space.
    transform.transform(pic, toEncode);
    // Encode into the shared output buffer.
    _out.clear();
    ByteBuffer frame = encoder.encodeFrame(toEncode, _out);
    // Remove SPS/PPS NAL units and re-frame the stream for MP4 storage.
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(frame, spsList, ppsList);
    H264Utils.encodeMOVPacket(frame);
    // Duration 1 at the configured time scale; every frame flagged as sync.
    outTrack.addFrame(new MP4Packet(frame, frameNo, timeScale, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}
示例3: encode
import org.jcodec.codecs.h264.H264Utils; //导入方法依赖的package包/类
/**
 * Encodes a BufferedImage as one H.264 frame and appends it to the MP4 track.
 *
 * @param img RGB image to encode
 * @throws IOException if the packet cannot be written to the track
 */
@Override
public void encode(BufferedImage img) throws IOException {
    // RGB -> 8-bit YUV420 conversion into the pre-allocated picture.
    JHVRgbToYuv420j8Bit.transform(img, toEncode);
    // Encode into the reusable output buffer.
    _out.clear();
    ByteBuffer frame = encoder.encodeFrame(toEncode, _out);
    // Strip SPS/PPS and switch the stream to MP4 (length-prefixed) framing.
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(frame, spsList, ppsList);
    H264Utils.encodeMOVPacket(frame);
    // Duration 1 at an fps-based time scale; every frame is a sync sample.
    outTrack.addFrame(new MP4Packet(frame, frameNo, fps, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}
示例4: encodeImage
import org.jcodec.codecs.h264.H264Utils; //导入方法依赖的package包/类
/**
 * Encodes a BufferedImage as one H.264 frame and appends it to the MP4 track.
 *
 * @param bi image to encode
 * @throws IOException if the packet cannot be written to the track
 */
public void encodeImage(BufferedImage bi) throws IOException {
    // Lazily create the YUV420 conversion target.
    if (toEncode == null) {
        toEncode = Picture.create(bi.getWidth(), bi.getHeight(), ColorSpace.YUV420);
    }
    // Zero all three planes so stale pixels never leak into this frame.
    for (int plane = 0; plane < 3; plane++) {
        Arrays.fill(toEncode.getData()[plane], 0);
    }
    transform.transform(AWTUtil.fromBufferedImage(bi), toEncode);
    // Encode into the shared output buffer.
    _out.clear();
    ByteBuffer frame = encoder.encodeFrame(_out, toEncode);
    // Collect SPS/PPS and convert the stream to MP4 framing in one call.
    spsList.clear();
    ppsList.clear();
    H264Utils.encodeMOVPacket(frame, spsList, ppsList);
    // Fixed 25 fps time scale, one tick per frame.
    outTrack.addFrame(new MP4Packet(frame, frameNo, 25, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}
示例5: encodeNativeFrame
import org.jcodec.codecs.h264.H264Utils; //导入方法依赖的package包/类
/**
 * Converts and H.264-encodes one picture, then appends the packet to the
 * MP4 video track at a fixed 25 fps time scale.
 *
 * @param pic source picture
 * @throws IOException if the packet cannot be written to the track
 */
public void encodeNativeFrame(Picture pic) throws IOException {
    // First call: allocate the conversion target in the encoder's colour space.
    if (toEncode == null) {
        toEncode = Picture.create(pic.getWidth(), pic.getHeight(), encoder.getSupportedColorSpaces()[0]);
    }
    // Convert into the encoder's colour space.
    transform.transform(pic, toEncode);
    // Encode into the reusable output buffer.
    _out.clear();
    ByteBuffer frame = encoder.encodeFrame(toEncode, _out);
    // Remove SPS/PPS NAL units and re-frame the stream for MP4 storage.
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(frame, spsList, ppsList);
    H264Utils.encodeMOVPacket(frame);
    // Time scale 25, duration 1 per frame, every frame a sync sample.
    outTrack.addFrame(new MP4Packet(frame, frameNo, 25, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}
示例6: encodeImage
import org.jcodec.codecs.h264.H264Utils; //导入方法依赖的package包/类
/**
 * Encodes an Android Bitmap as one H.264 frame and appends it to the MP4
 * video track, with a per-frame display duration.
 *
 * NOTE(review): unlike the sibling encode helpers, frameNo is not
 * incremented in this method — presumably it is advanced elsewhere
 * (perhaps by setTimeEachFrame); confirm against the enclosing class.
 *
 * @param bi            bitmap to encode
 * @param timeEachFrame display duration for this frame
 * @throws IOException if the packet cannot be written to the track
 */
public void encodeImage(Bitmap bi, int timeEachFrame) throws IOException {
    setTimeEachFrame(timeEachFrame);
    // Lazily create the YUV420 conversion target.
    if (toEncode == null) {
        toEncode = Picture.create(bi.getWidth(), bi.getHeight(), ColorSpace.YUV420);
    }
    // Clear all three planes so no stale pixels survive the conversion.
    for (int plane = 0; plane < 3; plane++) {
        Arrays.fill(toEncode.getData()[plane], 0);
    }
    transform.transform(fromBufferedImage(bi), toEncode);
    // Encode into the shared output buffer.
    _out.clear();
    ByteBuffer frame = encoder.encodeFrame(_out, toEncode);
    // Collect SPS/PPS and re-frame the stream for MP4 storage.
    spsList.clear();
    ppsList.clear();
    H264Utils.encodeMOVPacket(frame, spsList, ppsList);
    // timescale/duration together control how long each image is shown
    // (e.g. timescale 2 with duration 1 displays each image for 0.5 s).
    outTrack.addFrame(new MP4Packet(frame, frameNo, timescale, duration, frameNo, true, null, frameNo, 0));
}
示例7: encodeImage
import org.jcodec.codecs.h264.H264Utils; //导入方法依赖的package包/类
/**
 * Encodes a BufferedImage as one H.264 frame and appends it to the MP4 track.
 *
 * @param bi image to encode
 * @throws IOException if the packet cannot be written to the track
 */
public void encodeImage(BufferedImage bi) throws IOException {
    // Convert and encode in one pass into the reusable output buffer.
    _out.clear();
    ByteBuffer frame = encoder.encodeFrame(_out, makeFrame(bi));
    // Collect SPS/PPS and convert the stream to MP4 framing in one call.
    spsList.clear();
    ppsList.clear();
    H264Utils.encodeMOVPacket(frame, spsList, ppsList);
    // Fixed 25 fps time scale, one tick per frame, every frame a sync sample.
    outTrack.addFrame(new MP4Packet(frame, frameNo, 25, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}
示例8: transcodeFrame
import org.jcodec.codecs.h264.H264Utils; //导入方法依赖的package包/类
/**
 * Decodes one frame from src, re-encodes it as H.264 into dst and converts
 * the result to MP4 (length-prefixed) NAL framing.
 *
 * On encoder buffer overflow the bitrate is lowered in steps of 10 and the
 * frame retried until it fits or the rate floor (10) is reached; the
 * configured TARGET_RATE is restored afterwards.
 *
 * @param src encoded source frame, or null
 * @param dst destination buffer for the transcoded frame
 * @return dst, or null when src is null
 * @throws IOException if decoding or encoding fails
 */
public ByteBuffer transcodeFrame(ByteBuffer src, ByteBuffer dst) throws IOException {
    if (src == null) {
        return null;
    }
    Picture decoded = decoder.decodeFrame(src, pic0.getData());
    // First frame: allocate the encode-side picture and pick a colour transform.
    if (pic1 == null) {
        pic1 = Picture.create(decoded.getWidth(), decoded.getHeight(), encoder.getSupportedColorSpaces()[0]);
        transform = ColorUtil.getTransform(decoded.getColor(), encoder.getSupportedColorSpaces()[0]);
    }
    transform.transform(decoded, pic1);
    pic1.setCrop(new Rect(0, 0, thumbWidth, thumbHeight));
    // Retry with progressively lower bitrates while dst overflows.
    int rate = TARGET_RATE;
    while (true) {
        try {
            encoder.encodeFrame(pic1, dst);
            break;
        } catch (BufferOverflowException ex) {
            System.out.println("Abandon frame!!!");
            rate -= 10;
            rc.setRate(rate);
            if (rate <= 10) {
                break;
            }
        }
    }
    // Restore the nominal bitrate for subsequent frames.
    rc.setRate(TARGET_RATE);
    // Convert Annex B framing to MP4 length-prefixed framing in place.
    H264Utils.encodeMOVPacket(dst);
    return dst;
}
示例9: addFrame
import org.jcodec.codecs.h264.H264Utils; //导入方法依赖的package包/类
/**
 * Converts a picture, encodes it with H.264 and appends the packet to the
 * MP4 video track, allocating the working buffers on first use.
 *
 * @param pic source picture
 * @throws IOException if the packet cannot be written to the track
 */
public void addFrame(Picture pic) throws IOException {
    // First call: allocate the conversion target in the encoder's colour space.
    if (toEncode == null) {
        toEncode = Picture.create(pic.getWidth(), pic.getHeight(), encoder.getSupportedColorSpaces()[0]);
    }
    if (_out == null) {
        // Generously sized output buffer (6 bytes per pixel) so even a
        // worst-case encoded frame fits.
        _out = ByteBuffer.allocate(pic.getWidth() * pic.getHeight() * 6);
    }
    // Convert into the encoder's colour space.
    transform.transform(pic, toEncode);
    // Encode into the shared output buffer.
    _out.clear();
    ByteBuffer frame = encoder.encodeFrame(toEncode, _out);
    // Remove SPS/PPS NAL units and re-frame the stream for MP4 storage.
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(frame, spsList, ppsList);
    H264Utils.encodeMOVPacket(frame);
    // Time scale 25, duration 1 per frame, every frame a sync sample.
    outTrack.addFrame(new MP4Packet(frame, frameNo, 25, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}
示例10: encodeFrame
import org.jcodec.codecs.h264.H264Utils; //导入方法依赖的package包/类
/**
 * Adds a picture to the video stream: encodes it as an H.264 frame and
 * appends the MP4 packet to the output track.
 *
 * @param picture picture to compress and append
 * @throws IOException if the packet cannot be written to the track
 */
public void encodeFrame(Picture picture) throws IOException {
    // Optional per-frame debug logging.
    if (log) {
        Gdx.app.log("PixmapEncoder::encodeFrame()", "Compress frame " + frameNo + " w:" + encodePicture.getWidth() + " h:" + encodePicture.getHeight());
    }
    // Encode into the reusable output buffer.
    outBuffer.clear();
    ByteBuffer frame = encoder.encodeFrame(outBuffer, picture);
    // Collect SPS/PPS and convert the stream to MP4 framing in one call.
    spsList.clear();
    ppsList.clear();
    H264Utils.encodeMOVPacket(frame, spsList, ppsList);
    // Duration 1 at the configured frame rate; every frame is a sync sample.
    outTrack.addFrame(new MP4Packet(frame, frameNo, frameRate, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}
示例11: transcodeFrame
import org.jcodec.codecs.h264.H264Utils; //导入方法依赖的package包/类
/**
 * Transcodes one MPEG-2 frame from src into an H.264 MP4-framed frame in dst.
 *
 * On the first call the decode buffer (pic0) is sized from the source
 * stream; on encoder buffer overflow the bitrate is stepped down by 10 and
 * the frame retried until it fits or the rate floor (10) is reached, after
 * which the nominal TARGET_RATE is restored.
 *
 * @param src    encoded MPEG-2 source frame, or null
 * @param dst    destination buffer for the H.264 output
 * @param iframe whether to encode this frame as an intra frame
 * @param poc    picture order count passed through to the encoder
 * @return dst, or null when src is null
 * @throws IOException if decoding or encoding fails
 */
public ByteBuffer transcodeFrame(ByteBuffer src, ByteBuffer dst, boolean iframe, int poc) throws IOException {
    if (src == null)
        return null;
    if (pic0 == null) {
        // Size the decode target from the stream header, scaled down by
        // scaleFactor; duplicate() leaves src's position untouched.
        Size size = MPEGDecoder.getSize(src.duplicate());
        thumbWidth = size.getWidth() >> this.scaleFactor;
        thumbHeight = size.getHeight() >> this.scaleFactor;
        // Macroblock-aligned allocation with one extra row of macroblocks.
        // NOTE(review): (x + 8) >> 4 rounds to the nearest 16 rather than
        // strictly up — confirm this matches the decoder's output size.
        int mbW = (thumbWidth + 8) >> 4;
        int mbH = (thumbHeight + 8) >> 4;
        pic0 = Picture.create(mbW << 4, (mbH + 1) << 4, ColorSpace.YUV444);
    }
    Picture decoded = decoder.decodeFrame(src, pic0.getData());
    if (pic1 == null) {
        // First frame: allocate the encode-side picture and pick a colour
        // transform into the encoder's preferred colour space.
        pic1 = Picture.create(decoded.getWidth(), decoded.getHeight(), encoder.getSupportedColorSpaces()[0]);
        transform = ColorUtil.getTransform(decoded.getColor(), encoder.getSupportedColorSpaces()[0]);
    }
    Picture toEnc;
    if (transform != null) {
        transform.transform(decoded, pic1);
        toEnc = pic1;
    } else {
        // Colour spaces already match; encode the decoded picture directly.
        toEnc = decoded;
    }
    pic1.setCrop(new Rect(0, 0, thumbWidth, thumbHeight));
    // Retry with progressively lower bitrates while dst overflows.
    int rate = Mpeg2AVCTrack.TARGET_RATE;
    do {
        try {
            encoder.encodeFrame(toEnc, dst, iframe, poc);
            break;
        } catch (BufferOverflowException ex) {
            System.out.println("Abandon frame!!!");
            rate -= 10;
            rc.setRate(rate);
        }
    } while (rate > 10);
    // Restore the nominal bitrate for subsequent frames.
    rc.setRate(Mpeg2AVCTrack.TARGET_RATE);
    // Convert Annex B NAL framing to MP4 length-prefixed framing in place.
    H264Utils.encodeMOVPacket(dst);
    return dst;
}