This article collects typical usage examples of the Java class org.jcodec.codecs.h264.H264Utils. If you are unsure what H264Utils does or how it is used, the examples selected below may help.
The H264Utils class belongs to the org.jcodec.codecs.h264 package. The 15 code examples below are ordered roughly by popularity.
Example 1: encodeNativeFrame
import org.jcodec.codecs.h264.H264Utils; // import the required package/class
public void encodeNativeFrame(Picture pic) throws IOException {
    if (toEncode == null) {
        toEncode = Picture.create(pic.getWidth(), pic.getHeight(), encoder.getSupportedColorSpaces()[0]);
    }
    // Perform conversion
    try {
        transform.transform(pic, toEncode);
    } catch (Exception e) {
        return;
    }
    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(toEncode, _out);
    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(result, spsList, ppsList);
    H264Utils.encodeMOVPacket(result);
    // Add packet to video track
    outTrack.addFrame(new MP4Packet(result, frameNo, 5, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}
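These encodeNativeFrame examples rely on fields — encoder, transform, _out, spsList, ppsList, outTrack — that are initialized elsewhere in the enclosing class. As a rough orientation, a minimal sketch of that setup is given below; it assumes a JCodec release where H264Encoder exposes getSupportedColorSpaces(), and the factory and helper names (createH264Encoder, ColorUtil, Transform) may differ in other versions:

import org.jcodec.codecs.h264.H264Encoder;
import org.jcodec.common.model.ColorSpace;
import org.jcodec.scale.ColorUtil;
import org.jcodec.scale.Transform;

// Hypothetical field setup -- an assumption, not part of the original example
H264Encoder encoder = H264Encoder.createH264Encoder();         // encoder with default rate control
Transform transform = ColorUtil.getTransform(ColorSpace.RGB,
        encoder.getSupportedColorSpaces()[0]);                  // source color space -> encoder color space
ByteBuffer _out = ByteBuffer.allocate(width * height * 6);      // output buffer; width/height are placeholders
List<ByteBuffer> spsList = new ArrayList<ByteBuffer>();         // SPS NAL units collected by wipePS
List<ByteBuffer> ppsList = new ArrayList<ByteBuffer>();         // PPS NAL units collected by wipePS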
Example 2: transcode
import org.jcodec.codecs.h264.H264Utils; // import the required package/class
public List<ByteBuffer> transcode() throws IOException {
    H264Decoder decoder = new H264Decoder();
    decoder.addSps(avcC.getSpsList());
    decoder.addPps(avcC.getPpsList());
    Picture buf = Picture.create(mbW << 4, mbH << 4, ColorSpace.YUV420);
    Frame dec = null;
    // Decode the leading ('head') packets only to build up the decoder's reference frames
    for (VirtualPacket virtualPacket : head) {
        dec = decoder.decodeFrame(H264Utils.splitMOVPacket(virtualPacket.getData(), avcC), buf.getData());
    }
    H264Encoder encoder = new H264Encoder(rc);
    ByteBuffer tmp = ByteBuffer.allocate(frameSize);
    List<ByteBuffer> result = new ArrayList<ByteBuffer>();
    // Decode and re-encode the remaining ('tail') packets
    for (VirtualPacket pkt : tail) {
        dec = decoder.decodeFrame(H264Utils.splitMOVPacket(pkt.getData(), avcC), buf.getData());
        tmp.clear();
        ByteBuffer res = encoder.encodeFrame(dec, tmp);
        ByteBuffer out = ByteBuffer.allocate(frameSize);
        processFrame(res, out);
        result.add(out);
    }
    return result;
}
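The transcode() example assumes the enclosing class has already parsed the source track's avcC box and split its packets into head (decoded only to rebuild reference frames) and tail (re-encoded). A minimal sketch of obtaining avcC, following the same pattern as Examples 7 and 9 (the head/tail split itself is an assumption about the enclosing class):

// Sketch: obtain the AvcCBox describing the SPS/PPS of the source track
SampleEntry se = src.getSampleEntry();
AvcCBox avcC = H264Utils.parseAVCC((VideoSampleEntry) se);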
Example 3: encodeNativeFrame
import org.jcodec.codecs.h264.H264Utils; // import the required package/class
public void encodeNativeFrame(Picture pic) throws IOException {
    if (toEncode == null) {
        toEncode = Picture.create(pic.getWidth(), pic.getHeight(), encoder.getSupportedColorSpaces()[0]);
    }
    // Perform conversion
    transform.transform(pic, toEncode);
    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(toEncode, _out);
    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(result, spsList, ppsList);
    H264Utils.encodeMOVPacket(result);
    // Add packet to video track
    outTrack.addFrame(new MP4Packet(result, frameNo, timeScale, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}
Example 4: encode
import org.jcodec.codecs.h264.H264Utils; // import the required package/class
@Override
public void encode(BufferedImage img) throws IOException {
    JHVRgbToYuv420j8Bit.transform(img, toEncode);
    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(toEncode, _out);
    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(result, spsList, ppsList);
    H264Utils.encodeMOVPacket(result);
    // Add packet to video track
    outTrack.addFrame(new MP4Packet(result, frameNo, fps, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}
Example 5: encodeImage
import org.jcodec.codecs.h264.H264Utils; // import the required package/class
public void encodeImage(BufferedImage bi) throws IOException {
    if (toEncode == null) {
        toEncode = Picture.create(bi.getWidth(), bi.getHeight(), ColorSpace.YUV420);
    }
    // Perform conversion
    for (int i = 0; i < 3; i++) {
        Arrays.fill(toEncode.getData()[i], 0);
    }
    transform.transform(AWTUtil.fromBufferedImage(bi), toEncode);
    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(_out, toEncode);
    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.encodeMOVPacket(result, spsList, ppsList);
    // Add packet to video track
    outTrack.addFrame(new MP4Packet(result, frameNo, 25, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}
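This encodeImage variant follows JCodec's classic SequenceEncoder pattern. For context, here is a rough sketch of the constructor that would initialize the fields used above, assuming the 0.1.9-era API (the MP4Muxer constructor and the addTrack method are named differently in later releases):

// Hypothetical constructor, modeled on the 0.1.9-era SequenceEncoder -- an assumption
public SequenceEncoder(File out) throws IOException {
    ch = NIOUtils.writableFileChannel(out);
    transform = new RgbToYuv420(0, 0);                  // RGB -> YUV420 color transform
    muxer = new MP4Muxer(ch, Brand.MP4);                // MP4 muxer writing to the file channel
    outTrack = muxer.addTrack(TrackType.VIDEO, 25);     // video track with a timescale of 25
    _out = ByteBuffer.allocate(1920 * 1080 * 6);        // buffer large enough for one encoded frame
    encoder = new H264Encoder();
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();
}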
Example 6: readMBQpDelta
import org.jcodec.codecs.h264.H264Utils; // import the required package/class
public int readMBQpDelta(MDecoder decoder, MBType prevMbType) {
    int ctx = 60;
    ctx += prevMbType == null || prevMbType == I_PCM || (prevMbType != I_16x16 && prevCBP == 0)
            || prevMbQpDelta == 0 ? 0 : 1;
    int val = 0;
    if (decoder.decodeBin(ctx) == 1) {
        val++;
        if (decoder.decodeBin(62) == 1) {
            val++;
            while (decoder.decodeBin(63) == 1)
                val++;
        }
    }
    prevMbQpDelta = H264Utils.golomb2Signed(val);
    return prevMbQpDelta;
}
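For reference, H264Utils.golomb2Signed converts the unary-decoded count produced above into a signed QP delta using the standard H.264 signed Exp-Golomb (se(v)) mapping; the snippet below only illustrates that mapping:

// Signed Exp-Golomb mapping: codeNum 0, 1, 2, 3, 4, ... -> value 0, 1, -1, 2, -2, ...
int codeNum = 3;
int signedValue = ((codeNum & 1) == 1) ? (codeNum + 1) / 2 : -(codeNum / 2);   // here: 2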
Example 7: calcBufferSize
import org.jcodec.codecs.h264.H264Utils; // import the required package/class
private void calcBufferSize() {
    int w = Integer.MIN_VALUE, h = Integer.MIN_VALUE;
    for (SampleEntry se : ses) {
        if ("avc1".equals(se.getFourcc())) {
            AvcCBox avcC = H264Utils.parseAVCC((VideoSampleEntry) se);
            for (SeqParameterSet sps : H264Utils.readSPS(avcC.getSpsList())) {
                int ww = sps.pic_width_in_mbs_minus1 + 1;
                if (ww > w)
                    w = ww;
                int hh = H264Utils.getPicHeightInMbs(sps);
                if (hh > h)
                    h = hh;
            }
        }
    }
    size = new Size(w << 4, h << 4);
}
Example 8: encodeNativeFrame
import org.jcodec.codecs.h264.H264Utils; // import the required package/class
public void encodeNativeFrame(Picture pic) throws IOException {
    if (toEncode == null) {
        toEncode = Picture.create(pic.getWidth(), pic.getHeight(), encoder.getSupportedColorSpaces()[0]);
    }
    // Perform conversion
    transform.transform(pic, toEncode);
    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(toEncode, _out);
    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(result, spsList, ppsList);
    H264Utils.encodeMOVPacket(result);
    // Add packet to video track
    outTrack.addFrame(new MP4Packet(result, frameNo, 25, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}
Example 9: Transcode2AVCTrack
import org.jcodec.codecs.h264.H264Utils; // import the required package/class
public Transcode2AVCTrack(VirtualTrack src, Size frameDim) {
    checkFourCC(src);
    this.src = src;
    ConstantRateControl rc = new ConstantRateControl(TARGET_RATE);
    H264Encoder encoder = new H264Encoder(rc);
    scaleFactor = selectScaleFactor(frameDim);
    thumbWidth = frameDim.getWidth() >> scaleFactor;
    thumbHeight = (frameDim.getHeight() >> scaleFactor) & ~1;
    mbW = (thumbWidth + 15) >> 4;
    mbH = (thumbHeight + 15) >> 4;
    se = H264Utils.createMOVSampleEntry(encoder.initSPS(new Size(thumbWidth, thumbHeight)), encoder.initPPS());
    PixelAspectExt pasp = Box.findFirst(src.getSampleEntry(), PixelAspectExt.class, "pasp");
    if (pasp != null)
        se.add(pasp);
    frameSize = rc.calcFrameSize(mbW * mbH);
    frameSize += frameSize >> 4;
}
Example 10: replaySps
import org.jcodec.codecs.h264.H264Utils; // import the required package/class
private boolean replaySps() {
    int inputIndex = dequeueInputBuffer();
    if (inputIndex < 0) {
        return false;
    }
    ByteBuffer inputBuffer = getEmptyInputBuffer(inputIndex);
    if (inputBuffer == null) {
        return false;
    }
    // Write the Annex B header
    inputBuffer.put(new byte[]{0x00, 0x00, 0x00, 0x01, 0x67});
    // Switch the H264 profile back to high
    savedSps.profileIdc = 100;
    // Patch the SPS constraint flags
    doProfileSpecificSpsPatching(savedSps);
    // The H264Utils.writeSPS function safely handles
    // Annex B NALUs (including NALUs with escape sequences)
    ByteBuffer escapedNalu = H264Utils.writeSPS(savedSps, 128);
    inputBuffer.put(escapedNalu);
    // No need for the SPS anymore
    savedSps = null;
    // Queue the new SPS
    return queueInputBuffer(inputIndex,
            0, inputBuffer.position(),
            System.nanoTime() / 1000,
            MediaCodec.BUFFER_FLAG_CODEC_CONFIG);
}
Example 11: close
import org.jcodec.codecs.h264.H264Utils; // import the required package/class
/**
 * Close the file and release resources
 * @throws IOException
 */
public void close() throws IOException {
    outTrack.addSampleEntry(H264Utils.createMOVSampleEntry(spsList, ppsList));
    muxer.writeHeader();
    NIOUtils.closeQuietly(ch);
    outBuffer.clear();
    spsList.clear();
    ppsList.clear();
    outBuffer = null;
    transform = null;
    muxer = null;
    encoder = null;
    spsList = null;
    ppsList = null;
}
Example 12: finish
import org.jcodec.codecs.h264.H264Utils; // import the required package/class
public void finish() throws IOException {
    // Push saved SPS/PPS to a special storage in MP4
    outTrack.addSampleEntry(H264Utils.createMOVSampleEntry(spsList, ppsList, 4));
    // Write MP4 header and finalize recording
    muxer.writeHeader();
    NIOUtils.closeQuietly(ch);
}
Example 13: encodeImage
import org.jcodec.codecs.h264.H264Utils; // import the required package/class
public void encodeImage(Bitmap bi, int timeEachFrame) throws IOException {
    setTimeEachFrame(timeEachFrame);
    if (toEncode == null) {
        toEncode = Picture.create(bi.getWidth(), bi.getHeight(), ColorSpace.YUV420);
    }
    // Perform conversion
    for (int i = 0; i < 3; i++)
        Arrays.fill(toEncode.getData()[i], 0);
    transform.transform(fromBufferedImage(bi), toEncode);
    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(_out, toEncode);
    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.encodeMOVPacket(result, spsList, ppsList);
    outTrack.addFrame(new MP4Packet(result,
            frameNo,    // presentation time of this frame (frameNo * timeEachFrame gives the time, in seconds, at which the next image appears)
            timescale,  // timescale: units per second (e.g. timescale = 2 with duration = 1 displays each image for 0.5 s)
            duration,   // how long this frame is displayed, in timescale units
            frameNo,
            true,
            null,
            frameNo,
            0));
}
Example 14: close
import org.jcodec.codecs.h264.H264Utils; // import the required package/class
@Override
public void close() throws IOException {
    // Push saved SPS/PPS to a special storage in MP4
    outTrack.addSampleEntry(H264Utils.createMOVSampleEntry(spsList, ppsList, 4));
    // Write MP4 header and finalize recording
    muxer.writeHeader();
    ch.close();
}
Example 15: encodeImage
import org.jcodec.codecs.h264.H264Utils; // import the required package/class
public void encodeImage(BufferedImage bi) throws IOException {
    // Encode image into H.264 frame, the result is stored in '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(_out, makeFrame(bi));
    // Based on the frame above form correct MP4 packet
    spsList.clear();
    ppsList.clear();
    H264Utils.encodeMOVPacket(result, spsList, ppsList);
    // Add packet to video track
    outTrack.addFrame(new MP4Packet(result, frameNo, 25, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}
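The example above calls a makeFrame helper that the snippet does not show. A plausible sketch of such a helper, reusing the AWTUtil and RgbToYuv420 utilities seen in Example 5 (the helper's name and exact behavior are assumptions):

// Hypothetical makeFrame helper -- not part of the original class
private Picture makeFrame(BufferedImage bi) {
    if (toEncode == null) {
        toEncode = Picture.create(bi.getWidth(), bi.getHeight(), ColorSpace.YUV420);
    }
    // Convert the BufferedImage to an RGB Picture, then into the YUV420 layout the encoder expects
    new RgbToYuv420(0, 0).transform(AWTUtil.fromBufferedImage(bi), toEncode);
    return toEncode;
}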