This article collects typical usage examples of the Java class org.jcodec.containers.mp4.TrackType. If you are wondering what the Java TrackType class does, how to use it, or are looking for working examples, the curated class code examples below may help.
The TrackType class belongs to the org.jcodec.containers.mp4 package. 15 code examples of the TrackType class are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
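Before the project-specific examples, here is a minimal sketch of the most common TrackType usage: registering a video track with an MP4Muxer. It follows the 0.1.x-era JCodec API used throughout this page (new MP4Muxer(ch, Brand.MP4), addTrack(TrackType.VIDEO, timescale)); the class name TrackTypeQuickStart and the output file name are illustrative only, and package locations may differ in later JCodec releases.
import java.io.File;
import java.io.IOException;

import org.jcodec.common.NIOUtils;
import org.jcodec.common.SeekableByteChannel;
import org.jcodec.containers.mp4.Brand;
import org.jcodec.containers.mp4.MP4Muxer;
import org.jcodec.containers.mp4.TrackType;
import org.jcodec.containers.mp4.muxer.FramesMP4MuxerTrack;

public class TrackTypeQuickStart {
    public static void main(String[] args) throws IOException {
        // Open a writable channel for the output MP4 file
        SeekableByteChannel ch = NIOUtils.writableFileChannel(new File("out.mp4"));
        // Muxer that will store the encoded frames
        MP4Muxer muxer = new MP4Muxer(ch, Brand.MP4);
        // TrackType tells the muxer which kind of track to create (VIDEO, SOUND, TIMECODE)
        FramesMP4MuxerTrack videoTrack = muxer.addTrack(TrackType.VIDEO, 25);
        // ... encode frames and add them to videoTrack here ...
        // Write the MP4 header (moov box) and release the channel
        muxer.writeHeader();
        NIOUtils.closeQuietly(ch);
    }
}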
Example 1: SequenceEncoderMp4
import org.jcodec.containers.mp4.TrackType; // import the required package/class
public SequenceEncoderMp4(File out)
        throws IOException
{
    super(out);
    this.ch = NIOUtils.writableFileChannel(out);
    // Muxer that will store the encoded frames
    muxer = new MP4Muxer(ch, Brand.MP4);
    // Add video track to muxer
    outTrack = muxer.addTrack(TrackType.VIDEO, 5);
    // Allocate a buffer big enough to hold output frames
    _out = ByteBuffer.allocate(1920 * 1080 * 6);
    // Create an instance of encoder
    encoder = new H264Encoder();
    // Transform to convert between RGB and YUV
    transform = ColorUtil.getTransform(ColorSpace.RGB, encoder.getSupportedColorSpaces()[0]);
    // Encoder extra data (SPS, PPS) to be stored in a special place of the MP4
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();
}
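The constructors in Examples 1, 2 and 7 only wire up the muxer, encoder, colorspace transform, and the SPS/PPS lists. For context, below is a hedged sketch of the companion encode/finish methods from the stock JCodec 0.1.9 SequenceEncoder that such a constructor is normally paired with. It assumes additional fields Picture toEncode and int frameNo, plus imports of org.jcodec.codecs.h264.H264Utils, org.jcodec.containers.mp4.MP4Packet and org.jcodec.common.model.Picture; method signatures differ in later JCodec versions.
public void encodeNativeFrame(Picture pic) throws IOException {
    if (toEncode == null) {
        toEncode = Picture.create(pic.getWidth(), pic.getHeight(), encoder.getSupportedColorSpaces()[0]);
    }
    // Convert the incoming RGB picture into the encoder's colorspace (YUV 4:2:0)
    transform.transform(pic, toEncode);
    // Encode the picture into an H.264 frame; the result lands in the '_out' buffer
    _out.clear();
    ByteBuffer result = encoder.encodeFrame(toEncode, _out);
    // Extract SPS/PPS from the stream and rewrite Annex B NAL units into MP4 (AVCC) layout
    spsList.clear();
    ppsList.clear();
    H264Utils.wipePS(result, spsList, ppsList);
    H264Utils.encodeMOVPacket(result);
    // Add the packet to the video track; the timescale (25 here) should match the one passed to addTrack
    outTrack.addFrame(new MP4Packet(result, frameNo, 25, 1, frameNo, true, null, frameNo, 0));
    frameNo++;
}

public void finish() throws IOException {
    // Push the collected SPS/PPS into the track's sample entry ('avcC') and finalize the MP4 header
    outTrack.addSampleEntry(H264Utils.createMOVSampleEntry(spsList, ppsList, 4));
    muxer.writeHeader();
    NIOUtils.closeQuietly(ch);
}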
Example 2: SequenceEncoderMp4
import org.jcodec.containers.mp4.TrackType; // import the required package/class
public SequenceEncoderMp4(File out)
        throws IOException
{
    super(out);
    this.ch = NIOUtils.writableFileChannel(out);
    // Muxer that will store the encoded frames
    muxer = new MP4Muxer(ch, Brand.MP4);
    // Add video track to muxer
    outTrack = muxer.addTrack(TrackType.VIDEO, timeScale);
    // Allocate a buffer big enough to hold output frames
    _out = ByteBuffer.allocate(1920 * 1080 * 6);
    // Create an instance of encoder
    encoder = new H264Encoder();
    // Transform to convert between RGB and YUV
    transform = ColorUtil.getTransform(ColorSpace.RGB, encoder.getSupportedColorSpaces()[0]);
    // Encoder extra data (SPS, PPS) to be stored in a special place of the MP4
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();
}
Example 3: SequenceImagesEncoder
import org.jcodec.containers.mp4.TrackType; // import the required package/class
public SequenceImagesEncoder(File out, int screenWidth, int screenHeight) throws IOException {
    this.ch = NIOUtils.writableFileChannel(out);
    // Transform to convert between RGB and YUV
    transform = new RgbToYuv420(0, 0);
    // Muxer that will store the encoded frames
    muxer = new MP4Muxer(ch, Brand.MP4);
    // Add video track to muxer
    outTrack = muxer.addTrackForCompressed(TrackType.VIDEO, timescale);
    // Allocate a buffer big enough to hold output frames
    _out = ByteBuffer.allocate(screenWidth * screenHeight * 6);
    // Create an instance of encoder
    encoder = new H264Encoder();
    // Encoder extra data (SPS, PPS) to be stored in a special place of the MP4
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();
}
Example 4: open
import org.jcodec.containers.mp4.TrackType; // import the required package/class
@Override
public void open(String _path, int width, int _height, int _fps) throws IOException {
    path = _path;
    height = _height;
    fps = _fps;
    ch = new FileChannelWrapper(FileChannel.open(Paths.get(path), StandardOpenOption.CREATE, StandardOpenOption.WRITE, StandardOpenOption.TRUNCATE_EXISTING));
    // Muxer that will store the encoded frames
    muxer = new MP4Muxer(ch, Brand.MP4);
    // Add video track to muxer
    outTrack = muxer.addTrack(TrackType.VIDEO, fps);
    // Allocate a buffer big enough to hold output frames
    _out = ByteBuffer.allocateDirect(width * height * 6);
    // Create an instance of encoder
    encoder = new H264Encoder(new JCodecUtils.JHVRateControl(20));
    // Encoder extra data (SPS, PPS) to be stored in a special place of the MP4
    spsList = new ArrayList<>();
    ppsList = new ArrayList<>();
    toEncode = Picture.create(width, height, ColorSpace.YUV420J);
}
Example 5: mediaHeader
import org.jcodec.containers.mp4.TrackType; // import the required package/class
protected void mediaHeader(MediaInfoBox minf, TrackType type) {
    switch (type) {
    case VIDEO:
        VideoMediaHeaderBox vmhd = new VideoMediaHeaderBox(0, 0, 0, 0);
        vmhd.setFlags(1);
        minf.add(vmhd);
        break;
    case SOUND:
        SoundMediaHeaderBox smhd = new SoundMediaHeaderBox();
        smhd.setFlags(1);
        minf.add(smhd);
        break;
    case TIMECODE:
        NodeBox gmhd = new NodeBox(new Header("gmhd"));
        gmhd.add(new GenericMediaInfoBox());
        NodeBox tmcd = new NodeBox(new Header("tmcd"));
        gmhd.add(tmcd);
        tmcd.add(new TimecodeMediaInfoBox((short) 0, (short) 0, (short) 12, new short[] { 0, 0, 0 },
                new short[] { 0xff, 0xff, 0xff }, "Lucida Grande"));
        minf.add(gmhd);
        break;
    default:
        throw new IllegalStateException("Handler " + type.getHandler() + " not supported");
    }
}
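For orientation, these branches fill in the media-information part of the MP4/QuickTime box tree. A simplified, illustrative layout (not produced by the code itself) is sketched below.
// moov
//  └─ trak                           one per track
//      └─ mdia
//          └─ minf                   the MediaInfoBox passed to mediaHeader()
//              └─ one of, depending on the TrackType:
//                  vmhd              TrackType.VIDEO
//                  smhd              TrackType.SOUND
//                  gmhd ─ tmcd       TrackType.TIMECODE (tmcd carries the TimecodeMediaInfoBox)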
Example 6: SequenceEncoder
import org.jcodec.containers.mp4.TrackType; // import the required package/class
public SequenceEncoder(File out, int frameRate) throws IOException {
    this.ch = NIOUtils.writableFileChannel(out);
    // Transform to convert between RGB and YUV
    transform = new RgbToYuv420(0, 0);
    // Muxer that will store the encoded frames
    muxer = new MP4Muxer(ch, Brand.MP4);
    // Add video track to muxer (the original frame rate was 25)
    outTrack = muxer.addTrackForCompressed(TrackType.VIDEO, frameRate);
    // Allocate a buffer big enough to hold output frames
    _out = ByteBuffer.allocate(1920 * 1080 * 6);
    // Create an instance of encoder
    encoder = new H264Encoder();
    // Encoder extra data (SPS, PPS) to be stored in a special place of the MP4
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();
}
Example 7: SequenceEncoder
import org.jcodec.containers.mp4.TrackType; // import the required package/class
public SequenceEncoder(File out) throws IOException {
    this.ch = NIOUtils.writableFileChannel(out);
    // Muxer that will store the encoded frames
    muxer = new MP4Muxer(ch, Brand.MP4);
    // Add video track to muxer
    outTrack = muxer.addTrack(TrackType.VIDEO, 25);
    // Allocate a buffer big enough to hold output frames
    _out = ByteBuffer.allocate(1920 * 1080 * 6);
    // Create an instance of encoder
    encoder = new H264Encoder();
    // Transform to convert between RGB and YUV
    transform = ColorUtil.getTransform(ColorSpace.RGB, encoder.getSupportedColorSpaces()[0]);
    // Encoder extra data (SPS, PPS) to be stored in a special place of the MP4
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();
}
Example 8: mediaHeader
import org.jcodec.containers.mp4.TrackType; // import the required package/class
private static void mediaHeader(MediaInfoBox minf, TrackType type) {
    switch (type) {
    case VIDEO:
        VideoMediaHeaderBox vmhd = new VideoMediaHeaderBox(0, 0, 0, 0);
        vmhd.setFlags(1);
        minf.add(vmhd);
        break;
    case SOUND:
        SoundMediaHeaderBox smhd = new SoundMediaHeaderBox();
        smhd.setFlags(1);
        minf.add(smhd);
        break;
    case TIMECODE:
        NodeBox gmhd = new NodeBox(new Header("gmhd"));
        gmhd.add(new GenericMediaInfoBox());
        NodeBox tmcd = new NodeBox(new Header("tmcd"));
        gmhd.add(tmcd);
        tmcd.add(new TimecodeMediaInfoBox((short) 0, (short) 0, (short) 12, new short[] { 0, 0, 0 },
                new short[] { 0xff, 0xff, 0xff }, "Lucida Grande"));
        minf.add(gmhd);
        break;
    default:
        throw new IllegalStateException("Handler " + type.getHandler() + " not supported");
    }
}
Example 9: ImageToH264MP4Encoder
import org.jcodec.containers.mp4.TrackType; // import the required package/class
public ImageToH264MP4Encoder(SeekableByteChannel ch, AudioFormat af) throws IOException {
    this.ch = ch;
    this.af = af;
    // Muxer that will store the encoded frames
    muxer = new MP4Muxer(ch, Brand.MP4);
    // Add video track to muxer
    outTrack = muxer.addTrack(TrackType.VIDEO, 25);
    // Create an instance of encoder
    encoder = new H264Encoder();
    // Transform to convert between RGB and YUV
    transform = ColorUtil.getTransform(ColorSpace.RGB, encoder.getSupportedColorSpaces()[0]);
    // Encoder extra data (SPS, PPS) to be stored in a special place of the MP4
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();
    // Optionally add a PCM audio track
    if (af != null)
        audioTrack = muxer.addPCMAudioTrack(af);
}
Example 10: initalize
import org.jcodec.containers.mp4.TrackType; // import the required package/class
/**
 * Initialize the compressor
 *
 * @param fileHandle - the MPEG-4 output file
 * @param width - video width
 * @param height - video height
 * @throws IOException
 */
public void initalize(File fileHandle, int width, int height) throws IOException {
    this.width = width;
    this.height = height;
    ch = NIOUtils.writableFileChannel(fileHandle);
    muxer = new MP4Muxer(ch, Brand.MP4);
    outTrack = muxer.addTrackForCompressed(TrackType.VIDEO, frameRate);
    outBuffer = ByteBuffer.allocate(width * height * 6);
    transform = new RgbToYuv420(0, 0);
    encoder = new H264Encoder();
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();
    frameNo = 0;
}
Example 11: Encoder
import org.jcodec.containers.mp4.TrackType; // import the required package/class
public Encoder(File out, int width, int height) throws IOException {
    this.ch = NIOUtils.writableFileChannel(out);
    _out = ByteBuffer.allocate(width * height * 6);
    encoder = new H264Encoder();
    spsList = new ArrayList<ByteBuffer>();
    ppsList = new ArrayList<ByteBuffer>();
    muxer = new MP4Muxer(ch, Brand.MP4);
    outTrack = muxer.addTrackForCompressed(TrackType.VIDEO, 25);
}
Example 12: PCMMP4MuxerTrack
import org.jcodec.containers.mp4.TrackType; // import the required package/class
public PCMMP4MuxerTrack(SeekableByteChannel out, int trackId, TrackType type, int timescale, int frameDuration,
        int frameSize, SampleEntry se) {
    super(trackId, type, timescale);
    this.out = out;
    this.frameDuration = frameDuration;
    this.frameSize = frameSize;
    addSampleEntry(se);
    setTgtChunkDuration(new Rational(1, 2), Unit.SEC);
}
Example 13: getMeta
import org.jcodec.containers.mp4.TrackType; // import the required package/class
@Override
public DemuxerTrackMeta getMeta() {
    if (syncSamples == null) {
        return null;
    }
    // Sync (key) frame numbers in the MP4 'stss' box are 1-based; DemuxerTrackMeta expects 0-based indices
    int[] copyOf = Arrays.copyOf(syncSamples, syncSamples.length);
    for (int i = 0; i < copyOf.length; i++)
        copyOf[i]--;
    TrackType type = getType();
    return new DemuxerTrackMeta(type == TrackType.VIDEO ? VIDEO : (type == TrackType.SOUND ? AUDIO : OTHER),
            copyOf, sizes.length, (double) duration / timescale, box.getCodedSize());
}
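As a hedged usage sketch (not part of the project above): with the 0.1.9-era JCodec demuxer API, the metadata produced by such a getMeta() implementation is typically reached as below. The input file name is illustrative, getMeta() is assumed to be available on the track class shown in Example 13, and getter names may differ slightly between JCodec versions.
SeekableByteChannel in = NIOUtils.readableFileChannel(new File("in.mp4"));
MP4Demuxer demuxer = new MP4Demuxer(in);
// The video track exposes its DemuxerTrackMeta (sync frames, frame count, duration)
DemuxerTrackMeta meta = demuxer.getVideoTrack().getMeta();
System.out.println("frames=" + meta.getTotalFrames() + " duration=" + meta.getTotalDuration() + "s");
NIOUtils.closeQuietly(in);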
Example 14: MP4H264Muxer
import org.jcodec.containers.mp4.TrackType; // import the required package/class
/**
 * NALProcessor that muxes a raw H.264 stream into an MP4 file
 *
 * @param file File to save to
 * @param fps Frame rate of the video
 * @param width Width of the video
 * @param height Height of the video
 * @throws IOException
 */
public MP4H264Muxer(File file, int fps, int width, int height) throws IOException
{
    this.width = width;
    this.height = height;
    channel = NIOUtils.writableFileChannel(file);
    muxer = new MP4Muxer(channel);
    timescale = fps;
    track = muxer.addTrack(TrackType.VIDEO, timescale);
}
Example 15: MP4MJPEGMovieBuilder
import org.jcodec.containers.mp4.TrackType; // import the required package/class
/**
 * Create an MP4 file with MJPEG video
 *
 * @param file Output file
 * @param width Frame width
 * @param height Frame height
 * @param framerate Frame rate of the video
 * @param quality JPEG quality, 0 - 100
 * @throws IOException
 */
public MP4MJPEGMovieBuilder(File file, int width, int height, int framerate, int quality) throws IOException
{
    channel = NIOUtils.writableFileChannel(file);
    muxer = new MP4Muxer(channel);
    timescale = framerate;
    track = muxer.addTrack(TrackType.VIDEO, timescale);
    this.width = width;
    this.height = height;
    this.quality = quality;
}