This article collects typical usage examples of the Java class org.monte.media.BufferFlag. If you are wondering what BufferFlag is for, how to use it, or what working code that uses it looks like, the curated examples below may help.
The BufferFlag class belongs to the org.monte.media package. The examples below show typical uses of the class, ordered by popularity.
Example 1: process
import org.monte.media.BufferFlag; // import the required package/class

@Override
public int process(Buffer in, Buffer out) {
    if (state == null) {
        state = new TechSmithCodecCore();
    }
    // A buffer marked DISCARD carries no sample data: copy its metadata to the
    // output and skip encoding/decoding.
    if (in.isFlag(BufferFlag.DISCARD)) {
        out.setMetaTo(in);
        return CODEC_OK;
    }
    if (outputFormat.get(EncodingKey).equals(ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE)) {
        return encode(in, out);
    } else {
        return decode(in, out);
    }
}
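For context, a codec like this one is driven by passing it one Buffer per frame and then inspecting the BufferFlag values it leaves on the output buffer. The following is a minimal sketch of such a caller and is not part of the original example: codec, writer, track, and capturedImage are placeholder names for an org.monte.media Codec, a MovieWriter, a track index, and a frame image.

import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.EnumSet;
import org.monte.media.Buffer;
import org.monte.media.BufferFlag;
import org.monte.media.Codec;
import org.monte.media.MovieWriter;

// Minimal sketch (assumed setup): pass one frame through a codec and honor
// the BufferFlag values it sets on the output buffer.
static void encodeFrame(Codec codec, MovieWriter writer, int track,
                        BufferedImage capturedImage) throws IOException {
    Buffer in = new Buffer();
    Buffer out = new Buffer();
    in.data = capturedImage;                    // the raw frame to encode
    in.flags = EnumSet.of(BufferFlag.KEYFRAME); // request a key frame (assumption)
    codec.process(in, out);
    if (out.isFlag(BufferFlag.DISCARD)) {
        return;                                 // codec produced no sample for this input
    }
    writer.write(track, out);                   // hand the encoded sample to the writer
}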
Example 2: doWrite
import org.monte.media.BufferFlag; // import the required package/class

/**
 * The actual writing of the buffer happens here. <p> This method is called
 * exclusively from the writer thread in startWriter().
 *
 * @param buf the buffer to write
 * @throws java.io.IOException if writing the buffer fails
 */
private void doWrite(Buffer buf) throws IOException {
    MovieWriter mw = w;
    // Close the file on a separate thread if the file is full or an hour
    // has passed.
    // The if-statement must ensure that we only start a new video file
    // at a key frame.
    // FIXME - this assumes that all audio frames are key frames
    // FIXME - this does not guarantee that the audio and video tracks have
    //         the same duration
    long now = System.currentTimeMillis();
    if (buf.track == videoTrack && buf.isFlag(BufferFlag.KEYFRAME)
            && (mw.isDataLimitReached() || now - fileStartTime > maxRecordingTime)) {
        final MovieWriter closingWriter = mw;
        new Thread() {
            @Override
            public void run() {
                try {
                    closingWriter.close();
                } catch (IOException ex) {
                    ex.printStackTrace();
                }
            }
        }.start();
        mw = createMovieWriter();
    }
    mw.write(buf.track, buf);
}
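The javadoc notes that doWrite() is only ever called from the writer thread started in startWriter(), which the excerpt does not include. The sketch below is a hypothetical reconstruction of that thread, assuming buffers are handed over through a blocking queue; the writerQueue field, its capacity, and the use of END_OF_MEDIA as a stop signal are assumptions made for illustration.

// Hypothetical sketch of the surrounding writer thread: captured buffers are
// queued elsewhere and drained here, so doWrite() is the only place that
// touches the MovieWriter.
private final java.util.concurrent.ArrayBlockingQueue<Buffer> writerQueue =
        new java.util.concurrent.ArrayBlockingQueue<Buffer>(60);

private void startWriter() {
    new Thread("Writer") {
        @Override
        public void run() {
            try {
                while (true) {
                    Buffer buf = writerQueue.take();      // blocks until a buffer is queued
                    if (buf.isFlag(BufferFlag.END_OF_MEDIA)) {
                        break;                            // recording was stopped
                    }
                    doWrite(buf);
                }
            } catch (InterruptedException | IOException ex) {
                ex.printStackTrace();
            }
        }
    }.start();
}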
Example 3: readMovie
import org.monte.media.BufferFlag; // import the required package/class

private static BufferedImage[] readMovie(File file) throws IOException {
    ArrayList<BufferedImage> frames = new ArrayList<BufferedImage>();
    MovieReader in = Registry.getInstance().getReader(file);
    // Decode every video frame of the movie into a BufferedImage.
    Format format = new Format(DataClassKey, BufferedImage.class);
    int track = in.findTrack(0, new Format(MediaTypeKey, MediaType.VIDEO));
    Codec codec = Registry.getInstance().getCodec(in.getFormat(track), format);
    try {
        Buffer inbuf = new Buffer();
        Buffer codecbuf = new Buffer();
        do {
            in.read(track, inbuf);
            codec.process(inbuf, codecbuf);
            // Skip buffers the codec marked as DISCARD; they carry no frame.
            if (!codecbuf.isFlag(BufferFlag.DISCARD)) {
                frames.add(Images.cloneImage((BufferedImage) codecbuf.data));
            }
        } while (!inbuf.isFlag(BufferFlag.END_OF_MEDIA));
    } finally {
        in.close();
    }
    return frames.toArray(new BufferedImage[frames.size()]);
}
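A minimal usage sketch for readMovie() follows; the file name is a placeholder and the decoded frames are simply counted rather than displayed.

import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;

// Load every frame of a movie file and report the frame count.
public static void main(String[] args) throws IOException {
    BufferedImage[] frames = readMovie(new File("recording.avi"));
    System.out.println("Decoded " + frames.length + " video frames");
    if (frames.length > 0) {
        System.out.println("Frame size: " + frames[0].getWidth() + "x" + frames[0].getHeight());
    }
}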
Example 4: write
import org.monte.media.BufferFlag; // import the required package/class

/**
 * Encodes the provided image and writes its sample data into the specified
 * track.
 *
 * @param track    The track index.
 * @param image    The image of the video frame.
 * @param duration Duration given in media time units.
 *
 * @throws IndexOutOfBoundsException if the track index is out of bounds.
 * @throws IllegalArgumentException if the duration is less than 1, or if the
 *         dimensions of the frame do not match the dimensions of the video.
 * @throws UnsupportedOperationException if the {@code MovieWriter} does not
 *         have a built-in encoder for this video format.
 * @throws java.io.IOException if writing the sample data failed.
 */
public void write(int track, BufferedImage image, long duration) throws IOException {
    ensureStarted();
    VideoTrack vt = (VideoTrack) tracks.get(track);
    if (vt.codec == null) {
        createCodec(track);
    }
    if (vt.codec == null) {
        throw new UnsupportedOperationException("No codec for this format: " + vt.format);
    }
    // The dimensions of the image must match the dimensions of the video track.
    Format fmt = vt.format;
    if (fmt.get(WidthKey) != image.getWidth() || fmt.get(HeightKey) != image.getHeight()) {
        throw new IllegalArgumentException("Dimensions of image[" + vt.samples.size()
                + "] (width=" + image.getWidth() + ", height=" + image.getHeight()
                + ") differs from video format of track: " + fmt);
    }
    // Encode pixel data
    {
        if (vt.outputBuffer == null) {
            vt.outputBuffer = new Buffer();
        }
        boolean isKeyframe = vt.syncInterval == 0 ? false : vt.samples.size() % vt.syncInterval == 0;
        Buffer inputBuffer = new Buffer();
        inputBuffer.flags = (isKeyframe) ? EnumSet.of(KEYFRAME) : EnumSet.noneOf(BufferFlag.class);
        inputBuffer.data = image;
        vt.codec.process(inputBuffer, vt.outputBuffer);
        if (vt.outputBuffer.flags.contains(DISCARD)) {
            return;
        }
        // Encode palette data
        isKeyframe = vt.outputBuffer.flags.contains(KEYFRAME);
        boolean paletteChange = writePalette(track, image, isKeyframe);
        writeSample(track, (byte[]) vt.outputBuffer.data, vt.outputBuffer.offset,
                vt.outputBuffer.length, isKeyframe && !paletteChange);
        /*
        long offset = getRelativeStreamPosition();
        DataChunk videoFrameChunk = new DataChunk(vt.getSampleChunkFourCC(isKeyframe));
        moviChunk.add(videoFrameChunk);
        videoFrameChunk.getOutputStream().write((byte[]) vt.outputBuffer.data, vt.outputBuffer.offset, vt.outputBuffer.length);
        videoFrameChunk.finish();
        long length = getRelativeStreamPosition() - offset;
        Sample s = new Sample(videoFrameChunk.chunkType, 1, offset, length, isKeyframe && !paletteChange);
        vt.addSample(s);
        idx1.add(s);
        if (getRelativeStreamPosition() > 1L << 32) {
            throw new IOException("AVI file is larger than 4 GB");
        }
        */
    }
}
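Judging from the VideoTrack and writeSample() bookkeeping, this write() method most likely belongs to an AVI writer such as org.monte.media.avi.AVIWriter, although the excerpt does not say so. Under that assumption, a typical caller might look like the sketch below; the track format values (encoding, size, depth, frame rate) are illustrative choices, not taken from the example above.

import java.awt.image.BufferedImage;
import java.io.File;
import java.io.IOException;
import org.monte.media.Format;
import org.monte.media.avi.AVIWriter;
import org.monte.media.math.Rational;
import static org.monte.media.FormatKeys.*;
import static org.monte.media.VideoFormatKeys.*;

// Hypothetical caller: write an array of frames to an AVI file.
public static void writeDemo(File file, BufferedImage[] frames) throws IOException {
    AVIWriter out = new AVIWriter(file);
    int track = out.addTrack(new Format(
            MediaTypeKey, MediaType.VIDEO,
            EncodingKey, ENCODING_AVI_TECHSMITH_SCREEN_CAPTURE,
            WidthKey, frames[0].getWidth(),
            HeightKey, frames[0].getHeight(),
            DepthKey, 24,
            FrameRateKey, new Rational(30, 1)));
    try {
        for (BufferedImage frame : frames) {
            out.write(track, frame, 1);  // duration of 1 media time unit per frame
        }
    } finally {
        out.close();
    }
}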