This article collects typical usage examples of the Java method com.xuggle.xuggler.IVideoPicture.getHeight. If you are wondering what IVideoPicture.getHeight does in Java, how to call it, or want to see concrete examples, the curated code samples below may help. You can also read further about the enclosing class com.xuggle.xuggler.IVideoPicture.
The following shows 4 code examples of IVideoPicture.getHeight, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code samples.
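Before the full examples, a minimal self-contained sketch of where getHeight fits may help (the class name GetHeightDemo and the 640x360 dimensions are arbitrary choices for illustration, not taken from the examples): a picture reports the height it was allocated or decoded with, and the examples that follow use that value to size BufferedImages and resamplers.

import com.xuggle.xuggler.IPixelFormat;
import com.xuggle.xuggler.IVideoPicture;

public class GetHeightDemo {
    public static void main(String[] args) {
        // allocate a blank YUV420P picture; 640x360 is an arbitrary size chosen for this sketch
        IVideoPicture picture = IVideoPicture.make(IPixelFormat.Type.YUV420P, 640, 360);
        // getWidth()/getHeight() report the dimensions the picture was created (or decoded) with
        System.out.println("picture size: " + picture.getWidth() + " x " + picture.getHeight());
        // release the underlying native memory held by the picture
        picture.delete();
    }
}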
Example 1: loadFirstFrame
import com.xuggle.xuggler.IVideoPicture; // import the package/class this method depends on
/**
 * Loads the first frame of the given video and then seeks back to the beginning of the stream.
 * @param container the video container
 * @param videoCoder the video stream coder
 * @return the first complete frame as a BufferedImage, or null if no complete frame could be decoded
 * @throws MediaException thrown if an error occurs during decoding
 */
private BufferedImage loadFirstFrame(IContainer container, IStreamCoder videoCoder) throws MediaException {
    // walk through each packet of the container format
    IPacket packet = IPacket.make();
    while (container.readNextPacket(packet) >= 0) {
        // make sure the packet belongs to the stream we care about
        if (packet.getStreamIndex() == videoCoder.getStream().getIndex()) {
            // create a new picture for the video data to be stored in
            IVideoPicture picture = IVideoPicture.make(videoCoder.getPixelType(), videoCoder.getWidth(), videoCoder.getHeight());
            int offset = 0;
            // decode the video
            while (offset < packet.getSize()) {
                int bytesDecoded = videoCoder.decodeVideo(picture, packet, offset);
                if (bytesDecoded < 0) {
                    LOGGER.error("No bytes found in container.");
                    throw new MediaException();
                }
                offset += bytesDecoded;
                // make sure that we have a full picture from the video first
                if (picture.isComplete()) {
                    // convert the picture to a Java buffered image
                    BufferedImage target = new BufferedImage(picture.getWidth(), picture.getHeight(), BufferedImage.TYPE_3BYTE_BGR);
                    IConverter converter = ConverterFactory.createConverter(target, picture.getPixelType());
                    return converter.toImage(picture);
                }
            }
        }
    }
    return null;
}
Example 2: toImage
import com.xuggle.xuggler.IVideoPicture; // import the package/class this method depends on
@Override
public BufferedImage toImage(IVideoPicture picture) {
    // test that the picture is valid
    this.validatePicture(picture);
    // resample as needed
    IVideoPicture resamplePicture = null;
    final AtomicReference<JNIReference> ref = new AtomicReference<JNIReference>(null);
    try {
        if (this.willResample()) {
            resamplePicture = AConverter.resample(picture, this.mToImageResampler);
            picture = resamplePicture;
        }
        // get picture parameters
        final int w = picture.getWidth();
        final int h = picture.getHeight();
        final float[][] r = this.bimg.img.bands.get(0).pixels;
        final float[][] g = this.bimg.img.bands.get(1).pixels;
        final float[][] b = this.bimg.img.bands.get(2).pixels;
        // copy the raw BGR bytes out of the picture and spread them across the float bands
        picture.getDataCached().get(0, this.buffer, 0, this.buffer.length);
        for (int y = 0, i = 0; y < h; y++) {
            for (int x = 0; x < w; x++, i += 3) {
                b[y][x] = ImageUtilities.BYTE_TO_FLOAT_LUT[(this.buffer[i] & 0xFF)];
                g[y][x] = ImageUtilities.BYTE_TO_FLOAT_LUT[(this.buffer[i + 1] & 0xFF)];
                r[y][x] = ImageUtilities.BYTE_TO_FLOAT_LUT[(this.buffer[i + 2] & 0xFF)];
            }
        }
        return this.bimg;
    } finally {
        // free any native resources created while resampling
        if (resamplePicture != null)
            resamplePicture.delete();
        if (ref.get() != null)
            ref.get().delete();
    }
}
Example 3: QueuedVideoData
import com.xuggle.xuggler.IVideoPicture; // import the package/class this method depends on
QueuedVideoData(IVideoPicture pic, long timeStamp, TimeUnit timeUnit) {
    // copy the picture bytes into a plain byte array so the frame outlives the native buffer
    ByteBuffer buf = pic.getByteBuffer();
    picture = new byte[buf.limit()];
    buf.get(picture);
    buf.flip();
    //this.type = pic.getPixelType();
    // remember the frame dimensions and timing so the picture can be rebuilt later
    this.width = pic.getWidth();
    this.height = pic.getHeight();
    this.timeStamp = timeStamp;
    this.timeUnit = timeUnit;
}
Example 4: onVideoPicture
import com.xuggle.xuggler.IVideoPicture; // import the package/class this method depends on
@Override
public void onVideoPicture(IVideoPictureEvent event) {
    log.debug("Adjust onVideo");
    IVideoPicture in = event.getPicture();
    log.debug("Video ts: {}", in.getFormattedTimeStamp());
    int inWidth = in.getWidth();
    int inHeight = in.getHeight();
    if (inHeight != height || inWidth != width) {
        log.debug("VideoAdjustTool onVideoPicture");
        log.trace("Video timestamp: {} pixel type: {}", event.getTimeStamp(), in.getPixelType());
        log.trace("Video in: {} x {} out: {} x {}", new Object[] { inWidth, inHeight, width, height });
        if (resampler == null) {
            resampler = IVideoResampler.make(width, height, pixelType, inWidth, inHeight, in.getPixelType());
            log.debug("Video resampler: {}", resampler);
        }
        if (resampler != null) {
            IVideoPicture out = IVideoPicture.make(pixelType, width, height);
            if (resampler.resample(out, in) >= 0) {
                // check complete
                if (out.isComplete()) {
                    // queue video
                    facade.queueVideo(out, event.getTimeStamp(), event.getTimeUnit());
                    in.delete();
                } else {
                    log.warn("Resampled picture was not marked as complete");
                }
            } else {
                log.warn("Resample failed");
            }
            out.delete();
        } else {
            log.debug("Resampler was null");
        }
        log.debug("VideoAdjustTool onVideoPicture - end");
    } else {
        // queue video
        facade.queueVideo(in, event.getTimeStamp(), event.getTimeUnit());
    }
}