本文整理汇总了Java中com.xuggle.mediatool.event.IVideoPictureEvent.getTimeStamp方法的典型用法代码示例。如果您正苦于以下问题:Java IVideoPictureEvent.getTimeStamp方法的具体用法?Java IVideoPictureEvent.getTimeStamp怎么用?Java IVideoPictureEvent.getTimeStamp使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类com.xuggle.mediatool.event.IVideoPictureEvent
的用法示例。
在下文中一共展示了IVideoPictureEvent.getTimeStamp方法的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: onVideoPicture
import com.xuggle.mediatool.event.IVideoPictureEvent; //导入方法依赖的package包/类
@Override
public void onVideoPicture(IVideoPictureEvent event) {
    // A negative startingTimestamp means the file we are rolling off of has
    // fewer than RECORD_LENGTH seconds of footage, so every frame is kept.
    final boolean beforeWindow =
            startingTimestamp >= 0 && event.getTimeStamp() < startingTimestamp;
    if (beforeWindow) {
        return;
    }
    final IVideoPicture frame = event.getPicture();
    // Latch the first encoded frame's timestamp so output time starts at zero.
    if (startTimestamp == -1) {
        startTimestamp = frame.getTimeStamp();
    }
    lastTimestamp = frame.getTimeStamp() - startTimestamp;
    frame.setTimeStamp(lastTimestamp);
    writer.encodeVideo(0, frame);
}
示例2: onVideoPicture
import com.xuggle.mediatool.event.IVideoPictureEvent; //导入方法依赖的package包/类
@Override
public void onVideoPicture(IVideoPictureEvent event) {
    // Timestamp of this frame in milliseconds of stream time.
    final long currentTimestamp = event.getTimeStamp(TimeUnit.MILLISECONDS);
    if (doDelay) {
        // Pace playback at the stream's native frame rate by sleeping for the
        // inter-frame gap. Clamp to zero: on the first frame (or after a
        // backwards seek) the gap can be negative, and Thread.sleep throws
        // IllegalArgumentException for negative arguments.
        final long delay = Math.max(0, currentTimestamp - lastTimestamp);
        try {
            Thread.sleep(delay);
        } catch (final InterruptedException e) {
            // Restore the interrupt flag so callers can observe cancellation.
            Thread.currentThread().interrupt();
            logger.error("Error while reading video frames", e);
        }
    }
    lastTimestamp = currentTimestamp;
    imageView.setImage(SwingFXUtils.toFXImage(event.getImage(), null));
    // Notify the listener on the JavaFX thread; when not delaying (e.g. a
    // seek/scrub pass) only the timestamp update matters.
    if (isPlaying || !doDelay) {
        Platform.runLater(() -> listener.frameUpdated(currentTimestamp));
    }
}
示例3: onVideoPicture
import com.xuggle.mediatool.event.IVideoPictureEvent; //导入方法依赖的package包/类
/**
 * Event handler for dealing with each frame in the stream.
 *
 * @param event Event that occurred
 */
@Override
public void onVideoPicture(IVideoPictureEvent event) {
    // Lock onto the first video stream we see; frames from any other
    // stream are ignored.
    final int streamIndex = event.getStreamIndex();
    if (mVideoStreamIndex == -1) {
        mVideoStreamIndex = streamIndex;
    } else if (streamIndex != mVideoStreamIndex) {
        return;
    }
    // Dump a still once playback reaches the requested grab position;
    // a zero timestamp is treated as "not yet valid".
    final long timeStamp = event.getTimeStamp();
    if (timeStamp != 0 && timeStamp >= grabAt) {
        dumpImage(event.getImage());
        gotStill = true;
    }
}
示例4: onVideoPicture
import com.xuggle.mediatool.event.IVideoPictureEvent; //导入方法依赖的package包/类
/**
 * Gets called when FFMPEG transcoded a frame
 */
public void onVideoPicture(IVideoPictureEvent event) {
    lastRead = System.currentTimeMillis();
    // Bind to the first video stream encountered; skip frames from any other.
    if (event.getStreamIndex() != mVideoStreamIndex) {
        if (mVideoStreamIndex != -1) {
            return;
        }
        mVideoStreamIndex = event.getStreamIndex();
    }
    // Sample groupSize consecutive frames out of every frameSkip frames.
    if (frameNr % frameSkip < groupSize) {
        try {
            final BufferedImage image = event.getImage();
            final byte[] encoded = ImageUtils.imageToBytes(image, imageType);
            long timestamp = event.getTimeStamp(TimeUnit.MILLISECONDS);
            // A configured fixed frame interval overrides the decoder timestamp.
            if (frameMs > 0) {
                timestamp = frameNr * frameMs;
            }
            final Frame newFrame = new Frame(streamId, frameNr, imageType, encoded, timestamp,
                    new Rectangle(0, 0, image.getWidth(), image.getHeight()));
            newFrame.getMetadata().put("uri", streamLocation);
            frameQueue.put(newFrame);
            // enforced throttling
            if (sleepTime > 0) {
                Utils.sleep(sleepTime);
            }
            // queue based throttling
            if (frameQueue.size() > 20) {
                Utils.sleep(frameQueue.size());
            }
        } catch (Exception e) {
            logger.warn("Unable to process new frame due to: " + e.getMessage(), e);
        }
    }
    frameNr++;
}
示例5: onVideoPicture
import com.xuggle.mediatool.event.IVideoPictureEvent; //导入方法依赖的package包/类
@Override
public void onVideoPicture(IVideoPictureEvent event) {
    final BufferedImage image = event.getImage();
    // Stream timestamps are in microseconds; work in whole seconds.
    final long seconds = event.getTimeStamp() / 1000000;
    if (seconds > 15) {
        // Blink effect: draw the logo only during odd-numbered seconds.
        if (seconds % 2 == 1) {
            final Graphics2D g = image.createGraphics();
            try {
                final Rectangle2D bounds = new Rectangle2D.Float(0, 0,
                        logoImage.getWidth(), logoImage.getHeight());
                // compute the amount to inset the time stamp and translate the image to that position
                final double insetX = bounds.getWidth();
                final double insetY = bounds.getHeight();
                // g.translate(inset, event.getImage().getHeight() - inset);
                g.translate(insetX, insetY);
                g.setColor(Color.WHITE);
                g.fill(bounds);
                g.setColor(Color.BLACK);
                g.drawImage(logoImage, 0, 0, null);
            } finally {
                // Release the native resources held by the Graphics context;
                // the original leaked one context per stamped frame.
                g.dispose();
            }
        }
        // call parent which will pass the video to next tool in chain
        super.onVideoPicture(event);
    }
}
示例6: onVideoPicture
import com.xuggle.mediatool.event.IVideoPictureEvent; //导入方法依赖的package包/类
@Override
public void onVideoPicture(IVideoPictureEvent event) {
    final BufferedImage currentFrame = event.getImage();
    // Anchor the stream clock to the wall clock at the first frame.
    if (initialSystemTimeAtVideoStart == -1) initialSystemTimeAtVideoStart = System.currentTimeMillis();
    // Stream timestamps are microseconds; convert to wall-clock milliseconds.
    currentFrameTimestamp = (event.getTimeStamp() / 1000) + initialSystemTimeAtVideoStart;
    if (frameCount == 0 && cameraEventListener.isPresent()) {
        // Both calls belong under the isPresent() guard: the original invoked
        // cameraEventListener.get() outside it, throwing NoSuchElementException
        // whenever no listener was registered.
        setViewSize(new Dimension(currentFrame.getWidth(), currentFrame.getHeight()));
        cameraEventListener.get().setFeedResolution(currentFrame.getWidth(), currentFrame.getHeight());
    }
    // Re-estimate FPS every 30 frames from the inter-frame timestamp delta (µs).
    if (lastVideoTimestamp > -1 && (frameCount % 30) == 0) {
        double estimateFPS = (double) SECOND_IN_MICROSECONDS
                / (double) (event.getTimeStamp() - lastVideoTimestamp);
        setFPS(estimateFPS);
    }
    lastVideoTimestamp = event.getTimeStamp();
    cameraEventListener.ifPresent(listener ->
            listener.newFrame(new Frame(Camera.bufferedImageToMat(currentFrame), currentFrameTimestamp)));
    frameCount++;
}
示例7: onVideoPicture
import com.xuggle.mediatool.event.IVideoPictureEvent; //导入方法依赖的package包/类
public void onVideoPicture(IVideoPictureEvent event) {
    try {
        final BufferedImage image = event.getImage();
        final byte[] encoded = ImageUtils.imageToBytes(image, imageType);
        // Sequence number advances by frameSkip for each buffered frame.
        final Rectangle box = new Rectangle(0, 0, image.getWidth(), image.getHeight());
        frames.add(new Frame(streamId, seqNum + frames.size() * frameSkip, imageType, encoded,
                event.getTimeStamp(TimeUnit.MILLISECONDS), box));
    } catch (IOException ioe) {
        logger.error("Exception while decoding video: " + ioe.getMessage(), ioe);
    }
}