This article collects typical usage examples of the Java class javax.media.format.VideoFormat. If you are unsure what VideoFormat does or how to use it, the hand-picked examples below may help.
The VideoFormat class belongs to the javax.media.format package. Fifteen code examples are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code samples.
Example 1: getFrame
import javax.media.format.VideoFormat; // import the required package/class
public BufferedImage getFrame(int index) {
    if (getState() != READY || index < 0 || index > getFrameCount()) {
        return null;
    }
    _framePositioningControl.seek(index);
    Buffer buffer = _frameGrabbingControl.grabFrame();
    Image img = new BufferToImage((VideoFormat) buffer.getFormat()).createImage(buffer);
    // image creation may also fail!
    if (img != null) {
        BufferedImage bi = new BufferedImage(img.getWidth(null),
                img.getHeight(null), BufferedImage.TYPE_INT_ARGB);
        Graphics2D g = bi.createGraphics();
        g.drawImage(img, 0, 0, null);
        return bi;
    }
    return null;
}
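The fields _framePositioningControl and _frameGrabbingControl are not shown in this excerpt. A minimal sketch of how they could be obtained from a realized JMF Player (the clip URL and method name are illustrative only):

import javax.media.Manager;
import javax.media.MediaLocator;
import javax.media.Player;
import javax.media.control.FrameGrabbingControl;
import javax.media.control.FramePositioningControl;

// Hypothetical helper; field and clip names are assumptions, not part of the original class.
private void openPlayer() throws Exception {
    Player player = Manager.createRealizedPlayer(new MediaLocator("file:clip.mov"));
    _framePositioningControl = (FramePositioningControl)
            player.getControl("javax.media.control.FramePositioningControl");
    _frameGrabbingControl = (FrameGrabbingControl)
            player.getControl("javax.media.control.FrameGrabbingControl");
    // Either control may be null if the underlying plug-ins do not support frame seeking/grabbing.
}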
Example 2: getSupportedFormats
import javax.media.format.VideoFormat; // import the required package/class
public Format[] getSupportedFormats()
{
    if ((captureStream != null) && connected)
    {
        try
        {
            final List<com.lti.civil.VideoFormat> formatList = captureStream.enumVideoFormats();
            final Format[] formats = new Format[formatList.size()];
            for (int i = 0; i < formatList.size(); i++)
            {
                formats[i] = net.sf.fmj.media.protocol.civil.DataSource
                        .convertCivilFormat(formatList.get(i));
            }
            return formats;
        } catch (CaptureException e)
        {
            logger.log(Level.WARNING, "" + e, e);
        }
    }
    // Must be in the connected state -- is this correct, or should we return null?
    return new Format[0];
}
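On the caller side, one way to use the returned array is to match it against a desired format. A brief sketch, assuming the caller wants 640x480 RGB (sizes and the use of Format.matches are illustrative; requires java.awt.Dimension plus the javax.media Format/VideoFormat imports):

Format desired = new VideoFormat(VideoFormat.RGB, new Dimension(640, 480),
        Format.NOT_SPECIFIED, Format.byteArray, Format.NOT_SPECIFIED);
Format chosen = null;
for (Format candidate : getSupportedFormats()) {
    if (desired.matches(candidate)) {
        chosen = candidate;   // first supported format compatible with the request
        break;
    }
}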
Example 3: processRGB
import javax.media.format.VideoFormat; // import the required package/class
/**
 * If any of the R, G, or B components of a pixel passes the threshold, the
 * corresponding byte in the output buffer is set to 100% (255).
 * NOTE: Output is a byte-per-pixel format, NOT 3 bytes per pixel.
 */
protected boolean processRGB(byte[] bin, byte[] bout, VideoFormat format) {
    /*
    if (supportedOuts == null) {
        supportedOuts = new Format[] { new IndexedColorFormat(format.getSize(),
            format.getMaxDataLength(), byte[].class, format.getFrameRate(),
            format.getSize().width, 8,
            LabelColors.REDS, LabelColors.GREENS, LabelColors.BLUES)
        };
    }*/
    passCount = 0;
    int p = 0;
    // TODO bin.length / 3 ??
    for (int i = 0; i < bin.length; i += 3, p++) {
        if ((char) bin[i] > threshold || (char) bin[i + 1] > threshold || (char) bin[i + 2] > threshold) {
            bout[p] = (byte) 255;
            passCount++;
        } else {
            bout[p] = 0;
        }
    }
    passRatio = (float) passCount / (float) (format.getSize().width * format.getSize().height);
    //System.out.println(String.format("Threshold pass: %.2f", passRatio*100));
    return true;
}
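The snippet above relies on fields (threshold, passCount, passRatio) defined elsewhere in its class. A standalone sketch of the same idea, with hypothetical names, that treats each byte as unsigned and produces a one-byte-per-pixel mask:

// Hypothetical standalone version; 'limit' plays the role of the threshold field above.
static byte[] thresholdMask(byte[] rgb, int limit) {
    byte[] mask = new byte[rgb.length / 3];
    for (int i = 0, p = 0; i + 2 < rgb.length; i += 3, p++) {
        int r = rgb[i] & 0xff;
        int g = rgb[i + 1] & 0xff;
        int b = rgb[i + 2] & 0xff;
        mask[p] = (r > limit || g > limit || b > limit) ? (byte) 255 : 0;
    }
    return mask;
}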
Example 4: processRGB
import javax.media.format.VideoFormat; // import the required package/class
protected boolean processRGB(byte[] bin, byte[] bout, VideoFormat format) {
    if (bgUpdater.getBackground() != null) {
        //// Calculate difference between input and background
        int now, before, diff;
        //long totalAmt = 0;
        for (int i = 0; i < bin.length; i++) {
            now = (int) bin[i] & 0xff;
            before = (int) bgUpdater.getBackground()[i] & 0xff;
            diff = Math.abs(now - before);
            bout[i] = (byte) diff;
        }
    } else {
        //// Make output show no differences
        for (int i = 0; i < bin.length; i++) {
            bout[i] = 0;
        }
    }
    return true;
}
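The bgUpdater object that supplies the reference frame is not part of this example. One common way such an updater maintains its background model is an exponential moving average over incoming frames; a minimal sketch under that assumption (class name and alpha value are hypothetical):

// Hypothetical background updater: keeps a running average of the frames it is fed.
public class RunningAverageBackground {
    private float[] background;          // one accumulator per byte of the frame
    private final float alpha = 0.05f;   // assumed smoothing factor

    public void update(byte[] frame) {
        if (background == null) {
            background = new float[frame.length];
            for (int i = 0; i < frame.length; i++) {
                background[i] = frame[i] & 0xff;
            }
            return;
        }
        for (int i = 0; i < frame.length; i++) {
            background[i] += alpha * ((frame[i] & 0xff) - background[i]);
        }
    }

    public byte[] getBackground() {
        if (background == null) {
            return null;
        }
        byte[] out = new byte[background.length];
        for (int i = 0; i < background.length; i++) {
            out[i] = (byte) Math.round(background[i]);
        }
        return out;
    }
}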
Example 5: processRGB
import javax.media.format.VideoFormat; // import the required package/class
/**
 * Accumulates each input byte into the running sum for the region it belongs to,
 * as given by regionIndicesImage, then fires a PROP_SUMS property change.
 * NOTE: No output is written to bout, and false is returned.
 */
protected boolean processRGB(byte[] bin, byte[] bout, VideoFormat format) {
    if (regionIndicesImage != null) {
        if (regionIndicesImage.length != bin.length) {
            throw new IllegalArgumentException("Region indices image has a different size from the input image.");
        }
        Arrays.fill(sums, 0f);
        for (int i = 0; i < bin.length; i++) {
            if (regionIndicesImage[i] != NO_INDEX) {
                sums[regionIndicesImage[i]] += (float) bin[i];
            }
        }
    }
    propSupport.firePropertyChange(PROP_SUMS, null, sums);
    return false;
}
Example 6: initFrame
import javax.media.format.VideoFormat; // import the required package/class
/**
 * Prepares the per-frame state for a new labeling pass.
 *
 * @param format the format of the incoming video frame
 * @return the number of pixels in a frame (width * height)
 */
private int initFrame(VideoFormat format) {
    int i;
    frameNumber++;
    this.format = format;
    blobManager.setVideoSize(format.getSize());
    //// Initialize
    for (i = 1; i <= MAX_LABELS; i++) {
        // Set equivalence array to have each label assigned to itself (no re-mappings yet)
        changeLabel[i] = i;
        // Reset label pixel count
        //blobPixelCount[i] = 0;
    }
    // Make sure the label image is the same size as the video
    int size = format.getSize().width * format.getSize().height;
    if (size != labelImage.length) {
        labelImage = new int[size];
    }
    Arrays.fill(labelImage, 0);
    return size;
}
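For context, the fields this method touches could be declared roughly as follows. These are assumptions (the excerpt does not show them); only the fact that MAX_LABELS bounds the provisional labels per frame matters:

// Hypothetical declarations backing initFrame and the labeling pass.
private static final int MAX_LABELS = 1024;                 // assumed bound on provisional labels
private final int[] changeLabel = new int[MAX_LABELS + 1];  // label equivalence table
private int[] labelImage = new int[0];                      // one label per pixel, re-allocated on resize
private int frameNumber;
private VideoFormat format;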
Example 7: resolveLabelEquivalences
import javax.media.format.VideoFormat; // import the required package/class
/**
 * <p>Resolves the label equivalences in the labelImage.</p>
 * <p>Note: if the labels were contiguous before, they remain so afterwards.</p>
 * @param bout output buffer receiving the resolved labels (one byte per pixel, labels up to 255)
 * @param format video format of the current frame, used for its dimensions
 * @param nLabels number of labels in use; no resolved label may exceed this
 */
private void resolveLabelEquivalences(byte[] bout, VideoFormat format, int nLabels) {
    int i;
    int x;
    int y;
    /* Now scan and resolve the labels in the label image accordingly. */
    i = format.getSize().width;
    for (y = 1; y < format.getSize().height; y++) {
        i++; // skip first pixel in row
        for (x = 1; x < format.getSize().width - 1; x++, i++) {
            if (labelImage[i] != 0) {
                labelImage[i] = changeLabel[labelImage[i]];
                // FIXME no need to duplicate to labelImage and bout both (except to show output)
                if (labelImage[i] <= 255) {
                    bout[i] = (byte) labelImage[i];
                }
            }
            assert (labelImage[i] <= nLabels);
        }
        i++; // skip last pixel in row
    }
}
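The changeLabel table consulted above is typically flattened first, so that every provisional label points directly at its final representative. A hedged sketch of that preliminary step, which is not shown in the original excerpt:

// Flatten chains in the equivalence table: afterwards, changeLabel[label] is the
// representative of its equivalence class (assuming merges always map toward lower labels).
for (int label = 1; label <= nLabels; label++) {
    int root = label;
    while (changeLabel[root] != root) {
        root = changeLabel[root];
    }
    changeLabel[label] = root;
}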
Example 8: updateImage
import javax.media.format.VideoFormat; // import the required package/class
protected void updateImage(byte[] bout, VideoFormat vformat) {
    synchronized (displayImage) {
        //// Copy pixels to image
        WritableRaster rast = displayImage.getRaster();
        int[] pixel = new int[] {0, 0, 0, 255};
        int p = 0;
        int label;
        for (int y = vformat.getSize().height - 1; y >= 0; y--) {
            for (int x = 0; x < vformat.getSize().width; x++) {
                label = (int) labelImage[p] & 0xFF;
                pixel[0] = LabelColors.REDS[label];
                pixel[1] = LabelColors.GREENS[label];
                pixel[2] = LabelColors.BLUES[label];
                rast.setPixel(x, y, pixel);
                ++p;
            }
        }
    }
}
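The displayImage and LabelColors fields come from elsewhere in the class. A minimal sketch of how displayImage could be created so that the 4-component {r, g, b, a} pixel written above maps onto the raster bands (the size here is illustrative; it would normally match vformat.getSize()):

// Hypothetical initialization: TYPE_INT_ARGB gives a 4-band raster in R, G, B, A order.
BufferedImage displayImage = new BufferedImage(640, 480, BufferedImage.TYPE_INT_ARGB);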
Example 9: ImageSourceStream
import javax.media.format.VideoFormat; // import the required package/class
public ImageSourceStream(VideoFormat format, FrameGenerator frameGenerator, Camera[] framesPath,
        final BoundedRangeModel progressModel)
{
    this.frameGenerator = frameGenerator;
    this.framesPath = framesPath;
    this.progressModel = progressModel;
    this.format = format;
    try
    {
        // Retrieve the main thread's AppContext instance via reflection
        this.mainThreadContext = Class.forName("sun.awt.AppContext").getMethod("getAppContext").invoke(null);
    }
    catch (Throwable ex)
    {
        // Assume the AppContext is not needed by this program
    }
}
Example 10: getMatchingOutputFormats
import javax.media.format.VideoFormat; // import the required package/class
@Override
protected Format[] getMatchingOutputFormats(Format input) {
    if (supportedInputFormats[0].matches(input)) {
        VideoFormat inf = (VideoFormat) input;
        Dimension s = inf.getSize();
        RGBFormat outf = new RGBFormat(s,
                s.width < 0 || s.height < 0 ? Format.NOT_SPECIFIED : s.width * s.height,
                Format.intArray, inf.getFrameRate(), 32, rMask, gMask, bMask, 1, s.width,
                Format.FALSE, RGBFormat.BIG_ENDIAN);
        return new Format[]{outf};
    }
    return new Format[0];
}
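The rMask/gMask/bMask fields are defined elsewhere in the class; for a 32-bit packed-int RGB output like the one constructed above they would typically look like this (assumed values, the original class may define them differently):

// Assumed masks for 32-bit int-packed RGB (0x00RRGGBB layout).
private static final int rMask = 0x00FF0000;
private static final int gMask = 0x0000FF00;
private static final int bMask = 0x000000FF;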
Example 11: getMyTracks
import javax.media.format.VideoFormat; // import the required package/class
private void getMyTracks() {
    Format format;
    TrackControl[] track = p.getTrackControls();
    track_amount = track.length;
    trackInfos = new String[track_amount];
    Time t = p.getDuration();
    trackTime = t.getSeconds();
    for (int i = 0; i < track_amount; i++) {
        format = track[i].getFormat();
        trackInfos[i] = format.toString();
        if (format instanceof VideoFormat) {
            videoTrack = true;
        }
        if (format instanceof AudioFormat) {
            audioTrack = true;
        }
    }
    read = true;
}
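The Processor p examined here must be at least configured before getTrackControls() returns the demultiplexed tracks. A minimal setup sketch, assuming a file locator (the URL is illustrative, and polling is used only for brevity; a real program would register a ControllerListener):

// Hypothetical setup for the Processor p used above.
private Processor openProcessor() throws Exception {
    Processor proc = Manager.createProcessor(new MediaLocator("file:clip.avi"));
    proc.configure();
    while (proc.getState() < Processor.Configured) {
        Thread.sleep(50);   // crude polling; a ControllerListener is the proper mechanism
    }
    return proc;
}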
Example 12: getFrame
import javax.media.format.VideoFormat; // import the required package/class
public BufferedImage getFrame(int index) {
    if (getState() != READY || index < 0 || index > getFrameCount()) {
        return null;
    }
    _framePositioningControl.seek(index);
    Buffer buffer = _frameGrabbingControl.grabFrame();
    Image img = new BufferToImage((VideoFormat) buffer.getFormat()).createImage(buffer);
    // image creation may also fail!
    if (img != null) {
        BufferedImage bi = new BufferedImage(img.getWidth(null),
                img.getHeight(null), BufferedImage.TYPE_INT_ARGB);
        Graphics2D g = bi.createGraphics();
        g.drawImage(img, 0, 0, null);
        return bi;
    }
    return null;
}
Example 13: setHandlers
import javax.media.format.VideoFormat; // import the required package/class
/**
 * Set appropriate handlers for the audio and video tracks.
 * We create our own handlers, which in turn will call the
 * handlers provided to us in the constructor of this class.
 */
private void setHandlers()
{
    TrackControl[] controls = processor.getTrackControls();
    int nControls = controls.length;
    int i;
    for (i = 0; i < nControls; i++) {
        Format format = controls[i].getFormat();
        try {
            if (format instanceof VideoFormat) {
                videoRenderer = new VideoRenderer(videoHandler);
                controls[i].setRenderer(videoRenderer);
            } else if (format instanceof AudioFormat) {
                controls[i].setRenderer(new AudioRenderer());
            } else {
                System.err.println("Unknown track type");
            }
        } catch (UnsupportedPlugInException e) {
            System.err.println("Got exception " + e);
        }
    }
}
Example 14: VideoChannel
import javax.media.format.VideoFormat; // import the required package/class
/**
 * Creates a video channel for a given JMF media locator, for instance new MediaLocator("vfw://0").
 *
 * @param locator media locator describing the capture device or source
 * @param localIpAddress local IP address to bind to
 * @param ipAddress remote IP address to transmit to
 * @param localPort local RTP port
 * @param remotePort remote RTP base port
 * @param format requested format (the channel currently always transmits JPEG/RTP)
 */
public VideoChannel(MediaLocator locator,
        String localIpAddress,
        String ipAddress,
        int localPort,
        int remotePort,
        Format format) {
    this.locator = locator;
    this.localIpAddress = localIpAddress;
    this.ipAddress = ipAddress;
    this.localPort = localPort;
    this.portBase = remotePort;
    this.format = new VideoFormat(VideoFormat.JPEG_RTP);
}
Example 15: getFormat
import javax.media.format.VideoFormat; // import the required package/class
public Format getFormat()
{
    try
    {
        com.lti.civil.VideoFormat vf = captureStream.getVideoFormat();
        return net.sf.fmj.media.protocol.civil.DataSource.convertCivilFormat(vf);
    } catch (Exception ex)
    {
        // Fall through and return the last known output format
    }
    return outputVideoFormat;
}