本文整理汇总了Java中javax.media.Buffer.getFormat方法的典型用法代码示例。如果您正苦于以下问题:Java Buffer.getFormat方法的具体用法?Java Buffer.getFormat怎么用?Java Buffer.getFormat使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类javax.media.Buffer
的用法示例。
在下文中一共展示了Buffer.getFormat方法的9个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: bufferToImage
import javax.media.Buffer; //导入方法依赖的package包/类
/**
* Description of the Method
*
* @param buffer Description of the Parameter
* @return Description of the Return Value
*/
/**
 * Converts a JMF video frame into a BufferedImage.
 *
 * The image wraps the buffer's int-packed pixel array directly (no copy),
 * using the masks and line stride reported by the buffer's RGBFormat.
 *
 * @param buffer a video Buffer whose format is an int-packed RGBFormat
 * @return a BufferedImage backed by the buffer's pixel data
 */
public BufferedImage bufferToImage(Buffer buffer) {
    RGBFormat format = (RGBFormat) buffer.getFormat();
    int height = format.getSize().height;
    int stride = format.getLineStride();
    int[] masks = {
        format.getRedMask(),
        format.getGreenMask(),
        format.getBlueMask()
    };
    // Wrap the raw int pixels without copying them.
    DataBuffer dataBuffer = new DataBufferInt((int[]) buffer.getData(),
                                              stride * height);
    SampleModel sampleModel = new SinglePixelPackedSampleModel(
            DataBuffer.TYPE_INT, stride, height, masks);
    WritableRaster raster =
            Raster.createWritableRaster(sampleModel, dataBuffer, new Point(0, 0));
    DirectColorModel colorModel =
            new DirectColorModel(24, masks[0], masks[1], masks[2]);
    return new BufferedImage(colorModel, raster, true, null);
}
示例2: doVideoEffect
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Blends the next frame (source) into the current frame (dest) in place,
 * producing either a "Scroll" or "Fade" transition; which effect runs is
 * looked up in effectVideo[currentLocator].
 *
 * @param dest        frame currently displayed (byte-array data); modified in place
 * @param source      frame of the next clip (byte-array data); read only
 * @param effectRatio transition progress; values above 1.0 are clamped, and
 *                    a value at or below 0 is a no-op
 */
void doVideoEffect(Buffer dest, Buffer source /*next*/,
float effectRatio) {
//System.err.println("Doing effect at " + effectRatio);
// Assume wipe
byte[] destData = (byte[]) dest.getData();
byte[] srcData = (byte[]) source.getData();
RGBFormat rgb = (RGBFormat) dest.getFormat();
int videoHeight = rgb.getSize().height;
int stride = rgb.getLineStride();
// Clamp the ratio into (0, 1]; nothing to do at or below zero.
if (effectRatio > 1.0f)
effectRatio = 1.0f;
if (effectRatio <= 0.0f)
return;
if (effectVideo[currentLocator].equalsIgnoreCase("Scroll")) {
// Scroll: shift the old frame up by nInLines rows (overlapping in-place
// copy), then fill the vacated bottom rows with the top of the new frame.
int nInLines = (int) (videoHeight * effectRatio);
if (nInLines == 0)
return;
System.arraycopy(destData, nInLines * stride,
destData, 0,
(videoHeight - nInLines) * stride);
System.arraycopy(srcData, 0,
destData, (videoHeight - nInLines) * stride,
nInLines * stride);
} else if (effectVideo[currentLocator].equalsIgnoreCase("Fade")) {
// Fade: per-byte linear cross-fade weighted by effectRatio.
for (int y = 0; y < videoHeight * stride; y++) {
destData[y] = (byte)
((destData[y] & 0xFF) * (1.0 - effectRatio) +
(srcData[y] & 0xFF) * effectRatio);
}
}
}
示例3: read
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Pull-model read: blocks until the producer has posted a frame in
 * pendingBuffer, then swaps that frame's contents into the caller's buffer.
 *
 * The caller's previous data/format/header objects are handed back to
 * pendingBuffer so the producer can reuse them without reallocating.
 *
 * The whole method is synchronized: the original version performed the swap
 * and cleared dataPending outside the monitor, racing the producer thread.
 *
 * @param buffer the caller-supplied buffer to fill
 */
public synchronized void read(Buffer buffer) {
    // Wait for the producer to set dataPending and notify on this monitor.
    // Remember interruption instead of swallowing it; wait() clears the
    // interrupt flag when it throws, so we restore it on the way out.
    boolean interrupted = false;
    while (!dataPending) {
        try {
            wait();
        } catch (InterruptedException ie) {
            interrupted = true;
        }
    }
    // Swap buffer contents with the pending frame while still holding the
    // monitor, so the producer never observes a half-swapped buffer.
    Object data = buffer.getData();
    Format format = buffer.getFormat();
    Object header = buffer.getHeader();
    buffer.setData(pendingBuffer.getData());
    buffer.setFormat(pendingBuffer.getFormat());
    buffer.setHeader(pendingBuffer.getHeader());
    buffer.setTimeStamp(pendingBuffer.getTimeStamp());
    buffer.setFlags(pendingBuffer.getFlags() | Buffer.FLAG_NO_SYNC);
    buffer.setLength(pendingBuffer.getLength());
    buffer.setOffset(pendingBuffer.getOffset());
    buffer.setSequenceNumber(pendingBuffer.getSequenceNumber());
    pendingBuffer.setData(data);
    pendingBuffer.setFormat(format);
    pendingBuffer.setHeader(header);
    dataPending = false;
    notifyAll();
    if (interrupted) {
        Thread.currentThread().interrupt();
    }
}
示例4: printDataInfo
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Dumps diagnostic information about a buffer read from the stream to
 * standard error: media type, timestamp, sequence number, length and EOM.
 *
 * @param buffer the buffer whose metadata is printed
 */
void printDataInfo(Buffer buffer) {
    //System.err.println("Read from stream: " + stream);
    System.err.println(buffer.getFormat() instanceof AudioFormat
            ? "Read audio data:"
            : "Read video data:");
    System.err.println(" Time stamp: " + buffer.getTimeStamp());
    System.err.println(" Sequence #: " + buffer.getSequenceNumber());
    System.err.println(" Data length: " + buffer.getLength());
    if (buffer.isEOM()) {
        System.err.println(" Got EOM!");
    }
}
示例5: process
import javax.media.Buffer; //导入方法依赖的package包/类
/**
* Processes the data now and make it ready for the texture.
*
* @param buffer The source for the data
* @return A control code indicating success or failure
*/
/**
 * Processes the data now and make it ready for the texture: copies the
 * buffer's RGB bytes into the in-progress frame, then swaps it with the
 * processed frame and flags that a new frame is available.
 *
 * @param buffer The source for the data
 * @return BUFFER_PROCESSED_OK on success (or EOM), BUFFER_PROCESSED_FAILED
 *         if the format is missing/unsupported or the data is not byte[]
 */
public synchronized int process(Buffer buffer)
{
    if (buffer.isEOM())
        return BUFFER_PROCESSED_OK;
    Format inf = buffer.getFormat();
    if (inf == null)
        return BUFFER_PROCESSED_FAILED;
    // Renegotiate only when the format actually changed (the original used
    // "||", forcing renegotiation on every buffer with a distinct but equal
    // Format instance).
    if (inf != currentFormat && !inf.equals(currentFormat))
    {
        // JMF convention: setInputFormat returns null when the format is
        // NOT supported, so null is the failure case. The original tested
        // "!= null", which rejected every successful format change.
        if (setInputFormat(inf) == null)
            return BUFFER_PROCESSED_FAILED;
    }
    Object data = buffer.getData();
    if (!(data instanceof byte[]))
        return BUFFER_PROCESSED_FAILED;
    // Take the pixels and write them out to individual bytes
    // (3 bytes per pixel, frameWidth x frameHeight).
    byte[] src_pixels = (byte[]) data;
    int num_pixels = frameWidth * frameHeight * 3;
    System.arraycopy(src_pixels, 0, inProgressFrame, 0, num_pixels);
    // Swap the two buffers and set the ready flag.
    byte[] tmp = processedFrame;
    processedFrame = inProgressFrame;
    inProgressFrame = tmp;
    newFrameAvailable = true;
    // Convert and write to our local image
    return BUFFER_PROCESSED_OK;
}
示例6: process
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Passes 16-bit PCM audio through unchanged while, if enabled, measuring
 * its RMS power over fixed windows and reporting each value via push().
 *
 * @param inputBuffer  incoming audio samples (byte[] PCM data)
 * @param outputBuffer receives the input samples (data arrays are swapped)
 * @return BUFFER_PROCESSED_OK always
 */
public int process(Buffer inputBuffer, Buffer outputBuffer){
    // == prolog
    if (isEOM(inputBuffer) ) {
        propagateEOM(outputBuffer);
        return BUFFER_PROCESSED_OK;
    }
    byte [] inBuffer = (byte[])inputBuffer.getData();
    int inLength = inputBuffer.getLength();
    int inOffset = inputBuffer.getOffset();
    AudioFormat af = (AudioFormat) inputBuffer.getFormat();
    if (enabled) {
        // Byte order of each 16-bit sample depends on the stream endianness.
        int shiftZero = 0;
        int shiftOne = 8;
        if (af.getEndian() == AudioFormat.BIG_ENDIAN) {
            shiftZero = 8;
            shiftOne = 0;
        }
        // == main
        // spa: samples per averaging window; npa: window duration in nanos.
        int spa = ((int) af.getSampleRate() * af.getChannels()) / nPowersPerSec;
        long npa = 1000000000L / nPowersPerSec;
        long timeStamp = inputBuffer.getTimeStamp(); // in nanos
        float average = 0;
        long cspa = 0;
        // Guard i + 1 < inLength: an odd trailing byte must not be read
        // (the original's "i < inLength" could index one past the data).
        for (int i = 0; i + 1 < inLength; i += 2) {
            short sample = (short)
                (((0xFF & inBuffer[inOffset + i]) << shiftZero) |
                ((0xFF & inBuffer[inOffset + i + 1]) << shiftOne));
            float normal = (float) sample;
            average = average + normal * normal;
            cspa++;
            if (cspa == spa) {
                cspa = 0;
                // RMS of the window, normalized to [0, 1] for 16-bit audio.
                average = (float) Math.sqrt((average / spa)) / 32768;
                push(timeStamp, average);
                timeStamp += npa;
                average = 0;
            }
        }
    }
    // == epilog: swap the data arrays so the input buffer can be reused.
    inputBuffer.setData(outputBuffer.getData());
    outputBuffer.setFormat(af);
    outputBuffer.setData(inBuffer);
    outputBuffer.setLength(inLength);
    outputBuffer.setOffset(inOffset);
    outputBuffer.setTimeStamp(inputBuffer.getTimeStamp());
    outputBuffer.setFlags(inputBuffer.getFlags());
    return BUFFER_PROCESSED_OK;
}
示例7: process
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Rotation effect: writes a rotated copy of the input frame into the output
 * buffer, advancing one step through the precomputed sin/cos tables per call.
 *
 * For each output pixel, the source coordinate is found by applying the
 * inverse rotation about the frame center; out-of-bounds sources are
 * rendered black. Output is packed 3 bytes per pixel.
 *
 * @param inBuffer  input RGB frame (byte[] data with an RGBFormat)
 * @param outBuffer output frame, sized from outputFormat's max data length
 * @return BUFFER_PROCESSED_OK, or BUFFER_PROCESSED_FAILED if the output
 *         array is too small for the frame
 */
public int process(Buffer inBuffer, Buffer outBuffer) {
int outputDataLength = ((VideoFormat)outputFormat).getMaxDataLength();
validateByteArraySize(outBuffer, outputDataLength);
outBuffer.setLength(outputDataLength);
outBuffer.setFormat(outputFormat);
outBuffer.setFlags(inBuffer.getFlags());
byte [] inData = (byte[]) inBuffer.getData();
byte [] outData = (byte[]) outBuffer.getData();
RGBFormat vfIn = (RGBFormat) inBuffer.getFormat();
Dimension sizeIn = vfIn.getSize();
int pixStrideIn = vfIn.getPixelStride();
int lineStrideIn = vfIn.getLineStride();
int iw = sizeIn.width;
int ih = sizeIn.height;
// Rotation center is the middle of the frame.
int cx = iw/2;
int cy = ih/2;
int ip = 0;
int op = 0;
int x, y;
double vsin, vcos, ratio;
// Output must hold 3 bytes per pixel for the full frame.
if ( outData.length < iw*ih*3 ) {
System.out.println("the buffer is not full");
return BUFFER_PROCESSED_FAILED;
}
// System.out.println("count = " + count);
// count indexes the current rotation step in the precomputed tables.
vsin = sinTable[count];
vcos = cosTable[count];
ratio = 1.0;//rateTable[count];
// System.out.println("vsin = " + vsin + " vcos = " + vcos);
// Iterate output pixels in center-relative coordinates (i, j), mapping each
// back to a source pixel; + 0.5 rounds to the nearest source coordinate.
for ( int j = -cy; j < ih-cy; j++ )
for ( int i = -cx; i < iw-cx; i++ ) {
x = (int)((vcos * i - vsin * j)*ratio + cx + 0.5);
y = (int)((vsin * i + vcos * j)*ratio + cy + 0.5);
if ( x < 0 || x >= iw || y < 0 || y >= ih) {
// Source falls outside the frame: write a black pixel.
outData[op++] = 0;
outData[op++] = 0;
outData[op++] = 0;
} else {
// Copy the 3 color bytes from the source pixel.
ip = lineStrideIn * y + x * pixStrideIn;
outData[op++] = inData[ip++];
outData[op++] = inData[ip++];
outData[op++] = inData[ip++];
}
}
// Advance to the next rotation step, wrapping after num steps.
count ++;
if ( count >= num )
count = 0;
return BUFFER_PROCESSED_OK;
}
示例8: process
import javax.media.Buffer; //导入方法依赖的package包/类
/**
* Processes the data and renders it to a component
*/
/**
 * Processes the data and renders it to a component: tracks the incoming
 * frame size, wraps the frame's pixel array in an NIO buffer for the
 * renderer, and schedules a repaint of the canvas.
 *
 * @param buffer the frame to render
 * @return BUFFER_PROCESSED_OK, or BUFFER_PROCESSED_FAILED when the data is
 *         missing or of an unsupported array type
 */
public int process(Buffer buffer) {
    //logger.info("process...");
    // Silently accept empty buffers.
    if (buffer == null || buffer.getLength() <= 0) {
        //logger.info("zero length buffer");
        return BUFFER_PROCESSED_OK;
    }
    Format format = buffer.getFormat();
    if (format instanceof VideoFormat) {
        Dimension size = ((VideoFormat) format).getSize();
        boolean resized = (size != null)
                && (size.width != inWidth || size.height != inHeight);
        if (resized) {
            inWidth = size.width;
            inHeight = size.height;
            updateScale();
        }
    }
    synchronized (syncObject) {
        Object pixels = buffer.getData();
        if (pixels == null) {
            return BUFFER_PROCESSED_FAILED;
        }
        // Keep a reference to the raw array and expose it through an
        // nio.Buffer view of the matching primitive type.
        frameArray = buffer.getData();
        if (format.getDataType() == Format.byteArray) {
            frameData = ByteBuffer.wrap((byte[]) frameArray);
        } else if (format.getDataType() == Format.intArray) {
            frameData = IntBuffer.wrap((int[]) frameArray);
        } else {
            return BUFFER_PROCESSED_FAILED;
        }
    }
    // repaint the canvas
    canvas.repaint();
    return BUFFER_PROCESSED_OK;
}
示例9: process
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Processes one video frame: normalizes the input to a packed 3-bytes-per-
 * pixel array, optionally runs processRGB on it, and publishes the resulting
 * frame to any registered frame listeners.
 *
 * @param in  input buffer carrying byte[] or packed-int[] RGB data
 * @param out output buffer; its byte[] data is (re)allocated if too small
 * @return BUFFER_PROCESSED_OK always (non-video or null input passes through)
 */
public int process(Buffer in, Buffer out) {
    if (in.getFormat() instanceof VideoFormat && in.getData() != null) {
        byte[] bin;
        byte[] bout;
        if (in.getData() instanceof byte[]) {
            bin = (byte[]) in.getData();
        } else if (in.getData() instanceof int[]) {
            // Unpack 0xRRGGBB ints into 3 bytes per pixel (R, G, B order).
            int[] iin = (int[]) in.getData();
            bin = new byte[iin.length * 3];
            int bi, ii;
            for (bi = 0, ii = 0; bi < bin.length; bi += 3, ii++) {
                int v = iin[ii];
                bin[bi] = (byte) ((v >> 16) & 0xff);
                bin[bi + 1] = (byte) ((v >> 8) & 0xff);
                bin[bi + 2] = (byte) (v & 0xff);
            }
        } else {
            return PlugIn.BUFFER_PROCESSED_FAILED;
        }
        // Ensure the output data array exists and is large enough; this is
        // the only capacity check needed (a dead, empty re-check further
        // down in the original has been removed).
        if (!(out.getData() instanceof byte[]) || ((byte[]) out.getData()).length < bin.length) {
            bout = new byte[bin.length];
            out.setData(bout);
        } else {
            bout = (byte[]) out.getData();
        }
        VideoFormat vformat = (VideoFormat) in.getFormat();
        // Notify listeners when the video size changes.
        if (vformat.getSize().width != videoSize.width
                || vformat.getSize().height != videoSize.height) {
            videoSize = vformat.getSize();
            propSupport.firePropertyChange(PROP_VIDEO_SIZE, null, videoSize);
        }
        byte[] buffToDraw = bout;
        boolean processed = false;
        if (active) {
            // Time the effect for the running per-call average.
            long startTime = System.nanoTime();
            processed = processRGB(bin, bout, vformat);
            long stopTime = System.nanoTime();
            totalTime += (stopTime - startTime) / 1.0e9;
            ++nCalls;
        }
        if (!processed) {
            // processRGB produced nothing: swap the data arrays between the
            // input & output so the caller receives the input frame, and
            // draw the (possibly int[]-converted) input bytes.
            Object data = in.getData();
            in.setData(out.getData());
            out.setData(data);
            buffToDraw = bin;
        }
        //// Update frame image available to UI
        if (frameListenerList.size() > 0) {
            //// Assure the image is the proper size
            if (displayImage.getWidth() != vformat.getSize().width
                    || displayImage.getHeight() != vformat.getSize().height) {
                displayImage = new BufferedImage(vformat.getSize().width, vformat.getSize().height,
                        BufferedImage.TYPE_INT_RGB);
            }
            updateImage(buffToDraw, vformat);
            notifyVideoFrameListeners();
        }
    }
    // NOTE(review): the output's format/length/offset are never copied from
    // the input (the setters were commented out in the original). Downstream
    // appears to tolerate this — confirm before re-enabling them.
    //out.setFormat(in.getFormat());
    //out.setLength(in.getLength());
    //out.setOffset(in.getOffset());
    return BUFFER_PROCESSED_OK;
}