本文整理汇总了Java中javax.media.Buffer.setData方法的典型用法代码示例。如果您正苦于以下问题:Java Buffer.setData方法的具体用法?Java Buffer.setData怎么用?Java Buffer.setData使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类javax.media.Buffer
的用法示例。
在下文中一共展示了Buffer.setData方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: process
import javax.media.Buffer; //导入方法依赖的package包/类
public int process(Buffer in, Buffer out) {
    // Frame-access callback: let the owner inspect the incoming frame.
    accessFrame(in);

    // Exchange the backing data objects so no copy is needed.
    Object inData = in.getData();
    Object outData = out.getData();
    in.setData(outData);
    out.setData(inData);

    // Mirror the input's descriptive attributes onto the output.
    out.setFormat(in.getFormat());
    out.setLength(in.getLength());
    out.setOffset(in.getOffset());
    return BUFFER_PROCESSED_OK;
}
示例2: validateByteArraySize
import javax.media.Buffer; //导入方法依赖的package包/类
/*
 * Ensures the buffer's data object is a byte[] with at least newSize
 * elements. Existing contents are preserved when the array must grow; a
 * brand-new array is allocated when the current data is null or not a
 * byte[]. The (possibly new) array is installed on the buffer and returned.
 */
byte[] validateByteArraySize(Buffer buffer, int newSize) {
    Object current = buffer.getData();
    byte[] result;
    if (!(current instanceof byte[])) {
        // Wrong type (or null): allocate from scratch.
        result = new byte[newSize];
    } else {
        result = (byte[]) current;
        if (result.length >= newSize) {
            // Already large enough; leave the buffer untouched.
            return result;
        }
        // Grow while keeping the old payload.
        byte[] grown = new byte[newSize];
        System.arraycopy(result, 0, grown, 0, result.length);
        result = grown;
    }
    buffer.setData(result);
    return result;
}
示例3: read
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Captures one screen frame into the supplied buffer as packed RGB ints.
 */
public void read(Buffer buffer) throws IOException {
    synchronized (this) {
        // Reuse the caller's int[] if present and big enough; otherwise
        // allocate a new backing array and install it on the buffer.
        Object target = buffer.getData();
        if (target == null
                || target.getClass() != Format.intArray
                || ((int[]) target).length < maxDataLength) {
            target = new int[maxDataLength];
            buffer.setData(target);
        }
        buffer.setFormat(rgbFormat);
        // Timestamp in nanoseconds derived from the frame counter and rate.
        buffer.setTimeStamp((long) (seqNo * (1000 / frameRate) * 1000000));

        // Grab the screen region and copy its RGB pixels into the buffer.
        BufferedImage capture =
                robot.createScreenCapture(new Rectangle(x, y, width, height));
        capture.getRGB(0, 0, width, height, (int[]) target, 0, width);

        buffer.setSequenceNumber(seqNo);
        buffer.setLength(maxDataLength);
        buffer.setFlags(Buffer.FLAG_KEY_FRAME);
        buffer.setHeader(null);
        seqNo++;
    }
}
示例4: mute
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Overwrites the media payload of a <tt>Buffer</tt> with silence of a
 * matching primitive-array type.
 *
 * @param buffer the <tt>Buffer</tt> whose payload (between its offset and
 * offset + length) is to be zeroed out
 */
public static void mute(Buffer buffer)
{
    Object data = buffer.getData();
    if (data == null)
        return;

    int from = buffer.getOffset();
    int to = from + buffer.getLength();
    Class<?> type = data.getClass();

    // Zero only the region the buffer currently claims to hold.
    if (Format.byteArray.equals(type))
        Arrays.fill((byte[]) data, from, to, (byte) 0);
    else if (Format.intArray.equals(type))
        Arrays.fill((int[]) data, from, to, 0);
    else if (Format.shortArray.equals(type))
        Arrays.fill((short[]) data, from, to, (short) 0);

    // Re-install the (mutated in place) payload on the buffer.
    buffer.setData(data);
}
示例5: readFrame
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Reads the next QuickTime frame, converting it into the supplied JMF
 * buffer; signals end-of-media when no further frame is available.
 */
@Override
public void readFrame(Buffer buffer)
{
    synchronized (QT_SYNC_OBJ)
    {
        BufferedImage frame;
        try
        {
            qtSnapper.next();
            frame = qtSnapper.getFrame();
        } catch (QTException e)
        {
            throw new RuntimeException(e); // TODO: how to handle.
        }

        if (frame == null)
        {
            // No more frames: flag end-of-media.
            buffer.setEOM(true);
            buffer.setLength(0);
            return;
        }

        // Convert the image to a JMF buffer and transplant its payload.
        final Buffer converted = ImageToBuffer.createBuffer(frame, format.getFrameRate());
        buffer.setData(converted.getData());
        buffer.setLength(converted.getLength());
        buffer.setOffset(converted.getOffset());
        buffer.setEOM(false);
        buffer.setDiscard(false);
        // Frame time (QuickTime time units) scaled to nanoseconds.
        buffer.setTimeStamp((qtSnapper.getFrameTime() * 1000000000L) / qtSnapper.getTimeScale());
    }
}
示例6: read
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Fills the buffer with either a scrolling RGB test pattern (video) or a
 * synthesized sine sweep (audio), depending on the 'videoData' flag.
 */
public void read(Buffer buffer) throws IOException {
    synchronized (this) {
        // Reuse the caller's byte[] when present and large enough; otherwise
        // allocate a new backing array and install it on the buffer.
        Object outdata = buffer.getData();
        if (outdata == null || !(outdata.getClass() == Format.byteArray) ||
                ((byte[])outdata).length < maxDataLength) {
            outdata = new byte[maxDataLength];
            buffer.setData(outdata);
        }
        if (videoData) {
            buffer.setFormat( rgbFormat );
            // Timestamp in nanoseconds for frame number 'seqNo'.
            buffer.setTimeStamp( (long) (seqNo * (1000 / frameRate) * 1000000) );
            // Scroll the source image by two lines per frame, wrapping around.
            int lineNo = (seqNo * 2) % size.height;
            int chunkStart = lineNo * size.width * 3; // 3 bytes per RGB pixel
            System.arraycopy(data, chunkStart,
                outdata, 0,
                maxDataLength - (chunkStart));
            if (chunkStart != 0) {
                // Wrap: copy the start of the source to the tail of the output.
                System.arraycopy(data, 0,
                    outdata, maxDataLength - chunkStart,
                    chunkStart);
            }
        } else {
            buffer.setFormat( audioFormat );
            // NOTE(review): constant timestamp (1/8 s) on every audio buffer
            // — looks suspicious; confirm this is intended.
            buffer.setTimeStamp( 1000000000 / 8 );
            // Synthesize 1000 samples of a sine whose frequency ('freq',
            // a field) drifts upward by 0.01 per sample and wraps from
            // 10.0 back to 2.0.
            for (int i = 0; i < 1000; i++) {
                ((byte[])outdata)[i] = (byte) (Math.sin(i / freq) * 32);
                freq = (freq + 0.01);
                if (freq > 10.0)
                    freq = 2.0;
            }
        }
        buffer.setSequenceNumber( seqNo );
        buffer.setLength(maxDataLength);
        buffer.setFlags(0);
        buffer.setHeader( null );
        seqNo++;
    }
}
示例7: read
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Hands the next pending frame to the caller by swapping the supplied
 * buffer's payload with {@code pendingBuffer}. Blocks until the producer
 * side has marked data as pending, then wakes the producer.
 */
public void read(Buffer buffer) {
    synchronized (this) {
        // Wait for the producer to publish a frame. Remember interruption
        // and re-assert it afterwards instead of swallowing it.
        boolean interrupted = false;
        while (!dataPending) {
            try {
                wait();
            } catch (InterruptedException ie) {
                interrupted = true;
            }
        }

        // Swap payloads under the lock. The original mutated pendingBuffer
        // and dataPending OUTSIDE the monitor, racing with the producer.
        Object data = buffer.getData();
        Format format = buffer.getFormat();
        Object header = buffer.getHeader();
        buffer.setData(pendingBuffer.getData());
        buffer.setFormat(pendingBuffer.getFormat());
        buffer.setHeader(pendingBuffer.getHeader());
        buffer.setTimeStamp(pendingBuffer.getTimeStamp());
        buffer.setFlags(pendingBuffer.getFlags() | Buffer.FLAG_NO_SYNC);
        buffer.setLength(pendingBuffer.getLength());
        buffer.setOffset(pendingBuffer.getOffset());
        buffer.setSequenceNumber(pendingBuffer.getSequenceNumber());
        pendingBuffer.setData(data);
        pendingBuffer.setFormat(format);
        pendingBuffer.setHeader(header);
        dataPending = false;

        // Wake the producer waiting for the slot to free up.
        notifyAll();

        // Restore the thread's interrupt status for callers to observe.
        if (interrupted) {
            Thread.currentThread().interrupt();
        }
    }
}
示例8: read
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Called by the Processor to obtain one frame's worth of video data.
 * Renders the next image, JPEG-encodes it in memory and stores the bytes
 * in the supplied buffer; signals end-of-media when all frames are done.
 */
public void read(Buffer buffer) throws IOException
{
    buffer.setOffset(0);

    if (endOfStream())
    {
        // All frames consumed: flag end-of-media.
        buffer.setEOM(true);
        buffer.setLength(0);
        return;
    }

    checkAppContext();

    // Render the frame (the flag tells the generator it is the last one)
    // and JPEG-encode it into memory.
    boolean lastFrame = this.imageIndex == this.framesPath.length - 1;
    BufferedImage frame =
            this.frameGenerator.renderImageAt(this.framesPath[this.imageIndex], lastFrame);
    ByteArrayOutputStream encoded = new ByteArrayOutputStream();
    ImageIO.write(frame, "JPEG", encoded);

    byte[] data = encoded.toByteArray();
    buffer.setData(data);
    buffer.setLength(data.length);
    buffer.setFormat(this.format);
    buffer.setFlags(buffer.getFlags() | Buffer.FLAG_KEY_FRAME);

    // Publish progress on the event-dispatch thread.
    final int progressionValue = this.imageIndex++;
    EventQueue.invokeLater(new Runnable()
    {
        public void run()
        {
            progressModel.setValue(progressionValue);
        }
    });
}
示例9: read
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Called by the Processor to obtain the next video frame. Reads a frame
 * from the recording stream, JPEG-encodes it at maximum quality and
 * places the bytes in the supplied buffer.
 */
public void read(Buffer buffer) throws IOException {
    if (recordingStream.isFinished()) {
        System.out.println("Done reading all images.");
        buffer.setEOM(true);
        buffer.setOffset(0);
        buffer.setLength(0);
        return;
    }

    // Keep re-encoding the previous frame if the stream momentarily
    // returns none (the 'image' field holds the last good frame).
    RenderedImage newImage = recordingStream.readFrame();
    if (newImage != null) {
        image = newImage;
    }

    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    ImageOutputStream ios = ImageIO.createImageOutputStream(outputStream);
    ImageWriter writer = ImageIO.getImageWritersByFormatName("jpeg").next();
    try {
        writer.setOutput(ios);
        ImageWriteParam iwp = writer.getDefaultWriteParam();
        iwp.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
        iwp.setCompressionType("JPEG");
        iwp.setCompressionQuality(1); // maximum quality
        writer.write(null, new IIOImage(image, null, null), iwp);
    } finally {
        // The original leaked the ImageOutputStream and only disposed the
        // writer on the happy path; always release both.
        writer.dispose();
        ios.close();
    }

    byte[] data = outputStream.toByteArray();
    nextImage++;
    System.out.println("Processing frame: " + nextImage);
    buffer.setData(data);
    buffer.setOffset(0);
    buffer.setLength(data.length);
    buffer.setFormat(format);
    buffer.setFlags(buffer.getFlags() | Buffer.FLAG_KEY_FRAME);
}
示例10: process
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Decodes one ZMBV (DOSBox capture codec) frame. Two pixel planes are
 * rotated each frame so delta frames can reference the previously decoded
 * image.
 */
@Override
public int process(Buffer input, Buffer output) {
    //System.err.println("ZMBVDecoder " + input.getTimeStamp());
    if (input.isDiscard()) {
        // Propagate a discarded input without touching decoder state.
        output.setDiscard(true);
        return BUFFER_PROCESSED_OK;
    }
    if (input.isEOM()) {
        // Propagate end-of-media and drop any stale payload.
        output.setEOM(true);
        output.setData(null);
        return BUFFER_PROCESSED_OK;
    }
    output.copy(input);
    output.setFormat(outputFormat);
    output.setData(pixels);
    output.setOffset(0);
    output.setLength(pixels.length);
    //if (input.)
    byte[] inDat = (byte[]) input.getData();
    // Rotate the planes: after this swap, 'prevPixels' names the array just
    // installed on 'output', and 'pixels' names the plane decode() fills.
    // NOTE(review): 'output' therefore carries the array passed to decode()
    // as the reference plane — confirm decode()'s argument semantics / the
    // intended one-frame pipelining here.
    int[] swap = prevPixels;
    prevPixels = pixels;
    pixels = swap;
    // Detect if frames were skipped
    long sequenceNumber = input.getSequenceNumber();
    boolean framesWereSkipped = (sequenceNumber != previousSequenceNumber + 1);
    boolean isKeyframe = state.decode(inDat, input.getOffset(), input.getLength(), pixels, prevPixels, outputFormat.getSize().width, outputFormat.getSize().height,
        framesWereSkipped);
    if (framesWereSkipped && !isKeyframe) {
        // A delta frame after a gap cannot be reconstructed — discard it.
        output.setDiscard(true);
    } else {
        previousSequenceNumber = sequenceNumber;
    }
    setFlag(output, Buffer.FLAG_KEY_FRAME, isKeyframe);
    return BUFFER_PROCESSED_OK;
}
示例11: process
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Pass-through audio stage: forwards the input unchanged while, when
 * 'enabled', computing a normalized RMS power value per window of samples
 * and publishing each value via push().
 */
public int process(Buffer inputBuffer, Buffer outputBuffer){
    // == prolog
    if (isEOM(inputBuffer) ) {
        propagateEOM(outputBuffer);
        return BUFFER_PROCESSED_OK;
    }
    byte [] inBuffer = (byte[])inputBuffer.getData();
    int inLength = inputBuffer.getLength();
    int inOffset = inputBuffer.getOffset();
    int samplesNumber = inLength; // NOTE(review): counts bytes, not samples (2 bytes/sample below); also unused
    AudioFormat af = (AudioFormat) inputBuffer.getFormat();
    if (enabled) {
        // Shift amounts to assemble a 16-bit sample from two consecutive
        // bytes, honoring stream endianness. Assumes 16-bit PCM — TODO confirm.
        int shiftZero = 0;
        int shiftOne = 8;
        if (af.getEndian() == AudioFormat.BIG_ENDIAN) {
            shiftZero = 8;
            shiftOne = 0;
        }
        // == main
        // spa: samples per analysis window; npa: window duration in nanos.
        int spa = ((int) af.getSampleRate() * af.getChannels()) / nPowersPerSec;
        long npa = 1000000000L / nPowersPerSec;
        long timeStamp = inputBuffer.getTimeStamp(); // in nanos
        float average = 0;
        long cspa = 0; // samples accumulated in the current window
        for (int i = 0; i < inLength; i += 2) {
            // NOTE(review): reads inBuffer[inOffset + i + 1]; an odd
            // inLength would step past the payload — verify upstream
            // always delivers whole 16-bit samples.
            short sample = (short)
                (((0xFF & inBuffer[inOffset + i]) << shiftZero) |
                ((0xFF & inBuffer[inOffset + i + 1]) << shiftOne));
            float normal = (float) sample;
            average = average + normal * normal; // accumulate squared amplitude
            cspa++;
            if (cspa == spa) {
                cspa = 0;
                // RMS of the window, normalized by full scale (32768).
                average = (float) Math.sqrt((average / spa)) / 32768;
                push(timeStamp, average);
                timeStamp += npa;
                average = 0;
            }
        }
    }
    // == epilog
    // Swap the data arrays between input and output (zero-copy forward).
    inputBuffer.setData(outputBuffer.getData());
    outputBuffer.setFormat(af);
    outputBuffer.setData(inBuffer);
    outputBuffer.setLength(inLength);
    outputBuffer.setOffset(inOffset);
    outputBuffer.setTimeStamp(inputBuffer.getTimeStamp());
    outputBuffer.setFlags(inputBuffer.getFlags());
    return BUFFER_PROCESSED_OK;
}
示例12: read
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Called by the Processor to read one frame's worth of video data:
 * the raw bytes of the next JPEG image file in the list.
 */
public void read(Buffer buf) throws IOException {
    // All images consumed: signal end-of-media.
    if (nextImage >= images.size()) {
        System.err.println("Done reading all images.");
        buf.setEOM(true);
        buf.setOffset(0);
        buf.setLength(0);
        ended = true;
        return;
    }

    String imageFile = (String) images.elementAt(nextImage);
    nextImage++;
    System.err.println(" - reading image file: " + imageFile);

    // Open the next image; the original leaked this handle if
    // readFully threw, so close it in a finally block.
    RandomAccessFile raFile = new RandomAccessFile(imageFile, "r");
    try {
        int fileLength = (int) raFile.length();

        // Reuse the buffer's byte[] when it is big enough for this frame.
        byte[] data = null;
        if (buf.getData() instanceof byte[])
            data = (byte[]) buf.getData();
        if (data == null || data.length < fileLength) {
            data = new byte[fileLength];
            buf.setData(data);
        }

        // Read the entire JPEG image from the file.
        raFile.readFully(data, 0, fileLength);
        System.err.println(" read " + fileLength + " bytes.");

        buf.setOffset(0);
        buf.setLength(fileLength);
        buf.setFormat(format);
        buf.setFlags(buf.getFlags() | Buffer.FLAG_KEY_FRAME);
    } finally {
        raFile.close();
    }
}
示例13: readFrame
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Emits the next decoded Ogg audio buffer as little-endian 16-bit PCM
 * bytes, or a zero-length discard/EOM buffer while no audio is ready.
 */
@Override
public void readFrame(Buffer buffer)
{
    synchronized (OGG_SYNC_OBJ)
    {
        try
        {
            nextAudioBuffer(); // TODO: this often generates discard
            // buffers, we could be smarter about it.
            // Same for video.
        } catch (IOException e)
        {
            // Mark the buffer unusable before propagating the failure.
            buffer.setLength(0);
            buffer.setDiscard(true);
            throw new RuntimeException(e); // TODO: how to handle?
        }
        /* If playback has begun, top audio buffer off immediately. */
        // NOTE(review): stateflag == 0 appears to mean playback has NOT
        // begun yet — until then hand back empty discard (or EOM) buffers;
        // confirm against the field's writer.
        if (stateflag == 0)
        {
            buffer.setEOM(eomAudio);
            buffer.setLength(0);
            if (!eomAudio)
                buffer.setDiscard(true);
            return;
        } else
        {
            if (audiobuf_ready == 0)
            {
                // Decoder has no finished audio yet: emit a discard buffer.
                buffer.setEOM(eomAudio);
                buffer.setLength(0);
                if (!eomAudio)
                    buffer.setDiscard(true);
                // System.out.println("Generated discard buffer: ");
                return;
            } else
            {
                // convert from short array to byte array. TODO:
                // inefficient, should just store in byte array to begin
                // with.
                final byte[] data = new byte[audiobuf.length * 2];
                for (int i = 0; i < audiobuf.length; ++i)
                {
                    // little-endian:
                    data[i * 2] = (byte) (audiobuf[i] & 0xff);
                    data[i * 2 + 1] = (byte) ((audiobuf[i] >> 8) & 0xff);
                }
                buffer.setData(data);
                buffer.setLength(data.length);
                buffer.setOffset(0);
                buffer.setEOM(false);
                buffer.setDiscard(false);
                // NOTE(review): wall-clock millis used as a media timestamp —
                // JMF timestamps are normally nanoseconds of media time.
                buffer.setTimeStamp(System.currentTimeMillis()); // TODO
                // System.out.println("Generated audio buffer: " +
                // data.length);
                // Mark the decoder-side buffer as consumed.
                audiobuf_fill = 0;
                audiobuf_ready = 0;
            }
        }
    }
}
示例14: process
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Video pass-through/processing stage: normalizes the input frame to a
 * byte[], optionally runs processRGB() on it, and feeds the resulting
 * frame to any registered frame listeners.
 */
public int process(Buffer in, Buffer out) {
    if (in.getFormat() instanceof VideoFormat && in.getData() != null) {
        byte[] bin;
        byte[] bout;
        if (in.getData() instanceof byte[]) {
            bin = (byte[]) in.getData();
        } else if (in.getData() instanceof int[]) {
            // Expand packed int pixels into 3 bytes each; from the shifts
            // this looks like 0xRRGGBB -> R,G,B byte order — TODO confirm.
            int[] iin = (int[]) in.getData();
            bin = new byte[iin.length * 3];
            int bi, ii;
            for (bi = 0, ii = 0; bi < bin.length; bi += 3, ii++) {
                int v = iin[ii];
                bin[bi + 2] = (byte) (v & 0xff);
                bin[bi + 1] = (byte) ((v >> 8) & 0xff);
                bin[bi] = (byte) ((v >> 16) & 0xff);
            }
        } else {
            // Unsupported pixel container type.
            return PlugIn.BUFFER_PROCESSED_FAILED;
        }
        // byte[] bin = (byte[]) in.getData();
        // byte[] bout;
        // Ensure the output buffer owns a byte[] at least as large as the frame.
        if (!(out.getData() instanceof byte[]) || ((byte[])out.getData()).length < bin.length) {
            bout = new byte[bin.length];
            out.setData(bout);
        } else {
            bout = (byte[]) out.getData();
        }
        VideoFormat vformat = (VideoFormat) in.getFormat();
        // Notify observers when the incoming frame size changes.
        if (vformat.getSize().width != videoSize.width
            || vformat.getSize().height != videoSize.height) {
            videoSize = vformat.getSize();
            propSupport.firePropertyChange(PROP_VIDEO_SIZE, null, videoSize);
        }
        //// Assure output buffer is large enough
        // NOTE(review): dead code — bout cannot be null here and the block
        // body is empty; the size check above already handled this.
        if(bout == null || bout.length < bin.length) {
        }
        byte[] buffToDraw = bout;
        boolean processed = false;
        if (active) {
            // Time the per-frame processing for the running-average stats.
            long startTime = System.nanoTime();
            processed = processRGB(bin, bout, vformat);
            long stopTime = System.nanoTime();
            totalTime += (stopTime - startTime) / 1.0e9;
            ++nCalls;
        }
        if(!processed) {
            // Swap the data between the input & output.
            Object data = in.getData();
            in.setData(out.getData());
            out.setData(data);
            buffToDraw = bin;
        }
        //// Update frame image available to UI
        if (frameListenerList.size() > 0) {
            //// Assure the image is the proper size
            if (displayImage.getWidth() != vformat.getSize().width || displayImage.getHeight() != vformat.getSize().height) {
                displayImage = new BufferedImage(vformat.getSize().width, vformat.getSize().height,
                    BufferedImage.TYPE_INT_RGB);
            }
            updateImage(buffToDraw, vformat);
            notifyVideoFrameListeners();
        }
    }
    // Copy the input attributes to the output
    // NOTE(review): the attribute copy below is disabled, so out's format/
    // length/offset are only whatever setData() and the swap left behind —
    // verify downstream handles this.
    //out.setFormat(in.getFormat());
    //out.setLength(in.getLength());
    //out.setOffset(in.getOffset());
    return BUFFER_PROCESSED_OK;
}
示例15: read
import javax.media.Buffer; //导入方法依赖的package包/类
/**
 * Called by the Processor to read one frame's worth of video data.
 * Pulls the next encoded JPEG image from the JPGImages queue and stamps
 * it with a presentation time derived from the frame counter.
 */
public void read(Buffer buf) throws IOException {
    // Check if we've finished all the frames.
    if (JPGImages.finished) {
        // We are done. Set EndOfMedia.
        System.err.println("Done reading all images.");
        System.err.println("Frames: " + JPGImages.totalPics);
        System.err.println("Missed frames: " +
                           (JPGImages.sentPics - JPGImages.totalPics));
        buf.setEOM(true);
        buf.setOffset(0);
        buf.setLength(0);
        ended = true;
        return;
    }

    // Presentation time in nanoseconds for this frame number (computed via
    // float exactly as before to preserve rounding behavior).
    float time1 = seqNo * (1000 / frameRate) * 1000000;
    long time = (long) time1;
    buf.setTimeStamp(time);
    buf.setSequenceNumber(seqNo++);

    // Next encoded JPEG image waiting in the DataList.
    byte[] data = JPGImages.readNode();
    // (A commented-out byte->int conversion loop from an abandoned AVI
    // output attempt used to live here; removed as dead code.)

    buf.setData(data);
    buf.setOffset(0);
    buf.setLength(data.length);
    buf.setFormat(format);
    // FLAG_KEY_FRAME is static: access via the class, not the instance.
    buf.setFlags(buf.getFlags() | Buffer.FLAG_KEY_FRAME);
}