This article collects and summarizes typical usage examples of the Java method javax.media.Buffer.setOffset. If you have been wondering how Buffer.setOffset is used, how to call it, or where to find examples of it, the curated code samples here may help. You can also explore further usage examples of its enclosing class, javax.media.Buffer.
Below are 10 code examples of the Buffer.setOffset method, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code examples.
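Before the examples, here is a minimal sketch of the pattern most of them share: copying a frame's payload and its bookkeeping attributes, including the offset, from one Buffer to another. This sketch is not taken from any of the projects below; the class name BufferAttributeCopy and its helper method are purely illustrative.

import javax.media.Buffer;

// Illustrative helper (hypothetical, not from the examples below): the data
// array, format, length, and offset describe one frame and travel together.
public class BufferAttributeCopy {
    public static void copyAttributes(Buffer in, Buffer out) {
        out.setData(in.getData());             // reuse the input's data array
        out.setFormat(in.getFormat());         // same media format
        out.setLength(in.getLength());         // number of valid bytes
        out.setOffset(in.getOffset());         // index where the valid data starts
        out.setTimeStamp(in.getTimeStamp());   // presentation time
        out.setSequenceNumber(in.getSequenceNumber());
        out.setFlags(in.getFlags());           // e.g. Buffer.FLAG_KEY_FRAME
    }
}

Examples 1, 3, and 7 below perform exactly this kind of attribute copy after swapping the data arrays between the input and output buffers, which avoids allocating a new array for every frame.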
Example 1: process
import javax.media.Buffer; // import the package/class this method depends on
public int process(Buffer in, Buffer out) {
    // This is the "Callback" to access individual frames.
    accessFrame(in);
    // Swap the data between the input & output.
    Object data = in.getData();
    in.setData(out.getData());
    out.setData(data);
    // Copy the input attributes to the output
    out.setFormat(in.getFormat());
    out.setLength(in.getLength());
    out.setOffset(in.getOffset());
    return BUFFER_PROCESSED_OK;
}
Example 2: readFrame
import javax.media.Buffer; // import the package/class this method depends on
@Override
public void readFrame(Buffer buffer)
{
    synchronized (QT_SYNC_OBJ)
    {
        BufferedImage bi;
        try
        {
            qtSnapper.next();
            bi = qtSnapper.getFrame();
        } catch (QTException e)
        {
            throw new RuntimeException(e); // TODO: how to handle.
        }
        if (bi != null)
        {
            final Buffer b = ImageToBuffer.createBuffer(bi, format.getFrameRate());
            buffer.setData(b.getData());
            buffer.setLength(b.getLength());
            buffer.setOffset(b.getOffset());
            buffer.setEOM(false);
            buffer.setDiscard(false);
            buffer.setTimeStamp((qtSnapper.getFrameTime() * 1000000000L) / qtSnapper.getTimeScale());
        } else
        {
            buffer.setEOM(true);
            buffer.setLength(0);
        }
    }
}
Example 3: read
import javax.media.Buffer; // import the package/class this method depends on
public void read(Buffer buffer) {
    //System.err.println("entering read");
    synchronized (this) {
        while (!dataPending) {
            try {
                wait();
            } catch (InterruptedException ie) {
            }
        }
    }
    //System.err.println("... in read");
    // Swap
    Object data = buffer.getData();
    Format format = buffer.getFormat();
    Object header = buffer.getHeader();
    buffer.setData(pendingBuffer.getData());
    buffer.setFormat(pendingBuffer.getFormat());
    buffer.setHeader(pendingBuffer.getHeader());
    buffer.setTimeStamp(pendingBuffer.getTimeStamp());
    buffer.setFlags(pendingBuffer.getFlags() | Buffer.FLAG_NO_SYNC);
    buffer.setLength(pendingBuffer.getLength());
    buffer.setOffset(pendingBuffer.getOffset());
    buffer.setSequenceNumber(pendingBuffer.getSequenceNumber());
    pendingBuffer.setData(data);
    pendingBuffer.setFormat(format);
    pendingBuffer.setHeader(header);
    dataPending = false;
    synchronized (this) {
        notifyAll();
    }
}
Example 4: read
import javax.media.Buffer; // import the package/class this method depends on
/**
 * This is called from the Processor to read a frame worth of video data.
 */
public void read(Buffer buffer) throws IOException
{
    buffer.setOffset(0);
    // Check if we've finished all the frames
    if (endOfStream())
    {
        buffer.setEOM(true);
        buffer.setLength(0);
    }
    else
    {
        checkAppContext();
        ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
        BufferedImage frame = this.frameGenerator.renderImageAt(this.framesPath[this.imageIndex],
                this.imageIndex == this.framesPath.length - 1);
        ImageIO.write(frame, "JPEG", outputStream);
        byte[] data = outputStream.toByteArray();
        buffer.setData(data);
        buffer.setLength(data.length);
        buffer.setFormat(this.format);
        buffer.setFlags(buffer.getFlags() | Buffer.FLAG_KEY_FRAME);
        final int progressionValue = this.imageIndex++;
        EventQueue.invokeLater(new Runnable()
        {
            public void run()
            {
                progressModel.setValue(progressionValue);
            }
        });
    }
}
Example 5: read
import javax.media.Buffer; // import the package/class this method depends on
public void read(Buffer buffer) throws IOException {
    if (recordingStream.isFinished()) {
        System.out.println("Done reading all images.");
        buffer.setEOM(true);
        buffer.setOffset(0);
        buffer.setLength(0);
        return;
    }
    RenderedImage newImage = recordingStream.readFrame();
    if (newImage != null) {
        image = newImage;
    }
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    ImageOutputStream ios = ImageIO.createImageOutputStream(outputStream);
    ImageWriter writer = ImageIO.getImageWritersByFormatName("jpeg").next();
    writer.setOutput(ios);
    ImageWriteParam iwp = writer.getDefaultWriteParam();
    iwp.setCompressionMode(ImageWriteParam.MODE_EXPLICIT);
    iwp.setCompressionType("JPEG");
    iwp.setCompressionQuality(1);
    writer.write(null, new IIOImage(image, null, null), iwp);
    writer.dispose();
    byte[] data = outputStream.toByteArray();
    nextImage++;
    System.out.println("Processing frame: " + nextImage);
    buffer.setData(data);
    buffer.setOffset(0);
    buffer.setLength(data.length);
    buffer.setFormat(format);
    buffer.setFlags(buffer.getFlags() | Buffer.FLAG_KEY_FRAME);
}
Example 6: process
import javax.media.Buffer; // import the package/class this method depends on
@Override
public int process(Buffer input, Buffer output) {
    //System.err.println("ZMBVDecoder " + input.getTimeStamp());
    if (input.isDiscard()) {
        output.setDiscard(true);
        return BUFFER_PROCESSED_OK;
    }
    if (input.isEOM()) {
        output.setEOM(true);
        output.setData(null);
        return BUFFER_PROCESSED_OK;
    }
    output.copy(input);
    output.setFormat(outputFormat);
    output.setData(pixels);
    output.setOffset(0);
    output.setLength(pixels.length);
    //if (input.)
    byte[] inDat = (byte[]) input.getData();
    int[] swap = prevPixels;
    prevPixels = pixels;
    pixels = swap;
    // Detect if frames were skipped
    long sequenceNumber = input.getSequenceNumber();
    boolean framesWereSkipped = (sequenceNumber != previousSequenceNumber + 1);
    boolean isKeyframe = state.decode(inDat, input.getOffset(), input.getLength(), pixels, prevPixels,
            outputFormat.getSize().width, outputFormat.getSize().height, framesWereSkipped);
    if (framesWereSkipped && !isKeyframe) {
        output.setDiscard(true);
    } else {
        previousSequenceNumber = sequenceNumber;
    }
    setFlag(output, Buffer.FLAG_KEY_FRAME, isKeyframe);
    return BUFFER_PROCESSED_OK;
}
Example 7: process
import javax.media.Buffer; // import the package/class this method depends on
public int process(Buffer inputBuffer, Buffer outputBuffer) {
    // == prolog
    if (isEOM(inputBuffer)) {
        propagateEOM(outputBuffer);
        return BUFFER_PROCESSED_OK;
    }
    byte[] inBuffer = (byte[]) inputBuffer.getData();
    int inLength = inputBuffer.getLength();
    int inOffset = inputBuffer.getOffset();
    int samplesNumber = inLength;
    AudioFormat af = (AudioFormat) inputBuffer.getFormat();
    if (enabled) {
        int shiftZero = 0;
        int shiftOne = 8;
        if (af.getEndian() == AudioFormat.BIG_ENDIAN) {
            shiftZero = 8;
            shiftOne = 0;
        }
        // == main
        int spa = ((int) af.getSampleRate() * af.getChannels()) / nPowersPerSec;
        long npa = 1000000000L / nPowersPerSec;
        long timeStamp = inputBuffer.getTimeStamp(); // in nanos
        float average = 0;
        long cspa = 0;
        for (int i = 0; i < inLength; i += 2) {
            short sample = (short)
                    (((0xFF & inBuffer[inOffset + i]) << shiftZero) |
                     ((0xFF & inBuffer[inOffset + i + 1]) << shiftOne));
            float normal = (float) sample;
            average = average + normal * normal;
            cspa++;
            if (cspa == spa) {
                cspa = 0;
                average = (float) Math.sqrt((average / spa)) / 32768;
                push(timeStamp, average);
                timeStamp += npa;
                average = 0;
            }
        }
    }
    // == epilog
    inputBuffer.setData(outputBuffer.getData());
    outputBuffer.setFormat(af);
    outputBuffer.setData(inBuffer);
    outputBuffer.setLength(inLength);
    outputBuffer.setOffset(inOffset);
    outputBuffer.setTimeStamp(inputBuffer.getTimeStamp());
    outputBuffer.setFlags(inputBuffer.getFlags());
    return BUFFER_PROCESSED_OK;
}
Example 8: read
import javax.media.Buffer; // import the package/class this method depends on
/**
 * This is called from the Processor to read a frame worth
 * of video data.
 */
public void read(Buffer buf) throws IOException {
    // Check if we've finished all the frames.
    if (nextImage >= images.size()) {
        // We are done. Set EndOfMedia.
        System.err.println("Done reading all images.");
        buf.setEOM(true);
        buf.setOffset(0);
        buf.setLength(0);
        ended = true;
        return;
    }
    String imageFile = (String) images.elementAt(nextImage);
    nextImage++;
    System.err.println(" - reading image file: " + imageFile);
    // Open a random access file for the next image.
    RandomAccessFile raFile;
    raFile = new RandomAccessFile(imageFile, "r");
    byte data[] = null;
    // Check the input buffer type & size.
    if (buf.getData() instanceof byte[])
        data = (byte[]) buf.getData();
    // Check to see if the given buffer is big enough for the frame.
    if (data == null || data.length < raFile.length()) {
        data = new byte[(int) raFile.length()];
        buf.setData(data);
    }
    // Read the entire JPEG image from the file.
    raFile.readFully(data, 0, (int) raFile.length());
    System.err.println(" read " + raFile.length() + " bytes.");
    buf.setOffset(0);
    buf.setLength((int) raFile.length());
    buf.setFormat(format);
    buf.setFlags(buf.getFlags() | Buffer.FLAG_KEY_FRAME);
    // Close the random access file.
    raFile.close();
}
Example 9: readFrame
import javax.media.Buffer; // import the package/class this method depends on
@Override
public void readFrame(Buffer buffer)
{
    synchronized (OGG_SYNC_OBJ)
    {
        try
        {
            nextAudioBuffer(); // TODO: this often generates discard
                               // buffers, we could be smarter about it.
                               // Same for video.
        } catch (IOException e)
        {
            buffer.setLength(0);
            buffer.setDiscard(true);
            throw new RuntimeException(e); // TODO: how to handle?
        }
        /* If playback has begun, top audio buffer off immediately. */
        if (stateflag == 0)
        {
            buffer.setEOM(eomAudio);
            buffer.setLength(0);
            if (!eomAudio)
                buffer.setDiscard(true);
            return;
        } else
        {
            if (audiobuf_ready == 0)
            {
                buffer.setEOM(eomAudio);
                buffer.setLength(0);
                if (!eomAudio)
                    buffer.setDiscard(true);
                // System.out.println("Generated discard buffer: ");
                return;
            } else
            {
                // convert from short array to byte array. TODO:
                // inefficient, should just store in byte array to begin
                // with.
                final byte[] data = new byte[audiobuf.length * 2];
                for (int i = 0; i < audiobuf.length; ++i)
                {
                    // little-endian:
                    data[i * 2] = (byte) (audiobuf[i] & 0xff);
                    data[i * 2 + 1] = (byte) ((audiobuf[i] >> 8) & 0xff);
                }
                buffer.setData(data);
                buffer.setLength(data.length);
                buffer.setOffset(0);
                buffer.setEOM(false);
                buffer.setDiscard(false);
                buffer.setTimeStamp(System.currentTimeMillis()); // TODO
                // System.out.println("Generated audio buffer: " +
                // data.length);
                audiobuf_fill = 0;
                audiobuf_ready = 0;
            }
        }
    }
}
Example 10: read
import javax.media.Buffer; // import the package/class this method depends on
/**
 * This is called from the Processor to read a frame worth
 * of video data.
 */
public void read(Buffer buf) throws IOException {
    // Check if we've finished all the frames.
    if (JPGImages.finished) {
        // We are done. Set EndOfMedia.
        System.err.println("Done reading all images.");
        System.err.println("Frames: " + JPGImages.totalPics);
        System.err.println("Missed frames: " +
                (JPGImages.sentPics - JPGImages.totalPics));
        buf.setEOM(true);
        buf.setOffset(0);
        buf.setLength(0);
        ended = true;
        return;
    }
    float time1 = seqNo * (1000 / frameRate) * 1000000;
    long time = (long) time1;
    buf.setTimeStamp(time);
    buf.setSequenceNumber(seqNo++);
    byte[] picBytes = JPGImages.readNode(); // read the next image in line
                                            // in the DataList.
    byte data[] = null;
    // int data[] = new int[picBytes.length / 4];
    // Read the entire JPEG image from the file.
    data = picBytes;
    /* The commented-out code below remains from a
     * failed attempt to include AVI output. The code is
     * left in the source like this as a reminder to the
     * author.
     */
    // int dataCnt = 0;
    // int mult;
    // for (int cnt = 0; cnt < data.length; cnt++) {
    //     mult = 256 * 256 * 256;
    //     for (int loopCnt = 0; loopCnt < 4; loopCnt++) {
    //         data[picCnt] += picBytes[dataCnt++] * mult;
    //         mult /= 256;
    //     }
    // }
    buf.setData(data);
    buf.setOffset(0);
    buf.setLength(picBytes.length);
    buf.setFormat(format);
    buf.setFlags(buf.getFlags() | Buffer.FLAG_KEY_FRAME);
}