This article collects typical usage examples of the Java method javax.media.Buffer.setTimeStamp. If you are wondering what Buffer.setTimeStamp does, how to call it, or where to find working examples, the curated code samples below should help. You can also explore further usage of the enclosing class, javax.media.Buffer.
The following 9 code examples of Buffer.setTimeStamp are shown, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better Java code samples.
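Most of the examples below stamp each outgoing buffer with a presentation time in nanoseconds (the unit Buffer timestamps use) derived from a frame counter and a frame rate. A minimal sketch of that common pattern, with an illustrative helper name and parameters that do not come from any particular example below:

import javax.media.Buffer;

public class TimeStampSketch {
    /**
     * Stamp a buffer with the presentation time of frame number seqNo.
     * Computing nanoseconds per frame in floating point avoids the
     * truncation that 1000 / frameRate would cause for integral rates.
     */
    static void stampFrame(Buffer buffer, long seqNo, float frameRate) {
        long nanosPerFrame = (long) (1000000000.0 / frameRate);
        buffer.setTimeStamp(seqNo * nanosPerFrame); // timestamp is in nanoseconds
        buffer.setSequenceNumber(seqNo);
    }
}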
Example 1: read
import javax.media.Buffer; // import the package/class this method depends on
public void read(Buffer buffer) throws IOException {
    synchronized (this) {
        Object outdata = buffer.getData();
        if (outdata == null || !(outdata.getClass() == Format.intArray) ||
                ((int[]) outdata).length < maxDataLength) {
            outdata = new int[maxDataLength];
            buffer.setData(outdata);
        }
        buffer.setFormat(rgbFormat);
        // Presentation time in nanoseconds, derived from the frame counter.
        buffer.setTimeStamp((long) (seqNo * (1000 / frameRate) * 1000000));
        BufferedImage bi = robot.createScreenCapture(
                new Rectangle(x, y, width, height));
        bi.getRGB(0, 0, width, height, (int[]) outdata, 0, width);
        buffer.setSequenceNumber(seqNo);
        buffer.setLength(maxDataLength);
        buffer.setFlags(Buffer.FLAG_KEY_FRAME);
        buffer.setHeader(null);
        seqNo++;
    }
}
Example 2: readFrame
import javax.media.Buffer; // import the package/class this method depends on
@Override
public void readFrame(Buffer buffer)
{
    synchronized (QT_SYNC_OBJ)
    {
        BufferedImage bi;
        try
        {
            qtSnapper.next();
            bi = qtSnapper.getFrame();
        } catch (QTException e)
        {
            throw new RuntimeException(e); // TODO: how to handle.
        }
        if (bi != null)
        {
            final Buffer b = ImageToBuffer.createBuffer(bi, format.getFrameRate());
            buffer.setData(b.getData());
            buffer.setLength(b.getLength());
            buffer.setOffset(b.getOffset());
            buffer.setEOM(false);
            buffer.setDiscard(false);
            // Convert the QuickTime frame time (in time-scale units) to nanoseconds.
            buffer.setTimeStamp((qtSnapper.getFrameTime() * 1000000000L) / qtSnapper.getTimeScale());
        } else
        {
            buffer.setEOM(true);
            buffer.setLength(0);
        }
    }
}
Example 3: read
import javax.media.Buffer; // import the package/class this method depends on
public void read(Buffer buffer) throws IOException {
    synchronized (this) {
        Object outdata = buffer.getData();
        if (outdata == null || !(outdata.getClass() == Format.byteArray) ||
                ((byte[]) outdata).length < maxDataLength) {
            outdata = new byte[maxDataLength];
            buffer.setData(outdata);
        }
        if (videoData) {
            buffer.setFormat(rgbFormat);
            buffer.setTimeStamp((long) (seqNo * (1000 / frameRate) * 1000000));
            int lineNo = (seqNo * 2) % size.height;
            int chunkStart = lineNo * size.width * 3;
            System.arraycopy(data, chunkStart,
                    outdata, 0,
                    maxDataLength - (chunkStart));
            if (chunkStart != 0) {
                System.arraycopy(data, 0,
                        outdata, maxDataLength - chunkStart,
                        chunkStart);
            }
        } else {
            buffer.setFormat(audioFormat);
            buffer.setTimeStamp(1000000000 / 8);
            for (int i = 0; i < 1000; i++) {
                ((byte[]) outdata)[i] = (byte) (Math.sin(i / freq) * 32);
                freq = (freq + 0.01);
                if (freq > 10.0)
                    freq = 2.0;
            }
        }
        buffer.setSequenceNumber(seqNo);
        buffer.setLength(maxDataLength);
        buffer.setFlags(0);
        buffer.setHeader(null);
        seqNo++;
    }
}
Example 4: read
import javax.media.Buffer; // import the package/class this method depends on
public void read(Buffer buffer) {
    //System.err.println("entering read");
    synchronized (this) {
        while (!dataPending) {
            try {
                wait();
            } catch (InterruptedException ie) {
            }
        }
    }
    //System.err.println("... in read");
    // Swap the caller's buffer contents with the pending buffer.
    Object data = buffer.getData();
    Format format = buffer.getFormat();
    Object header = buffer.getHeader();
    buffer.setData(pendingBuffer.getData());
    buffer.setFormat(pendingBuffer.getFormat());
    buffer.setHeader(pendingBuffer.getHeader());
    buffer.setTimeStamp(pendingBuffer.getTimeStamp());
    buffer.setFlags(pendingBuffer.getFlags() | Buffer.FLAG_NO_SYNC);
    buffer.setLength(pendingBuffer.getLength());
    buffer.setOffset(pendingBuffer.getOffset());
    buffer.setSequenceNumber(pendingBuffer.getSequenceNumber());
    pendingBuffer.setData(data);
    pendingBuffer.setFormat(format);
    pendingBuffer.setHeader(header);
    dataPending = false;
    synchronized (this) {
        notifyAll();
    }
}
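The swap above avoids allocating a new data array for every frame: the caller's old payload is handed back to pendingBuffer for reuse, and the timestamp travels with the data via setTimeStamp/getTimeStamp. The producer side is not shown in this example; a rough sketch of what it might look like in the same class (the method name and parameters are invented, and it assumes the pendingBuffer and dataPending fields used above):

// Hypothetical producer counterpart to the swap-based read() above.
void deliverFrame(Object newData, int length, Format fmt, long timeStampNanos)
        throws InterruptedException {
    synchronized (this) {
        while (dataPending) {
            wait(); // wait until read() has consumed the previous frame
        }
        pendingBuffer.setData(newData);
        pendingBuffer.setLength(length);
        pendingBuffer.setFormat(fmt);
        pendingBuffer.setTimeStamp(timeStampNanos); // nanoseconds, as read() expects
        dataPending = true;
        notifyAll(); // wake a reader blocked in read()
    }
}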
Example 5: copyMetaTo
import javax.media.Buffer; // import the package/class this method depends on
/** Copies all meta-data fields from in to out. */
protected void copyMetaTo(Buffer in, Buffer out) {
    //out.setData(in.getData());
    //out.setHeader(in.getHeader());
    //out.setFormat(in.getFormat());
    //out.setLength(in.getLength());
    //out.setOffset(in.getOffset());
    out.setTimeStamp(in.getTimeStamp());
    out.setDuration(in.getDuration());
    out.setSequenceNumber(in.getSequenceNumber());
    out.setFlags(in.getFlags());
}
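A helper like this is typically called from a codec's process() method once the output payload is in place, so the output buffer carries the same timing information (timestamp, duration, sequence number, flags) as the input. A usage sketch, assuming the class extends a codec base that provides copyMetaTo() and the BUFFER_PROCESSED_OK constant, as in Example 6 below; the pass-through body is illustrative only:

// Illustrative only: a pass-through process() that reuses copyMetaTo().
public int process(Buffer in, Buffer out) {
    out.setData(in.getData());   // hand the payload through unchanged
    out.setLength(in.getLength());
    out.setOffset(in.getOffset());
    out.setFormat(in.getFormat());
    copyMetaTo(in, out);         // timestamp, duration, sequence number, flags
    return BUFFER_PROCESSED_OK;
}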
Example 6: process
import javax.media.Buffer; // import the package/class this method depends on
public int process(Buffer inputBuffer, Buffer outputBuffer) {
    // == prolog
    if (isEOM(inputBuffer)) {
        propagateEOM(outputBuffer);
        return BUFFER_PROCESSED_OK;
    }
    byte[] inBuffer = (byte[]) inputBuffer.getData();
    int inLength = inputBuffer.getLength();
    int inOffset = inputBuffer.getOffset();
    int samplesNumber = inLength;
    AudioFormat af = (AudioFormat) inputBuffer.getFormat();
    if (enabled) {
        int shiftZero = 0;
        int shiftOne = 8;
        if (af.getEndian() == AudioFormat.BIG_ENDIAN) {
            shiftZero = 8;
            shiftOne = 0;
        }
        // == main
        int spa = ((int) af.getSampleRate() * af.getChannels()) / nPowersPerSec;
        long npa = 1000000000L / nPowersPerSec;
        long timeStamp = inputBuffer.getTimeStamp(); // in nanos
        float average = 0;
        long cspa = 0;
        for (int i = 0; i < inLength; i += 2) {
            short sample = (short)
                    (((0xFF & inBuffer[inOffset + i]) << shiftZero) |
                     ((0xFF & inBuffer[inOffset + i + 1]) << shiftOne));
            float normal = (float) sample;
            average = average + normal * normal;
            cspa++;
            if (cspa == spa) {
                cspa = 0;
                average = (float) Math.sqrt((average / spa)) / 32768;
                push(timeStamp, average);
                timeStamp += npa;
                average = 0;
            }
        }
    }
    // == epilog
    inputBuffer.setData(outputBuffer.getData());
    outputBuffer.setFormat(af);
    outputBuffer.setData(inBuffer);
    outputBuffer.setLength(inLength);
    outputBuffer.setOffset(inOffset);
    outputBuffer.setTimeStamp(inputBuffer.getTimeStamp());
    outputBuffer.setFlags(inputBuffer.getFlags());
    return BUFFER_PROCESSED_OK;
}
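The timestamp bookkeeping above starts each buffer at the input buffer's own timestamp and advances it by npa nanoseconds for every power value pushed. For concreteness, a worked example with typical values (the format numbers are illustrative, not taken from the example):

// Worked example of the window bookkeeping in Example 6.
// Assume 16-bit interleaved stereo at 44100 Hz and nPowersPerSec = 10.
int sampleRate = 44100, channels = 2, nPowersPerSec = 10;
int spa = sampleRate * channels / nPowersPerSec; // 8820 interleaved samples per power value
long npa = 1000000000L / nPowersPerSec;          // 100,000,000 ns = 100 ms per power value
// 8820 interleaved samples = 4410 sample frames = 100 ms of audio,
// so each push() is stamped 100 ms after the previous one.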
Example 7: readFrame
import javax.media.Buffer; // import the package/class this method depends on
@Override
public void readFrame(Buffer buffer)
{
    synchronized (OGG_SYNC_OBJ)
    {
        try
        {
            nextAudioBuffer(); // TODO: this often generates discard
                               // buffers, we could be smarter about it.
                               // Same for video.
        } catch (IOException e)
        {
            buffer.setLength(0);
            buffer.setDiscard(true);
            throw new RuntimeException(e); // TODO: how to handle?
        }
        /* If playback has begun, top audio buffer off immediately. */
        if (stateflag == 0)
        {
            buffer.setEOM(eomAudio);
            buffer.setLength(0);
            if (!eomAudio)
                buffer.setDiscard(true);
            return;
        } else
        {
            if (audiobuf_ready == 0)
            {
                buffer.setEOM(eomAudio);
                buffer.setLength(0);
                if (!eomAudio)
                    buffer.setDiscard(true);
                // System.out.println("Generated discard buffer: ");
                return;
            } else
            {
                // Convert from short array to byte array. TODO:
                // inefficient, should just store in a byte array to begin
                // with.
                final byte[] data = new byte[audiobuf.length * 2];
                for (int i = 0; i < audiobuf.length; ++i)
                {
                    // little-endian:
                    data[i * 2] = (byte) (audiobuf[i] & 0xff);
                    data[i * 2 + 1] = (byte) ((audiobuf[i] >> 8) & 0xff);
                }
                buffer.setData(data);
                buffer.setLength(data.length);
                buffer.setOffset(0);
                buffer.setEOM(false);
                buffer.setDiscard(false);
                buffer.setTimeStamp(System.currentTimeMillis()); // TODO
                // System.out.println("Generated audio buffer: " + data.length);
                audiobuf_fill = 0;
                audiobuf_ready = 0;
            }
        }
    }
}
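Note that the setTimeStamp(System.currentTimeMillis()) call above is already marked TODO in the source: it stores wall-clock milliseconds, whereas Buffer timestamps are media time in nanoseconds. One common alternative, sketched here under the assumption that the track keeps a running count of delivered samples (the samplesDelivered, sampleRate, and channels fields are hypothetical and not part of this example):

// Hypothetical replacement for the TODO above: media time from a sample counter.
// Assumed fields: long samplesDelivered; int sampleRate; int channels;
long frames = samplesDelivered / channels;              // sample frames delivered so far
buffer.setTimeStamp(frames * 1000000000L / sampleRate); // nanoseconds of audio delivered
samplesDelivered += audiobuf.length;                    // audiobuf holds interleaved shorts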
Example 8: read
import javax.media.Buffer; // import the package/class this method depends on
/**
 * This is called from the Processor to read a frame's worth
 * of video data.
 */
public void read(Buffer buf) throws IOException {
    // Check if we've finished all the frames.
    if (JPGImages.finished) {
        // We are done. Set EndOfMedia.
        System.err.println("Done reading all images.");
        System.err.println("Frames: " + JPGImages.totalPics);
        System.err.println("Missed frames: " +
                (JPGImages.sentPics - JPGImages.totalPics));
        buf.setEOM(true);
        buf.setOffset(0);
        buf.setLength(0);
        ended = true;
        return;
    }
    float time1 = seqNo * (1000 / frameRate) * 1000000;
    long time = (long) time1;
    buf.setTimeStamp(time);
    buf.setSequenceNumber(seqNo++);
    byte[] picBytes = JPGImages.readNode(); // read the next image in line
                                            // in the DataList.
    byte data[] = null;
    // int data[] = new int[picBytes.length / 4];
    // Read the entire JPEG image from the file.
    data = picBytes;
    /* The commented-out code below is a remnant of a failed attempt to
     * include AVI output. It is left in the source as a reminder to the
     * author.
     */
    // int dataCnt = 0;
    // int mult;
    // for (int cnt = 0; cnt < data.length; cnt++) {
    //     mult = 256 * 256 * 256;
    //     for (int loopCnt = 0; loopCnt < 4; loopCnt++) {
    //         data[picCnt] += picBytes[dataCnt++] * mult;
    //         mult /= 256;
    //     }
    // }
    buf.setData(data);
    buf.setOffset(0);
    buf.setLength(picBytes.length);
    buf.setFormat(format);
    buf.setFlags(buf.getFlags() | Buffer.FLAG_KEY_FRAME);
}
Example 9: read
import javax.media.Buffer; // import the package/class this method depends on
public void read(Buffer buffer) throws IOException {
    synchronized (this) {
        Object outdata = buffer.getData();
        if (rateChanged || outdata == null
                || !(outdata.getClass() == Format.intArray)
                || ((int[]) outdata).length < maxDataLength) {
            outdata = new int[maxDataLength];
            buffer.setData(outdata);
            // System.out.println("rateChanged is: " + rateChanged
            //         + " and framerate is: " + frameRate);
            rateChanged = false;
        }
        buffer.setFormat(rgbFormat);
        if (!everChanged) {
            timeStamp = (long) (seqNo * (1000 / frameRate) * 1000000);
        } else {
            if (rateChanged2) {
                OldStampCons = timeStamp;
                rateChanged2 = false;
            }
            timeStamp = OldStampCons
                    + (long) (seqNo * (1000 / frameRate) * 1000000);
            // System.out.println("timeStamp is: " + timeStamp);
        }
        /*
         * if (rateChanged2 && everChanged) { timeStamp = timeStamp + (long)
         * (seqNo * (1000 / frameRate) * 1000000);
         * System.out.println("timeStamp is: " + timeStamp); rateChanged2
         * ++; } else { timeStamp = (long) (seqNo * (1000 / frameRate) *
         * 1000000); System.out.println("timeStamp is: " + timeStamp); }
         */
        // buffer.setTimeStamp( (long) (seqNo * (1000 / frameRate) *
        // 1000000) );
        buffer.setTimeStamp(timeStamp);
        // System.out.println("timeStamp is: " + timeStamp);
        // System.out.println("framerate is: " + frameRate);
        int ww = width;
        // ww = ww + 1;
        // x = x + 1;
        // y = y + 1;
        // height = height + 1;
        // System.out.println("width is: " + width);
        BufferedImage bi = robot.createScreenCapture(new Rectangle(x, y,
                width, height));
        // bi.getRGB(0, 0, width, height,
        //         (int[]) outdata, 0, width);
        bi.getRGB(0, 0, width, height, (int[]) outdata, 0, width);
        buffer.setSequenceNumber(seqNo);
        buffer.setLength(maxDataLength);
        buffer.setFlags(Buffer.FLAG_KEY_FRAME);
        buffer.setHeader(null);
        seqNo++;
    }
}