This page collects typical usage examples of the Java method org.monte.media.io.ByteArrayImageInputStream.readUnsignedInt: what the method does, how it is called, and what it looks like in real code. If that is what you are looking for, the selected code examples below should help; you can also look further into the enclosing class, org.monte.media.io.ByteArrayImageInputStream.
The sections below show ByteArrayImageInputStream.readUnsignedInt in use, ordered by popularity.
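Before the project examples, a minimal, self-contained sketch of what the method does may help: readUnsignedInt reads four bytes in the stream's byte order and returns them zero-extended as a non-negative long. The class name and payload bytes below are made up for illustration.

import java.io.IOException;
import java.nio.ByteOrder;
import org.monte.media.io.ByteArrayImageInputStream;

public class ReadUnsignedIntDemo {
    public static void main(String[] args) throws IOException {
        // 0xFFFFFFFE stored as a little-endian DWORD (made-up payload for illustration).
        byte[] data = {(byte) 0xFE, (byte) 0xFF, (byte) 0xFF, (byte) 0xFF};
        ByteArrayImageInputStream in = new ByteArrayImageInputStream(data, ByteOrder.LITTLE_ENDIAN);
        // Prints 4294967294: the 32-bit value is zero-extended into a long instead of
        // wrapping to a negative int.
        System.out.println(in.readUnsignedInt());
        in.close();
    }
}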
Example 1: readAVIH
import org.monte.media.io.ByteArrayImageInputStream; // import the package/class the method depends on
/**
* Reads the AVI Main Header and returns a MainHeader object.
*/
private MainHeader readAVIH(byte[] data) throws IOException, ParseException {
ByteArrayImageInputStream in = new ByteArrayImageInputStream(data, ByteOrder.LITTLE_ENDIAN);
MainHeader mh = new MainHeader();
mh.microSecPerFrame = in.readUnsignedInt();
mh.maxBytesPerSec = in.readUnsignedInt();
mh.paddingGranularity = in.readUnsignedInt();
mh.flags = in.readInt();
mh.totalFrames = in.readUnsignedInt();
mh.initialFrames = in.readUnsignedInt();
mh.streams = in.readUnsignedInt();
mh.suggestedBufferSize = in.readUnsignedInt();
mh.size = new Dimension(in.readInt(), in.readInt());
return mh;
}
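To make the fixed little-endian layout of the "avih" chunk concrete, the sketch below builds a synthetic payload in exactly the order that readAVIH consumes it: eight DWORDs followed by width and height. The class name and all values are made up for illustration; 0x10 stands for the AVIF_HASINDEX flag.

import java.nio.ByteBuffer;
import java.nio.ByteOrder;

public class AvihPayloadSketch {
    // Builds a synthetic 40-byte "avih" payload in the field order read by readAVIH.
    static byte[] syntheticAvih() {
        ByteBuffer buf = ByteBuffer.allocate(40).order(ByteOrder.LITTLE_ENDIAN);
        buf.putInt(33333); // microSecPerFrame, roughly 30 frames per second
        buf.putInt(0);     // maxBytesPerSec
        buf.putInt(0);     // paddingGranularity
        buf.putInt(0x10);  // flags
        buf.putInt(300);   // totalFrames
        buf.putInt(0);     // initialFrames
        buf.putInt(1);     // streams
        buf.putInt(0);     // suggestedBufferSize
        buf.putInt(640);   // width
        buf.putInt(480);   // height
        return buf.array();
    }
}

Passing such an array to readAVIH yields a MainHeader whose fields mirror the values written above.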
Example 2: readVideoSTRF
import org.monte.media.io.ByteArrayImageInputStream; // import the package/class the method depends on
/**
* <p> Reads the {@code BITMAPINFOHEADER} struct of a video "strf" chunk and
* stores its values in the given video track.
* <pre>
* //----------------------
* // AVI Bitmap Info Header
* //----------------------
* typedef struct {
*     BYTE blue;
*     BYTE green;
*     BYTE red;
*     BYTE reserved;
* } RGBQUAD;
*
* // Values for this enum taken from:
* // http://www.fourcc.org/index.php?http%3A//www.fourcc.org/rgb.php
* enum {
*     BI_RGB = 0x00000000, RGB = 0x32424752,   // RGB is an alias for BI_RGB
*     BI_RLE8 = 0x00000001, RLE8 = 0x38454C52, // RLE8 is an alias for BI_RLE8
*     BI_RLE4 = 0x00000002, RLE4 = 0x34454C52, // RLE4 is an alias for BI_RLE4
*     BI_BITFIELDS = 0x00000003,
*     raw = 0x32776173, RGBA = 0x41424752, RGBT = 0x54424752,
*     cvid = "cvid"
* } bitmapCompression;
*
* typedef struct {
*     DWORD structSize;
*     DWORD width;
*     DWORD height;
*     WORD planes;
*     WORD bitCount;
*     FOURCC enum bitmapCompression compression;
*     DWORD imageSizeInBytes;
*     DWORD xPelsPerMeter;
*     DWORD yPelsPerMeter;
*     DWORD numberOfColorsUsed;
*     DWORD numberOfColorsImportant;
*     RGBQUAD colors[];
* } BITMAPINFOHEADER;
* </pre>
*
* @param tr the video track to fill in
* @param data the "strf" chunk data
* @throws java.io.IOException if the data cannot be read
*/
private void readVideoSTRF(VideoTrack tr, byte[] data) throws IOException {
ByteArrayImageInputStream in = new ByteArrayImageInputStream(data, ByteOrder.LITTLE_ENDIAN);
long structSize = in.readUnsignedInt();
tr.width = in.readInt();
tr.height = in.readInt();
tr.planes = in.readUnsignedShort();
tr.bitCount = in.readUnsignedShort();
in.setByteOrder(ByteOrder.BIG_ENDIAN);
tr.compression = intToType(in.readInt());
in.setByteOrder(ByteOrder.LITTLE_ENDIAN);
long imageSizeInBytes = in.readUnsignedInt();
tr.xPelsPerMeter = in.readUnsignedInt();
tr.yPelsPerMeter = in.readUnsignedInt();
tr.clrUsed = in.readUnsignedInt();
tr.clrImportant = in.readUnsignedInt();
if (tr.bitCount == 0) {
tr.bitCount = (int) (imageSizeInBytes / tr.width / tr.height * 8);
}
tr.format = new Format(MimeTypeKey, MIME_AVI,
MediaTypeKey, MediaType.VIDEO,
EncodingKey, tr.compression,
DataClassKey, byte[].class,
WidthKey, tr.width,
HeightKey, tr.height,
DepthKey, tr.bitCount,
PixelAspectRatioKey, new Rational(1, 1),
FrameRateKey, new Rational(tr.rate, tr.scale),
FixedFrameRateKey, true);
}
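The intToType helper used above is not shown in these examples. Assuming it simply interprets the big-endian int as four ASCII characters, a plausible sketch looks like this; switching the stream to big-endian just for the compression field keeps the FOURCC characters in the order they appear in the file.

import java.nio.charset.StandardCharsets;

// Plausible sketch of the intToType helper (the real implementation is not shown here):
// it maps the four bytes of the big-endian int to ASCII, so the FOURCC of "cvid"
// becomes the string "cvid".
private static String intToType(int id) {
    byte[] b = {(byte) (id >>> 24), (byte) (id >>> 16), (byte) (id >>> 8), (byte) id};
    return new String(b, StandardCharsets.US_ASCII);
}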
Example 3: readSTRH
import org.monte.media.io.ByteArrayImageInputStream; // import the package/class the method depends on
/**
* Reads an AVI Stream Header and returns a Track object.
*/
/*typedef struct {
* FOURCC enum aviStrhType type;
* // Contains a FOURCC that specifies the type of the data contained in
* // the stream. The following standard AVI values for video and audio are
* // defined.
* FOURCC handler;
* DWORD set aviStrhFlags flags;
* WORD priority;
* WORD language;
* DWORD initialFrames;
* DWORD scale;
* DWORD rate;
* DWORD startTime;
* DWORD length;
* DWORD suggestedBufferSize;
* DWORD quality;
* DWORD sampleSize;
* aviRectangle frame;
* } AVISTREAMHEADER; */
private Track readSTRH(byte[] data) throws IOException, ParseException {
ByteArrayImageInputStream in = new ByteArrayImageInputStream(data, ByteOrder.LITTLE_ENDIAN);
Track tr = null;
in.setByteOrder(ByteOrder.BIG_ENDIAN);
String type = intToType(in.readInt());
in.setByteOrder(ByteOrder.LITTLE_ENDIAN);
int handler = in.readInt();
if (type.equals(AVIMediaType.AUDIO.fccType)) {
tr = new AudioTrack(tracks.size(), handler);
} else if (type.equals(AVIMediaType.VIDEO.fccType)) {
tr = new VideoTrack(tracks.size(), handler, null);
} else if (type.equals(AVIMediaType.MIDI.fccType)) {
tr = new MidiTrack(tracks.size(), handler);
} else if (type.equals(AVIMediaType.TEXT.fccType)) {
tr = new TextTrack(tracks.size(), handler);
} else {
throw new ParseException("Unknown track type " + type);
}
tr.fccHandler = handler;
tr.flags = in.readInt();
tr.priority = in.readUnsignedShort();
tr.language = in.readUnsignedShort();
tr.initialFrames = in.readUnsignedInt();
tr.scale = in.readUnsignedInt();
tr.rate = in.readUnsignedInt();
tr.startTime = in.readUnsignedInt();
tr.length = in.readUnsignedInt();
/*tr.suggestedBufferSize=*/ in.readUnsignedInt();
tr.quality = in.readInt();
/*tr.sampleSize=*/ in.readUnsignedInt();
tr.frameLeft = in.readShort();
tr.frameTop = in.readShort();
tr.frameRight = in.readShort();
tr.frameBottom = in.readShort();
return tr;
}
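The rate and scale fields read above define the stream's timing: rate divided by scale gives samples per second, which for a video stream is the frame rate that readVideoSTRF later wraps as new Rational(tr.rate, tr.scale). A tiny sketch with made-up values:

public class FrameRateSketch {
    public static void main(String[] args) {
        // Made-up values: 30000 / 1001 is the common NTSC rate of roughly 29.97 frames per second.
        long rate = 30000;
        long scale = 1001;
        System.out.println((double) rate / scale); // prints roughly 29.97
    }
}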
Example 4: readAudioSTRF
import org.monte.media.io.ByteArrayImageInputStream; // import the package/class the method depends on
/**
* <p> The format of an audio track is defined in a "strf" chunk, which
* contains a {@code WAVEFORMATEX} struct.
* <pre>
* ----------------------
* AVI Wave Format Header
* ----------------------
* // values for this enum taken from mmreg.h
* enum {
* WAVE_FORMAT_PCM = 0x0001,
* // Microsoft Corporation
* ...many more...
* } wFormatTagEnum;
*
* typedef struct {
* WORD enum wFormatTagEnum formatTag;
* WORD numberOfChannels;
* DWORD samplesPerSec;
* DWORD avgBytesPerSec;
* WORD blockAlignment;
* WORD bitsPerSample;
* WORD cbSize;
* // Size, in bytes, of extra format information appended to the end of the
* // WAVEFORMATEX structure. This information can be used by non-PCM formats
* // to store extra attributes for the "wFormatTag". If no extra information
* // is required by the "wFormatTag", this member must be set to zero. For
* // WAVE_FORMAT_PCM formats (and only WAVE_FORMAT_PCM formats), this member
* // is ignored.
* byte[cbSize] extra;
* } WAVEFORMATEX;
* </pre>
*
*
* @param tr
* @param data
* @throws java.io.IOException
*/
private void readAudioSTRF(AudioTrack tr, byte[] data) throws IOException {
ByteArrayImageInputStream in = new ByteArrayImageInputStream(data, ByteOrder.LITTLE_ENDIAN);
String formatTag = RIFFParser.idToString(in.readUnsignedShort());
tr.channels = in.readUnsignedShort();
tr.samplesPerSec = in.readUnsignedInt();
tr.avgBytesPerSec = in.readUnsignedInt();
tr.blockAlign = in.readUnsignedShort();
tr.bitsPerSample = in.readUnsignedShort();
if (data.length > 16) {
long cbSize = in.readUnsignedShort();
// FIXME - Don't ignore extra format information
}
tr.format = new Format(MimeTypeKey, MIME_AVI,
MediaTypeKey, MediaType.AUDIO,
EncodingKey, formatTag,
SampleRateKey, Rational.valueOf(tr.samplesPerSec),
SampleSizeInBitsKey, tr.bitsPerSample,
ChannelsKey, tr.channels,
FrameSizeKey, tr.blockAlign,
FrameRateKey, new Rational(tr.samplesPerSec, 1),
SignedKey, tr.bitsPerSample != 8,
ByteOrderKey, ByteOrder.LITTLE_ENDIAN);
}
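The method above reads cbSize but drops the extra format bytes that follow it (see the FIXME). Below is a minimal sketch of how those bytes could be captured instead, assuming the caller wants to keep them; the helper name is made up and not part of the library.

import java.io.IOException;
import javax.imageio.stream.ImageInputStream;

// Sketch only: read the cbSize extra format bytes instead of discarding them.
// Where to store the result is up to the caller; the examples above define no field for it.
private static byte[] readExtraFormatInfo(ImageInputStream in, int dataLength) throws IOException {
    if (dataLength <= 16) {
        return new byte[0];              // no cbSize field present in the chunk
    }
    int cbSize = in.readUnsignedShort(); // size of the extra block, in bytes
    byte[] extra = new byte[cbSize];
    in.readFully(extra, 0, cbSize);
    return extra;
}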