This article collects typical usage examples of the Java class javax.sound.sampled.AudioFileFormat.Type. If you are unsure what the Type class does or how to use it, the curated code examples below should help.
Type is a nested class of javax.sound.sampled.AudioFileFormat. Fifteen code examples are shown below, ordered by popularity.
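Before the individual examples, here is a minimal, self-contained sketch of the most common use of Type: passing one of the predefined constants (Type.WAVE, Type.AU, Type.AIFF, ...) to AudioSystem.write. The one-second buffer of silence and the output file name are made up purely for illustration.

import java.io.ByteArrayInputStream;
import java.io.File;
import javax.sound.sampled.AudioFileFormat.Type;
import javax.sound.sampled.AudioFormat;
import javax.sound.sampled.AudioInputStream;
import javax.sound.sampled.AudioSystem;

public class TypeDemo {
    public static void main(String[] args) throws Exception {
        // 16-bit signed PCM, mono, 44.1 kHz, little-endian
        AudioFormat format = new AudioFormat(44100f, 16, 1, true, false);
        byte[] silence = new byte[44100 * 2]; // one second of silence (2 bytes per frame)
        AudioInputStream stream = new AudioInputStream(
                new ByteArrayInputStream(silence), format, 44100);
        // Type.WAVE selects the WAV container; Type.AU or Type.AIFF work the same way
        AudioSystem.write(stream, Type.WAVE, new File("silence.wav"));
    }
}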
Example 1: write
import javax.sound.sampled.AudioFileFormat.Type; // import the required package/class

@Override
public int write(AudioInputStream stream, Type fileType, OutputStream out)
        throws IOException {
    Objects.requireNonNull(stream);
    Objects.requireNonNull(fileType);
    Objects.requireNonNull(out);

    checkFormat(fileType, stream);
    if (stream.getFormat().isBigEndian())
        stream = toLittleEndian(stream);
    RIFFWriter writer = new RIFFWriter(new NoCloseOutputStream(out), "WAVE");
    write(stream, writer);
    int fpointer = (int) writer.getFilePointer();
    writer.close();
    return fpointer;
}
Example 2: getAudioFileTypes
import javax.sound.sampled.AudioFileFormat.Type; // import the required package/class

@Override
public Type[] getAudioFileTypes(AudioInputStream stream) {
    Type[] filetypes = new Type[types.length];
    System.arraycopy(types, 0, filetypes, 0, types.length);

    // make sure we can write this stream
    AudioFormat format = stream.getFormat();
    AudioFormat.Encoding encoding = format.getEncoding();
    if (AudioFormat.Encoding.ALAW.equals(encoding)
            || AudioFormat.Encoding.ULAW.equals(encoding)
            || AudioFormat.Encoding.PCM_SIGNED.equals(encoding)
            || AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding)
            || AudioFormat.Encoding.PCM_FLOAT.equals(encoding)) {
        return filetypes;
    }
    return new Type[0];
}
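On the calling side, a provider method like the one above is normally reached indirectly through AudioSystem, which aggregates every registered AudioFileWriter. A brief usage sketch, assuming stream is an AudioInputStream obtained elsewhere:

// Ask the installed writers which containers can hold this stream
Type[] supported = AudioSystem.getAudioFileTypes(stream);
for (Type t : supported) {
    System.out.println(t + " (*." + t.getExtension() + ")");
}
// Or test one specific container before writing
if (AudioSystem.isFileTypeSupported(Type.WAVE, stream)) {
    AudioSystem.write(stream, Type.WAVE, new java.io.File("out.wav"));
}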
Example 3: write
import javax.sound.sampled.AudioFileFormat.Type; // import the required package/class

@Override
public int write(AudioInputStream stream, Type fileType, OutputStream out) throws IOException {
    Objects.requireNonNull(stream);
    Objects.requireNonNull(fileType);
    Objects.requireNonNull(out);

    // we must know the total data length to calculate the file length
    //$$fb 2001-07-13: fix for bug 4351296: do not throw an exception
    //if( stream.getFrameLength() == AudioSystem.NOT_SPECIFIED ) {
    //    throw new IOException("stream length not specified");
    //}

    // throws IllegalArgumentException if not supported
    AuFileFormat auFileFormat = (AuFileFormat) getAudioFileFormat(fileType, stream);
    return writeAuFile(stream, auFileFormat, out);
}
Example 4: stop
import javax.sound.sampled.AudioFileFormat.Type; // import the required package/class

@Override
public void stop() throws RenderCommandException {
    // pack the recorded float samples into 16-bit little-endian PCM
    byte[] bytes = new byte[buffer.size() * 2];
    for (int i = buffer.size(); --i >= 0;) {
        int val = (int) (buffer.get(i) * Short.MAX_VALUE);
        bytes[i * 2 + 0] = (byte) val;        // low byte
        bytes[i * 2 + 1] = (byte) (val >> 8); // high byte
    }
    AudioInputStream in = new AudioInputStream(new ByteArrayInputStream(bytes),
            new AudioFormat(sRate, 16, 1, true, false),
            buffer.size());
    try (FileOutputStream out = new FileOutputStream(file)) {
        AudioSystem.write(in, Type.WAVE, out);
    } catch (IOException e) {
        throw new RenderCommandException(e);
    }
    super.stop();
}
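The loop above packs each float sample into two bytes, low byte first, which matches the little-endian AudioFormat handed to the AudioInputStream. The same conversion can also be written with java.nio.ByteBuffer; this is only a sketch and assumes buffer is an iterable collection of float samples in the range [-1, 1]:

ByteBuffer bb = ByteBuffer.allocate(buffer.size() * 2).order(ByteOrder.LITTLE_ENDIAN);
for (float sample : buffer) {
    bb.putShort((short) (sample * Short.MAX_VALUE)); // scale to the 16-bit range
}
byte[] bytes = bb.array();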
Example 5: MidiAudio
import javax.sound.sampled.AudioFileFormat.Type; // import the required package/class

MidiAudio(InputStream data) throws MidiUnavailableException {
    getSequencer();
    setSequence(data);
    sequencer.addMetaEventListener(new MetaEventListener() {
        public void meta(MetaMessage msg) {
            if (msg.getType() == 47) { // End of Track meta event
                try {
                    sequencer.setSequence(sequence);
                } catch (InvalidMidiDataException e) {
                    ErrorHandler.alert(e);
                }
                sequencer.setTickPosition(0);
                if (loop) {
                    sequencer.start();
                }
            }
        }
    });
    AudioFormat base = new AudioFormat(44100, 16, 2, true, false);
    format = new AudioFileFormat(new Type("MIDI", "mid"), base,
            (int) (base.getFrameRate() * (sequence.getMicrosecondLength() / 1000000 + 4))).getFormat();
}
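Unlike the other examples, this constructor builds a Type instance directly instead of using a predefined constant; a Type is essentially just a name plus a file extension. A tiny sketch of what such an instance exposes:

Type midiType = new Type("MIDI", "mid");
System.out.println(midiType);                // toString() returns the name: MIDI
System.out.println(midiType.getExtension()); // mid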
Example 6: glitchPixels
import javax.sound.sampled.AudioFileFormat.Type; // import the required package/class

@Override
public byte[] glitchPixels(byte[] inputImageBytes) throws Exception
{
    int audioBitRate = ((Integer) getPixelGlitchParameters().get("bitRateBlend")).intValue();
    float bitRateBlend = (float) audioBitRate / 10;
    if (bitRateBlend < 0.1F || bitRateBlend > 0.9F)
    {
        return null;
    }
    BufferedImage inputImage = ImageUtil.getImageFromBytes(inputImageBytes);
    InputStream imageInputStream = new ByteArrayInputStream(inputImageBytes);
    AudioInputStream distortionAudioStream = new AudioInputStream(imageInputStream,
            new AudioFormat(AudioFormat.Encoding.ULAW, ThreadLocalRandom.current().nextInt(8000, 20000),
                    8, 5, 9, ThreadLocalRandom.current().nextInt(8000, 20000), true),
            inputImageBytes.length);
    ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
    AudioSystem.write(distortionAudioStream, Type.WAVE, outputStream);
    BufferedImage outputImage = new BufferedImage(inputImage.getWidth(), inputImage.getHeight(), BufferedImage.TYPE_4BYTE_ABGR);
    byte[] imageData = ((DataBufferByte) outputImage.getRaster().getDataBuffer()).getData();
    System.arraycopy(outputStream.toByteArray(), 0, imageData, 0, outputStream.toByteArray().length);
    int[] abgrOffsets = {3, 2, 1, 0};
    DataBuffer outputBuffer = new DataBufferByte(imageData, imageData.length);
    WritableRaster raster = Raster.createInterleavedRaster(outputBuffer, inputImage.getWidth(), inputImage.getHeight(),
            4 * inputImage.getWidth(), 4, abgrOffsets, null);
    ColorModel colorModel = new ComponentColorModel(ColorSpace.getInstance(ColorSpace.CS_sRGB), true, false,
            Transparency.TRANSLUCENT, DataBuffer.TYPE_BYTE);
    BufferedImage rasterizedImage = new BufferedImage(colorModel, raster, colorModel.isAlphaPremultiplied(), null);
    rasterizedImage = resizeImage(rasterizedImage, inputImage.getWidth() * 4, inputImage.getHeight() * 4);
    Graphics2D g2d = rasterizedImage.createGraphics();
    g2d.setComposite(AlphaComposite.SrcOver.derive(bitRateBlend));
    g2d.drawImage(inputImage, 0, 0, null);
    g2d.dispose();
    rasterizedImage = rasterizedImage.getSubimage(0, 0, inputImage.getWidth(), inputImage.getHeight());
    return ImageUtil.getImageBytes(rasterizedImage);
}
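The detour through AudioSystem.write is the whole point of this example: the raw image bytes are treated as ULAW audio, re-encoded into a WAVE stream, and the distorted bytes are copied back into a pixel buffer before being alpha-blended with the original image. ImageUtil, resizeImage and getPixelGlitchParameters are project-specific helpers; a hypothetical caller might look like this (file names are made up, Files/Paths are java.nio.file):

byte[] original = Files.readAllBytes(Paths.get("input.png"));
byte[] glitched = glitchPixels(original); // returns null if bitRateBlend is outside 0.1..0.9
if (glitched != null) {
    Files.write(Paths.get("glitched.png"), glitched);
}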
Example 7: checkFormat
import javax.sound.sampled.AudioFileFormat.Type; // import the required package/class

private void checkFormat(AudioFileFormat.Type type, AudioInputStream stream) {
    if (!Type.WAVE.equals(type))
        throw new IllegalArgumentException("File type " + type
                + " not supported.");
    if (!stream.getFormat().getEncoding().equals(Encoding.PCM_FLOAT))
        throw new IllegalArgumentException("File format "
                + stream.getFormat() + " not supported.");
}
Example 8: write
import javax.sound.sampled.AudioFileFormat.Type; // import the required package/class

public int write(AudioInputStream stream, Type fileType, OutputStream out)
        throws IOException {
    checkFormat(fileType, stream);
    if (stream.getFormat().isBigEndian())
        stream = toLittleEndian(stream);
    RIFFWriter writer = new RIFFWriter(new NoCloseOutputStream(out), "WAVE");
    write(stream, writer);
    int fpointer = (int) writer.getFilePointer();
    writer.close();
    return fpointer;
}
Example 9: getAudioFileTypes
import javax.sound.sampled.AudioFileFormat.Type; // import the required package/class

@Override
public Type[] getAudioFileTypes(AudioInputStream stream) {
    if (!stream.getFormat().getEncoding().equals(Encoding.PCM_FLOAT))
        return new Type[0];
    return new Type[] { Type.WAVE };
}
Example 10: getAudioFileFormat
import javax.sound.sampled.AudioFileFormat.Type; // import the required package/class

/**
 * Returns the AudioFileFormat describing the file that will be written from this AudioInputStream.
 * Throws IllegalArgumentException if not supported.
 */
private AudioFileFormat getAudioFileFormat(Type type, AudioInputStream stream) {
    if (!isFileTypeSupported(type, stream)) {
        throw new IllegalArgumentException("File type " + type + " not supported.");
    }
    AudioFormat streamFormat = stream.getFormat();
    AudioFormat.Encoding encoding = streamFormat.getEncoding();

    if (AudioFormat.Encoding.PCM_UNSIGNED.equals(encoding)) {
        encoding = AudioFormat.Encoding.PCM_SIGNED;
    }

    // We always write big-endian au files; this is by far the standard
    AudioFormat format = new AudioFormat(encoding,
                                         streamFormat.getSampleRate(),
                                         streamFormat.getSampleSizeInBits(),
                                         streamFormat.getChannels(),
                                         streamFormat.getFrameSize(),
                                         streamFormat.getFrameRate(), true);
    int fileSize;
    if (stream.getFrameLength() != AudioSystem.NOT_SPECIFIED) {
        fileSize = (int) stream.getFrameLength() * streamFormat.getFrameSize() + AuFileFormat.AU_HEADERSIZE;
    } else {
        fileSize = AudioSystem.NOT_SPECIFIED;
    }
    return new AuFileFormat(Type.AU, fileSize, format,
                            (int) stream.getFrameLength());
}
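The fileSize arithmetic is easy to check by hand: for one second of 16-bit stereo audio at 44.1 kHz the frame size is 4 bytes and the frame length is 44100, so the data portion is 176400 bytes, plus the .au header. A small sketch of the same calculation; the 24-byte header size is an assumption based on the standard .au header layout, not a value read from AuFileFormat:

int frameLength = 44100;  // one second at 44.1 kHz
int frameSize = 4;        // 16-bit samples * 2 channels
int headerSize = 24;      // assumed size of a standard .au header (AU_HEADERSIZE)
int fileSize = frameLength * frameSize + headerSize; // 176424 bytes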
Example 11: main
import javax.sound.sampled.AudioFileFormat.Type; // import the required package/class

public static void main(final String[] args) throws Exception {
    for (final AudioFileFormat.Type type : types) {
        for (final AudioFormat format : formats) {
            testAS(type, format);
            for (final AudioFileWriter afw : load(AudioFileWriter.class)) {
                testAFW(afw, type, format);
            }
        }
    }
    Files.delete(Paths.get(FILE.getAbsolutePath()));
}
Example 12: main
import javax.sound.sampled.AudioFileFormat.Type; // import the required package/class

public static void main(final String[] args) throws Exception {
    for (final AudioFileFormat.Type type : types) {
        for (final AudioFormat format : formats) {
            testAS(type, format);
            for (final AudioFileWriter afw : load(AudioFileWriter.class)) {
                testAFW(afw, type, format);
            }
        }
    }
}
Example 13: getAudioFileTypes
import javax.sound.sampled.AudioFileFormat.Type; // import the required package/class

public Type[] getAudioFileTypes(AudioInputStream stream) {
    if (!stream.getFormat().getEncoding().equals(
            AudioFloatConverter.PCM_FLOAT))
        return new Type[0];
    return new Type[] { Type.WAVE };
}
Example 14: checkFormat
import javax.sound.sampled.AudioFileFormat.Type; // import the required package/class

private void checkFormat(AudioFileFormat.Type type, AudioInputStream stream) {
    if (!Type.WAVE.equals(type))
        throw new IllegalArgumentException("File type " + type
                + " not supported.");
    if (!stream.getFormat().getEncoding().equals(
            AudioFloatConverter.PCM_FLOAT))
        throw new IllegalArgumentException("File format "
                + stream.getFormat() + " not supported.");
}
Example 15: write
import javax.sound.sampled.AudioFileFormat.Type; // import the required package/class

@Override
public int write(AudioInputStream ais, Type type, File out)
        throws IOException
{
    // TODO Auto-generated method stub
    return 0;
}