This article collects typical usage examples of the Receiver.send method from the Java class javax.sound.midi.Receiver. If you are wondering what Receiver.send does, how to call it, or what it looks like in real code, the curated method examples below may help. You can also read further about the enclosing class, javax.sound.midi.Receiver.
Below are 15 code examples of Receiver.send, sorted by popularity by default.
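Before the fifteen examples, here is a minimal, self-contained sketch of the basic pattern: obtain a Receiver, build a MidiMessage, and pass it to send() together with a time stamp (-1 means "no time stamp", i.e. deliver immediately). The note and velocity values are arbitrary illustration choices, not taken from the examples below.
import javax.sound.midi.MidiSystem;
import javax.sound.midi.Receiver;
import javax.sound.midi.ShortMessage;

public class ReceiverSendSketch {
    public static void main(String[] args) throws Exception {
        // Ask MidiSystem for the default receiver (usually the default synthesizer).
        Receiver receiver = MidiSystem.getReceiver();
        try {
            ShortMessage noteOn = new ShortMessage();
            noteOn.setMessage(ShortMessage.NOTE_ON, 0, 60, 93); // channel 1, middle C, velocity 93
            receiver.send(noteOn, -1); // -1 = no time stamp
            Thread.sleep(500); // let the note sound briefly
            ShortMessage noteOff = new ShortMessage();
            noteOff.setMessage(ShortMessage.NOTE_OFF, 0, 60, 0);
            receiver.send(noteOff, -1);
        } finally {
            receiver.close(); // always release the receiver when done
        }
    }
}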
Example 1: main
import javax.sound.midi.Receiver; // import the package/class the method depends on
public static void main(String[] args) throws Exception {
AudioSynthesizer synth = new SoftSynthesizer();
Receiver recv = synth.getReceiver();
assertTrue(recv != null);
ShortMessage sm = new ShortMessage();
sm.setMessage(ShortMessage.NOTE_OFF, 0, 64, 64);
synth.open(new DummySourceDataLine(), null);
recv.send(sm, -1);
synth.close();
try
{
recv.send(sm, -1);
throw new RuntimeException("Exception not thrown!");
}
catch(Exception e)
{
// Just checking if exception is thrown
}
}
Example 2: sendMessage
import javax.sound.midi.Receiver; // import the package/class the method depends on
void sendMessage(byte[] data, long timeStamp) {
try {
synchronized(transmitters) {
int size = transmitters.size();
if (TRACE_TRANSMITTER) Printer.println("Sending long message to "+size+" transmitter's receivers");
for (int i = 0; i < size; i++) {
Receiver receiver = transmitters.get(i).getReceiver();
if (receiver != null) {
//$$fb 2002-04-02: SysexMessages are mutable, so
// an application could change the contents of this object,
// or try to use the object later. So we can't get around object creation
// But the array need not be unique for each FastSysexMessage object,
// because it cannot be modified.
receiver.send(new FastSysexMessage(data), timeStamp);
}
}
}
} catch (InvalidMidiDataException e) {
// this happens when invalid data comes over the wire. Ignore it.
return;
}
}
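FastSysexMessage is an internal helper of this particular implementation, not part of the public javax.sound.midi API. As the comment above notes, its point is that the shared byte array is never modified, so each receiver can get its own lightweight message object without copying the data. A minimal sketch of that idea (an assumption about the helper, not its actual source) could use the protected SysexMessage(byte[]) constructor, which keeps the array reference instead of copying it:
// Sketch only: wrap a complete sysex message (status byte included) without
// the defensive copy that SysexMessage.setMessage() would make.
final class WrappingSysexMessage extends SysexMessage {
    WrappingSysexMessage(byte[] completeMessage) {
        super(completeMessage); // the protected constructor stores the reference as-is
    }
}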
Example 3: testReceiverSend
import javax.sound.midi.Receiver; // import the package/class the method depends on
/**
* Execute Receiver.send() and expect that there is no exception.
*/
private static boolean testReceiverSend() {
boolean result = true;
Receiver receiver;
ShortMessage shMsg = new ShortMessage();
try {
receiver = MidiSystem.getReceiver();
shMsg.setMessage(ShortMessage.NOTE_ON, 0, 60, 93);
try {
receiver.send( shMsg, -1 );
} catch(IllegalStateException ilEx) {
ilEx.printStackTrace(System.out);
out("IllegalStateException was thrown incorrectly!");
result = false;
}
receiver.close();
} catch(MidiUnavailableException e) {
out("Midi unavailable, cannot test.");
} catch(InvalidMidiDataException ine) {
out("InvalidMidiDataException, cannot test.");
}
return result;
}
Example 4: captureMidiMessages
import javax.sound.midi.Receiver; // import the package/class the method depends on
@Override
protected void captureMidiMessages(final Receiver receiver) throws MidiUnavailableException {
// Modify all MidiMessages so that they specify channel 10 (encoded as 9,
// since channel numbers are zero-based: 0 means MIDI channel 1)
Receiver setChannelReceiver = new Receiver() {
@Override
public void send(MidiMessage message, long timeStamp) {
if (message instanceof ShortMessage) {
ShortMessage sm = (ShortMessage) message;
byte[] data = sm.getMessage();
byte orig = data[0];
data[0] = (byte) ((orig | 9) & 0xff);
message = Midi.createShortMessage(data);
}
receiver.send(message, timeStamp);
}
@Override
public void close() {
receiver.close();
}
};
super.captureMidiMessages(setChannelReceiver);
}
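Note that (orig | 9) only yields channel 10 when the incoming status byte's channel nibble is already 0; for messages arriving on other channels, some of the original channel bits would survive the OR. A sketch of a more defensive variant (an alternative, not the project's actual code), assuming the same receiver delegate and imports as the example above plus InvalidMidiDataException:
// Sketch: rebuild the message with the channel nibble cleared and set to 9 (MIDI channel 10).
Receiver forceChannel10 = new Receiver() {
    @Override
    public void send(MidiMessage message, long timeStamp) {
        if (message instanceof ShortMessage) {
            ShortMessage sm = (ShortMessage) message;
            try {
                ShortMessage copy = new ShortMessage();
                copy.setMessage(sm.getCommand(), 9, sm.getData1(), sm.getData2());
                message = copy;
            } catch (InvalidMidiDataException e) {
                // e.g. system messages have no channel; forward the original unchanged
            }
        }
        receiver.send(message, timeStamp);
    }
    @Override
    public void close() {
        receiver.close();
    }
};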
Example 5: releasePendingNoteOffs
import javax.sound.midi.Receiver; // import the package/class the method depends on
/**
* Send a note-off (NOTE_ON with velocity 0) to every pending note
*
*/
public final void releasePendingNoteOffs()
{
for(Receiver recv : pendingNoteOffs.keySet())
{
for (int note : pendingNoteOffs.get(recv)) {
ShortMessage shm = new ShortMessage();
try {
shm.setMessage(ShortMessage.NOTE_ON, (note >> 8) & 0xf,
note & 0xff, 0);
recv.send(shm, -1);
} catch (InvalidMidiDataException e) {
e.printStackTrace();
}
}
}
pendingNoteOffs.clear();
}
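The pendingNoteOffs map and its int encoding are project-specific and not shown here; judging from the decoding above, the channel is packed into bits 8-11 and the note number into the low byte. A hypothetical bookkeeping method that would match (field and method names are invented for illustration):
// Hypothetical counterpart of the decoding used in releasePendingNoteOffs().
private final Map<Receiver, List<Integer>> pendingNoteOffs = new HashMap<>();

private void registerPendingNoteOff(Receiver recv, int channel, int note) {
    int packed = ((channel & 0xf) << 8) | (note & 0xff);
    pendingNoteOffs.computeIfAbsent(recv, r -> new ArrayList<>()).add(packed);
}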
Example 6: test
import javax.sound.midi.Receiver; // import the package/class the method depends on
public static void test(Soundbank soundbank) throws Exception {
// Create instance of synthesizer using the testing soundbank above
AudioSynthesizer synth = new SoftSynthesizer();
AudioInputStream stream = synth.openStream(format, null);
synth.unloadAllInstruments(synth.getDefaultSoundbank());
synth.loadAllInstruments(soundbank);
Receiver recv = synth.getReceiver();
// Set volume to max and turn reverb off
ShortMessage reverb_off = new ShortMessage();
reverb_off.setMessage(ShortMessage.CONTROL_CHANGE, 91, 0);
recv.send(reverb_off, -1);
ShortMessage full_volume = new ShortMessage();
full_volume.setMessage(ShortMessage.CONTROL_CHANGE, 7, 127);
recv.send(full_volume, -1);
Random random = new Random(3485934583945L);
// Create random timestamps
long[] test_timestamps = new long[30];
for (int i = 1; i < test_timestamps.length; i++) {
test_timestamps[i] = i * 44100
+ (int) (random.nextDouble() * 22050.0);
}
// Send midi note on message to synthesizer
for (int i = 0; i < test_timestamps.length; i++) {
ShortMessage midi_on = new ShortMessage();
midi_on.setMessage(ShortMessage.NOTE_ON, 69, 127);
recv.send(midi_on,
(long) ((test_timestamps[i] / 44100.0) * 1000000.0));
}
// Measure timing from rendered audio
float[] fbuffer = new float[100];
byte[] buffer = new byte[fbuffer.length * format.getFrameSize()];
long firsts = -1;
int counter = 0;
long s = 0;
long max_jitter = 0;
outerloop: for (int k = 0; k < 10000000; k++) {
stream.read(buffer);
AudioFloatConverter.getConverter(format).toFloatArray(buffer,
fbuffer);
for (int i = 0; i < fbuffer.length; i++) {
if (fbuffer[i] != 0) {
if (firsts == -1)
firsts = s;
long measure_time = (s - firsts);
long predicted_time = test_timestamps[counter];
long jitter = Math.abs(measure_time - predicted_time);
if (jitter > 10)
max_jitter = jitter;
counter++;
if (counter == test_timestamps.length)
break outerloop;
}
s++;
}
}
synth.close();
if (counter == 0)
throw new Exception("Nothing was measured!");
if (max_jitter != 0) {
throw new Exception("Jitter has occurred! "
+ "(max jitter = " + max_jitter + ")");
}
}
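The format field and DummySourceDataLine used in this test are defined elsewhere in the test sources. Judging from the 44100-samples-per-second arithmetic above, a plausible definition of format (an assumption, not the test's verbatim code) would be:
// Assumed mono 16-bit little-endian PCM format, consistent with the 44100 Hz math above.
static final AudioFormat format = new AudioFormat(44100, 16, 1, true, false);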
Example 7: write
import javax.sound.midi.Receiver; // import the package/class the method depends on
public static void write(OutputStream out, List<MidiEvent> events, MidiToAudioSettings settings) throws Throwable {
MidiToAudioSynth.instance().openSynth();
MidiToAudioSynth.instance().loadSoundbank(getPatchs(events), settings.getSoundbankPath());
int usqTempo = 60000000 / 120;
long previousTick = 0;
long timePosition = 0;
MidiToAudioWriter.sort(events);
Receiver receiver = MidiToAudioSynth.instance().getReceiver();
AudioInputStream stream = MidiToAudioSynth.instance().getStream();
Iterator<MidiEvent> it = events.iterator();
while(it.hasNext()){
MidiEvent event = (MidiEvent)it.next();
MidiMessage msg = event.getMessage();
timePosition += ( (event.getTick() - previousTick) * usqTempo) / TGDuration.QUARTER_TIME;
if (msg instanceof MetaMessage) {
if (((MetaMessage) msg).getType() == 0x51) {
byte[] data = ((MetaMessage) msg).getData();
usqTempo = ((data[0] & 0xff) << 16) | ((data[1] & 0xff) << 8) | (data[2] & 0xff);
}
} else {
receiver.send(msg, timePosition);
}
previousTick = event.getTick();
}
long duration = (long) (stream.getFormat().getFrameRate() * ( (timePosition / 1000000.0) ));
AudioInputStream srcStream = new AudioInputStream(stream, stream.getFormat(), duration );
AudioInputStream dstStream = AudioSystem.getAudioInputStream(settings.getFormat(), srcStream );
AudioSystem.write(new AudioInputStream(dstStream, dstStream.getFormat(), duration ), settings.getType(), out);
dstStream.close();
srcStream.close();
MidiToAudioSynth.instance().closeSynth();
}
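Two details carry the timing logic above: a set-tempo meta event (type 0x51) stores microseconds per quarter note as three big-endian bytes, and each tick delta is converted to microseconds using the current tempo and the resolution in ticks per quarter note (TGDuration.QUARTER_TIME in this project). A small sketch of both steps in isolation:
// Decode a set-tempo meta event (type 0x51): three bytes, microseconds per quarter note.
static int decodeTempo(MetaMessage msg) {
    byte[] data = msg.getData();
    return ((data[0] & 0xff) << 16) | ((data[1] & 0xff) << 8) | (data[2] & 0xff);
}

// Convert a tick delta to elapsed microseconds at the current tempo.
static long ticksToMicros(long tickDelta, int usqTempo, int ticksPerQuarter) {
    return (tickDelta * usqTempo) / ticksPerQuarter;
}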
Example 8: noteOn
import javax.sound.midi.Receiver; // import the package/class the method depends on
public static void noteOn(final Receiver recv, final int note, final int channel, final int velocity)
{
try
{
ShortMessage msg = new ShortMessage();
msg.setMessage(ShortMessage.NOTE_ON,channel,note,velocity);
recv.send(msg,-1);
}
catch(Exception e) {e.printStackTrace();}
}
Example 9: noteOff
import javax.sound.midi.Receiver; // import the package/class the method depends on
public static void noteOff(final Receiver recv,final int note, final int channel)
{
try
{
ShortMessage msg = new ShortMessage();
msg.setMessage(ShortMessage.NOTE_ON,channel,note,0);
recv.send(msg,-1);
}
catch(Exception e) {e.printStackTrace();}
}
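Example 9 ends a note by sending NOTE_ON with velocity 0, which is the conventional (and running-status-friendly) way to express a note-off. An equivalent sketch using an explicit NOTE_OFF status, should that be preferred:
// Equivalent variant: an explicit NOTE_OFF instead of NOTE_ON with velocity 0.
public static void noteOffExplicit(final Receiver recv, final int note, final int channel) {
    try {
        ShortMessage msg = new ShortMessage();
        msg.setMessage(ShortMessage.NOTE_OFF, channel, note, 0);
        recv.send(msg, -1);
    } catch (Exception e) {
        e.printStackTrace();
    }
}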
Example 10: sendControlChange
import javax.sound.midi.Receiver; // import the package/class the method depends on
private static void sendControlChange(Receiver receiver, int midiChan, int ctrlId, int value) {
ShortMessage msg = new ShortMessage();
try {
msg.setMessage(ShortMessage.CONTROL_CHANGE, midiChan, ctrlId, value);
} catch (InvalidMidiDataException ex) {
throw new RuntimeException(ex);
}
receiver.send(msg, -1);
}
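A possible usage of the helper above (the receiver variable and the chosen values are illustrative assumptions): standard controller numbers include 7 for channel volume, 10 for pan and 64 for the sustain pedal.
// Illustrative calls on MIDI channel 1 (index 0).
sendControlChange(receiver, 0, 7, 100);   // CC 7  = channel volume
sendControlChange(receiver, 0, 10, 64);   // CC 10 = pan, 64 = center
sendControlChange(receiver, 0, 64, 0);    // CC 64 = sustain pedal off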
Example 11: sendSysexMessage
import javax.sound.midi.Receiver; // import the package/class the method depends on
private static void sendSysexMessage(Receiver receiver, byte... message) {
SysexMessage msg = new SysexMessage();
try {
msg.setMessage(message, message.length);
} catch (InvalidMidiDataException ex) {
throw new RuntimeException(ex);
}
receiver.send(msg, -1);
}
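A possible usage of the helper above: the standard "GM System On" universal SysEx message (F0 7E 7F 09 01 F7), which switches a General MIDI device into GM mode. The receiver variable is assumed to be obtained elsewhere.
// Send the General MIDI "GM System On" message.
sendSysexMessage(receiver,
        (byte) 0xF0, (byte) 0x7E, (byte) 0x7F,
        (byte) 0x09, (byte) 0x01, (byte) 0xF7);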
Example 12: createMessageSource
import javax.sound.midi.Receiver; // import the package/class the method depends on
private void createMessageSource(final Instrument liveInstr) throws MidiUnavailableException,
IOException {
this.liveSynth = getInstrumentInfo(liveInstr);
// Filter incoming MidiMessages to:
// - change to channel 10 (if this is a percussion instrument)
// - add them to capturedEvents list
final Receiver delegate = liveSynth.source;
liveSynth.source = new Receiver() {
@Override
public void send(MidiMessage message, long timeStamp) {
if (liveInstr.getType() == InstrumentType.MIDI_PERCUSSION) {
// Percussion messages should be on channel 10
if (message instanceof ShortMessage) {
ShortMessage smsg = (ShortMessage) message;
message = Midi.createShortMessage(smsg.getStatus()|9, smsg.getData1(), smsg.getData2());
}
}
capturedEvents.add(new MidiMessageAndTimeStamp(message, timeStamp));
delegate.send(message, timeStamp);
}
@Override
public void close() {
delegate.close();
}
};
}
Example 13: main
import javax.sound.midi.Receiver; // import the package/class the method depends on
public static void main(String argv[]) {
Sequencer s = null;
try {
s = MidiSystem.getSequencer();
s.open();
} catch (final MidiUnavailableException ignored) {
// the test is not applicable
return;
}
try {
Sequence seq = new Sequence(Sequence.PPQ, 384, 2);
s.setSequence(seq);
Track t = seq.getTracks()[0];
ShortMessage msg = new ShortMessage();
msg.setMessage(0x90, 0x40, 0x7F);
t.add(new MidiEvent(msg, 11000));
msg.setMessage(0x90, 0x40, 0x00);
t.add(new MidiEvent(msg, 12000));
t = seq.getTracks()[1];
s.recordEnable(t, -1);
System.out.println("Started recording...");
s.startRecording();
Receiver r = s.getReceiver();
Thread.sleep(100);
// send a normal message
System.out.println("Recording a normal NOTE ON message...");
msg.setMessage(0x90, 0x40, 0x6F);
r.send(msg, -1);
Thread.sleep(100);
// send a normal message
System.out.println("Recording a normal NOTE OFF message...");
msg.setMessage(0x90, 0x40, 0x00);
r.send(msg, -1);
Thread.sleep(100);
s.stop();
// now see if the messages were recorded
System.out.println("Recorded messages:");
int sameMessage = 0;
for (int i = 0; i < t.size(); i++) {
System.out.print(" "+(i+1)+". ");
printEvent(t.get(i));
if (t.get(i).getMessage() == msg) {
System.out.println("## Failed: Same Message reference!");
sameMessage++;
}
}
if (sameMessage > 0) {
System.out.println("## Failed: The same instance was recorded!");
throw new Exception("Test FAILED!");
}
System.out.println("Did not detect any duplicate messages.");
System.out.println("Test passed.");
} catch (Exception e) {
System.out.println("Unexpected Exception: "+e);
//e.printStackTrace();
throw new RuntimeException("Test FAILED!");
} finally {
s.close();
}
}
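The test above verifies that the sequencer does not record the very ShortMessage instance that was sent. On the sending side, an application that reuses and mutates one message object can get the same independence explicitly by sending defensive copies; MidiMessage.clone() is public, so a sketch (reusing r and msg from the example above) looks like this:
// Send a copy so that later setMessage() calls on msg cannot affect what was already sent.
msg.setMessage(0x90, 0x40, 0x6F);
r.send((ShortMessage) msg.clone(), -1);
msg.setMessage(0x90, 0x40, 0x00); // does not touch the copy sent above
r.send((ShortMessage) msg.clone(), -1);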
Example 14: getAudioInputStream
import javax.sound.midi.Receiver; // import the package/class the method depends on
private AudioInputStream getAudioInputStream(final Sequence seq)
throws InvalidMidiDataException {
AudioSynthesizer synth = (AudioSynthesizer) new SoftSynthesizer();
AudioInputStream stream;
Receiver recv;
try {
stream = synth.openStream(format, null);
recv = synth.getReceiver();
} catch (MidiUnavailableException e) {
throw new InvalidMidiDataException(e.toString());
}
float divtype = seq.getDivisionType();
Track[] tracks = seq.getTracks();
int[] trackspos = new int[tracks.length];
int mpq = 500000;
int seqres = seq.getResolution();
long lasttick = 0;
long curtime = 0;
while (true) {
MidiEvent selevent = null;
int seltrack = -1;
for (int i = 0; i < tracks.length; i++) {
int trackpos = trackspos[i];
Track track = tracks[i];
if (trackpos < track.size()) {
MidiEvent event = track.get(trackpos);
if (selevent == null || event.getTick() < selevent.getTick()) {
selevent = event;
seltrack = i;
}
}
}
if (seltrack == -1)
break;
trackspos[seltrack]++;
long tick = selevent.getTick();
if (divtype == Sequence.PPQ)
curtime += ((tick - lasttick) * mpq) / seqres;
else
curtime = (long) ((tick * 1000000.0 * divtype) / seqres);
lasttick = tick;
MidiMessage msg = selevent.getMessage();
if (msg instanceof MetaMessage) {
if (divtype == Sequence.PPQ) {
if (((MetaMessage) msg).getType() == 0x51) {
byte[] data = ((MetaMessage) msg).getData();
if (data.length < 3) {
throw new InvalidMidiDataException();
}
mpq = ((data[0] & 0xff) << 16)
| ((data[1] & 0xff) << 8) | (data[2] & 0xff);
}
}
} else {
recv.send(msg, curtime);
}
}
long totallen = curtime / 1000000;
long len = (long) (stream.getFormat().getFrameRate() * (totallen + 4));
stream = new AudioInputStream(stream, stream.getFormat(), len);
return stream;
}
Example 15: getAudioInputStream
import javax.sound.midi.Receiver; // import the package/class the method depends on
public AudioInputStream getAudioInputStream(Sequence seq)
throws UnsupportedAudioFileException, IOException {
AudioSynthesizer synth = (AudioSynthesizer) new SoftSynthesizer();
AudioInputStream stream;
Receiver recv;
try {
stream = synth.openStream(format, null);
recv = synth.getReceiver();
} catch (MidiUnavailableException e) {
throw new IOException(e.toString());
}
float divtype = seq.getDivisionType();
Track[] tracks = seq.getTracks();
int[] trackspos = new int[tracks.length];
int mpq = 500000;
int seqres = seq.getResolution();
long lasttick = 0;
long curtime = 0;
while (true) {
MidiEvent selevent = null;
int seltrack = -1;
for (int i = 0; i < tracks.length; i++) {
int trackpos = trackspos[i];
Track track = tracks[i];
if (trackpos < track.size()) {
MidiEvent event = track.get(trackpos);
if (selevent == null || event.getTick() < selevent.getTick()) {
selevent = event;
seltrack = i;
}
}
}
if (seltrack == -1)
break;
trackspos[seltrack]++;
long tick = selevent.getTick();
if (divtype == Sequence.PPQ)
curtime += ((tick - lasttick) * mpq) / seqres;
else
curtime = (long) ((tick * 1000000.0 * divtype) / seqres);
lasttick = tick;
MidiMessage msg = selevent.getMessage();
if (msg instanceof MetaMessage) {
if (divtype == Sequence.PPQ) {
if (((MetaMessage) msg).getType() == 0x51) {
byte[] data = ((MetaMessage) msg).getData();
if (data.length < 3) {
throw new UnsupportedAudioFileException();
}
mpq = ((data[0] & 0xff) << 16)
| ((data[1] & 0xff) << 8) | (data[2] & 0xff);
}
}
} else {
recv.send(msg, curtime);
}
}
long totallen = curtime / 1000000;
long len = (long) (stream.getFormat().getFrameRate() * (totallen + 4));
stream = new AudioInputStream(stream, stream.getFormat(), len);
return stream;
}
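A possible usage of Example 15's renderer (file names and the surrounding setup are assumptions): load a Sequence from a MIDI file, render it offline through the synthesizer's Receiver, and write the result as a WAV file.
// Render input.mid to output.wav using the getAudioInputStream method above.
Sequence seq = MidiSystem.getSequence(new File("input.mid"));
AudioInputStream rendered = getAudioInputStream(seq);
AudioSystem.write(rendered, AudioFileFormat.Type.WAVE, new File("output.wav"));
rendered.close();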