本文整理汇总了C#中NAudio.Wave.WaveOut类的典型用法代码示例。如果您正苦于以下问题:C# WaveOut类的具体用法?C# WaveOut怎么用?C# WaveOut使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
WaveOut类属于NAudio.Wave命名空间,在下文中一共展示了WaveOut类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: Speaker
//------------------------------------------------------------------------------------------------------------------------
#endregion
#region Constructor
//------------------------------------------------------------------------------------------------------------------------
public Speaker()
{
    // Buffer incoming PCM (8 kHz, 16-bit, stereo) and wrap it in a
    // volume-control stage before it reaches the output device.
    bufferedWaveProvider = new BufferedWaveProvider(new WaveFormat(8000, 16, 2));
    volumeProvider = new VolumeWaveProvider16(bufferedWaveProvider);

    // Create the playback device, watch for end-of-playback, and hook up the chain.
    waveout = new WaveOut();
    waveout.PlaybackStopped += Waveout_PlaybackStopped;
    waveout.Init(volumeProvider);
}
示例2: AudioPlayer
public AudioPlayer(DiscordVoiceConfig __config)
{
    // Keep the voice settings; the channel count below comes from them.
    config = __config;

    // Incoming voice data is 48 kHz / 16-bit PCM.
    bufferedWaveProvider = new BufferedWaveProvider(new WaveFormat(48000, 16, config.Channels));

    // Function-callback mode means no window handle is required for playback.
    callbackInfo = WaveCallbackInfo.FunctionCallback();
    outputDevice = new WaveOut(callbackInfo);
}
示例3: Stop
public void Stop()
{
    // Halt playback and release the output device first, then tear down the
    // streams it was consuming. Every field is nulled afterwards so calling
    // Stop() a second time is a harmless no-op.
    if (WaveOut != null)
    {
        WaveOut.Stop();
        WaveOut.Dispose();
        WaveOut = null;
    }

    WaveChannel?.Dispose();
    WaveChannel = null;

    WaveFileReader?.Dispose();
    WaveFileReader = null;

    Mp3FileReader?.Dispose();
    Mp3FileReader = null;
}
示例4: PlaySound
/// <summary>
/// Plays the named sound from the sound library asynchronously.
/// </summary>
/// <param name="name">Key into _soundLibrary resolving to an MP3 file path.</param>
/// <param name="done">Optional callback invoked (on the worker thread) when playback finishes.</param>
public void PlaySound(string name, Action done = null)
{
    // Decode chain: file -> MP3 reader -> PCM conversion -> block alignment.
    FileStream ms = File.OpenRead(_soundLibrary[name]);
    var rdr = new Mp3FileReader(ms);
    WaveStream wavStream = WaveFormatConversionStream.CreatePcmStream(rdr);
    var baStream = new BlockAlignReductionStream(wavStream);
    var waveOut = new WaveOut(WaveCallbackInfo.FunctionCallback());
    waveOut.Init(baStream);
    waveOut.Play();

    // Poll for completion on a background thread so the caller is not blocked.
    var bw = new BackgroundWorker();
    bw.DoWork += (s, o) =>
    {
        try
        {
            while (waveOut.PlaybackState == PlaybackState.Playing)
            {
                Thread.Sleep(100);
            }
        }
        finally
        {
            // Dispose the whole chain even if the polling loop throws
            // (the original leaked all five resources in that case).
            waveOut.Dispose();
            baStream.Dispose();
            wavStream.Dispose();
            rdr.Dispose();
            ms.Dispose();
        }
        done?.Invoke();
    };
    bw.RunWorkerAsync();
}
示例5: PlayAudioFromConnection
/// <summary>
/// Reads raw audio bytes from a TCP connection, plays them in real time and
/// (via SavingWaveProvider) mirrors them to "temp.wav". Returns when the
/// client disconnects or the terminate flag is set.
/// </summary>
private void PlayAudioFromConnection(TcpClient client)
{
    var inputStream = new BufferedStream(client.GetStream());
    var bufferedWaveProvider = new BufferedWaveProvider(waveFormat);
    var savingWaveProvider = new SavingWaveProvider(bufferedWaveProvider, "temp.wav");
    var player = new WaveOut();
    player.Init(savingWaveProvider);
    player.Play();
    try
    {
        while (client.Connected)
        {
            if (terminate)
            {
                client.Close();
                break;
            }
            var available = client.Available;
            if (available > 0)
            {
                var buffer = new byte[available];
                var bytes = inputStream.Read(buffer, 0, buffer.Length);
                bufferedWaveProvider.AddSamples(buffer, 0, bytes);
                Console.WriteLine("{0} \t {1} bytes", client.Client.RemoteEndPoint, bytes);
            }
            else
            {
                // Yield briefly so the receive loop is not a 100% CPU busy-wait.
                Thread.Sleep(10);
            }
        }
    }
    finally
    {
        player.Stop();
        player.Dispose(); // was leaked in the original
        savingWaveProvider.Dispose();
    }
}
示例6: Form1
public Form1()
{
    // Serial link: COM6 at 9600-8-N-2, no handshake. The data handler is
    // attached before the port is opened so no bytes are missed.
    _serialPort.PortName = "COM6";
    _serialPort.BaudRate = 9600;
    _serialPort.Parity = Parity.None;
    _serialPort.DataBits = 8;
    _serialPort.StopBits = StopBits.Two;
    _serialPort.Handshake = Handshake.None;
    _serialPort.DataReceived += sp_DataReceived;
    _serialPort.Open();

    // Three independent output devices: Jaws3 via WaveChannel32,
    // Jaws2/Jaws1 via LoopStream (project helper; presumably loops the clip).
    jaws[0] = new WaveOut();
    jaws[0].Init(new WaveChannel32(new WaveFileReader("Sounds/Jaws3.wav")));
    jaws[1] = new WaveOut();
    jaws[1].Init(new LoopStream(new WaveFileReader("Sounds/Jaws2.wav")));
    jaws[2] = new WaveOut();
    jaws[2].Init(new LoopStream(new WaveFileReader("Sounds/Jaws1.wav")));

    // Start the shark at a random position, then build the UI.
    resetShark();
    InitializeComponent();
}
示例7: Start
public void Start()
{
    // Refuse to start without at least one capture and one playback device.
    if (WaveIn.DeviceCount < 1)
        throw new Exception("Insufficient input device(s)!");
    if (WaveOut.DeviceCount < 1)
        throw new Exception("Insufficient output device(s)!");

    // Samples per codec frame = sample rate * frame duration (ms) / 1000.
    frame_size = toxav.CodecSettings.audio_sample_rate * toxav.CodecSettings.audio_frame_duration / 1000;
    toxav.PrepareTransmission(CallIndex, false);

    // One format, taken from the codec settings, shared by capture and playback.
    var fmt = new WaveFormat((int)toxav.CodecSettings.audio_sample_rate, (int)toxav.CodecSettings.audio_channels);

    // Playback: buffered provider that drops samples instead of throwing on overflow.
    wave_provider = new BufferedWaveProvider(fmt) { DiscardOnBufferOverflow = true };
    wave_out = new WaveOut();
    //wave_out.DeviceNumber = config["device_output"];
    wave_out.Init(wave_provider);

    // Capture: buffers sized to one codec frame, delivered via DataAvailable.
    wave_source = new WaveIn();
    //wave_source.DeviceNumber = config["device_input"];
    wave_source.WaveFormat = fmt;
    wave_source.DataAvailable += wave_source_DataAvailable;
    wave_source.RecordingStopped += wave_source_RecordingStopped;
    wave_source.BufferMilliseconds = (int)toxav.CodecSettings.audio_frame_duration;

    wave_source.StartRecording();
    wave_out.Play();
}
示例8: Main
/// <summary>
/// Console MP3 jukebox: indexes all MP3s under a directory by the leading
/// number in their file name, then repeatedly plays the track whose id the
/// user types. Pressing Enter during playback stops the current track.
/// </summary>
static void Main(string[] args)
{
    // Directory to scan: first command-line argument, else the current directory.
    string mp3FilesDir = Directory.GetCurrentDirectory();
    if (args.Length > 0)
    {
        mp3FilesDir = args.First();
    }

    // Map the leading digits of each file name (e.g. "12 - song.mp3" -> 12) to its path.
    var idToFile = Directory.GetFiles(mp3FilesDir, "*.mp3", SearchOption.AllDirectories)
        .ToDictionary(k => int.Parse(Regex.Match(Path.GetFileName(k), @"^\d+").Value));

    // Dispose the output device if the process is ever torn down cleanly.
    using (var waveOutDevice = new WaveOut())
    {
        while (true)
        {
            Console.WriteLine("Wprowadz numer nagrania");

            // Re-prompt on non-numeric input or unknown track id (the original
            // threw FormatException / KeyNotFoundException here).
            int trackId;
            if (!int.TryParse(Console.ReadLine(), out trackId))
            {
                continue;
            }
            string path;
            if (!idToFile.TryGetValue(trackId, out path))
            {
                continue;
            }

            using (var audioFileReader = new AudioFileReader(path))
            {
                waveOutDevice.Init(audioFileReader);
                waveOutDevice.Play();
                Console.ReadLine();
            }
        }
    }
}
示例9: TriggerPanel
public TriggerPanel()
{
    InitializeComponent();

    // Own a playback device and listen for end-of-playback notifications.
    mWaveOut = new WaveOut();
    mWaveOut.PlaybackStopped += mWaveOut_PlaybackStopped;

    // Register this panel with the application controller.
    AppController.Instance().AddPanel(this);
}
示例10: WaveFormat
private WaveFormat _waveFormat = new WaveFormat(8000, 16, 1); // The format that both the input and output audio streams will use, i.e. PCMU.
#endregion Fields
#region Constructors
public AudioChannel()
{
    // Playback path: audio decoded from received RTP is pushed into
    // m_waveProvider and rendered through m_waveOut.
    m_waveOut = new WaveOut();
    m_waveProvider = new BufferedWaveProvider(_waveFormat);
    m_waveOut.Init(m_waveProvider);
    m_waveOut.Play();

    // Capture path: microphone samples arrive in 20 ms buffers via
    // DataAvailable and are handed to RTPChannelSampleAvailable for
    // encoding and packaging into RTP.
    m_waveInEvent = new WaveInEvent
    {
        BufferMilliseconds = 20,
        NumberOfBuffers = 1,
        DeviceNumber = 0,
        WaveFormat = _waveFormat
    };
    m_waveInEvent.DataAvailable += RTPChannelSampleAvailable;

    // RTP transport: bind the next free UDP port and surface inbound frames.
    int port = FreePort.FindNextAvailableUDPPort(DEFAULT_START_RTP_PORT);
    _rtpEndPoint = new IPEndPoint(_defaultLocalAddress, port);
    m_rtpChannel = new RTPChannel(_rtpEndPoint);
    m_rtpChannel.OnFrameReady += RTPChannelSampleReceived;
    _audioLogger.Debug("RTP channel endpoint " + _rtpEndPoint.ToString());
}
示例11: Initialise
/// <summary>
/// Binds this mixer stage to a WaveOut output device: queries the device's
/// channel count, allocates per-channel buffers, and initialises the driver
/// with an IEEE-float output buffer at the requested sample rate.
/// </summary>
/// <param name="format">Source audio format; only its sample rate is used.</param>
/// <param name="driver">The WaveOut device to render through.</param>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
public void Initialise(WaveFormat format, WaveOut driver)
{
    if (driver == null)
    {
        // Fixed: the original message said "WaveIn", but this parameter is a WaveOut.
        throw new ArgumentNullException("driver", "Must specify a WaveOut device instance");
    }
    if (format == null)
    {
        throw new ArgumentNullException("format", "Must specify an audio format");
    }

    // Describe the output device and allocate one float buffer slot per channel.
    var caps = WaveOut.GetCapabilities(driver.DeviceNumber);
    device = new WaveOutDeviceData
    {
        Driver = driver,
        Name = caps.ProductName,
        Channels = caps.Channels,
        Buffers = new float[caps.Channels][]
    };

    // Mix in IEEE float at the source sample rate across all device channels;
    // drop samples rather than throw if the output buffer overflows.
    Format = WaveFormat.CreateIeeeFloatWaveFormat(format.SampleRate, caps.Channels);
    OutputBuffer = new BufferedWaveProvider(Format);
    OutputBuffer.DiscardOnBufferOverflow = true;
    driver.Init(OutputBuffer);
    mapOutputs();
}
示例12: APU
public APU()
{
    // Samples generated by the APU are queued in audioBuffer; an
    // NESWaveProvider drains that buffer into the output device.
    this.audioBuffer = new AudioBuffer();
    waveOut = new WaveOut();
    waveOut.Init(new NESWaveProvider(audioBuffer));
}
示例13: Form1_Load
/// <summary>
/// Sets up two NaCl clients (each holding its own key pair plus the peer's
/// public key), then starts a microphone capture -> playback loopback at
/// 12 kHz / 8-bit mono with 100 ms buffers and 100 ms desired output latency.
/// </summary>
private void Form1_Load(object sender, EventArgs e)
{
    // One key pair per simulated client; each side is given the other's public key.
    byte[] apk, ask, bpk, bsk;
    NaClClient.CreateKeys(out apk, out ask);
    NaClClient.CreateKeys(out bpk, out bsk);
    _clientA = NaClClient.Create(apk, ask, bpk);
    _clientB = NaClClient.Create(bpk, bsk, apk);
    // NOTE(review): removed an unused (and undisposed) SHA256 instance that
    // was created here but never referenced anywhere in this handler.

    _sw = new Stopwatch();
    _sw.Start();

    // Capture side: 12 kHz, 8-bit, mono, delivered in 100 ms buffers.
    _wave = new WaveIn(this.Handle);
    _wave.WaveFormat = new WaveFormat(12000, 8, 1);
    _wave.BufferMilliseconds = 100;
    _wave.DataAvailable += _wave_DataAvailable;
    _wave.StartRecording();

    // Playback side mirrors the capture format.
    _playback = new BufferedWaveProvider(_wave.WaveFormat);
    _waveOut = new WaveOut();
    _waveOut.DesiredLatency = 100;
    _waveOut.Init(_playback);
    _waveOut.Play();
}
示例14: CreateDevice
/// <summary>
/// Builds a WaveOut-family player configured from the settings panel:
/// WaveOutEvent for the event-driven strategy (no callback window needed),
/// otherwise WaveOut with a new-window or function callback.
/// </summary>
// TODO: configurable number of buffers
public IWavePlayer CreateDevice(int latency)
{
    int deviceNumber = _waveOutSettingsPanel.SelectedDeviceNumber;
    WaveCallbackStrategy strategy = _waveOutSettingsPanel.CallbackStrategy;

    if (strategy == WaveCallbackStrategy.Event)
    {
        return new WaveOutEvent
        {
            DeviceNumber = deviceNumber,
            DesiredLatency = latency
        };
    }

    WaveCallbackInfo callbackInfo = strategy == WaveCallbackStrategy.NewWindow
        ? WaveCallbackInfo.NewWindow()
        : WaveCallbackInfo.FunctionCallback();
    return new WaveOut(callbackInfo)
    {
        DeviceNumber = deviceNumber,
        DesiredLatency = latency
    };
}
示例15: Main
/// <summary>
/// Plays a generated sine tone and lets the user nudge an adjustable
/// parameter up/down ('u'/'d') until 'q' quits.
/// </summary>
static void Main(string[] args)
{
    var adj = new AdjustableTFunc {Value = 1600};
    var tFuncWaveProvider = new TFuncWaveProvider
    {
        // Amplitude = TFunc.Sin(new Frequency(adj.TFunc))
        // Amplitude = TFunc.Sin(new Frequency(t => TFuncs.Sin(Frequency.Hertz(1))(t) + 1000))
        Amplitude = TFunc.Sin(TFunc.Sin(Frequency.Hertz(2)) + 1000)
    };

    var waveOut = new WaveOut();
    waveOut.Init(tFuncWaveProvider);
    waveOut.Play();

    Console.WriteLine("Press q to kill");
    char key;
    while ((key = Console.ReadKey().KeyChar) != 'q')
    {
        switch (key)
        {
            case 'u':
                adj.Value += 10;
                break;
            case 'd':
                adj.Value -= 10;
                break;
        }
        Console.Write(" ");
        Console.WriteLine(adj.Value);
    }

    waveOut.Stop();
    waveOut.Dispose();
}