This page collects and summarizes typical usage examples of the C# method NAudio.Wave.BufferedWaveProvider.AddSamples. If you are wondering what BufferedWaveProvider.AddSamples does, how to call it, or what real-world code that uses it looks like, the curated examples below may help. You can also explore further usage examples of its containing class, NAudio.Wave.BufferedWaveProvider.
The following shows 15 code examples of the BufferedWaveProvider.AddSamples method, sorted by popularity by default.
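Before the individual examples, here is a minimal sketch of the pattern nearly all of them share: a producer pushes raw bytes into a BufferedWaveProvider with AddSamples while an output device drains it in the background. This sketch is illustrative rather than taken from the examples below; the getNextChunk delegate stands in for whatever source (network packet, decoded frame, capture buffer) supplies the audio.

using System;
using NAudio.Wave;

public static class AddSamplesSketch
{
    // Plays chunks of raw PCM until getNextChunk returns null.
    public static void PlayChunks(WaveFormat format, Func<byte[]> getNextChunk)
    {
        var provider = new BufferedWaveProvider(format);
        provider.DiscardOnBufferOverflow = true; // drop excess data instead of throwing when the buffer is full
        using (var waveOut = new WaveOutEvent()) // WaveOutEvent avoids WaveOut's window-handle callback requirement
        {
            waveOut.Init(provider);
            waveOut.Play();
            byte[] chunk;
            while ((chunk = getNextChunk()) != null)
            {
                provider.AddSamples(chunk, 0, chunk.Length); // queue the bytes; playback pulls them off asynchronously
            }
        }
    }
}

Note that if AddSamples outpaces playback and DiscardOnBufferOverflow is left false, it throws an InvalidOperationException once the buffer (five seconds by default) fills up; several of the examples below toggle exactly this setting.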
Example 1: FullReadsByDefault
public void FullReadsByDefault()
{
var bwp = new BufferedWaveProvider(new WaveFormat());
var buffer = new byte[44100];
bwp.AddSamples(buffer, 0, 2000);
var read = bwp.Read(buffer, 0, buffer.Length);
// ReadFully defaults to true, so Read pads the shortfall with silence and reports a full read
Assert.AreEqual(buffer.Length, read);
Assert.AreEqual(0, bwp.BufferedBytes);
}
Example 2: WhenBufferHasMoreThanNeededReadFully
public void WhenBufferHasMoreThanNeededReadFully()
{
var bwp = new BufferedWaveProvider(new WaveFormat());
var buffer = new byte[44100];
bwp.AddSamples(buffer, 0, 5000);
var read = bwp.Read(buffer, 0, 2000);
Assert.AreEqual(2000, read);
Assert.AreEqual(3000, bwp.BufferedBytes);
}
Example 3: PartialReadsPossibleWithReadFullyFalse
public void PartialReadsPossibleWithReadFullyFalse()
{
var bwp = new BufferedWaveProvider(new WaveFormat());
bwp.ReadFully = false;
var buffer = new byte[44100];
bwp.AddSamples(buffer, 0, 2000);
var read = bwp.Read(buffer, 0, buffer.Length);
Assert.AreEqual(2000, read);
Assert.AreEqual(0, bwp.BufferedBytes);
}
Example 4: BufferedBytesAreReturned
public void BufferedBytesAreReturned()
{
var bytesToBuffer = 1000;
var bwp = new BufferedWaveProvider(new WaveFormat(44100, 16, 2));
var data = Enumerable.Range(1, bytesToBuffer).Select(n => (byte)(n % 256)).ToArray();
bwp.AddSamples(data, 0, data.Length);
Assert.AreEqual(bytesToBuffer, bwp.BufferedBytes);
var readBuffer = new byte[bytesToBuffer];
var bytesRead = bwp.Read(readBuffer, 0, bytesToBuffer);
Assert.AreEqual(bytesToBuffer, bytesRead);
Assert.AreEqual(data, readBuffer);
Assert.AreEqual(0, bwp.BufferedBytes);
}
Example 5: Receiver
public void Receiver()
{
udpListener = new UdpClient();
udpListener.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
IPEndPoint endPoint = new IPEndPoint(IPAddress.Any, 810);
waveProvider = new BufferedWaveProvider(waveIn.WaveFormat); //get the proper format from waveIn
waveProvider.DiscardOnBufferOverflow = true; //when true, the buffer ignores new packets if it is full
byte[] b; //buffer for received data
waveOut.Init(waveProvider);
udpListener.Client.Bind(endPoint);
waveOut.Play();
do
{
b = udpListener.Receive(ref endPoint); //receiving message
//Console.WriteLine("{0}", b.Length+" "+waveProvider.BufferLength+" "+waveProvider.BufferedBytes);
waveProvider.AddSamples(b, 0, b.Length); //adding data to buffer
Thread.CurrentThread.Join(0);
}
while (b.Length != 0);
waveOut.Stop();
waveOut.Dispose();
}
Example 6: playBack
// Sets up and plays the music file that was read in
public void playBack()
{
playback = new WaveOut();
NAudio.Wave.WaveFormat waveFormat = new NAudio.Wave.WaveFormat(waveIn.SampleRate, waveIn.BitsPerSample, waveIn.NumChannels);
byte[] sound = new byte[waveIn.data.Length];
Array.Copy(waveIn.data, sound, sound.Length); // copy the raw PCM data that was read in
BufferedWaveProvider bwp = new BufferedWaveProvider(waveFormat);
bwp.DiscardOnBufferOverflow = true; // note: the default BufferDuration is five seconds, so anything beyond that is silently dropped
bwp.AddSamples(sound, 0, sound.Length);
playback.Init(bwp);
playback.Play();
}
Example 7: Start
private void Start()
{
if (rawData == null)
{
return;
}
int newSampleRate = myWavReader.SampleRate;
if (speed == "Slow")
{
newSampleRate = (int)(newSampleRate * 0.8);
}
else if (speed == "Slower")
{
newSampleRate = (int)(newSampleRate * 0.5);
}
pictureBox1.Image = originalImage;
timeline.Reset();
NAudio.Wave.WaveFormat format = new NAudio.Wave.WaveFormat(newSampleRate, myWavReader.BitDepth, myWavReader.Channels);
waveOut = new WaveOut();
BufferedWaveProvider waveProvider = new BufferedWaveProvider(format);
waveProvider.BufferLength = rawData.Length; // size the buffer to the whole clip so AddSamples cannot overflow
waveProvider.AddSamples(rawData, 0, rawData.Length);
waveOut.Init(waveProvider);
panel1.HorizontalScroll.Value = 0;
tmrTimeline.Start();
waveOut.Play();
}
Example 8: FfmpegListener
private void FfmpegListener()
{
AudioFileReader afr = null;
Program.WriterMutex.WaitOne();
try
{
afr = new AudioFileReader();
afr.Open(_source);
}
catch (Exception ex)
{
Log.Error("",ex);//MainForm.LogExceptionToFile(ex);
}
Program.WriterMutex.ReleaseMutex();
if (afr == null || !afr.IsOpen)
{
if (AudioFinished!=null)
AudioFinished(this, ReasonToFinishPlaying.AudioSourceError);
return;
}
RecordingFormat = new WaveFormat(afr.SampleRate, 16, afr.Channels);
_waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true };
_sampleChannel = new SampleChannel(_waveProvider);
_sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
byte[] data;
int mult = afr.BitsPerSample/8;
double btrg = Convert.ToDouble(afr.SampleRate*mult*afr.Channels);
DateTime lastPacket = DateTime.Now;
bool realTime = _source.IndexOf("://") != -1;
try
{
DateTime req = DateTime.Now;
while (!_stopEvent.WaitOne(0, false))
{
data = afr.ReadAudioFrame();
if (data.Length>0)
{
lastPacket = DateTime.Now;
if (DataAvailable != null)
{
//forces processing of volume level without piping it out
_waveProvider.AddSamples(data, 0, data.Length);
var sampleBuffer = new float[data.Length];
_sampleChannel.Read(sampleBuffer, 0, data.Length);
if (WaveOutProvider!=null && Listening)
{
WaveOutProvider.AddSamples(data, 0, data.Length);
}
var da = new DataAvailableEventArgs((byte[]) data.Clone());
DataAvailable(this, da);
}
if (realTime)
{
if (_stopEvent.WaitOne(10, false))
break;
}
else
{
double f = (data.Length/btrg)*1000;
if (f > 0)
{
var span = DateTime.Now.Subtract(req);
var msec = Convert.ToInt32(f - (int) span.TotalMilliseconds);
if ((msec > 0) && (_stopEvent.WaitOne(msec, false)))
break;
req = DateTime.Now;
}
}
}
else
{
if ((DateTime.Now - lastPacket).TotalMilliseconds > 5000)
{
afr.Close();
Stop();
throw new Exception("Audio source timeout");
}
if (_stopEvent.WaitOne(30, false))
break;
}
}
if (AudioFinished != null)
AudioFinished(this, ReasonToFinishPlaying.StoppedByUser);
}
catch (Exception e)
{
if (AudioSourceError!=null)
AudioSourceError(this, new AudioSourceErrorEventArgs(e.Message));
Log.Error("",e);//MainForm.LogExceptionToFile(e);
}
}
Example 9: Execute
public static void Execute(QiSession session)
{
string serviceName = "CSharpSoundDownloaderSpare";
var audioDevice = session.GetService("ALAudioDevice");
var waveIn = new WaveInEvent();
#region 1/4: Set up sending audio to the robot
//Lower the output sample rate from the default (48 kHz) to 16 kHz.
//Note that only 16000, 22050, 44100, or 48000 can be selected.
audioDevice["setParameter"].Call("outputSampleRate", 16000);
//Affects how often the DataAvailable event below fires, and therefore the buffer length.
//Note that the buffer length must not exceed 16384
//(see the official documentation for details).
waveIn.BufferMilliseconds = 200;
//Microphone capture format: the sample rate must match the value set above
waveIn.WaveFormat = new WaveFormat(16000, 16, 2);
int count = 0;
waveIn.DataAvailable += (_, e) =>
{
if (e.BytesRecorded > 16384) return;
byte[] bufferToSend = new byte[e.BytesRecorded];
Array.Copy(e.Buffer, bufferToSend, e.BytesRecorded);
int p = audioDevice["sendRemoteBufferToOutput"].Post(bufferToSend.Length / 4, bufferToSend);
Console.WriteLine($"received data, {count}");
count++;
};
#endregion
#region 2/4: Pick up audio from the robot - prepare the playback device
var mmDevice = new MMDeviceEnumerator().GetDefaultAudioEndpoint(DataFlow.Render, Role.Multimedia);
var wavProvider = new BufferedWaveProvider(new WaveFormat(16000, 16, 1));
var wavPlayer = new WasapiOut(mmDevice, AudioClientShareMode.Shared, false, 200);
wavPlayer.Init(new VolumeWaveProvider16(wavProvider));
wavPlayer.Play();
#endregion
#region 3/4: Pick up audio from the robot - enter the robot's microphone monitoring mode
var objBuilder = QiObjectBuilder.Create();
//Registering the processRemote callback satisfies the interface ALAudioDevice expects
objBuilder.AdvertiseMethod(
"processRemote::v(iimm)",
(sig, arg) =>
{
//process the data here
//Console.WriteLine("Received Buffer!");
//Console.WriteLine(arg.Dump());
//the layout of the data can be inspected to some extent by dumping it as above
byte[] raw = arg[3].ToBytes();
wavProvider.AddSamples(raw, 0, raw.Length);
return QiValue.Void;
});
//register the service that exposes the callback above
session.Listen("tcp://0.0.0.0:0").Wait();
ulong registeredId = session.RegisterService(serviceName, objBuilder.BuildObject()).GetUInt64(0UL);
#endregion
#region 4/4: Adjust the settings and run the actual input/output
//Magic numbers; see http://www.baku-dreameater.net/archives/2411 for details
audioDevice["setClientPreferences"].Call(serviceName, 16000, 3, 0);
//start
audioDevice["subscribe"].Call(serviceName);
waveIn.StartRecording();
#endregion
Console.WriteLine("Press ENTER to quit..");
Console.ReadLine();
audioDevice["unsubscribe"].Call(serviceName);
session.UnregisterService((uint)registeredId);
wavPlayer.Stop();
wavPlayer.Dispose();
waveIn.StopRecording();
waveIn.Dispose();
}
Example 10: PlayAudioFromConnection
private void PlayAudioFromConnection(TcpClient client)
{
var inputStream = new BufferedStream(client.GetStream());
var bufferedWaveProvider = new BufferedWaveProvider(waveFormat);
var savingWaveProvider = new SavingWaveProvider(bufferedWaveProvider, "temp.wav");
var player = new WaveOut();
player.Init(savingWaveProvider);
player.Play();
while (client.Connected)
{
if (terminate)
{
client.Close();
break;
}
var available = client.Available;
if (available > 0)
{
var buffer = new byte[available];
var bytes = inputStream.Read(buffer, 0, buffer.Length);
bufferedWaveProvider.AddSamples(buffer, 0, bytes);
Console.WriteLine("{0} \t {1} bytes", client.Client.RemoteEndPoint, bytes);
}
}
player.Stop();
savingWaveProvider.Dispose();
}
Example 11: EventSource_OnPlaySoundEffect
private void EventSource_OnPlaySoundEffect(object sender, Client.PlaySoundEffectEventArgs e)
{
if (SelectedOutput != null)
{
IWavePlayer waveOut = null;
BufferedWaveProvider provider;
SampleChannel sampleChannel;
try
{
waveOut = InstanciateWavePlayerForOutput(SelectedOutput, 150, AudioClientShareMode.Shared, false);
provider = new BufferedWaveProvider(e.Format);
sampleChannel = new SampleChannel(provider, false);
waveOut.Init(sampleChannel);
provider.AddSamples(e.Data, 0, e.Data.Length);
waveOut.PlaybackStopped += waveOut_PlaybackStopped;
}
catch
{
if (waveOut != null)
waveOut.Dispose();
return;
}
new Action<IWavePlayer, BufferedWaveProvider>((player, buffer) =>
{
waveOut.Play();
while (waveOut.PlaybackState == PlaybackState.Playing && buffer.BufferedDuration > TimeSpan.Zero)
{
System.Threading.Thread.Sleep(100);
}
}).BeginInvoke(waveOut, provider, new AsyncCallback(PlaybackDone), waveOut);
}
}
Example 12: FfmpegListener
private void FfmpegListener()
{
_reasonToStop = ReasonToFinishPlaying.StoppedByUser;
_afr = null;
bool open = false;
string errmsg = "";
try
{
Program.FFMPEGMutex.WaitOne();
_afr = new AudioFileReader();
int i = _source.IndexOf("://", StringComparison.Ordinal);
if (i > -1)
{
_source = _source.Substring(0, i).ToLower() + _source.Substring(i);
}
_afr.Timeout = Timeout;
_afr.AnalyzeDuration = AnalyseDuration;
_afr.Open(_source);
open = true;
}
catch (Exception ex)
{
MainForm.LogExceptionToFile(ex, "FFMPEG");
}
finally
{
try
{
Program.FFMPEGMutex.ReleaseMutex();
}
catch (ObjectDisposedException)
{
//can happen on shutdown
}
}
if (_afr == null || !_afr.IsOpen || !open)
{
ShutDown("Could not open audio stream" + ": " + _source);
return;
}
RecordingFormat = new WaveFormat(_afr.SampleRate, 16, _afr.Channels);
_waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };
_sampleChannel = new SampleChannel(_waveProvider);
_sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
int mult = _afr.BitsPerSample / 8;
double btrg = Convert.ToDouble(_afr.SampleRate * mult * _afr.Channels);
LastFrame = DateTime.UtcNow;
bool realTime = !IsFileSource;
try
{
DateTime req = DateTime.UtcNow;
while (!_stopEvent.WaitOne(10, false) && !MainForm.ShuttingDown)
{
byte[] data = _afr.ReadAudioFrame();
if (data == null || data.Length == 0) // Equals(0) on a byte[] is never true; an empty-frame check is intended
{
if (!realTime)
{
break;
}
}
if (data != null && data.Length > 0)
{
LastFrame = DateTime.UtcNow;
var da = DataAvailable;
if (da != null)
{
//forces processing of volume level without piping it out
_waveProvider.AddSamples(data, 0, data.Length);
var sampleBuffer = new float[data.Length];
_sampleChannel.Read(sampleBuffer, 0, data.Length);
da(this, new DataAvailableEventArgs((byte[])data.Clone()));
if (WaveOutProvider != null && Listening)
{
WaveOutProvider.AddSamples(data, 0, data.Length);
}
}
if (realTime)
{
if (_stopEvent.WaitOne(30, false))
break;
}
else
{
//
double f = (data.Length / btrg) * 1000;
if (f > 0)
{
//......... part of the code is omitted here .........
Example 13: AudioIn
private void AudioIn(Socket mySocket)
{
var wf = new WaveFormat(8000,16,1);
DirectSoundOut dso;
if (String.IsNullOrEmpty(iSpyServer.Default.AudioOutDevice))
dso = new DirectSoundOut(100);
else
{
dso = new DirectSoundOut(Guid.Parse(iSpyServer.Default.AudioOutDevice));
}
var bwp = new BufferedWaveProvider(wf);
dso.Init(bwp);
dso.Play();
var bBuffer = new byte[3200];
try
{
while (mySocket.Connected)
{
int i = mySocket.Receive(bBuffer, 0, 3200, SocketFlags.None);
byte[] dec;
ALawDecoder.ALawDecode(bBuffer, i, out dec);
bwp.AddSamples(dec, 0, dec.Length);
Thread.Sleep(100);
}
}
catch(Exception ex)
{
mySocket.Close();
mySocket = null;
}
dso.Stop();
dso.Dispose();
}
Example 14: FfmpegListener
private void FfmpegListener()
{
_reasonToStop = ReasonToFinishPlaying.StoppedByUser;
_afr = null;
bool open = false;
string errmsg = "";
try
{
Program.FfmpegMutex.WaitOne();
_afr = new AudioFileReader();
int i = _source.IndexOf("://", StringComparison.Ordinal);
if (i>-1)
{
_source = _source.Substring(0, i).ToLower() + _source.Substring(i);
}
_afr.Timeout = Timeout;
_afr.AnalyzeDuration = AnalyseDuration;
_afr.Headers = Headers;
_afr.Cookies = Cookies;
_afr.UserAgent = UserAgent;
_afr.Open(_source);
open = true;
}
catch (Exception ex)
{
Logger.LogExceptionToFile(ex,"FFMPEG");
}
finally
{
try
{
Program.FfmpegMutex.ReleaseMutex();
}
catch (ObjectDisposedException)
{
//can happen on shutdown
}
}
if (_afr == null || !_afr.IsOpen || !open)
{
ShutDown("Could not open audio stream" + ": " + _source);
return;
}
RecordingFormat = new WaveFormat(_afr.SampleRate, 16, _afr.Channels);
_waveProvider = new BufferedWaveProvider(RecordingFormat) { DiscardOnBufferOverflow = true, BufferDuration = TimeSpan.FromMilliseconds(500) };
_sampleChannel = new SampleChannel(_waveProvider);
_sampleChannel.PreVolumeMeter += SampleChannelPreVolumeMeter;
LastFrame = DateTime.UtcNow;
try
{
while (!_stopEvent.WaitOne(10, false) && !MainForm.ShuttingDown)
{
byte[] data = _afr.ReadAudioFrame();
if (data!=null && data.Length > 0)
{
LastFrame = DateTime.UtcNow;
var da = DataAvailable;
if (da != null)
{
//forces processing of volume level without piping it out
_waveProvider.AddSamples(data, 0, data.Length);
var sampleBuffer = new float[data.Length];
int read = _sampleChannel.Read(sampleBuffer, 0, data.Length);
da(this, new DataAvailableEventArgs((byte[])data.Clone(),read));
if (Listening)
{
WaveOutProvider?.AddSamples(data, 0, read);
}
}
if (_stopEvent.WaitOne(30, false))
break;
}
else
{
if ((DateTime.UtcNow - LastFrame).TotalMilliseconds > Timeout)
{
throw new Exception("Audio source timeout");
}
if (_stopEvent.WaitOne(30, false))
break;
}
}
}
catch (Exception e)
{
Logger.LogExceptionToFile(e,"FFMPEG");
//......... part of the code is omitted here .........
Example 15: AddChopPrevention
private void AddChopPrevention(BufferedWaveProvider bwp)
{
// SampleRate / 25 samples, doubled to a byte count below, is about 20 ms of 16-bit stereo audio (40 ms if mono)
var delayOf20Ms = (_waveFormat.SampleRate / 25);
byte[] delay = new byte[delayOf20Ms * 2];
bwp.AddSamples(delay, 0, delay.Length); // prime the buffer with silence so playback does not start with an audible chop
}
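As a usage note, a helper like this would typically be called between Init and Play, so a little silence is already queued before live data arrives. A hedged sketch of that wiring, where _waveFormat and waveOut are assumed members of the surrounding class:

// Hypothetical wiring for the helper above; _waveFormat and waveOut are assumptions, not from the example.
var bwp = new BufferedWaveProvider(_waveFormat);
waveOut.Init(bwp);
AddChopPrevention(bwp); // queue ~20 ms of silence first
waveOut.Play();         // live samples added afterwards then start without a chop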