This article collects typical usage examples of the C# NAudio.Wave.WaveIn class. If you are wondering what the WaveIn class does and how to use it, the curated examples below may help.
The WaveIn class belongs to the NAudio.Wave namespace. Fifteen code examples are shown below, sorted by popularity by default.
Example 1: button_rec_Click
// "Record" button handler
private void button_rec_Click(object sender, EventArgs e)
{
    button_stop.Enabled = true;
    timer.Start();
    ind = 1;
    try
    {
        waveIn = new WaveIn();
        waveIn.DeviceNumber = 0; // default recording device (if one is present)
        waveIn.DataAvailable += waveIn_DataAvailable;       // handler raised whenever captured data is available
        waveIn.RecordingStopped += waveIn_RecordingStopped; // handler raised when recording stops
        waveIn.WaveFormat = new WaveFormat(8000, 1);        // WAV format: sample rate and channel count (mono here)
        writer = new WaveFileWriter(outputFilename, waveIn.WaveFormat); // initialize the WaveFileWriter
        waveIn.StartRecording();                            // start recording
        button_play.Enabled = false;
        button_rec.Enabled = false;
        numeric.Enabled = false;
    }
    catch (Exception ex)
    {
        button_play.Enabled = true;
        button_rec.Enabled = true;
        numeric.Enabled = true;
        MessageBox.Show(ex.Message);
    }
}
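The example above attaches waveIn_DataAvailable and waveIn_RecordingStopped but does not show their bodies. A minimal sketch of what such handlers usually look like follows; it assumes the same writer and waveIn fields as the example and is not the original author's code:

// hypothetical handlers, assuming the writer/waveIn fields from Example 1
void waveIn_DataAvailable(object sender, WaveInEventArgs e)
{
    // append the captured PCM bytes to the WAV file
    writer.Write(e.Buffer, 0, e.BytesRecorded);
}

void waveIn_RecordingStopped(object sender, StoppedEventArgs e)
{
    // release the device and finalize the WAV header
    waveIn.Dispose();
    waveIn = null;
    writer.Dispose();
    writer = null;
}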
Example 2: Recorder
public Recorder()
{
    int waveInDevices = WaveIn.DeviceCount;
    //for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
    //{
    //    WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
    //    comboBox1.Items.Add(string.Format("Device {0}: {1}, {2} channels", waveInDevice, deviceInfo.ProductName, deviceInfo.Channels));
    //}
    waveIn = new WaveIn();
    waveIn.DeviceNumber = 0;
    waveIn.DataAvailable += waveIn_DataAvailable;
    waveIn.RecordingStopped += waveIn_RecordingStopped;
    int sampleRate = 16000; // 16 kHz
    int channels = 1;       // mono
    int bits = 16;
    recordingFormat = new WaveFormat(sampleRate, bits, channels);
    waveIn.WaveFormat = recordingFormat;
    string path = "C:\\temp";
    if (!Directory.Exists(path))
    {
        Directory.CreateDirectory(path);
    }
    TempWavFileName = String.Format("{0}\\{1}.wav", path, Guid.NewGuid().ToString());
    writer = new WaveFileWriter(TempWavFileName, recordingFormat);
}
Example 3: Start
public void Start()
{
    if (WaveIn.DeviceCount < 1)
        throw new Exception("Insufficient input device(s)!");
    if (WaveOut.DeviceCount < 1)
        throw new Exception("Insufficient output device(s)!");
    frame_size = toxav.CodecSettings.audio_sample_rate * toxav.CodecSettings.audio_frame_duration / 1000;
    toxav.PrepareTransmission(CallIndex, false);
    WaveFormat format = new WaveFormat((int)toxav.CodecSettings.audio_sample_rate, (int)toxav.CodecSettings.audio_channels);
    wave_provider = new BufferedWaveProvider(format);
    wave_provider.DiscardOnBufferOverflow = true;
    wave_out = new WaveOut();
    //wave_out.DeviceNumber = config["device_output"];
    wave_out.Init(wave_provider);
    wave_source = new WaveIn();
    //wave_source.DeviceNumber = config["device_input"];
    wave_source.WaveFormat = format;
    wave_source.DataAvailable += wave_source_DataAvailable;
    wave_source.RecordingStopped += wave_source_RecordingStopped;
    wave_source.BufferMilliseconds = (int)toxav.CodecSettings.audio_frame_duration;
    wave_source.StartRecording();
    wave_out.Play();
}
Example 4: Connect
private void Connect(IPEndPoint endPoint, int inputDeviceNumber, INetworkChatCodec codec)
{
    waveIn = new WaveIn();
    waveIn.BufferMilliseconds = 50;
    waveIn.DeviceNumber = inputDeviceNumber;
    waveIn.WaveFormat = codec.RecordFormat;
    waveIn.DataAvailable += waveIn_DataAvailable;
    waveIn.StartRecording();
    udpSender = new UdpClient();
    udpListener = new UdpClient();
    // To allow us to talk to ourselves for test purposes:
    // http://stackoverflow.com/questions/687868/sending-and-receiving-udp-packets-between-two-programs-on-the-same-computer
    udpListener.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
    udpListener.Client.Bind(endPoint);
    udpSender.Connect(endPoint);
    waveOut = new WaveOut();
    waveProvider = new BufferedWaveProvider(codec.RecordFormat);
    waveOut.Init(waveProvider);
    waveOut.Play();
    connected = true;
    var state = new ListenerThreadState { Codec = codec, EndPoint = endPoint };
    ThreadPool.QueueUserWorkItem(ListenerThread, state);
}
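This example follows NAudio's network-chat demo; the waveIn_DataAvailable handler and ListenerThread it references are not shown. Below is a rough sketch of how they typically work, assuming a codec field and INetworkChatCodec Encode/Decode methods over byte ranges as in that demo (treat the exact signatures as assumptions):

// hypothetical counterparts to Example 4; signatures assumed from the NAudio network-chat demo
private void waveIn_DataAvailable(object sender, WaveInEventArgs e)
{
    // compress the captured block and push it onto the network
    byte[] encoded = codec.Encode(e.Buffer, 0, e.BytesRecorded);
    udpSender.Send(encoded, encoded.Length);
}

private void ListenerThread(object state)
{
    var listenerState = (ListenerThreadState)state;
    var endPoint = listenerState.EndPoint;
    try
    {
        while (connected)
        {
            byte[] packet = udpListener.Receive(ref endPoint);
            byte[] decoded = listenerState.Codec.Decode(packet, 0, packet.Length);
            waveProvider.AddSamples(decoded, 0, decoded.Length); // queue for playback
        }
    }
    catch (SocketException)
    {
        // expected when the listener socket is closed on disconnect
    }
}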
Example 5: ilPanel1_Load
// this gets called when the panel is loaded into the form
private void ilPanel1_Load(object sender, EventArgs e) {
    m_shutdown = false;
    // setup the scene
    ilPanel1.Scene.Add(new ILPlotCube(twoDMode: false) {
        Children = {
            // create two line plots: the first is used to display the data itself ...
            new ILLinePlot(0, DefaultLinePlotTag, Color.Magenta, lineWidth: 1),
            // ... the second is used for marking magnitude peaks; its line is hidden
            new ILLinePlot(0, DefaultMarkerPlotTag, markerStyle: MarkerStyle.Square) { Line = { Visible = false } }
        },
        // we want both axes in logarithmic scale
        ScaleModes = { XAxisScale = AxisScale.Logarithmic, YAxisScale = AxisScale.Logarithmic },
        // configure axis labels
        Axes = {
            XAxis = { Label = { Text = "Frequency [1/\\omega]" }, LabelPosition = new Vector3(1, 1, 0) },
            YAxis = { Label = { Text = "Magnitude [dB]" }, LabelPosition = new Vector3(1, 1, 0), LabelAnchor = new PointF(1, 0) }
        }
    });
    // setup audio stream (this is not related to ILNumerics but to the NAudio helper lib)
    m_waveInStream = new WaveIn();
    m_waveInStream.WaveFormat = new WaveFormat(m_sampFreq, m_bitRate, 1); // 1: mono
    m_waveInStream.DeviceNumber = 0;
    m_waveInStream.BufferMilliseconds = (int)(m_fftlen / (float)m_sampFreq * 1010); // roughly one buffer size
    m_waveInStream.DataAvailable += new EventHandler<WaveInEventArgs>(waveInStream_DataAvailable);
    try {
        m_waveInStream.StartRecording();
    } catch (NAudio.MmException exc) {
        // when no device exists or no microphone is plugged in, an exception will be thrown here
        MessageBox.Show("Error initializing audio device. Make sure that a default recording device is available!" + Environment.NewLine + "Error details: " + exc.Message);
    }
}
Example 6: button2_Click
// NAudio.Wave.WaveStream stream = null;
private void button2_Click(object sender, EventArgs e)
{
    if (listView1.SelectedItems.Count == 0) return;
    int deviceNumber = listView1.SelectedItems[0].Index;
    //waveOut = new NAudio.Wave.WaveOut();
    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = deviceNumber;
    sourceStream.WaveFormat = new NAudio.Wave.WaveFormat(48000, NAudio.Wave.WaveIn.GetCapabilities(deviceNumber).Channels);
    sourceStream.DataAvailable += new EventHandler<WaveInEventArgs>(sourceStream_DataAvailable);
    //waveWriter = new NAudio.Wave.WaveFileWriter(save.FileName, sourceStream.WaveFormat);
    sourceStream.BufferMilliseconds = 100;
    //wavebuffer = new NAudio.Wave.WaveBuffer();
    //bwp = new NAudio.Wave.BufferedWaveProvider(sourceStream.WaveFormat);
    //bwp.DiscardOnBufferOverflow = true;
    NAudio.Wave.WaveInProvider waveIn = new NAudio.Wave.WaveInProvider(sourceStream);
    waveOut = new NAudio.Wave.DirectSoundOut();
    waveOut.Init(waveIn);
    sourceStream.StartRecording();
    //waveOut.Init(bwp);
    waveOut.Play();
    //sourceStream.StopRecording();
    //Start(sender, e);
    timer1.Enabled = true;
    ++count;
}
Example 7: SequenceRecorder
public SequenceRecorder(Conductor cond, KeyboardConfiguration key)
{
    conductor = cond;
    keyboard = key;
    InitializeComponent();
    int waveInDevices = NAudio.Wave.WaveIn.DeviceCount;
    for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
    {
        NAudio.Wave.WaveInCapabilities deviceInfo = NAudio.Wave.WaveIn.GetCapabilities(waveInDevice);
        microphoneList.Items.Add(waveInDevice + ": " + deviceInfo.ProductName);
    }
    microphoneList.SelectedIndex = 0;
    sensitivity.SelectedIndex = 0;
    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = 0;
    sourceStream.WaveFormat = conductor.GetWaveFormat(); // new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(0).Channels);
    bytesPerChannel = (sourceStream.WaveFormat.BitsPerSample / 8);
    bytesPerSample = bytesPerChannel * sourceStream.WaveFormat.Channels;
    sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
    sourceStream.StartRecording();
}
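The constructor above precomputes bytesPerChannel and bytesPerSample, but the sourceStream_DataAvailable handler is not shown. A minimal sketch of how those fields can be used to scan 16-bit PCM for a peak level (a hypothetical handler body, not the original project's code):

// hypothetical handler: walk the buffer one sample frame at a time
private void sourceStream_DataAvailable(object sender, NAudio.Wave.WaveInEventArgs e)
{
    int peak = 0;
    for (int offset = 0; offset + bytesPerSample <= e.BytesRecorded; offset += bytesPerSample)
    {
        // read the first channel of each frame (bytesPerChannel is 2 for 16-bit audio)
        int sample = BitConverter.ToInt16(e.Buffer, offset);
        peak = Math.Max(peak, Math.Abs(sample));
    }
    // 'peak' could then be compared against the selected sensitivity threshold
}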
Example 8: RecordSound
public static void RecordSound(string name)
{
    int waveDeviceCount = WaveIn.DeviceCount;
    // detect presence of recording hardware
    if (waveDeviceCount > 0)
    {
        inputDevice = 0;
    }
    else
    {
        MessageBox.Show("No recording hardware detected", "iMasomoAdmin", MessageBoxButton.OK, MessageBoxImage.Error);
        return;
    }
    wordName = name;
    try
    {
        waveIn = new WaveIn();
        waveIn.DeviceNumber = inputDevice;
        waveIn.WaveFormat = new NAudio.Wave.WaveFormat(44100, WaveIn.GetCapabilities(inputDevice).Channels);
        // in the presence of incoming data, write the data to a buffer
        waveIn.DataAvailable += waveIn_DataAvailable;
        waveWriter = new WaveFileWriter(Environment.CurrentDirectory + @"\Media\" + wordName + ".wav", waveIn.WaveFormat);
        waveIn.StartRecording();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
Example 9: StartEncoding
void StartEncoding()
{
    _startTime = DateTime.Now;
    _bytesSent = 0;
    _segmentFrames = 960;
    _encoder = new OpusEncoder(48000, 1, OpusNet.OpusApplication.Voip);
    _encoder.Bitrate = 8192;
    _decoder = new OpusDecoder(48000, 1);
    _bytesPerSegment = _encoder.FrameByteCount(_segmentFrames);
    _waveIn = new WaveIn(WaveCallbackInfo.FunctionCallback());
    _waveIn.BufferMilliseconds = 50;
    _waveIn.DeviceNumber = comboBox1.SelectedIndex;
    _waveIn.DataAvailable += _waveIn_DataAvailable;
    _waveIn.WaveFormat = new WaveFormat(48000, 16, 1);
    _playBuffer = new BufferedWaveProvider(new WaveFormat(48000, 16, 1));
    _waveOut = new WaveOut(WaveCallbackInfo.FunctionCallback());
    _waveOut.DeviceNumber = comboBox2.SelectedIndex;
    _waveOut.Init(_playBuffer);
    _waveOut.Play();
    _waveIn.StartRecording();
    if (_timer == null)
    {
        _timer = new Timer();
        _timer.Interval = 1000;
        _timer.Tick += _timer_Tick;
    }
    _timer.Start();
}
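StartEncoding wires up _waveIn_DataAvailable, which in this kind of Opus loopback demo accumulates PCM, encodes fixed-size segments, and immediately decodes them back into _playBuffer for local playback. The sketch below illustrates that flow; the Encode/Decode signatures of the Opus wrapper are assumptions and may differ in your binding:

// hypothetical loopback handler; adjust the Encode/Decode calls to your Opus wrapper's API
private byte[] _notEncoded = new byte[0];

private void _waveIn_DataAvailable(object sender, WaveInEventArgs e)
{
    // prepend whatever was left over from the previous callback
    byte[] sound = new byte[_notEncoded.Length + e.BytesRecorded];
    Buffer.BlockCopy(_notEncoded, 0, sound, 0, _notEncoded.Length);
    Buffer.BlockCopy(e.Buffer, 0, sound, _notEncoded.Length, e.BytesRecorded);

    int segments = sound.Length / _bytesPerSegment;
    int consumed = segments * _bytesPerSegment;

    for (int i = 0; i < segments; i++)
    {
        byte[] segment = new byte[_bytesPerSegment];
        Buffer.BlockCopy(sound, i * _bytesPerSegment, segment, 0, _bytesPerSegment);

        int encodedLength;
        byte[] encoded = _encoder.Encode(segment, segment.Length, out encodedLength); // assumed signature
        int decodedLength;
        byte[] decoded = _decoder.Decode(encoded, encodedLength, out decodedLength);  // assumed signature
        _playBuffer.AddSamples(decoded, 0, decodedLength);
    }

    // keep the tail that did not fill a whole segment
    _notEncoded = new byte[sound.Length - consumed];
    Buffer.BlockCopy(sound, consumed, _notEncoded, 0, _notEncoded.Length);
}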
Example 10: Form1_Load
private void Form1_Load(object sender, EventArgs e)
{
    byte[] apk, ask, bpk, bsk;
    NaClClient.CreateKeys(out apk, out ask);
    NaClClient.CreateKeys(out bpk, out bsk);
    var hasher = System.Security.Cryptography.SHA256.Create();
    _clientA = NaClClient.Create(apk, ask, bpk);
    _clientB = NaClClient.Create(bpk, bsk, apk);
    _sw = new Stopwatch();
    _sw.Start();
    _wave = new WaveIn(this.Handle);
    _wave.WaveFormat = new WaveFormat(12000, 8, 1);
    _wave.BufferMilliseconds = 100;
    _wave.DataAvailable += _wave_DataAvailable;
    _wave.StartRecording();
    _playback = new BufferedWaveProvider(_wave.WaveFormat);
    _waveOut = new WaveOut();
    _waveOut.DesiredLatency = 100;
    _waveOut.Init(_playback);
    _waveOut.Play();
}
Example 11: CreateWaveInDevice
private IWaveIn CreateWaveInDevice()
{
    IWaveIn newWaveIn;
    if (radioButtonWaveIn.Checked)
    {
        newWaveIn = new WaveIn();
        newWaveIn.WaveFormat = new WaveFormat(8000, 1);
    }
    else if (radioButtonWaveInEvent.Checked)
    {
        newWaveIn = new WaveInEvent();
        newWaveIn.WaveFormat = new WaveFormat(8000, 1);
    }
    else if (radioButtonWasapi.Checked)
    {
        // can't set WaveFormat as WASAPI doesn't support SRC
        var device = (MMDevice)comboWasapiDevices.SelectedItem;
        newWaveIn = new WasapiCapture(device);
    }
    else
    {
        // can't set WaveFormat as WASAPI doesn't support SRC
        newWaveIn = new WasapiLoopbackCapture();
    }
    newWaveIn.DataAvailable += OnDataAvailable;
    newWaveIn.RecordingStopped += OnRecordingStopped;
    return newWaveIn;
}
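A short usage sketch for the factory above; the waveIn and writer fields and the outputFilename string are hypothetical names, not part of the original sample:

// hypothetical caller: create the selected capture device and start writing a WAV file
private void StartRecording()
{
    waveIn = CreateWaveInDevice();
    // the writer must use whatever format the chosen device actually delivers
    writer = new WaveFileWriter(outputFilename, waveIn.WaveFormat);
    waveIn.StartRecording();
}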
Example 12: InitializeStream
public void InitializeStream()
{
    sourceStream = new WaveIn();
    sourceStream.BufferMilliseconds = 50;
    sourceStream.DeviceNumber = 0;
    sourceStream.WaveFormat = new WaveFormat(44100, 16, WaveIn.GetCapabilities(0).Channels);
}
Example 13: Initialise
// TODO: wrap WaveIn to allow DI
public void Initialise(WaveFormat format, WaveIn driver)
{
    if (driver == null)
    {
        throw new ArgumentNullException("driver", "Must specify a WaveIn device instance");
    }
    if (format == null)
    {
        throw new ArgumentNullException("format", "Must specify an audio format");
    }
    this.driver = driver;
    driver.DataAvailable += device_DataAvailable;
    var caps = WaveIn.GetCapabilities(driver.DeviceNumber);
    driver.WaveFormat = format;
    device = new WaveInDeviceData
    {
        Driver = driver,
        Name = caps.ProductName,
        Channels = caps.Channels,
        Buffers = new float[caps.Channels][]
    };
    Format = WaveFormat.CreateIeeeFloatWaveFormat(format.SampleRate, device.Channels);
    formatPerLine = WaveFormat.CreateIeeeFloatWaveFormat(format.SampleRate, 1);
    mapInputs(device.Channels);
}
Example 14: AudioRecorder
public AudioRecorder(int microphone)
{
    waveIn = new WaveIn();
    waveIn.DeviceNumber = microphone;
    waveIn.WaveFormat = new WaveFormat(44100, 1);
    bufferedWaveProvider = new BufferedWaveProvider(waveIn.WaveFormat);
    writer = new WaveFileWriter(Settings.Default.tempSoundLocation, waveIn.WaveFormat);
}
Example 15: PrepareMic
public static void PrepareMic()
{
    WaveIn waveIn = new WaveIn();
    waveIn.WaveFormat = new WaveFormat(SAMPLE_FREQ, 16, 1);
    waveIn.StartRecording();
    waveIn.StopRecording();
    waveIn.Dispose();
}