This article collects typical usage examples of the C# method NAudio.Wave.WaveIn.StartRecording. If you are wondering how WaveIn.StartRecording is used in C#, or what it is good for, the curated code samples below may help. You can also explore the containing class, NAudio.Wave.WaveIn, for further usage examples.
A total of 15 code examples of WaveIn.StartRecording are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# samples.
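As a quick orientation before the examples: the typical pattern is to create a WaveIn instance, pick a device and WaveFormat, attach a DataAvailable handler, call StartRecording, and later call StopRecording and Dispose. The following is only a minimal sketch of that pattern; the class and handler names are illustrative and not taken from any example below.

// Minimal sketch of the usual WaveIn capture pattern (illustrative only).
using System;
using NAudio.Wave;

class CaptureSketch
{
    private WaveIn waveIn;

    public void Start()
    {
        waveIn = new WaveIn();
        waveIn.DeviceNumber = 0;                          // default capture device
        waveIn.WaveFormat = new WaveFormat(44100, 16, 1); // 44.1 kHz, 16 bit, mono
        waveIn.DataAvailable += OnDataAvailable;
        waveIn.StartRecording();
    }

    private void OnDataAvailable(object sender, WaveInEventArgs e)
    {
        // e.Buffer holds e.BytesRecorded bytes of raw PCM audio
    }

    public void Stop()
    {
        waveIn.StopRecording();
        waveIn.Dispose();
    }
}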
Example 1: ilPanel1_Load
// This gets called when the panel is loaded into the form.
private void ilPanel1_Load(object sender, EventArgs e) {
    m_shutdown = false;
    // set up the scene
    ilPanel1.Scene.Add(new ILPlotCube(twoDMode: false) {
        Children = {
            // create two line plots: the first displays the data itself ...
            new ILLinePlot(0, DefaultLinePlotTag, Color.Magenta, lineWidth: 1),
            // ... the second marks magnitude peaks; its line is hidden
            new ILLinePlot(0, DefaultMarkerPlotTag, markerStyle: MarkerStyle.Square) { Line = { Visible = false } }
        },
        // we want both axes in logarithmic scale
        ScaleModes = { XAxisScale = AxisScale.Logarithmic, YAxisScale = AxisScale.Logarithmic },
        // configure axis labels
        Axes = {
            XAxis = { Label = { Text = "Frequency [1/\\omega]" }, LabelPosition = new Vector3(1, 1, 0) },
            YAxis = { Label = { Text = "Magnitude [dB]" }, LabelPosition = new Vector3(1, 1, 0), LabelAnchor = new PointF(1, 0) }
        }
    });
    // set up the audio stream (this is not related to ILNumerics but to the NAudio helper lib)
    m_waveInStream = new WaveIn();
    m_waveInStream.WaveFormat = new WaveFormat(m_sampFreq, m_bitRate, 1); // 1: mono
    m_waveInStream.DeviceNumber = 0;
    m_waveInStream.BufferMilliseconds = (int)(m_fftlen / (float)m_sampFreq * 1010); // roughly one buffer size
    m_waveInStream.DataAvailable += new EventHandler<WaveInEventArgs>(waveInStream_DataAvailable);
    try {
        m_waveInStream.StartRecording();
    } catch (NAudio.MmException exc) {
        // if no device exists or no microphone is plugged in, an exception is thrown here
        MessageBox.Show("Error initializing audio device. Make sure that a default recording device is available!"
            + Environment.NewLine + "Error details: " + exc.Message);
    }
}
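The example above only starts the capture; the m_shutdown flag suggests the stream is torn down elsewhere. A plausible cleanup, not shown in the original, would stop and dispose the stream when the form closes, for instance:

// Hypothetical cleanup for the stream above (not part of the original example).
private void Form_FormClosing(object sender, FormClosingEventArgs e) {
    m_shutdown = true;
    if (m_waveInStream != null) {
        m_waveInStream.StopRecording();
        m_waveInStream.Dispose();
        m_waveInStream = null;
    }
}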
Example 2: SequenceRecorder
public SequenceRecorder(Conductor cond, KeyboardConfiguration key)
{
    conductor = cond;
    keyboard = key;
    InitializeComponent();
    // enumerate the available capture devices and list them in the UI
    int waveInDevices = NAudio.Wave.WaveIn.DeviceCount;
    for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
    {
        NAudio.Wave.WaveInCapabilities deviceInfo = NAudio.Wave.WaveIn.GetCapabilities(waveInDevice);
        microphoneList.Items.Add(waveInDevice + ": " + deviceInfo.ProductName);
    }
    microphoneList.SelectedIndex = 0;
    sensitivity.SelectedIndex = 0;
    sourceStream = new NAudio.Wave.WaveIn();
    sourceStream.DeviceNumber = 0;
    sourceStream.WaveFormat = conductor.GetWaveFormat(); // new NAudio.Wave.WaveFormat(44100, NAudio.Wave.WaveIn.GetCapabilities(0).Channels);
    bytesPerChannel = (sourceStream.WaveFormat.BitsPerSample / 8);
    bytesPerSample = bytesPerChannel * sourceStream.WaveFormat.Channels;
    sourceStream.DataAvailable += new EventHandler<NAudio.Wave.WaveInEventArgs>(sourceStream_DataAvailable);
    sourceStream.StartRecording();
}
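The handler sourceStream_DataAvailable is not shown; the bytesPerChannel/bytesPerSample fields suggest it walks the buffer frame by frame. Assuming 16-bit PCM, a handler that computes the peak amplitude of each buffer might look roughly like this (the handler body is an assumption, not part of the original):

// Hypothetical handler: computes the peak 16-bit sample value in each buffer.
private void sourceStream_DataAvailable(object sender, NAudio.Wave.WaveInEventArgs e)
{
    int peak = 0;
    for (int offset = 0; offset + bytesPerSample <= e.BytesRecorded; offset += bytesPerSample)
    {
        // first channel of the sample frame (assumes 16 bits per sample)
        int value = BitConverter.ToInt16(e.Buffer, offset);
        peak = Math.Max(peak, Math.Abs(value));
    }
    // compare 'peak' against the selected sensitivity threshold here
}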
Example 3: Form1_Load
private void Form1_Load(object sender, EventArgs e)
{
    byte[] apk, ask, bpk, bsk;
    NaClClient.CreateKeys(out apk, out ask);
    NaClClient.CreateKeys(out bpk, out bsk);
    var hasher = System.Security.Cryptography.SHA256.Create();
    _clientA = NaClClient.Create(apk, ask, bpk);
    _clientB = NaClClient.Create(bpk, bsk, apk);
    _sw = new Stopwatch();
    _sw.Start();
    _wave = new WaveIn(this.Handle);
    _wave.WaveFormat = new WaveFormat(12000, 8, 1);
    _wave.BufferMilliseconds = 100;
    _wave.DataAvailable += _wave_DataAvailable;
    _wave.StartRecording();
    _playback = new BufferedWaveProvider(_wave.WaveFormat);
    _waveOut = new WaveOut();
    _waveOut.DesiredLatency = 100;
    _waveOut.Init(_playback);
    _waveOut.Play();
}
Example 4: Connect
private void Connect(IPEndPoint endPoint, int inputDeviceNumber, INetworkChatCodec codec)
{
    waveIn = new WaveIn();
    waveIn.BufferMilliseconds = 50;
    waveIn.DeviceNumber = inputDeviceNumber;
    waveIn.WaveFormat = codec.RecordFormat;
    waveIn.DataAvailable += waveIn_DataAvailable;
    waveIn.StartRecording();
    udpSender = new UdpClient();
    udpListener = new UdpClient();
    // To allow us to talk to ourselves for test purposes:
    // http://stackoverflow.com/questions/687868/sending-and-receiving-udp-packets-between-two-programs-on-the-same-computer
    udpListener.Client.SetSocketOption(SocketOptionLevel.Socket, SocketOptionName.ReuseAddress, true);
    udpListener.Client.Bind(endPoint);
    udpSender.Connect(endPoint);
    waveOut = new WaveOut();
    waveProvider = new BufferedWaveProvider(codec.RecordFormat);
    waveOut.Init(waveProvider);
    waveOut.Play();
    connected = true;
    var state = new ListenerThreadState { Codec = codec, EndPoint = endPoint };
    ThreadPool.QueueUserWorkItem(ListenerThread, state);
}
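The waveIn_DataAvailable handler is not shown here. In NAudio's network chat demo, which this code appears to follow, the handler encodes the captured buffer with the codec and sends it over the UDP socket. Assuming the codec exposes an Encode(buffer, offset, count) method and has been stored in a field, it might look roughly like this:

// Hypothetical handler; assumes INetworkChatCodec.Encode(byte[], int, int)
// as in NAudio's chat demo and that the codec from Connect() was kept in a field.
private void waveIn_DataAvailable(object sender, WaveInEventArgs e)
{
    if (!connected) return;
    byte[] encoded = codec.Encode(e.Buffer, 0, e.BytesRecorded);
    udpSender.Send(encoded, encoded.Length);
}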
Example 5: RecordSound
public static void RecordSound(string name)
{
    // detect presence of recording hardware
    int waveDeviceCount = WaveIn.DeviceCount;
    if (waveDeviceCount > 0)
    {
        inputDevice = 0;
    }
    else
    {
        MessageBox.Show("No recording hardware detected", "iMasomoAdmin", MessageBoxButton.OK, MessageBoxImage.Error);
        return;
    }
    wordName = name;
    try
    {
        waveIn = new WaveIn();
        waveIn.DeviceNumber = inputDevice;
        waveIn.WaveFormat = new NAudio.Wave.WaveFormat(44100, WaveIn.GetCapabilities(inputDevice).Channels);
        // when data arrives, the handler writes it to the wave file
        waveIn.DataAvailable += waveIn_DataAvailable;
        waveWriter = new WaveFileWriter(Environment.CurrentDirectory + @"\Media\" + wordName + ".wav", waveIn.WaveFormat);
        waveIn.StartRecording();
    }
    catch (Exception ex)
    {
        MessageBox.Show(ex.Message);
    }
}
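The waveIn_DataAvailable handler referenced above is not part of this snippet. For a recorder like this it typically just appends the captured bytes to the WaveFileWriter, for example (a sketch, assuming waveWriter is the writer created above):

// Sketch of the data handler: append each captured buffer to the WAV file.
static void waveIn_DataAvailable(object sender, WaveInEventArgs e)
{
    if (waveWriter == null) return;
    waveWriter.Write(e.Buffer, 0, e.BytesRecorded);
    waveWriter.Flush();
}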
Example 6: StartEncoding
void StartEncoding()
{
    _startTime = DateTime.Now;
    _bytesSent = 0;
    _segmentFrames = 960;
    _encoder = new OpusEncoder(48000, 1, OpusNet.OpusApplication.Voip);
    _encoder.Bitrate = 8192;
    _decoder = new OpusDecoder(48000, 1);
    _bytesPerSegment = _encoder.FrameByteCount(_segmentFrames);
    _waveIn = new WaveIn(WaveCallbackInfo.FunctionCallback());
    _waveIn.BufferMilliseconds = 50;
    _waveIn.DeviceNumber = comboBox1.SelectedIndex;
    _waveIn.DataAvailable += _waveIn_DataAvailable;
    _waveIn.WaveFormat = new WaveFormat(48000, 16, 1);
    _playBuffer = new BufferedWaveProvider(new WaveFormat(48000, 16, 1));
    _waveOut = new WaveOut(WaveCallbackInfo.FunctionCallback());
    _waveOut.DeviceNumber = comboBox2.SelectedIndex;
    _waveOut.Init(_playBuffer);
    _waveOut.Play();
    _waveIn.StartRecording();
    if (_timer == null)
    {
        _timer = new Timer();
        _timer.Interval = 1000;
        _timer.Tick += _timer_Tick;
    }
    _timer.Start();
}
Example 7: PrepareMic
public static void PrepareMic()
{
    // open the default capture device, start it, then immediately stop and dispose it
    WaveIn waveIn = new WaveIn();
    waveIn.WaveFormat = new WaveFormat(SAMPLE_FREQ, 16, 1);
    waveIn.StartRecording();
    waveIn.StopRecording();
    waveIn.Dispose();
}
Example 8: StartListener
public void StartListener()
{
    waveListener = new WaveIn();
    waveListener.DeviceNumber = 0;
    waveListener.DataAvailable += ListenerDataAvailable;
    waveListener.WaveFormat = new WaveFormat(32000, 1);
    waveListener.StartRecording();
}
Example 9: MainWindow
public MainWindow()
{
    InitializeComponent();
    this.WindowStartupLocation = System.Windows.WindowStartupLocation.CenterScreen;
    // list the available capture devices on the console
    int waveInDevices = WaveIn.DeviceCount;
    for (int waveInDevice = 0; waveInDevice < waveInDevices; waveInDevice++)
    {
        WaveInCapabilities deviceInfo = WaveIn.GetCapabilities(waveInDevice);
        Console.WriteLine("Device {0}: {1}, {2} channels",
            waveInDevice, deviceInfo.ProductName, deviceInfo.Channels);
    }
    waveIn = new WaveIn();
    waveIn.BufferMilliseconds = 47 * buffersize / 2048;
    waveIn.DeviceNumber = 0;
    waveIn.WaveFormat = new WaveFormat(44100, 32, 1);
    waveIn.DataAvailable += waveIn_DataAvailable;
    try
    {
        waveIn.StartRecording();
    }
    catch (NAudio.MmException e)
    {
        Console.WriteLine(e.ToString() + "\nPlug in a microphone!");
    }
    bin = new int[buffersize * 2];
    sampledata = new float[buffersize * 2];
    priori = new double[buffersize * 2];
    channelLabel = new int[1];
    channelLabel[0] = 1;
    velocity = new int[1];
    velocity[0] = 0;
    for (int i = 0; i < buffersize * 2; i++)
    {
        bin[i] = i;
        sampledata[i] = 0;
        priori[i] = 0;
    }
    chart1.Viewport.Visible = new DataRect(0, -1.0, buffersize * 2, 2.0);
    chart2.Viewport.Visible = new DataRect(1620, 0, 280, 110);
    bins = new EnumerableDataSource<int>(bin);
    bins.SetXMapping(x => x);
    rawIn = new EnumerableDataSource<float>(sampledata);
    rawIn.SetYMapping(y => y);
    CompositeDataSource comp1 = new CompositeDataSource(bins, rawIn);
    chart1.AddLineGraph(comp1);
    CompositeDataSource comp2 = new CompositeDataSource(bins, rawIn);
    chart2.AddLineGraph(comp2);
}
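waveIn_DataAvailable is not shown here. Since the format is 32-bit PCM mono, the handler presumably converts each 4-byte sample into a float in sampledata so the charts above can plot it. A possible sketch (an assumption, not the original handler):

// Hypothetical handler: convert 32-bit PCM samples to floats for plotting.
private void waveIn_DataAvailable(object sender, WaveInEventArgs e)
{
    int samples = e.BytesRecorded / 4; // 4 bytes per 32-bit sample
    for (int i = 0; i < samples && i < sampledata.Length; i++)
    {
        int raw = BitConverter.ToInt32(e.Buffer, i * 4);
        sampledata[i] = raw / (float)int.MaxValue; // normalize to roughly [-1, 1]
    }
}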
Example 10: StartAudioIn
public void StartAudioIn()
{
    m_WaveIn = new WaveIn();
    m_WaveIn.DataAvailable += new EventHandler<WaveInEventArgs>(WaveIn_DataAvailable);
    //m_WaveIn.WaveFormat = new WaveFormat(44100, 32, 2);
    m_WaveIn.WaveFormat = new WaveFormat(44100, 16, 1); // 44.1 kHz, 16 bit, mono
    m_WaveIn.StartRecording();
}
Example 11: Record
public void Record(string fileName, int volume = 100)
{
    _waveIn = new WaveIn { WaveFormat = new WaveFormat() };
    _writer = new WaveFileWriter(fileName, _waveIn.WaveFormat);
    TrySetVolumeControl(_waveIn.GetMixerLine(), volume);
    _waveIn.DataAvailable += new_dataAvailable;
    _waveIn.StartRecording();
}
Example 12: StartListening
public void StartListening()
{
    if (!IsActive)
    {
        _waveIn = new WaveIn { DeviceNumber = Options.DeviceNumber };
        _waveIn.DataAvailable += WaveIn_DataAvailable;
        _waveIn.WaveFormat = new WaveFormat(Options.MicrophoneBitRate, 16, 1);
        _waveIn.StartRecording();
        OnAfterStartListening();
    }
}
Example 13: InitializeNAudio
public void InitializeNAudio(int device)
{
    waveIn = new WaveIn();
    waveIn.DeviceNumber = device;
    waveIn.DataAvailable += waveIn_DataAvailable;
    waveIn.RecordingStopped += new EventHandler<StoppedEventArgs>(waveIn_RecordingStopped);
    int sampleRate = 8000; // 8 kHz
    int channels = 1;      // mono
    waveIn.WaveFormat = new WaveFormat(sampleRate, channels);
    waveIn.StartRecording();
}
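This example also subscribes to RecordingStopped, whose handler is not shown. A common pattern is to dispose the device there, once it has actually finished stopping, for example (a sketch, not the original handler):

// Sketch of a RecordingStopped handler that releases the device.
void waveIn_RecordingStopped(object sender, StoppedEventArgs e)
{
    waveIn.Dispose();
    waveIn = null;
    if (e.Exception != null)
    {
        // recording ended because of an error; inspect e.Exception here
    }
}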
Example 14: StartRecording
public void StartRecording()
{
    waveInStream = new WaveIn();
    waveInStream.WaveFormat = new WaveFormat(44100, 16, 1);
    writer = new WaveFileWriter(FileName, waveInStream.WaveFormat);
    waveInStream.DataAvailable += WaveInStream_DataAvailable;
    waveInStream.StartRecording();
}
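The matching stop routine is not shown. Since WaveFileWriter only finalizes the WAV header when it is disposed, a counterpart along these lines would usually be needed (a sketch, not from the original class):

// Sketch of the matching stop routine: stop capture and finalize the WAV file.
public void StopRecording()
{
    waveInStream.StopRecording();
    waveInStream.Dispose();
    writer.Dispose(); // flushes data and writes the final WAV header
}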
Example 15: RestartRecording
public void RestartRecording()
{
    if (WaveInEvent.DeviceCount > 0)
    {
        waveIn = new WaveIn();
        int inputDeviceNumber = WaveInEvent.DeviceCount - 1;
        waveIn.DeviceNumber = inputDeviceNumber;
        waveIn.BufferMilliseconds = 10;
        waveIn.DataAvailable += new EventHandler<WaveInEventArgs>(waveIn_DataAvailable);
        waveIn.StartRecording();
    }
}
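Because this method can be called repeatedly, the previously created WaveIn should normally be stopped and disposed before a new one is created, otherwise each restart leaks a capture device. A guard at the top of the method might look like this (an addition, not in the original):

// Hypothetical guard before re-creating the device in RestartRecording().
if (waveIn != null)
{
    waveIn.StopRecording();
    waveIn.Dispose();
    waveIn = null;
}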