This article collects typical usage examples of the C# method IAudioSource.Start. If you have been wondering how to use IAudioSource.Start in C#, what it is for, or where to find examples of it, the hand-picked code samples below may help. You can also explore further usage examples of the type that declares this method, IAudioSource.
Ten code examples of IAudioSource.Start are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C# code examples.
Example 1: btnStart_Click
/// <summary>
/// Starts capturing from the chosen audio input interface
/// </summary>
///
void btnStart_Click(object sender, EventArgs e)
{
    // Get the device currently selected in the combobox
    AudioDeviceInfo info = comboBox1.SelectedItem as AudioDeviceInfo;

    if (info == null)
    {
        MessageBox.Show("No audio devices available.");
        return;
    }

    // Create a new audio capture device
    source = new AudioCaptureDevice(info)
    {
        // Capture at 22050 Hz
        DesiredFrameSize = 2048,
        SampleRate = 22050
    };

    // Wire up some notification events
    source.NewFrame += source_NewFrame;
    source.AudioSourceError += source_AudioSourceError;

    // Start it!
    source.Start();
}
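The source_NewFrame and source_AudioSourceError handlers wired up above are not part of this example. As a rough idea of what they could look like with Accord.Audio's event types, here is a minimal, hypothetical sketch; the buffer handling and the Description property usage are assumptions, not code from the original project:

// Hypothetical handlers; NewFrameEventArgs and AudioSourceErrorEventArgs are the
// event argument types used by Accord.Audio capture sources
void source_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // eventArgs.Signal holds the captured frame (2048 samples at 22050 Hz here);
    // copy it into a float buffer for display or further processing
    float[] buffer = new float[eventArgs.Signal.Length];
    eventArgs.Signal.CopyTo(buffer);
}

void source_AudioSourceError(object sender, AudioSourceErrorEventArgs e)
{
    // Surface the capture error to the user
    MessageBox.Show(e.Description);
}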
Example 2: Button1Click
void Button1Click(object sender, EventArgs e)
{
    AudioDeviceInfo info = comboBox1.SelectedItem as AudioDeviceInfo;

    if (info == null)
    {
        MessageBox.Show("No audio devices available.");
        return;
    }

    initializing = true;
    lbStatus.Text = "Waiting for soundcard...";

    source = new AudioCaptureDevice(info.Guid);
    source.SampleRate = 44100;
    source.DesiredFrameSize = 5000;
    source.NewFrame += source_NewFrame;

    detector = new EnergyBeatDetector(43);
    detector.Beat += new EventHandler(detector_Beat);

    sample = new List<ComplexSignal>();

    source.Start();
}
Example 3: Button1Click
void Button1Click(object sender, EventArgs e)
{
    AudioDeviceInfo info = comboBox1.SelectedItem as AudioDeviceInfo;

    if (info == null)
    {
        MessageBox.Show("No audio devices available.");
        return;
    }

    source = new AudioCaptureDevice(info.Guid);
    source.DesiredFrameSize = 2048;
    source.SampleRate = 22050;
    source.NewFrame += source_NewFrame;
    source.AudioSourceError += source_AudioSourceError;

    window = RaisedCosineWindow.Hamming(source.DesiredFrameSize);

    source.Start();
}
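Example 3 only shows the setup; the Hamming window created above is typically applied to each captured frame inside source_NewFrame before a Fourier transform. A minimal sketch of that handler, assuming the usual Accord.Audio windowing/FFT pattern (window.Apply, ToComplex, ForwardFourierTransform); the handler itself is not part of the original example:

void source_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // Apply the raised-cosine (Hamming) window to the captured frame
    Signal windowed = window.Apply(eventArgs.Signal, 0);

    // Transform the windowed frame to the frequency domain
    ComplexSignal complex = windowed.ToComplex();
    complex.ForwardFourierTransform();

    // complex.GetChannel(0) now holds the spectrum of the first channel
}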
Example 4: Enable
//......... part of the code omitted here .........
                    }

                    if (AudioSource == null)
                    {
                        SetErrorState("Mic source offline");
                        AudioSourceErrorState = true;
                        _requestRefresh = true;
                    }
                }
                break;
            case 5:
                int imic;
                if (Int32.TryParse(Micobject.settings.sourcename, out imic))
                {
                    var vl = MainForm.InstanceReference.GetVolumeLevel(imic);
                    if (vl != null)
                    {
                        AudioSource = vl.AudioSource;
                        if (AudioSource?.RecordingFormat != null)
                        {
                            Micobject.settings.samples = AudioSource.RecordingFormat.SampleRate;
                            Micobject.settings.channels = AudioSource.RecordingFormat.Channels;
                        }

                        vl.AudioDeviceDisabled -= MicrophoneDisabled;
                        vl.AudioDeviceEnabled -= MicrophoneEnabled;
                        vl.AudioDeviceReConnected -= MicrophoneReconnected;

                        vl.AudioDeviceDisabled += MicrophoneDisabled;
                        vl.AudioDeviceEnabled += MicrophoneEnabled;
                        vl.AudioDeviceReConnected += MicrophoneReconnected;
                    }
                }

                if (AudioSource == null)
                {
                    SetErrorState("Mic source offline");
                    AudioSourceErrorState = true;
                    _requestRefresh = true;
                }
                break;
            case 6: //wav stream
                AudioSource = new WavStream(Micobject.settings.sourcename)
                {
                    RecordingFormat = new WaveFormat(Micobject.settings.samples, 16, Micobject.settings.channels),
                };
                break;
        }

        if (AudioSource != null)
        {
            WaveOut = !string.IsNullOrEmpty(Micobject.settings.deviceout)
                ? new DirectSoundOut(new Guid(Micobject.settings.deviceout), 100)
                : new DirectSoundOut(100);

            AudioSource.AudioFinished -= AudioDeviceAudioFinished;
            AudioSource.DataAvailable -= AudioDeviceDataAvailable;
            AudioSource.LevelChanged -= AudioDeviceLevelChanged;

            AudioSource.AudioFinished += AudioDeviceAudioFinished;
            AudioSource.DataAvailable += AudioDeviceDataAvailable;
            AudioSource.LevelChanged += AudioDeviceLevelChanged;

            var l = new float[Micobject.settings.channels];
            for (int i = 0; i < l.Length; i++)
            {
                l[i] = 0.0f;
            }
            AudioDeviceLevelChanged(this, new LevelChangedEventArgs(l));

            if (!AudioSource.IsRunning && !IsClone && !(AudioSource is IVideoSource))
            {
                lock (_lockobject)
                {
                    AudioSource.Start();
                }
            }
        }

        SoundDetected = false;
        _soundRecentlyDetected = false;
        Alerted = false;
        FlashCounter = DateTime.MinValue;
        ReconnectCount = 0;
        Listening = false;
        LastSoundDetected = Helper.Now;
        UpdateFloorplans(false);
        Micobject.settings.active = true;

        MainForm.NeedsSync = true;
        _requestRefresh = true;

        AudioDeviceEnabled?.Invoke(this, EventArgs.Empty);
    }
    catch (Exception ex)
    {
        ErrorHandler?.Invoke(ex.Message);
    }

    _enabling = false;
}
Example 5: TalkTo
public void TalkTo(CameraWindow cw, bool talk)
{
    if (string.IsNullOrEmpty(Conf.TalkMic))
        return;

    if (_talkSource != null)
    {
        _talkSource.Stop();
        _talkSource = null;
    }
    if (_talkTarget != null)
    {
        _talkTarget.Stop();
        _talkTarget = null;
    }

    if (!talk)
    {
        if (cw.VolumeControl != null)
        {
            cw.VolumeControl.Listening = false;
        }
        return;
    }

    Application.DoEvents();

    TalkCamera = cw;
    _talkSource = new TalkDeviceStream(Conf.TalkMic) { RecordingFormat = new WaveFormat(8000, 16, 1) };
    _talkSource.AudioFinished += _talkSource_AudioFinished;

    if (!_talkSource.IsRunning)
        _talkSource.Start();

    _talkTarget = TalkHelper.GetTalkTarget(cw.Camobject, _talkSource);
    _talkTarget.TalkStopped += TalkTargetTalkStopped;
    _talkTarget.Start();

    //auto listen
    if (cw.VolumeControl != null)
    {
        cw.VolumeControl.Listening = true;
    }
}
Example 6: btnRecord_Click
/// <summary>
/// Starts recording audio from the sound card
/// </summary>
///
private void btnRecord_Click(object sender, EventArgs e)
{
    // Create capture device
    source = new AudioCaptureDevice()
    {
        // Listen on 22050 Hz
        DesiredFrameSize = 4096,
        SampleRate = 22050,

        // We will be reading 16-bit PCM
        Format = SampleFormat.Format16Bit
    };

    // Wire up some events
    source.NewFrame += source_NewFrame;
    source.AudioSourceError += source_AudioSourceError;

    // Create buffer for wavechart control
    current = new float[source.DesiredFrameSize];

    // Create stream to store file
    stream = new MemoryStream();
    encoder = new WaveEncoder(stream);

    // Start
    source.Start();
    updateButtons();
}
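The WaveEncoder created above typically writes each incoming frame into the MemoryStream from inside source_NewFrame (for example via encoder.Encode(eventArgs.Signal)). A matching stop handler is not shown in this example; the following is a minimal, hypothetical sketch that uses IAudioSource's SignalToStop/WaitForStop for a graceful shutdown. The button name and the "recording.wav" path are placeholders, and it assumes the encoder has written a complete WAV stream into the MemoryStream:

private void btnStop_Click(object sender, EventArgs e)
{
    // Ask the capture device to stop and wait for it to finish
    source.SignalToStop();
    source.WaitForStop();

    // Persist the encoded WAV data held in the memory stream
    File.WriteAllBytes("recording.wav", stream.ToArray());

    updateButtons();
}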
Example 7: TalkTo
public void TalkTo(CameraWindow cw, bool talk)
{
    if (_talkSource != null)
    {
        _talkSource.Stop();
        _talkSource = null;
    }
    if (_talkTarget != null)
    {
        _talkTarget.Stop();
        _talkTarget = null;
    }

    if (!talk)
    {
        if (cw.VolumeControl != null)
        {
            cw.VolumeControl.Listening = false;
        }
        return;
    }

    Application.DoEvents();

    TalkCamera = cw;
    _talkSource = new TalkDeviceStream(Conf.TalkMic) { RecordingFormat = new WaveFormat(8000, 16, 1) };
    _talkSource.AudioSourceError += _talkSource_AudioSourceError;

    if (!_talkSource.IsRunning)
        _talkSource.Start();

    switch (cw.Camobject.settings.audiomodel)
    {
        default:
            _talkTarget = new TalkFoscam(cw.Camobject.settings.audioip, cw.Camobject.settings.audioport, cw.Camobject.settings.audiousername, cw.Camobject.settings.audiopassword, _talkSource);
            break;
        case "iSpyServer":
            _talkTarget = new TalkiSpyServer(cw.Camobject.settings.audioip, cw.Camobject.settings.audioport, _talkSource);
            break;
        case "NetworkKinect":
            _talkTarget = new TalkNetworkKinect(cw.Camobject.settings.audioip, cw.Camobject.settings.audioport, _talkSource);
            break;
        case "Axis":
            _talkTarget = new TalkAxis(cw.Camobject.settings.audioip, cw.Camobject.settings.audioport, cw.Camobject.settings.audiousername, cw.Camobject.settings.audiopassword, _talkSource);
            break;
    }

    _talkTarget.TalkStopped += TalkTargetTalkStopped;
    _talkTarget.Start();

    //auto listen
    if (cw.VolumeControl != null)
    {
        cw.VolumeControl.Listening = true;
    }
}
Example 8: StartRecording
/// <summary>
/// Starts recording. Only works if the player has
/// already been started and is grabbing frames.
/// </summary>
///
public void StartRecording()
{
    if (IsRecording || !IsPlaying)
        return;

    Rectangle area = CaptureRegion;
    string fileName = newFileName();

    int height = area.Height;
    int width = area.Width;
    int framerate = 1000 / screenStream.FrameInterval;
    int videoBitRate = 1200 * 1000;
    int audioBitRate = 320 * 1000;

    OutputPath = Path.Combine(main.CurrentDirectory, fileName);
    RecordingStartTime = DateTime.MinValue;

    videoWriter = new VideoFileWriter();

    // Create audio devices which have been checked
    var audioDevices = new List<AudioCaptureDevice>();
    foreach (var audioViewModel in AudioCaptureDevices)
    {
        if (!audioViewModel.Checked)
            continue;

        var device = new AudioCaptureDevice(audioViewModel.DeviceInfo);
        device.AudioSourceError += device_AudioSourceError;
        device.Format = SampleFormat.Format16Bit;
        device.SampleRate = Settings.Default.SampleRate;
        device.DesiredFrameSize = 2 * 4098;
        device.Start();

        audioDevices.Add(device);
    }

    if (audioDevices.Count > 0) // Check if we need to record audio
    {
        audioDevice = new AudioSourceMixer(audioDevices);
        audioDevice.AudioSourceError += device_AudioSourceError;
        audioDevice.NewFrame += audioDevice_NewFrame;
        audioDevice.Start();

        videoWriter.Open(OutputPath, width, height, framerate, VideoCodec.H264, videoBitRate,
            AudioCodec.MP3, audioBitRate, audioDevice.SampleRate, audioDevice.Channels);
    }
    else
    {
        videoWriter.Open(OutputPath, width, height, framerate, VideoCodec.H264, videoBitRate);
    }

    HasRecorded = false;
    IsRecording = true;
}
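Example 8 starts the AudioSourceMixer but does not show audioDevice_NewFrame. A rough, hypothetical sketch of how the mixed PCM frames could be forwarded to the video writer's audio track follows; the WriteAudioFrame call, the Signal.RawData access, and the recording-state check are assumptions based on common Accord usage, not the original project's code:

void audioDevice_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // Ignore frames that arrive before recording has actually started
    if (!IsRecording)
        return;

    // Forward the raw 16-bit PCM bytes of the mixed frame to the audio track
    videoWriter.WriteAudioFrame(eventArgs.Signal.RawData);
}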
Example 9: ListenForCommands
public void ListenForCommands(object sender, EventArgs e, bool forCalibration)
{
    ListOfPowerSpectrum = new List<double[]>();
    soundsDetectedIndexes = new List<int>();

    var audioDevices = new AudioDeviceCollection(AudioDeviceCategory.Capture).First();

    source = new AudioCaptureDevice(audioDevices)
    {
        // Capture at 22050 Hz
        DesiredFrameSize = 2048,
        SampleRate = 22050
    };

    source.NewFrame += source_NewFrame;
    source.AudioSourceError += source_AudioSourceError;

    // Start it!
    Console.WriteLine("Start");
    source.Start();

    if (forCalibration)
    {
        Thread.Sleep(TimeSpan.FromSeconds(10));
        Console.WriteLine("Stop");
        source.Stop();
    }
}
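The source_NewFrame handler referenced in Example 9 presumably fills ListOfPowerSpectrum on every captured frame. A hypothetical sketch using the same Accord FFT pattern as in Example 3 is shown below; the original detection and indexing logic (soundsDetectedIndexes) is omitted, and the GetPowerSpectrum call is an assumption:

void source_NewFrame(object sender, NewFrameEventArgs eventArgs)
{
    // Fourier-transform the captured frame
    ComplexSignal complex = eventArgs.Signal.ToComplex();
    complex.ForwardFourierTransform();

    // Store the power spectrum of the first channel for later analysis
    double[] power = Accord.Audio.Tools.GetPowerSpectrum(complex.GetChannel(0));
    ListOfPowerSpectrum.Add(power);
}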
Example 10: btnRecord_Click
private void btnRecord_Click(object sender, EventArgs e)
{
    // Create capture device
    source = new AudioCaptureDevice();
    source.DesiredFrameSize = 4096;
    source.SampleRate = 22050;
    source.NewFrame += source_NewFrame;
    source.AudioSourceError += source_AudioSourceError;

    // Create buffer for wavechart control
    current = new float[source.DesiredFrameSize];

    // Create stream to store file
    stream = new MemoryStream();
    encoder = new WaveEncoder(stream);

    // Start
    source.Start();
    updateButtons();
}