本文整理汇总了C#中NAudio.Wave.WaveFormat类的典型用法代码示例。如果您正苦于以下问题:C# WaveFormat类的具体用法?C# WaveFormat怎么用?C# WaveFormat使用的例子?那么恭喜您, 这里精选的类代码示例或许可以为您提供帮助。
WaveFormat类属于NAudio.Wave命名空间,在下文中一共展示了WaveFormat类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: Recorder
/// <summary>
/// Sets up a 16 kHz, 16-bit, mono WaveIn capture session that records
/// into a uniquely named temporary WAV file under C:\temp.
/// </summary>
public Recorder()
{
    waveIn = new WaveIn();
    waveIn.DeviceNumber = 0; // default capture device
    waveIn.DataAvailable += waveIn_DataAvailable;
    waveIn.RecordingStopped += waveIn_RecordingStopped;

    // 16 kHz / 16-bit / mono PCM.
    recordingFormat = new WaveFormat(16000, 16, 1);
    waveIn.WaveFormat = recordingFormat;

    // CreateDirectory is a no-op when the directory already exists,
    // so no Directory.Exists pre-check is needed.
    string path = @"C:\temp";
    Directory.CreateDirectory(path);

    // Path.Combine avoids hard-coding the directory separator.
    TempWavFileName = Path.Combine(path, Guid.NewGuid().ToString() + ".wav");
    writer = new WaveFileWriter(TempWavFileName, recordingFormat);
}
示例2: CallbackWaveProvider16
/// <summary>
/// Wraps a render callback as a 16-bit wave provider, pre-allocating a
/// small silence buffer (bytes-per-sample * channels * 2).
/// </summary>
public CallbackWaveProvider16(WaveFormat format, RenderAudioBufferDelegate renderCallback, object syncLock)
{
    m_Format = format;
    RenderCallback = renderCallback;
    SyncLock = syncLock;

    int bytesPerSample = m_Format.BitsPerSample / 8;
    SilenceBuffer = new byte[bytesPerSample * m_Format.Channels * 2];
}
示例3: RecordableMixerStream32
/// <summary>
/// Creates a new 32 bit WaveMixerStream
/// </summary>
/// <summary>
/// Creates a new 32 bit WaveMixerStream using an IEEE-float wave format.
/// </summary>
public RecordableMixerStream32(int sampleRate, int channels)
{
    this.inputStreams = new List<WaveStream>();
    this.bytesPerSample = 4; // 32-bit IEEE float samples
    this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
    this.autoStop = true;
}
示例4: ComplexFilter
/// <summary>
/// Builds a complex filter from a wave format, a window function and a
/// filter implementation; all three arguments are mandatory.
/// </summary>
public ComplexFilter(WaveFormat format, IWindowFunction windowFunction, IFilterImplementation filterImplementation)
{
    // Validate in declaration order so the first missing argument is reported.
    if (format == null)
    {
        throw new ArgumentNullException("format", "Format cannot be null");
    }
    if (windowFunction == null)
    {
        throw new ArgumentNullException("windowFunction", "Window function cannot be null");
    }
    if (filterImplementation == null)
    {
        throw new ArgumentNullException("filterImplementation", "Filter implementation cannot be null");
    }

    this.filterOrder = 20; // default filter order
    this.format = format;
    this.windowFunction = windowFunction;
    this.FilterImplementation = filterImplementation;

    // Recompute coefficients whenever the filter collection changes.
    this.filters = new ObservableCollection<IDigitalFilter>();
    this.filters.CollectionChanged += filters_CollectionChanged;
    updateCoefficients();
}
示例5: StartCapture
/// <summary>
/// Begins recording on the shared capture device using the supplied format.
/// EnsureDeviceIsCreated() runs first so captureDevice is available;
/// IsCapturing is set only after StartRecording has been invoked.
/// </summary>
/// <param name="captureFormat">Wave format to record in.</param>
private void StartCapture(WaveFormat captureFormat)
{
EnsureDeviceIsCreated();
captureDevice.WaveFormat = captureFormat;
captureDevice.StartRecording();
IsCapturing = true;
}
示例6: MediaFoundationTransform
/// <summary>
/// Constructs a new MediaFoundationTransform wrapper.
/// Will read one second at a time.
/// </summary>
/// <param name="sourceProvider">The source provider for input data to the transform</param>
/// <param name="outputFormat">The desired output format</param>
public MediaFoundationTransform(IWaveProvider sourceProvider, WaveFormat outputFormat)
{
    this.sourceProvider = sourceProvider;
    this.outputWaveFormat = outputFormat;

    // Pre-size working buffers; the output buffer is grown later if needed,
    // this is just a reasonable starting size.
    sourceBuffer = new byte[ComputeSourceBufferSize(sourceProvider)];
    outputBuffer = new byte[ComputeOutputBufferSize(outputFormat)];
}
示例7: Initialise
/// <summary>
/// Initialises playback against a WaveOut driver: queries its capabilities,
/// builds the per-channel buffer array and an IEEE-float output format, then
/// wires a buffered provider into the driver.
/// </summary>
/// <param name="format">Source format; only its sample rate is used.</param>
/// <param name="driver">The WaveOut device to play through.</param>
public void Initialise(WaveFormat format, WaveOut driver)
{
    if (driver == null)
    {
        // BUG FIX: the message previously said "WaveIn" although this
        // parameter is a WaveOut playback device.
        throw new ArgumentNullException("driver", "Must specify a WaveOut device instance");
    }
    if (format == null)
    {
        throw new ArgumentNullException("format", "Must specify an audio format");
    }
    var caps = WaveOut.GetCapabilities(driver.DeviceNumber);
    device = new WaveOutDeviceData
    {
        Driver = driver,
        Name = caps.ProductName,
        Channels = caps.Channels,
        Buffers = new float[caps.Channels][]
    };
    // Output is IEEE float at the source sample rate with the device's channel count.
    Format = WaveFormat.CreateIeeeFloatWaveFormat(format.SampleRate, caps.Channels);
    OutputBuffer = new BufferedWaveProvider(Format);
    OutputBuffer.DiscardOnBufferOverflow = true; // drop on overflow rather than throw
    driver.Init(OutputBuffer);
    mapOutputs();
}
示例8: WaveFileReader
/// <summary>
/// Reads the RIFF/WAVE header from the stream and records the format,
/// data-chunk position/length and the remaining RIFF chunks.
/// </summary>
/// <param name="inputStream">Stream positioned at the start of WAV content.</param>
/// <param name="ownInput">True when this reader owns (and must dispose) the stream.</param>
private WaveFileReader(Stream inputStream, bool ownInput)
{
this.waveStream = inputStream;
var chunkReader = new WaveFileChunkReader();
try
{
chunkReader.ReadWaveHeader(inputStream);
this.waveFormat = chunkReader.WaveFormat;
this.dataPosition = chunkReader.DataChunkPosition;
this.dataChunkLength = chunkReader.DataChunkLength;
this.chunks = chunkReader.RiffChunks;
}
catch
{
// If header parsing fails and we own the stream, dispose it here before
// rethrowing — the caller never receives a reader it could dispose.
if (ownInput)
{
inputStream.Dispose();
}
throw;
}
Position = 0;
this.ownInput = ownInput;
}
示例9: AutoDisposeSampleProvider
/// <summary>
/// Sample provider that forwards to an inner provider and bundles the
/// supplied disposables so they can be disposed together with it.
/// </summary>
public AutoDisposeSampleProvider(ISampleProvider provider,
    IEnumerable<IDisposable> disposables)
{
    _provider = provider;
    WaveFormat = provider.WaveFormat; // mirror the wrapped provider's format
    _disposables = new CompositeDisposable(disposables);
}
示例10: MediaBankBase
/// <summary>
/// Base class for media banks: stores the target format, creates a Random
/// instance and immediately invokes LoadMedia().
/// </summary>
/// <param name="targetFormat">Format that loaded media should target.</param>
protected MediaBankBase(WaveFormat targetFormat)
{
TargetWaveFormat = targetFormat;
Random = new Random();
// NOTE(review): LoadMedia() appears to be a virtual/abstract member called
// from the base constructor, so derived overrides run before the derived
// constructor body completes — confirm this is intentional.
LoadMedia();
}
示例11: AsioCard
/// <summary>
/// Represents an ASIO sound card: an audio format plus driver and the
/// input/output channel mappers. All four arguments are required.
/// </summary>
public AsioCard(WaveFormat format, AsioOut driver, AsioInputMapper inputMapper, AsioOutputMapper outputMapper)
{
    // Checks run in parameter order so the first missing argument is reported.
    if (format == null)
    {
        throw new ArgumentNullException("format", "Must specify an audio format");
    }
    if (driver == null)
    {
        throw new ArgumentNullException("driver", "Asio driver cannot be null");
    }
    if (inputMapper == null)
    {
        throw new ArgumentNullException("inputMapper", "Asio input mapper cannot be null");
    }
    if (outputMapper == null)
    {
        throw new ArgumentNullException("outputMapper", "Asio output mapper cannot be null");
    }

    this.driver = driver;
    this.format = format;
    this.inputMapper = inputMapper;
    this.outputMapper = outputMapper;
}
示例12: ResampleIfNeeded
/// <summary>
/// Returns the node unchanged when its format already matches the target;
/// otherwise adapts the channel count (mono/stereo conversions only) and
/// wraps the provider in a WDL resampler at the target sample rate.
/// </summary>
public static ISampleProvider ResampleIfNeeded(this ISampleProvider node, WaveFormat format)
{
    // Fast path: nothing to do when the formats are identical.
    if (node.WaveFormat.Equals(format))
    {
        return node;
    }

    ISampleProvider adapted = node;
    int sourceChannels = node.WaveFormat.Channels;
    int targetChannels = format.Channels;
    if (sourceChannels != targetChannels)
    {
        if (sourceChannels == 1 && targetChannels == 2)
        {
            adapted = adapted.ToStereo();
        }
        else if (sourceChannels == 2 && targetChannels == 1)
        {
            adapted = adapted.ToMono();
        }
        else
        {
            // Only mono<->stereo conversions are supported.
            throw new ArgumentException("Cannot change channel count from " + node.WaveFormat.Channels + " to " + format.Channels);
        }
    }
    return new WdlResamplingSampleProvider(adapted, format.SampleRate);
}
示例13: WaveInStream
/// <summary>
/// Creates a new Wave input stream
/// </summary>
/// <param name="deviceNumber">The device to open - 0 is default</param>
/// <param name="desiredFormat">The PCM format to record in</param>
/// <param name="callbackWindow">If this parameter is non-null, the Wave In Messages
/// will be sent to the message loop of the supplied control. This is considered a
/// safer way to use the waveIn functionality</param>
public WaveInStream(int deviceNumber, WaveFormat desiredFormat, System.Windows.Forms.Control callbackWindow)
{
this.waveFormat = desiredFormat;
callback = new WaveInterop.WaveCallback(Callback);
if (callbackWindow == null)
{
// Function-callback mode: waveInOpen delivers notifications via the delegate.
MmException.Try(WaveInterop.waveInOpen(out waveInHandle, deviceNumber, desiredFormat, callback, 0, WaveInterop.CallbackFunction), "waveInOpen");
}
else
{
// Window-callback mode: subclass the supplied control's window so Wave In
// messages arrive on its message loop instead of an arbitrary thread.
waveInWindow = new WaveWindowNative(callback);
MmException.Try(WaveInterop.waveInOpenWindow(out waveInHandle, deviceNumber, desiredFormat, callbackWindow.Handle, 0, WaveInterop.CallbackWindow), "waveInOpen");
waveInWindow.AssignHandle(callbackWindow.Handle);
}
// Default to three buffers of 100ms each
// (AverageBytesPerSecond / 10 = bytes for 100 ms of audio).
int bufferSize = desiredFormat.AverageBytesPerSecond / 10;
numBuffers = 3;
buffers = new WaveInBuffer[numBuffers];
for (int n = 0; n < numBuffers; n++)
{
buffers[n] = new WaveInBuffer(waveInHandle, bufferSize);
}
}
示例14: StructureSizeIsCorrect
/// <summary>
/// Verifies the marshalled sizes: 18 bytes for WaveFormat and
/// 18 + 32 bytes for AdpcmWaveFormat.
/// </summary>
public void StructureSizeIsCorrect()
{
    WaveFormat waveFormat = new WaveFormat(8000, 16, 1);
    Assert.AreEqual(18, Marshal.SizeOf(waveFormat), "WaveFormat Size");
    AdpcmWaveFormat adpcmWaveFormat = new AdpcmWaveFormat(8000, 1);
    // BUG FIX: the failure message previously said "WaveFormat Size",
    // copy-pasted from the assertion above; this one checks AdpcmWaveFormat.
    Assert.AreEqual(18 + 32, Marshal.SizeOf(adpcmWaveFormat), "AdpcmWaveFormat Size");
}
示例15: AddInputStream
/// <summary>
/// Add a new input to the mixer
/// </summary>
/// <param name="waveStream">The wave input to add</param>
/// <exception cref="ArgumentException">
/// Thrown when the stream is not 32-bit IEEE float, or does not match the
/// format adopted from the first input.</exception>
public void AddInputStream(WaveStream waveStream)
{
if (waveStream.WaveFormat.Encoding != WaveFormatEncoding.IeeeFloat)
throw new ArgumentException("Must be IEEE floating point", "waveStream");
if (waveStream.WaveFormat.BitsPerSample != 32)
throw new ArgumentException("Only 32 bit audio currently supported", "waveStream");
if (inputStreams.Count == 0)
{
// first one - set the format
int sampleRate = waveStream.WaveFormat.SampleRate;
int channels = waveStream.WaveFormat.Channels;
this.waveFormat = WaveFormat.CreateIeeeFloatWaveFormat(sampleRate, channels);
}
else
{
// Subsequent inputs must match the format adopted from the first stream.
if (!waveStream.WaveFormat.Equals(waveFormat))
throw new ArgumentException("All incoming channels must have the same format", "waveStream");
}
lock (inputsLock)
{
this.inputStreams.Add(waveStream);
// The mixer's length is the longest of its inputs.
this.length = Math.Max(this.length, waveStream.Length);
// get to the right point in this input file
waveStream.Position = Position;
}
}