本文整理汇总了C#中NAudio.Wave.AudioFileReader.Read方法的典型用法代码示例。如果您正苦于以下问题:C# AudioFileReader.Read方法的具体用法?C# AudioFileReader.Read怎么用?C# AudioFileReader.Read使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类NAudio.Wave.AudioFileReader
的用法示例。
在下文中一共展示了AudioFileReader.Read方法的11个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: CachedSound
/// <summary>
/// Reads an audio file fully into memory as 32-bit float samples,
/// resampling to 44.1 kHz stereo when the source format differs.
/// </summary>
/// <param name="audioFileName">path of the audio file to load</param>
public CachedSound(string audioFileName)
{
    using (var audioFileReader = new AudioFileReader(audioFileName))
    {
        WaveFormat = audioFileReader.WaveFormat;
        if (WaveFormat.SampleRate != 44100 || WaveFormat.Channels != 2)
        {
            // Source is not 44.1 kHz stereo: run it through the DMO resampler first.
            using (var resampled = new ResamplerDmoStream(audioFileReader, WaveFormat.CreateIeeeFloatWaveFormat(44100, 2)))
            {
                var resampledSampleProvider = resampled.ToSampleProvider();
                WaveFormat = resampledSampleProvider.WaveFormat;
                // BUGFIX: Length is a byte count and IEEE-float samples are 4 bytes
                // each, so divide by 4 for the capacity hint (the original reserved
                // four times the needed space in this branch).
                var wholeFile = new List<float>((int)(resampled.Length / 4));
                // Read roughly one second of audio per iteration.
                var readBuffer = new float[resampled.WaveFormat.SampleRate * resampled.WaveFormat.Channels];
                int samplesRead;
                while ((samplesRead = resampledSampleProvider.Read(readBuffer, 0, readBuffer.Length)) > 0)
                {
                    // Only the first samplesRead entries are valid on the final read.
                    wholeFile.AddRange(readBuffer.Take(samplesRead));
                }
                AudioData = wholeFile.ToArray();
            }
        }
        else
        {
            // Already 44.1 kHz stereo: read the float samples straight through.
            var wholeFile = new List<float>((int)(audioFileReader.Length / 4));
            var readBuffer = new float[audioFileReader.WaveFormat.SampleRate * audioFileReader.WaveFormat.Channels];
            int samplesRead;
            while ((samplesRead = audioFileReader.Read(readBuffer, 0, readBuffer.Length)) > 0)
            {
                wholeFile.AddRange(readBuffer.Take(samplesRead));
            }
            AudioData = wholeFile.ToArray();
        }
    }
}
示例2: Main
/// <summary>
/// Demo: streams "samples/sample.wav" to the audio device in 1024-sample
/// chunks, waits ten seconds, then releases the device.
/// </summary>
public static void Main(string[] argv)
{
    var playback = new AudioDevice();
    ISampleProvider source = new AudioFileReader("samples/sample.wav");
    var chunk = new float[1024];
    for (;;)
    {
        if (source.Read(chunk, 0, chunk.Length) <= 0)
            break;
        playback.WriteSamples(chunk);
    }
    System.Threading.Thread.Sleep(10000);
    playback.Dispose();
}
示例3: CachedSound
/// <summary>
/// Caches the full contents of an audio file in memory as float samples,
/// in the file's native format.
/// </summary>
/// <param name="audioFileName">path of the audio file to cache</param>
public CachedSound(string audioFileName)
{
    using (var reader = new AudioFileReader(audioFileName))
    {
        WaveFormat = reader.WaveFormat;
        // Length is in bytes; float samples are 4 bytes, hence the /4 capacity hint.
        var sampleList = new List<float>((int)(reader.Length / 4));
        // Buffer sized for one second of audio.
        var chunk = new float[reader.WaveFormat.SampleRate * reader.WaveFormat.Channels];
        for (int count; (count = reader.Read(chunk, 0, chunk.Length)) > 0; )
        {
            sampleList.AddRange(chunk.Take(count));
        }
        AudioData = sampleList.ToArray();
    }
}
示例4: ReadInAllSamples
/// <summary>
/// Decodes an entire audio file into a single float sample array.
/// </summary>
/// <param name="file">path of the audio file to read</param>
/// <returns>all decoded samples, in stream order</returns>
public static float[] ReadInAllSamples(string file)
{
    // BUGFIX: AudioFileReader is IDisposable; the original leaked the file handle.
    using (var reader = new AudioFileReader(file))
    {
        var allSamples = new List<float>();
        var buffer = new float[16384];
        int samplesRead;
        while ((samplesRead = reader.Read(buffer, 0, buffer.Length)) > 0)
        {
            // BUGFIX: only samplesRead entries are valid; the original appended the
            // whole buffer, duplicating stale samples after a partial (final) read.
            for (int i = 0; i < samplesRead; i++)
                allSamples.Add(buffer[i]);
        }
        return allSamples.ToArray();
    }
}
示例5: Main
/// <summary>
/// Demo: decodes "samples/sample.wav" into memory and plots the waveform
/// at one point per millisecond of 44.1 kHz audio.
/// </summary>
public static void Main(string[] argv)
{
    ISampleProvider reader = new AudioFileReader("samples/sample.wav");
    List<float> allSamples = new List<float>();
    float[] samples = new float[1024];
    int samplesRead;
    while ((samplesRead = reader.Read(samples, 0, samples.Length)) > 0)
    {
        // BUGFIX: only the first samplesRead entries are valid; the original
        // copied the whole buffer and appended stale samples on a partial read.
        for (int i = 0; i < samplesRead; i++)
            allSamples.Add(samples[i]);
    }
    float[] waveform = allSamples.ToArray();
    Plot plot = new Plot("Wave Plot", 512, 512);
    // 44100 / 1000 = samples per millisecond at 44.1 kHz.
    plot.plot(waveform, 44100 / 1000, Color.Red);
    Application.Run(plot);
}
示例6: CachedSound
/// <summary>
/// Load the audio file into memory (as raw bytes) using the right reader.
/// </summary>
/// <param name="audioFileName">path of the audio file to load</param>
/// <exception cref="CachedSoundFileNotExistsException">
/// Thrown when the audio file does not exist on disk.
/// </exception>
public CachedSound(string audioFileName)
{
    if (!File.Exists(audioFileName))
    {
        throw new CachedSoundFileNotExistsException("The audio file doesn't exists");
    }
    FilePath = audioFileName;
    using (var reader = new AudioFileReader(audioFileName))
    {
        // TODO: could add resampling in here if required
        WaveFormat = reader.WaveFormat;
        var bytes = new List<byte>((int)reader.Length);
        // NOTE(review): buffer length is SampleRate*Channels *bytes*, not samples,
        // so each read covers less than a second — still a valid chunk size.
        var chunk = new byte[reader.WaveFormat.SampleRate * reader.WaveFormat.Channels];
        for (int count; (count = reader.Read(chunk, 0, chunk.Length)) > 0; )
        {
            bytes.AddRange(chunk.Take(count));
        }
        AudioData = bytes.ToArray();
    }
}
示例7: waveformGenerateWorker_DoWork
/// <summary>
/// BackgroundWorker DoWork handler: decodes the audio file named in the
/// WaveformGenerationParams argument and publishes a float array of waveform
/// data (left channel, right channel, or a mono mix, depending on
/// stereoProcessing) through the WaveformData property on the dispatcher.
/// </summary>
/// <param name="sender">the BackgroundWorker raising the event</param>
/// <param name="e">e.Argument carries a WaveformGenerationParams instance</param>
private void waveformGenerateWorker_DoWork(object sender, DoWorkEventArgs e)
{
WaveformGenerationParams waveformParams = e.Argument as WaveformGenerationParams;
/*
float[] audio = CommonUtils.Audio.NAudio.AudioUtilsNAudio.ReadMonoFromFile(waveformParams.Path, 44100, 0, 0);
Dispatcher.CurrentDispatcher.Invoke(new Action(() =>
{
WaveformData = audio;
}));
return;
*/
// AudioFileReader is both an ISampleProvider and a WaveStream, so the same
// object is read from below and wrapped by WaveChannel32 here.
ISampleProvider sampleProvider = new AudioFileReader(waveformParams.Path);
WaveStream fileWaveStream = (WaveStream) sampleProvider;
WaveChannel32 waveformInputStream = new WaveChannel32(fileWaveStream);
waveformInputStream.PadWithZeroes = false;
waveformInputStream.Sample += waveStream_Sample;
// NOTE(review): frameCount and waveformLength are computed but never used
// below — presumably leftovers from an earlier fixed-resolution version.
int frameLength = fftDataSize;
int frameCount = (int)((double)waveformInputStream.Length / (double)frameLength);
int waveformLength = frameCount * 2;
float[] samples = new float[frameLength];
List<float> floatList = new List<float>();
// NOTE(review): the final Read may fill only part of `samples`, yet the whole
// buffer is processed — a few stale trailing samples can leak into the data.
while(sampleProvider.Read(samples, 0, samples.Length) > 0)
{
if (waveformInputStream.WaveFormat.Channels == 1) {
// Mono: take the frame as-is.
floatList.AddRange(samples);
} else if (waveformInputStream.WaveFormat.Channels == 2) {
// Stereo: samples are interleaved L,R,L,R...
switch(stereoProcessing) {
case StereoProcessingType.CHANNEL_STEREO_LEFT:
// Keep only the left channel.
for (int i = 0; i < samples.Length; i+=2) {
float left = samples[i];
float right = samples[i+1];
floatList.Add(left);
}
break;
case StereoProcessingType.CHANNEL_STEREO_RIGHT:
// Keep only the right channel.
for (int i = 0; i < samples.Length; i+=2) {
float left = samples[i];
float right = samples[i+1];
floatList.Add(right);
}
break;
case StereoProcessingType.CHANNEL_MONOMIX:
default:
for (int i = 0; i < samples.Length; i+=2) {
float left = samples[i];
float right = samples[i+1];
// Make stored channel data stereo by averaging left and right values.
floatList.Add(( (left + right) / 2.0f));
}
break;
}
}
// Bail out between frames if the UI asked for cancellation.
if (waveformGenerateWorker.CancellationPending)
{
e.Cancel = true;
break;
}
}
// Publish the result on the dispatcher thread.
Dispatcher.CurrentDispatcher.Invoke(new Action(() =>
{
WaveformData = floatList.ToArray();
}));
waveformInputStream.Close();
waveformInputStream.Dispose();
waveformInputStream = null;
}
示例8: Main
/// <summary>
/// Demo: plays a 32-bit float WAV file by streaming raw samples straight
/// to the audio device, then waits ten seconds and releases the device.
/// </summary>
public static void Main(string[] argv)
{
    //ISampleProvider reader = new AudioFileReader(@"C:\Users\perivar.nerseth\Music\Sleep Away16.wav");
    //ISampleProvider reader = new AudioFileReader("samples/sample.wav");
    ISampleProvider source = new AudioFileReader(@"C:\Users\perivar.nerseth\Music\Sleep Away32f.wav");
    var playback = new AudioDevice();
    var chunk = new float[1024];
    for (;;)
    {
        if (source.Read(chunk, 0, chunk.Length) <= 0)
            break;
        playback.WriteSamples(chunk);
    }
    // Give playback time to drain before tearing the device down.
    System.Threading.Thread.Sleep(10000);
    playback.Dispose();
}
示例9: SplitStereoWaveFileToMono
/// <summary>
/// Split a Stereo Wave file into two mono float arrays
/// </summary>
/// <param name="filePath">file to use (expected to contain 2-channel audio)</param>
/// <param name="audioDataLeft">returned float array for the left channel</param>
/// <param name="audioDataRight">returned float array for the right channel</param>
public static void SplitStereoWaveFileToMono(string filePath, out float[] audioDataLeft, out float[] audioDataRight)
{
    using (AudioFileReader pcm = new AudioFileReader(filePath))
    {
        int channels = pcm.WaveFormat.Channels;
        int bytesPerSample = pcm.WaveFormat.BitsPerSample/8;
        // Length is the total byte count of the stream.
        long totalBytes = pcm.Length;
        byte[] buffer = new byte[totalBytes];
        audioDataLeft = new float[totalBytes/bytesPerSample/channels];
        audioDataRight = new float[totalBytes/bytesPerSample/channels];
        // BUGFIX: a single Read call is not guaranteed to fill the buffer;
        // the original could silently truncate longer files. Loop until EOF.
        int bytesRead = 0;
        int chunk;
        while (bytesRead < buffer.Length &&
               (chunk = pcm.Read(buffer, bytesRead, buffer.Length - bytesRead)) > 0)
        {
            bytesRead += chunk;
        }
        int index = 0;
        // Walk the interleaved frames: left sample, then right sample.
        for(int sample = 0; sample < bytesRead/bytesPerSample/channels; sample++)
        {
            if (bytesPerSample == 4) {
                // 32 bit pcm data as float
                audioDataLeft[sample] = BitConverter.ToSingle(buffer, index);
                index += bytesPerSample;
                audioDataRight[sample] = BitConverter.ToSingle(buffer, index);
                index += bytesPerSample;
            } else if (bytesPerSample == 2) {
                // 16 bit pcm data, normalised to [-1, 1)
                audioDataLeft[sample] = (float)BitConverter.ToInt16(buffer, index) / 32768f;
                index += bytesPerSample;
                audioDataRight[sample] = (float)BitConverter.ToInt16(buffer, index) / 32768f;
                index += bytesPerSample;
            }
        }
    }
}
示例10: DrawWaveform
private static Bitmap DrawWaveform(AudioFileReader reader, int width, int height, int style = 0)
{
// calculate number of samples
long nSamples = reader.Length / ((reader.WaveFormat.BitsPerSample * reader.WaveFormat.Channels) / 8);
if (nSamples < 2)
return null;
// drawing position/scaling factors
int yBase = height;
double yScale = -(height - 3);
if (style == 1)
{
yBase = height / 2;
yScale = -((double)height - 3) / 2;
}
double sampleWidth = width / (double)nSamples;
double currPosition = 0;
Bitmap res = new Bitmap(width, height);
using (Graphics g = Graphics.FromImage(res))
using (Pen linePen = new Pen(Color.Red))
using (Brush fillBrush = new SolidBrush(Color.Red))
{
//g.Clear(Color.Black);
// Data for current column
int currColumn = 0;
float minVal = float.PositiveInfinity, maxVal = float.NegativeInfinity;
// Data for previous column
int prevColumn = 0;
int prevMinY = 0, prevMaxY = 0;
// Buffer for reading samples
float[] buffer = new float[8192];
int readCount;
while ((readCount = reader.Read(buffer, 0, 8192)) > 0)
{
// Merge stereo samples to mono
if (reader.WaveFormat.Channels == 2)
{
for (int i = 0, o = 0; i < readCount; i += 2, o++)
buffer[o] = (buffer[i] + buffer[i + 1]) / 2;
readCount >>= 1;
}
// process samples
foreach (float sample in buffer.Take(readCount))
{
minVal = Math.Min(minVal, sample);
maxVal = Math.Max(maxVal, sample);
currPosition += sampleWidth;
// on column change, draw to bitmap
if ((int)currPosition > currColumn)
{
if (!float.IsInfinity(minVal) && !float.IsInfinity(maxVal))
{
// calculate Y coordinates for min & max
int minY = 0, maxY = 0;
if (style == 0)
{
minY = yBase;
maxY = (int)(yBase + yScale * Math.Max(Math.Abs(minVal), Math.Abs(maxVal)));
}
else if (style == 1)
{
minY = (int)(yBase + yScale * minVal);
maxY = (int)(yBase + yScale * maxVal);
}
if (sampleWidth > 1)
{
// more columns than samples, use polygon drawing to fill gapes
g.FillPolygon(fillBrush, new Point[] {
new Point(prevColumn, prevMinY), new Point(prevColumn, prevMaxY),
new Point(currColumn, maxY), new Point(currColumn, minY) });
}
else
{
// more samples than columns, draw lines only
g.DrawLine(linePen, currColumn, minY, currColumn, maxY);
}
// save current data to previous
prevColumn = currColumn;
prevMinY = minY;
prevMaxY = maxY;
}
// update column number and reset accumulators
currColumn = (int)currPosition;
minVal = float.PositiveInfinity;
maxVal = float.NegativeInfinity;
}
}
//.........这里部分代码省略.........
示例11: Main
/// <summary>
/// Demo: computes the spectral flux of FILE with a Hamming-windowed
/// 1024-point FFT, plots the flux curve and starts a playback visualizer.
/// </summary>
public static void Main(string[] argv)
{
    ISampleProvider decoder = new AudioFileReader(FILE);
    FFT fft = new FFT(1024, 44100);
    fft.Window(FFT.HAMMING);
    float[] frame = new float[1024];
    float[] currentSpectrum = new float[1024 / 2 + 1];
    float[] previousSpectrum = new float[1024 / 2 + 1];
    List<float> spectralFlux = new List<float>();
    while (decoder.Read(frame, 0, frame.Length) > 0)
    {
        fft.Forward(frame);
        // Shift current spectrum into previous, then fetch the new one.
        System.Array.Copy(currentSpectrum, 0, previousSpectrum, 0, currentSpectrum.Length);
        System.Array.Copy(fft.GetSpectrum(), 0, currentSpectrum, 0, currentSpectrum.Length);
        // Flux = sum of positive bin-wise differences (rising energy only).
        float flux = 0;
        for (int bin = 0; bin < currentSpectrum.Length; bin++)
        {
            float diff = currentSpectrum[bin] - previousSpectrum[bin];
            if (diff > 0)
                flux += diff;
        }
        spectralFlux.Add(flux);
    }
    Plot plot = new Plot("Hamming Spectral Flux", 1024, 512);
    plot.plot(spectralFlux, 1, Color.Red);
    new PlaybackVisualizer(plot, 1024, FILE);
}