本文整理汇总了C#中NAudio.Wave.WaveFileReader.Close方法的典型用法代码示例。如果您正苦于以下问题:C# WaveFileReader.Close方法的具体用法?C# WaveFileReader.Close怎么用?C# WaveFileReader.Close使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类NAudio.Wave.WaveFileReader
的用法示例。
在下文中一共展示了WaveFileReader.Close方法的11个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: ConvertWavStreamToMp3File
public static void ConvertWavStreamToMp3File(Configuration cfg, ref MemoryStream ms, string savetofilename)
{
//rewind to beginning of stream
ms.Seek(0, SeekOrigin.Begin);
using (var retMs = new MemoryStream())
{
using (var rdr = new WaveFileReader(ms))
{
using (var wtr = new LameMP3FileWriter(savetofilename, rdr.WaveFormat, cfg.MP3BitRate))
{
rdr.CopyTo(wtr);
wtr.Close();
}
rdr.Close();
Console.WriteLine(savetofilename + " ok.");
}
}
}
示例2: ProcessAudioWave
public static void ProcessAudioWave(string fileIn, string fileOut, float newTempo = 1, float newPitch = 1, float newRate = 1)
{
WaveFileReader reader = new WaveFileReader(fileIn);
int numChannels = reader.WaveFormat.Channels;
if (numChannels > 2)
throw new Exception("SoundTouch supports only mono or stereo.");
int sampleRate = reader.WaveFormat.SampleRate;
int bitPerSample = reader.WaveFormat.BitsPerSample;
const int BUFFER_SIZE = 1024 * 16;
SoundStretcher stretcher = new SoundStretcher(sampleRate, numChannels);
WaveFileWriter writer = new WaveFileWriter(fileOut, new WaveFormat(sampleRate, 16, numChannels));
stretcher.Tempo = newTempo;
stretcher.Pitch = newPitch;
stretcher.Rate = newRate;
byte[] buffer = new byte[BUFFER_SIZE];
short[] buffer2 = null;
if (bitPerSample != 16 && bitPerSample != 8)
{
throw new Exception("Not implemented yet.");
}
if (bitPerSample == 8)
{
buffer2 = new short[BUFFER_SIZE];
}
bool finished = false;
while (true)
{
int bytesRead = 0;
if (!finished)
{
bytesRead = reader.Read(buffer, 0, BUFFER_SIZE);
if (bytesRead == 0)
{
finished = true;
stretcher.Flush();
}
else
{
if (bitPerSample == 16)
{
stretcher.PutSamplesFromBuffer(buffer, 0, bytesRead);
}
else if (bitPerSample == 8)
{
for (int i = 0; i < BUFFER_SIZE; i++)
buffer2[i] = (short)((buffer[i] - 128) * 256);
stretcher.PutSamples(buffer2);
}
}
}
bytesRead = stretcher.ReceiveSamplesToBuffer(buffer, 0, BUFFER_SIZE);
writer.Write(buffer, 0, bytesRead);
if (finished && bytesRead == 0)
break;
}
reader.Close();
writer.Close();
}
示例3: GetAudioBufferToAppendTo
/// ------------------------------------------------------------------------------------
protected virtual byte[] GetAudioBufferToAppendTo(string waveFileName)
{
using (var stream = new WaveFileReader(waveFileName))
{
var buffer = new byte[stream.Length];
var count = (int)Math.Min(stream.Length, int.MaxValue);
int offset = 0;
while (stream.Read(buffer, offset, count) > 0)
offset += count;
stream.Close();
return buffer;
}
}
示例4: analyzeFileBtn_Click
private void analyzeFileBtn_Click(object sender, EventArgs e) {
// Only allow either real time or file analysis at a time
if (openFileDialog.ShowDialog() == DialogResult.OK) {
// Make an array of readers from all files selected
List<WaveFileReader> readers = new List<WaveFileReader>(openFileDialog.FileNames.Length);
foreach (string fileName in openFileDialog.FileNames) {
WaveFileReader fileReader = new WaveFileReader(fileName);
if (fileReader.CanRead && fileReader.WaveFormat.SampleRate == 44100 &&
fileReader.WaveFormat.BitsPerSample == 16 && fileReader.WaveFormat.Encoding == WaveFormatEncoding.Pcm) {
readers.Add(fileReader);
} else {
MessageBox.Show("Could not open the file " + openFileDialog.FileName + " as WAV file. Use only 44.1khz 16-bit PCM WAV.");
fileReader.Close();
}
}
if (readers.Count > 0) {
analyzeFileBtn.Enabled = false;
// Start a thread to read the flie piece by piece and shuffle it to be analyzed
// without blocking the ui in the mean time
Thread readingThread = new Thread(() => WavReadingThreadStart(readers.ToArray()));
readingThread.Start();
isAnalyzingFile = true;
updateUI();
}
}
}
示例5: PlayAudioFilePaused
public virtual void PlayAudioFilePaused()
{
string currentDirectory = AppDomain.CurrentDomain.BaseDirectory;
string wavFile = Path.Combine (currentDirectory, "example.wav");
WaveFileReader reader = new WaveFileReader (wavFile);
Wave16ToFloatProvider converter = new Wave16ToFloatProvider (reader);
_jackOut.Init (converter);
_jackOut.Play ();
_jackOut.Pause ();
Thread.Sleep (100);
_jackOut.Stop ();
Assert.AreEqual (0, reader.Position);
reader.Close ();
}
示例6: PlayAudioFile
public virtual void PlayAudioFile()
{
string currentDirectory = AppDomain.CurrentDomain.BaseDirectory;
string wavFile = Path.Combine (currentDirectory, "example.wav");
WaveFileReader reader = new WaveFileReader (wavFile);
Wave16ToFloatProvider converter = new Wave16ToFloatProvider (reader);
Analyser analyser = new Analyser ();
_client.ProcessFunc += analyser.AnalyseOutAction;
_jackOut.Init (converter);
_jackOut.Play ();
Thread.Sleep (100);
_jackOut.Stop ();
reader.Close ();
Assert.AreNotEqual (0, analyser.NotEmptySamples);
}
示例7: SlowAudio
private static byte[] SlowAudio(byte[] mp3, float newTempo = 0.95f, float newPitch = 1.2f, float newRate = .8f)
{
const int BUFFER_SIZE = 1024 * 16;
byte[] buffer = new byte[BUFFER_SIZE];
var format = new WaveFormat();
try
{
using (var output = new MemoryStream())
{
File.Delete("temp.wav");
File.Delete("temp2.wav");
using (var mp3Stream = new MemoryStream(mp3))
using (Mp3FileReader reader = new Mp3FileReader(mp3Stream))
{
WaveFileWriter.CreateWaveFile("temp.wav", reader);
}
using (WaveFileReader reader = new WaveFileReader("temp.wav"))
{
int numChannels = reader.WaveFormat.Channels;
if (numChannels > 2)
throw new Exception("SoundTouch supports only mono or stereo.");
int sampleRate = reader.WaveFormat.SampleRate;
int bitPerSample = reader.WaveFormat.BitsPerSample;
SoundStretcher stretcher = new SoundStretcher(sampleRate, numChannels);
var writer = new NAudio.Lame.LameMP3FileWriter(output, new WaveFormat(sampleRate, 16, numChannels), 128);
stretcher.Tempo = newTempo;
stretcher.Pitch = newPitch;
stretcher.Rate = newRate;
short[] buffer2 = null;
if (bitPerSample != 16 && bitPerSample != 8)
{
throw new Exception("Not implemented yet.");
}
if (bitPerSample == 8)
{
buffer2 = new short[BUFFER_SIZE];
}
bool finished = false;
while (true)
{
int bytesRead = 0;
if (!finished)
{
bytesRead = reader.Read(buffer, 0, BUFFER_SIZE);
if (bytesRead == 0)
{
finished = true;
stretcher.Flush();
}
else
{
if (bitPerSample == 16)
{
stretcher.PutSamplesFromBuffer(buffer, 0, bytesRead);
}
else if (bitPerSample == 8)
{
for (int i = 0; i < BUFFER_SIZE; i++)
buffer2[i] = (short)((buffer[i] - 128) * 256);
stretcher.PutSamples(buffer2);
}
}
}
bytesRead = stretcher.ReceiveSamplesToBuffer(buffer, 0, BUFFER_SIZE);
writer.Write(buffer, 0, bytesRead);
if (finished && bytesRead == 0)
break;
}
reader.Close();
writer.Close();
}
return output.ToArray();
}
}
finally
{
File.Delete("temp.wav");
File.Delete("temp2.wav");
}
/////////////////////////////
}
示例8: TrimAudioLength
private static string TrimAudioLength(string File)
{
TimeSpan TargetDuration = new TimeSpan(0, 29, 0); // 29 minutes
WaveFileReader reader = new WaveFileReader(File);
//Calculate Tempo
float PercentChange = CalculateTempo(reader.TotalTime, TargetDuration);
string FileOut = File.Replace(Settings.Default.SpeechCleanedDirectory, Settings.Default.TrimmedDirectory);
processWave(File, FileOut, 1 + PercentChange * 0.01f, 1.0f, 1.0f);
//AddIntroOutro(FileOut);
reader.Close();
return FileOut;
}
示例9: waveIn_RecordingStopped
private void waveIn_RecordingStopped(object sender, EventArgs e)
{
if (writeToFile == 1)
{
waveIn.Dispose();
waveIn = null;
writer.Close();
writer = null;
int bytes_to_read = (rec_times + 1) * 6400;
byte[] wav_bytes = new byte[bytes_to_read];
WaveFileReader wfr = new WaveFileReader("file.wav");
if (wfr.Length < bytes_to_read)
{
wfr.Read(wav_bytes, 0, (int)wfr.Length);
}
else
{
wfr.Position = wfr.Length - 1 - bytes_to_read;
wfr.Read(wav_bytes, 0, bytes_to_read);
}
wfr.Dispose();
wfr.Close();
WaveIn second_waveIn = new WaveIn();
second_waveIn.DeviceNumber = 0;
second_waveIn.WaveFormat = new WaveFormat(16000, 2);
WaveFileWriter second_writer = new WaveFileWriter("cmd.wav", second_waveIn.WaveFormat);
second_waveIn.StartRecording();
second_writer.Write(wav_bytes, 0, bytes_to_read);
second_waveIn.StopRecording();
second_waveIn.Dispose();
second_waveIn = null;
second_writer.Close();
second_writer = null;
listBox1.Items.Add("CONVERTING");
listBox1.SelectedIndex = listBox1.Items.Count - 1;
Wav2Flac("cmd.wav", "file.flac");
result = GoogleSpeechRequest(16000);
string res = result;
int k = res.IndexOf("utterance\":\"") + "utterance\":\"".Length;
int k1 = res.IndexOf("\"", k + 1);
string cmd = res.Substring(k, k1 - k);
listBox1.Items.Add("RECOGNIZED");
richTextBox1.Text += cmd + "\n";
File.Delete("cmd.wav");
rec_times = 0;
writeToFile = 0;
}
else
if (writeToFile == 0)
{
waveIn.Dispose();
waveIn = null;
writer.Close();
writer = null;
}
}
示例10: ProcessMixingFinal
private void ProcessMixingFinal(RcvData data, int dataSize)
{
string processingFn = string.Format("e:\\{0}_{1}_{2}.wav", data.seqnum, data.extension, data.peernumber);
List<RecInfos> ls0 = lExtension0.FindAll(
delegate(RecInfos list)
{
return list.rcvData.Equals(data) && list.isExtension == 0;
});
List<RecInfos> ls1 = lExtension1.FindAll(
delegate(RecInfos list)
{
return list.rcvData.Equals(data) && list.isExtension == 1;
});
IsExtensionComparer isExtensionCompare = new IsExtensionComparer();
ls0.Sort(isExtensionCompare);
ls1.Sort(isExtensionCompare);
int count = 0;
int count0 = ls0.Count();
int count1 = ls1.Count();
if (count0 - count1 < 0)
count = count0;
else
count = count1;
for (int i = 0; i < count; i++)
{
if (ls0[i].seq == ls1[i].seq)
{
// 믹싱
byte[] wavSrc0 = new byte[160];
byte[] wavSrc1 = new byte[160];
Array.Copy(ls0[i].voice, 12, wavSrc0, 0, wavSrc0.Length);
Array.Copy(ls1[i].voice, 12, wavSrc1, 0, wavSrc1.Length);
WaveMixerStream32 mixer = new WaveMixerStream32();
//mixer.AutoStop = true;
WaveChannel32 channelStm = null;
for (int j = 0; j < 2; j++)
{
MemoryStream memStm = null;
BufferedStream bufStm = null;
RawSourceWaveStream rawSrcStm = null;
WaveFormatConversionStream conversionStm = null;
if (j == 0)
memStm = new MemoryStream(wavSrc0);
else
memStm = new MemoryStream(wavSrc1);
bufStm = new BufferedStream(memStm);
rawSrcStm = new RawSourceWaveStream(bufStm, mulawFormat);
conversionStm = new WaveFormatConversionStream(pcmFormat, rawSrcStm);
channelStm = new WaveChannel32(conversionStm);
mixer.AddInputStream(channelStm);
}
mixer.Position = 0;
if (File.Exists(processingFn))
{
var wavefilestream = new WaveFileReader(processingFn);
byte[] wavefilebyte = new byte[(int)wavefilestream.Length];
int chk0 = wavefilestream.Read(wavefilebyte, 0, wavefilebyte.Length);
Wave32To16Stream to16 = new Wave32To16Stream(mixer);
var conversionStm = new WaveFormatConversionStream(pcmFormat, to16);
byte[] tobyte = new byte[(int)conversionStm.Length];
int chk1 = conversionStm.Read(tobyte, 0, (int)conversionStm.Length);
byte[] desByte = new byte[wavefilebyte.Length + tobyte.Length];
conversionStm.Close();
wavefilestream.Close();
Buffer.BlockCopy(wavefilebyte, 0, desByte, 0, wavefilebyte.Length);
Buffer.BlockCopy(tobyte, 0, desByte, wavefilebyte.Length, tobyte.Length);
using (MemoryStream memStm = new MemoryStream(desByte))
using (BufferedStream buffStm = new BufferedStream(memStm))
using (RawSourceWaveStream rawSrcStm = new RawSourceWaveStream(buffStm, pcmFormat))
{
WaveFileWriter.CreateWaveFile(processingFn, rawSrcStm);
}
}
else
{
var mixedStm = new Wave32To16Stream(mixer);
var convStm = new WaveFormatConversionStream(pcmFormat, mixedStm);
WaveFileWriter.CreateWaveFile(processingFn, convStm);
convStm.Close();
mixedStm.Close();
//.........这里部分代码省略.........
示例11: WaveFileWriting
private void WaveFileWriting(byte[] buff, string fn)
{
if (buff.Length < 1) return;
if (File.Exists(fn))
{
var wavefilestream = new WaveFileReader(fn);
byte[] wavefilebyte = new byte[(int)wavefilestream.Length];
int chk = wavefilestream.Read(wavefilebyte, 0, wavefilebyte.Length);
wavefilestream.Close();
byte[] desByte = new byte[wavefilebyte.Length + buff.Length];
Buffer.BlockCopy(wavefilebyte, 0, desByte, 0, wavefilebyte.Length);
Buffer.BlockCopy(buff, 0, desByte, wavefilebyte.Length, buff.Length);
using (MemoryStream memStm = new MemoryStream(desByte))
using (BufferedStream bufStm = new BufferedStream(memStm, 2048))
using (RawSourceWaveStream rawSrcStm = new RawSourceWaveStream(bufStm, pcmFormat))
{
WaveFileWriter.CreateWaveFile(fn, rawSrcStm);
}
}
else
{
using (MemoryStream memStm = new MemoryStream(buff))
using (BufferedStream bufStm = new BufferedStream(memStm, 2048))
using (RawSourceWaveStream rawSrcStm = new RawSourceWaveStream(bufStm, pcmFormat))
{
WaveFileWriter.CreateWaveFile(fn, rawSrcStm);
}
}
}