

C# WaveChannel32.Dispose Method Code Examples

This article collects typical usage examples of the NAudio.Wave.WaveChannel32.Dispose method in C#. If you are wondering what WaveChannel32.Dispose does, how to call it, or want to see it used in context, the curated examples below should help. You can also explore further usage examples of the containing class, NAudio.Wave.WaveChannel32.


Seven code examples of the WaveChannel32.Dispose method are shown below, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# examples.
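Before the examples, here is a minimal, self-contained sketch of the usual create → play → Dispose lifecycle of a WaveChannel32. It is not taken from the examples below; the file name is a placeholder and the choice of Mp3FileReader and WaveOutEvent is an assumption for illustration only. Dispose is reached through using blocks rather than explicit calls.

using System;
using System.Threading;
using NAudio.Wave;

class WaveChannel32DisposeSketch
{
    static void Main()
    {
        // "example.mp3" is a placeholder path used only for illustration.
        using (var reader = new Mp3FileReader("example.mp3"))
        using (var channel = new WaveChannel32(reader))   // wraps the reader as a 32-bit float stream
        using (var output = new WaveOutEvent())
        {
            channel.Volume = 0.5f;      // attenuate before playback
            output.Init(channel);
            output.Play();
            while (output.PlaybackState == PlaybackState.Playing)
                Thread.Sleep(100);
        }   // Dispose runs here for the output, the channel and the reader
    }
}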

Example 1: Play

        public void Play()
        {
            var file = Path.Combine(BasicTeraData.Instance.ResourceDirectory, "sound/", File);
            try
            {
                var outputStream = new MediaFoundationReader(file);
                var volumeStream = new WaveChannel32(outputStream);
                volumeStream.Volume = Volume;
                //Create DirectSoundOut since it works from both background and UI threads
                var player = new DirectSoundOut();
                //Init the player with the configured volume stream
                player.Init(volumeStream);
                player.Play();

                var timer = new Timer((obj) =>
                {
                    player.Stop();
                    player.Dispose();
                    volumeStream.Dispose();
                    outputStream.Dispose();
                    outputStream = null;
                    player = null;
                    volumeStream = null;
                }, null, Duration, Timeout.Infinite);
            }
            catch (Exception e)
            {
                // Get stack trace for the exception with source file information
                var st = new StackTrace(e, true);
                // Get the top stack frame
                var frame = st.GetFrame(0);
                // Get the line number from the stack frame
                var line = frame.GetFileLineNumber();
                BasicTeraData.LogError("Sound ERROR: " + e.Message + Environment.NewLine + e.StackTrace + Environment.NewLine + e.InnerException + Environment.NewLine + e + Environment.NewLine + "filename:" + file + Environment.NewLine + "line:" + line, false, true);
            }
        }
Developer: neowutran | Project: ShinraMeter | Lines of code: 36 | Source file: Music.cs
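The example above tears the playback chain down after a fixed Duration using a Timer. As a variation only (a sketch, not ShinraMeter code; PlayAndDispose is a hypothetical helper, and the cleanup relies on NAudio's IWavePlayer.PlaybackStopped event), the same Dispose calls can instead be triggered when the player reports that playback has stopped:

using System;
using NAudio.Wave;

static class PlaybackCleanupSketch
{
    // Hypothetical helper: plays a file and disposes the whole chain
    // once NAudio raises PlaybackStopped.
    public static void PlayAndDispose(string file, float volume)
    {
        var reader = new MediaFoundationReader(file);
        var channel = new WaveChannel32(reader) { Volume = volume };
        var player = new DirectSoundOut();

        player.PlaybackStopped += (sender, args) =>
        {
            player.Dispose();
            channel.Dispose();
            reader.Dispose();
        };

        player.Init(channel);
        player.Play();
    }
}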

Example 2: waveformGenerateWorker_DoWork

        private void waveformGenerateWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            WaveformGenerationParams waveformParams = e.Argument as WaveformGenerationParams;
            Mp3FileReader waveformMp3Stream = new Mp3FileReader(waveformParams.Path);
            WaveChannel32 waveformInputStream = new WaveChannel32(waveformMp3Stream);
            waveformInputStream.Sample += waveStream_Sample;

            int frameLength = fftDataSize;
            int frameCount = (int)((double)waveformInputStream.Length / (double)frameLength);
            int waveformLength = frameCount * 2;
            byte[] readBuffer = new byte[frameLength];
            waveformAggregator = new SampleAggregator(frameLength);

            float maxLeftPointLevel = float.MinValue;
            float maxRightPointLevel = float.MinValue;
            int currentPointIndex = 0;
            float[] waveformCompressedPoints = new float[waveformParams.Points];
            List<float> waveformData = new List<float>();
            List<int> waveMaxPointIndexes = new List<int>();

            for (int i = 1; i <= waveformParams.Points; i++)
            {
                waveMaxPointIndexes.Add((int)Math.Round(waveformLength * ((double)i / (double)waveformParams.Points), 0));
            }
            int readCount = 0;
            while (currentPointIndex * 2 < waveformParams.Points && waveformInputStream.Position < (waveformInputStream.Length - 1024))
            {
                waveformInputStream.Read(readBuffer, 0, readBuffer.Length);

                waveformData.Add(waveformAggregator.LeftMaxVolume);
                waveformData.Add(waveformAggregator.RightMaxVolume);

                if (waveformAggregator.LeftMaxVolume > maxLeftPointLevel)
                    maxLeftPointLevel = waveformAggregator.LeftMaxVolume;
                if (waveformAggregator.RightMaxVolume > maxRightPointLevel)
                    maxRightPointLevel = waveformAggregator.RightMaxVolume;

                if (readCount > waveMaxPointIndexes[currentPointIndex])
                {
                    waveformCompressedPoints[(currentPointIndex * 2)] = maxLeftPointLevel;
                    waveformCompressedPoints[(currentPointIndex * 2) + 1] = maxRightPointLevel;
                    maxLeftPointLevel = float.MinValue;
                    maxRightPointLevel = float.MinValue;
                    currentPointIndex++;
                }
                if (readCount % 3000 == 0)
                {
                    float[] clonedData = (float[])waveformCompressedPoints.Clone();
                    App.Current.Dispatcher.Invoke(new Action(() =>
                    {
                        WaveformData = clonedData;
                    }));
                }

                if (waveformGenerateWorker.CancellationPending)
                {
                    e.Cancel = true;
                    break;
                }
                readCount++;
            }

            float[] finalClonedData = (float[])waveformCompressedPoints.Clone();
            App.Current.Dispatcher.Invoke(new Action(() =>
            {
                fullLevelData = waveformData.ToArray();
                WaveformData = finalClonedData;
            }));
            waveformInputStream.Close();
            waveformInputStream.Dispose();
            waveformInputStream = null;
            waveformMp3Stream.Close();
            waveformMp3Stream.Dispose();
            waveformMp3Stream = null;
        }
Developer: moezRebai | Project: LightMusicPalyer | Lines of code: 75 | Source file: NAudioEngine.cs

Example 3: waveformGenerateWorker_DoWork

        private void waveformGenerateWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            WaveformGenerationParams waveformParams = e.Argument as WaveformGenerationParams;

            /*
            float[] audio = CommonUtils.Audio.NAudio.AudioUtilsNAudio.ReadMonoFromFile(waveformParams.Path, 44100, 0, 0);
            Dispatcher.CurrentDispatcher.Invoke(new Action(() =>
                                                           {
                                                           	WaveformData = audio;
                                                           }));
            return;
             */

            ISampleProvider sampleProvider = new AudioFileReader(waveformParams.Path);
            WaveStream fileWaveStream = (WaveStream) sampleProvider;
            WaveChannel32 waveformInputStream = new WaveChannel32(fileWaveStream);
            waveformInputStream.PadWithZeroes = false;
            waveformInputStream.Sample += waveStream_Sample;

            int frameLength = fftDataSize;
            int frameCount = (int)((double)waveformInputStream.Length / (double)frameLength);
            int waveformLength = frameCount * 2;
            float[] samples = new float[frameLength];
            List<float> floatList = new List<float>();
            while(sampleProvider.Read(samples, 0, samples.Length) > 0)
            {
                if (waveformInputStream.WaveFormat.Channels == 1) {
                    floatList.AddRange(samples);
                } else if (waveformInputStream.WaveFormat.Channels == 2) {
                    switch(stereoProcessing) {
                        case StereoProcessingType.CHANNEL_STEREO_LEFT:
                            for (int i = 0; i < samples.Length; i+=2) {
                                float left = samples[i];
                                float right = samples[i+1];
                                floatList.Add(left);
                            }
                            break;
                        case StereoProcessingType.CHANNEL_STEREO_RIGHT:
                            for (int i = 0; i < samples.Length; i+=2) {
                                float left = samples[i];
                                float right = samples[i+1];
                                floatList.Add(right);
                            }
                            break;
                        case StereoProcessingType.CHANNEL_MONOMIX:
                        default:
                            for (int i = 0; i < samples.Length; i+=2) {
                                float left = samples[i];
                                float right = samples[i+1];
                                // Mix down to mono by averaging the left and right samples.
                                floatList.Add((left + right) / 2.0f);
                            }
                            break;
                    }
                }

                if (waveformGenerateWorker.CancellationPending)
                {
                    e.Cancel = true;
                    break;
                }
            }

            Dispatcher.CurrentDispatcher.Invoke(new Action(() =>
                                                           {
                                                           	WaveformData = floatList.ToArray();
                                                           }));

            waveformInputStream.Close();
            waveformInputStream.Dispose();
            waveformInputStream = null;
        }
Developer: remy22 | Project: AudioVSTToolbox | Lines of code: 72 | Source file: NAudioEngine.cs

Example 4: EncodeButton_Click


//......... part of the code is omitted here .........
                            sp = new SoundPlayer(str);
                            sp.Play();
                            Thread.Sleep(503 + Int32.Parse(TextSpeed.Text));
                            break;

                       case '1':

                            str = Properties.Resources.n1;
                            sp = new SoundPlayer(str);
                            sp.Play();
                            Thread.Sleep(463 + Int32.Parse(TextSpeed.Text));
                            break;

                       case '2':

                            str = Properties.Resources.n2;
                            sp = new SoundPlayer(str);
                            sp.Play();
                            Thread.Sleep(418 + Int32.Parse(TextSpeed.Text));
                            break;

                       case '3':

                            str = Properties.Resources.n3;
                            sp = new SoundPlayer(str);
                            sp.Play();
                            Thread.Sleep(450 + Int32.Parse(TextSpeed.Text));
                            break;

                       case '4':

                            str = Properties.Resources.n4;
                            sp = new SoundPlayer(str);
                            sp.Play();
                            Thread.Sleep(507 + Int32.Parse(TextSpeed.Text));
                            break;
                       case '5':

                            str = Properties.Resources.n5;
                            sp = new SoundPlayer(str);
                            sp.Play();
                            Thread.Sleep(582 + Int32.Parse(TextSpeed.Text));
                            break;
                       case '6':

                            str = Properties.Resources.n6;
                            sp = new SoundPlayer(str);
                            sp.Play();
                            Thread.Sleep(582 + Int32.Parse(TextSpeed.Text));
                            break;
                       case '7':

                            str = Properties.Resources.n7;
                            sp = new SoundPlayer(str);
                            sp.Play();
                            Thread.Sleep(520 + Int32.Parse(TextSpeed.Text));
                            break;
                       case '8':

                            str = Properties.Resources.n8;
                            sp = new SoundPlayer(str);
                            sp.Play();
                            Thread.Sleep(373 + Int32.Parse(TextSpeed.Text));
                            break;
                       case '9':

                            str = Properties.Resources.n9;
                            sp = new SoundPlayer(str);
                            sp.Play();
                            Thread.Sleep(523 + Int32.Parse(TextSpeed.Text));
                            break;
                       case '.':
                            str = Properties.Resources.dot;
                            sp = new SoundPlayer(str);
                            sp.Play();
                            Thread.Sleep(663 + Int32.Parse(TextSpeed.Text));
                            break;

                       case ' ':
                            Thread.Sleep(Math.Abs(Int32.Parse(TextSpeed.Text)));
                            break;

                   }

                }

            }
            try
            {
                glc.Stop();
                glc.Dispose();
            }
            catch { }
            la.Stop();
            la.Dispose();

            wfo.Dispose();
            wc.Dispose();
            wfr.Dispose();
            GC.Collect();
        }
Developer: stdark | Project: ABC-Encoder | Lines of code: 101 | Source file: Form1.cs

Example 5: waveformGenerateWorker_DoWork

        private void waveformGenerateWorker_DoWork(object sender, DoWorkEventArgs e)
        {
            var waveformParams = (WaveformGenerationParams)e.Argument;

            float[] waveformDataFromCache;
            if (waveformDataCache.TryGet(waveformParams.Path, out waveformDataFromCache))
            {
                Application.Current.Dispatcher.Invoke(() => WaveformData = waveformDataFromCache);
                return;
            }

            Mp3FileReader waveformMp3Stream = new Mp3FileReader(waveformParams.Path);
            WaveChannel32 waveformInputStream = new WaveChannel32(waveformMp3Stream);
            waveformInputStream.Sample += waveStream_Sample;

            int frameLength = fftDataSize;
            int frameCount = (int)((double)waveformInputStream.Length / (double)frameLength);
            int waveformLength = frameCount * 2;
            byte[] readBuffer = new byte[frameLength];
            waveformAggregator = new SampleAggregator(frameLength);

            float maxLeftPointLevel = float.MinValue;
            float maxRightPointLevel = float.MinValue;
            int currentPointIndex = 0;
            float[] waveformCompressedPoints = new float[waveformParams.Points];
            List<float> waveformData = new List<float>();
            List<int> waveMaxPointIndexes = new List<int>();

            for (int i = 1; i <= waveformParams.Points; i++)
            {
                waveMaxPointIndexes.Add((int)Math.Round(waveformLength * ((double)i / (double)waveformParams.Points), 0));
            }
            int readCount = 0;
            while (currentPointIndex * 2 < waveformParams.Points)
            {
                waveformInputStream.Read(readBuffer, 0, readBuffer.Length);

                waveformData.Add(waveformAggregator.LeftMaxVolume);
                waveformData.Add(waveformAggregator.RightMaxVolume);

                if (waveformAggregator.LeftMaxVolume > maxLeftPointLevel)
                    maxLeftPointLevel = waveformAggregator.LeftMaxVolume;
                if (waveformAggregator.RightMaxVolume > maxRightPointLevel)
                    maxRightPointLevel = waveformAggregator.RightMaxVolume;

                if (readCount > waveMaxPointIndexes[currentPointIndex])
                {
                    waveformCompressedPoints[(currentPointIndex * 2)] = maxLeftPointLevel;
                    waveformCompressedPoints[(currentPointIndex * 2) + 1] = maxRightPointLevel;
                    maxLeftPointLevel = float.MinValue;
                    maxRightPointLevel = float.MinValue;
                    currentPointIndex++;
                }
                if (readCount % 3000 == 0)
                {
                    float[] clonedData = (float[])waveformCompressedPoints.Clone();

                    try
                    {
                        App.Current.Dispatcher.Invoke(new Action(() =>
                        {
                            WaveformData = clonedData;
                        }));
                    }
                    catch (TaskCanceledException)
                    {
                        e.Cancel = true;
                        return;
                    }
                    
                }

                if (waveformGenerateWorker.CancellationPending)
                {
                    e.Cancel = true;
                    break;
                }
                readCount++;
            }

            float[] finalClonedData = (float[])waveformCompressedPoints.Clone();

            // Don't cache half-finished waveforms (my CDJ-1000MK3s do this...)
            if (!waveformGenerateWorker.CancellationPending)
                waveformDataCache.Add(waveformParams.Path, finalClonedData);

            App.Current.Dispatcher.Invoke(new Action(() =>
            {
                fullLevelData = waveformData.ToArray();
                WaveformData = finalClonedData;
            }));
            waveformInputStream.Close();
            waveformInputStream.Dispose();
            waveformInputStream = null;
            waveformMp3Stream.Close();
            waveformMp3Stream.Dispose();
            waveformMp3Stream = null;
        }
Developer: rdingwall | Project: mixplanner | Lines of code: 98 | Source file: NAudioEngine.cs

Example 6: worker_DoWork

        private void worker_DoWork(object sender, DoWorkEventArgs e)
        {
            Mp3FileReader reader = new Mp3FileReader(FileName);
            WaveChannel32 channel = new WaveChannel32(reader);
            channel.Sample += new EventHandler<SampleEventArgs>(channel_Sample);

            int points = 2000;

            int frameLength = (int)FFTDataSize.FFT2048;
            int frameCount = (int)((double)channel.Length / (double)frameLength);
            int waveformLength = frameCount * 2;
            byte[] readBuffer = new byte[frameLength];

            float maxLeftPointLevel = float.MinValue;
            float maxRightPointLevel = float.MinValue;
            int currentPointIndex = 0;
            float[] waveformCompressedPoints = new float[points];
            List<float> waveformData = new List<float>();
            List<int> waveMaxPointIndexes = new List<int>();

            for (int i = 1; i <= points; i++)
            {
                waveMaxPointIndexes.Add((int)Math.Round(waveformLength * ((double)i / (double)points), 0));
            }
            int readCount = 0;
            while (currentPointIndex * 2 < points)
            {
                channel.Read(readBuffer, 0, readBuffer.Length);

                waveformData.Add(InputSampler.LeftMax);
                waveformData.Add(InputSampler.RightMax);

                if (InputSampler.LeftMax > maxLeftPointLevel)
                    maxLeftPointLevel = InputSampler.LeftMax;
                if (InputSampler.RightMax > maxRightPointLevel)
                    maxRightPointLevel = InputSampler.RightMax;

                if (readCount > waveMaxPointIndexes[currentPointIndex])
                {
                    waveformCompressedPoints[(currentPointIndex * 2)] = maxLeftPointLevel;
                    waveformCompressedPoints[(currentPointIndex * 2) + 1] = maxRightPointLevel;
                    maxLeftPointLevel = float.MinValue;
                    maxRightPointLevel = float.MinValue;
                    currentPointIndex++;
                }
                if (readCount % 3000 == 0)
                {
                    WaveformData = (float[])waveformCompressedPoints.Clone();
                }

                if (worker.CancellationPending)
                {
                    e.Cancel = true;
                    break;
                }
                readCount++;
            }

            FullLevelData = waveformData.ToArray();
            WaveformData = (float[])waveformCompressedPoints.Clone();

            // Cleanup
            channel.Close();
            channel.Dispose();
            channel = null;
            reader.Close();
            reader.Dispose();
            reader = null;
        }
Developer: pbeardshear | Project: TempoMonkey | Lines of code: 69 | Source file: WaveformTimeline.cs

Example 7: StopRecord_Click

        // Button that stops recording and renders the oscillogram using a Microsoft Chart control
        private void StopRecord_Click(object sender, EventArgs e)
        {
            timer1.Stop();
            // If the DirectSoundOut instance is not null, stop playback
            if (_waveOut != null)
            {
                _waveOut.Stop();
            }
            // If the WaveIn instance is not null, stop recording
            if (_sourceStream != null)
            {
                _sourceStream.StopRecording();
            }
            // If the WaveFileWriter instance is not null, dispose it, set it to null and build the oscillogram
            if (_waveWriter != null)
            {
                StartRecord.Enabled = true;
                _waveWriter.Dispose();
                _waveWriter = null;
                // Clear the chart before drawing a new oscillogram
                chart1.Series.Clear();
                // Set up the chart
                chart1.Series.Add("wave");
                chart1.Series["wave"].ChartType = System.Windows.Forms.DataVisualization.Charting.SeriesChartType.FastLine;
                chart1.Series["wave"].ChartArea = "ChartArea1";
                chart1.ChartAreas[0].AxisY.Interval = 0.1;
                // WaveChannel32 represents a channel for WaveMixerStream, which can mix several 32-bit input streams (typically fed from stereo input channels); a standalone mixing sketch follows this example
                // Open the .wav file that was saved at the stored path (_path)
                WaveChannel32 waveChannel32 = new WaveChannel32(new WaveFileReader(_path));
                // Buffer size
                var buffer = new byte[3916384];
                // Iterate while the current position is within the length of the waveChannel32 stream
                while (waveChannel32.Position < waveChannel32.Length)
                {
                    // read = waveChannel32.Read(buffer to read bytes into, offset in the buffer, number of bytes to read)
                    var read = waveChannel32.Read(buffer, 0, 3916384);
                    //chart time-magnitude
                    for (var i = 0; i < read / 4; i++)
                    {
                        chart1.Series["wave"].Points.Add(BitConverter.ToSingle(buffer, i * 4));
                        _volumeLeft = (BitConverter.ToSingle(buffer, i * 4));
                        i++;
                        _volumeRight = (BitConverter.ToSingle(buffer, i * 4));
                        if (_volumeLeft  > 0.5f && _volumeRight > 0.5f)
                        {
                            _ampLeft.Add(_volumeLeft); // Add the left channel value to the list
                            _ampRigh.Add(_volumeRight); // Add the right channel value to the list
                            _counter.Add(i / (2 * 44.1)); // Add the time value to the list
                        }
                    }
                }

                for (int i = 0; i < _ampLeft.Count; i++)
                {
                    // Start the ordinal number at 1
                    int number = i + 1;
                    // Create a ListView item
                    ListViewItem list = new ListViewItem(number.ToString("00"));
                    // Add it to the ListView
                    list.SubItems.Add(_ampLeft[i].ToString());
                    list.SubItems.Add(_ampRigh[i].ToString());
                    double a = Convert.ToDouble(_counter[i].ToString());
                    list.SubItems.Add(string.Format("{00:00:000}", a));
                    listView1.Items.Add(list);
                }
                // Blank row to separate the values of different recordings
                ListViewItem list2 = new ListViewItem();
                list2.SubItems.Add(" ");
                list2.SubItems.Add(" ");
                listView1.Items.Add(list2);
                // Dispose the waveChannel32
                waveChannel32.Dispose();
            }
        }
Developer: 8box | Project: AudioRecord | Lines of code: 75 | Source file: Form1.cs
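The comment in Example 7 describes WaveChannel32 as a channel for WaveMixerStream. The following standalone sketch (not part of AudioRecord; the file names are placeholders and both files are assumed to share the same sample rate) shows two WaveChannel32 inputs mixed through WaveMixerStream32 and then disposed:

using System;
using System.Threading;
using NAudio.Wave;

class MixerDisposeSketch
{
    static void Main()
    {
        // "a.wav" and "b.wav" are placeholder file names.
        var left = new WaveChannel32(new WaveFileReader("a.wav")) { Volume = 0.8f };
        var right = new WaveChannel32(new WaveFileReader("b.wav")) { Volume = 0.4f };

        var mixer = new WaveMixerStream32 { AutoStop = true };   // stop when all inputs end
        mixer.AddInputStream(left);
        mixer.AddInputStream(right);

        using (var output = new WaveOutEvent())
        {
            output.Init(mixer);
            output.Play();
            while (output.PlaybackState == PlaybackState.Playing)
                Thread.Sleep(100);
        }

        // Dispose the mixer and, to be safe, the individual channels as well.
        mixer.Dispose();
        left.Dispose();
        right.Dispose();
    }
}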


Note: The NAudio.Wave.WaveChannel32.Dispose method examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets were selected from open-source projects contributed by various developers; the source code copyright belongs to the original authors, and any distribution or use should follow the corresponding project's license. Do not reproduce without permission.