本文整理汇总了TypeScript中web-audio-api-player.PlayerCore.getAudioContext方法的典型用法代码示例。如果您正苦于以下问题:TypeScript PlayerCore.getAudioContext方法的具体用法?TypeScript PlayerCore.getAudioContext怎么用?TypeScript PlayerCore.getAudioContext使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类web-audio-api-player.PlayerCore的用法示例。
在下文中一共展示了PlayerCore.getAudioContext方法的1个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的TypeScript代码示例。
示例1: PlayerCore
$(function () {
// Player configuration: sounds are streamed from the Jamendo endpoint by
// track id; onPlaying progress callbacks fire every 500 ms.
let options: ICoreOptions = {
soundsBaseUrl: 'https://mp3l.jamendo.com/?trackid=',
playingProgressIntervalTime: 500,
// volume could alternatively be set here instead of via setVolume() below
//volume: 80
};
let player = new PlayerCore(options);
// initial volume in percent (0-100)
player.setVolume(80);
// Container for the Web Audio nodes of the visualizer graph; nodes are
// attached dynamically once the AudioContext resolves below.
// NOTE(review): typed `any` — a dedicated interface would be safer.
let visualizerAudioGraph: any = {};
player.getAudioContext().then((audioContext) => {
    // Build the visualizer audio graph once the player's AudioContext is ready.
    const processorBufferSize = 1024;
    const inputChannelCount = 1;
    const outputChannelCount = 1;
    // create the nodes of the graph
    visualizerAudioGraph.gainNode = audioContext.createGain();
    visualizerAudioGraph.delayNode = audioContext.createDelay(1);
    // NOTE(review): createScriptProcessor is deprecated in favor of
    // AudioWorkletNode — kept here because the player library consumes it
    visualizerAudioGraph.scriptProcessorNode = audioContext.createScriptProcessor(processorBufferSize, inputChannelCount, outputChannelCount);
    visualizerAudioGraph.analyserNode = audioContext.createAnalyser();
    // analyser tuning: heavy smoothing off, clamp the decibel range,
    // large FFT for fine-grained frequency bins
    visualizerAudioGraph.analyserNode.smoothingTimeConstant = 0.2;
    visualizerAudioGraph.analyserNode.minDecibels = -100;
    visualizerAudioGraph.analyserNode.maxDecibels = -33;
    visualizerAudioGraph.analyserNode.fftSize = 16384;
    // wire the graph together
    visualizerAudioGraph.delayNode.connect(audioContext.destination);
    visualizerAudioGraph.scriptProcessorNode.connect(audioContext.destination);
    visualizerAudioGraph.analyserNode.connect(visualizerAudioGraph.scriptProcessorNode);
    visualizerAudioGraph.gainNode.connect(visualizerAudioGraph.delayNode);
    // hand the assembled graph over to the player
    player.setAudioGraph(visualizerAudioGraph);
});
let isPlaying = false;
// Canvas painting loop: reschedules itself once per animation frame while a
// track is playing and redraws the mirrored frequency-bar spectrum.
function looper() {
    if (!isPlaying) {
        return;
    }
    // FIX: use the standard requestAnimationFrame — the WebKit-prefixed
    // webkitRequestAnimationFrame is non-standard and undefined in
    // non-WebKit browsers (and absent from the TS DOM typings)
    window.requestAnimationFrame(looper);
    // read the current frequency data from the analyser node
    const initialArray = new Uint8Array(visualizerAudioGraph.analyserNode.frequencyBinCount);
    visualizerAudioGraph.analyserNode.getByteFrequencyData(initialArray);
    console.log(initialArray);
    // reduce the raw FFT bins to the bar values that get drawn
    const visualData = GetVisualBins(initialArray);
    const transformedVisualData = transformToVisualBins(visualData);
    console.log(transformedVisualData);
    ctx.clearRect(0, 0, canvas.width, canvas.height); // clear the previous frame
    ctx.fillStyle = 'red'; // color of the bars
    for (let y = 0; y < SpectrumBarCount; y++) {
        const barX = y * barWidth;
        const barHeight = transformedVisualData[y];
        // draw each bar mirrored around the horizontal center line:
        // one rect growing upward, one growing downward
        ctx.fillRect(barX, (canvas.height / 2) - barHeight, barWidth, barHeight);
        ctx.fillRect(barX, canvas.height / 2, barWidth, barHeight);
    }
}
// initialize the player UI, bound to the same PlayerCore instance
let playerUI = new PlayerUI(player);
// add songs to player queue
let firstSoundAttributes: ISoundAttributes = {
sources: '1314412&format=mp31',
id: 1314412,
//sources: '1214935&format=ogg1',
//id: 1214935,
playlistId: 0,
onLoading: (loadingProgress, maximumValue, currentValue) => {
//.........这里部分代码省略.........