JS: Displaying a Sound Waveform on Canvas with AudioContext

我心飞翔 · Category: javascript


[TOC]


Preface

  • AudioContext is the entry point of the Web Audio API; it is a browser API, not part of the ECMAScript (ES7) standard.
  • The main APIs used here are createScriptProcessor, onaudioprocess, and getChannelData.
  • Note: onaudioprocess (on ScriptProcessorNode) is deprecated; new code should use AnalyserNode instead (see MDN: AnalyserNode, and the sketch after this list).
  • Cyandev-CSDN - AnalyserNode
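
Since the deprecation note above points at AnalyserNode, here is a minimal sketch of what the modern equivalent might look like. It is an assumption-laden sketch, not part of the original demo: it assumes an `<audio id="player">` element on the page plus the same 400×300 canvas with id `cav`, and uses getByteTimeDomainData, which delivers time-domain samples as bytes (0 to 255, with 128 meaning silence):

```js
// Hedged AnalyserNode sketch (assumed markup: <audio id="player" src="test.mp3">)
const audioCtx = new AudioContext();
const analyser = audioCtx.createAnalyser();
analyser.fftSize = 2048; // frequencyBinCount will be 1024

const source = audioCtx.createMediaElementSource(document.getElementById('player'));
source.connect(analyser);
analyser.connect(audioCtx.destination);

const ctx = document.getElementById('cav').getContext('2d');
const data = new Uint8Array(analyser.frequencyBinCount);

(function draw() {
    requestAnimationFrame(draw);
    // note: browsers may keep audioCtx "suspended" until a user gesture;
    // see the autoplay note after the index.html listing below
    analyser.getByteTimeDomainData(data); // time-domain bytes, 128 = silence
    ctx.clearRect(0, 0, 400, 300);
    ctx.beginPath();
    for (let i = 0, max = Math.min(400, data.length); i < max; i++) {
        ctx.lineTo(i, (data[i] / 255) * 300); // map 0..255 to 0..300 px
    }
    ctx.stroke();
})();
```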

Result

![Waveform rendered on the canvas](image-20210408002306773.png)

Code

index.html

<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <title>Canvas Audio</title>
    <style>
        body, div {
            margin: 0;
            padding: 0;
            text-align: center;
        }
        #cav {
            border: 1px solid black;
            border-radius: 4px;
            margin: 10px auto;
        }
    </style>
</head>
<body>
<canvas id="cav" width="400" height="300"></canvas>
<!-- scripts moved inside <body>: placing them between </body> and </html> is invalid HTML -->
<script src="Aud.js"></script>
<script>
    let ctx = document.getElementById('cav').getContext('2d');
    let SampleSize = 1024; // buffer size of the ScriptProcessorNode
    window.onload = function () {
        init();
    };
    function init() {
        AudManager.urls = ["test.mp3"];
        AudManager.init();
        AudManager.load(() => {
            // type 2: ScriptProcessorNode(bufferSize, inputChannels, outputChannels)
            let jNode = AudManager.createNode(2, [SampleSize, 1, 1]);
            // no type given: falls through to the default, a GainNode
            let pNode = AudManager.createNode();
            ctx.strokeStyle = "red";
            jNode.onaudioprocess = function (e) {
                renderWave(e);
            };
            let sound1 = AudManager.items[0];
            sound1.addNode(pNode);    // group 0: src -> gain -> destination
            sound1.addNode(jNode, 1); // group 1: src -> scriptProcessor -> destination
            sound1.play();
        });
    }
    function renderWave(e) {
        ctx.clearRect(0, 0, 400, 300);
        // samples of channel 0, each in [-1, 1]
        let bufData = e.inputBuffer.getChannelData(0);
        ctx.beginPath();
        ctx.moveTo(0, 150);
        // y = s * 150 + 150 maps a sample s in [-1, 1] onto the 300px-high canvas
        for (let i = 0, len = bufData.length, max = Math.min(400, len); i < max; i++) {
            ctx.lineTo(i, bufData[i] * 150 + 150);
        }
        ctx.stroke();
    }
</script>
</body>
</html>
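
One caveat worth noting: current browsers suspend an AudioContext created without a user gesture, so the window.onload auto-play above may stay silent. A hedged workaround, reusing the init() function from this page, is to trigger it from the first click instead, so the context is created while a gesture is active and starts in the "running" state:

```js
// Hypothetical autoplay workaround: run init() on the first user gesture
document.getElementById('cav').addEventListener('click', init, { once: true });
```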

Aud.js

let Aud = function (ctx, url) {
    this.ctx = ctx;
    this.url = url;
    // source node (an AudioBufferSourceNode is one-shot: it can only be started once)
    this.src = ctx.createBufferSource();
    // groups of processing nodes; each group becomes its own chain to the destination
    this.pNode = [];
};
Aud.prototype = {
    // connect src -> node1 -> node2 -> ... -> destination for every group
    output() {
        for (let i = 0; i < this.pNode.length; i++) {
            let tNode = this.src;
            for (let j = 0; j < this.pNode[i].length; j++) {
                tNode.connect(this.pNode[i][j]);
                tNode = this.pNode[i][j];
            }
            tNode.connect(this.ctx.destination);
        }
    },
    play(loop) {
        this.src.loop = loop || false;
        this.output();
        this.src.start(0);
    },
    stop() {
        // note: a stopped AudioBufferSourceNode cannot be restarted;
        // replaying requires creating a new source node
        this.src.stop();
    },
    addNode(node, groupIdx = 0) {
        this.pNode[groupIdx] = this.pNode[groupIdx] || [];
        this.pNode[groupIdx].push(node);
    }
};
// node type constants
Aud.NODETYPE = {
    GNODE: 0, // GainNode (the default)
    PNODE: 1, // PannerNode
    JNODE: 2  // ScriptProcessorNode
};
// Aud manager object
const AudManager = {
    urls: [],
    items: [],
    ctx: null,
    init() {
        try {
            this.ctx = new AudioContext();
        } catch (e) {
            console.log(`${e}`);
        }
    },
    load(callback) {
        for (let i = 0; i < this.urls.length; i++) {
            this.loadSingle(this.urls[i], callback);
        }
    },
    loadSingle(url, callback) {
        let req = new XMLHttpRequest();
        req.open('GET', url, true);
        req.responseType = 'arraybuffer';
        let self = this;
        req.onload = function () {
            self.ctx.decodeAudioData(this.response)
                .then(
                    buf => {
                        let aud = new Aud(self.ctx, url);
                        aud.src.buffer = buf;
                        // requests may finish out of order, so items is not
                        // guaranteed to match the order of urls
                        self.items.push(aud);
                        if (self.items.length === self.urls.length) {
                            callback();
                        }
                    },
                    err => {
                        console.log(`decode error: ${err}`);
                    }
                );
        };
        req.send();
    },
    createNode(nodeType, param) {
        let node = null;
        switch (nodeType) {
            case Aud.NODETYPE.PNODE:
                node = this.ctx.createPanner();
                break;
            case Aud.NODETYPE.JNODE:
                // param: [bufferSize, inputChannels, outputChannels]
                node = this.ctx.createScriptProcessor(param[0], param[1], param[2]);
                break;
            default:
                node = this.ctx.createGain();
        }
        return node;
    }
};
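
For reference, here is a short usage sketch of the manager on its own, built only from the API above: it loads one file, chains a GainNode in group 0 (so the graph is src -> gain -> destination), and loops playback. The file name sample.mp3 is a placeholder:

```js
AudManager.urls = ["sample.mp3"];        // placeholder file name
AudManager.init();
AudManager.load(() => {
    const gain = AudManager.createNode(Aud.NODETYPE.GNODE); // returns ctx.createGain()
    gain.gain.value = 0.5;               // play at half volume
    const sound = AudManager.items[0];
    sound.addNode(gain);                 // group 0: src -> gain -> destination
    sound.play(true);                    // loop = true
});
```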
