音频blob转ArrayBuffer并显示波形
时间: 2023-09-12 12:08:01 浏览: 164
将音频Blob对象转换为ArrayBuffer与上面的示例相同,可以使用上面提到的`blobToArrayBuffer`函数。接下来,需要使用Web Audio API来读取ArrayBuffer并显示波形。
具体实现步骤如下:
1. 创建一个AudioContext对象。
```javascript
// Create the shared Web Audio context — the entry point to the Web Audio API.
var audioContext = new AudioContext();
```
2. 创建一个AudioBufferSourceNode节点,并将其连接到AudioContext的destination。
```javascript
// Create a one-shot playback source and route it to the speakers (destination).
var sourceNode = audioContext.createBufferSource();
sourceNode.connect(audioContext.destination);
```
3. 将ArrayBuffer转换为AudioBuffer对象。
```javascript
// Decode the raw encoded bytes into an AudioBuffer, then attach it to the source.
audioContext.decodeAudioData(arrayBuffer, function(decodedData) {
  sourceNode.buffer = decodedData;
}, function(err) {
  // Without this second callback, corrupt/unsupported audio fails silently.
  console.error("decodeAudioData failed:", err);
});
```
4. 使用AnalyserNode节点来获取音频数据。
```javascript
// AnalyserNode exposes real-time time-domain/frequency data for visualisation.
var analyserNode = audioContext.createAnalyser();
analyserNode.fftSize = 2048; // FFT size; frequencyBinCount will be fftSize / 2 = 1024
sourceNode.connect(analyserNode);
```
5. 使用Canvas绘制波形。
```javascript
var canvas = document.getElementById("canvas");
var canvasContext = canvas.getContext("2d");
// Repeatedly sample the analyser and trace the current waveform on the canvas.
function draw() {
  // Schedule the next frame first so the loop keeps running.
  requestAnimationFrame(draw);
  var sampleCount = analyserNode.frequencyBinCount;
  var samples = new Uint8Array(sampleCount);
  analyserNode.getByteTimeDomainData(samples);
  // Wipe the canvas with a grey background before each trace.
  canvasContext.fillStyle = "rgb(200, 200, 200)";
  canvasContext.fillRect(0, 0, canvas.width, canvas.height);
  canvasContext.lineWidth = 2;
  canvasContext.strokeStyle = "rgb(0, 0, 0)";
  canvasContext.beginPath();
  var step = canvas.width * 1.0 / sampleCount;
  var px = 0;
  for (var i = 0; i < sampleCount; i++) {
    // Sample bytes centre on 128 (silence); map to a vertical position.
    var level = samples[i] / 128.0;
    var py = level * canvas.height / 2;
    if (i === 0) {
      canvasContext.moveTo(px, py);
    } else {
      canvasContext.lineTo(px, py);
    }
    px += step;
  }
  // Close the trace at the vertical midline on the right edge.
  canvasContext.lineTo(canvas.width, canvas.height / 2);
  canvasContext.stroke();
}
draw();
```
这里通过 `document.getElementById` 获取页面中已有的 canvas 元素,并使用 Canvas 2D 绘制音频波形。需要注意的是,绘制波形的代码需要在音频数据解码完成后调用,并且必须调用 `sourceNode.start()` 开始播放,音频数据才会流经 AnalyserNode(否则波形始终是一条静音直线);同时需要使用 requestAnimationFrame 函数来不断刷新波形。完整的代码如下:
```javascript
// Reuse a single AudioContext for the whole page (browsers cap the number of contexts).
var audioContext = new AudioContext();

/**
 * Read a Blob's bytes into an ArrayBuffer and hand them to `callback`.
 * @param {Blob} blob - binary data (e.g. recorded audio)
 * @param {function(ArrayBuffer)} callback - receives the raw bytes on success
 */
function blobToArrayBuffer(blob, callback) {
  var reader = new FileReader();
  reader.onload = function() {
    callback(reader.result);
  };
  reader.onerror = function() {
    // Surface read failures instead of silently doing nothing.
    console.error("Failed to read blob:", reader.error);
  };
  reader.readAsArrayBuffer(blob);
}

/**
 * Decode the audio bytes, start playback, and continuously draw the
 * time-domain waveform onto the <canvas id="canvas"> element.
 * @param {ArrayBuffer} arrayBuffer - encoded audio data (e.g. WAV bytes)
 */
function drawWaveform(arrayBuffer) {
  audioContext.decodeAudioData(arrayBuffer, function(decodedData) {
    var sourceNode = audioContext.createBufferSource();
    sourceNode.buffer = decodedData;
    var analyserNode = audioContext.createAnalyser();
    analyserNode.fftSize = 2048;
    // Route source -> analyser -> speakers so the analyser sees the samples.
    sourceNode.connect(analyserNode);
    analyserNode.connect(audioContext.destination);
    // BUG FIX: the source must be started; the original never called start(),
    // so no audio flowed through the graph and the analyser only ever
    // reported silence (a flat line at mid-height).
    sourceNode.start(0);

    var canvas = document.getElementById("canvas");
    var canvasContext = canvas.getContext("2d");
    var bufferLength = analyserNode.frequencyBinCount;
    // Allocate the sample buffer once, not on every animation frame.
    var dataArray = new Uint8Array(bufferLength);

    function draw() {
      requestAnimationFrame(draw);
      analyserNode.getByteTimeDomainData(dataArray);
      canvasContext.fillStyle = "rgb(200, 200, 200)";
      canvasContext.fillRect(0, 0, canvas.width, canvas.height);
      canvasContext.lineWidth = 2;
      canvasContext.strokeStyle = "rgb(0, 0, 0)";
      canvasContext.beginPath();
      var sliceWidth = canvas.width * 1.0 / bufferLength;
      var x = 0;
      for (var i = 0; i < bufferLength; i++) {
        // Bytes centre on 128 (silence); normalise to roughly [0, 2].
        var v = dataArray[i] / 128.0;
        var y = v * canvas.height / 2;
        if (i === 0) {
          canvasContext.moveTo(x, y);
        } else {
          canvasContext.lineTo(x, y);
        }
        x += sliceWidth;
      }
      canvasContext.lineTo(canvas.width, canvas.height / 2);
      canvasContext.stroke();
    }
    draw();
  }, function(err) {
    // decodeAudioData rejects on corrupt/unsupported data; don't swallow it.
    console.error("Unable to decode audio data:", err);
  });
}

var audioBlob = new Blob([/* audio data */], {type: "audio/wav"});
blobToArrayBuffer(audioBlob, function(arrayBuffer) {
  drawWaveform(arrayBuffer);
});
```
这里的`audioBlob`变量是一个包含音频数据的Blob对象,需要将其替换为实际的音频Blob对象。同时,需要在HTML代码中添加一个canvas元素:
```html
<canvas id="canvas"></canvas>
```
阅读全文