Question
I am getting started with the Web Audio API to experiment a little with it, and I wanted to see how best to work with it in AngularJS.
Other Web Audio things I have tried, such as creating a sine wave, seem to work in Angular, but I am just not sure of the best way to load audio in Angular if I then want to be able to manipulate it with the Web Audio API.
I naively tried to put some functions directly into an AngularJS controller, which doesn't seem right - code below.
In the Chrome Dev Tools Network tab I can see the test.mp3 request, which reports 'Failed to load response data'. However, the path to the file is correct, and if I click on it in the dev tools the file opens up and starts playing.
Any help is greatly appreciated
// Controller and attempt to load audio
(function () {
    'use strict';

    angular
        .module('app')
        .controller('mvMainCtrl', mvMainCtrl);

    mvMainCtrl.$inject = ['$resource', '$scope', '$http'];

    function mvMainCtrl($scope, $http, $resource) {
        var ctx; // audio context
        var buf; // audio buffer

        // init the sound system
        function init() {
            console.log("in init");
            try {
                ctx = new AudioContext();
                loadFile();
            } catch (e) {
                alert('you need webaudio support');
            }
        }

        function loadFile() {
            var req = new XMLHttpRequest();
            req.open("GET", "/app/main/sounds/test.mp3", true);
            req.responseType = "arraybuffer";
            req.onload = function () {
                // decode the loaded data
                ctx.decodeAudioData(req.response, function (buffer) {
                    buf = buffer;
                    play();
                });
            };
            req.send();
        }
        loadFile();

        function playback() {
            var playSound = ctx.createBufferSource();
            playSound.buffer = buf;
            playSound.connect(ctx.destination);
            playSound.start(audioContext.currentTime);
        }
        playback();
    }
})();
Answer 1:
I just had the same problem, and after a couple of hours spent on this issue I finally found a way to get it to work:
Promise style:
$http({
    method: 'GET',
    url: url,
    responseType: 'arraybuffer'
}).then(function (response) {
    audioContext.decodeAudioData(response.data, function (buffer) {
        mainBuffer = buffer;
    }, function (err) {
        console.log(err);
    });
});
Standard style:
$http({
    method: 'GET',
    url: url,
    responseType: 'arraybuffer'
}).success(function (data) {
    audioContext.decodeAudioData(data, function (buffer) {
        mainBuffer = buffer;
    }, function (err) {
        console.log(err);
    });
});
With XMLHttpRequest() the file came through corrupt; the key point is that the responseType attribute must be specified as 'arraybuffer'.
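If you want to keep the loading logic out of the controller, one way is to wrap the same request in an AngularJS factory. The sketch below is not part of the original answer: the factory name soundLoader, the injected $window, and the single shared AudioContext are my own assumptions, but the $http call is the same promise-style request shown above.

// Hypothetical soundLoader factory (names are illustrative, not from the answer)
angular
    .module('app')
    .factory('soundLoader', ['$http', '$window', '$q', function ($http, $window, $q) {
        // one shared AudioContext for the whole app
        var audioContext = new ($window.AudioContext || $window.webkitAudioContext)();

        // fetch a file over $http and resolve with the decoded AudioBuffer
        function load(url) {
            return $http({
                method: 'GET',
                url: url,
                responseType: 'arraybuffer'
            }).then(function (response) {
                return $q(function (resolve, reject) {
                    audioContext.decodeAudioData(response.data, resolve, reject);
                });
            });
        }

        return {
            context: audioContext,
            load: load
        };
    }]);

A controller could then inject soundLoader and call soundLoader.load('/app/main/sounds/test.mp3').then(function (buffer) { ... }), starting playback only once the promise resolves.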
Answer 2:
Here is an implementation of a BufferLoader you can use to load an array of sound files:
// BufferLoader
function BufferLoader(context, urlList, callback) {
    this.context = context;
    this.urlList = urlList;
    this.onload = callback;
    this.bufferList = new Array();
    this.loadCount = 0;
}

BufferLoader.prototype.loadBuffer = function (url, index) {
    // Load buffer asynchronously
    var request = new XMLHttpRequest();
    request.open("GET", url, true);
    request.responseType = "arraybuffer";

    var loader = this;

    request.onload = function () {
        // Asynchronously decode the audio file data in request.response
        loader.context.decodeAudioData(
            request.response,
            function (buffer) {
                if (!buffer) {
                    alert('error decoding file data: ' + url);
                    return;
                }
                loader.bufferList[index] = buffer;
                if (++loader.loadCount == loader.urlList.length) {
                    loader.onload(loader.bufferList);
                }
            },
            function (error) {
                console.error('decodeAudioData error', error);
            }
        );
    };

    request.onerror = function () {
        alert('BufferLoader: XHR error');
    };

    request.send();
};

BufferLoader.prototype.load = function () {
    for (var i = 0; i < this.urlList.length; ++i) {
        this.loadBuffer(this.urlList[i], i);
    }
};
How you can use it to load sounds:
var sounds = ["assets/test.mp3", "test2.mp3"];
var context = new (window.AudioContext || window.webkitAudioContext)();
var loader = new BufferLoader(context, sounds, onComplete);
var audioBuffers;

// start fetching and decoding all of the files
loader.load();

function onComplete(bufferList) {
    console.log("Sounds loaded!");
    audioBuffers = bufferList;
}

function playSound(i) {
    // create a buffer source node, which is used to play a sound one time
    var bufferSourceNode = context.createBufferSource();
    // set its buffer to one of the loaded sounds
    bufferSourceNode.buffer = audioBuffers[i];
    // connect it to the output
    bufferSourceNode.connect(context.destination);
    // play the sound
    bufferSourceNode.start();
}
Now to play a sound you have to call:
playSound(0); // play first sound in array of loaded sounds
In your example I don't see where the play() function that you call inside decodeAudioData is defined; you probably wanted to call playback(). Also, you call playback() on controller initialization, when the sound is not yet loaded, which you shouldn't do.
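To illustrate that last point, here is a minimal sketch of how the question's controller could be rearranged so playback only starts after decoding has finished. It reuses the ctx, buf, loadFile, playback, and init names from the question and is not part of the original answer.

// Sketch: start playback from the decode callback, not at controller init
function loadFile() {
    var req = new XMLHttpRequest();
    req.open("GET", "/app/main/sounds/test.mp3", true);
    req.responseType = "arraybuffer";
    req.onload = function () {
        ctx.decodeAudioData(req.response, function (buffer) {
            buf = buffer;
            playback(); // the buffer exists now, so it is safe to play
        });
    };
    req.send();
}

function playback() {
    var playSound = ctx.createBufferSource();
    playSound.buffer = buf;
    playSound.connect(ctx.destination);
    playSound.start(ctx.currentTime); // use ctx, not the undefined audioContext
}

init(); // init() creates ctx and calls loadFile(); no early playback() call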
Source: https://stackoverflow.com/questions/34019824/how-do-i-load-an-audiobuffer-with-angular-http