问题
I'm trying to use FrameDetector to process an existing MP4 file. I've put the video file and the HTML/JavaScript below inside a node.js server and run it at localhost, so there shouldn't be any CORS issues.
The detector starts correctly, but when I send image data to it, the same two things happen every time:
- The first request returns an empty `faces` array.
- The second request returns an error (with a unique trailing number each time): `worker code reported an exception14920304`.
I'm not really sure what to do with that message — does anyone have any suggestions?
//- FrameDetector.pug -- demo page: loads the Affectiva SDK, a canvas that
//- video frames are mirrored onto, the source video, and the analysis script.
html
  head
    title FrameDetector Demo
    script(src='https://download.affectiva.com/js/3.2/affdex.js')
  body
    canvas#canvas
    video#video-to-analyze(preload="auto" controls="true")
      source(type="video/mp4" src="video/my-video.mp4")
    script(src='js/FrameDetector.js')
and
// FrameDetector.js
// Shared globals:
//   heartbeat      - interval id driving analyzeVideoFrame once per second
//   startTimestamp - wall-clock seconds recorded when the detector initialized
var heartbeat, startTimestamp;
// Once the DOM is ready, size the canvas and start mirroring video frames
// onto it whenever playback begins.
document.addEventListener('DOMContentLoaded', function(){
var v = document.getElementById('video-to-analyze');
var canvas = document.getElementById('canvas');
var context = canvas.getContext('2d');
// NOTE(review): dividing clientWidth/clientHeight by 100 yields a canvas only
// a few pixels wide/tall, yet analyzeVideoFrame reads a 640x360 region from
// it. getImageData then returns mostly blank pixels -- the likely cause of
// the empty `faces` array and the subsequent worker exception.
var cw = Math.floor(canvas.clientWidth / 100);
var ch = Math.floor(canvas.clientHeight / 100);
canvas.width = cw;
canvas.height = ch;
v.addEventListener('play', function(){
draw(this,context,cw,ch);
},false);
},false);
// Copy the current video frame onto the canvas context `c` at size w x h,
// then reschedule itself every 20 ms until playback pauses or ends.
function draw(v, c, w, h) {
  var stopped = v.paused || v.ended;
  if (stopped) {
    return false;
  }
  c.drawImage(v, 0, 0, w, h);
  setTimeout(function () {
    draw(v, c, w, h);
  }, 20);
}
// Grab the current canvas contents and hand them to the Affectiva detector,
// stamped with the elapsed time (seconds) since the detector initialized.
function analyzeVideoFrame() {
  var context = document.getElementById("canvas").getContext('2d');
  // Raw RGBA pixels for the 640x360 region the detector expects.
  var imageData = context.getImageData(0, 0, 640, 360);
  console.log("Captured imageData.", imageData);
  // Delta between now and the first frame, in seconds.
  var deltaTime = ((new Date()).getTime() / 1000) - startTimestamp;
  detector.process(imageData, deltaTime);
}
// Detector success callback: log the frame timestamp, how many faces were
// found, and the first face object (undefined when none were detected).
function onImageResultsSuccess(faces, image, timestamp) {
  var label = "onImageResultsSuccess:";
  var firstFace = faces[0];
  console.log(label, timestamp, faces.length, firstFace);
}
// Detector failure callback: report the error detail, then cancel the 1 Hz
// sampling interval so no further frames are sent after an error.
function onImageResultsFailure(image, timestamp, err_detail) {
  var label = "onImageResultsFailure:";
  console.error(label, timestamp, err_detail);
  clearInterval(heartbeat);
}
// Sanity check: affdex.js must have loaded before this script runs.
if (typeof(affdex)=="undefined") {
console.log("The affdex global variable has not been loaded.");
}
// Configure a frame detector tuned for large faces and enable every metric.
var detector = new affdex.FrameDetector(affdex.FaceDetectorMode.LARGE_FACES);
detector.detectAllExpressions();
detector.detectAllEmotions();
detector.detectAllAppearance();
// Once the detector is ready: start playback (which triggers draw() via the
// 'play' listener registered above) and sample one canvas frame per second.
detector.addEventListener("onInitializeSuccess", function() {
document.getElementById('video-to-analyze').play();
startTimestamp = (new Date()).getTime() / 1000;
heartbeat = setInterval(analyzeVideoFrame, 1000);
});
detector.addEventListener("onInitializeFailure", function() {
console.error("Affectiva failed to initialize.");
});
detector.addEventListener("onImageResultsSuccess", onImageResultsSuccess);
detector.addEventListener("onImageResultsFailure", onImageResultsFailure);
// Kick off (asynchronous) detector initialization.
detector.start();
Output in the console:
Captured imageData. ImageData {data: Uint8ClampedArray(921600), width: 640, height: 360}
onImageResultsSuccess: 0.005000114440917969 0 undefined
Captured imageData. ImageData {data: Uint8ClampedArray(921600), width: 640, height: 360}
onImageResultsFailure: 0.0009999275207519531 worker code reported an exception14920304
回答1:
Got it. Once I got the image drawn correctly to the canvas element, the Affectiva code worked fine. Here's my corrected code:
//- FrameDetector.pug -- corrected page: jQuery + Affectiva SDK, a fixed-size
//- hidden canvas (640x360, matching the getImageData region), and the video.
html
  head
    title FrameDetector Demo
    script(src='http://ajax.googleapis.com/ajax/libs/jquery/1/jquery.min.js')
    script(src='https://download.affectiva.com/js/3.2/affdex.js')
  body
    canvas#canvas(width="640" height="360" style="display:none;")
    video#video(preload="auto" controls="true")
      source(type="video/mp4" src="video/my-video.mp4")
    script(src='js/FrameDetector.js')
and
// FrameDetector.js
// Shared globals: heartbeat is the 1 Hz sampling-interval id;
// startTimestamp marks (in seconds) when the detector finished initializing.
var heartbeat, startTimestamp;
// 'play' handler: repeatedly copy the current video frame onto #canvas at
// roughly 30 fps until playback pauses or ends.
// Fix: the original body referenced `ctx`, a variable declared with `var`
// inside the jQuery ready callback and therefore function-scoped there -- it
// is NOT visible from this top-level function, so the first drawn frame would
// throw ReferenceError. Resolve the drawing context locally instead.
function onVideoPlay() {
  var $this = this; //cache
  (function loop() {
    if (!$this.paused && !$this.ended) {
      var ctx = document.getElementById('canvas').getContext('2d');
      ctx.drawImage($this, 0, 0);
      setTimeout(loop, 1000 / 30); // drawing at 30fps
    }
  })();
}
// Sample the canvas once and feed the pixels to the Affectiva detector,
// together with the elapsed time (seconds) since initialization.
function analyzeVideoFrame() {
  var aCanvas = document.getElementById("canvas");
  // Raw RGBA pixels for the 640x360 region matching the canvas size.
  var imageData = aCanvas.getContext('2d').getImageData(0, 0, 640, 360);
  console.log("Captured imageData.", imageData);
  var now = (new Date()).getTime() / 1000;
  detector.process(imageData, now - startTimestamp);
}
// Detector success callback: log the timestamp, the face count, and the
// first face found (undefined when the array is empty).
function onImageResultsSuccess(faces, image, timestamp) {
  var first = faces[0];
  console.log("onImageResultsSuccess:", timestamp, faces.length, first);
}
// Detector failure callback: log the error detail and stop the sampling
// interval so no further frames are submitted after a failure.
function onImageResultsFailure(image, timestamp, err_detail) {
  var label = "onImageResultsFailure:";
  console.error(label, timestamp, err_detail);
  clearInterval(heartbeat);
}
// Wait for the DOM (jQuery ready), then wire up the canvas mirror and the
// Affectiva frame detector.
$(function() {
// Sanity check: affdex.js must have loaded before this runs.
if (typeof(affdex)=="undefined") {
console.log("The affdex global variable has not been loaded.");
}
var canvas = document.getElementById('canvas');
// NOTE(review): `ctx`, `video`, and `detector` are function-scoped to this
// ready callback. The top-level functions in this file (onVideoPlay,
// analyzeVideoFrame) reference `ctx` and `detector` as if they were globals;
// unless those handlers resolve the objects themselves or these declarations
// are hoisted to file scope, they will throw ReferenceError at runtime.
var ctx = canvas.getContext('2d');
var video = document.getElementById('video');
var detector = new affdex.FrameDetector(affdex.FaceDetectorMode.LARGE_FACES);
// Set up a loop to draw frames to the canvas element
// (the third argument 0 is coerced to false, i.e. bubble-phase listener).
video.addEventListener('play', onVideoPlay, 0);
// Set up and start the detector
detector.detectAllExpressions();
detector.detectAllEmotions();
detector.detectAllAppearance();
detector.addEventListener("onInitializeSuccess", function() {
// Detector ready: start playback and sample one frame per second.
document.getElementById('video').play();
startTimestamp = (new Date()).getTime() / 1000;
heartbeat = setInterval(analyzeVideoFrame, 1000);
});
detector.addEventListener("onInitializeFailure", function() {
console.error("Affectiva failed to initialize.");
});
detector.addEventListener("onImageResultsSuccess", onImageResultsSuccess);
detector.addEventListener("onImageResultsFailure", onImageResultsFailure);
detector.start();
});
来源:https://stackoverflow.com/questions/47646782/debugging-affectiva-framedetector-worker-code-reported-an-exception