Visualization¶
To visualize real-time face tracking, use gazefilter.visualizer. It only needs an HTMLCanvasElement to render the visualization:
<!-- NOTE(review): getElementById only finds the canvas if this script runs
     after the <canvas> element exists — place the script at the end of <body>
     or defer it (e.g. DOMContentLoaded); confirm against the full example. -->
<script>
let canvas = document.getElementById("tracker-canvas");
gazefilter.visualizer.setCanvas(canvas);
</script>
...
<body>
<!-- Target canvas the visualizer draws onto (see setCanvas above). -->
<canvas id="tracker-canvas"></canvas>
</body>
After the canvas is set, the visualizer adds filter, change, and pause listeners to the tracker. These listeners have default implementations, but you can override them by registering your own with setListener().
/**
 * Custom "filter" listener: paints the current video frame, the facial
 * landmark dots, and the two pupil-center circles onto the canvas.
 *
 * @param {CanvasRenderingContext2D} ctx - drawing context supplied by the visualizer.
 * @param {Object} trackEvent - tracker event exposing shapeArrayView(),
 *   shapeSize() and pupilArray() (project API; see tracker listener docs).
 */
function render(ctx, trackEvent) {
    // Camera frame first, scaled to fill the whole canvas.
    const { width, height } = ctx.canvas;
    ctx.drawImage(gazefilter.tracker.videoElement(), 0, 0, width, height);

    // Landmark style: small filled white dots.
    ctx.strokeStyle = 'white';
    ctx.fillStyle = 'white';
    ctx.lineWidth = 2;

    // Landmarks are packed as flat [x0, y0, x1, y1, ...] pairs.
    const shape = trackEvent.shapeArrayView();
    for (let idx = 0; idx < trackEvent.shapeSize(); idx++) {
        const x = shape[idx * 2];
        const y = shape[idx * 2 + 1];
        ctx.beginPath();
        ctx.arc(x, y, 2, 0, Math.PI * 2);
        ctx.fill();
    }

    // Pupil centers: one red outlined circle per eye.
    ctx.strokeStyle = 'red';
    const [leftX, leftY, rightX, rightY] = trackEvent.pupilArray();
    for (const [px, py] of [[leftX, leftY], [rightX, rightY]]) {
        ctx.beginPath();
        ctx.arc(px, py, 3, 0, Math.PI * 2);
        ctx.stroke();
    }
}
// Replace the visualizer's default frame renderer with ours.
gazefilter.visualizer.setListener("filter", render);
The first argument of any visualizer listener function is always a CanvasRenderingContext2D; the second is the same event argument that a tracker listener receives.