Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
121 changes: 121 additions & 0 deletions src/components/calibration/CalibrationCanvas.vue
Original file line number Diff line number Diff line change
@@ -0,0 +1,121 @@
<template>
  <canvas id="calibration-canvas" style="z-index: 0;" />
</template>

<script>
/**
 * Full-viewport canvas used to render animated calibration points.
 *
 * The canvas bitmap is kept in sync with the window size so drawings map
 * 1:1 to screen pixels (CSS alone would scale/blur the bitmap).
 */
export default {
  name: 'CalibrationCanvas',
  props: {
    // Canvas background fill (8-digit hex includes alpha).
    backgroundColor: {
      type: String,
      default: '#FFFFFFFF'
    },
    // Color of the calibration point and its outer reference ring.
    pointColor: {
      type: String,
      default: '#000000FF'
    },
    // Maximum radius (px) of the calibration point; also the radius of
    // the hollow reference circumference drawn around it.
    radius: {
      type: Number,
      required: true
    },
    // Radius (px) of the fixed red dot at the point's center.
    innerCircleRadius: {
      type: Number,
      default: 5
    },
    // Number of interpolation steps in triggerAnimation().
    animationFrames: {
      type: Number,
      default: 250
    },
    // Delay (ms) between animation steps.
    animationRefreshRate: {
      type: Number,
      default: 10
    }
  },
  mounted() {
    this.resizeCanvas();
    window.addEventListener('resize', this.resizeCanvas);
  },
  beforeDestroy() {
    window.removeEventListener('resize', this.resizeCanvas);
  },
  methods: {
    /**
     * Returns this component's own canvas element, or null when unmounted.
     * Using this.$el instead of a document-wide id lookup keeps the
     * component correct even if several instances (or a stale id) exist.
     */
    getCanvas() {
      return this.$el || null;
    },
    /**
     * Resizes the canvas bitmap to fill the viewport.
     * Registered as a window 'resize' listener in mounted().
     */
    resizeCanvas() {
      const canvas = this.getCanvas();
      if (canvas) {
        canvas.width = window.innerWidth;
        canvas.height = window.innerHeight;
      }
    },
    /** Clears the entire canvas. */
    clearCanvas() {
      const canvas = this.getCanvas();
      if (!canvas) return;
      const ctx = canvas.getContext('2d');
      ctx.clearRect(0, 0, canvas.width, canvas.height);
    },
    /**
     * Draws one calibration frame: background, the shrinking point,
     * the fixed red center dot, and a hollow ring at the maximum radius.
     *
     * @param {Number} x Center x in canvas pixels.
     * @param {Number} y Center y in canvas pixels.
     * @param {Number} currentRadius Radius of the main point for this frame.
     */
    drawPoint(x, y, currentRadius) {
      const canvas = this.getCanvas();
      if (!canvas) return;

      const ctx = canvas.getContext('2d');
      ctx.clearRect(0, 0, canvas.width, canvas.height);

      // Fill background
      ctx.fillStyle = this.backgroundColor;
      ctx.fillRect(0, 0, canvas.width, canvas.height);

      // Draw main circle
      ctx.beginPath();
      ctx.strokeStyle = this.pointColor;
      ctx.fillStyle = this.pointColor;
      ctx.arc(x, y, currentRadius, 0, Math.PI * 2, false);
      ctx.stroke();
      ctx.fill();

      // Draw inner red dot
      ctx.beginPath();
      ctx.strokeStyle = "red";
      ctx.fillStyle = "red";
      ctx.arc(x, y, this.innerCircleRadius, 0, Math.PI * 2, false);
      ctx.stroke();
      ctx.fill();

      // Draw hollow circumference representing maximum radius
      ctx.strokeStyle = this.pointColor;
      ctx.lineWidth = 1;
      ctx.beginPath();
      ctx.arc(x, y, this.radius, 0, 2 * Math.PI, false);
      ctx.stroke();
    },
    /**
     * Animates the point from `origin` to `target` over `animationFrames`
     * steps, shrinking its radius linearly from `radius` down toward 0
     * (the final frame is drawn with radius 1 so the point stays visible).
     *
     * @param {{x: Number, y: Number}} origin Start position.
     * @param {{x: Number, y: Number}} target End position.
     * @returns {Promise<void>} Resolves when the animation finishes.
     */
    async triggerAnimation(origin, target) {
      const frames = this.animationFrames;
      const deltaX = (target.x - origin.x) / frames;
      const deltaY = (target.y - origin.y) / frames;

      for (let d = 1; d <= frames; d++) {
        const xPosition = origin.x + deltaX * d;
        const yPosition = origin.y + deltaY * d;

        if (d === frames) {
          // Linear formula would yield radius 0 on the last frame; use 1
          // so the arrival point remains visible.
          this.drawPoint(xPosition, yPosition, 1);
        } else {
          // Shrink radius as it moves
          const currentRadius = (this.radius / frames) * (frames - d);
          this.drawPoint(xPosition, yPosition, currentRadius);
        }
        await new Promise(resolve => setTimeout(resolve, this.animationRefreshRate));
      }
    }
  }
};
</script>

<style scoped>
#calibration-canvas {
  position: absolute;
  top: 0;
  left: 0;
  width: 100vw;
  height: 100vh;
  pointer-events: none; /* Let clicks pass through to UI underneath if needed */
}
</style>
91 changes: 91 additions & 0 deletions src/components/calibration/WebcamManager.vue
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
<template>
  <video
    id="video-tag"
    autoplay
    playsinline
    style="display: none;"
    @loadedmetadata="onMetadataLoaded"
  ></video>
</template>

<script>
/**
 * Hidden-video webcam capture component.
 *
 * Records the webcam with MediaRecorder and emits:
 *  - 'video-ready'     (video element) once metadata is loaded and the
 *                      video reports non-zero dimensions,
 *  - 'capture-stopped' ({ blob, name }) when recording stops,
 *  - 'error'           (Error) when capture cannot start.
 */
export default {
  name: 'WebcamManager',
  data() {
    return {
      recordWebCam: null,   // active MediaRecorder instance, or null
      webcamfile: null,     // last recorded file: { blob, name }
      configWebCam: {
        audio: false,
        video: true
      }
    }
  },
  beforeDestroy() {
    // Ensure camera tracks are released if the component unmounts mid-capture.
    this.stopWebCamCapture();
  },
  methods: {
    /**
     * Starts webcam capture and recording.
     * @returns {Promise<Boolean>} true on success, false on failure.
     */
    async startWebCamCapture() {
      try {
        const mediaStreamObj = await navigator.mediaDevices.getUserMedia(this.configWebCam);

        // BUGFIX: the mime type was "video/webm;" — the trailing semicolon
        // is a malformed MIME type that MediaRecorder can reject with
        // NotSupportedError. Use the clean type, and fall back to the
        // browser default when it is not supported.
        const mimeType = 'video/webm';
        const options =
          typeof MediaRecorder !== 'undefined' && MediaRecorder.isTypeSupported(mimeType)
            ? { mimeType }
            : {};
        this.recordWebCam = new MediaRecorder(mediaStreamObj, options);

        let recordingWebCam = [];
        // The component's root element is the <video> tag itself.
        const video = this.$el;
        video.srcObject = mediaStreamObj;

        // Save recorded chunks to recordingWebCam array
        this.recordWebCam.ondataavailable = (ev) => {
          recordingWebCam.push(ev.data);
        };

        // OnStop WebCam Record
        this.recordWebCam.onstop = () => {
          // Generate blob from the recorded chunks
          let blob = new Blob(recordingWebCam, { type: "video/webm" });
          recordingWebCam = [];
          this.webcamfile = { blob: blob, name: mediaStreamObj.id };

          // End webcam capture (release the camera)
          mediaStreamObj.getTracks().forEach((track) => track.stop());
          this.$emit('capture-stopped', this.webcamfile);
        };

        // Init record webcam
        this.recordWebCam.start();
        return true;
      } catch (e) {
        console.error("Error starting webcam:", e);
        this.$emit('error', e);
        return false;
      }
    },

    /**
     * loadedmetadata handler: waits briefly, then emits 'video-ready'
     * once the video reports real dimensions.
     */
    async onMetadataLoaded() {
      // Additional wait to ensure video renders properly
      await new Promise(resolve => setTimeout(resolve, 200));
      const video = this.$el;
      if (video && video.videoWidth > 0 && video.videoHeight > 0) {
        this.$emit('video-ready', video);
      }
    },

    /** Stops recording if a recorder exists and is still active. */
    async stopRecord() {
      if (!this.recordWebCam) return;
      if (this.recordWebCam.state !== "inactive") {
        await this.stopWebCamCapture();
      }
    },

    /** Stops the MediaRecorder; its onstop handler releases the camera. */
    async stopWebCamCapture() {
      if (this.recordWebCam && this.recordWebCam.state !== 'inactive') {
        this.recordWebCam.stop();
      }
    }
  }
}
</script>
85 changes: 85 additions & 0 deletions src/services/ml/FaceDetector.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,85 @@
class FaceDetector {
  /**
   * @param {Object} model A loaded TensorFlow.js face-landmarks model
   *   exposing `estimateFaces({ input })`.
   */
  constructor(model) {
    this.model = model;
  }

  /**
   * Estimates the face landmarks from a given video element.
   * Includes defensive checking for video readiness.
   *
   * @param {HTMLVideoElement} videoElement The video element to analyze
   * @returns {Promise<Array|null>} The prediction array from TensorFlow,
   *   or null when the video is not ready or estimation failed
   *   (the caller is expected to retry).
   * @throws {Error} When no model was supplied to the constructor.
   */
  async detectFace(videoElement) {
    if (!this.model) {
      throw new Error("TensorFlow Face Landmarks Model not loaded.");
    }

    // A video that has not started rendering reports 0x0 dimensions.
    if (!videoElement || videoElement.videoWidth === 0 || videoElement.videoHeight === 0) {
      console.warn('FaceDetector: Video not ready or has 0 dimensions.');
      // Return null rather than hanging or looping internally, letting the caller retry
      return null;
    }

    try {
      return await this.model.estimateFaces({ input: videoElement });
    } catch (error) {
      console.error("FaceDetector: Error estimating faces", error);
      return null;
    }
  }

  /**
   * Helper utility to safely extract iris positions and detect blinks.
   *
   * A blink is detected for an eye when the eyelid opening — the distance
   * between the mid upper-lid landmark (index 3 of upper+lower concat) and
   * the mid lower-lid landmark (index 11) — falls below that eye's threshold.
   *
   * @param {Object} prediction A single face prediction object from the model
   * @param {Number} leftEyeThreshold Minimum left-eye opening before a blink is assumed
   * @param {Number} rightEyeThreshold Minimum right-eye opening before a blink is assumed
   * @returns {Object|null} `{ isBlinking: true }` when either eye is blinking;
   *   `{ isBlinking: false, leftIris, rightIris }` otherwise; null when the
   *   prediction lacks any of the required annotations.
   */
  processPrediction(prediction, leftEyeThreshold, rightEyeThreshold) {
    const annotations = prediction?.annotations;
    // BUGFIX: guard every annotation indexed below. The original only
    // checked the iris arrays, so a prediction missing eyelid data threw
    // on .concat instead of returning null as documented.
    if (
      !annotations ||
      !annotations.leftEyeIris ||
      !annotations.rightEyeIris ||
      !annotations.leftEyeUpper0 ||
      !annotations.leftEyeLower0 ||
      !annotations.rightEyeUpper0 ||
      !annotations.rightEyeLower0
    ) {
      return null;
    }

    // One blink check shared by both eyes.
    const isBlink = (upper, lower, threshold) => {
      const eyelid = upper.concat(lower);
      const opening = this._calculateDistance(eyelid[3], eyelid[11]);
      return opening < threshold;
    };

    const leftBlink = isBlink(annotations.leftEyeUpper0, annotations.leftEyeLower0, leftEyeThreshold);
    const rightBlink = isBlink(annotations.rightEyeUpper0, annotations.rightEyeLower0, rightEyeThreshold);

    if (leftBlink || rightBlink) {
      return { isBlinking: true };
    }

    return {
      isBlinking: false,
      leftIris: annotations.leftEyeIris[0],
      rightIris: annotations.rightEyeIris[0]
    };
  }

  /**
   * Euclidean distance between two landmark points (uses x/y only).
   * @param {Number[]} point1
   * @param {Number[]} point2
   * @returns {Number}
   */
  _calculateDistance(point1, point2) {
    return Math.hypot(point2[0] - point1[0], point2[1] - point1[1]);
  }
}

export default FaceDetector;
Loading