Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -22,3 +22,6 @@ pnpm-debug.log*
*.njsproj
*.sln
*.sw?

# Firebase config (local only)
src/firebase.js
21 changes: 21 additions & 0 deletions src/App.vue
Original file line number Diff line number Diff line change
Expand Up @@ -12,3 +12,24 @@ export default {
name: 'App',
};
</script>

<style>
/* Global layout reset: let the page itself scroll vertically while
   suppressing accidental horizontal scrollbars, and guarantee the
   document always spans at least the full viewport height. */
html, body {
  margin: 0;
  padding: 0;
  height: auto;
  min-height: 100vh;
  overflow-y: auto;
  overflow-x: hidden;
}

/* Vuetify root fix */
/* Vuetify applies its own sizing to .v-application; force it to fill
   the viewport so short pages still cover the full screen height. */
.v-application {
  min-height: 100vh !important;
}

/* Keep the main content area at least viewport-height and scrollable. */
.v-main {
  min-height: 100vh;
  overflow-y: auto;
}
</style>
8 changes: 3 additions & 5 deletions src/main.js
Original file line number Diff line number Diff line change
Expand Up @@ -2,18 +2,16 @@ import Vue from 'vue'
import App from './App.vue'
import router from './router'
import store from './store'
import vuetify from './plugins/vuetify';
import vuetify from './plugins/vuetify'
import './services/axios'
import firebase from 'firebase/app'
import 'firebase/firestore'
import { envConfig } from './config/environment'

Vue.config.productionTip = false

const firebaseConfig = envConfig.firebase;

// Initialize Firebase
firebase.initializeApp(firebaseConfig);

Vue.config.productionTip = false

new Vue({
router,
Expand Down
139 changes: 139 additions & 0 deletions src/mixins/CameraMixin.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,139 @@
/**
* CameraMixin — Reusable webcam / MediaRecorder logic.
*
* Extracted from DoubleCalibrationRecord.vue (Issue #112).
* Mix this into any Vue component that needs camera capture
* and face-detection support.
*
* Requirements for the host component:
* - Must have a <video id="video-tag"> in its template (for ML inference).
* - Must have a <video id="video-preview"> in its template (for UI display).
* - Must provide `this.model` (the face-landmarks-detection model instance)
* and `this.configWebCam` (getUserMedia constraints) via its own
* data / computed / store.
*/

export default {
  data() {
    return {
      /** @type {MediaRecorder|null} Active recorder for the webcam stream. */
      recordWebCam: null,
      /** @type {{ blob: Blob, name: string }|null} Blob + name of the recorded webcam video. */
      webcamfile: null,
    };
  },

  methods: {
    /**
     * Request camera access, wire up MediaRecorder, and start recording.
     * Resolves once the #video-tag element fires `loadeddata` (at which
     * point face detection is kicked off).
     *
     * @returns {Promise<void>|undefined} undefined if the required video
     *   elements are missing; otherwise a promise resolving on `loadeddata`.
     * @throws Re-throws any getUserMedia error after alerting the user.
     */
    async startWebCamCapture() {
      try {
        let mediaStreamObj;

        try {
          // Prefer the host component's ideal constraints.
          mediaStreamObj = await navigator.mediaDevices.getUserMedia(
            this.configWebCam,
          );
        } catch (error) {
          // Some devices reject ideal resolutions; degrade to plain video.
          console.warn(
            'Failed with ideal constraints, trying basic video:',
            error,
          );
          mediaStreamObj = await navigator.mediaDevices.getUserMedia({
            audio: false,
            video: true,
          });
        }

        // FIX: the previous 'video/webm;' (trailing semicolon) is a
        // malformed MIME type string. Use the well-formed container name,
        // and only pass it when the browser reports support.
        const options =
          typeof MediaRecorder.isTypeSupported !== 'function' ||
          MediaRecorder.isTypeSupported('video/webm')
            ? { mimeType: 'video/webm' }
            : undefined;
        this.recordWebCam = new MediaRecorder(mediaStreamObj, options);

        let recordingWebCam = [];
        const video = document.getElementById('video-tag'); // ML
        const preview = document.getElementById('video-preview'); // UI

        if (!video || !preview) {
          console.error('Video elements not found');
          return;
        }

        video.srcObject = mediaStreamObj;
        preview.srcObject = mediaStreamObj;

        // Save recorded chunks as they become available.
        this.recordWebCam.ondataavailable = (ev) => {
          recordingWebCam.push(ev.data);
        };

        // On stop → assemble the chunks into a single blob, release the
        // camera, and let stopRecord() finalize state (it is a no-op here
        // because the recorder is already inactive once onstop fires).
        const self = this;
        this.recordWebCam.onstop = () => {
          const blob = new Blob(recordingWebCam, { type: 'video/webm' });
          recordingWebCam = [];
          self.webcamfile = { blob, name: mediaStreamObj.id };
          mediaStreamObj.getTracks().forEach((track) => track.stop());
          self.stopRecord();
        };

        // Start recording
        this.recordWebCam.start();

        // Wait for video to be ready before starting face detection.
        return new Promise((resolve) => {
          video.onloadeddata = () => {
            this.detectFace();
            resolve();
          };
        });
      } catch (e) {
        console.error('Error accessing webcam:', e);
        alert(
          'Camera access is required for eye tracking. Please allow camera permissions and refresh the page.',
        );
        throw e;
      }
    },

    /**
     * Run face-landmark detection on the current video frame.
     * Retries every 100 ms while the video isn't ready yet.
     *
     * FIX: the previous version recursed without bound; a detached or
     * never-starting video element would retry forever. A retry budget
     * (default ~10 s) now turns that into an explicit error.
     *
     * @param {number} [maxRetries=100] Remaining not-ready retries.
     * @returns {Promise<Array>} Face predictions from the model.
     * @throws {Error} If the video never becomes ready.
     */
    async detectFace(maxRetries = 100) {
      const video = document.getElementById('video-tag');

      if (!video || video.videoWidth === 0 || video.videoHeight === 0) {
        if (maxRetries <= 0) {
          throw new Error('Video element never became ready for face detection');
        }
        console.warn('Video not ready yet, waiting...');
        await new Promise((resolve) => setTimeout(resolve, 100));
        return this.detectFace(maxRetries - 1);
      }

      const lastPrediction = await this.model.estimateFaces({ input: video });
      return lastPrediction;
    },

    /**
     * Stop the MediaRecorder if it is still active.
     */
    async stopRecord() {
      if (!this.recordWebCam) return;
      if (this.recordWebCam.state !== 'inactive') {
        await this.stopWebCamCapture();
      }
    },

    /**
     * Stop the MediaRecorder and mark calibration as finished.
     *
     * FIX: guard against a null/already-stopped recorder — the previous
     * version dereferenced `this.recordWebCam` unconditionally. Note
     * MediaRecorder.stop() is synchronous (void); completion is signalled
     * via the `onstop` handler, so there is nothing to await.
     */
    async stopWebCamCapture() {
      if (this.recordWebCam && this.recordWebCam.state !== 'inactive') {
        this.recordWebCam.stop();
      }
      this.calibFinished = true;
    },
  },
};
170 changes: 170 additions & 0 deletions src/services/CalibrationCalculator.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,170 @@
/**
* CalibrationCalculator — Pure calibration math utilities.
*
* Extracted from DoubleCalibrationRecord.vue (Issue #112) so they
* can be unit-tested independently of the Vue component.
*/

/**
* Generate a calibration-point pattern for a given screen size.
*
* @param {number} pointCount Number of calibration points (1-9).
* @param {number} width Screen / viewport width in px.
* @param {number} height Screen / viewport height in px.
* @param {number} offset Margin from screen edge in px.
* @returns {{ x: number, y: number }[]}
*/
/**
 * Generate a calibration-point pattern for a given screen size.
 *
 * Counts 1-8 use hand-laid layouts built from the screen edges and
 * centre; 9 (and any unrecognised count) falls back to a uniform
 * 3×3 grid.
 *
 * @param {number} pointCount Number of calibration points (1-9).
 * @param {number} width Screen / viewport width in px.
 * @param {number} height Screen / viewport height in px.
 * @param {number} offset Margin from screen edge in px.
 * @returns {{ x: number, y: number }[]}
 */
export function generateCalibrationPattern(pointCount, width, height, offset) {
  // Uniform cols×rows grid spanning the usable area inside the margins.
  const gridPoints = (cols, rows) => {
    const stepX = (width - 2 * offset) / (cols - 1);
    const stepY = (height - 2 * offset) / (rows - 1);
    const points = [];
    for (let row = 0; row < rows; row += 1) {
      for (let col = 0; col < cols; col += 1) {
        points.push({ x: offset + col * stepX, y: offset + row * stepY });
      }
    }
    return points;
  };

  const left = offset;
  const right = width - offset;
  const top = offset;
  const bottom = height - offset;
  const centerX = width / 2;
  const centerY = height / 2;

  switch (pointCount) {
    case 1:
      return [{ x: centerX, y: centerY }];

    case 2:
      return [
        { x: left, y: centerY },
        { x: right, y: centerY },
      ];

    case 3:
      return [
        { x: left, y: centerY },
        { x: centerX, y: centerY },
        { x: right, y: centerY },
      ];

    case 4:
      return [
        { x: left, y: top },
        { x: right, y: top },
        { x: left, y: bottom },
        { x: right, y: bottom },
      ];

    case 5:
      // Four corners plus the centre.
      return [
        { x: left, y: top },
        { x: right, y: top },
        { x: centerX, y: centerY },
        { x: left, y: bottom },
        { x: right, y: bottom },
      ];

    case 6:
      // 3 columns × 2 rows.
      return gridPoints(3, 2);

    case 7:
      // Corners, top-centre, and the two horizontal-midline edges.
      return [
        { x: left, y: top },
        { x: centerX, y: top },
        { x: right, y: top },
        { x: left, y: centerY },
        { x: right, y: centerY },
        { x: left, y: bottom },
        { x: right, y: bottom },
      ];

    case 8:
      // Full 3×3 ring without the centre point.
      return [
        { x: left, y: top },
        { x: centerX, y: top },
        { x: right, y: top },
        { x: left, y: centerY },
        { x: right, y: centerY },
        { x: left, y: bottom },
        { x: centerX, y: bottom },
        { x: right, y: bottom },
      ];

    case 9:
    default:
      return gridPoints(3, 3);
  }
}

/**
* Generate a runtime calibration pattern based on screen dimensions.
*
* @param {Object} opts
* @param {boolean} opts.isSinglePointMode Use centre-only pattern.
* @param {number} opts.offset Margin from screen edge.
* @param {number} opts.pointNumber Number of calibration points.
* @param {number} [opts.width] Viewport width (default: window.innerWidth).
* @param {number} [opts.height] Viewport height (default: window.innerHeight).
* @returns {{ x: number, y: number }[]}
*/
export function generateRuntimePattern({
  isSinglePointMode,
  offset = 100,
  pointNumber = 9,
  width = window.innerWidth,
  height = window.innerHeight,
}) {
  if (isSinglePointMode) {
    return [{ x: width / 2, y: height / 2 }];
  }

  const points = pointNumber;
  const minCols = 3;
  // At least 3 columns, otherwise roughly square.
  const cols = Math.max(minCols, Math.round(Math.sqrt(points)));
  const rows = Math.ceil(points / cols);

  const usableWidth = width - 2 * offset;
  const usableHeight = height - 2 * offset;

  const stepX = usableWidth / (cols - 1);
  // FIX: when pointNumber <= cols the grid has a single row and the
  // original code divided by (rows - 1) === 0, yielding Infinity and
  // then NaN y-coordinates (offset + 0 * Infinity). Use a zero step and
  // centre the lone row vertically instead.
  const stepY = rows > 1 ? usableHeight / (rows - 1) : 0;
  const baseY = rows > 1 ? offset : height / 2;

  const pattern = [];

  for (let i = 0; i < rows; i++) {
    for (let j = 0; j < cols; j++) {
      // The last row may be partial; never emit more than `points` entries.
      if (pattern.length < points) {
        pattern.push({
          x: offset + j * stepX,
          y: baseY + i * stepY,
        });
      }
    }
  }

  return pattern;
}

/**
* Euclidean distance between two 2-D landmark points (arrays).
*
* @param {number[]} pointA [x, y, …]
* @param {number[]} pointB [x, y, …]
* @returns {number}
*/
/**
 * Euclidean distance between two 2-D landmark points (arrays).
 * Only the first two components (x, y) participate; any extra entries
 * such as a z-coordinate are ignored.
 *
 * @param {number[]} pointA [x, y, …]
 * @param {number[]} pointB [x, y, …]
 * @returns {number}
 */
export function calculateDistance(pointA, pointB) {
  const [ax, ay] = pointA;
  const [bx, by] = pointB;
  return Math.sqrt((bx - ax) ** 2 + (by - ay) ** 2);
}
Loading