-
Notifications
You must be signed in to change notification settings - Fork 2
/
Copy pathscript.js
80 lines (68 loc) · 2.78 KB
/
script.js
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
// Cached handles to the page's key DOM elements, looked up once at script load.
// NOTE(review): assumes this script is loaded after these elements exist in the
// DOM (e.g. via a deferred/bottom-of-body <script>) — confirm in the HTML.
const video = document.getElementById('video');                 // live camera feed
const canvas = document.getElementById('canvas');               // overlay for detection boxes/labels
const startBtn = document.getElementById('start-btn');          // enabled only after models load
const landingPage = document.getElementById('landing-page');    // intro screen
const cameraPage = document.getElementById('camera-page');      // camera view, hidden initially
const loadingStatus = document.getElementById('loading-status');// model-loading progress message
// Kick off model loading as soon as the DOM is ready.
document.addEventListener('DOMContentLoaded', initializeApp);
/**
 * Boot sequence: load the face-api models, then unlock the Start button.
 * Updates the on-page status message on both success and failure.
 * @returns {Promise<void>}
 */
async function initializeApp() {
  try {
    await loadModels();
  } catch (error) {
    // Leave the button disabled; the user is told to refresh and retry.
    console.error('Failed to load models:', error);
    loadingStatus.textContent = 'Failed to load models. Please refresh the page.';
    return;
  }
  loadingStatus.textContent = 'Models loaded successfully!';
  startBtn.disabled = false;
}
/**
 * Downloads the face-api.js model weights from the /models directory.
 * The four networks are independent of one another, so they are fetched
 * in parallel with Promise.all instead of one-at-a-time awaits.
 * @returns {Promise<void>} resolves once every model has loaded
 * @throws if any model file fails to download or parse (fail-fast)
 */
async function loadModels() {
  await Promise.all([
    faceapi.nets.tinyFaceDetector.loadFromUri('/models'),
    faceapi.nets.faceRecognitionNet.loadFromUri('/models'),
    faceapi.nets.faceExpressionNet.loadFromUri('/models'),
    faceapi.nets.ageGenderNet.loadFromUri('/models'),
  ]);
}
// Wire the landing-page button to the camera flow (startApp is hoisted).
startBtn.addEventListener('click', startApp);

/**
 * Swaps the landing page for the camera page and begins streaming video.
 * @returns {Promise<void>}
 */
async function startApp() {
  // Reveal the camera view and hide the intro screen, then open the camera.
  cameraPage.style.display = 'block';
  landingPage.style.display = 'none';
  startVideo();
}
/**
 * Requests camera access and pipes the resulting MediaStream into the
 * <video> element. Rewritten from a .then/.catch chain to async/await for
 * clarity; callers ignore the return value, so the now-returned Promise is
 * backward compatible.
 * On failure (permission denied, no device, ...) the error is logged and the
 * user is alerted to grant permission and refresh.
 * @returns {Promise<void>}
 */
async function startVideo() {
  try {
    const stream = await navigator.mediaDevices.getUserMedia({ video: {} });
    video.srcObject = stream;
  } catch (err) {
    console.error(err);
    alert('Camera access denied. Please grant permission and refresh the page.');
  }
}
// Id of the running detection interval. Kept at module level so a repeated
// 'play' event (e.g. after pause/resume) replaces the loop instead of
// stacking a second interval on top of the first (the original leaked one
// interval per 'play' event).
let detectionIntervalId = null;

// Once the video starts playing, size the overlay canvas to the stream and
// run face detection + drawing roughly 10 times per second.
video.addEventListener('play', () => {
  const displaySize = { width: video.videoWidth, height: video.videoHeight };
  canvas.width = displaySize.width;
  canvas.height = displaySize.height;
  faceapi.matchDimensions(canvas, displaySize);

  // Loop-invariant: the detector options never change between ticks, so
  // build them once instead of on every interval callback.
  const options = new faceapi.TinyFaceDetectorOptions({
    inputSize: 224,
    scoreThreshold: 0.4
  });

  // Restart cleanly if 'play' fires again.
  if (detectionIntervalId !== null) {
    clearInterval(detectionIntervalId);
  }

  let busy = false; // re-entrancy guard: skip a tick while detection is still running
  detectionIntervalId = setInterval(async () => {
    if (busy) return;
    busy = true;
    try {
      const detections = await faceapi.detectAllFaces(video, options)
        .withFaceExpressions()
        .withAgeAndGender();
      const resizedDetections = faceapi.resizeResults(detections, displaySize);
      canvas.getContext('2d').clearRect(0, 0, canvas.width, canvas.height);
      faceapi.draw.drawDetections(canvas, resizedDetections, { withScore: true });
      faceapi.draw.drawFaceExpressions(canvas, resizedDetections, 0.05);
      // Label each face with its estimated age and gender confidence.
      resizedDetections.forEach((detection) => {
        const { age, gender, genderProbability } = detection;
        new faceapi.draw.DrawTextField(
          [
            `Age: ${Math.round(age)} years`,
            `Gender: ${gender} (${Math.round(genderProbability * 100)}%)`
          ],
          detection.detection.box.bottomRight
        ).draw(canvas);
      });
    } finally {
      busy = false; // always release the guard, even if detection throws
    }
  }, 100);
});