stream.js
const socketIOProvider = require('socket.io');
const cv = require('opencv4nodejs');

const fps = 30; // frames per second
/**
 * The video source is set to 0 to stream from the default webcam.
 * It can also be set to an IP camera URL, e.g. "http://192.168.1.112:8080/video".
 */
const videoSource = 0;
const videoCap = new cv.VideoCapture(videoSource);
videoCap.set(cv.CAP_PROP_FRAME_WIDTH, 600);
videoCap.set(cv.CAP_PROP_FRAME_HEIGHT, 600);
const stream = (server) => {
  const io = socketIOProvider(server);
  // The transformed feed is emitted this many times less frequently than the live feed.
  const processingIntervalMultiple = 10;

  // Live feed: read a frame, JPEG-encode it as base64 and broadcast it to all clients.
  setInterval(() => {
    const frame = videoCap.read();
    const image = cv.imencode('.jpg', frame).toString('base64');
    io.emit('new-frame', { live: image });
  }, 1000 / fps);

  /**
   * Since video/image transformations are computationally expensive operations,
   * they are performed on a separate, slower interval, independent of the live feed.
   */
  setInterval(() => {
    const frame = videoCap.read();
    const frameWithFaces = faceDetector(frame);
    const imageWithFaces = cv.imencode('.jpg', frameWithFaces).toString('base64');
    io.emit('new-frame', { transformed: imageWithFaces });
  }, (processingIntervalMultiple * 1000) / fps);
};
/**
 * Face detection transformation on the stream.
 */
const faceDetector = (frame) => {
  const classifier = new cv.CascadeClassifier(cv.HAAR_FRONTALFACE_ALT2);
  const detection = classifier.detectMultiScale(frame.bgrToGray());
  if (!detection.objects.length) {
    // no faces detected
    return frame;
  }
  // draw a rectangle around each detected face
  const frameWithFaces = frame.copy();
  detection.objects.forEach((rect) => {
    const blue = new cv.Vec(255, 0, 0);
    frameWithFaces.drawRectangle(
      new cv.Point(rect.x, rect.y),
      new cv.Point(rect.x + rect.width, rect.y + rect.height),
      { color: blue, thickness: 2 }
    );
  });
  return frameWithFaces;
};
module.exports = stream;
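/**
 * Usage sketch (an assumption for illustration, not part of the original file):
 * the exported `stream` function attaches socket.io to a plain Node HTTP server
 * and starts broadcasting frames. The require path './stream' and the port are
 * illustrative only.
 *
 *   const http = require('http');
 *   const stream = require('./stream'); // path assumed
 *
 *   const server = http.createServer();
 *   stream(server);      // begins emitting 'new-frame' events via socket.io
 *   server.listen(3000); // port chosen for illustration
 *
 * A socket.io client would listen for the 'new-frame' event and render
 * `live` or `transformed` as a base64-encoded JPEG, e.g. as a data: URI
 * in an <img> element.
 */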