Security camera
Node.js server using node-media-server
In one terminal window, install the dependency (npm install node-media-server), save the following as app.js and run node app.js:
const NodeMediaServer = require('node-media-server');
const config = {
rtmp: {
port: 1935,
chunk_size: 60000,
gop_cache: true,
ping: 30,
ping_timeout: 60
},
http: {
port: 8000,
allow_origin: '*'
}
};

// create and start the server: RTMP ingest on port 1935, HTTP-FLV playback on port 8000
var nms = new NodeMediaServer(config);
nms.run();
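Optionally, node-media-server emits session events that make it easy to confirm that ffmpeg is really publishing the camera stream. A minimal sketch, assuming the event names documented in node-media-server's README (they may differ between versions):

// log when a publisher (ffmpeg) starts and stops a stream
nms.on('postPublish', (id, StreamPath, args) => {
  console.log('stream started:', StreamPath);
});
nms.on('donePublish', (id, StreamPath, args) => {
  console.log('stream stopped:', StreamPath);
});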
In a different window, run ffmpeg to pull the camera's MJPEG stream, transcode it to H.264 and publish it to the RTMP server (replace the camera address, credentials and STREAM_NAME with your own):
ffmpeg -f mjpeg -i "http://192.168.1.242:81/videostream.cgi?loginuse=matteo&loginpas=oettam68" -c:v libx264 -preset superfast -tune zerolatency -c:a aac -ar 44100 -f flv rtmp://localhost/live/STREAM_NAME
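To test the server and the browser page without the camera, a local video file can be pushed to the same RTMP endpoint instead (test.mp4 is just a placeholder name here):

ffmpeg -re -i test.mp4 -c:v libx264 -preset superfast -tune zerolatency -c:a aac -ar 44100 -f flv rtmp://localhost/live/STREAM_NAME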
In the browser, load this HTML page:
<html>
<body>
<!-- Load TensorFlow.js. This is required to use the coco-ssd model. -->
<script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs"> </script>
<!-- Load the coco-ssd model. -->
<script src="https://cdn.jsdelivr.net/npm/@tensorflow-models/coco-ssd"> </script>
<script src="https://cdn.bootcss.com/flv.js/1.5.0/flv.min.js"></script>
<div style="text-align:center">
<video hidden id="videoElement" muted="muted" width="480" height="360"></video>
<canvas id="canvas"></canvas>
</div>
<script>
var videoElement = document.getElementById('videoElement');
const canvas = document.getElementById("canvas");
if (flvjs.isSupported()) {
var flvPlayer = flvjs.createPlayer({
type: 'flv',
url: 'http://localhost:8000/live/STREAM_NAME.flv'
});
flvPlayer.attachMediaElement(videoElement);
flvPlayer.load();
flvPlayer.play();
}
// Notice there is no 'import' statement: 'cocoSsd' and 'tf' are
// available on this page because of the script tags above.
// Load the model.
console.log('loading model...');
cocoSsd.load().then(model => {
console.log('model is loaded');
detectFrame(videoElement, model);
});
detectFrame = (video, model) => {
  // detect objects in the current video frame
  model.detect(video).then(predictions => {
    console.log('Predictions: ', predictions);
    renderPredictions(predictions);
    // schedule the next detection only once this one has finished
    requestAnimationFrame(() => {
      detectFrame(video, model);
    });
  });
};
renderPredictions = predictions => {
const ctx = canvas.getContext("2d");
canvas.width = 480;
canvas.height = 360;
ctx.clearRect(0, 0, ctx.canvas.width, ctx.canvas.height);
// Fonts
const font = "16px sans-serif";
ctx.font = font;
ctx.textBaseline = "top";
ctx.drawImage(videoElement,0,0,canvas.width,canvas.height);
predictions.forEach(prediction => {
// Bounding boxes's coordinates and sizes
const x = prediction.bbox[0];
const y = prediction.bbox[1];
const width = prediction.bbox[2];
const height = prediction.bbox[3];
// Bounding box style
ctx.strokeStyle = "#00FFFF";
ctx.lineWidth = 2;
// Draw the bounding box
ctx.strokeRect(x, y, width, height);
// Label background
ctx.fillStyle = "#00FFFF";
const textWidth = ctx.measureText(prediction.class).width;
const textHeight = parseInt(font, 10); // base 10
ctx.fillRect(x, y, textWidth + 4, textHeight + 4);
});
predictions.forEach(prediction => {
// Write prediction class names
const x = prediction.bbox[0];
const y = prediction.bbox[1];
ctx.fillStyle = "#000000";
ctx.fillText(prediction.class, x, y);
});
};
</script>
</body>
</html>
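For a security camera, the interesting case is usually a 'person' detection. As a usage sketch (not part of the page above), the predictions returned by coco-ssd, each of the form {bbox, class, score}, could be filtered inside renderPredictions before drawing, for example to log only confident person detections (the 0.6 threshold is an arbitrary example value):

// hypothetical filter: keep only confident 'person' detections
const persons = predictions.filter(p => p.class === 'person' && p.score > 0.6);
if (persons.length > 0) {
  console.log('person detected at', new Date().toISOString(), persons);
  // e.g. trigger a notification or save a snapshot of the canvas here
}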