I am trying to create face-detection software using Django and OpenCV, for which I need a live stream from the client's webcam.
After a lot of searching I pieced together the code below.
main.html
{% load static %}
<!doctype html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport"
          content="width=device-width, user-scalable=no, initial-scale=1.0, maximum-scale=1.0, minimum-scale=1.0">
    <meta http-equiv="X-UA-Compatible" content="ie=edge">
    <link rel="stylesheet" href="https://stackpath.bootstrapcdn.com/bootstrap/4.1.3/css/bootstrap.min.css">
    <link rel="stylesheet" href="{% static 'css/maincam.css' %}" type="text/css">
    <title>streaming</title>
</head>
<body>
    <a href="/">Video Chat</a>
    <a href="/cam/">Screenshot and all</a>
    <div class="display-cover">
        <video autoplay id="video"></video>
        <video autoplay id="video1"></video>
        <canvas class=""></canvas>
        <div class="video-options">
            <select name="" id="" class="custom-select">
                <option value="">Select camera</option>
            </select>
        </div>
        <img class="screenshot-image d-none" alt="">
        <div class="controls">
            <button class="btn btn-danger play" title="Play"><i data-feather="play-circle"></i></button>
            <button class="btn btn-info pause d-none" title="Pause"><i data-feather="pause"></i></button>
            <button class="btn btn-outline-success screenshot d-none" title="ScreenShot"><i data-feather="image"></i></button>
        </div>
    </div>
    <script src="https://unpkg.com/feather-icons"></script>
    <script src="{% static 'js/videostreammain.js' %}" type="text/javascript"></script>
</body>
</html>
maincam.css
.screenshot-image {
    width: 150px;
    height: 90px;
    border-radius: 4px;
    border: 2px solid whitesmoke;
    box-shadow: 0 1px 2px 0 rgba(0, 0, 0, 0.1);
    position: absolute;
    bottom: 5px;
    left: 10px;
    background: white;
}

.display-cover {
    display: flex;
    justify-content: center;
    align-items: center;
    width: 70%;
    margin: 5% auto;
    position: relative;
}

video {
    width: 100%;
    background: rgba(0, 0, 0, 0.2);
}

.video-options {
    position: absolute;
    left: 20px;
    top: 30px;
}

.controls {
    position: absolute;
    right: 20px;
    top: 20px;
    display: flex;
}

.controls > button {
    width: 45px;
    height: 45px;
    text-align: center;
    border-radius: 100%;
    margin: 0 6px;
    background: transparent;
}

.controls > button:hover svg {
    color: white !important;
}

@media (min-width: 300px) and (max-width: 400px) {
    .controls {
        flex-direction: column;
    }
    .controls button {
        margin: 5px 0 !important;
    }
}

.controls > button > svg {
    height: 20px;
    width: 18px;
    text-align: center;
    margin: 0 auto;
    padding: 0;
}

.controls button:nth-child(1) {
    border: 2px solid #D2002E;
}

.controls button:nth-child(1) svg {
    color: #D2002E;
}

.controls button:nth-child(2) {
    border: 2px solid #008496;
}

.controls button:nth-child(2) svg {
    color: #008496;
}

.controls button:nth-child(3) {
    border: 2px solid #00B541;
}

.controls button:nth-child(3) svg {
    color: #00B541;
}
videostreammain.js
const video = document.querySelector('#video');
const video1 = document.querySelector('#video1');
const canvas = document.querySelector('canvas');
const controls = document.querySelector('.controls');
const cameraOptions = document.querySelector('.video-options>select');
const screenshotImage = document.querySelector('img');
const buttons = [...controls.querySelectorAll('button')];
var videoTracks,localStream;
const [play, pause, screenshot] = buttons;
let streamStarted = false;
const constraints = {
    video: {
        width: {
            min: 1280,
            ideal: 1920,
            max: 2560,
        },
        height: {
            min: 720,
            ideal: 1080,
            max: 1440
        },
        facingMode: 'user'
    }
};
const ws = new WebSocket('ws://localhost:8000/video');
console.log('url',ws.url)
function startCam(constraint) {
    userMedia = navigator.mediaDevices.getUserMedia(constraint).then(stream => {
        localStream = stream;
        localStream.getVideoTracks()[0].getSettings().focusMode = "auto";
        video1.srcObject = localStream;
        // video1.play()
        videoTracks = stream.getVideoTracks();
        console.log('video track', videoTracks);
    }).catch(err => {
        console.log('error accessing media.', err);
    });
}
ws.addEventListener('open', async (e) => {
    console.log('open');
    ws.send(JSON.stringify({ test: 'test', imageprocess: true }));
    // console.log(stream)
    // startStream(constraints);
    startCam(constraints);
    console.log('stream', localStream);
    setTimeout(() => {
        console.log('stream', localStream);
        console.log('src', video1.srcObject);
        const encoder = new MediaRecorder(video1.srcObject);
        encoder.ondataavailable = async (event) => {
            console.log('available');
            ws.send(JSON.stringify({ frame: event.data }));
        };
        // encoder.addEventListener('dataavailable', function (event) {
        //     console.log('available')
        //     ws.send(JSON.stringify({ frame: event.data }));
        // });
        encoder.start();
    }, 3000);
});
ws.addEventListener('message', function (event) {
    // Receive video frames and display them.
    console.log('message from websocket!', event.data);
    const data = JSON.parse(event.data);
    const frame = new Image();
    // Wait for the image to load before drawing, otherwise nothing is rendered.
    frame.onload = () => canvas.getContext('2d').drawImage(frame, 0, 0, canvas.width, canvas.height);
    frame.src = data.frame;
});
view.py
from django.shortcuts import render

def video_processing(r):
    return render(r, 'main.html')
consumers.py
import json

from channels.generic.websocket import AsyncWebsocketConsumer


class VideoConsumer(AsyncWebsocketConsumer):
    async def connect(self):
        self.room_name = 'Test'
        await self.channel_layer.group_add(
            self.room_name,
            self.channel_name,
        )
        await self.accept()
        # return await super().connect()

    async def disconnect(self, code):
        await self.channel_layer.group_discard(self.room_name, self.channel_name)
        print('disconnected')
        # return await super().disconnect(code)

    # async def receive(self, text_data):
    async def receive(self, text_data=None, bytes_data=None):
        print(text_data[:500])
        received_data = json.loads(text_data)
        await self.send(text_data=json.dumps('Image Processing'))

    async def send_message(self, event):
        received_data = event['received_data']
        await self.send(text_data=json.dumps(received_data))
routing.py
from django.urls import re_path
from . import consumers

websocket_urlpatterns = [
    re_path(r'', consumers.VideoConsumer.as_asgi()),
]
Can someone please help me send the MediaStream data to Django Channels for image processing, then send the processed image back to the client and display it on #video?
I am open to any solution that streams the client's live webcam to Django for image processing with OpenCV and returns the processed video stream to the client for display.
For simplicity's sake, even a processed stream that just has something like "processed stream" written onto the video frames would be enough.
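To make the question more concrete, below is the rough, untested sketch of the consumer I have in mind on the Django side. It assumes the browser sends each frame as a base64-encoded JPEG data URL in a JSON message; the "frame" key and the Haar-cascade file are just placeholder choices of mine, not something that already works.

# consumers.py -- rough sketch only, not working code.
import base64
import json

import cv2
import numpy as np
from channels.generic.websocket import AsyncWebsocketConsumer

# Placeholder detector: the frontal-face Haar cascade bundled with opencv-python.
face_cascade = cv2.CascadeClassifier(
    cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')


class VideoConsumer(AsyncWebsocketConsumer):
    async def connect(self):
        await self.accept()

    async def receive(self, text_data=None, bytes_data=None):
        data = json.loads(text_data)
        # Expecting {"frame": "data:image/jpeg;base64,...."}: strip the prefix
        # and decode the JPEG bytes into an OpenCV image.
        b64 = data['frame'].split(',', 1)[1]
        img = cv2.imdecode(
            np.frombuffer(base64.b64decode(b64), dtype=np.uint8),
            cv2.IMREAD_COLOR)

        # Draw face boxes and a label so the round trip is visible on the client.
        gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
        for (x, y, w, h) in face_cascade.detectMultiScale(gray, 1.3, 5):
            cv2.rectangle(img, (x, y), (x + w, y + h), (0, 255, 0), 2)
        cv2.putText(img, 'processed stream', (10, 30),
                    cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2)

        # Re-encode and send the processed frame back as a data URL.
        _, buf = cv2.imencode('.jpg', img)
        await self.send(text_data=json.dumps({
            'frame': 'data:image/jpeg;base64,' + base64.b64encode(buf).decode('ascii'),
        }))

On the client side I realize that MediaRecorder's dataavailable event hands me a Blob, which JSON.stringify turns into an empty object, so I would probably have to draw video1 onto a hidden canvas and send canvas.toDataURL('image/jpeg') for each frame instead, then draw whatever comes back in the existing message handler. I am also aware that doing OpenCV work directly inside an async consumer blocks the event loop, so I am not sure this is the right direction at all, which is why I am asking.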