How to record webcam and audio using WebRTC and a server-based peer connection
I would like to record the user's webcam and audio and save it to a file on the server. These files could then be served up to other users.
I have no problems with playback, however I'm having problems getting the content to record.
My understanding is that the getUserMedia .record() function has not yet been implemented - only a proposal has been made for it so far.
I would like to create a peer connection on my server using the PeerConnection API. I understand this is a bit hacky, but I'm thinking it should be possible to create a peer on the server and record what the client-peer sends.
If this is possible, I should then be able to save this data as FLV or any other video format.
My preference is actually to record the webcam + audio client-side, to allow the client to re-record videos before uploading if they didn't like their first attempt. This would also allow for interruptions in network connections. I've seen some code which allows recording of individual 'images' from the webcam by sending the data to a canvas - that's cool, but I need the audio too.
Here's the client-side code I have so far:
<video autoplay></video>

<script language="javascript" type="text/javascript">
function onVideoFail(e) {
    console.log('webcam fail!', e);
};

function hasGetUserMedia() {
    // Note: Opera is unprefixed.
    return !!(navigator.getUserMedia || navigator.webkitGetUserMedia ||
              navigator.mozGetUserMedia || navigator.msGetUserMedia);
}

if (hasGetUserMedia()) {
    // Good to go!
} else {
    alert('getUserMedia() is not supported in your browser');
}

window.URL = window.URL || window.webkitURL;
navigator.getUserMedia = navigator.getUserMedia || navigator.webkitGetUserMedia ||
                         navigator.mozGetUserMedia || navigator.msGetUserMedia;

var video = document.querySelector('video');
var streamRecorder;
var webcamstream;

if (navigator.getUserMedia) {
    navigator.getUserMedia({audio: true, video: true}, function(stream) {
        video.src = window.URL.createObjectURL(stream);
        webcamstream = stream;
        // streamRecorder = webcamstream.record();
    }, onVideoFail);
} else {
    alert('failed');
}

function startRecording() {
    streamRecorder = webcamstream.record();
    setTimeout(stopRecording, 10000);
}

function stopRecording() {
    streamRecorder.getRecordedData(postVideoToServer);
}

function postVideoToServer(videoblob) {
    /* var x = new XMLHttpRequest();
       x.open('POST', 'uploadMessage');
       x.send(videoblob);
    */
    var data = {};
    data.video = videoblob;
    data.metadata = 'test metadata';
    data.action = "upload_video";
    jQuery.post("http://www.foundthru.co.uk/uploadvideo.php", data, onUploadSuccess);
}

function onUploadSuccess() {
    alert('video uploaded');
}
</script>

<div id="webcamcontrols">
    <a class="recordbutton" href="javascript:startRecording();">RECORD</a>
</div>
You should definitely have a look at Kurento. It provides a WebRTC server infrastructure that allows you to record from a WebRTC feed and much more. You can also find some examples for the application you are planning here. It is really easy to add recording capabilities to that demo, and store the media file in a URI (local disk or wherever).
The project is licensed under Apache 2.0 (formerly LGPL).
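A very rough sketch of how the recording wiring looks with the kurento-client Node.js module (the WebSocket URI, the file:// target and the signaling that delivers the browser's sdpOffer are placeholders here, so treat this as an outline rather than working tutorial code):

// Rough sketch: connect a WebRtcEndpoint to a RecorderEndpoint with kurento-client.
// The ws:// URI, the file:// target and the signaling glue are assumptions.
var kurento = require('kurento-client');

function startRecording(sdpOffer, callback) {
    kurento('ws://localhost:8888/kurento', function(error, client) {
        if (error) return callback(error);
        client.create('MediaPipeline', function(error, pipeline) {
            if (error) return callback(error);
            pipeline.create('WebRtcEndpoint', function(error, webRtc) {
                if (error) return callback(error);
                pipeline.create('RecorderEndpoint', {uri: 'file:///tmp/recording.webm'},
                        function(error, recorder) {
                    if (error) return callback(error);
                    // Feed whatever the browser sends into the recorder.
                    webRtc.connect(recorder, function(error) {
                        if (error) return callback(error);
                        webRtc.processOffer(sdpOffer, function(error, sdpAnswer) {
                            if (error) return callback(error);
                            recorder.record(function(error) {
                                if (error) return callback(error);
                                callback(null, sdpAnswer); // send the answer back to the browser
                            });
                        });
                    });
                });
            });
        });
    });
}

The tutorials linked below show the complete, up-to-date version of this flow.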
EDIT 1
Since this post, we've added new tutorials that show how to add the recorder in a couple of scenarios:
- kurento-hello-world-recording: simple recording tutorial, showing the different capabilities of the recording endpoint.
- kurento-one2one-recording: How to record a one-to-one communication in the media server.
- kurento-hello-world-repository: use an external repository to record the file.
Disclaimer: I'm part of the team that develops Kurento.
I believe using Kurento or other MCUs just for recording videos would be a bit of overkill, especially considering that Chrome has supported the MediaRecorder API since v47 and Firefox since v25. So at this juncture you might not even need an external JS library to do the job; try this demo I made to record video/audio using MediaRecorder:
Demo - works in Chrome and Firefox (the code for pushing the blob to the server was intentionally left out; a rough sketch of that step follows the snippet below)
Github Code Source
If you're running Firefox, you can test it right here (Chrome needs https):
'use strict'

let log = console.log.bind(console),
    id = val => document.getElementById(val),
    ul = id('ul'),
    gUMbtn = id('gUMbtn'),
    start = id('start'),
    stop = id('stop'),
    stream,
    recorder,
    counter = 1,
    chunks,
    media;

gUMbtn.onclick = e => {
    let mv = id('mediaVideo'),
        mediaOptions = {
            video: {
                tag: 'video',
                type: 'video/webm',
                ext: '.mp4',
                gUM: {video: true, audio: true}
            },
            audio: {
                tag: 'audio',
                type: 'audio/ogg',
                ext: '.ogg',
                gUM: {audio: true}
            }
        };
    media = mv.checked ? mediaOptions.video : mediaOptions.audio;
    navigator.mediaDevices.getUserMedia(media.gUM).then(_stream => {
        stream = _stream;
        id('gUMArea').style.display = 'none';
        id('btns').style.display = 'inherit';
        start.removeAttribute('disabled');
        recorder = new MediaRecorder(stream);
        recorder.ondataavailable = e => {
            chunks.push(e.data);
            if (recorder.state == 'inactive') makeLink();
        };
        log('got media successfully');
    }).catch(log);
}

start.onclick = e => {
    start.disabled = true;
    stop.removeAttribute('disabled');
    chunks = [];
    recorder.start();
}

stop.onclick = e => {
    stop.disabled = true;
    recorder.stop();
    start.removeAttribute('disabled');
}

function makeLink() {
    let blob = new Blob(chunks, {type: media.type}),
        url = URL.createObjectURL(blob),
        li = document.createElement('li'),
        mt = document.createElement(media.tag),
        hf = document.createElement('a');
    mt.controls = true;
    mt.src = url;
    hf.href = url;
    hf.download = `${counter++}${media.ext}`;
    hf.innerHTML = `download ${hf.download}`;
    li.appendChild(mt);
    li.appendChild(hf);
    ul.appendChild(li);
}
button {
    margin: 10px 5px;
}

li {
    margin: 10px;
}

body {
    width: 90%;
    max-width: 960px;
    margin: 0px auto;
}

#btns {
    display: none;
}

h1 {
    margin-bottom: 100px;
}
<link type="text/css" rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/css/bootstrap.min.css">

<h1>MediaRecorder API example</h1>
<p>For now it is supported only in Firefox (v25+) and Chrome (v47+)</p>

<div id='gUMArea'>
    <div>
        Record:
        <input type="radio" name="media" value="video" checked id='mediaVideo'>Video
        <input type="radio" name="media" value="audio">Audio
    </div>
    <button class="btn btn-default" id='gUMbtn'>Request Stream</button>
</div>

<div id='btns'>
    <button class="btn btn-default" id='start'>Start</button>
    <button class="btn btn-default" id='stop'>Stop</button>
</div>

<div>
    <ul class="list-unstyled" id='ul'></ul>
</div>

<script src="https://code.jquery.com/jquery-2.2.0.min.js"></script>
<script src="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.6/js/bootstrap.min.js"></script>
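For completeness, the upload step that the demo leaves out could look roughly like the sketch below; the '/upload' endpoint and the 'video' field name are placeholders for whatever your server expects, not part of the demo:

// Rough sketch of the omitted upload step; '/upload' and the 'video' field
// name are placeholders. Reuses `media` and `log` from the demo above.
function postBlobToServer(blob) {
    let fd = new FormData();
    fd.append('video', blob, `recording${media.ext}`);
    fetch('/upload', {method: 'POST', body: fd})
        .then(res => log(`upload finished with status ${res.status}`))
        .catch(log);
}

You could call it from makeLink() (or from a separate upload button) with the same blob that is handed to createObjectURL.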
Please check RecordRTC.
RecordRTC is MIT licensed on GitHub.
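A minimal sketch of RecordRTC's documented flow (check the project's README for the exact options your version supports):

// Minimal sketch based on RecordRTC's documented API.
navigator.mediaDevices.getUserMedia({video: true, audio: true})
    .then(function(stream) {
        var recorder = RecordRTC(stream, {type: 'video'});
        recorder.startRecording();

        // stop after 10 seconds and grab the result as a Blob
        setTimeout(function() {
            recorder.stopRecording(function() {
                var blob = recorder.getBlob();
                // upload the blob or preview it in a <video> element here
            });
        }, 10000);
    });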
Yes, as you understood, MediaStreamRecorder is currently unimplemented.
MediaStreamRecorder is a WebRTC API for recording getUserMedia() streams. It allows web apps to create a file from a live audio/video session.
Alternatively, you could do something like this: http://ericbidelman.tumblr.com/post/31486670538/creating-webm-video-from-getusermedia - but the audio part is missing.
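That approach boils down to drawing the video element onto a canvas at an interval and collecting the frames; a rough illustration is below (stitching the frames into an actual video file, e.g. with a library such as Whammy, is left out, and audio is still not captured):

// Rough illustration only: grab frames from the <video> element by drawing it
// onto a canvas. Audio is not captured, which is the limitation mentioned above.
var canvas = document.createElement('canvas');
var ctx = canvas.getContext('2d');
var frames = [];

function grabFrame(video) {
    canvas.width = video.videoWidth;
    canvas.height = video.videoHeight;
    ctx.drawImage(video, 0, 0);
    frames.push(canvas.toDataURL('image/webp'));
}

// e.g. call it on an interval while the stream is playing:
// setInterval(function() { grabFrame(document.querySelector('video')); }, 100);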