Below is a tutorial on how to use the Screen Capture API in JavaScript.
First, we will create an index.html file in a newly created project.
<!DOCTYPE html>
<html>
  <head>
    <meta charset="UTF-8" />
    <link rel="stylesheet" type="text/css" href="index.css" />
    <link
      href="https://cdn.jsdelivr.net/npm/bootstrap@5.3.0-alpha1/dist/css/bootstrap.min.css"
      rel="stylesheet"
      integrity="sha384-GLhlTQ8iRABdZLl6O3oVMWSktQOp6b7In1Zl3/Jr59b6EGGoI1aFkw7cmDA6j6gD"
      crossorigin="anonymous"
    />
    <title>Screen Capture API Demo</title>
  </head>
  <body>
    <div class="container">
      <h3>
        Click the "Start" button to begin video recording. You can stop the
        video by clicking the creatively-named "Stop" button. The "Download"
        button will download the recorded data.
      </h3>
      <br />
      <div class="row">
        <div class="col-md-4 col-sm-12">
          <h2>Preview</h2>
          <video id="preview" width="300" height="200" autoplay muted></video>
          <button id="startButton" class="btn btn-primary mt-2">
            Start Recording
          </button>
          <button id="stopButton" class="btn btn-danger mt-2">
            Stop Recording
          </button>
          <div>
            <pre id="log"></pre>
          </div>
        </div>
        <div class="col-md-4 col-sm-12">
          <h2>Recording</h2>
          <video id="recording" width="300" height="200" controls></video>
          <br />
          <a id="downloadButton" class="btn btn-success mt-2"> Download </a>
        </div>
      </div>
    </div>
    <script src="script.js"></script>
  </body>
</html>
I am using Bootstrap as the styling library and have added some CSS styling in the index.css file.
body {
  font: 14px "Open Sans", "Arial", sans-serif;
}

.container {
  margin: auto;
  background: paleturquoise;
  height: 800px;
}

video {
  margin-top: 2px;
  border: 1px solid black;
}
The result will look something like this.
Now we will write some JavaScript. First, create the script.js file that index.html already references with the script tag above.
We start by grabbing all the elements by the IDs we defined.
let preview = document.getElementById("preview");
let recording = document.getElementById("recording");
let startButton = document.getElementById("startButton");
let stopButton = document.getElementById("stopButton");
let downloadButton = document.getElementById("downloadButton");
let logElement = document.getElementById("log");
Whenever the user clicks the "Start Recording" button, we call navigator.mediaDevices.getDisplayMedia() and pass it an options object with two keys, video and audio. This configuration asks the browser to capture both video and audio (note that audio capture is only honored by some browsers and for some share targets, such as tabs).
navigator.mediaDevices
  .getDisplayMedia({
    video: true,
    audio: true,
  })
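As an aside (not used in the rest of this tutorial), getDisplayMedia() also accepts standard media track constraints in place of the plain true values, so you can hint at properties like frame rate. A minimal sketch, assuming you only want roughly 30 fps:
// Hypothetical variation: ask for ~30 fps screen video along with audio.
navigator.mediaDevices.getDisplayMedia({
  video: { frameRate: { ideal: 30 } },
  audio: true,
});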
getDisplayMedia() returns a promise. In the then block we receive the stream, which is a MediaStream object. We will use this stream to show the live preview in the preview window.
navigator.mediaDevices
  .getDisplayMedia({
    video: true,
    audio: true,
  })
  .then((stream) => {
    preview.srcObject = stream;
    preview.captureStream =
      preview.captureStream || preview.mozCaptureStream;
    return new Promise((resolve) => (preview.onplaying = resolve));
  })
Now we will chain another then block onto the same promise. It calls a startRecording method and passes it preview.captureStream(). captureStream() returns a MediaStream object that streams a real-time capture of the content being rendered in the media element.
navigator.mediaDevices
  .getDisplayMedia({
    video: true,
    audio: true,
  })
  .then((stream) => {
    preview.srcObject = stream;
    preview.captureStream =
      preview.captureStream || preview.mozCaptureStream;
    return new Promise((resolve) => (preview.onplaying = resolve));
  })
  .then(() => startRecording(preview.captureStream()))
The startRecording method looks like this:
function startRecording(stream) {
  // MediaRecorder is the JavaScript API for recording media; it provides
  // pause, resume, start, stop, and requestData (request a blob of the media recorded so far).
  recorder = new MediaRecorder(stream);
  let data = [];
  // ondataavailable fires periodically, each time "timeslice" milliseconds of media have
  // been recorded, or once the entire media is recorded if no timeslice is specified.
  recorder.ondataavailable = (event) => data.push(event.data);
  recorder.start(); // start the recording
  log("Recording...");
  // this promise resolves when the recorder stops (or rejects on error)
  let stopped = new Promise((resolve, reject) => {
    recorder.onstop = resolve;
    recorder.onerror = (event) => reject(event.error);
  });
  // once recording has stopped completely, return the recorded data
  return Promise.all([stopped, recorder]).then(() => data);
}
In the above method, we use a log helper to display messages in the app:
function log(msg) {
  // display messages on screen
  logElement.innerHTML = msg + "\n";
}
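The comments in startRecording mention the optional timeslice argument and requestData(). As a rough sketch (not part of this tutorial's final code), you could make the recorder emit a chunk roughly every second and request an early chunk on demand:
// Hypothetical variation: fire ondataavailable about every 1000 ms.
recorder.start(1000);

// Later, without stopping, ask for whatever has been recorded so far:
recorder.requestData(); // triggers an extra dataavailable event with the current chunk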
Now, we have to add the functionality for the “stop” button.
stopButton.addEventListener(
  "click",
  function () {
    // pass the preview's stream (a MediaStream) as the argument
    stop(preview.srcObject);
  },
  false
);

function stop(stream) {
  if (recorder.state == "recording") {
    recorder.stop();
  }
  // getTracks() returns all the MediaStreamTrack objects in the stream;
  // we stop every one of them
  stream.getTracks().forEach((track) => track.stop());
}
When you stop the recording, this stops the recorder instance as well as every track in the stream.
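Keep in mind that the user can also end the capture from the browser's own "Stop sharing" control. If you want to handle that case too, one option (an optional addition, not part of the code above) is to listen for the video track's ended event and reuse the same stop logic:
// Optional: also stop the recorder when the user ends sharing via the browser UI.
// (Run this after preview.srcObject has been assigned in the first then block.)
preview.srcObject.getVideoTracks()[0].onended = () => {
  if (recorder && recorder.state === "recording") {
    recorder.stop();
  }
};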
After this, we will add the recorded-video preview and the download functionality. For this, we add another then block to the getDisplayMedia promise chain.
navigator.mediaDevices
  .getDisplayMedia({
    video: true,
    audio: true,
  })
  .then((stream) => {
    preview.srcObject = stream;
    preview.captureStream =
      preview.captureStream || preview.mozCaptureStream;
    return new Promise((resolve) => (preview.onplaying = resolve));
  })
  .then(() => startRecording(preview.captureStream()))
  .then((recordedChunks) => {
    let recordedBlob = new Blob(recordedChunks, { type: "video/webm" });
    recording.src = URL.createObjectURL(recordedBlob);
    downloadButton.href = recording.src;
    downloadButton.download = "RecordedVideo.webm";
    log(
      "Successfully recorded " +
        recordedBlob.size +
        " bytes of " +
        recordedBlob.type +
        " media."
    );
  })
  .catch(log);
In the last then block we receive the recorded media chunks. We create a Blob with the type "video/webm", create an object URL from it, and set recording.src to that URL, which previews the recorded media. We then set the download button's href to the same URL, and downloadButton.download to the name we want for the exported file.
When we click the "Download" button, the browser will download the recorded media.
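One caveat: not every browser records to WebM by default. If you want to be explicit about the container and codec, MediaRecorder lets you check support and request a mime type up front; a small sketch, where the codec string is just an example:
// Hypothetical refinement: pick a supported mime type instead of assuming WebM.
const mimeType = MediaRecorder.isTypeSupported("video/webm;codecs=vp9")
  ? "video/webm;codecs=vp9"
  : "video/webm";
recorder = new MediaRecorder(stream, { mimeType });
// ...and reuse the same string when building the Blob:
// new Blob(recordedChunks, { type: mimeType });
You may also want to call URL.revokeObjectURL(recording.src) once the preview and download are no longer needed, to release the memory held by the object URL.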
The final script.js file will look something like this.
let preview = document.getElementById("preview");
let recording = document.getElementById("recording");
let startButton = document.getElementById("startButton");
let stopButton = document.getElementById("stopButton");
let downloadButton = document.getElementById("downloadButton");
let logElement = document.getElementById("log");
let recorder;

function log(msg) {
  // display messages on screen
  logElement.innerHTML = msg + "\n";
}

function startRecording(stream) {
  // MediaRecorder is the JavaScript API for recording media; it provides
  // pause, resume, start, stop, and requestData (request a blob of the media recorded so far).
  recorder = new MediaRecorder(stream);
  let data = [];
  // ondataavailable fires periodically, each time "timeslice" milliseconds of media have
  // been recorded, or once the entire media is recorded if no timeslice is specified.
  recorder.ondataavailable = (event) => data.push(event.data);
  recorder.start(); // start the recording
  log("Recording...");
  // this promise resolves when the recorder stops (or rejects on error)
  let stopped = new Promise((resolve, reject) => {
    recorder.onstop = resolve;
    recorder.onerror = (event) => reject(event.error);
  });
  // once recording has stopped completely, return the recorded data
  return Promise.all([stopped, recorder]).then(() => data);
}

function stop(stream) {
  if (recorder.state == "recording") {
    recorder.stop();
  }
  // getTracks() returns all the MediaStreamTrack objects in the stream;
  // we stop every one of them
  stream.getTracks().forEach((track) => track.stop());
}

startButton.addEventListener(
  "click",
  function () {
    navigator.mediaDevices
      .getDisplayMedia({
        video: true,
        audio: true,
      })
      .then((stream) => {
        // stream is a MediaStream of the shared screen
        preview.srcObject = stream;
        preview.captureStream =
          preview.captureStream || preview.mozCaptureStream;
        return new Promise((resolve) => (preview.onplaying = resolve));
      })
      // captureStream() returns a MediaStream object which is streaming a
      // real-time capture of the content being rendered in the media element.
      .then(() => startRecording(preview.captureStream()))
      .then((recordedChunks) => {
        let recordedBlob = new Blob(recordedChunks, { type: "video/webm" });
        recording.src = URL.createObjectURL(recordedBlob);
        downloadButton.href = recording.src;
        downloadButton.download = "RecordedVideo.webm";
        log(
          "Successfully recorded " +
            recordedBlob.size +
            " bytes of " +
            recordedBlob.type +
            " media."
        );
      })
      .catch(log);
  },
  false
);

stopButton.addEventListener(
  "click",
  function () {
    // pass the preview's stream (a MediaStream) as the argument
    stop(preview.srcObject);
  },
  false
);
The final app will look like this.
GitHub- https://github.com/sumankalia/screen_capture_and_download
Live Preview- https://main--cool-semolina-7c039d.netlify.app/
Youtube tutorial link in “Hindi”-
Thanks for reading 😀