Send MediaRecorder blobs to server and build file on backend
For those who are still interested in the flow of continuously saving the media stream using the MediaRecorder API and WebSockets...
Client side:
const ws = new WebSocket(someWsUrl);
const mediaStream = new MediaStream();
const videoTrack = someStream.getVideoTracks()[0];
const audioTrack = someStream.getAudioTracks()[0];
mediaStream.addTrack(videoTrack);
mediaStream.addTrack(audioTrack);
const recorderOptions = {
mimeType: 'video/webm',
videoBitsPerSecond: 200000 // 0.2 Mbit/sec.
};
const mediaRecorder = new MediaRecorder(mediaStream, recorderOptions);

mediaRecorder.ondataavailable = (event) => {
  console.debug('Got blob data:', event.data);
  // Forward non-empty chunks, and only while the socket is open
  if (event.data && event.data.size > 0 && ws.readyState === WebSocket.OPEN) {
    ws.send(event.data);
  }
};

mediaRecorder.start(1000); // 1000 - the number of milliseconds to record into each Blob
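When the recording ends, the last chunk is delivered in a final dataavailable event before the recorder's stop event fires, so it is safest to close the socket only from onstop. A minimal sketch of that shutdown step, reusing the same mediaRecorder and ws (the stopRecording function name is just for illustration):

function stopRecording() {
  // onstop fires after the final dataavailable event has been delivered,
  // so the last chunk has already been handed to ws.send by this point
  mediaRecorder.onstop = () => {
    ws.close();
  };
  mediaRecorder.stop();
}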
Server side:
const fs = require('fs');
const WebSocket = require('ws');

const wss = new WebSocket.Server({ port: 3000 });

wss.on('connection', (ws, req) => {
  // filePath must point to the .webm file the chunks are appended to
  const fileStream = fs.createWriteStream(filePath, { flags: 'a' });

  ws.on('message', message => {
    // The client sends raw Blob data, which arrives here as a Buffer
    fileStream.write(message);
  });
});
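One small thing worth adding (a sketch reusing the connection handler above): end the write stream when the client disconnects, so buffered data is flushed and the file descriptor is released.

// Inside the 'connection' handler shown above
ws.on('close', () => {
  fileStream.end();
});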
This is my solution, in case it helps anybody:
I send the chunks in binary format (I chose Uint8Array) and append each chunk to a file on the server side, after packing the received data back into binary using the same unsigned-char encoding.
Client-side JavaScript:
let order = 0;

mediaRecorder.ondataavailable = async (e) => {
  if (e.data && e.data.size > 0) {
    const reader = new FileReader();
    reader.readAsArrayBuffer(e.data);
    reader.onloadend = async function (event) {
      const arrayBuffer = reader.result;
      const uint8View = new Uint8Array(arrayBuffer);
      // Capture the sequence number synchronously, before the await,
      // so overlapping uploads don't reuse the same value
      const chunkOrder = order;
      order += 1;
      const response = await fetch('save-video.php', {
        method: 'POST',
        body: JSON.stringify({
          chunk: Array.from(uint8View), // plain array of byte values
          order: chunkOrder
        })
      });
    };
  }
};
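One caveat with firing a separate fetch per chunk: on a slow connection the requests can complete out of order, and the PHP script below appends chunks in the order they arrive, so the resulting file can end up scrambled. A possible workaround, sketched here rather than taken from the original code, is to chain the uploads so that each chunk is only sent once the previous one has been stored:

let uploadQueue = Promise.resolve();

function uploadChunk(uint8View, chunkOrder) {
  // Chain onto the previous upload so chunks reach the server in order
  uploadQueue = uploadQueue.then(() =>
    fetch('save-video.php', {
      method: 'POST',
      body: JSON.stringify({
        chunk: Array.from(uint8View),
        order: chunkOrder
      })
    })
  );
  return uploadQueue;
}

In the onloadend handler above you would then call uploadChunk(uint8View, chunkOrder) instead of calling fetch directly.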
Server-side PHP:
<?php
$request = json_decode(file_get_contents("php://input"), true);
$chunk = $request['chunk'];
$order = $request['order'];

// Re-pack the array of byte values into a binary string (unsigned chars)
$binarydata = pack("C*", ...$chunk);

$filePath = "uploads/file.webm";

// The first chunk creates/overwrites the file, later chunks are appended
$out = fopen($filePath, $order == 0 ? "wb" : "ab");
if ($out) {
    fwrite($out, $binarydata);
    fclose($out);
}
?>
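As a design note, the FileReader / JSON / pack round trip is not strictly required: a Blob can be posted directly as the request body, with the sequence number moved into the query string, and the PHP side would then simply append the raw request body (file_get_contents('php://input') with FILE_APPEND). A sketch of what the client handler could look like under that assumption:

mediaRecorder.ondataavailable = (e) => {
  if (e.data && e.data.size > 0) {
    const chunkOrder = order;
    order += 1;
    // Send the Blob itself; no FileReader or JSON encoding needed
    fetch(`save-video.php?order=${chunkOrder}`, {
      method: 'POST',
      body: e.data
    });
  }
};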
So I solved it by doing the following (once the merge action is called):
const fs = require('fs');

const dir = `${__dirname}/.tmp/`;
const fileName = getFileNameFromEvent(eventId);
const path = dir + fileName;
// First get the path of every file chunk, ordered (otherwise the merged file will come out broken)
let recParts = await RecordingParts.find({
where: {
filename: fileName
}
}).sort('index ASC');
let wstream = fs.createWriteStream(path);
for (let i = 0; i < recParts.length; i++){
let aux = await readFile(recParts[i].tmpPath, null);
wstream.write(aux);
//Delete chunks
fs.unlink(recParts[i].tmpPath, (err) => {
if (err) throw err;
});
}
wstream.end();
//Utils function
const readFile = (path, opts = 'utf8') =>
new Promise((res, rej) => {
fs.readFile(path, opts, (err, data) => {
if (err) rej(err);
else res(data)
})
});
After wstream.end(), you will have the merged file at path.
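Strictly speaking, the data is only guaranteed to be flushed to disk once the stream emits its 'finish' event. A small sketch (using the same wstream and path as above) that replaces the bare wstream.end() call with a promise resolving on that event:

// Instead of calling wstream.end() on its own:
await new Promise((resolve, reject) => {
  wstream.on('finish', resolve); // all buffered data has been written out
  wstream.on('error', reject);
  wstream.end();
});
// The merged file at path is now fully written and safe to use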