Logo Questions Linux Laravel Mysql Ubuntu Git Menu
 

How to concatenate chunks of incoming binary data into a video (webm) file in Node.js?

I am trying to upload chunks of base64 data to a Node.js server and save those chunks into one file.

// Accumulates every uploaded chunk in memory for the process lifetime.
// NOTE(review): never reset between recordings — a second recording would be
// appended after the first; confirm against the intended usage.
let chunks = [];

// Receives one base64-encoded chunk per POST and buffers it.
app.post('/api', (req, res) => {
    let {blob} = req.body;
    //converting chunks of base64 to buffer
    chunks.push(Buffer.from(blob, 'base64'));
    res.json({gotit:true})

});

// Concatenates all buffered chunks and writes them out as one webm file.
app.post('/finish', (req, res) => {
    let buf = Buffer.concat(chunks);
    fs.writeFile('finalvideo.webm', buf, (err) => {
        // NOTE(review): logs 'Ahh....' even on success (err is null then),
        // and a real write error is only logged, never reported to the client.
        console.log('Ahh....', err)
    });
    // NOTE(review): 'SAVED' and the response are emitted before the
    // asynchronous writeFile above has actually completed.
    console.log('SAVED')
    res.json({save:true})
});

The problem with the above code is that the resulting video is not playable, and I don't know why. Am I doing something wrong? I've also tried writable streams, but that isn't working either.

UPDATE - I

Instead of sending blobs I've implemented sending binary data, but even so I am hitting an error: TypeError: First argument must be a string, Buffer, ArrayBuffer, Array, or array-like object.

client.js

 // Class-field handler: converts the recorded Blob to a Uint8Array and POSTs it.
 postBlob = async blob => {
       let arrayBuffer = await new Response(blob).arrayBuffer();
        let binary = new Uint8Array(arrayBuffer)
        console.log(binary) // logging typed Uint8Array
        // NOTE(review): axios JSON-serializes this body, and JSON.stringify
        // turns a Uint8Array into a plain {"0":...,"1":...} object — the raw
        // bytes do not survive the round trip; verify what the server receives.
        axios.post('/api',{binary})
            .then(res => {
                console.log(res)
            })

    };

server.js

 // In-memory store for the binary chunks received so far.
 let chunks = [];

    app.post('/api', (req, res) => {
        let {binary} = req.body;



        // NOTE(review): after JSON transport `binary` arrives as a plain
        // object keyed by stringified indices, not a Uint8Array — presumably
        // why Buffer.from throws "First argument must be a string, Buffer,
        // ArrayBuffer, Array, or array-like object"; inspect req.body here.
        let chunkBuff = Buffer.from(binary) // This code throwing Error
        chunks.push(chunkBuff);

        console.log(chunkBuff)

         res.json({gotit:true})

    });

//Somehow combine those chunks into one file
// Final step: concatenate every received chunk and persist it as save.webm.
app.post('/finish', (req, res) => {
    console.log('Combinig the files',chunks.length);

     let buf = Buffer.concat(chunks);

    // NOTE(review): buf is empty because each /api request threw before
    // chunks.push(...) ran, so nothing was ever accumulated.
    console.log(buf) //empty buff
    fs.writeFile('save.webm', buf, (err) => {
        // NOTE(review): logged even on success (err is null then).
        console.log('Ahh....', err)
    });

    res.json({save:true})
});

UPDATE - II

I am able to receive the binary chunks and append them to a stream, but in the final video only the first chunk plays — I don't know what happened to the other chunks, and the video just ends.

code

// Append-mode stream shared by every request: each incoming chunk is written
// to APPENDED.webm in arrival order, without ever being closed.
const writeMyStream = fs.createWriteStream(__dirname+'/APPENDED.webm', {flags:'a', encoding:null});

app.post('/api', (req, res) => {
    let {binary} = req.body;
    // NOTE(review): `binary` has been through JSON serialization, so
    // new Uint8Array(binary) presumably does not reconstruct the original
    // bytes — which would explain why only the first, self-contained webm
    // chunk plays; verify the bytes on the wire.
 let chunkBuff = Buffer.from(new Uint8Array(binary));
    writeMyStream.write(chunkBuff);
res.json({gotit:true})

});

UPDATE - III

my client code | Note: I've commented out the other ways I tried to upload the blobs

     // Class-field handler: starts a MediaStreamRecorder on the given stream
     // and forwards each recorded chunk to postBlob every INT_REC ms.
     customRecordStream = stream => {



            let recorder = new MediaStreamRecorder(stream);
            recorder.mimeType = 'video/webm;codecs=vp9';


            // ondataavailable fires once per timeslice with the next Blob.
            recorder.ondataavailable = this.postBlob 
            recorder.start(INT_REC)

        };

 // Converts the recorded Blob to a Uint8Array and POSTs it as JSON.
 // The commented-out variants below are earlier upload attempts the author
 // kept for reference: plain arrays, multipart FormData, and data-URL strings.
 postBlob = async blob => {


        let arrayBuffer = await new Response(blob).arrayBuffer();
        let binary = new Uint8Array(arrayBuffer)

        // NOTE(review): as in UPDATE I, the Uint8Array is JSON-serialized by
        // axios here, so the server does not receive raw bytes.
            axios.post('/api',{binary})
                .then(res => {
                    console.log(res)
                })
        // let binaryUi8 = new Uint8Array(arrayBuffer);
        // let binArr = Array.from(binaryUi8);
        // // console.log(new Uint8Array(arrayBuffer))
        //
        // console.log(blob);


        // console.log(binArr)

        // let formData = new FormData();
        // formData.append('fname', 'test.webm')
        // formData.append("file", blob);
        //
        // console.log(formData,'Checjk Me',blob)
        // axios({
        //     method:'post',
        //     url:'/api',
        //     data:formData,
        //     config: { headers: {'Content-Type': 'multipart/form-data' }}
        // }).then(res => {
        //     console.log(res,'FROM SERBER')
        //
        // })
        //
        //
        //     .then(res => {
        //         console.log(res)
        //     })

        // this.blobToDataURL(blob, (blobURL) => {
        //
        //     axios.post('/api',{blob:blobURL})
        //         .then(res => {
        //             console.log(res)
        //         })
        // })


    };
like image 764
Nane Avatar asked Jun 30 '19 15:06

Nane


2 Answers

I was able to get this working by converting to base64 encoding on the front-end with the FileReader api. On the backend, create a new Buffer from the data chunk sent and write it to a file stream. Some key things with my code sample:

  1. I'm using fetch because I didn't want to pull in axios.
  2. When using fetch, you have to make sure you use bodyParser on the backend
  3. I'm not sure how much data you're collecting in your chunks (i.e. the duration value passed to the start method on the MediaRecorder object), but you'll want to make sure your backend can handle the size of the data chunk coming in. I set mine really high to 50MB, but this may not be necessary.
  4. I never close the write stream explicitly... you could potentially do this in your /final route. Otherwise, createWriteStream defaults to AutoClose, so the node process will do it automatically.

Full working example below:

Front End:

// Playback-side scaffolding: once the MediaSource is attached to a <video>
// element (not shown) it fires 'sourceopen', and handleSourceOpen below
// creates the SourceBuffer.
const mediaSource = new MediaSource();
mediaSource.addEventListener('sourceopen', handleSourceOpen, false);
let mediaRecorder;  // NOTE(review): assigned elsewhere (not shown) — confirm
let sourceBuffer;   // populated by handleSourceOpen

/**
 * Starts recording the given MediaStream and posts each recorded chunk to
 * the backend via postBlob.
 *
 * Fixes from review: `recorder` was assigned without a declaration (implicit
 * global), and the `stream` parameter was ignored in favour of `window.stream`.
 *
 * @param {MediaStream} stream - the capture stream to record
 */
function customRecordStream(stream) {
  // should actually check MediaRecorder.isTypeSupported(options.mimeType) here.
  const options = { mimeType: 'video/webm;codecs=vp9' };
  const recorder = new MediaRecorder(stream, options);
  recorder.ondataavailable = postBlob;
  recorder.start(INT_REC); // INT_REC: timeslice (ms) between dataavailable events
}

// "dataavailable" handler: forwards non-empty recorded chunks to the uploader.
function postBlob(event) {
  // Ignore events that carry no data (e.g. fired on stop with nothing new).
  if (!(event.data && event.data.size > 0)) {
    return;
  }
  sendBlobAsBase64(event.data);
}

// MediaSource 'sourceopen' callback: prepares a SourceBuffer for playback.
// NOTE(review): declared as vp8 here while the recorder uses vp9 — confirm
// the intended playback codec; this buffer is unused in the shown upload path.
function handleSourceOpen(event) {
  sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vp8"');
} 

// Serializes a Blob to base64 (via a data: URL) and ships it to the server.
function sendBlobAsBase64(blob) {
  const reader = new FileReader();

  reader.addEventListener('load', () => {
    // reader.result is "data:<mime>;base64,<payload>" — keep only the payload.
    const base64EncodedData = reader.result.split(',')[1];
    console.log(base64EncodedData)
    sendDataToBackend(base64EncodedData);
  });

  reader.readAsDataURL(blob);
};

/**
 * POSTs one base64-encoded chunk to the backend as JSON.
 *
 * Fix from review: the fetch promise was left floating, so callers could not
 * await completion or observe failures. Returning the chain is
 * backward-compatible (existing callers simply ignore the return value).
 *
 * @param {string} base64EncodedData - base64 payload of one recorded chunk
 * @returns {Promise<void>} resolves after the server response is logged
 */
function sendDataToBackend(base64EncodedData) {
  const body = JSON.stringify({
    data: base64EncodedData,
  });
  return fetch('/api', {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
    },
    body,
  })
    .then((res) => res.json())
    .then((json) => console.log(json));
};

Back End:

const fs = require('fs');
const path = require('path');            // required but unused in the shown code
const express = require('express');
const bodyParser = require('body-parser');
const app = express();
const server = require('http').createServer(app);  // presumably .listen()ed elsewhere — confirm

app.use(bodyParser.urlencoded({ extended: true }));
// JSON limit raised to 50MB so large base64 chunks are not rejected with 413
// by body-parser's 100kb default.
app.use(bodyParser.json({ limit: "50MB", type:'application/json'}));

// Receives one base64-encoded chunk as JSON and appends its decoded bytes to
// finalvideo.webm. Responds {gotit:true} on success, {gotit:false} on error.
app.post('/api', (req, res) => {
  try {
    const { data } = req.body;
    // Buffer.from replaces the deprecated (and removed-in-newer-Node)
    // `new Buffer(...)` constructor.
    const dataBuffer = Buffer.from(data, 'base64');
    // Append mode so successive chunks build up the file. end() both writes
    // the chunk and closes the stream, releasing the file descriptor that the
    // original code leaked on every request (write() alone never closed it).
    const fileStream = fs.createWriteStream('finalvideo.webm', { flags: 'a' });
    fileStream.end(dataBuffer);
    console.log(dataBuffer);
    return res.json({ gotit: true });
  } catch (error) {
    console.log(error);
    return res.json({ gotit: false });
  }
});
like image 59
willascend Avatar answered Nov 04 '22 04:11

willascend


Inspired by @willascend's answer:

Backend-side:

// Accept raw binary bodies. The limit is raised from express.raw()'s 100kb
// default because recorded media chunks routinely exceed it (the other answer
// uses a 50MB JSON limit for the same reason); the default type already
// matches the client's application/octet-stream.
app.use(express.raw({ limit: '50mb' }));
app.post('/video-chunck', (req, res) => {
    // end(chunk) appends the chunk AND closes the stream, so the file
    // descriptor opened for this request is released (write() left it open).
    fs.createWriteStream('myvideo.webm', { flags: 'a' }).end(req.body);
    res.sendStatus(200);
});

Frontend-side:

// Upload each non-empty recorded chunk as a raw octet-stream body.
mediaRecorder.ondataavailable = event => {
    // Skip events that carry no data.
    if (!(event.data && event.data.size > 0)) {
        return;
    }
    fetch(this.serverUrl + '/video-chunck', {
        method: 'POST',
        headers: {'Content-Type': 'application/octet-stream'},
        body: event.data
    });
};

My express version is 4.17.1

like image 20
Thibaud Michel Avatar answered Nov 04 '22 05:11

Thibaud Michel