Display getUserMedia live video with Media Source Extensions (MSE)

I am trying to display a MediaStream taken from a webcam using getUserMedia, and to relay it to a remote peer so it can be played there (as an experiment). I am not using WebRTC directly, because I want control over the raw data.

The problem is that my video element displays nothing, and I get no errors back. I am using Chrome 51.0.2704.103 (64-bit) on elementary OS (an Ubuntu 14.04-based Linux distribution).

As a side note: if I record all the blobs into an array, then create a new Blob from them and set the video's src attribute to URL.createObjectURL(blob), the video plays correctly.
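
For reference, the working record-then-play version looks roughly like this (a sketch, not my exact code; variable names are illustrative):

var chunks = [];
recorder.ondataavailable = function (event) {
    chunks.push(event.data);               // collect every recorded Blob
};
recorder.onstop = function () {
    var blob = new Blob(chunks, { type: "video/webm" });
    video.src = URL.createObjectURL(blob); // plays fine, but only after recording stops
};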

Here is the code I am using (minus the relaying; I'm just trying to play it back locally):

var ms = new MediaSource();
var video = document.querySelector("video");
video.src = window.URL.createObjectURL(ms); // attach the MediaSource to the <video>

ms.addEventListener("sourceopen", function() {
    var sourceBuffer = ms.addSourceBuffer('video/webm; codecs="vorbis,vp8"');

    navigator.getUserMedia({video: {width: 320, height: 240, framerate: 30}, audio: true}, function(stream) {
        var recorder = new MediaRecorder(stream);

        recorder.ondataavailable = function(event) {
            // Convert each recorded Blob chunk to an ArrayBuffer and append it
            var reader = new FileReader();
            reader.addEventListener("loadend", function () {
                var uint8Chunk = new Uint8Array(reader.result);
                if (!sourceBuffer.updating) { // chunks arriving mid-update are skipped
                    sourceBuffer.appendBuffer(uint8Chunk);
                }
                if (video.paused) video.play();
            });
            reader.readAsArrayBuffer(event.data);
        };

        recorder.start(10); // emit a dataavailable event every 10 ms
    }, function(error) {
        console.error(error);
    });
}, false);

Here is the info I get in chrome://media-internals:

render_id: 147
player_id: 0
pipeline_state: kPlaying
event: WEBMEDIAPLAYER_CREATED
url: blob:http%3A//localhost%3A8080/e5c51dd8-5709-4e6f-9457-49ac8c34756b
found_audio_stream: true
audio_codec_name: opus
found_video_stream: true
video_codec_name: vp8
duration: unknown
audio_dds: false
audio_decoder: OpusAudioDecoder
video_dds: false
video_decoder: FFmpegVideoDecoder

Also the log:

00:00:00 00 pipeline_state  kCreated
00:00:00 00 event   WEBMEDIAPLAYER_CREATED
00:00:00 00 url blob:http%3A//localhost%3A8080/e5c51dd8-5709-4e6f-9457-49ac8c34756b
00:00:00 00 pipeline_state  kInitDemuxer
00:00:01 603    found_audio_stream  true
00:00:01 603    audio_codec_name    opus
00:00:01 603    found_video_stream  true
00:00:01 603    video_codec_name    vp8
00:00:01 604    duration    unknown
00:00:01 604    pipeline_state  kInitRenderer
00:00:01 604    audio_dds   false
00:00:01 604    audio_decoder   OpusAudioDecoder
00:00:01 604    video_dds   false
00:00:01 604    video_decoder   FFmpegVideoDecoder
00:00:01 604    pipeline_state  kPlaying

Update: I've tried sending the data to Node, saving it to a WebM file with ffmpeg (via fluent-ffmpeg), and the resulting file plays correctly in VLC.
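
The Node side is roughly this (a sketch; the transport, function names, and file names are illustrative):

var fs = require("fs");
var ffmpeg = require("fluent-ffmpeg");

var raw = fs.createWriteStream("capture.webm");

// called once per chunk received from the browser (e.g. over a socket)
function onChunk(buffer) {
    raw.write(buffer);
}

// once the stream ends, remux the raw capture into a clean file
function onDone() {
    raw.end(function () {
        ffmpeg("capture.webm")
            .output("capture-fixed.webm")
            .on("end", function () { console.log("remux done"); })
            .run();
    });
}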

Update 2: After streaming the data back from Node, I get the following error: Media segment did not contain any video coded frames, mismatching initialization segment. Therefore, MSE coded frame processing may not interoperably detect discontinuities in appended media. After some research, it appears that WebM files must be segmented to work with MSE; however, I have not come across a way to do this for a live stream (with ffmpeg or other tools). Any ideas here?

asked Jun 22 '16 by Ionut Campean

1 Answer

A little late, but you can try it like this (in Chrome):

<html>

<body>
    <video class="real1" autoplay controls></video>
    <video class="real2" controls></video>

    <script>
        const constraints = { video: { width: 320, height: 240, framerate: 30 }, audio: true };
        // Use the same MIME/codec string for both the recorder and the SourceBuffer
        const mimeType = 'video/webm; codecs=opus,vp8';

        const video1 = document.querySelector('.real1');
        const video2 = document.querySelector('.real2');

        const mediaSource = new MediaSource();
        video2.src = window.URL.createObjectURL(mediaSource);

        let sourceBuffer;
        const queue = []; // chunks waiting while the SourceBuffer is busy

        mediaSource.addEventListener('sourceopen', function () {
            sourceBuffer = mediaSource.addSourceBuffer(mimeType);
            console.log(sourceBuffer);
            // drain the queue whenever the previous append finishes
            sourceBuffer.addEventListener('updateend', appendNext);
        });

        function appendNext() {
            if (sourceBuffer && !sourceBuffer.updating && queue.length) {
                sourceBuffer.appendBuffer(queue.shift());
            }
        }

        let mediaRecorder;
        function handleSuccess(stream) {
            video1.srcObject = stream; // live preview straight from the camera
            mediaRecorder = new MediaRecorder(stream, { mimeType: mimeType });
            console.log(mediaRecorder.mimeType);

            mediaRecorder.ondataavailable = function (e) {
                const reader = new FileReader();
                reader.onload = function (e) {
                    // appendBuffer throws while an append is still in progress,
                    // so queue the chunk and append when the buffer is idle
                    queue.push(new Uint8Array(e.target.result));
                    appendNext();
                };
                reader.readAsArrayBuffer(e.data);

                if (video2.paused) {
                    video2.play();
                }
            };
            mediaRecorder.start(20); // deliver a dataavailable chunk every 20 ms
        }

        function handleError(error) {
            console.error('Reeeejected!', error);
        }

        navigator.mediaDevices.getUserMedia(constraints)
            .then(handleSuccess)
            .catch(handleError);
    </script>
</body>

</html>

I think you missed setting the same (supported) codec on both the recorder and the SourceBuffer.
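
If in doubt, both ends can be probed for support of the exact MIME string (the string below is just the one from the snippet above):

const mime = 'video/webm; codecs=opus,vp8';
console.log(MediaRecorder.isTypeSupported(mime)); // recorder side
console.log(MediaSource.isTypeSupported(mime));   // MSE side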

answered Nov 17 '22 by Cracker0dks