WebRTC Play Audio Input as Microphone

I want to play an audio file as the microphone input (sending the audio file instead of my live voice) to the connected WebRTC user. Can anybody tell me how this can be done?

I have tried the following approaches in JavaScript:

1. base64 Audio

<script>
var base64string = "T2dnUwACAAAAAAA..";

// play the base64-encoded file once via an Audio element
var snd = new Audio("data:audio/wav;base64," + base64string);
snd.play();

// helper that creates, plays and cleans up an Audio element
var Sound = (function () {
    var df = document.createDocumentFragment();
    return function Sound(src) {
        var snd = new Audio(src);
        df.appendChild(snd);
        snd.addEventListener('ended', function () { df.removeChild(snd); });
        snd.play();
        return snd;
    };
}());

var snd = Sound("data:audio/wav;base64," + base64string);
</script>

2. AudioBuffer

window.AudioContext = window.AudioContext || window.webkitAudioContext;

var audioContext = new AudioContext();
var isPlaying = false;
var isLiveInput = false;
var sourceNode = null;
var analyser = null;
var theBuffer = null;

// load and decode the audio file up front
window.onload = function() {
    var request = new XMLHttpRequest();
    request.open("GET", "sounds/DEMO_positive_resp.wav", true);
    request.responseType = "arraybuffer";
    request.onload = function() {
        audioContext.decodeAudioData(request.response, function(buffer) {
            theBuffer = buffer;
        });
    };
    request.send();
};

function togglePlayback() {
    var now = audioContext.currentTime;

    if (isPlaying) {
        // stop playing and return
        sourceNode.stop(now);
        sourceNode = null;
        analyser = null;
        isPlaying = false;
        if (!window.cancelAnimationFrame)
            window.cancelAnimationFrame = window.webkitCancelAnimationFrame;
        //window.cancelAnimationFrame( rafID );
        return "start";
    }

    // play the decoded buffer through an analyser to the speakers
    sourceNode = audioContext.createBufferSource();
    sourceNode.buffer = theBuffer;
    sourceNode.loop = true;

    analyser = audioContext.createAnalyser();
    analyser.fftSize = 2048;
    sourceNode.connect(analyser);
    analyser.connect(audioContext.destination);
    sourceNode.start(now);
    isPlaying = true;
    isLiveInput = true;
    return "stop";
}

Please help me out with this; it would be highly appreciated.

asked Aug 27 '14 by webwarrior

1 Answer

Here is a demo that may help you stream MP3 or WAV audio using Chrome:

  • https://www.webrtc-experiment.com/RTCMultiConnection/stream-mp3-live.html

Here is how it is written:

  • http://www.rtcmulticonnection.org/docs/getting-started/#stream-mp3-live

And the source code of the demo:

  • https://github.com/muaz-khan/RTCMultiConnection/blob/master/demos/stream-mp3-live.html
  • https://github.com/muaz-khan/WebRTC-Experiment/issues/222

Using it in third-party WebRTC applications:

window.AudioContext = window.AudioContext || window.webkitAudioContext;

var context = new AudioContext();
var gainNode = context.createGain();
gainNode.connect(context.destination);

// don't play for self
gainNode.gain.value = 0;

document.querySelector('input[type=file]').onchange = function() {
    this.disabled = true;

    var reader = new FileReader();
    reader.onload = (function(e) {
        // Decode the array buffer into PCM audio data (an AudioBuffer)
        context.decodeAudioData(e.target.result, function(buffer) {
            // Create the sound source
            var soundSource = context.createBufferSource();

            soundSource.buffer = buffer;
            soundSource.start(0, 0 / 1000);
            soundSource.connect(gainNode);

            var destination = context.createMediaStreamDestination();
            soundSource.connect(destination);

            createPeerConnection(destination.stream);
        });
    });

    reader.readAsArrayBuffer(this.files[0]);
};

function createPeerConnection(mp3Stream) {
    // you need to place 3rd party WebRTC code here
}
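
For reference, here is a minimal sketch of what createPeerConnection might look like using the standard RTCPeerConnection API. The STUN server and the signalingChannel object are assumptions for illustration; the original demo leaves the signaling and peer-connection code to your own application:

function createPeerConnection(mp3Stream) {
    // ICE configuration is an assumption; use your own servers
    var pc = new RTCPeerConnection({
        iceServers: [{ urls: 'stun:stun.l.google.com:19302' }]
    });

    // attach every track of the WebAudio-generated stream, so the
    // remote peer receives the decoded file instead of a microphone
    mp3Stream.getTracks().forEach(function(track) {
        pc.addTrack(track, mp3Stream);
    });

    // exchange ICE candidates over your own signaling channel (assumed here)
    pc.onicecandidate = function(event) {
        if (event.candidate) {
            signalingChannel.send(JSON.stringify({ candidate: event.candidate }));
        }
    };

    // create and send the offer; the answer must come back via signaling
    pc.createOffer().then(function(offer) {
        return pc.setLocalDescription(offer);
    }).then(function() {
        signalingChannel.send(JSON.stringify({ sdp: pc.localDescription }));
    });

    return pc;
}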

Updated at: 5:55 PM - Thursday, August 28, 2014

Here is how to get the MP3 from a server:

function HTTP_GET(url, callback) {
    var xhr = new XMLHttpRequest();
    xhr.open('GET', url, true);
    xhr.responseType = 'arraybuffer';
    xhr.send();

    xhr.onload = function(e) {
        if (xhr.status != 200) {
            alert("Unexpected status code " + xhr.status + " for " + url);
            return false;
        }

        callback(xhr.response); // return array-buffer
    };
}

// invoke above "HTTP_GET" method
// to load mp3 as array-buffer

HTTP_GET('http://domain.com/file.mp3', function(array_buffer) {

    // Decode the array buffer into PCM audio data (an AudioBuffer)
    context.decodeAudioData(array_buffer, function(buffer) {
        // Create the sound source
        var soundSource = context.createBufferSource();

        soundSource.buffer = buffer;
        soundSource.start(0, 0 / 1000);
        soundSource.connect(gainNode);

        var destination = context.createMediaStreamDestination();
        soundSource.connect(destination);

        createPeerConnection(destination.stream);
    });
});
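
On the receiving end (not shown in the demo above), the incoming stream can be played like any other remote WebRTC track. A minimal sketch, assuming the remote peer's connection is named pc and the page has an audio element with id "remoteAudio":

pc.ontrack = function(event) {
    // attach the incoming mp3/wav stream to an audio element and play it
    var remoteAudio = document.getElementById('remoteAudio');
    remoteAudio.srcObject = event.streams[0];
    remoteAudio.play();
};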
answered Sep 24 '22 by Muaz Khan