I am trying to develop a video calling/conferencing application using WebRTC and Node.js. Right now there is no facility to control bandwidth during a video call. Is there any way to control/reduce bandwidth? (For example, I want my whole web application to work at 150 kbps while video conferencing.)
Any suggestions are highly appreciated. Thanks in advance.
Current WebRTC implementations use Opus and VP8 codecs: The Opus codec is used for audio and supports constant and variable bitrate encoding and requires 6–510 Kbit/s of bandwidth.
In the context of VoIP (and WebRTC), bitrate is the number of bits per second that are being actively sent or received over the network. A few things to remember about bitrate: The maximum bitrate possible is capped by the bandwidth available, which can be dynamic throughout a single session.
Try this demo. You can inject bandwidth attributes (b=AS
) in the session descriptions:
// Default bandwidth caps in kbit/s. Declared with `const` — the original
// assignments created implicit globals.
const audioBandwidth = 50;
const videoBandwidth = 256;

/**
 * Inject b=AS (application-specific maximum bandwidth) attributes into an
 * SDP blob, right after the a=mid:audio / a=mid:video lines.
 *
 * @param {string} sdp - the session description text to munge
 * @param {number} [audioKbps=audioBandwidth] - audio cap in kbit/s
 * @param {number} [videoKbps=videoBandwidth] - video cap in kbit/s
 * @returns {string} the munged SDP
 */
function setBandwidth(sdp, audioKbps = audioBandwidth, videoKbps = videoBandwidth) {
    sdp = sdp.replace(/a=mid:audio\r\n/g, `a=mid:audio\r\nb=AS:${audioKbps}\r\n`);
    sdp = sdp.replace(/a=mid:video\r\n/g, `a=mid:video\r\nb=AS:${videoKbps}\r\n`);
    return sdp;
}

// ----------------------------------------------------------
// Demo wiring: requires a live RTCPeerConnection `peer` and a `constraints`
// object from the hosting page. Guarded so the snippet does not throw a
// ReferenceError when loaded outside the demo.
if (typeof peer !== 'undefined' && typeof constraints !== 'undefined') {
    peer.createOffer(function (sessionDescription) {
        sessionDescription.sdp = setBandwidth(sessionDescription.sdp);
        peer.setLocalDescription(sessionDescription);
    }, null, constraints);

    peer.createAnswer(function (sessionDescription) {
        sessionDescription.sdp = setBandwidth(sessionDescription.sdp);
        peer.setLocalDescription(sessionDescription);
    }, null, constraints);
}
b=AS
is already present in sdp for data m-line
; its default value is 50
.
Here is a library that provides full control over both audio/video tracks' bitrates:
// Example: applying all three BandwidthHandler munging steps to an SDP blob.
const bandwidth = {
    screen: 300, // screen-share minimum, kbit/s
    audio: 50,   // audio cap, kbit/s
    video: 256   // video cap, kbit/s (used as both min and max below)
};
const isScreenSharing = false;

sdp = BandwidthHandler.setApplicationSpecificBandwidth(sdp, bandwidth, isScreenSharing);
sdp = BandwidthHandler.setVideoBitrates(sdp, {
    min: bandwidth.video,
    max: bandwidth.video
});
sdp = BandwidthHandler.setOpusAttributes(sdp);
Here is the library code. It's quite big, but it works!
// BandwidthHandler.js
//
// SDP-munging helpers for capping WebRTC bandwidth:
//   - setApplicationSpecificBandwidth: inserts b=AS lines per media section
//   - setVideoBitrates: appends x-google-min/max-bitrate to the VP8 fmtp line
//   - setOpusAttributes: appends/overrides Opus fmtp parameters
var BandwidthHandler = (function() {
    /**
     * Insert b=AS (application-specific maximum bandwidth, kbit/s) lines
     * after the a=mid:audio / a=mid:video lines.
     * @param {string} sdp - session description text
     * @param {{audio?:number,video?:number,screen?:number,data?:number}} bandwidth - caps in kbit/s
     * @param {boolean} isScreen - true when the video m-line carries a screen share
     * @returns {string} munged SDP
     */
    function setBAS(sdp, bandwidth, isScreen) {
        // Firefox does not honour this style of b=AS munging; skip it there.
        // The typeof guard keeps the code loadable outside a browser (the
        // original dereferenced `navigator` unconditionally and threw).
        if ((typeof navigator !== 'undefined' && !!navigator.mozGetUserMedia) || !bandwidth) {
            return sdp;
        }

        if (isScreen) {
            if (!bandwidth.screen) {
                console.warn('It seems that you are not using bandwidth for screen. Screen sharing is expected to fail.');
            } else if (bandwidth.screen < 300) {
                console.warn('It seems that you are using wrong bandwidth value for screen. Screen sharing is expected to fail.');
            }
        }

        // if screen; must use at least 300kbs
        if (bandwidth.screen && isScreen) {
            sdp = sdp.replace(/b=AS([^\r\n]+\r\n)/g, '');
            sdp = sdp.replace(/a=mid:video\r\n/g, 'a=mid:video\r\nb=AS:' + bandwidth.screen + '\r\n');
        }

        // remove existing bandwidth lines
        if (bandwidth.audio || bandwidth.video || bandwidth.data) {
            sdp = sdp.replace(/b=AS([^\r\n]+\r\n)/g, '');
        }

        if (bandwidth.audio) {
            sdp = sdp.replace(/a=mid:audio\r\n/g, 'a=mid:audio\r\nb=AS:' + bandwidth.audio + '\r\n');
        }

        if (bandwidth.video) {
            // a screen share keeps its (higher) screen cap
            sdp = sdp.replace(/a=mid:video\r\n/g, 'a=mid:video\r\nb=AS:' + (isScreen ? bandwidth.screen : bandwidth.video) + '\r\n');
        }

        return sdp;
    }

    // Find the line in sdpLines that starts with |prefix|, and, if specified,
    // contains |substr| (case-insensitive search). Returns the line index,
    // or null when no line matches.
    function findLine(sdpLines, prefix, substr) {
        return findLineInRange(sdpLines, 0, -1, prefix, substr);
    }

    // Find the line in sdpLines[startLine...endLine - 1] that starts with |prefix|
    // and, if specified, contains |substr| (case-insensitive search).
    function findLineInRange(sdpLines, startLine, endLine, prefix, substr) {
        var realEndLine = endLine !== -1 ? endLine : sdpLines.length;
        for (var i = startLine; i < realEndLine; ++i) {
            if (sdpLines[i].indexOf(prefix) === 0) {
                if (!substr || sdpLines[i].toLowerCase().indexOf(substr.toLowerCase()) !== -1) {
                    return i;
                }
            }
        }
        return null;
    }

    // Gets the codec payload type (as a string) from an a=rtpmap:X line,
    // or null when the line does not match.
    function getCodecPayloadType(sdpLine) {
        var pattern = new RegExp('a=rtpmap:(\\d+) \\w+\\/\\d+');
        var result = sdpLine.match(pattern);
        return (result && result.length === 2) ? result[1] : null;
    }

    /**
     * Append x-google-min/max-bitrate to the VP8 fmtp line. Requires both a
     * VP8 and an rtx rtpmap entry; otherwise the SDP is returned unmodified.
     * @param {string} sdp
     * @param {{min?:number,max?:number}} [params] - bitrates in kbit/s (default 228)
     * @returns {string}
     */
    function setVideoBitrates(sdp, params) {
        params = params || {};
        var xgoogle_min_bitrate = params.min;
        var xgoogle_max_bitrate = params.max;

        var sdpLines = sdp.split('\r\n');

        // VP8. BUGFIX: findLine() returns a line *index* or null; index 0 is
        // falsy, so the original `if (vp8Index)` mis-reported a match on the
        // first line as "not found". Compare against null explicitly.
        var vp8Index = findLine(sdpLines, 'a=rtpmap', 'VP8/90000');
        var vp8Payload;
        if (vp8Index !== null) {
            vp8Payload = getCodecPayloadType(sdpLines[vp8Index]);
        }

        if (!vp8Payload) {
            return sdp;
        }

        var rtxIndex = findLine(sdpLines, 'a=rtpmap', 'rtx/90000');
        var rtxPayload;
        if (rtxIndex !== null) {
            rtxPayload = getCodecPayloadType(sdpLines[rtxIndex]);
        }

        // Same index-0 fix as above: bail out only when rtx is truly absent.
        if (rtxIndex === null) {
            return sdp;
        }

        var rtxFmtpLineIndex = findLine(sdpLines, 'a=fmtp:' + rtxPayload.toString());
        if (rtxFmtpLineIndex !== null) {
            var appendrtxNext = '\r\n';
            appendrtxNext += 'a=fmtp:' + vp8Payload + ' x-google-min-bitrate=' + (xgoogle_min_bitrate || '228') + '; x-google-max-bitrate=' + (xgoogle_max_bitrate || '228');
            sdpLines[rtxFmtpLineIndex] = sdpLines[rtxFmtpLineIndex].concat(appendrtxNext);
            sdp = sdpLines.join('\r\n');
        }

        return sdp;
    }

    /**
     * Append/override Opus fmtp attributes (stereo, maxaveragebitrate, cbr,
     * in-band FEC, DTX, maxptime, ...). Unspecified params keep defaults.
     * @param {string} sdp
     * @param {Object} [params]
     * @returns {string}
     */
    function setOpusAttributes(sdp, params) {
        params = params || {};

        var sdpLines = sdp.split('\r\n');

        // Opus (explicit null check: line index 0 is a valid match)
        var opusIndex = findLine(sdpLines, 'a=rtpmap', 'opus/48000');
        var opusPayload;
        if (opusIndex !== null) {
            opusPayload = getCodecPayloadType(sdpLines[opusIndex]);
        }

        if (!opusPayload) {
            return sdp;
        }

        var opusFmtpLineIndex = findLine(sdpLines, 'a=fmtp:' + opusPayload.toString());
        if (opusFmtpLineIndex === null) {
            return sdp;
        }

        var appendOpusNext = '';
        appendOpusNext += '; stereo=' + (typeof params.stereo != 'undefined' ? params.stereo : '1');
        appendOpusNext += '; sprop-stereo=' + (typeof params['sprop-stereo'] != 'undefined' ? params['sprop-stereo'] : '1');

        if (typeof params.maxaveragebitrate != 'undefined') {
            appendOpusNext += '; maxaveragebitrate=' + (params.maxaveragebitrate || 128 * 1024 * 8);
        }

        if (typeof params.maxplaybackrate != 'undefined') {
            appendOpusNext += '; maxplaybackrate=' + (params.maxplaybackrate || 128 * 1024 * 8);
        }

        if (typeof params.cbr != 'undefined') {
            appendOpusNext += '; cbr=' + (typeof params.cbr != 'undefined' ? params.cbr : '1');
        }

        if (typeof params.useinbandfec != 'undefined') {
            appendOpusNext += '; useinbandfec=' + params.useinbandfec;
        }

        if (typeof params.usedtx != 'undefined') {
            appendOpusNext += '; usedtx=' + params.usedtx;
        }

        if (typeof params.maxptime != 'undefined') {
            // maxptime is a separate SDP attribute, not an fmtp parameter
            appendOpusNext += '\r\na=maxptime:' + params.maxptime;
        }

        sdpLines[opusFmtpLineIndex] = sdpLines[opusFmtpLineIndex].concat(appendOpusNext);
        sdp = sdpLines.join('\r\n');
        return sdp;
    }

    return {
        setApplicationSpecificBandwidth: function(sdp, bandwidth, isScreen) {
            return setBAS(sdp, bandwidth, isScreen);
        },
        setVideoBitrates: function(sdp, params) {
            return setVideoBitrates(sdp, params);
        },
        setOpusAttributes: function(sdp, params) {
            return setOpusAttributes(sdp, params);
        }
    };
})();
Here is how to set advance opus bitrate parameters:
// Example: tuning every supported Opus fmtp parameter in one call.
sdp = BandwidthHandler.setOpusAttributes(sdp, {
    'stereo': 0,                          // disable stereo (force mono audio)
    'sprop-stereo': 1,
    'maxaveragebitrate': 500 * 1024 * 8,  // bits per second
    'maxplaybackrate': 500 * 1024 * 8,    // bits per second
    'cbr': 0,                             // disable constant bitrate
    'useinbandfec': 1,                    // enable in-band forward error correction
    'usedtx': 1,                          // enable discontinuous transmission
    'maxptime': 3
});
A more up-to-date answer
// Cap the video bandwidth by appending a b=AS line directly beneath every
// m=video line. This is more robust than keying off a=mid:video, which is
// not guaranteed to appear (receive-only video may use a=mid:0, etc.).
const videobitrate = 20000;

const offer = pc.localDescription;
const cappedSdp = offer.sdp.replace(
    /(m=video.*\r\n)/g,
    (mediaLine) => `${mediaLine}b=AS:${videobitrate}\r\n`
);
offer.sdp = cappedSdp;
pc.setLocalDescription(offer);
Explanation: a=mid:video
is not a guaranteed tag. For receive only video, you might not see it or see a=mid:0
. Generally it's a better bet to look for the m=video xxxx xxxx
(or similar audio) tag and append the bandwidth parameters underneath
If you found this helpful, you can donate to us via PayPal or buy us a coffee so we can maintain and grow. Thank you!
Donate Us With