Streaming / broadcasting a live video call to non-WebRTC supported browsers and media players

As the title of this article suggests, I am going to pen down my attempts at streaming / broadcasting a live WebRTC video call to non-WebRTC-supported browsers and media players such as VLC, ffplay, the default video player in Linux, etc.

I am currently attempting to do this by building my own MP4 engine from the WebRTC feed. However, I am sharing my past experiments in the hope that they help someone whose objective is not the same as mine and who might find these threads useful.


Attempt 1 : use a one-to-many broadcasting API

<!DOCTYPE html>
<html id="home" lang="en">

<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0, user-scalable=no">
<meta name="author" content="altanai">
<meta http-equiv="X-UA-Compatible" content="IE=edge,chrome=1">

<link rel="stylesheet" type="text/css" href="style.css">

</head>

<body>

<table class="visible">
<tr>
<td style="text-align: right;">
<input type="text" id="conference-name" placeholder="Broadcast Name">
</td>
<td>
<select id="broadcasting-option">
<option>Audio + Video</option>
<option>Only Audio</option>
<option>Screen</option>
</select>
</td>
<td>
<button id="start-conferencing">Start Broadcasting</button>
</td>
</tr>
</table>
<table id="rooms-list" class="visible"></table>

<div id="participants"></div>

<script src="RTCPeerConnection-v1.5.js"></script>
<script src="firebase.js"></script>
<script src="broadcast.js"></script>
<script src="broadcast-ui.js"></script>

</body>

</html>
 

It uses the API from webrtc-experiment.com. The broadcast is one-directional ; the viewers are never asked for their mic / webcam permission.
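For context, the reason viewers never see a permission prompt is that only the broadcaster calls getUserMedia ; a viewer's peer connection is receive-only. A rough sketch of that idea ( not the internals of RTCPeerConnection-v1.5.js, just the callback-style Chrome API of that era ) :

// Sketch : a receive-only viewer never touches getUserMedia.
// ( Old callback-style API ; the firebase signalling is omitted. )
var pc = new webkitRTCPeerConnection({ iceServers: [{ url: 'stun:stun.l.google.com:19302' }] });

// Broadcaster side only :
// navigator.webkitGetUserMedia({ video: true, audio: true },
//     function(stream) { pc.addStream(stream); }, console.error);

// Viewer side : no getUserMedia at all, just offer to receive media
pc.createOffer(function(offer) {
    pc.setLocalDescription(offer);
    // offer.sdp is then sent to the broadcaster over the signalling channel
}, console.error, {
    mandatory: { OfferToReceiveAudio: true, OfferToReceiveVideo: true }
});

// When the broadcaster's stream arrives, attach it to a <video> element
pc.onaddstream = function(event) {
    document.querySelector('video').src = URL.createObjectURL(event.stream);
};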

Problem : the broadcast works only in WebRTC-capable browsers and does not reach non-WebRTC players / browsers.


Attempt 1.1 : stream the media directly to Node.js through a websocket


window.addEventListener('DOMContentLoaded', function() {

var v = document.getElementById('v');
navigator.getUserMedia = (navigator.getUserMedia || 
navigator.webkitGetUserMedia || 
navigator.mozGetUserMedia || 
navigator.msGetUserMedia);

if (navigator.getUserMedia) {
// Request access to video only
navigator.getUserMedia(
{
video:true,
audio:false
}, 
function(stream) {
var url = window.URL || window.webkitURL;
v.src = url ? url.createObjectURL(stream) : stream;
v.play();

var ws = new WebSocket('ws://localhost:3000', 'echo-protocol');
waitForSocketConnection(ws, function(){

console.log(" url.createObjectURL(stream)-----", url.createObjectURL(stream))
ws.send(stream);

console.log("message sent!!!"); 
});

},
function(error) {
alert('Something went wrong. (error code ' + error.code + ')');
return;
}
);
}
else {
alert('Sorry, the browser you are using doesn\'t support getUserMedia');
return;
}
});

//Make the function wait until the connection is made...
function waitForSocketConnection(socket, callback){
setTimeout(
function () {
if (socket.readyState === 1) {
console.log("Connection is made")
if(callback != null){
callback();
}
return;

} else {
console.log("wait for connection...")
waitForSocketConnection(socket, callback);
}

}, 5); // wait 5 milliseconds for the connection...
}

Problem : the video arrives only as an opaque buffer and does not play ; a websocket cannot carry a live MediaStream object.
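A websocket payload has to be a string, Blob or ArrayBuffer, so actual pixel or encoded data has to be sent instead of the stream object. A minimal sketch of that idea ( not what the code above does ) is to paint frames onto a canvas and ship each frame as a string, assuming v and ws are the video element and the open socket from the snippet above :

// Sketch only : send individual video frames over the existing websocket
// as base64 JPEG strings. Assumes "v" is the playing <video> element and
// "ws" is the open WebSocket from the snippet above.
var canvas = document.createElement('canvas');
canvas.width = 320;
canvas.height = 240;
var ctx = canvas.getContext('2d');

setInterval(function () {
    if (ws.readyState !== 1) return;                      // only send while the socket is open
    ctx.drawImage(v, 0, 0, canvas.width, canvas.height);  // grab the current video frame
    ws.send(canvas.toDataURL('image/jpeg'));              // a string is a valid websocket payload
}, 100);                                                  // roughly 10 frames per second

This is only a crude Motion-JPEG style feed, not a proper video container, but it shows the kind of payload the socket can actually transport ; the chunked-recording attempts below are the more serious route.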


Attempt 2 : record the WebRTC media into chunks of webm format ( 5 secs each ) -> transfer them to the other end -> append the chunks together like a regular file

This process involved the following components :

  • Recorder Javascript library : RecordRTC.js
  • Transfer mechanism : record using RecordRTC.js -> send to the other end / media server -> stitch the small webm files together into a big one at runtime and play it ( a sketch of the 5-second chunking is shown right after the recorder code below )
  • Programs :

Code for video recorder

navigator.getUserMedia(videoConstraints, function(stream) {

video.onloadedmetadata = function() {
video.width = 320;
video.height = 240;

var options = {
type: isRecordVideo ? 'video' : 'gif',
video: video,
canvas: {
width: canvasWidth_input.value,
height: canvasHeight_input.value
}
};

recorder = window.RecordRTC(stream, options);
recorder.startRecording();
};
video.src = URL.createObjectURL(stream);
}, function() {
if (document.getElementById('record-screen').checked) {
if (location.protocol === 'http:')
alert('<https> is mandatory to capture screen.');
else
alert('Multi-capturing of screen is not allowed. Capturing process is denied. Have you enabled the flag "Enable screen capture support in getUserMedia"?');
} else
alert('Webcam access is denied.');
});
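The recorder above produces one clip per start / stop cycle. For the 5-second chunks this attempt needs, an alternative sketch ( not the RecordRTC code used here, and assuming a browser that ships the MediaRecorder API ) is the standard timeslice argument :

// Sketch : emit a webm chunk every 5 seconds with the standard MediaRecorder API.
// Assumes "stream" is the getUserMedia stream and "ws" is an open websocket
// ( or any other transport ) to the receiving side.
var mediaRecorder = new MediaRecorder(stream, { mimeType: 'video/webm' });

mediaRecorder.ondataavailable = function(event) {
    if (event.data && event.data.size > 0) {
        ws.send(event.data);      // each event.data is a webm Blob of roughly 5 seconds
    }
};

mediaRecorder.start(5000);        // timeslice in milliseconds
// later : mediaRecorder.stop();

Note that with timeslicing only the first chunk carries the webm header, so the later chunks have to be appended in order ( as the appender below does ) rather than played as standalone files.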

Code for the video appender

var FILE1 = '1.webm';
var FILE2 = '2.webm';
var FILE3 = '3.webm';
var FILE4 = '4.webm';
var FILE5 = '5.webm';

var NUM_CHUNKS = 5;
var video = document.querySelector('video');

window.MediaSource = window.MediaSource || window.WebKitMediaSource;
if (!!!window.MediaSource) {
alert('MediaSource API is not available');
}

var mediaSource = new MediaSource();

video.src = window.URL.createObjectURL(mediaSource);

function callback(e) {

var sourceBuffer = mediaSource.addSourceBuffer('video/webm; codecs="vorbis,vp8"');

GET(FILE1, function(uInt8Array) {

var file = new Blob([uInt8Array], {type: 'video/webm'});
var i = 1;

(function readChunk_(i) {

var reader = new FileReader();

reader.onload = function(e) {

sourceBuffer.appendBuffer(new Uint8Array(e.target.result));

if (i == NUM_CHUNKS) mediaSource.endOfStream();

else {
if (video.paused) {
video.play(); // Start playing after 1st chunk is appended.
}
readChunk_(++i);
}

};

reader.readAsArrayBuffer(file);

})(i); // Start the recursive call by self calling.
});
}

mediaSource.addEventListener('sourceopen', callback, false);
mediaSource.addEventListener('webkitsourceopen', callback, false);
mediaSource.addEventListener('webkitsourceended', function(e) {
console.log('mediaSource readyState: ' + this.readyState);
}, false);

// fetch the video file via XHR as an ArrayBuffer
function GET(url, callback) {

var xhr = new XMLHttpRequest();
xhr.open('GET', url, true);
xhr.responseType = 'arraybuffer';
xhr.send();

xhr.onload = function(e) {

if (xhr.status != 200) {
alert("Unexpected status code " + xhr.status + " for " + url);
return false;
}

callback(new Uint8Array(xhr.response));
};
}

Shortcomings of this approach

  1. The webm files failed to play on most media players.
  2. The recorder can record either a video or an audio file at a time, not both together.

Attempt 2.1 : record the WebRTC media into chunks of webm format ( RecordRTC.js, 5 secs each ) -> use the Kurento JS client ( kws-media-api.js ) to make an HTTP endpoint for the recorded webm files -> append the chunks together like a regular file at runtime


function getByID(id) {
return document.getElementById(id);
}

var recordAudio = getByID('record-audio'),
recordVideo = getByID('record-video'),
stopRecordingAudio = getByID('stop-recording-audio'),
stopRecordingVideo = getByID('stop-recording-video'),
broadcasting=getByID('broadcasting');

var canvasWidth_input = getByID('canvas-width-input'),
canvasHeight_input = getByID('canvas-height-input');

var video = getByID('video');
var audio = getByID('audio');

var videoConstraints = {
audio: false,
video: {
mandatory: {},
optional: []
}
};

var audioConstraints = {
audio: true,
video: false
};

const ws_uri = 'ws://localhost:8888/kurento';
var URL_SMALL="http://localhost:8080/streamtomp4/approach1/5561840332.webm";


var audioStream;
var recorder;

recordAudio.onclick = function() {
if (!audioStream)
navigator.getUserMedia(audioConstraints, function(stream) {

if (window.IsChrome) stream = new window.MediaStream(stream.getAudioTracks());
audioStream = stream;

audio.src = URL.createObjectURL(audioStream);
audio.muted = true;
audio.play();

// "audio" is a default type
recorder = window.RecordRTC(stream, {
type: 'audio'
});
recorder.startRecording();
}, function() {});
else {
audio.src = URL.createObjectURL(audioStream);
audio.muted = true;
audio.play();
if (recorder) recorder.startRecording();
}


window.isAudio = true;

this.disabled = true;
stopRecordingAudio.disabled = false;
};

stopRecordingAudio.onclick = function() {
this.disabled = true;
recordAudio.disabled = false;
audio.src = '';

if (recorder)
recorder.stopRecording(function(url) {
audio.src = url;
audio.muted = false;
audio.play();

document.getElementById('audio-url-preview').innerHTML = '<a href="' + url + '" target="_blank">Recorded Audio URL</a>';
});
};

recordVideo.onclick = function() {
recordVideoOrGIF(true);
};


function recordVideoOrGIF(isRecordVideo) {
navigator.getUserMedia(videoConstraints, function(stream) {

video.onloadedmetadata = function() {
video.width = 320;
video.height = 240;

var options = {
type: isRecordVideo ? 'video' : 'gif',
video: video,
canvas: {
width: canvasWidth_input.value,
height: canvasHeight_input.value
}
};

recorder = window.RecordRTC(stream, options);
recorder.startRecording();
};
video.src = URL.createObjectURL(stream);
}, function() {
if (document.getElementById('record-screen').checked) {
if (location.protocol === 'http:')
alert('<https> is mandatory to capture screen.');
else
alert('Multi-capturing of screen is not allowed. Capturing process is denied. Have you enabled the flag "Enable screen capture support in getUserMedia"?');
} else
alert('Webcam access is denied.');
});

window.isAudio = false;

if (isRecordVideo) {
recordVideo.disabled = true;
stopRecordingVideo.disabled = false;
} else {
recordGIF.disabled = true;
stopRecordingGIF.disabled = false;
}
}

stopRecordingVideo.onclick = function() {
this.disabled = true;
recordVideo.disabled = false;

if (recorder)
recorder.stopRecording(function(url) {
video.src = url;
video.play();
document.getElementById('video-url-preview').innerHTML = '<a href="' + url + '" target="_blank">Recorded Video URL</a>';

});
};


/*--------------------------broadcasting -----------------------------------*/

function onerror(error)
{
console.log(" error occurred");
console.error(error);
};

broadcasting.onclick = function() {
var videoOutput = document.getElementById("videoOutput");

KwsMedia(ws_uri, function(error, kwsMedia)
{
if(error) return onerror(error);

// Create pipeline
kwsMedia.create('MediaPipeline', function(error, pipeline)
{
if(error) return onerror(error);

// Create pipeline media elements (endpoints & filters)
pipeline.create('PlayerEndpoint', {uri: URL_SMALL},
function(error, player)
{
if(error) return console.error(error);

pipeline.create('HttpGetEndpoint', function(error, httpGet)
{
if(error) return onerror(error);

// Connect media element between them
player.connect(httpGet, function(error, pipeline)
{
if(error) return onerror(error);
// Set the video on the video tag
httpGet.getUrl(function(error, url)
{
if(error) return onerror(error);

videoOutput.src = url;

console.log(url);

// Start player
player.play(function(error)
{
if(error) return onerror(error);

console.log('player.play');
});
});
});

// Subscribe to HttpGetEndpoint EOS event
httpGet.on('EndOfStream', function(event)
{
console.log("EndOfStream event:", event);
});
});
});
});
},
onerror);

}

Problem : dissecting the live video into small files and appending them to each other on reception is an expensive, time- and resource-consuming process. It also involves heavy buffering and other problems inherent to real-time streaming.


Attempt 2.2 : send the recorded webm chunks to a port on a Linux server. Use socket programming to pick up these individual files and play them with the VLC player from a UDP port of the Linux server ( a sketch of such a relay follows the screenshot below ).

Screenshot from 2015-01-22 15:32:51
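There is no code listing for this attempt, but the relay can be sketched as a small Node.js program that takes each webm chunk arriving on the websocket and pushes it out as UDP datagrams, with VLC then pointed at that port ( e.g. vlc udp://@:8083 on the server ). The port numbers and datagram size below are placeholders, not the exact values used :

// Sketch of the Attempt 2.2 relay : websocket in, UDP datagrams out.
// Port numbers and the datagram size are illustrative placeholders.
var dgram = require('dgram');
var WebSocketServer = require('ws').Server;

var udp = dgram.createSocket('udp4');
var UDP_PORT = 8083;                 // the port VLC listens on : vlc udp://@:8083
var UDP_HOST = '127.0.0.1';

var wss = new WebSocketServer({ port: 3000 });

wss.on('connection', function(ws) {
    ws.on('message', function(chunk) {
        // UDP datagrams are size limited, so slice each webm chunk before sending
        var MAX = 1400;              // stay under a typical MTU
        for (var i = 0; i < chunk.length; i += MAX) {
            var piece = chunk.slice(i, i + MAX);
            udp.send(piece, 0, piece.length, UDP_PORT, UDP_HOST);
        }
    });
});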


Attempt 2.3 : send the recorded webm chunks to a socket on the Linux server. Use socket programming to pick up these individual webm files and convert them to H.264 so that they can be sent to a media server.

This process involved the following components :

  • Recorder Javascript library : RecordRTC.js
  • Transfer mechanism : WebRTC endpoint -> call handler ( record in chunks ) -> ffmpeg / gstreamer to put it on RTP -> streaming server like Wowza -> viewers
  • Programs : HTML webpage with a websocket connection -> Node.js program to write content from the websocket to a Linux socket -> Node.js program to read that socket and print the content on the console

Program to transfer the recorded webm files over a websocket to the Node.js program

//Make the function wait until the connection is made...
function waitForSocketConnection(socket, callback){
setTimeout(
function () {
if (socket.readyState === 1) {
console.log("Connection is made")
if(callback != null){
callback();
}
return;

} else {
console.log("wait for connection...")
waitForSocketConnection(socket, callback);
}

}, 5); // wait 5 milliseconds for the connection...
}

function previewFile() {
var preview = document.querySelector('img');
var file = document.querySelector('input[type=file]').files[0];
var reader = new FileReader();

reader.onloadend = function () {

preview.src = reader.result;
console.log(" reader result ", reader.result);

var video=document.getElementById("v");
video.src=reader.result;
console.log(" video played ");

var ws = new WebSocket('ws://localhost:3000', 'echo-protocol');

waitForSocketConnection(ws, function(){
ws.send(reader.result); 
console.log("message sent!!!"); 
});

}

if (file) {
// converts to base64 encoded string of the file data
//reader.readAsDataURL(file);

reader.readAsBinaryString(file);

} else {
preview.src = "";
}
}

Program for the Linux socket sender : a websocket server that forwards each received webm chunk to the Unix socket ( /tmp/tfxsocket )

var net = require('net');
var fs = require('fs');
var socketPath = '/tmp/tfxsocket';
var http = require('http');
var stream = require('stream');
var util = require('util');

var WebSocketServer = require('ws').Server;
var port = 3000;
var serverUrl = "localhost";

var socket;
/*--------------------------------http server -----------------------------*/
var server= http.createServer(function (request, response) {

});

server.listen(port, serverUrl);

console.log('HTTP Server running at ',serverUrl,port);

/*--------------------------------websocket server -----------------------------*/

var wss = new WebSocketServer({server: server});

wss.on("connection", function(ws) {
console.log("websocket connection open");

ws.on('message', function (message) {
console.log(" stream recived from broadcast client on port 3000 ");

var s = require('net').Socket();
s.connect(socketPath);
s.write(message);

console.log(" send the stream to socketPath",socketPath); 
});

ws.on("close", function() {
console.log("websocket connection close")
});

});

Program for the Linux socket listener using Node.js. Here the socket is at /tmp/mysocket ( note that the sender above writes to /tmp/tfxsocket ; the two paths need to point at the same socket ).

var net = require('net');

var client = net.createConnection("/tmp/mysocket");

client.on("connect", function() {
console.log("connected to mysocket");
});

client.on("data", function(data) {
console.log(data);
});

client.on('end', function() {
console.log('server disconnected');
});

Output 1: Video Buffer displayed

Screenshot from 2015-01-22 15:35:06 (copy)

Output 2 : Random data from Video displayed

Screenshot from 2015-01-23 12:57:35

The ffmpeg form of transferring the content from the Unix socket to a UDP IP and port ( "format" here stands for the desired output container, e.g. mpegts ) :

ffmpeg -i unix://tmp/mysocket -f format udp://192.168.0.119:8083

Problem with this approach : the video data merely passed through the socket and contained no usable information when we tried to play it or print it on the console.
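One way around the empty Unix-socket hop, in line with the ffmpeg -> RTP step listed in the components above, would be to feed the incoming chunks straight into an ffmpeg child process instead. This is only a sketch ; the destination address, ports and codec choices are placeholders, not what was actually run :

// Sketch : pipe the incoming webm chunks straight into ffmpeg's stdin and let
// ffmpeg re-packetise them as RTP. Address, ports and codecs are placeholders.
var spawn = require('child_process').spawn;
var WebSocketServer = require('ws').Server;

var ffmpeg = spawn('ffmpeg', [
    '-i', 'pipe:0',                  // read the webm stream from stdin
    '-c:v', 'libx264',               // transcode VP8 -> H.264 for the media server
    '-an',                           // video only in this sketch
    '-f', 'rtp',
    'rtp://192.168.0.119:8083'
]);

ffmpeg.stderr.on('data', function(d) { console.log(d.toString()); });

var wss = new WebSocketServer({ port: 3000 });
wss.on('connection', function(ws) {
    ws.on('message', function(chunk) {
        ffmpeg.stdin.write(chunk);   // feed each recorded chunk to ffmpeg
    });
    ws.on('close', function() {
        ffmpeg.stdin.end();
    });
});

ffmpeg then takes care of demuxing the webm input and re-packetising it as RTP towards the media server.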


Attempt 3 : send the live WebRTC stream from a Kurento WebRTC endpoint to a Kurento HTTP endpoint, then play it using the Mozilla VLC web plugin ( a sketch of the Kurento pipeline follows the embed snippet below ).

The VLC Mozilla plugin can be embedded with a snippet along these lines :

<embed type="application/x-vlc-plugin"
name="video2"
autoplay="yes" loop="no" hidden="no"
target="rtp://@192.165.0.119:8086" />
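On the Kurento side the pipeline for this attempt mirrors the broadcast code from Attempt 2.1, with a WebRtcEndpoint as the live source instead of a PlayerEndpoint. A sketch with kws-media-api, leaving out the SDP offer / answer exchange with the browser :

// Sketch of the Attempt 3 pipeline : live WebRTC stream -> HTTP endpoint.
// The SDP negotiation between the browser and the WebRtcEndpoint is omitted ;
// ws_uri and onerror are the same as in the broadcast code of Attempt 2.1.
KwsMedia(ws_uri, function(error, kwsMedia) {
    if (error) return onerror(error);

    kwsMedia.create('MediaPipeline', function(error, pipeline) {
        if (error) return onerror(error);

        // Live source : the caller's WebRTC stream lands here
        pipeline.create('WebRtcEndpoint', function(error, webRtc) {
            if (error) return onerror(error);

            // Sink : plain HTTP URL that a non-WebRTC player can open
            pipeline.create('HttpGetEndpoint', function(error, httpGet) {
                if (error) return onerror(error);

                webRtc.connect(httpGet, function(error) {
                    if (error) return onerror(error);

                    httpGet.getUrl(function(error, url) {
                        if (error) return onerror(error);
                        console.log('Point VLC / the plugin at:', url);
                    });
                });
            });

            // ... here the browser's SDP offer would be passed to the WebRtcEndpoint
        });
    });
});

The URL returned by getUrl() is what the embed snippet above ( or plain VLC ) would be pointed at.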

Screenshots of the Mozilla VLC plugin failing to play from a WebRTC endpoint :

Screenshot from 2015-01-29 10:37:06 , Screenshot from 2015-01-29 10:37:17

Screenshot from 2015-01-29 12:06:14

Problem : the VLC Mozilla plugin was unable to play the video.

………………………………………………………………………………………………………………..

The 4th, 5th and 6th attempts of this article are covered in the next blog post :

continued : Streaming / broadcasting a live video call to non-WebRTC supported browsers and media players
