Browser Physics Using the new web standards to produce physics

2Jul/139

Real time screencast to HTML5 <video> using ffmpeg and Node.js

The HTML5 editor that we are working with at the lab needs real time simulation rendering. I have earlier made attempts on live streaming simulation data from the server and render it using new HTML5 client-side rendering tools, but these solutions were limited and also custom made for each simulation. To be able to render simulations from any tool, a more general solution is needed. That's when I started investigating HTML5 video. If the HTML5 video element could be used to show live-streamed video, rendered on the server, that would be very useful.

I know you're impatient (I am!), so here's the demo:

The working prototype. To the left is a sample animation, run on the server machine. In the background, ffmpeg is capturing this window and renders it to HTML5-compatible video on the fly. The client browser is shown to the right, and it is streaming the resulting video.

What I learned when building the app:

  • HTML5 video supports only a few video formats (vp8/webm, h246/mp4), and the supported formats depends on what browser you use.
  • The JavaScript video API is very limited. Can't control buffering, can't seek when the video length is unknown, etc.
  • ffmpeg is a very powerful tool. It's your swiss army knife for video. Perhaps it would be even better to use its built-in video broadcasting server instead of using Node.js.
  • Video encoding takes time. Rendering real time video for streaming is almost impossible. It boils down to tuning the encoder to do as little as possible.

You can find the source code in the form of a Node.js script below. Make sure to install all dependencies using NPM and then run the script in Node.

The app starts an Express server and exposes the URLs / and /video.webm to the world. The sooner renders a page with a simple <video> player. The latter does more interesting things: first it starts glxgears as a subprocess, which is commonly used to demo OpenGL. Then it executes xwininfo to get information about the glxgears X window size and position. Last, it starts ffmpeg with some carefully chosen parameters including the window coords and size. ffmpeg records that part of the screen and the recorded video stream is piped to the client stream.
When the client opens /, an HTML5 video player is shown, and it will try to show the video at /video.webm. If everything works as expected, the glxgears animation is shown to the user.

var express = require('express')
  , http = require('http')
  , path = require('path')
  , child_process = require("child_process")

var app = express();

// Server settings
app.set('port', process.env.PORT || 3000);
app.set('views', __dirname + '/views');
app.set('view engine', 'ejs');
app.use(express.favicon());
app.use(express.logger('dev'));
app.use(express.bodyParser());
app.use(express.methodOverride());
app.use(app.router);
app.use(express.static(path.join(__dirname, 'public')));
app.use(express.errorHandler());

// View start page
app.get('/',function(req,res){
    res.render("video"); // Render basic HTML5 video player for /video.webm
});

// Serve the video
app.get('/video.webm',function(req,res){

    // Start demo "simulation"
    var glxgears = child_process.spawn("glxgears",[]);

    // Wait for glxgears to start properly
    setTimeout(function(){

        // Use xwininfo to get the glxgears window position
        var cmd = "xwininfo -root -tree";
        child_process.exec(cmd,function(err,stdout,stderr){
            if(err) return next(err);

            // Get the X11 window size and position
            var lines = stdout.toString().split("\n");
            var width = 300;
            var height = 300;
            var top = 55;
            var left = 52;
            for(var i=0; i<lines.length; i++){
                if(lines[i].match(/gears/)){
                    // Got something like:
                    // 0x5000002 "glxgears": ()  300x300+0+0  +328+89
                    var r = /(\d+)x(\d+)\+(\d+)\+(\d+)\s+\+(\d+)\+(\d+)/;
                    var m = lines[i].match(r);
                    width = m[1];
                    height = m[2];
                    top = m[5];
                    left = m[6];
                    break;
                }
            }

            // Write header
            res.writeHead(200, {
              'Content-Type': 'video/webm'
            });

            // Start ffmpeg
            var ffmpeg = child_process.spawn("ffmpeg",[
                "-re",                   // Real time mode
                "-f","x11grab",          // Grab screen
                "-r","100",              // Framerate
                "-s",width+"x"+height,   // Capture size
                "-i",":0+"+top+","+left, // Capture offset
                "-g","0",                // All frames are i-frames
                "-me_method","zero",     // Motion algorithms off
                "-flags2","fast",
                "-vcodec","libvpx",      // vp8 encoding
                "-preset","ultrafast",
                "-tune","zerolatency",
                "-b:v","1M",             // Target bit rate
                "-crf","40",             // Quality
                "-qmin","5",             // Quantization
                "-qmax","5",
                "-f","webm",             // File format
                "-"                      // Output to STDOUT
            ]);

            // Pipe the video output to the client response
            ffmpeg.stdout.pipe(res);

            // Kill the subprocesses when client disconnects
            res.on("close",function(){
                glxgears.kill();
                ffmpeg.kill();
            })
        });
    },500);
});

// Start server
http.createServer(app).listen(app.get('port'), function(){
  console.log('Express server listening on port ' + app.get('port'));
});

And the "video" view:

<!DOCTYPE html>
<html>
  <head>
    <title>Screencast test</title>
    <link rel='stylesheet' href='/stylesheets/style.css' />
  </head>
  <body>
    <h1>Screencast test</h1>
    <video controls preload="none" autoplay="autoplay" autobuffer id="video">
        <source src="video.webm" type="video/webm">
        <p>Your browser does not support WebM video.</p>
    </video>
  </body>
</html>

To set up this project locally, first create an Express project using command "express --ejs" in a folder. Follow the instructions. Replace app.js by the JS code in this blog entry. Create a view views/video.ejs, and copy over the code from here too. Install all dependencies with npm install.

Run this app on command line by running: node app.js

The code worked on a setup with Node.js 0.8.14, Chromium 28, and ffmpeg 0.8.6 on vanilla Ubuntu 13.04.

Limitations of this app:

  • X11 is used to get window info and capture video from screen. This makes it limited to Linux.
  • The app uses the default DISPLAY, which is quite inconvenient. Should use a virtual hidden display.
  • Only 1 connection at a time is supported.
Comments (9) Trackbacks (3)
  1. Hi,
    I would like to know if you could share your ejs views and tell me with which version of node did this work. Im not able to make it work with node 0.10, nor with chrome nor with firefox.
    Thanks in adavance!

    • I am currently using Node.js 0.8.14, Chromium 28, ffmpeg 0.8.6 on vanilla Ubuntu 13.04. Do you get any error messages? What OS/version do you run it on?

  2. Hello,

    I am trying to run it in ubuntu. I am not getting any response. Can you please tell me step by step for running this using nodejs.

    Thanks.

    • Added some instructions on how to set it up locally using “express –ejs” on command line.
      No response? From the web server?

      • Hello,

        I was able to run code and video tag is there, but it is not displaying gears. I am getting some response. I checked in network tab in chrome. but I am not getting response after few milliseconds. I am trying to make live video streaming using webcam , node ,ffmpeg, and html5 video tag. So I will create one server and many clients will connect to that stream. If can help me than I would be grateful to you.

        Thanks,
        Mohit

  3. events.js:72
    throw er; // Unhandled ‘error’ event
    ^
    Error: spawn ENOENT
    at errnoException (child_process.js:980:11)
    at Process.ChildProcess._handle.onexit (child_process.js:771:34)

    I followed the same steps . getting above error in Ubuntu. Can you please help on this.

  4. My ubuntu was missing glxgeras, It Worked after installing glxgears.

  5. Hi, I would like to ask about the options that you appended to the ffmpeg application when u spawn it on your server. The file format that you have opted in ffmpeg was webM but file and header settings that you piped out was in mp4. Is that allowed and how does that work out? Thank you.


Leave a comment


*