March 19

I wanted to stream video from two cameras at the same time. At first I read both streams in the main thread, but one of the cameras was always delayed. I searched for ways to prevent this delay when using OpenCV, and a Stack Overflow post gave me the idea of reading the second stream in a separate thread. I was almost surprised that it worked flawlessly with DCV. In short, you can stream from two cameras simultaneously in the same program at around 80 FPS (here, a webcam and a PS3 Eye USB camera).
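
For reference, the project setup is roughly the dub.sdl below. The package names and versions are my assumption based on the imports, so check dcv's own example projects for the exact dependencies; the string import path is needed because the code loads the font with import("Nunito-Regular.ttf").

// names and versions below are a guess; see dcv's bundled examples for the real dub setup
name "two-camera-stream"
dependency "dcv" version="*"
dependency "dplug:core" version="*"
dependency "mir-algorithm" version="*"
stringImportPaths "views" // put Nunito-Regular.ttf in here

With that in place, dub run should build and start the example; just adjust the /dev/videoN paths below to match your devices.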

/**
 * A two-camera video streaming example using the dcv library.
 */
module dcv.example.video;

import core.stdc.stdio;
import core.stdc.stdlib;

import std.datetime.stopwatch : StopWatch;

import dcv.videoio;
import dcv.imgproc.color;
import dcv.core;
import dcv.plot;

import dplug.core.thread;
import mir.ndslice;

@nogc nothrow:
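// The whole example runs without the GC: the streams are allocated with
// mallocNew and every decoded frame is released explicitly with destroyFree.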

ulong frameCount = 0;
double elapsedTime = 0.0;
double realTimeFPS = 0.0;

TtfFont font;

void main()
{
    font = TtfFont(cast(ubyte[])import("Nunito-Regular.ttf"));
    //////////// Open the video stream ////////////////

    InputStream inStream1 = mallocNew!InputStream(false); // forceRGB = false: frames stay in YUV, RGB conversion is done with dcv
    scope(exit) destroyFree(inStream1);

    InputStream inStream2 = mallocNew!InputStream(false); // forceRGB = false: frames stay in YUV, RGB conversion is done with dcv
    scope(exit) destroyFree(inStream2);

    string path1 = "/dev/video0";
    string path2 = "/dev/video2";
    InputStreamType type = InputStreamType.LIVE; // type of the stream (file or live)

    enum W = 640;
    enum H = 480;
    inStream1.setVideoSizeRequest(W, H);
    inStream1.open(path1, type);

    inStream2.setVideoSizeRequest(W, H);
    inStream2.open(path2, type);

    // Check if video has been opened correctly
    if (!inStream1.isOpen)
    {
        printf("Cannot open input video stream 1");
        exit(-1);
    }

    if (!inStream2.isOpen)
    {
        printf("Cannot open input video stream 2");
        exit(-1);
    }

    //////////// Read video frames //////////////////
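    // Create the figure for the second stream up front, on the main thread,
    // before the worker thread starts drawing into it.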
    auto fig2 = imshow(rcslice!ubyte(H, W, 3), path2);

    // Define a thread function to read frames from the second stream
    scope nothrow @nogc ThreadDelegate readStream2 = ()
    {
        Image frame2;

        while (inStream2.readFrame(frame2))
        {
            if (frame2.format == ImageFormat.IF_YUV){
                auto toShow = frame2.sliced.yuv2rgb!ubyte;
                fig2.draw(toShow, ImageFormat.IF_RGB);
            }else{
                fig2.draw(frame2); // not reached in practice: with forceRGB disabled, frames arrive as YUV
            }

            destroyFree(frame2);
            frame2 = null;

            if (!fig2.visible)
                break;
        }
    };

    // Start the thread to read frames from the second stream
    auto thread2 = makeThread(readStream2);
    thread2.start;

    Image frame1;

    // read the frame rate, if info is available.
    double fps = inStream1.frameRate ? inStream1.frameRate : 30.0;
    // calculate frame wait time in milliseconds - if the video is live, set it to a minimal value.
    double waitFrame = (type == InputStreamType.LIVE) ? 1.0 : 1000.0 / fps;

    StopWatch s;
    s.start;

    auto fig1 = imshow(rcslice!ubyte(H, W, 3), path1);

    // Read each next frame of the video in the loop.
    while (inStream1.readFrame(frame1))
    {
        import std.algorithm.comparison : max;

        s.reset;

        // If video frame pixel format is YUV, convert the data to RGB, then show it on screen
        if (frame1.format == ImageFormat.IF_YUV){
            auto toShow = frame1.sliced.yuv2rgb!ubyte;
            fig1.draw(toShow, ImageFormat.IF_RGB);
        }else{
            fig1.draw(frame1); // not reached in practice: with forceRGB disabled, frames arrive as YUV
        }

        destroyFree(frame1);
        frame1 = null;

        // Compensate the per-frame wait for the time spent on color conversion and drawing.
        int wait = max(1, cast(int)waitFrame - cast(int)s.peek.total!"msecs");

        // If user presses escape key, stop the streaming.
        if (waitKey(wait) == KEY_ESCAPE)
            break;

        drawFPS(fig1, s);
        /*
        Check whether the figure is still visible.

        The figure window can be closed with the 'x' button; when that happens,
        the visible property returns false, so we break out of the streaming loop.
        */
        if (!fig1.visible)
            break;
    }

    thread2.join();

    destroyFigures(); // destroy all figures allocated
}

// Overlay a running FPS estimate on the given figure. The module-level counters
// accumulate across calls and are reset roughly once per second.
void drawFPS(Figure fig, ref StopWatch s){
    frameCount++;
    elapsedTime += s.peek.total!"msecs" / 1000.0;

    // Calculate FPS if elapsed time is non-zero
    if (elapsedTime > 0)
    {
        realTimeFPS = cast(double)frameCount / elapsedTime;
    }

    import core.stdc.stdio;
    char[32] buff;
    snprintf(buff.ptr, buff.length, "FPS: %.2f", realTimeFPS);

    fig.drawText(font, buff[], PlotPoint(20.0f, 20.0f),
                0.0f, 30, plotGreen);

    if (elapsedTime > 1.0) {
        frameCount = 0;
        elapsedTime = 0.0;
    }
}