GStreamer: getting frames with appsink and displaying them with OpenCV blocks after one frame

I use GStreamer's appsink to get each frame of an RTSP video stream, and I want to display the frames in an OpenCV imshow window. The terminal output stops after one frame's messages and the program blocks. What is the problem?

#include <gst/gst.h>
#include <gst/app/gstappsink.h>
#include <string.h>
#include <iostream>
#include <opencv2/opencv.hpp>
#include <vector>
#include <thread>
#include <mutex>
#include <X11/Xlib.h>

#define CAPS "video/x-raw,format=RGB,pixel-aspect-ratio=1/1"

typedef struct _CustomData {
    GstElement *pipeline;
    GstElement *source;
    GstElement *jitterbuffer;
    GstElement *depayloader;
    GstElement *avdec;
    GstElement *convert;
    GstElement *sink;
    GMainLoop *main_loop;
    std::string rtsp_path;
} CustomData;

std::mutex display_mutex;

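// "new-sample" callback: GStreamer invokes this on its streaming thread, not
// on the thread that created the pipeline, so everything below (including the
// cv::imshow call) runs off the main thread.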
static GstFlowReturn new_sample(GstElement *sink, CustomData *data) {
    GstSample *sample;
    GstBuffer *buffer;
    GstCaps *caps;
    GstMapInfo map;
    gint width, height;

    g_signal_emit_by_name(sink, "pull-sample", &sample);
    if (sample) {
        buffer = gst_sample_get_buffer(sample);
        caps = gst_sample_get_caps(sample);
        GstStructure *s = gst_caps_get_structure(caps, 0);
        gst_structure_get_int(s, "width", &width);
        gst_structure_get_int(s, "height", &height);

        if (gst_buffer_map(buffer, &map, GST_MAP_READ)) {
            cv::Mat frame(height, width, CV_8UC3, (void*)map.data);

            std::lock_guard<std::mutex> lock(display_mutex);
            if (!frame.empty()) {
                cv::imshow(data->rtsp_path, frame);
                cv::waitKey(1);
            }

            gst_buffer_unmap(buffer, &map);
        }
        gst_sample_unref(sample);
    }
    return GST_FLOW_OK;
}

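// rtspsrc creates its source pads dynamically once the RTP streams are
// negotiated, so each new pad has to be linked to the jitterbuffer here.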
static void pad_added_handler(GstElement *src, GstPad *new_pad, CustomData *data) {
    GstPad *sink_pad = gst_element_get_static_pad(data->jitterbuffer, "sink");
    if (gst_pad_is_linked(sink_pad)) {
        gst_object_unref(sink_pad);
        return;
    }

    GstCaps *new_pad_caps = gst_pad_get_current_caps(new_pad);
    if (!new_pad_caps)
        new_pad_caps = gst_pad_query_caps(new_pad, NULL);
    GstStructure *new_pad_struct = gst_caps_get_structure(new_pad_caps, 0);
    const gchar *new_pad_type = gst_structure_get_name(new_pad_struct);

    if (!g_str_has_prefix(new_pad_type, "application/x-rtp")) {
        gst_caps_unref(new_pad_caps);
        gst_object_unref(sink_pad);
        return;
    }

    gst_pad_link(new_pad, sink_pad);
    gst_caps_unref(new_pad_caps);
    gst_object_unref(sink_pad);
}

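// Builds and runs one pipeline:
// rtspsrc -> rtpjitterbuffer -> rtph264depay -> avdec_h264 -> videoconvert -> appsink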
void start_pipeline(CustomData *data) {
    data->pipeline = gst_pipeline_new("pipeline");
    data->source = gst_element_factory_make("rtspsrc", "source");
    data->jitterbuffer = gst_element_factory_make("rtpjitterbuffer", "jitterbuffer");
    data->depayloader = gst_element_factory_make("rtph264depay", "depayloader");
    data->avdec = gst_element_factory_make("avdec_h264", "avdec");
    data->convert = gst_element_factory_make("videoconvert", "convert");
    data->sink = gst_element_factory_make("appsink", "sink");

    g_object_set(data->source, "location", data->rtsp_path.c_str(), NULL);

    // Force the appsink to negotiate the format the cv::Mat wrapper expects;
    // without these caps the decoder's native format (typically I420) reaches
    // the callback and the CV_8UC3 interpretation is wrong. Note that imshow
    // expects BGR channel order, so RGB frames display with red and blue swapped.
    GstCaps *caps = gst_caps_from_string(CAPS);
    gst_app_sink_set_caps(GST_APP_SINK(data->sink), caps);
    gst_caps_unref(caps);

    g_object_set(data->sink, "emit-signals", TRUE, NULL);
    g_signal_connect(data->sink, "new-sample", G_CALLBACK(new_sample), data);
    g_signal_connect(data->source, "pad-added", G_CALLBACK(pad_added_handler), data);

    gst_bin_add_many(GST_BIN(data->pipeline), data->source, data->jitterbuffer, data->depayloader, data->avdec, data->convert, data->sink, NULL);
    gst_element_link_many(data->jitterbuffer, data->depayloader, data->avdec, data->convert, data->sink, NULL);

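    // Run the GLib main loop on this thread; g_main_loop_run blocks until
    // g_main_loop_quit is called. No bus watch is installed, so pipeline
    // errors will not stop the loop.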
    data->main_loop = g_main_loop_new(NULL, FALSE);
    gst_element_set_state(data->pipeline, GST_STATE_PLAYING);
    g_main_loop_run(data->main_loop);

    gst_element_set_state(data->pipeline, GST_STATE_NULL);
    gst_object_unref(GST_OBJECT(data->pipeline));
    g_main_loop_unref(data->main_loop);
}

int main(int argc, char *argv[]) {

    // XInitThreads must be the first Xlib call so that the HighGUI windows
    // created on the pipeline threads are safe to use.
    XInitThreads();
    // Initialise GStreamer once before the pipeline threads start.
    gst_init(&argc, &argv);
    std::vector<std::string> rtsp_paths = {
        //"rtsp://admin:[email protected]:557/LiveMedia/ch1/Media1",
        //"rtsp://admin:[email protected]:562/h264/ch1/av_stream",
        "rtsp://admin:[email protected]:561/h264/ch1/av_stream"
    };

    std::vector<std::thread> threads;
    std::vector<CustomData> data(rtsp_paths.size());

    for (size_t i = 0; i < rtsp_paths.size(); ++i) {
        data[i].rtsp_path = rtsp_paths[i];
        threads.emplace_back(start_pipeline, &data[i]);
    }

    for (auto &t : threads) {
        t.join();
    }

    return 0;
}

I tried calling imshow separately and running the OpenCV display in its own thread, but it didn't work. I also tried changing the waitKey timeout. I just want OpenCV to display the decoded frames in a window so the stream is easy to test.
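To show what I mean by running the OpenCV display separately, here is a minimal sketch of that idea (the names enqueue_frame, display_loop and frame_queue are illustrative, not part of my program above): the appsink callback only copies each frame into a small queue, and the main thread is the only place that calls imshow/waitKey.

#include <condition_variable>
#include <deque>
#include <mutex>
#include <opencv2/opencv.hpp>

static std::mutex queue_mutex;
static std::condition_variable queue_cv;
static std::deque<cv::Mat> frame_queue;

// Called from the GStreamer streaming thread (where new_sample runs above):
// copy the frame out and return immediately, with no GUI calls here.
static void enqueue_frame(const cv::Mat &frame) {
    std::lock_guard<std::mutex> lock(queue_mutex);
    if (frame_queue.size() > 2)           // drop stale frames instead of piling up
        frame_queue.pop_front();
    frame_queue.push_back(frame.clone()); // clone: the GstBuffer is unmapped after the callback
    queue_cv.notify_one();
}

// Runs on the main thread, which owns every imshow/waitKey call.
static void display_loop() {
    for (;;) {
        cv::Mat frame;
        {
            std::unique_lock<std::mutex> lock(queue_mutex);
            queue_cv.wait(lock, [] { return !frame_queue.empty(); });
            frame = std::move(frame_queue.front());
            frame_queue.pop_front();
        }
        cv::imshow("stream", frame);
        if (cv::waitKey(1) == 27) // Esc quits
            break;
    }
}

In this layout g_main_loop_run would move to a worker thread and main would call display_loop instead.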
