C++ 如果视频发送器关闭,则不显示信号图像

C++ / GStreamer:在本例中,如果关闭发送器,则接收器显示的视频将冻结。是否有办法显示静态无信号图像(例如全蓝色图像),并在发送方重新启动时恢复视频?

在本例中,如果关闭发送器,则接收器显示的视频将冻结。是否有办法显示静态无信号图像,例如全蓝色图像,并在发送方重新启动时返回视频

发送者

gst-launch-1.0 videotestsrc ! video/x-raw,format=GRAY8 ! videoconvert ! x264enc pass=qual quantizer=20 tune=zerolatency ! rtph264pay ! udpsink host=127.0.0.1 port=5000
接收者

gst-launch-1.0 udpsrc port=5000 ! application/x-rtp ! rtph264depay ! avdec_h264 ! videoconvert ! autovideosink
编辑

这段代码似乎很接近,但由于某种原因,如果我通过取消注释注释掉的行来添加videotestsrc,udpsrc将不再调用超时回调:

// g++ gst_client.cpp `pkg-config --cflags gstreamer-1.0` `pkg-config --libs gstreamer-1.0`

#include <gst/gst.h>

#include <cstdlib>
#include <cstdio>


/*
 * Shared state handed to every GStreamer callback (bus handlers and pad
 * probes).  Owned by main(); lives on its stack for the whole run.
 */
struct gstreamer_data {
    GstElement* pipeline;          /* top-level pipeline bin */
    GstElement* no_signal_source;  /* videotestsrc to show while the stream is down */
    GstElement* udp_source;        /* udpsrc receiving the RTP stream */
    GstElement* rtp_decoder;       /* rtph264depay */
    GstElement* video_decoder;     /* avdec_h264 */
    GstElement* input_selector;    /* switches between live and fallback input */
    GstElement* video_converter;   /* videoconvert */
    GstElement* video_sink;        /* autovideosink */
    gulong signal_handler_id;      /* id of the most recently connected bus handler.
                                    * NOTE(review): reused for both the error and the
                                    * element handler -- the error handler's id is lost
                                    * once have_data_callback overwrites it */
    GMainLoop* main_loop;          /* run by main(), quit by error_callback */
};


static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data);
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data);


/*
 * One-shot buffer probe on the udpsrc src pad.  Fires as soon as RTP data
 * is flowing; at that point we start listening for the udpsrc timeout
 * element message on the bus, then detach the probe.
 */
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data) {
    (void) pad;
    (void) info;

    printf("have data\n");

    GstBus* bus = gst_element_get_bus(user_data->pipeline);
    user_data->signal_handler_id =
        g_signal_connect(G_OBJECT(bus), "message::element",
                         (GCallback) element_callback, user_data);
    gst_object_unref(bus);

    /* One-shot: remove the probe after the first buffer. */
    return GST_PAD_PROBE_REMOVE;
}


/*
 * Bus "message::element" handler.  On the udpsrc "GstUDPSrcTimeout"
 * message (no RTP data for the configured timeout) it disconnects itself
 * and re-installs the buffer probe so have_data_callback fires again when
 * the stream comes back.
 */
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
    const GstStructure* st = gst_message_get_structure(message);
    GstPad* pad;


    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ELEMENT) {
        if (gst_structure_has_name(st, "GstUDPSrcTimeout")) {
            printf("Timeout received from udpsrc\n");

            g_signal_handler_disconnect(G_OBJECT(bus), data->signal_handler_id);

            pad = gst_element_get_static_pad(data->udp_source, "src");
            gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, data, NULL);
            /* Fix: gst_element_get_static_pad() returns a new reference;
             * drop it or the pad leaks on every timeout. */
            gst_object_unref(pad);
        }
    }
}


/*
 * Bus "message::error" handler: print the error and its debug details,
 * then quit the main loop so the process can exit.
 */
static void error_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
    (void) bus;

    GError* error = NULL;
    gchar* details = NULL;
    gst_message_parse_error(message, &error, &details);

    g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), error->message);
    g_printerr("Debugging information: %s\n", details ? details : "none");

    g_free(details);
    g_clear_error(&error);

    g_main_loop_quit(data->main_loop);
}


int main() {
    gstreamer_data data;
    GstStateChangeReturn ret;
    GstBus* bus;
    GstPad* pad;


    gst_init(NULL, NULL);


    data.no_signal_source = gst_element_factory_make("videotestsrc", "no_signal_source");
    g_object_set(G_OBJECT(data.no_signal_source),
        "pattern", 6,
        NULL);

    data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
    g_object_set(G_OBJECT(data.udp_source),
        "port", 5000,
        "caps", gst_caps_new_empty_simple("application/x-rtp"),
        "timeout", 1000000000,
        NULL);


    data.rtp_decoder = gst_element_factory_make("rtph264depay", "rtp_decoder");

    data.video_decoder = gst_element_factory_make("avdec_h264", "video_decoder");

    data.input_selector = gst_element_factory_make("input-selector", "input_selector");

    data.video_converter = gst_element_factory_make("videoconvert", "video_converter");

    data.video_sink = gst_element_factory_make("autovideosink", "video_sink");

    data.pipeline = gst_pipeline_new("pipeline");

    if (
        !data.pipeline ||
        !data.no_signal_source ||
        !data.udp_source ||
        !data.rtp_decoder ||
        !data.video_decoder ||
        !data.input_selector ||
        !data.video_converter ||
        !data.video_sink
        )
        {
            g_printerr("Not all elements could be created.\n");
            exit(-1);
        }

    gst_bin_add_many(
        GST_BIN(data.pipeline),
        //data.no_signal_source,
        data.udp_source,
        data.rtp_decoder,
        data.video_decoder,
        data.input_selector,
        data.video_converter,
        data.video_sink,
        NULL);


    if (gst_element_link_many(
        data.udp_source,
        data.rtp_decoder,
        data.video_decoder,
        NULL) != TRUE)
        {
            g_printerr("Elements could not be linked.\n");
            gst_object_unref(data.pipeline);
            exit(-1);
        }


    GstPad* src_1 = gst_element_get_static_pad(data.video_decoder, "src");
    GstPad* sink_1 = gst_element_get_request_pad(data.input_selector, "sink_%u");
    gst_pad_link(src_1, sink_1);

/*
    GstPad* src_2 = gst_element_get_static_pad(data.no_signal_source, "src");
    GstPad* sink_2 = gst_element_get_request_pad(data.input_selector, "sink_%u");
    gst_pad_link(src_2, sink_2);
*/


    g_object_set(G_OBJECT(data.input_selector),
        "active-pad", sink_1,
        NULL);


    if (gst_element_link_many(
        data.input_selector,
        data.video_converter,
        data.video_sink,
        NULL) != TRUE)
        {
            g_printerr("Elements could not be linked.\n");
            gst_object_unref(data.pipeline);
            exit(-1);
        }


    pad = gst_element_get_static_pad(data.udp_source, "src");
    gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, &data, NULL);


    bus = gst_element_get_bus(data.pipeline);
    gst_bus_add_signal_watch(bus);
    data.signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_callback, &data);
    gst_object_unref(bus);


    ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr("Unable to set the pipeline to the playing state.\n");
        gst_object_unref(data.pipeline);
        exit(-1);
    }

    data.main_loop = g_main_loop_new(NULL, FALSE);
    g_main_loop_run(data.main_loop);

    return 0;
}
(注:此处原为上方 gst_client.cpp 代码清单经机器翻译后产生的乱码重复内容,且在 "data.main_lo" 处被截断,不含任何独立信息;完整代码请参见上方的原始清单。)
// g++ gst_client.cpp `pkg-config --cflags gstreamer-1.0` `pkg-config --libs gstreamer-1.0`

#include <gst/gst.h>

#include <cstdlib>
#include <cstdio>


/*
 * Shared state handed to every GStreamer callback (bus handlers and pad
 * probes).  Owned by main(); lives on its stack for the whole run.
 */
struct gstreamer_data {
    GstElement* pipeline;         /* top-level pipeline bin */
    GstElement* video_source;     /* videotestsrc fallback ("no signal" pattern) */
    GstElement* udp_source;       /* udpsrc receiving the RTP stream */
    GstElement* rtp_decoder;      /* rtph264depay */
    GstElement* video_decoder;    /* avdec_h264 */
    GstElement* video_converter;  /* videoconvert (end of the network branch) */
    GstElement* input_selector;   /* switches between the two branches */
    GstPad* sink_1;               /* selector request pad fed by video_source */
    GstPad* sink_2;               /* selector request pad fed by video_converter */
    GstElement* video_sink;       /* autovideosink */
    gulong signal_handler_id;     /* id of the most recently connected bus handler.
                                   * NOTE(review): reused for both the error and the
                                   * element handler -- the error handler's id is lost
                                   * once have_data_callback overwrites it */
    GMainLoop* main_loop;         /* run by main(), quit by error_callback */
};


static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data);
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data);


/*
 * One-shot buffer probe on the udpsrc src pad: RTP data is flowing again.
 * Connects the "message::element" bus handler so the next udpsrc timeout
 * is noticed, then removes itself.  The commented-out block is the
 * intended switch of the selector back to the live branch (sink_2).
 */
static GstPadProbeReturn have_data_callback(GstPad* pad, GstPadProbeInfo *info, gstreamer_data* user_data) {
    GstBus* bus;


    printf("have data\n");
/*
    g_object_set(G_OBJECT(user_data->input_selector),
        "active-pad", user_data->sink_2,
        NULL);
*/
    bus = gst_element_get_bus(user_data->pipeline);
    /* NOTE(review): this overwrites the error-handler id stored by main(). */
    user_data->signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::element", (GCallback) element_callback, user_data);
    gst_object_unref(bus);

    /* One-shot: remove the probe after the first buffer. */
    return GST_PAD_PROBE_REMOVE;
}


/*
 * Bus "message::element" handler.  Reacts to the udpsrc "GstUDPSrcTimeout"
 * message (no RTP data within the configured timeout).  The commented-out
 * block is the intended switch of the selector to the fallback branch
 * (sink_1).  It then disconnects itself and re-arms the buffer probe so
 * have_data_callback fires again once the stream returns.
 */
static void element_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
    const GstStructure* st = gst_message_get_structure(message);
    GstPad* pad;


    if (GST_MESSAGE_TYPE(message) == GST_MESSAGE_ELEMENT) {
        if (gst_structure_has_name(st, "GstUDPSrcTimeout")) {

            printf("no data\n");
/*
            g_object_set(G_OBJECT(data->input_selector),
                "active-pad", data->sink_1,
                NULL);
*/
            g_signal_handler_disconnect(G_OBJECT(bus), data->signal_handler_id);

            pad = gst_element_get_static_pad(data->udp_source, "src");
            gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, data, NULL);
            gst_object_unref(pad);
        }
    }
}


/*
 * Bus "message::error" handler: report the failing element and any debug
 * detail, then stop the main loop.
 */
static void error_callback(GstBus* bus, GstMessage* message, gstreamer_data* data) {
    (void) bus;

    GError* gerror = NULL;
    gchar* dbg = NULL;
    gst_message_parse_error(message, &gerror, &dbg);

    g_printerr("Error received from element %s: %s\n", GST_OBJECT_NAME(message->src), gerror->message);
    g_printerr("Debugging information: %s\n", dbg ? dbg : "none");

    g_free(dbg);
    g_clear_error(&gerror);

    g_main_loop_quit(data->main_loop);
}


int main() {
    gstreamer_data data;
    GstStateChangeReturn ret;
    GstBus* bus;
    GstPad* pad;


    gst_init(NULL, NULL);


    data.video_source = gst_element_factory_make("videotestsrc", "video_source");
    g_object_set(G_OBJECT(data.video_source),
        "pattern", 6,
        "is-live", true,
        NULL);


    data.udp_source = gst_element_factory_make("udpsrc", "udp_source");
    g_object_set(G_OBJECT(data.udp_source),
        "port", 5000,
        "caps", gst_caps_new_empty_simple("application/x-rtp"),
        "timeout", 1000000000,
        NULL);

    data.rtp_decoder = gst_element_factory_make("rtph264depay", "rtp_decoder");

    data.video_decoder = gst_element_factory_make("avdec_h264", "video_decoder");

    data.video_converter = gst_element_factory_make("videoconvert", "video_converter");


    data.input_selector = gst_element_factory_make("input-selector", "input_selector");

    data.video_sink = gst_element_factory_make("autovideosink", "video_sink");


    data.pipeline = gst_pipeline_new("pipeline");


    if (
        !data.pipeline ||
        !data.video_source ||
        !data.udp_source ||
        !data.rtp_decoder ||
        !data.video_decoder ||
        !data.video_converter ||
        !data.input_selector ||
        !data.video_sink
        )
        {
            g_printerr("Not all elements could be created.\n");
            exit(-1);
        }


    gst_bin_add_many(
        GST_BIN(data.pipeline),
        data.video_source,
        data.udp_source,
        data.rtp_decoder,
        data.video_decoder,
        data.video_converter,
        data.input_selector,
        data.video_sink,
        NULL);


    if (gst_element_link_many(
        data.udp_source,
        data.rtp_decoder,
        data.video_decoder,
        data.video_converter,
        NULL) != TRUE)
        {
            g_printerr("Elements could not be linked.\n");
            gst_object_unref(data.pipeline);
            exit(-1);
        }


    GstPad* src_1 = gst_element_get_static_pad(data.video_source, "src");
    data.sink_1 = gst_element_get_request_pad(data.input_selector, "sink_%u");
    gst_pad_link(src_1, data.sink_1);
    gst_object_unref(src_1);    


    GstPad* src_2 = gst_element_get_static_pad(data.video_converter, "src");
    data.sink_2 = gst_element_get_request_pad(data.input_selector, "sink_%u");
    gst_pad_link(src_2, data.sink_2);
    gst_object_unref(src_2);


    if (gst_element_link_many(
        data.input_selector,
        data.video_sink,
        NULL) != TRUE)
        {
            g_printerr("Elements could not be linked.\n");
            gst_object_unref(data.pipeline);
            exit(-1);
        }


    pad = gst_element_get_static_pad(data.udp_source, "src");
    gst_pad_add_probe(pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback) have_data_callback, &data, NULL);
    gst_object_unref(pad);

    bus = gst_element_get_bus(data.pipeline);
    gst_bus_add_signal_watch(bus);
    data.signal_handler_id = g_signal_connect(G_OBJECT(bus), "message::error", (GCallback) error_callback, &data);
    gst_object_unref(bus);


    ret = gst_element_set_state(data.pipeline, GST_STATE_PLAYING);
    if (ret == GST_STATE_CHANGE_FAILURE) {
        g_printerr("Unable to set the pipeline to the playing state.\n");
        gst_object_unref(data.pipeline);
        exit(-1);
    }

    data.main_loop = g_main_loop_new(NULL, FALSE);
    g_main_loop_run(data.main_loop);

    return 0;
}