How to program a videomixer using the GStreamer C API

I am trying to replicate the following gst-launch pipeline using the C API:

gst-launch -e videomixer name=mix ! ffmpegcolorspace ! xvimagesink \
   videotestsrc pattern=1  ! video/x-raw-yuv,width=100,height=100 ! videobox border-alpha=0 top=0 left=0 ! mix. \
   videotestsrc pattern=0  ! video/x-raw-yuv,width=100,height=100 ! videobox border-alpha=0 top=0 left=-100 ! mix. 
So far I have:

#include <gst/gst.h>
#include <glib.h>


static gboolean
bus_call (GstBus     *bus,
          GstMessage *msg,
          gpointer    data)
{
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {

    case GST_MESSAGE_EOS:
      g_print ("End of stream\n");
      g_main_loop_quit (loop);
      break;

    case GST_MESSAGE_ERROR: {
      gchar  *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);

      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);

      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }

  return TRUE;
}


int
main (int   argc,
      char *argv[])
{
  GMainLoop *loop;

  GstElement *pipeline;
  GstElement *source1,*source2;
  GstElement *scale,*filter;
  GstElement *videobox1,*videobox2; //just one.
  GstElement *mixer,*clrspace,*sink;
  GstCaps *filtercaps;
  GstBus *bus;

  /* Initialisation */
  gst_init (&argc, &argv);

  loop = g_main_loop_new (NULL, FALSE);


  /* Check input arguments */
  /*if (argc != 2) {
    g_printerr ("Usage: %s <Ogg/Vorbis filename>\n", argv[0]);
    return -1;
  }*/

 //gst-launch videotestsrc pattern=snow ! ximagesink

  /* Create gstreamer elements */
  pipeline = gst_pipeline_new ("player");
  source1   = gst_element_factory_make ("videotestsrc",       "source1");
  source2   = gst_element_factory_make ("videotestsrc",       "source2");
//  source2   = gst_element_factory_make ("uridecodebin",       "file-source2");
  scale   = gst_element_factory_make ("videoscale",       "scale");
  filter = gst_element_factory_make("capsfilter","filter");
  videobox1 = gst_element_factory_make ("videobox",       "videobox1");
  videobox2 = gst_element_factory_make ("videobox",       "videobox2");
  mixer = gst_element_factory_make ("videomixer",       "mixer");
  clrspace  = gst_element_factory_make ("ffmpegcolorspace",       "clrspace");

//  demuxer  = gst_element_factory_make ("oggdemux",      "ogg-demuxer");
//  decoder  = gst_element_factory_make ("vorbisdec",     "vorbis-decoder");
//  conv     = gst_element_factory_make ("audioconvert",  "converter");
  sink     = gst_element_factory_make ("xvimagesink", "sink");

  /*if (!pipeline || !source || !demuxer || !decoder || !conv || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }*/
  if (!pipeline || !source1 || !source2 || !sink) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  filtercaps = gst_caps_new_simple ("video/x-raw-yuv",
          "width", G_TYPE_INT, 200,
          "height", G_TYPE_INT, 100,          
          NULL);
  g_object_set (G_OBJECT (filter), "caps", filtercaps, NULL);
  //gst_caps_unref (filtercaps);

  g_object_set(videobox1,"border-alpha",0,"top",0,"left",0,NULL);
  g_object_set(videobox2,"border-alpha",0,"top",0,"left",-200,NULL);


  /* Set up the pipeline */

  /* we set the input filename to the source element */
    g_object_set (G_OBJECT (source1), "pattern", 0, NULL);
    g_object_set (G_OBJECT (source2), "pattern", 1, NULL);

  /* we add a message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* we add all elements into the pipeline */
  gst_bin_add_many (GST_BIN (pipeline),
                    source1,filter,videobox1,mixer,clrspace, sink, source2,videobox2, NULL);

  /* we link the elements together */
    //gst_element_link_many (source1, scale, filter, videobox1, mixer, clrspace, sink);
    //gst_element_link_many (source2, scale, filter, videobox2, mixer, clrspace, sink);
    gst_element_link_many (source1, filter, videobox1, mixer, clrspace, sink);
    gst_element_link_many (source2, filter, videobox2, mixer, clrspace, sink);


  /* Set the pipeline to "playing" state*/
  gst_element_set_state (pipeline, GST_STATE_PLAYING);

  /* Iterate */
  g_print ("Running...\n");
  g_main_loop_run (loop);

  /* Out of the main loop, clean up nicely */
  g_print ("Returned, stopping playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  g_print ("Deleting pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
}
Why is it complaining that source2 is not linked?

gst_element_link_many() expects a NULL-terminated argument list, and a pad can only be linked once, so the second chain has to stop at the mixer instead of running through clrspace and sink a second time:

gst_element_link_many (source1, filter, videobox1, mixer, clrspace, sink, NULL);
gst_element_link_many (source2, filter, videobox2, mixer, NULL);
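Note that both calls still route the branches through the same capsfilter instance, whose pads can likewise only be linked once, so each branch needs its own capsfilter. A minimal sketch, with filter2 as a hypothetical second instance:

/* filter2 is a hypothetical second capsfilter, configured like filter;
   an element's pads can only be linked once, so each branch gets its own. */
GstElement *filter2 = gst_element_factory_make ("capsfilter", "filter2");
g_object_set (G_OBJECT (filter2), "caps", filtercaps, NULL);
gst_bin_add (GST_BIN (pipeline), filter2);

gst_element_link_many (source2, filter2, videobox2, mixer, NULL);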

Also look at the xpos, ypos and zorder properties on the videomixer pads; you can spare yourself the videobox elements that way and gain some performance (only available in somewhat newer releases, but it might help).
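For example, a minimal sketch, assuming a videomixer whose sink pads expose xpos/ypos/zorder (see GstVideoMixerPad) and reusing the hypothetical filter2 from the sketch above:

/* Drop videobox2 and position the stream on the mixer pad itself.
   The pad template is named "sink_%d" in GStreamer 0.10 and "sink_%u" in 1.x. */
GstPad *filter_src = gst_element_get_static_pad (filter2, "src");
GstPad *mix_sink   = gst_element_get_request_pad (mixer, "sink_%d");

g_object_set (mix_sink, "xpos", 100, "ypos", 0, "zorder", 1, NULL);
gst_pad_link (filter_src, mix_sink);
gst_object_unref (filter_src);
gst_object_unref (mix_sink);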

If you look at the videomixer documentation (or gst-inspect videomixer), you will see that its sink pads are request pads. You need to create these pads first, before linking them:

/* Manually link the mixer, which has "Request" pads.
   The sink pad template is named "sink_%u" in GStreamer 1.x ("sink_%d" in 0.10). */
GstPadTemplate *mixer_sink_pad_template;
GstPad *mixer_sink_pad, *sink_pad;

mixer_sink_pad_template = gst_element_class_get_pad_template (GST_ELEMENT_GET_CLASS (mixer), "sink_%u");
mixer_sink_pad = gst_element_request_pad (mixer, mixer_sink_pad_template, NULL, NULL);
sink_pad = gst_element_get_static_pad (clrspace, "src");
gst_pad_link (sink_pad, mixer_sink_pad);
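When a branch is removed or the pipeline is torn down, a requested pad should be handed back to the element rather than just unreffed; a short sketch using the names above:

/* Return the requested pad to the mixer and drop our own reference. */
gst_element_release_request_pad (mixer, mixer_sink_pad);
gst_object_unref (mixer_sink_pad);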

Similarly, request pads can be created for any number of streams.
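For instance, a sketch that requests one mixer pad per input branch in a loop, assuming mixer_sink_pad_template from the snippet above and linking the videobox outputs:

/* Request one mixer sink pad per branch and link each videobox output to it. */
GstElement *boxes[] = { videobox1, videobox2 };
guint i;

for (i = 0; i < G_N_ELEMENTS (boxes); i++) {
  GstPad *src  = gst_element_get_static_pad (boxes[i], "src");
  GstPad *sink = gst_element_request_pad (mixer, mixer_sink_pad_template, NULL, NULL);

  gst_pad_link (src, sink);
  gst_object_unref (src);
  gst_object_unref (sink);
}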

I am aware of videomixerpad. However, when using your code snippet I still get the same error.

Well, to debug the "not-linked" error, dump the pipeline graph, e.g. GST_DEBUG_DUMP_DOT_DIR=$PWD gst-launch ...; ls *.dot; then render a dump with dot -Tpng -o graph.png. The picture shows a lot of details of the pipeline.
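The same graph can also be dumped from the C program itself; a sketch (GST_DEBUG_DUMP_DOT_DIR must still be set in the environment for the file to be written):

/* Writes pipeline.dot into $GST_DEBUG_DUMP_DOT_DIR, to be rendered with dot. */
GST_DEBUG_BIN_TO_DOT_FILE (GST_BIN (pipeline), GST_DEBUG_GRAPH_SHOW_ALL, "pipeline");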