Playing RTSP with Python GStreamer

I use GStreamer to play RTSP streams from IP cameras (such as Axis), with a command line like this:

gst-launch-0.10 rtspsrc location=rtsp://192.168.0.127/axis-media/media.amp latency=0 ! decodebin ! autovideosink

and it works well.

I want to control it from a PyGTK GUI, so I am using the GStreamer Python bindings. I wrote this code:

[...]
self.player = gst.Pipeline("player")
source = gst.element_factory_make("rtspsrc", "source")
source.set_property("location", "rtsp://192.168.0.127/axis-media/media.amp")
decoder = gst.element_factory_make("decodebin", "decoder")
sink = gst.element_factory_make("autovideosink", "sink")

self.player.add(source, decoder, sink)
gst.element_link_many(source, decoder, sink)

bus = self.player.get_bus()
bus.add_signal_watch()
bus.enable_sync_message_emission()
bus.connect("message", self.on_message)
bus.connect("sync-message::element", self.on_sync_message)
[...]
But it doesn't work; it exits with the following message:

gst.element_link_many(source, decoder,sink)
gst.LinkError: failed to link source with decoder
Since I am only using H.264, I also tried to refine the command line with this:

gst-launch-0.10 -v rtspsrc location=rtsp://192.168.0.127/axis-media/media.amp ! rtph264depay !  ffdec_h264 ! xvimagesink
and implemented it in my Python code like this:

[...]
self.player = gst.Pipeline("player")
source = gst.element_factory_make("rtspsrc", "source")
depay = gst.element_factory_make("rtph264depay", "depay")
decoder = gst.element_factory_make("ffdec_h264", "decoder")
sink = gst.element_factory_make("xvimagesink", "output")

self.player.add(source, depay, decoder, sink)
gst.element_link_many(source, depay, decoder, sink)
[...]
But I get the same error :(

There must be something wrong with my source (rtspsrc), because the same code works with filesrc and decodebin (though of course not with rtph264depay).

I don't understand why it doesn't work, since it works from the CLI. Is there a GStreamer expert who can help me?

Thanks in advance.


Regards,

This answer explains why you get a gst.LinkError.

With gst.parse_launch you can name the elements and then retrieve them to set their properties:

pipeline = gst.parse_launch('rtspsrc name=source latency=0 ! decodebin ! autovideosink')
source = pipeline.get_by_name('source')
source.props.location = 'rtsp://192.168.0.127/axis-media/media.amp'
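
If you prefer to keep building the pipeline element by element, the reason for the gst.LinkError is that rtspsrc has no static source pads: its pads are created at runtime, once the RTSP session is negotiated, so it cannot be linked up front with gst.element_link_many. Below is a minimal sketch of that approach (untested, gst-python 0.10, reusing the camera URL and the H.264 elements from the question): the depayloader chain is linked statically, and rtspsrc is linked from a "pad-added" callback.

import gst

def on_pad_added(element, pad, depay):
    # rtspsrc creates its source pads only after the stream is
    # negotiated, so the link to the depayloader is made here.
    pad.link(depay.get_pad("sink"))

player = gst.Pipeline("player")
source = gst.element_factory_make("rtspsrc", "source")
source.set_property("location", "rtsp://192.168.0.127/axis-media/media.amp")
source.set_property("latency", 0)
depay = gst.element_factory_make("rtph264depay", "depay")
decoder = gst.element_factory_make("ffdec_h264", "decoder")
sink = gst.element_factory_make("autovideosink", "sink")

player.add(source, depay, decoder, sink)
# Only these three can be linked now; rtspsrc joins dynamically.
gst.element_link_many(depay, decoder, sink)
source.connect("pad-added", on_pad_added, depay)

player.set_state(gst.STATE_PLAYING)

This is the same idea the C and Java listings in the answer below implement with their pad-added callbacks.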
I have a "C" implementation of the code you are looking for. I think converting it to "Python" should be fairly straightforward.

Update

Equivalent Java code

 // Display RTSP streaming of video
 // (c) 2011 enthusiasticgeek
 // This code is distributed in the hope that it will be useful,
 // but WITHOUT ANY WARRANTY; without even the implied warranty of
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE
 // Leave Credits intact

package video2; //replace this with your package
import java.awt.BorderLayout;
import java.awt.Dimension;

import javax.swing.JFrame;
import javax.swing.SwingUtilities;

//import org.gstreamer.Caps;
import org.gstreamer.Bus;
import org.gstreamer.Element;
import org.gstreamer.ElementFactory;
import org.gstreamer.Gst;
import org.gstreamer.GstObject;
import org.gstreamer.Pad;
import org.gstreamer.PadDirection;
import org.gstreamer.Pipeline;
import org.gstreamer.State;
import org.gstreamer.TagList;
import org.gstreamer.swing.VideoComponent;

/**
 * A simple RTSP video display example.
 */
public class Main {
    public Main() {
    }
    private static Pipeline pipe;
    public static void main(String[] args) {
    // Quartz is abysmally slow at scaling video for some reason, so turn it off.
    System.setProperty("apple.awt.graphics.UseQuartz", "false");

    args = Gst.init("SwingVideoTest", args);

    pipe = new Pipeline("pipeline");
    /*
    final Element videosrc = ElementFactory.make("videotestsrc", "source");
    final Element videofilter = ElementFactory.make("capsfilter", "flt");
    videofilter.setCaps(Caps.fromString("video/x-raw-yuv, width=720, height=576"
            + ", bpp=32, depth=32, framerate=25/1"));
    */

     pipe.getBus().connect(new Bus.ERROR() {
        public void errorMessage(GstObject source, int code, String message) {
            System.out.println("Error occurred: " + message);
            Gst.quit();
        }
    });
    pipe.getBus().connect(new Bus.STATE_CHANGED() {
        public void stateChanged(GstObject source, State old, State current, State pending) {
            if (source == pipe) {
                System.out.println("Pipeline state changed from " + old + " to " + current);
            }
        }
    });
    pipe.getBus().connect(new Bus.EOS() {
        public void endOfStream(GstObject source) {
            System.out.println("Finished playing file");
            Gst.quit();
        }
    });        

     pipe.getBus().connect(new Bus.TAG() {
        public void tagsFound(GstObject source, TagList tagList) {
            for (String tag : tagList.getTagNames()) {
                System.out.println("Found tag " + tag + " = "
                        + tagList.getValue(tag, 0));
            }
        }
    });

    final Element source = ElementFactory.make("rtspsrc", "Source");
    final Element demux = ElementFactory.make("rtpmp4vdepay", "Depay");
    final Element decoder=ElementFactory.make("ffdec_mpeg4", "Decoder");
    final Element colorspace = ElementFactory.make("ffmpegcolorspace",  "Colorspace");
    //final Element sink = ElementFactory.make ("autovideosink", "Output");

    SwingUtilities.invokeLater(new Runnable() {

        public void run() {
            // Create the video component and link it in
            VideoComponent videoComponent = new VideoComponent();
            Element videosink = videoComponent.getElement();

           source.connect(new Element.PAD_ADDED() {
           public void padAdded(Element element, Pad pad) {
            pad.link(demux.getStaticPad("sink"));
           }
            });

           Pad p = new Pad(null, PadDirection.SRC);
           source.addPad(p);

            source.set("location","rtsp://<user>:<pass>@<ip>/mpeg4/1/media.amp");  //replace this with your source

            pipe.addMany(source, demux, decoder, colorspace, videosink);
            Element.linkMany(demux, decoder, colorspace, videosink);

            // Now create a JFrame to display the video output
            JFrame frame = new JFrame("Swing Video Test");
            frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
            frame.add(videoComponent, BorderLayout.CENTER);
            videoComponent.setPreferredSize(new Dimension(720, 576));
            frame.pack();
            frame.setVisible(true);

            // Start the pipeline processing
            pipe.play();
        }
    });
    }
}

Try the gst_parse_launch API call. It lets you use the gst-launch syntax, which is about to make your code much shorter, and by naming the elements you can set their parameters as needed without string interpolation.

Hi, thanks for your answer. I have already tried gst_parse_launch, but my problem is that my GstPipeline object doesn't have [...] I assume that "naming elements" (as you said) could solve my problem, but I don't know how :(

May I ask what complete source.props.location is needed for an Axis camera whose RTSP description supports H264?

The "C" implementation mentioned above:
 //Display RTSP streaming of video
 //(c) 2011 enthusiasticgeek
 // This code is distributed in the hope that it will be useful,
 // but WITHOUT ANY WARRANTY; without even the implied warranty of
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  

#include <string.h>
#include <math.h>
#include <gst/gst.h>
#include <glib.h>
#include <glib/gprintf.h>  /* needed for g_printf */

static gboolean bus_call (GstBus *bus,GstMessage *msg, gpointer data){
  GMainLoop *loop = (GMainLoop *) data;

  switch (GST_MESSAGE_TYPE (msg)) {

    case GST_MESSAGE_EOS:
      g_print ("Stream Ends\n");
      g_main_loop_quit (loop);
      break;

    case GST_MESSAGE_ERROR: {
      gchar  *debug;
      GError *error;

      gst_message_parse_error (msg, &error, &debug);
      g_free (debug);

      g_printerr ("Error: %s\n", error->message);
      g_error_free (error);

      g_main_loop_quit (loop);
      break;
    }
    default:
      break;
  }

  return TRUE;
}

static void on_pad_added (GstElement *element, GstPad *pad, gpointer data){

  GstPad *sinkpad;
  GstElement *decoder = (GstElement *) data;

  /* We can now link this pad with the rtsp-decoder sink pad */
  g_print ("Dynamic pad created, linking source/demuxer\n");

  sinkpad = gst_element_get_static_pad (decoder, "sink");

  gst_pad_link (pad, sinkpad);

  gst_object_unref (sinkpad);
}

int main (int argc, char *argv[])
{
  GMainLoop *loop;
  GstBus *bus;
  GstElement *source;
  GstElement *decoder;
  GstElement *sink;
  GstElement *pipeline;
  GstElement *demux;
  GstElement *colorspace;

  /* Initializing GStreamer */
  gst_init (&argc, &argv);
  loop = g_main_loop_new (NULL, FALSE);

 //gst-launch-0.10 rtspsrc location=rtsp://<ip> ! decodebin ! ffmpegcolorspace ! autovideosink
 //gst-launch -v rtspsrc location=rtsp://<ip> ! rtpmp4vdepay ! mpeg4videoparse ! ffdec_mpeg4 ! ffmpegcolorspace ! autovideosink
 //gst-launch -v rtspsrc location=rtsp://<ip> ! rtpmp4vdepay ! ffdec_mpeg4 ! ffmpegcolorspace ! autovideosink
  /* Create Pipe's Elements */
  pipeline = gst_pipeline_new ("video player");
  g_assert (pipeline);
  source   = gst_element_factory_make ("rtspsrc", "Source");
  g_assert (source);
  demux = gst_element_factory_make ("rtpmp4vdepay", "Depay");
  g_assert (demux);
  decoder = gst_element_factory_make ("ffdec_mpeg4", "Decoder");
  g_assert (decoder);
  colorspace     = gst_element_factory_make ("ffmpegcolorspace",  "Colorspace");
  g_assert(colorspace);
  sink     = gst_element_factory_make ("autovideosink", "Output");
  g_assert (sink);

  /*Make sure: Every elements was created ok*/
  if (!pipeline || !source || !demux || !decoder || !colorspace || !sink) {
    g_printerr ("One of the elements wasn't create... Exiting\n");
    return -1;
  }

  g_printf(" \nPipeline is Part(A) ->(dynamic/runtime link)  Part(B)[ Part(B-1) -> Part(B-2) -> Part(B-3) ]\n\n");
  g_printf(" [source](dynamic)->(dynamic)[demux]->[decoder]->[colorspace]->[videosink] \n\n");

  /* Set video Source */
  g_object_set (G_OBJECT (source), "location", argv[1], NULL);
  //g_object_set (G_OBJECT (source), "do-rtcp", TRUE, NULL);
  g_object_set (G_OBJECT (source), "latency", 0, NULL);

  /* Putting a Message handler */
  bus = gst_pipeline_get_bus (GST_PIPELINE (pipeline));
  gst_bus_add_watch (bus, bus_call, loop);
  gst_object_unref (bus);

  /* Add Elements to the Bin */
  gst_bin_add_many (GST_BIN (pipeline), source, demux, decoder, colorspace, sink, NULL);

  /* Link confirmation */
  if (!gst_element_link_many (demux, decoder, colorspace, sink, NULL)){
     g_warning ("Linking part (B) Fail...");
  }

  g_printf("\nNote that the source will be linked to the demuxer(depayload) dynamically.\n\
     The reason is that rtspsrc may contain various elements (for example\n\
     audio and video). The source pad(s) will be created at run time,\n\
     by the rtspsrc when it detects the amount and nature of elements.\n\
     Therefore we connect a callback function which will be executed\n\
     when the \"pad-added\" is emitted.\n");

  /* Dynamic Pad Creation */
  if(! g_signal_connect (source, "pad-added", G_CALLBACK (on_pad_added),demux))
  {
    g_warning ("Linking part (A) with part (B) Fail...");
  }
  /* Run the pipeline */
  g_print ("Playing: %s\n", argv[1]);
  gst_element_set_state (pipeline, GST_STATE_PLAYING);
  g_main_loop_run (loop);

  /* Ending Playback */
  g_print ("End of the Streaming... ending the playback\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);

  /* Eliminating Pipeline */
  g_print ("Eliminating Pipeline\n");
  gst_object_unref (GST_OBJECT (pipeline));

  return 0;
}
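
A Makefile to build it (it assumes the C file above is saved as test12.c, matching the test variable below):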
test = test12
ext = c
CC = gcc
CPP = g++
gstreamer:
    $(CC) -g $(test).$(ext) -o $(test) `pkg-config gstreamer-0.10 --libs --cflags` `pkg-config gtk+-2.0 --libs --cflags`
clean:
    rm -rf $(test)
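
Since main() takes the RTSP URL as its first argument, you would build and run it with something like "make gstreamer" followed by "./test12 rtsp://192.168.0.127/axis-media/media.amp" (substituting your own camera's URL).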