Warning: file_get_contents(/data/phpspider/zhask/data//catemap/8/perl/10.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Gstreamer 在Linux中的wayland客户端上显示FPS_Gstreamer_Wayland - Fatal编程技术网

Gstreamer 在Linux中的wayland客户端上显示FPS

Gstreamer 在Linux中的wayland客户端上显示FPS（gstreamer / wayland）。我想问一下：我正在使用Gstreamer的fpsdisplaysink在Linux终端上显示正在播放视频的FPS（帧率）。但现在，我想把FPS显示在屏幕上（通过运行在weston之上的wayland客户端）。有人能帮忙吗？谢谢。C语言源代码见下文。

我想问一下,我正在使用Gstreamer使用fpsdisplaysink在Linux终端上显示正在播放的视频的FPS(帧速率)。但现在,我想在屏幕上显示FPS(通过weston背景下的wayland客户端)。 有人能帮忙吗?谢谢

编辑:下面是用C编写的源gstreamer

#include <gst/gst.h>
#include <fcntl.h>
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <wayland-client.h>
#include <wayland-egl.h>
#include <wayland-client-protocol.h>
#include <cairo.h>

#include "helpers.h"

/* Hard-coded input clip and on-screen placement of the waylandsink window. */
#define INPUT_FILE "/home/root/videos/vga1.h264"
#define POSITION_X 100
#define POSITION_Y 100
#define DELAY_VALUE 1000000  /* NOTE(review): not referenced in this file — confirm before removing */

/* NOTE(review): WIDTH/HEIGHT/CURSOR_* below are not referenced in this file;
 * they look like leftovers from a wayland-client example — confirm. */
static const unsigned WIDTH = 320;
static const unsigned HEIGHT = 200;
static const unsigned CURSOR_WIDTH = 100;
static const unsigned CURSOR_HEIGHT = 59;
static const int32_t CURSOR_HOT_SPOT_X = 10;
static const int32_t CURSOR_HOT_SPOT_Y = 35;
static char prv_time_str[25] = {0,};   /* timestamp string seen on the previous buffer */
static volatile int fps_counter = 0;   /* buffers counted within the current second */
static char str_fps[10] = "";          /* formatted "FPS: N" text pushed to the overlay */
static time_t timer;                   /* scratch for time() in the pad probe */
static char time_str[25];              /* timestamp string for the current buffer */
static struct tm* tm_info;             /* scratch for localtime() */
int ignored_first = 0;                 /* skip the first (partial) second's count */
static GstElement *overlay;            /* textoverlay element, updated from the probe */

/*
 * Buffer probe installed on the waylandsink sink pad.
 *
 * Counts one per buffer; whenever the wall-clock second rolls over
 * (detected by comparing formatted timestamp strings), pushes the count
 * of the just-finished second into the textoverlay "text" property and
 * prints it. The very first (partial) second is discarded.
 *
 * Returns GST_PAD_PROBE_OK so the buffer always flows on.
 */
static GstPadProbeReturn
cb_have_data (GstPad          *pad,
              GstPadProbeInfo *info,
              gpointer         user_data)
{
    (void) pad;        /* unused */
    (void) info;       /* unused */
    (void) user_data;  /* unused */

    time (&timer);
    tm_info = localtime (&timer);
    strftime (time_str, sizeof time_str, "%Y:%m:%d%H:%M:%S\n", tm_info);

    fps_counter++;

    /* First buffer ever: seed the "previous second" string. */
    if (prv_time_str[0] == '\0')
        strcpy (prv_time_str, time_str);

    if (strcmp (prv_time_str, time_str) != 0) {
        if (ignored_first) {
            /* snprintf, not sprintf: str_fps is only 10 bytes, so a counter
             * of 10000+ would overflow the buffer with sprintf. */
            snprintf (str_fps, sizeof str_fps, "FPS: %d", fps_counter);
            g_object_set (G_OBJECT (overlay), "text", str_fps, NULL);
            g_print ("fps: %d\n", fps_counter);
        }
        ignored_first = 1;
        fps_counter = 0;
    }
    strcpy (prv_time_str, time_str);

    return GST_PAD_PROBE_OK;
}

int
main (int argc, char *argv[])
{
  GstElement *pipeline, *source, *parser, *decoder, *sink;
  GstBus *bus;
  GstMessage *msg;
  GstPad *pad;
  gchar *fps_msg;
  guint delay_show_FPS = 0;

  const gchar *input_file = INPUT_FILE;

  /* Initialization */
  gst_init (&argc, &argv);

  /* Create gstreamer elements */
  pipeline = gst_pipeline_new ("video-play");
  source = gst_element_factory_make ("filesrc", "file-source");
  parser = gst_element_factory_make ("h264parse", "h264-parser");
  decoder = gst_element_factory_make ("omxh264dec", "h264-decoder");
  sink = gst_element_factory_make ("waylandsink", "video-output");
  overlay = gst_element_factory_make ("textoverlay", "overlay");

  if (!pipeline || !source || !parser || !decoder || !sink || !overlay ) {
    g_printerr ("One element could not be created. Exiting.\n");
    return -1;
  }

  /* Set input video file for source element */
  g_object_set (G_OBJECT (source), "location", input_file, NULL);

  /* Set position for displaying (100, 100) */
  g_object_set (G_OBJECT (sink), "position-x", POSITION_X, "position-y", POSITION_Y, NULL);

  /* Add textoverlay element to display text in foreground */
  g_object_set (G_OBJECT (overlay), "font-desc", "Sans, 72", NULL);

  /* Add all elements into the pipeline */
  /* pipeline---[ file-source + h264-parser + h264-decoder + video-output ] */
  gst_bin_add_many (GST_BIN (pipeline), source, parser, decoder, overlay, sink, NULL);

  /* Link the elements together */
  /* file-source -> h264-parser -> h264-decoder -> video-output */
  if (gst_element_link_many (source, parser, decoder, overlay, sink, NULL) != TRUE) {
    g_printerr ("Elements could not be linked.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  /* Retrieve a pad from waylandsink */
  pad = gst_element_get_static_pad (sink, "sink");

  /* Add buffer probe from within event probe callback of having data */
  gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER, (GstPadProbeCallback)cb_have_data, NULL, NULL);

  gst_object_unref (pad);

  /* Set the pipeline to "playing" state */
  g_print ("Now playing: %s\n", input_file);
  if (gst_element_set_state (pipeline, GST_STATE_PLAYING) == GST_STATE_CHANGE_FAILURE) {
    g_printerr ("Unable to set the pipeline to the playing state.\n");
    gst_object_unref (pipeline);
    return -1;
  }

  g_print ("Running...\n");

  /* Wait until error or EOS */
  bus = gst_element_get_bus (pipeline);
  while(1) {
    msg = gst_bus_pop (bus);

    /* Loop forever until a matching message was posted 
       on the bus (GST_MESSAGE_ERROR or GST_MESSAGE_EOS). */
    if (msg != NULL) {
      GError *err;
      gchar *debug_info;
      switch (GST_MESSAGE_TYPE (msg)) {
        case GST_MESSAGE_ERROR:
          gst_message_parse_error (msg, &err, &debug_info);
          g_printerr ("Error received from element %s: %s.\n",
            GST_OBJECT_NAME (msg->src), err->message);
          g_printerr ("Debugging information: %s.\n",
            debug_info ? debug_info : "none");
          g_clear_error (&err);
          g_free (debug_info);
          goto stop_pipeline;
        case GST_MESSAGE_EOS:
          g_print ("End-Of-Stream reached.\n");
          goto stop_pipeline;
        default:
          /* do nothing */
          break;
      }
      gst_message_unref (msg);
    }
  }

  /* Free resources and change state to NULL */
stop_pipeline:
  gst_object_unref (bus);
  g_print ("Returned, stopping playback...\n");
  gst_element_set_state (pipeline, GST_STATE_NULL);
  g_print ("Freeing pipeline...\n");
  gst_object_unref (GST_OBJECT (pipeline));
  g_print ("Completed. Goodbye!\n");
  return EXIT_SUCCESS;
}
（此处原有一份上述C源代码的机器翻译乱码副本，属于网页抓取残留，已省略；完整且正确的代码请参见上文英文源代码。）

我尝试了上面的代码,但似乎不起作用。

下面是示例代码,您可以从中获取xImageSink的fps,因此如果您可以将接收器替换为waylandsink,您应该能够获取waylandsink的fps

#include <stdio.h>
#include <string.h>

#include <gst/gst.h>

static char prv_time_str[25] = {0,};   /* timestamp string seen on the previous buffer */
static volatile int fps_counter = 0;   /* buffers counted within the current second */
static char str_fps[10] = "";          /* formatted "FPS: N" text pushed to the overlay */
static time_t timer;                   /* scratch for time() in the pad probe */
static char time_str[25];              /* timestamp string for the current buffer */
static struct tm* tm_info;             /* scratch for localtime() */
int ignored_first = 0;                 /* skip the first (partial) second's count */
static GstElement *pipeline, *src, *overlay, *sink;  /* pipeline elements shared with the probe */

/*
 * Buffer probe on the sink pad: counts buffers per wall-clock second and,
 * each time the second changes, publishes the previous second's count to
 * the textoverlay "text" property and stdout. The first partial second is
 * ignored. Always returns GST_PAD_PROBE_OK so data keeps flowing.
 */
static GstPadProbeReturn
cb_have_data (GstPad          *pad,
              GstPadProbeInfo *info,
              gpointer         user_data)
{
    (void) pad;        /* unused */
    (void) info;       /* unused */
    (void) user_data;  /* unused */

    time (&timer);
    tm_info = localtime (&timer);
    strftime (time_str, sizeof time_str, "%Y:%m:%d%H:%M:%S\n", tm_info);

    fps_counter++;

    /* First buffer ever: seed the "previous second" string. */
    if (prv_time_str[0] == '\0')
        strcpy (prv_time_str, time_str);

    if (strcmp (prv_time_str, time_str) != 0) {
        if (ignored_first) {
            /* snprintf, not sprintf: str_fps is only 10 bytes, so a counter
             * of 10000+ would overflow the buffer with sprintf. */
            snprintf (str_fps, sizeof str_fps, "FPS: %d", fps_counter);
            g_object_set (G_OBJECT (overlay), "text", str_fps, NULL);
            g_print ("fps: %d\n", fps_counter);
        }
        ignored_first = 1;
        fps_counter = 0;
    }
    strcpy (prv_time_str, time_str);

    return GST_PAD_PROBE_OK;
}

/*
 * Demo pipeline videotestsrc -> textoverlay -> xvimagesink with an FPS
 * probe (cb_have_data) on the sink pad; runs a GMainLoop until killed.
 * g_error() aborts on any setup failure.
 */
gint main (gint argc, gchar *argv[])
{
    GMainLoop *loop;
    GstPad *pad;

    /* init GStreamer */
    gst_init (&argc, &argv);
    loop = g_main_loop_new (NULL, FALSE);

    /* build */
    pipeline = gst_pipeline_new ("my-pipeline");
    src = gst_element_factory_make ("videotestsrc", "src");
    if (src == NULL)
        g_error ("Could not create 'videotestsrc' element");

    overlay = gst_element_factory_make ("textoverlay", "overlay");
    if (overlay == NULL) {
        /* Original message wrongly mentioned 'ximagesink' here. */
        g_error ("Could not create 'textoverlay' element");
    }

    g_object_set (G_OBJECT (overlay), "font-desc", "Sans, 72", NULL);

    sink = gst_element_factory_make ("xvimagesink", "sink");
    if (sink == NULL) {
        g_error ("Could not create 'xvimagesink' element");
    }

    gst_bin_add_many (GST_BIN (pipeline), src, overlay, sink, NULL);
    /* Original ignored the link result; a caps mismatch went unnoticed. */
    if (!gst_element_link_many (src, overlay, sink, NULL))
        g_error ("Could not link src -> overlay -> sink");

    /* Count every buffer reaching the sink */
    pad = gst_element_get_static_pad (sink, "sink");
    gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
        (GstPadProbeCallback) cb_have_data, NULL, NULL);
    gst_object_unref (pad);

    /* run */
    gst_element_set_state (pipeline, GST_STATE_PLAYING);

    /* wait until it's up and running or failed */
    if (gst_element_get_state (pipeline, NULL, NULL, -1) == GST_STATE_CHANGE_FAILURE) {
        g_error ("Failed to go into PLAYING state");
    }

    g_print ("Running ...\n");
    g_main_loop_run (loop);

    /* exit */
    gst_element_set_state (pipeline, GST_STATE_NULL);
    gst_object_unref (pipeline);
    g_main_loop_unref (loop);  /* original leaked the main loop */

    return 0;
}
（此处原有一份上述示例代码开头部分的机器翻译乱码副本（在声明处被截断），属于网页抓取残留，已省略；完整代码请参见上文。）