C 如何通过 GStreamer 获取 H264 帧
我熟悉 ffmpeg,但不熟悉 GStreamer。我知道如何通过 ffmpeg 获得 H264 帧,例如,我可以通过 AVPacket 获得 H264 帧。但我不知道如何使用 GStreamer 获得 H264 的帧。我不打算将 H264 数据直接保存为本地文件,因为我需要进行其他处理。谁能给我一些示例代码?我将非常感激。以下是我从别人的代码中学到的:
#include <stdio.h>
#include <stdlib.h>   /* exit, malloc, free — used below but previously missing */
#include <string.h>
#include <unistd.h>
#include <fstream>    /* NOTE(review): C++ header in a C file — will not compile as C; likely leftover, kept as-is */
#include <gst/gst.h>
#include <gst/app/gstappsrc.h>
/* Application context: the pipeline, every element in it, and the
 * bookkeeping state needed to drive the appsrc from main(). */
typedef struct {
GstPipeline *pipeline;  /* top-level pipeline container */
GstAppSrc *src;         /* appsrc: entry point for raw frames pushed by the app */
GstElement *filter1;    /* capsfilter describing the raw I420 input format */
GstElement *encoder;    /* omxh264enc hardware H.264 encoder */
GstElement *filter2;    /* capsfilter forcing byte-stream H.264 output */
GstElement *parser;     /* h264parse */
GstElement *qtmux;      /* MP4/MOV muxer */
GstElement *sink;       /* filesink writing the muxed output */
GstClockTime timestamp; /* running PTS/DTS assigned to pushed buffers */
guint sourceid;         /* idle-source id; declared but never used in this example */
} gst_app_t;
/* Single global instance used by main(). */
static gst_app_t gst_app;
/*
 * Reads raw I420 1280x720 frames from "test.yuv" and pushes them through
 * appsrc ! capsfilter ! omxh264enc ! capsfilter ! h264parse ! qtmux !
 * filesink, producing an MP4 file on disk.
 *
 * Returns 0 on success; exits with status 2 on setup/allocation failure.
 */
int main()
{
    gst_app_t *app = &gst_app;
    GstStateChangeReturn state_ret;

    gst_init(NULL, NULL);   /* Initialize GStreamer */
    app->timestamp = 0;     /* Running PTS/DTS for pushed buffers */

    /* Create pipeline and pipeline elements */
    app->pipeline = (GstPipeline*)gst_pipeline_new("mypipeline");
    app->src      = (GstAppSrc*)gst_element_factory_make("appsrc", "mysrc");
    app->filter1  = gst_element_factory_make("capsfilter", "myfilter1");
    app->encoder  = gst_element_factory_make("omxh264enc", "myomx");
    app->filter2  = gst_element_factory_make("capsfilter", "myfilter2");
    app->parser   = gst_element_factory_make("h264parse", "myparser");
    app->qtmux    = gst_element_factory_make("qtmux", "mymux");
    app->sink     = gst_element_factory_make("filesink", NULL);

    if (!app->pipeline || !app->src || !app->filter1 ||
        !app->encoder || !app->filter2 || !app->parser ||
        !app->qtmux || !app->sink) {
        printf("Error creating pipeline elements!\n");
        exit(2);
    }

    /* Attach elements to pipeline */
    gst_bin_add_many(GST_BIN(app->pipeline),
                     (GstElement*)app->src,
                     app->filter1,
                     app->encoder,
                     app->filter2,
                     app->parser,
                     app->qtmux,
                     app->sink,
                     NULL);

    /* Set pipeline element attributes */
    g_object_set(app->src, "format", GST_FORMAT_TIME, NULL);

    GstCaps *filtercaps1 = gst_caps_new_simple("video/x-raw",
        "format", G_TYPE_STRING, "I420",
        "width", G_TYPE_INT, 1280,
        "height", G_TYPE_INT, 720,
        "framerate", GST_TYPE_FRACTION, 1, 1,
        NULL);
    g_object_set(G_OBJECT(app->filter1), "caps", filtercaps1, NULL);
    gst_caps_unref(filtercaps1);   /* the element holds its own ref; drop ours */

    GstCaps *filtercaps2 = gst_caps_new_simple("video/x-h264",
        "stream-format", G_TYPE_STRING, "byte-stream",
        NULL);
    g_object_set(G_OBJECT(app->filter2), "caps", filtercaps2, NULL);
    gst_caps_unref(filtercaps2);

    /* qtmux writes an MP4/MOV container, not a raw H.264 bitstream, so name
     * the output accordingly (the original "output.h264" was misleading). */
    g_object_set(G_OBJECT(app->sink), "location", "output.mp4", NULL);

    /* Link elements together.  Do NOT wrap this call in g_assert(): building
     * with G_DISABLE_ASSERT would compile out the linking itself. */
    if (!gst_element_link_many((GstElement*)app->src,
                               app->filter1,
                               app->encoder,
                               app->filter2,
                               app->parser,
                               app->qtmux,
                               app->sink,
                               NULL)) {
        printf("Error linking pipeline elements!\n");
        exit(2);
    }

    /* Play the pipeline.  appsrc pipelines normally preroll ASYNC, but
     * SUCCESS is also legal — only FAILURE is an error. */
    state_ret = gst_element_set_state((GstElement*)app->pipeline, GST_STATE_PLAYING);
    if (state_ret == GST_STATE_CHANGE_FAILURE) {
        printf("Failed to set pipeline to PLAYING!\n");
        exit(2);
    }

    /* Open the raw test input */
    FILE *testfile = fopen("test.yuv", "rb");
    if (testfile == NULL) {
        printf("Cannot open test.yuv\n");
        exit(2);
    }

    enum { FRAME_SIZE = 1382400 };  /* 1280 * 720 * 3/2 bytes per I420 frame */

    /* Push up to 100 frames from the file into the pipeline */
    for (int i = 0; i < 100; i++) {
        char *filebuffer = (char*)malloc(FRAME_SIZE);
        if (filebuffer == NULL) {
            printf("Memory error\n");
            fclose(testfile);
            exit(2);
        }

        size_t bytesread = fread(filebuffer, 1, FRAME_SIZE, testfile);
        if (bytesread < FRAME_SIZE) {
            /* Short read = EOF or error.  The original code pushed FRAME_SIZE
             * partially-uninitialized bytes here; stop cleanly instead. */
            free(filebuffer);
            break;
        }

        /* Wrap the frame; the buffer takes ownership of filebuffer. */
        GstBuffer *pushbuffer = gst_buffer_new_wrapped(filebuffer, bytesread);

        /* Timestamp the frame at 1 fps, matching the caps above. */
        GST_BUFFER_PTS(pushbuffer) = app->timestamp;
        GST_BUFFER_DTS(pushbuffer) = app->timestamp;
        GST_BUFFER_DURATION(pushbuffer) = gst_util_uint64_scale_int(1, GST_SECOND, 1);
        app->timestamp += GST_BUFFER_DURATION(pushbuffer);

        /* Push data into the pipeline; appsrc takes ownership of the buffer. */
        GstFlowReturn ret = gst_app_src_push_buffer(app->src, pushbuffer);
        if (ret != GST_FLOW_OK) {
            printf("Push failed (flow %d)\n", (int)ret);
            break;
        }
    }
    fclose(testfile);

    /* Declare end of stream so qtmux can finalize the container headers. */
    gst_app_src_end_of_stream(GST_APP_SRC(app->src));

    /* NOTE(review): a real application should wait for the EOS message on the
     * pipeline bus before exiting; this fixed sleep only approximates that. */
    usleep(100000);

    printf("End Program.\n");
    return 0;
}
(以下原为上述 C 代码的机器翻译副本——翻译过程破坏了所有标识符、字符串和语法,无法作为代码阅读或编译,此处从略;请参考上方的原始英文代码。)
下面是代码源代码的链接
您的示例的目的是将应用程序中的数据提供给GStreamer,希望使用x264进行编码,并将结果保存到文件中 您需要的(我猜是这样)是从文件中读取数据,比如movie.mp4,然后将解码后的数据放入应用程序(?) 我相信你有两个选择: 1,使用appsink而不是filesink,并使用filesrc从文件中馈送数据。因此,如果除抓取h264帧外还需要其他处理(如播放或通过网络发送),则必须使用
tee
将管道拆分为两个输出分支,如下面的示例所示。输出管道的一个分支将连接到例如窗口输出-autovideosink,另一部分连接到您的应用程序
为了演示这个拆分,并且仍然向您展示实际发生的情况,我将使用调试元素标识,它能够转储通过它的数据。
通过这种方式,你将学会使用这个方便的工具进行实验和验证,你知道你在做什么。这不是您需要的解决方案
gst-launch-1.0 -q filesrc location=movie.mp4 ! qtdemux name=qt ! video/x-h264 ! h264parse ! tee name=t ! queue ! avdec_h264 ! videoconvert ! autovideosink t. ! queue ! identity dump=1 ! fakesink sync=true
这条管道的一个分支解码并播放视频,另一个分支通过 identity 元素把流经的 H264 数据转储出来。
/* "new-sample" signal handler: the appsink has a buffer ready for us. */
static GstFlowReturn new_sample (GstElement *sink, CustomData *data) {
  GstSample *sample = NULL;

  /* Ask the appsink to hand over the pending sample. */
  g_signal_emit_by_name (sink, "pull-sample", &sample);
  if (sample == NULL)
    return GST_FLOW_ERROR;

  /* Demo only: print a '*' per received buffer, then release the sample. */
  g_print ("*");
  gst_sample_unref (sample);
  return GST_FLOW_OK;
}
// somewhere in main()
// construction and linkage of elements
/* Invoke new_sample() each time app_sink emits "new-sample".
 * NOTE(review): the appsink's "emit-signals" property presumably must be
 * enabled for this signal to fire — confirm in the element setup. */
g_signal_connect (data.app_sink, "new-sample", G_CALLBACK (new_sample), &data);
/* Pad probe callback: invoked for data flowing through the probed pad.
 * The body is intentionally a placeholder in the original answer — this is
 * where the application would copy/save the buffer carried in `info`. */
static GstPadProbeReturn
cb_have_data (GstPad *pad,
GstPadProbeInfo *info,
gpointer user_data)
{
// ... the code for writing the buffer data somewhere ..
}
// ... later in main()
/* Install cb_have_data as a buffer probe on the element's "src" pad so that
 * every buffer leaving the element passes through the callback above. */
pad = gst_element_get_static_pad (src, "src");
gst_pad_add_probe (pad, GST_PAD_PROBE_TYPE_BUFFER,
(GstPadProbeCallback) cb_have_data, NULL, NULL);