C# 使用ffmpeg.autogen,可以从IP摄像头捕获视频,但不会捕获音频,代码中是否缺少任何内容?

C# 使用ffmpeg.autogen,可以从IP摄像头捕获视频,但不会捕获音频,代码中是否缺少任何内容?,c#,ffmpeg,C#,Ffmpeg,下面是代码:这是一个控制台的主要内容 应用程序,代码编译并运行,视频被捕获但未被捕获 音频 FFmpegBinariesHelper.RegisterFFmpegBinaries(); ffmpeg.av_register_all(); ffmpeg.avcodec_register_all(); ffmpeg.avformat_network_init(); AVFormatContext* context =

下面是代码:这是一个控制台的主要内容 应用程序,代码编译并运行,视频被捕获但未被捕获 音频

        FFmpegBinariesHelper.RegisterFFmpegBinaries();

        ffmpeg.av_register_all();
        ffmpeg.avcodec_register_all();
        ffmpeg.avformat_network_init();
        AVFormatContext* context = ffmpeg.avformat_alloc_context();
        int video_stream_index = 0;

        ffmpeg.av_register_all();
        ffmpeg.avcodec_register_all();
        ffmpeg.avformat_network_init();

        //open rtsp
        if (ffmpeg.avformat_open_input(&context, "rtsp://user:pass@IPAddress/axis-media/media.amp?", null, null) != 0)
        {
            return ;
        }

        if (ffmpeg.avformat_find_stream_info(context, null) < 0)
        {
            return;
        }

        //search video stream
        for (int i = 0; i < context->nb_streams; i++)
        {
            if (context->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
                video_stream_index = i;
        }

        AVPacket packet;
        ffmpeg.av_init_packet(&packet);

        //open output file



        AVOutputFormat* fmt = ffmpeg.av_guess_format("mp4", null, null);
        // AVFormatContext* oc = ffmpeg.avformat_alloc_context();
        AVFormatContext* oc = null;
        ffmpeg.avformat_alloc_output_context2(&oc, fmt, null, null);
        oc->oformat = fmt;


        ffmpeg.avio_open2(&oc->pb, "test.mp4", ffmpeg.AVIO_FLAG_WRITE, null, null);

        AVStream* stream = null;
        int cnt = 0;
        //start reading packets from stream and write them to file

        ffmpeg.av_read_play(context);//play RTSP
        while (ffmpeg.av_read_frame(context, &packet) >= 0 && cnt < 1000)
        {//read 100 frames
            if (packet.stream_index == video_stream_index)
            {//packet is video               
                if (stream == null)
                {//create stream in file
                    stream = ffmpeg.avformat_new_stream(oc, context->streams[video_stream_index]->codec->codec);
                    ffmpeg.avcodec_copy_context(stream->codec, context->streams[video_stream_index]->codec);
                    stream->sample_aspect_ratio = context->streams[video_stream_index]->codec->sample_aspect_ratio;
                    ffmpeg.avformat_write_header(oc, null);
                }
                packet.stream_index = stream->id;

                ffmpeg.av_interleaved_write_frame(oc, &packet);
                cnt++;
            }
            ffmpeg.av_free_packet(&packet);
            ffmpeg.av_init_packet(&packet);
        }
        ffmpeg.av_read_pause(context);
        ffmpeg.av_write_trailer(oc);
        ffmpeg.avio_close(oc->pb);
        ffmpeg.avformat_free_context(oc);
FFmpegBinariesHelper.RegisterFFmpegBinaries();
ffmpeg.av_register_all();
ffmpeg.avcodec_register_all();
avformat_network_init();
AVFormatContext*context=ffmpeg.avformat_alloc_context();
int video_stream_index=0;
ffmpeg.av_register_all();
ffmpeg.avcodec_register_all();
avformat_network_init();
//开放式rtsp
如果(ffmpeg.avformat\u open\u输入和上下文,”rtsp://user:pass@IP地址/axis介质/介质放大器?”,空,空)!=0)
{
返回;
}
if(ffmpeg.avformat\u find\u stream\u info(context,null)<0)
{
返回;
}
//搜索视频流
对于(int i=0;inb\u streams;i++)
{
如果(上下文->流[i]->编解码器->编解码器类型==AVMediaType.AVMediaType\U视频)
视频流索引=i;
}
数据包;
ffmpeg.av_init_包(&packet);
//打开输出文件
AVOutputFormat*fmt=ffmpeg.av_guess_格式(“mp4”,空,空);
//AVFormatContext*oc=ffmpeg.avformat_alloc_context();
AVFormatContext*oc=null;
ffmpeg.avformat\u alloc\u output\u context2(&oc,fmt,null,null);
oc->oformat=fmt;
avio_open2(&oc->pb,“test.mp4”,ffmpeg.avio_FLAG_WRITE,null,null);
AVStream*stream=null;
int-cnt=0;
//开始从流中读取数据包并将其写入文件
ffmpeg.av_read_play(上下文)//播放RTSP
而(ffmpeg.av_读取_帧(上下文和数据包)>=0&&cnt<1000)
{//读取100帧
if(packet.stream\u index==视频流\u index)
{//数据包是视频
if(流==null)
{//在文件中创建流
stream=ffmpeg.avformat\u new\u stream(oc,context->streams[video\u stream\u index]->codec->codec);
ffmpeg.avcodec\u copy\u context(流->编解码器,上下文->流[视频流\u索引]->codec);
流->示例\u纵横比=上下文->流[视频\u流\u索引]->编解码器->示例\u纵横比;
ffmpeg.avformat_write_头(oc,null);
}
packet.stream_index=stream->id;
av交织写入帧(oc和数据包);
cnt++;
}
ffmpeg.av_-free_包(&packet);
ffmpeg.av_init_包(&packet);
}
ffmpeg.av_read_pause(上下文);
视频写入预告片(oc);
ffmpeg.avio_关闭(oc->pb);
ffmpeg.avformat_free_context(oc);

我找到了为音频添加代码的方法,现在音频正在复制并与视频同步。代码如下:

        AVFormatContext* ifcx = null;

        AVCodecContext* v_iccx = null;
        AVCodec* v_icodec = null;
        AVStream* v_ist = null;
        int v_index;


        AVCodecContext* a_iccx = null;
        AVCodec* a_icodec = null;
        AVStream* a_ist = null;
        int a_index;
        DateTime timenow, timestart;

        AVFormatContext* ofcx;
        AVOutputFormat* ofmt;

        AVStream* ost;    
        AVPacket packet;

        string sFileInput;
        string sFileOutput;



        sFileInput = rtspUrl;

        var startNumber = 0;
        var filePrefix = "camera" + cameraId;

        // create folder if not exist
        if (!Directory.Exists(destinationFolder))
        {
            Directory.CreateDirectory(destinationFolder);
        }

        var files = Directory.GetFiles(destinationFolder, "*" + filePrefix + "*");
        if (files.Any())
        {
            var lastFile = files.Last();
            var temp = lastFile.Substring(lastFile.Length - 7, 3);
            if (int.TryParse(temp, out startNumber))
            {
                startNumber++;

            }

        }

        string NextFile = string.Format("{0}\\{1}-{2:000}.mp4", destinationFolder, filePrefix, startNumber);
        //EventLog.WriteEntry(sSource, "Capturing " + NextFile );
        sFileOutput = NextFile;

        FFmpegBinariesHelper.RegisterFFmpegBinaries();
        // Initialize library
        ffmpeg.av_log_set_level(ffmpeg.AV_LOG_DEBUG);
        ffmpeg.av_register_all();
        ffmpeg.avcodec_register_all();
        ffmpeg.avformat_network_init();

        //
        // Input
        //
        AVFormatContext** tmpIfcx = &ifcx;
        var ts = new CancellationTokenSource();
        CancellationToken ct = ts.Token;

        var task = new Task<int>(() => Avformat_open_input_async(tmpIfcx, sFileInput),ct);

        task.Start();            

        task.Wait(2000);            

        if (!task.IsCompleted)
        {
            ts.Cancel();
            //EventLog.WriteEntry(sSource, "Waiting on task Avformat_open_input_async ", EventLogEntryType.Warning);
            task.Wait(2000);
            //EventLog.WriteEntry(sSource, "Timeout callling " + sFileInput, EventLogEntryType.Error);                
            return;
        }

        var result = task.Result;
        //open rtsp

        // ifcx = tmpIfcx;
        if (result != 0)
        {                
            EventLog.WriteEntry(sSource, "ERROR: Cannot open input file " + sFileInput, EventLogEntryType.Error);
            return;
        }

        if (ffmpeg.avformat_find_stream_info(ifcx, null) < 0)
        {             
            EventLog.WriteEntry(sSource, "ERROR: Cannot find stream info\n", EventLogEntryType.Error);
            ffmpeg.avformat_close_input(&ifcx);
            return;
        }

        //search video stream
        v_index = -1;
        a_index = -1;
        for (int ix = 0; ix < ifcx->nb_streams; ix++)
        {

            if (ifcx->streams[ix]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
            {
                v_ist = ifcx->streams[ix];
                v_icodec = ifcx->video_codec;
                v_index = ix;
                v_iccx = ifcx->streams[ix]->codec;

            }
            if (ifcx->streams[ix]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
            {
                a_ist = ifcx->streams[ix];
                a_icodec = ifcx->video_codec;
                a_index = ix;
                a_iccx = ifcx->streams[ix]->codec;

            }
        }
        if (v_index < 0)
        {
            EventLog.WriteEntry(sSource, "ERROR: Cannot find input video stream\n",EventLogEntryType.Error);
            ffmpeg.avformat_close_input(&ifcx);
            return;
        }

        //
        // Output
        //

        //open output file
        ofmt = ffmpeg.av_guess_format(null, sFileOutput, "mp4");

       // ffmpeg.format
        ofcx = ffmpeg.avformat_alloc_context();
        ofcx->oformat = ofmt;

        ffmpeg.avio_open(&ofcx->pb, sFileOutput, ffmpeg.AVIO_FLAG_WRITE);

        // Create output stream
        ost = ffmpeg.avformat_new_stream( ofcx, (AVCodec *) v_iccx->codec );
        AVStream* a_ost = ffmpeg.avformat_new_stream(ofcx, (AVCodec*)a_iccx->codec);
        //ost = ffmpeg.avformat_new_stream(ofcx, ifcx->video_codec);

        ffmpeg.avcodec_copy_context(ost->codec, v_iccx);
        ffmpeg.avcodec_copy_context(a_ost->codec, a_iccx);

        ffmpeg.avcodec_open2(v_iccx, v_icodec, null);
        ffmpeg.avcodec_open2(a_iccx, a_icodec, null);


        // Assume r_frame_rate is accurate
        var avRational = new AVRational();
        avRational.den = ost->r_frame_rate.den * 2;
        avRational.num = ost->r_frame_rate.num ;

        var aaRational = new AVRational();
        aaRational.den = a_ost->r_frame_rate.den ;
        aaRational.num = a_ost->r_frame_rate.num ;

        ost->r_frame_rate = avRational;
        ost->avg_frame_rate = ost->r_frame_rate;
        ost->time_base = av_inv_q(ost->r_frame_rate);
        ost->codec->time_base = ost->time_base;

        a_ost->r_frame_rate = aaRational;
        a_ost->avg_frame_rate = a_ost->r_frame_rate;
        a_ost->time_base = av_inv_q(a_ost->r_frame_rate);
        a_ost->codec->time_base = a_ost->time_base;

        ffmpeg.avformat_write_header(ofcx, null);

        //start reading packets from stream and write them to file

        ffmpeg.av_dump_format(ifcx, 0, ifcx->filename.ToString(), 0);
        ffmpeg.av_dump_format(ofcx, 0, ofcx->filename.ToString(), 1);

        timestart = timenow = DateTime.Now;

        ffmpeg.av_init_packet(&packet);
        if (segmentLength == 0)
            segmentLength = 15;
         var dateToEnd = DateTime.Now.AddMinutes(segmentLength);
        //EventLog.WriteEntry(sSource, "date to end capture " + dateToEnd.ToString());
        while ( (dateToEnd - DateTime.Now).TotalMinutes > 0 && IsCapturing)
        {
            if (endDateTime.HasValue && DateTime.Compare(DateTime.Now, endDateTime.Value) >= 0)
            {
                ffmpeg.av_packet_unref(&packet);
                ffmpeg.av_init_packet(&packet);
                IsCapturing = false;                    
                break;
            }
            int readFrame = -1;
            try
            {
                readFrame = ffmpeg.av_read_frame(ifcx, &packet);

            }
            catch(Exception ex)
            {
                EventLog.WriteEntry(sSource, $"Error av_read_frame {ex.ToString()}", EventLogEntryType.Error);
                break;
            }

            if (readFrame < 0)
            {
                EventLog.WriteEntry(sSource, "reafFrame < 0 " + NextFile, EventLogEntryType.Error);
                ffmpeg.av_packet_unref(&packet);
                ffmpeg.av_init_packet(&packet);
                break;
            }

            if (packet.stream_index == v_index)
            { //packet is video 
                packet.stream_index = v_ist->index;
                ffmpeg.av_interleaved_write_frame(ofcx, &packet);

            }
            if (packet.stream_index == a_index)
            { //packet is audio               

                SetPacketProperties(&packet, a_iccx, a_ist);
                ffmpeg.av_interleaved_write_frame(ofcx, &packet);

            }
            ffmpeg.av_packet_unref(&packet);
            ffmpeg.av_init_packet(&packet);

        }
        ffmpeg.av_read_pause(ifcx);
        ffmpeg.av_write_trailer(ofcx);
        ffmpeg.avio_close(ofcx->pb);
        ffmpeg.avformat_free_context(ofcx);

        ffmpeg.avformat_network_deinit();
AVFormatContext*ifcx=null;
AVCodecContext*v_iccx=null;
AVCodec*v_icodec=null;
AVStream*v_ist=null;
int v_指数;
AVCodecContext*a_iccx=null;
AVCodec*a_icodec=null;
AVStream*a_ist=null;
INTA_指数;
DateTime timenow,timestart;
AVFormatContext*ofcx;
AVOutputFormat*ofmt;
AVStream*ost;
数据包;
字符串输入;
字符串输出;
sFileInput=rtspUrl;
var startNumber=0;
var filePrefix=“camera”+cameraId;
//创建文件夹(如果不存在)
如果(!Directory.Exists(destinationFolder))
{
目录.CreateDirectory(destinationFolder);
}
var files=Directory.GetFiles(destinationFolder,“*”+filePrefix+“*”);
if(files.Any())
{
var lastFile=files.Last();
var temp=lastFile.Substring(lastFile.Length-7,3);
if(内部锥巴色(温度、外部起始编号))
{
startNumber++;
}
}
string NextFile=string.Format(“{0}\\{1}-{2:000}.mp4”,destinationFolder,filePrefix,startNumber);
//WriteEntry(sSource,“捕获”+NextFile);
sFileOutput=NextFile;
FFmpegBinariesHelper.RegisterFFmpegBinaries();
//初始化库
ffmpeg.av_log_set_level(ffmpeg.av_log_DEBUG);
ffmpeg.av_register_all();
ffmpeg.avcodec_register_all();
avformat_network_init();
//
//输入
//
AVFormatContext**tmpIfcx=&ifcx;
var ts=新的CancellationTokenSource();
取消令牌ct=ts.令牌;
var task=新任务(()=>Avformat\u open\u input\u async(tmpIfcx,sFileInput),ct);
task.Start();
任务.等待(2000年);
如果(!task.IsCompleted)
{
t.取消();
//WriteEntry(sSource,“等待任务Avformat\u open\u input\u async”,EventLogEntryType.Warning);
任务.等待(2000年);
//WriteEntry(sSource,“超时调用”+sFileInput,EventLogEntryType.Error);
返回;
}
var result=task.result;
//开放式rtsp
//ifcx=tmpIfcx;
如果(结果!=0)
{                
WriteEntry(sSource,“错误:无法打开输入文件”+sFileInput,EventLogEntryType.ERROR);
返回;
}
if(ffmpeg.avformat\u find\u stream\u info(ifcx,null)<0)
{             
EventLog.WriteEntry(sSource,“错误:找不到流信息\n”,EventLogEntryType.ERROR);
ffmpeg.avformat\u close\u输入(&ifcx);
返回;
}
//搜索视频流
v_指数=-1;
a_指数=-1;
对于(intix=0;ixnb_流;ix++)
{
 private unsafe void SetPacketProperties(AVPacket* packet, AVCodecContext* codecContext, AVStream* stream)
    {

        packet->pts = ffmpeg.av_rescale_q_rnd(packet->pts, codecContext->time_base, stream->time_base, AVRounding.AV_ROUND_NEAR_INF | AVRounding.AV_ROUND_PASS_MINMAX);
        packet->dts = ffmpeg.av_rescale_q_rnd(packet->dts, codecContext->time_base, stream->time_base, AVRounding.AV_ROUND_NEAR_INF | AVRounding.AV_ROUND_PASS_MINMAX);
        packet->duration = (int)ffmpeg.av_rescale_q(packet->duration, codecContext->time_base, stream->time_base);
        packet->stream_index = stream->index;


    }