Video created with ffmpeg and the H264 codec does not play on Android: the player says it is not a DivX file


I created a video from images in C using ffmpeg with the H264 codec. I can play the video on an Android device with a third-party player (VPlayer), but the stock player refuses to play it and reports that the file is not a DivX file. Below is the code I use to create the video from the images:

JNIEXPORT void Java_com_canvasm_mediclinic_VideoGenerator_generate(JNIEnv *pEnv, jobject pObj,jobjectArray stringArray,int famerate,int width,int height,jstring videoFilename)
{
    AVCodec *codec;
    AVCodecContext *c= NULL;
    //int framesnum=5;
    int i,looper, out_size, size, x, y,j;
    int ret,pts,got_pkt_ptr;

    int imagecount= (*pEnv)->GetArrayLength(pEnv, stringArray); 
    int retval=-10;
    uint8_t endcode[]={0,0,1,0xb7};
    AVPacket outpacket;
    FILE *f;
    AVFrame *picture,*encoded_avframe;  
    jbyte *raw_record;
    char logdatadata[100];
    int returnvalue = -1,numBytes =-1;
    const char *gVideoFileName = (char *)(*pEnv)->GetStringUTFChars(pEnv, videoFilename, NULL); 
    /* find the H.264 (libx264) encoder */
    codec = avcodec_find_encoder_by_name("libx264");
    if (!codec) {
        __android_log_write(ANDROID_LOG_INFO, "record","codec not found");
        exit(1);
    }
    c= avcodec_alloc_context();
    c->bit_rate = 500000;
    c->width = width;
    c->height = height;
    c->time_base= (AVRational){1,famerate};
    c->gop_size = 12; // emit one intra frame every twelve frames (overridden to 250 below)
    c->max_b_frames=1;
    c->pix_fmt = PIX_FMT_YUV420P;
    c->codec_type = AVMEDIA_TYPE_VIDEO;
    c->codec_id = codec->id;
    c->max_b_frames = 0;
    c->me_range = 16;
    c->max_qdiff = 4;
    c->qmin = 10;
    c->qmax = 26;
    c->qcompress = 0.6;
    c->trellis=0;
    c->level = 30;
    c->refs = 5;
    c->coder_type = 0;
    c->scenechange_threshold = 0;
    //new
    c->flags|=CODEC_FLAG_LOOP_FILTER;//new  
    c->scenechange_threshold = 40; //new
    c-> rc_buffer_size = 0;
    c->gop_size=250; //new
    c->max_b_frames=1;//new
    c->me_method=7;
    c->me_cmp|= 1;
    c->me_subpel_quality = 6;
    c->qmax=51;
    c->keyint_min=25;
    av_opt_set(c->priv_data,"subq","6",0);
    av_opt_set(c->priv_data,"crf","20.0",0);
    av_opt_set(c->priv_data,"weighted_p_pred","0",0);
    av_opt_set(c->priv_data,"profile","baseline",AV_OPT_SEARCH_CHILDREN);
    av_opt_set(c->priv_data,"preset","medium",0);
    av_opt_set(c->priv_data,"tune","zerolatency",0);
    av_opt_set(c->priv_data,"x264opts","rc-lookahead=0",0);




    /* open it */
    retval = avcodec_open(c, codec);
    if ( retval < 0)
    {
    __android_log_write(ANDROID_LOG_INFO, "record","could not open codec");
        exit(1);
    }   
    f = fopen(gVideoFileName, "ab");

    if (!f) {
        __android_log_write(ANDROID_LOG_INFO, "record","could not open video file");
        exit(1);
    }
    pts = 0;
    for(i=0;i<=imagecount;i++) {


        jboolean isCp;
        int progress = 0;
        float temp;

        jstring string;
        if(i==imagecount)
            string = (jstring) (*pEnv)->GetObjectArrayElement(pEnv, stringArray,
                      imagecount-1);
        else
            string = (jstring) (*pEnv)->GetObjectArrayElement(pEnv, stringArray, i);

        const char *rawString = (*pEnv)->GetStringUTFChars(pEnv, string, &isCp);        
        picture = OpenImage(rawString,width,height,i);      
        av_init_packet(&outpacket);
        fflush(stdout);


        {               
            picture->pts=i ;//c->frame_number;          
            do{
                out_size = avcodec_encode_video2(c, &outpacket, 
                                 picture,&got_pkt_ptr);             
            }while(!got_pkt_ptr);   

        }

        returnvalue = fwrite(outpacket.data, 1, outpacket.size, f);
        av_free_packet(&outpacket); 

    }


    /* get the delayed frames */
    for(got_pkt_ptr =1; got_pkt_ptr; i++) {
        fflush(stdout);
        avcodec_encode_video2(c, &outpacket, NULL,&got_pkt_ptr);

        if(got_pkt_ptr)
        {
            fwrite(outpacket.data, 1, outpacket.size, f);
            av_free_packet(&outpacket);
        }   
    }

    fwrite(endcode,1,sizeof(endcode),f);
    fclose(f);  
    avcodec_close(c);
    av_free(c);     
}
AVFrame* OpenImage(const char* imageFileName,int w,int h,int index)
{
    AVFrame *pFrame;
    AVCodec *pCodec ;
    AVFormatContext *pFormatCtx = NULL;
    AVCodecContext *pCodecCtx;
    uint8_t *buffer;
    int frameFinished,framesNumber = 0,retval = -1,numBytes=0;
    AVPacket packet;
    char logdatadata[100];
    int result = -1;



    result=avformat_open_input(&pFormatCtx,imageFileName,NULL,NULL);
    if(result!=0)       
    {       
        __android_log_write(ANDROID_LOG_INFO, "record",
                "Can't open image file ");
        return NULL;
    }
    pCodecCtx = pFormatCtx->streams[0]->codec;
    pCodecCtx->width = w;
    pCodecCtx->height = h;
    pCodecCtx->pix_fmt = PIX_FMT_YUV420P;

    // Find the decoder for the video stream
    pCodec = avcodec_find_decoder(pCodecCtx->codec_id);
    if (!pCodec)
    {
        __android_log_write(ANDROID_LOG_INFO, "record",
                "Can't open image file ");
        return NULL;
    }

    pFrame = avcodec_alloc_frame();

    numBytes = avpicture_get_size(PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);
    buffer = (uint8_t *) av_malloc(numBytes * sizeof(uint8_t)); 

    retval = avpicture_fill((AVPicture *) pFrame, buffer, PIX_FMT_YUV420P, pCodecCtx->width, pCodecCtx->height);

    // Open codec
    if(avcodec_open(pCodecCtx, pCodec)<0)
    {
        __android_log_write(ANDROID_LOG_INFO, "record","Could not open codec");
        return NULL;
    }

    if (!pFrame)
    {
        __android_log_write(ANDROID_LOG_INFO, "record","Can't allocate memory for AVFrame\n");
        return NULL;
    }
    int readval = -5;
    while ((readval = av_read_frame(pFormatCtx, &packet)) >= 0)
    {
        if(packet.stream_index != 0)
            continue;       
        int ret = avcodec_decode_video2(pCodecCtx, pFrame, &frameFinished, &packet);

        if (ret > 0)
        {
            __android_log_write(ANDROID_LOG_INFO, "record","Frame is decoded\n");
            pFrame->quality = 4;
            av_free_packet(&packet);
            av_close_input_file(pFormatCtx);            
            return pFrame;
        }
        else
        {
            __android_log_write(ANDROID_LOG_INFO, "record","error while decoding frame \n");
        }
    }
    return NULL; /* no frame could be decoded */
}
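
For comparison, generate() above writes the encoder output to the file as a bare byte stream with fwrite(), followed by an MPEG end code. Below is a minimal sketch, assuming the libavformat API of the same ffmpeg generation as the code above (avformat_alloc_output_context2, avformat_write_header, av_interleaved_write_frame), of how the encoded packets could instead be muxed into an .mp4 container; the function open_mp4_muxer and names such as mux_ctx are illustrative and not part of the original post.

#include <libavformat/avformat.h>

/* Sketch only: open an .mp4 muxer for packets produced by an already
 * configured H.264 encoder context "enc". */
static AVFormatContext *open_mp4_muxer(AVCodecContext *enc, AVStream **out_st,
                                       const char *filename)
{
    AVFormatContext *mux_ctx = NULL;
    AVStream *st;

    av_register_all();
    if (avformat_alloc_output_context2(&mux_ctx, NULL, "mp4", filename) < 0)
        return NULL;

    st = avformat_new_stream(mux_ctx, NULL);
    if (!st)
        return NULL;

    /* copy the encoder parameters into the stream; for MP4 the encoder itself
     * should also be opened with CODEC_FLAG_GLOBAL_HEADER set so that the
     * SPS/PPS end up in the container extradata */
    avcodec_copy_context(st->codec, enc);
    st->codec->codec_tag = 0;

    if (avio_open(&mux_ctx->pb, filename, AVIO_FLAG_WRITE) < 0)
        return NULL;
    if (avformat_write_header(mux_ctx, NULL) < 0)
        return NULL;

    *out_st = st;
    return mux_ctx;
}

/* In the encode loop, instead of fwrite(outpacket.data, 1, outpacket.size, f):
 *     outpacket.stream_index = video_st->index;
 *     av_interleaved_write_frame(mux_ctx, &outpacket);
 * and once the delayed frames have been flushed, instead of the MPEG end code:
 *     av_write_trailer(mux_ctx);
 *     avio_close(mux_ctx->pb);
 *     avformat_free_context(mux_ctx);
 */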
An alternative approach is to bundle an ffmpeg executable with the app, make it executable, and invoke it directly on the JPEG sequence:

changeExeMode(SYSTEM_BIN_CHMOD + " 777 " + APP_DATA_PATH + FFMPEG_EXE); // this is a function defined below

String[] ffmpegCommand = new String[] {
        APP_DATA_PATH + FFMPEG_EXE,
        "-f", "image2",
        "-r", "" + frameVisibleTime,
        "-i", Environment.getExternalStorageDirectory().toString()
                + Constants.logoImageDir + "/" + "%d.jpg",
        path};
Process exeffmpeg = Runtime.getRuntime().exec(ffmpegCommand);


void changeExeMode(String exeName) throws IOException, InterruptedException {
    Process chm = Runtime.getRuntime().exec(exeName);
    chm.waitFor();
    int exitCode = chm.exitValue();
    if (exitCode != 0) {
        printFailureReason(chm);
    }
}