
Android: decoding a video with MediaCodec and re-encoding it produces a corrupted file


I am trying to implement this, but modified the source code to use an mp4 video file. The MIME type is video/avc, the bitrate is 288 kbps, the I-frame interval is 100, the width is 176 and the height is 144. The file size is 6 MB. When I decode the video and render the frames onto the output surface, I can save the frames as bitmaps and they look perfectly fine. But at the end, after encoding (with the same parameters as the original video), I get a 700 KB file and I can't see the video (probably a corrupted file).
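For reference, those parameters would usually be expressed as an encoder MediaFormat along these lines (a minimal sketch assuming Xamarin.Android's MediaCodec bindings; the 15 fps frame rate is an assumed value, not stated in the question):

    // using Android.Media;
    MediaFormat encFormat = MediaFormat.CreateVideoFormat("video/avc", 176, 144);
    // Encoder input comes from a Surface, so request the surface color format.
    encFormat.SetInteger(MediaFormat.KeyColorFormat, (int)MediaCodecCapabilities.Formatsurface);
    encFormat.SetInteger(MediaFormat.KeyBitRate, 288000);
    encFormat.SetInteger(MediaFormat.KeyFrameRate, 15);          // assumed value
    encFormat.SetInteger(MediaFormat.KeyIFrameInterval, 100);

    MediaCodec encoder = MediaCodec.CreateEncoderByType("video/avc");
    encoder.Configure(encFormat, null, null, MediaCodecConfigFlags.Encode);
    // The encoder's input surface (encoder.CreateInputSurface()) must be created
    // after Configure and before Start.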

extractor = new MediaExtractor();
extractor.SetDataSource(filePath);
for (int i = 0; i <
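The loop presumably walks the extractor's tracks, selects the video track and configures the decoder onto the rendering surface. A minimal sketch of that standard MediaExtractor pattern (decoderFormat and renderSurface are placeholder names, not from the question; renderSurface would be the Surface wrapping the OutputSurface's SurfaceTexture):

    // using Android.Media; using Android.Views;
    int videoTrackIndex = -1;
    MediaFormat decoderFormat = null;
    for (int i = 0; i < extractor.TrackCount; i++)
    {
        MediaFormat trackFormat = extractor.GetTrackFormat(i);
        string mime = trackFormat.GetString(MediaFormat.KeyMime);
        if (mime != null && mime.StartsWith("video/"))
        {
            videoTrackIndex = i;
            decoderFormat = trackFormat;
            break;
        }
    }
    extractor.SelectTrack(videoTrackIndex);

    MediaCodec decoder = MediaCodec.CreateDecoderByType(decoderFormat.GetString(MediaFormat.KeyMime));
    decoder.Configure(decoderFormat, renderSurface, null, 0);   // render decoded frames to the surface
    decoder.Start();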
And the decode/encode part:

            while (!outputDone)
            {
                if (VERBOSE) Log.Debug(TAG, "edit loop");
                // Feed more data to the decoder.
                if (!inputDone)
                {

                    int inputBufIndex = decoder.DequeueInputBuffer(TIMEOUT_USEC);
                    if (inputBufIndex >= 0)
                    {
                        ByteBuffer buffer = decoderInputBuffers[inputBufIndex];
                        int sampleSize = extractor.ReadSampleData(buffer, 0);
                        if (sampleSize < 0)
                        {
                            inputChunk++;
                            // End of stream -- send empty frame with EOS flag set.
                            decoder.QueueInputBuffer(inputBufIndex, 0, 0, 0L,
                                    MediaCodecBufferFlags.EndOfStream);
                            inputDone = true;
                            if (VERBOSE) Log.Debug(TAG, "sent input EOS (with zero-length frame)");
                        }
                        else {
                            // Copy a chunk of input to the decoder.  The first chunk should have
                            // the BUFFER_FLAG_CODEC_CONFIG flag set.
                            buffer.Clear();
                            decoder.QueueInputBuffer(inputBufIndex, 0, sampleSize, extractor.SampleTime, 0);
                            extractor.Advance();

                            inputChunk++;
                        }
                    }
                    else {
                        if (VERBOSE) Log.Debug(TAG, "input buffer not available");
                    }
                }
                // Assume output is available.  Loop until both assumptions are false.
                bool decoderOutputAvailable = !decoderDone;
                bool encoderOutputAvailable = true;
                while (decoderOutputAvailable || encoderOutputAvailable)
                {
                    // Start by draining any pending output from the encoder.  It's important to
                    // do this before we try to stuff any more data in.
                    int encoderStatus = encoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
                    if (encoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                    {
                        // no output available yet
                        if (VERBOSE) Log.Debug(TAG, "no output from encoder available");
                        encoderOutputAvailable = false;
                    }
                    else if (encoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                    {
                        encoderOutputBuffers = encoder.GetOutputBuffers();
                        if (VERBOSE) Log.Debug(TAG, "encoder output buffers changed");
                    }
                    else if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
                    {
                        MediaFormat newFormat = encoder.OutputFormat;
                        if (VERBOSE) Log.Debug(TAG, "encoder output format changed: " + newFormat);
                    }
                    else if (encoderStatus < 0)
                    {
                        Log.Error(TAG, "unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
                    }
                    else { // encoderStatus >= 0
                        ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                        if (encodedData == null)
                        {
                            Log.Error(TAG,"encoderOutputBuffer " + encoderStatus + " was null");
                        }
                        // Write the data to the output "file".
                        if (info.Size != 0)
                        {
                            encodedData.Position(info.Offset);
                            encodedData.Limit(info.Offset + info.Size);
                            byte[] data = new byte[encodedData.Remaining()];
                            encodedData.Get(data);
                            fStream.Write(data, 0, data.Length);
                           // outputData.addChunk(encodedData, (int)info.Flags, info.PresentationTimeUs);
                            outputCount++;
                            if (VERBOSE) Log.Debug(TAG, "encoder output " + info.Size + " bytes");
                        }
                        outputDone = (info.Flags & MediaCodecBufferFlags.EndOfStream) != 0;
                        encoder.ReleaseOutputBuffer(encoderStatus, false);
                    }
                    if (encoderStatus != (int)MediaCodecInfoState.TryAgainLater)
                    {
                        // Continue attempts to drain output.
                        continue;
                    }
                    // Encoder is drained, check to see if we've got a new frame of output from
                    // the decoder.  (The output is going to a Surface, rather than a ByteBuffer,
                    // but we still get information through BufferInfo.)
                    if (!decoderDone)
                    {
                        int decoderStatus = decoder.DequeueOutputBuffer(info, TIMEOUT_USEC);
                        if (decoderStatus == (int)MediaCodecInfoState.TryAgainLater)
                        {
                            // no output available yet
                            if (VERBOSE) Log.Debug(TAG, "no output from decoder available");
                            decoderOutputAvailable = false;
                        }
                        else if (decoderStatus == (int)MediaCodecInfoState.OutputBuffersChanged)
                        {
                            //decoderOutputBuffers = decoder.GetOutputBuffers();
                            if (VERBOSE) Log.Debug(TAG, "decoder output buffers changed (we don't care)");
                        }
                        else if (decoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
                        {
                            // expected before first buffer of data
                            MediaFormat newFormat = decoder.OutputFormat;
                            if (VERBOSE) Log.Debug(TAG, "decoder output format changed: " + newFormat);
                        }
                        else if (decoderStatus < 0)
                        {
                            Log.Error(TAG,"unexpected result from decoder.dequeueOutputBuffer: " + decoderStatus);
                        }
                        else { // decoderStatus >= 0
                            if (VERBOSE) Log.Debug(TAG, "surface decoder given buffer "
                                    + decoderStatus + " (size=" + info.Size + ")");
                            // The ByteBuffers are null references, but we still get a nonzero
                            // size for the decoded data.
                            bool doRender = (info.Size != 0);
                            // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                            // to SurfaceTexture to convert to a texture.  The API doesn't
                            // guarantee that the texture will be available before the call
                            // returns, so we need to wait for the onFrameAvailable callback to
                            // fire.  If we don't wait, we risk rendering from the previous frame.
                            decoder.ReleaseOutputBuffer(decoderStatus, doRender);
                            if (doRender)
                            {
                                // This waits for the image and renders it after it arrives.
                                if (VERBOSE) Log.Debug(TAG, "awaiting frame");
                                outputSurface.awaitNewImage();
                                outputSurface.drawImage();
                                outputSurface.saveFrame(Android.OS.Environment.ExternalStorageDirectory + "/test.jpg", mWidth, mHeight);
                                // Send it to the encoder.
                                inputSurface.setPresentationTime(info.PresentationTimeUs * 1000);
                                if (VERBOSE) Log.Debug(TAG, "swapBuffers");
                                inputSurface.swapBuffers();
                            }
                            if ((info.Flags & MediaCodecBufferFlags.EndOfStream) != 0)
                            {
                                // forward decoder EOS to encoder
                                if (VERBOSE) Log.Debug(TAG, "signaling input EOS");
                                if (WORK_AROUND_BUGS)
                                {
                                    // Bail early, possibly dropping a frame.
                                    return;
                                }
                                else {
                                    encoder.SignalEndOfInputStream();
                                }
                            }
                        }
                    }
                }
            }
            if (inputChunk != outputCount)
            {
                throw new RuntimeException("frame lost: " + inputChunk + " in, " +
                        outputCount + " out");
            }
            fStream.Close();
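One thing to note about the output path: the bytes written to fStream are the encoder's raw H.264 elementary stream, with no MP4 container around them, so a file named .mp4 written this way will generally be reported as corrupt by players. The commented-out outputData.addChunk line suggests the original test kept the encoded chunks in memory instead; a playable .mp4 would need the frames wrapped in a container, for example with MediaMuxer. A minimal, hypothetical sketch of doing that inside the encoder-drain branch (outputPath, muxerStarted and videoTrack are placeholder names, not from the question):

    // using Android.Media; using Java.Nio;
    MediaMuxer muxer = new MediaMuxer(outputPath, MuxerOutputType.Mpeg4);
    int videoTrack = -1;
    bool muxerStarted = false;

    // ... inside the drain loop, instead of writing to fStream ...
    if (encoderStatus == (int)MediaCodecInfoState.OutputFormatChanged)
    {
        // The encoder's actual output format (it carries csd-0/csd-1) defines the track.
        videoTrack = muxer.AddTrack(encoder.OutputFormat);
        muxer.Start();
        muxerStarted = true;
    }
    else if (encoderStatus >= 0)
    {
        ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
        bool isConfig = (info.Flags & MediaCodecBufferFlags.CodecConfig) != 0;
        if (!isConfig && info.Size != 0 && muxerStarted)
        {
            encodedData.Position(info.Offset);
            encodedData.Limit(info.Offset + info.Size);
            // PresentationTimeUs must be valid; here it comes from setPresentationTime on the input surface.
            muxer.WriteSampleData(videoTrack, encodedData, info);
        }
        encoder.ReleaseOutputBuffer(encoderStatus, false);
    }

    // ... once outputDone ...
    muxer.Stop();
    muxer.Release();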