Android MediaCodec: ExtractDecodeEditEncodeMuxTest exits after the line "output surface: await new image"


I want to run the ExtractDecodeEditEncodeMuxTest sample.

To simplify things, I edited the code so that it reads its input from the sdcard and writes its output to the sdcard. But the main while loop breaks after the line outputSurface.awaitNewImage();. Decoding and encoding stop.

private String mInputFile = Environment.getExternalStorageDirectory().getAbsolutePath() + "/dingdong.mp4";
private String mOutputFile = Environment.getExternalStorageDirectory().getAbsolutePath() + "/compressed_output.mp4";
private String mOutputVideoMimeType;


@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    try {
        extractDecodeEditEncodeMux();
    } catch (Exception e) {
        Log.e(TAG,e.getMessage(),e);
    }
}


private void extractDecodeEditEncodeMux() throws Exception {
    // Exception that may be thrown during release.
    Exception exception = null;
    MediaCodecInfo videoCodecInfo = selectCodec(OUTPUT_VIDEO_MIME_TYPE);
    if (videoCodecInfo == null) {
        // Don't fail CTS if they don't have an AVC codec (not here, anyway).
        Log.e(TAG, "Unable to find an appropriate codec for " + OUTPUT_VIDEO_MIME_TYPE);
        return;
    }
    if (VERBOSE) Log.d(TAG, "video found codec: " + videoCodecInfo.getName());
    MediaCodecInfo audioCodecInfo = selectCodec(OUTPUT_AUDIO_MIME_TYPE);
    if (audioCodecInfo == null) {
        // Don't fail CTS if they don't have an AAC codec (not here, anyway).
        Log.e(TAG, "Unable to find an appropriate codec for " + OUTPUT_AUDIO_MIME_TYPE);
        return;
    }
    if (VERBOSE) Log.d(TAG, "audio found codec: " + audioCodecInfo.getName());
    MediaExtractor videoExtractor = null;
    MediaExtractor audioExtractor = null;
    OutputSurface outputSurface = null;
    MediaCodec videoDecoder = null;
    MediaCodec audioDecoder = null;
    MediaCodec videoEncoder = null;
    MediaCodec audioEncoder = null;
    MediaMuxer muxer = null;
    InputSurface inputSurface = null;
    try {
        if (mCopyVideo) {
            videoExtractor = createExtractor();
            int videoInputTrack = getAndSelectVideoTrackIndex(videoExtractor);
            assertTrue("missing video track in test video", videoInputTrack != -1);
            MediaFormat inputFormat = videoExtractor.getTrackFormat(videoInputTrack);
            // We avoid the device-specific limitations on width and height by using values
            // that are multiples of 16, which all tested devices seem to be able to handle.
            MediaFormat outputVideoFormat =
                    MediaFormat.createVideoFormat(OUTPUT_VIDEO_MIME_TYPE, mOutputWidth, mOutputHeight);
            // Set some properties. Failing to specify some of these can cause the MediaCodec
            // configure() call to throw an unhelpful exception.
            outputVideoFormat.setInteger(
                    MediaFormat.KEY_COLOR_FORMAT, OUTPUT_VIDEO_COLOR_FORMAT);
            outputVideoFormat.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_VIDEO_BIT_RATE);
            outputVideoFormat.setInteger(MediaFormat.KEY_FRAME_RATE, OUTPUT_VIDEO_FRAME_RATE);
            outputVideoFormat.setInteger(
                    MediaFormat.KEY_I_FRAME_INTERVAL, OUTPUT_VIDEO_IFRAME_INTERVAL);
            if (VERBOSE) Log.d(TAG, "video format: " + outputVideoFormat);
            // Create a MediaCodec for the desired codec, then configure it as an encoder with
            // our desired properties. Request a Surface to use for input.
            AtomicReference<Surface> inputSurfaceReference = new AtomicReference<Surface>();
            videoEncoder = createVideoEncoder(
                    videoCodecInfo, outputVideoFormat, inputSurfaceReference);
            inputSurface = new InputSurface(inputSurfaceReference.get());
            inputSurface.makeCurrent();
            // Create a MediaCodec for the decoder, based on the extractor's format.
            outputSurface = new OutputSurface();
            outputSurface.changeFragmentShader(FRAGMENT_SHADER);
            videoDecoder = createVideoDecoder(inputFormat, outputSurface.getSurface());
        }
        if (mCopyAudio) {
            audioExtractor = createExtractor();
            int audioInputTrack = getAndSelectAudioTrackIndex(audioExtractor);
            assertTrue("missing audio track in test video", audioInputTrack != -1);
            MediaFormat inputFormat = audioExtractor.getTrackFormat(audioInputTrack);
            MediaFormat outputAudioFormat = MediaFormat.createAudioFormat(OUTPUT_AUDIO_MIME_TYPE, OUTPUT_AUDIO_SAMPLE_RATE_HZ, OUTPUT_AUDIO_CHANNEL_COUNT);
            outputAudioFormat.setInteger(MediaFormat.KEY_BIT_RATE, OUTPUT_AUDIO_BIT_RATE);
            outputAudioFormat.setInteger(MediaFormat.KEY_AAC_PROFILE, OUTPUT_AUDIO_AAC_PROFILE);
            // Create a MediaCodec for the desired codec, then configure it as an encoder with
            // our desired properties. Request a Surface to use for input.
            audioEncoder = createAudioEncoder(audioCodecInfo, outputAudioFormat);
            // Create a MediaCodec for the decoder, based on the extractor's format.
            audioDecoder = createAudioDecoder(inputFormat);
        }
        // Create a muxer, but do not start it or add any tracks just yet.
        muxer = createMuxer();
        doExtractDecodeEditEncodeMux(videoExtractor, audioExtractor, videoDecoder, videoEncoder, audioDecoder, audioEncoder, muxer, inputSurface, outputSurface);
    } finally {
        if (VERBOSE) Log.d(TAG, "releasing extractor, decoder, encoder, and muxer");
        // Try to release everything we acquired, even if one of the releases fails, in which
        // case we save the first exception we got and re-throw it at the end (unless some
        // other exception has already been thrown). This guarantees that the first exception
        // thrown is reported as the cause of the error, that every release is attempted, and
        // that all other exceptions appear in the logs.
        try {
            if (videoExtractor != null) {
                videoExtractor.release();
            }
        } catch(Exception e) {
            Log.e(TAG, "error while releasing videoExtractor", e);
            if (exception == null) {
                exception = e;
            }
        }
        try {
            if (audioExtractor != null) {
                audioExtractor.release();
            }
        } catch(Exception e) {
            Log.e(TAG, "error while releasing audioExtractor", e);
            if (exception == null) {
                exception = e;
            }
        }
        try {
            if (videoDecoder != null) {
                videoDecoder.stop();
                videoDecoder.release();
            }
        } catch(Exception e) {
            Log.e(TAG, "error while releasing videoDecoder", e);
            if (exception == null) {
                exception = e;
            }
        }
        try {
            if (outputSurface != null) {
                outputSurface.release();
            }
        } catch(Exception e) {
            Log.e(TAG, "error while releasing outputSurface", e);
            if (exception == null) {
                exception = e;
            }
        }
        try {
            if (videoEncoder != null) {
                videoEncoder.stop();
                videoEncoder.release();
            }
        } catch(Exception e) {
            Log.e(TAG, "error while releasing videoEncoder", e);
            if (exception == null) {
                exception = e;
            }
        }
        try {
            if (audioDecoder != null) {
                audioDecoder.stop();
                audioDecoder.release();
            }
        } catch(Exception e) {
            Log.e(TAG, "error while releasing audioDecoder", e);
            if (exception == null) {
                exception = e;
            }
        }
        try {
            if (audioEncoder != null) {
                audioEncoder.stop();
                audioEncoder.release();
            }
        } catch(Exception e) {
            Log.e(TAG, "error while releasing audioEncoder", e);
            if (exception == null) {
                exception = e;
            }
        }
        try {
            if (muxer != null) {
                muxer.stop();
                muxer.release();
            }
        } catch(Exception e) {
            Log.e(TAG, "error while releasing muxer", e);
            if (exception == null) {
                exception = e;
            }
        }
        try {
            if (inputSurface != null) {
                inputSurface.release();
            }
        } catch(Exception e) {
            Log.e(TAG, "error while releasing inputSurface", e);
            if (exception == null) {
                exception = e;
            }
        }
    }
    if (exception != null) {
        throw exception;
    }
}
/**
 * Creates an extractor that reads its frames from {@link #mInputFile}.
 */
private MediaExtractor createExtractor() throws IOException {
    MediaExtractor extractor = new MediaExtractor();
    extractor.setDataSource(mInputFile);
    return extractor;
}
/**
 * Creates a decoder for the given format, which outputs to the given surface.
 *
 * @param inputFormat the format of the stream to decode
 * @param surface into which to decode the frames
 */
private MediaCodec createVideoDecoder(MediaFormat inputFormat, Surface surface) throws IOException {
    MediaCodec decoder = MediaCodec.createDecoderByType(getMimeTypeFor(inputFormat));
    decoder.configure(inputFormat, surface, null, 0);
    decoder.start();
    return decoder;
}

private MediaCodec createVideoEncoder(
        MediaCodecInfo codecInfo,
        MediaFormat format,
        AtomicReference<Surface> surfaceReference) throws IOException {
    MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
    encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    // Must be called before start() is.
    surfaceReference.set(encoder.createInputSurface());
    encoder.start();
    return encoder;
}

private MediaCodec createAudioDecoder(MediaFormat inputFormat) throws IOException {
    MediaCodec decoder = MediaCodec.createDecoderByType(getMimeTypeFor(inputFormat));
    decoder.configure(inputFormat, null, null, 0);
    decoder.start();
    return decoder;
}

private MediaCodec createAudioEncoder(MediaCodecInfo codecInfo, MediaFormat format) throws IOException {
    MediaCodec encoder = MediaCodec.createByCodecName(codecInfo.getName());
    encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
    encoder.start();
    return encoder;
}

private MediaMuxer createMuxer() throws IOException {
    return new MediaMuxer(mOutputFile, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
}
private int getAndSelectVideoTrackIndex(MediaExtractor extractor) {
    for (int index = 0; index < extractor.getTrackCount(); ++index) {
        if (VERBOSE) {
            Log.d(TAG, "format for track " + index + " is "
                    + getMimeTypeFor(extractor.getTrackFormat(index)));
        }
        if (isVideoFormat(extractor.getTrackFormat(index))) {
            extractor.selectTrack(index);
            MediaFormat inputFormat = extractor.getTrackFormat(index);
            mOutputWidth = inputFormat.getInteger(MediaFormat.KEY_WIDTH);
            mOutputHeight = inputFormat.getInteger(MediaFormat.KEY_HEIGHT);
            return index;
        }
    }
    return -1;
}
private int getAndSelectAudioTrackIndex(MediaExtractor extractor) {
    for (int index = 0; index < extractor.getTrackCount(); ++index) {
        if (VERBOSE) {
            Log.d(TAG, "format for track " + index + " is "
                    + getMimeTypeFor(extractor.getTrackFormat(index)));
        }
        if (isAudioFormat(extractor.getTrackFormat(index))) {
            extractor.selectTrack(index);
            return index;
        }
    }
    return -1;
}

private void doExtractDecodeEditEncodeMux(MediaExtractor videoExtractor, MediaExtractor audioExtractor, MediaCodec videoDecoder, MediaCodec videoEncoder, MediaCodec audioDecoder, MediaCodec audioEncoder, MediaMuxer muxer, InputSurface inputSurface, OutputSurface outputSurface) {
    ByteBuffer[] videoDecoderInputBuffers = null;
    ByteBuffer[] videoDecoderOutputBuffers = null;
    ByteBuffer[] videoEncoderOutputBuffers = null;
    MediaCodec.BufferInfo videoDecoderOutputBufferInfo = null;
    MediaCodec.BufferInfo videoEncoderOutputBufferInfo = null;
    if (mCopyVideo) {
        videoDecoderInputBuffers = videoDecoder.getInputBuffers();
        videoDecoderOutputBuffers = videoDecoder.getOutputBuffers();
        videoEncoderOutputBuffers = videoEncoder.getOutputBuffers();
        videoDecoderOutputBufferInfo = new MediaCodec.BufferInfo();
        videoEncoderOutputBufferInfo = new MediaCodec.BufferInfo();
    }
    ByteBuffer[] audioDecoderInputBuffers = null;
    ByteBuffer[] audioDecoderOutputBuffers = null;
    ByteBuffer[] audioEncoderInputBuffers = null;
    ByteBuffer[] audioEncoderOutputBuffers = null;
    MediaCodec.BufferInfo audioDecoderOutputBufferInfo = null;
    MediaCodec.BufferInfo audioEncoderOutputBufferInfo = null;
    if (mCopyAudio) {
        audioDecoderInputBuffers = audioDecoder.getInputBuffers();
        audioDecoderOutputBuffers =  audioDecoder.getOutputBuffers();
        audioEncoderInputBuffers = audioEncoder.getInputBuffers();
        audioEncoderOutputBuffers = audioEncoder.getOutputBuffers();
        audioDecoderOutputBufferInfo = new MediaCodec.BufferInfo();
        audioEncoderOutputBufferInfo = new MediaCodec.BufferInfo();
    }
    // We will get these from the decoders when notified of a format change.
    MediaFormat decoderOutputVideoFormat = null;
    MediaFormat decoderOutputAudioFormat = null;
    // We will get these from the encoders when notified of a format change.
    MediaFormat encoderOutputVideoFormat = null;
    MediaFormat encoderOutputAudioFormat = null;
    // We will determine these once we have the output format.
    int outputVideoTrack = -1;
    int outputAudioTrack = -1;
    // Whether things are done on the video side.
    boolean videoExtractorDone = false;
    boolean videoDecoderDone = false;
    boolean videoEncoderDone = false;
    // Whether things are done on the audio side.
    boolean audioExtractorDone = false;
    boolean audioDecoderDone = false;
    boolean audioEncoderDone = false;
    // The audio decoder output buffer to process, -1 if none.
    int pendingAudioDecoderOutputBufferIndex = -1;
    boolean muxing = false;
    int videoExtractedFrameCount = 0;
    int videoDecodedFrameCount = 0;
    int videoEncodedFrameCount = 0;
    int audioExtractedFrameCount = 0;
    int audioDecodedFrameCount = 0;
    int audioEncodedFrameCount = 0;
    while ((mCopyVideo && !videoEncoderDone) || (mCopyAudio && !audioEncoderDone)) {



        //1: Extract video from file and feed to decoder.
        // Do not extract video if we have determined the output format but we are not yet
        // ready to mux the frames.
        while (mCopyVideo && !videoExtractorDone && (encoderOutputVideoFormat == null || muxing)) {

            int decoderInputBufferIndex = videoDecoder.dequeueInputBuffer(TIMEOUT_USEC);

            if (decoderInputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                if (VERBOSE) Log.d(TAG, "no video decoder input buffer");
                break;
            }
            if (VERBOSE)Log.d(TAG, "video decoder: returned input buffer: " + decoderInputBufferIndex);

            ByteBuffer decoderInputBuffer = videoDecoderInputBuffers[decoderInputBufferIndex];

            int size = videoExtractor.readSampleData(decoderInputBuffer, 0);
            long presentationTime = videoExtractor.getSampleTime();
            if (VERBOSE)Log.d(TAG, "video extractor: returned buffer of size " + size +" for time "+presentationTime);


            if (size >= 0) videoDecoder.queueInputBuffer(decoderInputBufferIndex, 0, size, presentationTime,videoExtractor.getSampleFlags());

            videoExtractorDone = !videoExtractor.advance();
            if (videoExtractorDone) {
                if (VERBOSE) Log.d(TAG, "video extractor: EOS");
                videoDecoder.queueInputBuffer(decoderInputBufferIndex, 0, 0, 0, MediaCodec.BUFFER_FLAG_END_OF_STREAM);
            }
            videoExtractedFrameCount++;
            // We extracted a frame, let's try something else next.
            break;
        }

        //3: Poll output frames from the video decoder and feed the encoder.
        while (mCopyVideo && !videoDecoderDone && (encoderOutputVideoFormat == null || muxing)) {

            int decoderOutputBufferIndex = videoDecoder.dequeueOutputBuffer(videoDecoderOutputBufferInfo, TIMEOUT_USEC);

            if (decoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                if (VERBOSE) Log.d(TAG, "no video decoder output buffer");
                break;
            }
            if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                if (VERBOSE) Log.d(TAG, "video decoder: output buffers changed");
                videoDecoderOutputBuffers = videoDecoder.getOutputBuffers();
                break;
            }
            if (decoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                decoderOutputVideoFormat = videoDecoder.getOutputFormat();
                if (VERBOSE) Log.d(TAG, "video decoder: output format changed: " + decoderOutputVideoFormat);
                break;
            }
            if (VERBOSE) {
                Log.d(TAG, "video decoder: returned output buffer: " + decoderOutputBufferIndex);
                Log.d(TAG, "video decoder: returned buffer of size " + videoDecoderOutputBufferInfo.size);
            }
            ByteBuffer decoderOutputBuffer = videoDecoderOutputBuffers[decoderOutputBufferIndex];
            if ((videoDecoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG)!= 0) {
                if (VERBOSE) Log.d(TAG, "video decoder: codec config buffer");
                videoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, false);
                break;
            }
            if (VERBOSE)Log.d(TAG, "video decoder: returned buffer for time " + videoDecoderOutputBufferInfo.presentationTimeUs);

            boolean render = videoDecoderOutputBufferInfo.size != 0;
            videoDecoder.releaseOutputBuffer(decoderOutputBufferIndex, render);
            if (render) {
                if (VERBOSE) Log.d(TAG, "output surface: await new image");
                outputSurface.awaitNewImage();
                // Edit the frame and send it to the encoder.
                if (VERBOSE) Log.d(TAG, "output surface: draw image");
                outputSurface.drawImage();
                inputSurface.setPresentationTime(videoDecoderOutputBufferInfo.presentationTimeUs * 1000);
                if (VERBOSE) Log.d(TAG, "input surface: swap buffers");
                inputSurface.swapBuffers();
                if (VERBOSE) Log.d(TAG, "video encoder: notified of new frame");
            }
            if ((videoDecoderOutputBufferInfo.flags
                    & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                if (VERBOSE) Log.d(TAG, "video decoder: EOS");
                videoDecoderDone = true;
                videoEncoder.signalEndOfInputStream();
            }
            videoDecodedFrameCount++;
            // We extracted a pending frame, let's try something else next.
            break;
        }



        //6: Poll frames from the video encoder and send them to the muxer.
        while (mCopyVideo && !videoEncoderDone && (encoderOutputVideoFormat == null || muxing)) {
            int encoderOutputBufferIndex = videoEncoder.dequeueOutputBuffer(videoEncoderOutputBufferInfo, TIMEOUT_USEC);
            if (encoderOutputBufferIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                if (VERBOSE) Log.d(TAG, "no video encoder output buffer");
                break;
            }
            if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                if (VERBOSE) Log.d(TAG, "video encoder: output buffers changed");
                videoEncoderOutputBuffers = videoEncoder.getOutputBuffers();
                break;
            }
            if (encoderOutputBufferIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                if (VERBOSE) Log.d(TAG, "video encoder: output format changed");
                if (outputVideoTrack >= 0) {
                    fail("video encoder changed its output format again?");
                }
                encoderOutputVideoFormat = videoEncoder.getOutputFormat();
                break;
            }
            assertTrue("should have added track before processing output", muxing);
            if (VERBOSE) {
                Log.d(TAG, "video encoder: returned output buffer: " + encoderOutputBufferIndex);
                Log.d(TAG, "video encoder: returned buffer of size " + videoEncoderOutputBufferInfo.size);
            }
            ByteBuffer encoderOutputBuffer = videoEncoderOutputBuffers[encoderOutputBufferIndex];
            if ((videoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                if (VERBOSE) Log.d(TAG, "video encoder: codec config buffer");
                // Simply ignore codec config buffers.
                videoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
                break;
            }
            if (VERBOSE) Log.d(TAG, "video encoder: returned buffer for time " + videoEncoderOutputBufferInfo.presentationTimeUs);

            if (videoEncoderOutputBufferInfo.size != 0) {
                muxer.writeSampleData(outputVideoTrack, encoderOutputBuffer, videoEncoderOutputBufferInfo);
            }
            if ((videoEncoderOutputBufferInfo.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM)
                    != 0) {
                if (VERBOSE) Log.d(TAG, "video encoder: EOS");
                videoEncoderDone = true;
            }
            videoEncoder.releaseOutputBuffer(encoderOutputBufferIndex, false);
            videoEncodedFrameCount++;
            // We enqueued an encoded frame, let's try something else next.
            break;
        }

        if (!muxing
                && (!mCopyAudio || encoderOutputAudioFormat != null)
                && (!mCopyVideo || encoderOutputVideoFormat != null)) {
            if (mCopyVideo) {
                Log.d(TAG, "muxer: adding video track.");
                outputVideoTrack = muxer.addTrack(encoderOutputVideoFormat);
            }
            if (mCopyAudio) {
                Log.d(TAG, "muxer: adding audio track.");
                outputAudioTrack = muxer.addTrack(encoderOutputAudioFormat);
            }
            Log.d(TAG, "muxer: starting");
            muxer.start();
            muxing = true;
        }
    }
    // Basic sanity checks.
    if (mCopyVideo) {
        assertEquals("encoded and decoded video frame counts should match",
                videoDecodedFrameCount, videoEncodedFrameCount);
        assertTrue("decoded frame count should be less than extracted frame count",
                videoDecodedFrameCount <= videoExtractedFrameCount);
    }
    if (mCopyAudio) {
        assertEquals("no frame should be pending", -1, pendingAudioDecoderOutputBufferIndex);
    }
    // TODO: Check the generated output file.
}
private static boolean isVideoFormat(MediaFormat format) {
    return getMimeTypeFor(format).startsWith("video/");
}
private static boolean isAudioFormat(MediaFormat format) {
    return getMimeTypeFor(format).startsWith("audio/");
}
private static String getMimeTypeFor(MediaFormat format) {
    return format.getString(MediaFormat.KEY_MIME);
}
/**
 * Returns the first codec capable of encoding the specified MIME type, or null if no match was
 * found.
 */
private static MediaCodecInfo selectCodec(String mimeType) {
    int numCodecs = MediaCodecList.getCodecCount();
    for (int i = 0; i < numCodecs; i++) {
        MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);
        if (!codecInfo.isEncoder()) {
            continue;
        }
        String[] types = codecInfo.getSupportedTypes();
        for (int j = 0; j < types.length; j++) {
            if (types[j].equalsIgnoreCase(mimeType)) {
                return codecInfo;
            }
        }
    }
    return null;
}
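A working fix is to run extractDecodeEditEncodeMux() on its own worker thread instead of calling it directly in onCreate(), as in the answer below. The likely reason this matters: OutputSurface's SurfaceTexture delivers its frame-available callback through a Looper, and when the whole pipeline runs on the main thread, that thread is blocked inside awaitNewImage() while the callback that would wake it up sits queued on the main thread's Looper behind it; the wait then presumably times out (a "frame wait timed out" style failure) and decoding/encoding stop. A plain background thread has no Looper of its own, so the callback can be dispatched on the now-idle main thread and signal the waiting worker: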
  @Override
  protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    Thread myThread = new Thread(new Runnable() {
        @Override
        public void run() {
            try {
                extractDecodeEditEncodeMux();
            } catch (Exception e) {
                e.printStackTrace();
            }
        }
    });
    myThread.start();
}