
Android: converting images to video with MediaCodec and MediaMuxer


I have a bunch of local images saved as JPEG files. The images were captured with a camera preview, whose format defaults to NV21. I want to generate a short video from a fixed number of these images.

I don't plan to use FFmpeg, because it requires the NDK and brings compatibility issues.

MediaCodec and MediaMuxer look like they should work, but I haven't found a working solution on the web.

Here are the references that led me to my current solution:

1. EncodeAndMuxTest

This one was written by fadden. It fits my needs very well, except that he uses createInputSurface rather than queueInputBuffer.

2.

I followed this answer for the color conversion.

3.

This question looks very similar to mine, but I don't want the hassle it goes through with MediaMuxer.

My code is as follows:

public class EncodeAndMux extends Activity {
private static final String TAG = "EncodeAndMuxTest";

private static final boolean VERBOSE = false;

private static final File OUTPUT_DIR = Environment
        .getExternalStorageDirectory();

private static final String MIME_TYPE = "video/avc";

private static final int FRAME_RATE = 10;
// 10 seconds between I-frames
private static final int IFRAME_INTERVAL = 10;

private static final int NUM_FRAMES = 5;
private static final String DEBUG_FILE_NAME_BASE = "/sdcard/test";
// size of a frame, in pixels
private int mWidth = -1;

private int mHeight = -1;
// bit rate, in bits per second
private int mBitRate = -1;

private byte[] mFrame;

// largest color component delta seen (i.e. actual vs. expected)
private int mLargestColorDelta;
// encoder / muxer state
private MediaCodec mEncoder;
private MediaMuxer mMuxer;
private int mTrackIndex;
private boolean mMuxerStarted;
private Utils mUtils;
private float mPadding;
private int mColumnWidth;

private static final int TEST_Y = 120; // YUV values for colored rect
private static final int TEST_U = 160;
private static final int TEST_V = 200;
private static final int TEST_R0 = 0; // RGB equivalent of {0,0,0}
private static final int TEST_G0 = 136;
private static final int TEST_B0 = 0;
private static final int TEST_R1 = 236; // RGB equivalent of {120,160,200}
private static final int TEST_G1 = 50;
private static final int TEST_B1 = 186;

private static final boolean DEBUG_SAVE_FILE = false; // save copy of
                                                        // encoded movie
// allocate one of these up front so we don't need to do it every time
private MediaCodec.BufferInfo mBufferInfo;
private ArrayList<String> mImagePaths = new ArrayList<String>();

byte[] getNV21(int inputWidth, int inputHeight, Bitmap scaled) {

    int[] argb = new int[inputWidth * inputHeight];
    scaled.getPixels(argb, 0, inputWidth, 0, 0, inputWidth, inputHeight);
    byte[] yuv = new byte[inputWidth * inputHeight * 3 / 2];
    encodeYUV420SP(yuv, argb, inputWidth, inputHeight);
    scaled.recycle();
    return yuv;
}

void encodeYUV420SP(byte[] yuv420sp, int[] argb, int width, int height) {
    final int frameSize = width * height;

    int yIndex = 0;
    int uvIndex = frameSize;

    int a, R, G, B, Y, U, V;
    int index = 0;
    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {

            a = (argb[index] & 0xff000000) >> 24; // a is not used obviously
            R = (argb[index] & 0xff0000) >> 16;
            G = (argb[index] & 0xff00) >> 8;
            B = (argb[index] & 0xff) >> 0;

            // well known RGB to YUV algorithm
            Y = ((66 * R + 129 * G + 25 * B + 128) >> 8) + 16;
            U = ((-38 * R - 74 * G + 112 * B + 128) >> 8) + 128;
            V = ((112 * R - 94 * G - 18 * B + 128) >> 8) + 128;

            // NV21 has a plane of Y and interleaved planes of VU each
            // sampled by a factor of 2
            // meaning for every 4 Y pixels there are 1 V and 1 U. Note the
            // sampling is every other
            // pixel AND every other scanline.
            yuv420sp[yIndex++] = (byte) ((Y < 0) ? 0
                    : ((Y > 255) ? 255 : Y));
            if (j % 2 == 0 && index % 2 == 0) {
                yuv420sp[uvIndex++] = (byte) ((V < 0) ? 0
                        : ((V > 255) ? 255 : V));
                yuv420sp[uvIndex++] = (byte) ((U < 0) ? 0
                        : ((U > 255) ? 255 : U));
            }

            index++;
        }
    }
}

public static Bitmap decodeFile(String filePath, int WIDTH, int HEIGHT) {
    try {

        File f = new File(filePath);

        BitmapFactory.Options o = new BitmapFactory.Options();
        o.inJustDecodeBounds = true;
        o.inPurgeable = true;
        o.inInputShareable = true;
        BitmapFactory.decodeStream(new FileInputStream(f), null, o);

        final int REQUIRED_WIDTH = WIDTH;
        final int REQUIRED_HEIGHT = HEIGHT;
        int scale = 1;
        while (o.outWidth / scale / 2 >= REQUIRED_WIDTH
                && o.outHeight / scale / 2 >= REQUIRED_HEIGHT)
            scale *= 2;
        BitmapFactory.Options o2 = new BitmapFactory.Options();
        o2.inSampleSize = scale;
        o2.inPurgeable = true;
        o2.inInputShareable = true;
        return BitmapFactory.decodeStream(new FileInputStream(f), null, o2);
    } catch (FileNotFoundException e) {
        e.printStackTrace();
    }
    return null;
}

@Override
protected void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);
    setContentView(R.layout.activity_encode_and_mux);
    mUtils = new Utils(this);
    mImagePaths = mUtils.getBackFilePaths();
    mPadding = TypedValue.applyDimension(TypedValue.COMPLEX_UNIT_DIP,
            AppConstant.GRID_PADDING, getResources().getDisplayMetrics());
    mColumnWidth = (int) ((mUtils.getScreenWidth() - ((AppConstant.NUM_OF_COLUMNS + 1) * mPadding)) / AppConstant.NUM_OF_COLUMNS);


    try {
        testEncodeDecodeVideoFromBufferToSurface720p();
    } catch (Exception e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    } catch (Throwable e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}

/**
 * Returns the first codec capable of encoding the specified MIME type, or null if no
 * match was found.
 */
private static MediaCodecInfo selectCodec(String mimeType) {
    int numCodecs = MediaCodecList.getCodecCount();
    for (int i = 0; i < numCodecs; i++) {
        MediaCodecInfo codecInfo = MediaCodecList.getCodecInfoAt(i);

        if (!codecInfo.isEncoder()) {
            continue;
        }

        String[] types = codecInfo.getSupportedTypes();
        for (int j = 0; j < types.length; j++) {
            if (types[j].equalsIgnoreCase(mimeType)) {
                return codecInfo;
            }
        }
    }
    return null;
}

/**
 * Returns a color format that is supported by the codec and by this test code.  If no
 * match is found, this throws a test failure -- the set of formats known to the test
 * should be expanded for new platforms.
 */
private static int selectColorFormat(MediaCodecInfo codecInfo, String mimeType) {
    MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType(mimeType);
    for (int i = 0; i < capabilities.colorFormats.length; i++) {
        int colorFormat = capabilities.colorFormats[i];
        if (isRecognizedFormat(colorFormat)) {
            return colorFormat;
        }
    }
    Log.e("","couldn't find a good color format for " + codecInfo.getName() + " / " + mimeType);
    return 0;   // not reached
}

/**
 * Returns true if this is a color format that this test code understands (i.e. we know how
 * to read and generate frames in this format).
 */
private static boolean isRecognizedFormat(int colorFormat) {
    switch (colorFormat) {
        // these are the formats we know how to handle for this test
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
            return true;
        default:
            return false;
    }
}

/**
 * Returns true if the specified color format is semi-planar YUV.  Throws an exception
 * if the color format is not recognized (e.g. not YUV).
 */
private static boolean isSemiPlanarYUV(int colorFormat) {
    switch (colorFormat) {
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
            return false;
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
        case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
            return true;
        default:
            throw new RuntimeException("unknown format " + colorFormat);
    }
}

/**
 * Does the actual work for encoding frames from buffers of byte[].
 */
private void doEncodeDecodeVideoFromBuffer(MediaCodec encoder, int encoderColorFormat,
        MediaCodec decoder, boolean toSurface) {
    final int TIMEOUT_USEC = 10000;
    ByteBuffer[] encoderInputBuffers = encoder.getInputBuffers();
    ByteBuffer[] encoderOutputBuffers = encoder.getOutputBuffers();
    ByteBuffer[] decoderInputBuffers = null;
    ByteBuffer[] decoderOutputBuffers = null;
    MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
    MediaFormat decoderOutputFormat = null;
    int generateIndex = 0;
    int checkIndex = 0;
    int badFrames = 0;
    boolean decoderConfigured = false;
    OutputSurface outputSurface = null;

    // The size of a frame of video data, in the formats we handle, is stride*sliceHeight
    // for Y, and (stride/2)*(sliceHeight/2) for each of the Cb and Cr channels.  Application
    // of algebra and assuming that stride==width and sliceHeight==height yields:

    // Just out of curiosity.
    long rawSize = 0;
    long encodedSize = 0;

    // Save a copy to disk.  Useful for debugging the test.  Note this is a raw elementary
    // stream, not a .mp4 file, so not all players will know what to do with it.


    if (toSurface) {
        outputSurface = new OutputSurface(mWidth, mHeight);
    }

    // Loop until the output side is done.
    boolean inputDone = false;
    boolean encoderDone = false;
    boolean outputDone = false;
    while (!outputDone) {
        Log.e(TAG, "loop");

        // If we're not done submitting frames, generate a new one and submit it.  By
        // doing this on every loop we're working to ensure that the encoder always has
        // work to do.
        //
        // We don't really want a timeout here, but sometimes there's a delay opening
        // the encoder device, so a short timeout can keep us from spinning hard.
        if (!inputDone) {
            int inputBufIndex = encoder.dequeueInputBuffer(TIMEOUT_USEC);
            Log.e(TAG, "inputBufIndex=" + inputBufIndex);
            if (inputBufIndex >= 0) {
                long ptsUsec = computePresentationTime(generateIndex);
                if (generateIndex == NUM_FRAMES) {
                    // Send an empty frame with the end-of-stream flag set.  If we set EOS
                    // on a frame with data, that frame data will be ignored, and the
                    // output will be short one frame.
                    encoder.queueInputBuffer(inputBufIndex, 0, 0, ptsUsec,
                            MediaCodec.BUFFER_FLAG_END_OF_STREAM);
                    inputDone = true;
                    Log.e(TAG, "sent input EOS (with zero-length frame)");
                } else {
                    generateFrame(generateIndex, encoderColorFormat, mFrame);
                    //generateFrame(generateIndex);

                    ByteBuffer inputBuf = encoderInputBuffers[inputBufIndex];
                    // the buffer should be sized to hold one full frame
                    inputBuf.clear();
                    inputBuf.put(mFrame);

                    encoder.queueInputBuffer(inputBufIndex, 0, mFrame.length, ptsUsec, 0);
                    Log.e(TAG, "submitted frame " + generateIndex + " to enc");
                }
                generateIndex++;
            } else {
                // either all in use, or we timed out during initial setup
                Log.e(TAG, "input buffer not available");
            }
        }

        // Check for output from the encoder.  If there's no output yet, we either need to
        // provide more input, or we need to wait for the encoder to work its magic.  We
        // can't actually tell which is the case, so if we can't get an output buffer right
        // away we loop around and see if it wants more input.
        //
        // Once we get EOS from the encoder, we don't need to do this anymore.
        if (!encoderDone) {
            int encoderStatus = encoder.dequeueOutputBuffer(info, TIMEOUT_USEC);
            if (encoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                Log.e(TAG, "no output from encoder available");
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // not expected for an encoder
                encoderOutputBuffers = encoder.getOutputBuffers();
                Log.e(TAG, "encoder output buffers changed");
            } else if (encoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // not expected for an encoder


                if (mMuxerStarted) {
                    throw new RuntimeException("format changed twice");
                }
                MediaFormat newFormat = encoder.getOutputFormat();
                Log.e(TAG, "encoder output format changed: " + newFormat);

                // now that we have the Magic Goodies, start the muxer
                mTrackIndex = mMuxer.addTrack(newFormat);
                Log.e(TAG, "muxer defined muxer format: " + newFormat);
                mMuxer.start();
                mMuxerStarted = true;

            } else if (encoderStatus < 0) {
                Log.e("","unexpected result from encoder.dequeueOutputBuffer: " + encoderStatus);
            } else { // encoderStatus >= 0
                ByteBuffer encodedData = encoderOutputBuffers[encoderStatus];
                if (encodedData == null) {
                    throw new RuntimeException("encoderOutputBuffer " + encoderStatus + " was null");
                }

                // It's usually necessary to adjust the ByteBuffer values to match BufferInfo.
                encodedData.position(info.offset);
                encodedData.limit(info.offset + info.size);

                encodedSize += info.size;

                if ((info.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
                    // Codec config info.  Only expected on first packet.  One way to
                    // handle this is to manually stuff the data into the MediaFormat
                    // and pass that to configure().  We do that here to exercise the API.

                    MediaFormat format =
                            MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
                    format.setByteBuffer("csd-0", encodedData);
                    decoder.configure(format, toSurface ? outputSurface.getSurface() : null,
                            null, 0);

                    decoder.start();
                    decoderInputBuffers = decoder.getInputBuffers();
                    decoderOutputBuffers = decoder.getOutputBuffers();
                    decoderConfigured = true;
                    Log.e(TAG, "decoder configured (" + info.size + " bytes)"+format);
                } else {
                    // Get a decoder input buffer, blocking until it's available.

                    int inputBufIndex = decoder.dequeueInputBuffer(-1);
                    ByteBuffer inputBuf = decoderInputBuffers[inputBufIndex];
                    inputBuf.clear();
                    inputBuf.put(encodedData);
                    decoder.queueInputBuffer(inputBufIndex, 0, info.size,
                            info.presentationTimeUs, info.flags);

                    encoderDone = (info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0;
                    Log.e(TAG, "passed " + info.size + " bytes to decoder"
                            + (encoderDone ? " (EOS)" : ""));
                    Log.e("encoderDone",encoderDone+"");
                }

                encoder.releaseOutputBuffer(encoderStatus, false);
            }
        }

        // Check for output from the decoder.  We want to do this on every loop to avoid
        // the possibility of stalling the pipeline.  We use a short timeout to avoid
        // burning CPU if the decoder is hard at work but the next frame isn't quite ready.
        //
        // If we're decoding to a Surface, we'll get notified here as usual but the
        // ByteBuffer references will be null.  The data is sent to Surface instead.
        if (decoderConfigured) {
            int decoderStatus = decoder.dequeueOutputBuffer(info, 3*TIMEOUT_USEC);
            if (decoderStatus == MediaCodec.INFO_TRY_AGAIN_LATER) {
                // no output available yet
                Log.e(TAG, "no output from decoder available");
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                // The storage associated with the direct ByteBuffer may already be unmapped,
                // so attempting to access data through the old output buffer array could
                // lead to a native crash.
                Log.e(TAG, "decoder output buffers changed");
                decoderOutputBuffers = decoder.getOutputBuffers();
            } else if (decoderStatus == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                // this happens before the first frame is returned
                decoderOutputFormat = decoder.getOutputFormat();
                Log.e(TAG, "decoder output format changed: " +
                        decoderOutputFormat);
            } else if (decoderStatus < 0) {
                Log.e(TAG, "unexpected result from deocder.dequeueOutputBuffer: " + decoderStatus);

            } else {  // decoderStatus >= 0
                if (!toSurface) {
                    ByteBuffer outputFrame = decoderOutputBuffers[decoderStatus];

                    outputFrame.position(info.offset);
                    outputFrame.limit(info.offset + info.size);
                    mMuxer.writeSampleData(mTrackIndex, outputFrame,
                            info);
                    rawSize += info.size;
                    if (info.size == 0) {
                        Log.e(TAG, "got empty frame");
                    } else {
                        Log.e(TAG, "decoded, checking frame " + checkIndex);

                        if (!checkFrame(checkIndex++, decoderOutputFormat, outputFrame)) {
                            badFrames++;
                        }
                    }

                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        Log.e(TAG, "output EOS");
                        outputDone = true;
                    }
                    decoder.releaseOutputBuffer(decoderStatus, false /*render*/);
                } else {
                    Log.e(TAG, "surface decoder given buffer " + decoderStatus +
                            " (size=" + info.size + ")");
                    rawSize += info.size;
                    if ((info.flags & MediaCodec.BUFFER_FLAG_END_OF_STREAM) != 0) {
                        Log.e(TAG, "output EOS");
                        outputDone = true;
                    }

                    boolean doRender = (info.size != 0);

                    // As soon as we call releaseOutputBuffer, the buffer will be forwarded
                    // to SurfaceTexture to convert to a texture.  The API doesn't guarantee
                    // that the texture will be available before the call returns, so we
                    // need to wait for the onFrameAvailable callback to fire.
                    decoder.releaseOutputBuffer(decoderStatus, doRender);
                    if (doRender) {
                        Log.e(TAG, "awaiting frame " + checkIndex);

                        outputSurface.awaitNewImage();
                        outputSurface.drawImage();
                        if (!checkSurfaceFrame(checkIndex++)) {
                            badFrames++;
                        }
                    }
                }
            }
        }
    }

    Log.e(TAG, "decoded " + checkIndex + " frames at "
            + mWidth + "x" + mHeight + ": raw=" + rawSize + ", enc=" + encodedSize);

    if (outputSurface != null) {
        outputSurface.release();
    }

    if (checkIndex != NUM_FRAMES) {

        Log.e(TAG, "awaiting frame " + checkIndex);
    }
    if (badFrames != 0) {
        Log.e(TAG, "Found " + badFrames + " bad frames");
    }
}
private void generateFrame(int frameIndex) {

    Bitmap bitmap = decodeFile(mImagePaths.get(frameIndex), mColumnWidth,
            mColumnWidth);

    mFrame = getNV21(bitmap.getWidth(), bitmap.getHeight(), bitmap);
}

/**
 * Generates data for frame N into the supplied buffer.  We have an 8-frame animation
 * sequence that wraps around.  It looks like this:
 * <pre>
 *   0 1 2 3
 *   7 6 5 4
 * </pre>
 * We draw one of the eight rectangles and leave the rest set to the zero-fill color.
 */
private void generateFrame(int frameIndex, int colorFormat, byte[] mFrame) {
    final int HALF_WIDTH = mWidth / 2;
    boolean semiPlanar = isSemiPlanarYUV(colorFormat);
    // Set to zero.  In YUV this is a dull green.
    Arrays.fill(mFrame, (byte) 0);

    int startX, startY, countX, countY;

    frameIndex %= 8;
    //frameIndex = (frameIndex / 8) % 8;    // use this instead for debug -- easier to see
    if (frameIndex < 4) {
        startX = frameIndex * (mWidth / 4);
        startY = 0;
    } else {
        startX = (7 - frameIndex) * (mWidth / 4);
        startY = mHeight / 2;
    }

    for (int y = startY + (mHeight/2) - 1; y >= startY; --y) {
        for (int x = startX + (mWidth/4) - 1; x >= startX; --x) {
            if (semiPlanar) {
                // full-size Y, followed by UV pairs at half resolution
                // e.g. Nexus 4 OMX.qcom.video.encoder.avc COLOR_FormatYUV420SemiPlanar
                // e.g. Galaxy Nexus OMX.TI.DUCATI1.VIDEO.H264E
                //        OMX_TI_COLOR_FormatYUV420PackedSemiPlanar
                mFrame[y * mWidth + x] = (byte) TEST_Y;
                if ((x & 0x01) == 0 && (y & 0x01) == 0) {
                    mFrame[mWidth*mHeight + y * HALF_WIDTH + x] = (byte) TEST_U;
                    mFrame[mWidth*mHeight + y * HALF_WIDTH + x + 1] = (byte) TEST_V;
                }
            } else {
                // full-size Y, followed by quarter-size U and quarter-size V
                // e.g. Nexus 10 OMX.Exynos.AVC.Encoder COLOR_FormatYUV420Planar
                // e.g. Nexus 7 OMX.Nvidia.h264.encoder COLOR_FormatYUV420Planar
                mFrame[y * mWidth + x] = (byte) TEST_Y;
                if ((x & 0x01) == 0 && (y & 0x01) == 0) {
                    mFrame[mWidth*mHeight + (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_U;
                    mFrame[mWidth*mHeight + HALF_WIDTH * (mHeight / 2) +
                              (y/2) * HALF_WIDTH + (x/2)] = (byte) TEST_V;
                }
            }
        }
    }
}




 /**
 * Sets the desired frame size and bit rate.
 */
private void setParameters(int width, int height, int bitRate) {
    if ((width % 16) != 0 || (height % 16) != 0) {
        Log.w(TAG, "WARNING: width or height not multiple of 16");
    }
    mWidth = width;
    mHeight = height;
    mBitRate = bitRate;
    mFrame = new byte[mWidth * mHeight * 3 / 2];
}
public void testEncodeDecodeVideoFromBufferToSurface720p() throws Throwable {
    setParameters(1280, 720, 6000000);
    encodeDecodeVideoFromBuffer(false);
}
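
The listing calls encodeDecodeVideoFromBuffer(false), but that helper is not shown in the post. Judging from the log output further down ("found codec", "found colorFormat: 21", the format map) and from fadden's EncodeDecodeTest, which this code is based on, it would select the codec and color format, build the MediaFormat, create the encoder, the decoder and the MediaMuxer, and then hand off to doEncodeDecodeVideoFromBuffer(). A minimal sketch along those lines; the output file name test.mp4 is an assumption, not something stated in the post:

private void encodeDecodeVideoFromBuffer(boolean toSurface) throws Exception {
    // Sketch of the helper the post omits: codec/format selection,
    // encoder + decoder + muxer setup, then the main encode/decode loop.
    MediaCodec encoder = null;
    MediaCodec decoder = null;
    try {
        MediaCodecInfo codecInfo = selectCodec(MIME_TYPE);
        if (codecInfo == null) {
            Log.e(TAG, "found no encoder for " + MIME_TYPE);
            return;
        }
        Log.e(TAG, "found codec: " + codecInfo.getName());
        int colorFormat = selectColorFormat(codecInfo, MIME_TYPE);
        Log.e(TAG, "found colorFormat: " + colorFormat);

        MediaFormat format = MediaFormat.createVideoFormat(MIME_TYPE, mWidth, mHeight);
        format.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
        format.setInteger(MediaFormat.KEY_BIT_RATE, mBitRate);
        format.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
        format.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, IFRAME_INTERVAL);
        Log.e(TAG, "format: " + format);

        encoder = MediaCodec.createByCodecName(codecInfo.getName());
        encoder.configure(format, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
        encoder.start();

        // The decoder is created here but only configured later, once the
        // encoder delivers its codec-config data (csd-0).
        decoder = MediaCodec.createDecoderByType(MIME_TYPE);

        // Output path for the muxer -- "test.mp4" is an assumed name.
        String outputPath = new File(OUTPUT_DIR, "test.mp4").toString();
        mMuxer = new MediaMuxer(outputPath, MediaMuxer.OutputFormat.MUXER_OUTPUT_MPEG_4);
        mTrackIndex = -1;
        mMuxerStarted = false;

        doEncodeDecodeVideoFromBuffer(encoder, colorFormat, decoder, toSurface);
    } finally {
        if (encoder != null) { encoder.stop(); encoder.release(); }
        if (decoder != null) { decoder.stop(); decoder.release(); }
        if (mMuxer != null)  { mMuxer.stop();  mMuxer.release();  mMuxer = null; }
    }
}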

Reading the JPEGs, decompressing them, and then recompressing them costs image quality (and CPU effort/time); simply adding them all, as they are, into a video container is faster and produces a better video.

The MJPEG video format is very old, so (almost) any program can play an MJPEG video.

I suggest a solution along those lines: make an MJPEG movie out of your JPEGs. There is more than one program to choose from; use a search engine (or the site's search bar) to find more source code.

I tested my phone to see whether it understands MJPEG by creating a file with the following command:

ffmpeg.exe -i test_in.mp4 -vcodec mjpeg -acodec copy test_out.mp4

Input:  Stream #0:0(und): Video: h264 (Main) (avc1 / 0x31637661), yuv420p, 1280x720 [SAR 1:1 DAR 16:9], 1568 kb/s, 29.97 fps, 29.97 tbr, 90k tbn, 59.94 tbc (default)
Output: Stream #0:0(und): Video: mjpeg (l[0][0][0] / 0x006C), yuvj420p, 1280x720 [SAR 1:1 DAR 16:9], q=2-31, 200 kb/s, 30k tbn, 29.97 tbc (default)


Unfortunately the stock Android gallery player is one of the programs that does not understand this format, but BSPlayer, VLC, and MPlayer for Android can play it, in case you want the resulting video to be playable on the phone (without writing more code).
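
If you would rather check in code whether a particular device advertises an MJPEG decoder (instead of trying a file in each player), you can walk MediaCodecList the same way selectCodec() in the question walks it for encoders. A small sketch; the MIME string is an assumption ("video/mjpeg" is the common spelling, but vendors are not consistent and many devices ship no MJPEG decoder at all):

// Sketch: look for an MJPEG *decoder* on this device.
// "video/mjpeg" is an assumed MIME string; vendor spellings vary.
private static MediaCodecInfo findMjpegDecoder() {
    for (int i = 0; i < MediaCodecList.getCodecCount(); i++) {
        MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
        if (info.isEncoder()) {
            continue;                    // we want a decoder this time
        }
        for (String type : info.getSupportedTypes()) {
            if (type.equalsIgnoreCase("video/mjpeg")) {
                return info;             // e.g. an OMX.<vendor>.mjpeg decoder
            }
        }
    }
    return null;                         // no MJPEG decoder advertised
}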

For the frame-input side, the closest thing to what you need is probably the buffer-to-buffer or buffer-to-surface test in EncodeDecodeTest, which feeds input to MediaCodec through YUV420 byte buffers. So you can either load the JPEGs as GLES textures and render them, using the Surface input approach, or convert the JPEGs to YUV420 (planar or semi-planar, depending on which format the device supports) and feed them in through a ByteBuffer. The EncodeDecodeTest code illustrates the pixel layout.

Comments on this answer:

Thanks so much @fadden, that is the way we will go. I am debugging on a Nexus 4. When I run it, the output format changes to 0x7FA30C03 OMX_QCOM_COLOR_FormatYUV420PackedSemiPlanar64x32Tile2m8ka and the video cannot be played. Do I need to convert it to another color format?

The output color format does not matter for an encoder, only the input color format does. By "the video cannot be played" I assume the .mp4 file still plays but the contents look scrambled? I don't see any obvious errors in the code. If you replace encodeYUV420SP() with something trivial (maybe generateFrame() from EncodeDecodeTest), does the output look reasonable? (Just trying to narrow down the possible culprits.)

Yes, you are right, thanks a million! It was the conversion that was the problem. Do you know how to convert a JPEG-compressed Bitmap into a YUV420 semi-planar ByteBuffer? The solution I am using only produces a green, scrambled video. Anyone interested in a muxer solution for Android below 4.3 can look here:
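
Regarding the green, scrambled output mentioned in that last comment: a common cause is that getNV21()/encodeYUV420SP() from the question produce NV21 (V before U), while COLOR_FormatYUV420SemiPlanar (21, the value in the log below) expects NV12 ordering (U before V), and COLOR_FormatYUV420Planar expects separate U and V planes. A hedged sketch of a Bitmap-to-YUV420 converter that matches whichever format selectColorFormat() picked; the helper name and structure are mine, not from the thread:

// Sketch: convert an ARGB Bitmap into the encoder's input layout.
// semiPlanar == true  -> NV12: Y plane, then interleaved U,V pairs
//                        (COLOR_FormatYUV420SemiPlanar)
// semiPlanar == false -> I420: Y plane, then U plane, then V plane
//                        (COLOR_FormatYUV420Planar)
// Assumes even width/height; same RGB->YUV coefficients as encodeYUV420SP().
static byte[] bitmapToYuv420(Bitmap bitmap, boolean semiPlanar) {
    int width = bitmap.getWidth();
    int height = bitmap.getHeight();
    int[] argb = new int[width * height];
    bitmap.getPixels(argb, 0, width, 0, 0, width, height);

    byte[] yuv = new byte[width * height * 3 / 2];
    int frameSize = width * height;
    int uIndex = frameSize;                              // start of chroma data
    int vIndex = semiPlanar ? frameSize + 1              // NV12: V interleaved with U
                            : frameSize + frameSize / 4; // I420: V plane after U plane
    int uvStep = semiPlanar ? 2 : 1;

    for (int j = 0; j < height; j++) {
        for (int i = 0; i < width; i++) {
            int p = argb[j * width + i];
            int r = (p >> 16) & 0xff;
            int g = (p >> 8) & 0xff;
            int b = p & 0xff;

            int y = ((66 * r + 129 * g + 25 * b + 128) >> 8) + 16;
            int u = ((-38 * r - 74 * g + 112 * b + 128) >> 8) + 128;
            int v = ((112 * r - 94 * g - 18 * b + 128) >> 8) + 128;

            yuv[j * width + i] = (byte) Math.max(0, Math.min(255, y));
            if ((j & 1) == 0 && (i & 1) == 0) {          // one chroma sample per 2x2 block
                yuv[uIndex] = (byte) Math.max(0, Math.min(255, u));
                yuv[vIndex] = (byte) Math.max(0, Math.min(255, v));
                uIndex += uvStep;
                vIndex += uvStep;
            }
        }
    }
    return yuv;
}

generateFrame(int frameIndex) from the question could then call this with isSemiPlanarYUV(encoderColorFormat) instead of always producing NV21.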
  12-17 18:25:47.405: E/EncodeAndMuxTest(16415): found codec: OMX.qcom.video.encoder.avc
  12-17 18:25:47.405: I/OMXClient(16415): Using client-side OMX mux.
  12-17 18:25:47.455: E/EncodeAndMuxTest(16415): found colorFormat: 21
  12-17 18:25:47.455: E/EncodeAndMuxTest(16415): format: {frame-rate=10, bitrate=6000000, height=720, mime=video/avc, color-format=21, i-frame-interval=10, width=1280}
  12-17 18:25:47.465: I/OMXClient(16415): Using client-side OMX mux.
  12-17 18:25:47.495: E/ACodec(16415): [OMX.qcom.video.encoder.avc] storeMetaDataInBuffers (output) failed w/ err -2147483648
  12-17 18:25:47.495: I/ACodec(16415): setupVideoEncoder succeeded
  12-17 18:25:47.535: I/OMXClient(16415): Using client-side OMX mux.
  12-17 18:25:47.545: E/EncodeAndMuxTest(16415): loop
  12-17 18:25:47.545: E/EncodeAndMuxTest(16415): inputBufIndex=0
  12-17 18:25:47.655: E/EncodeAndMuxTest(16415): submitted frame 0 to enc
  12-17 18:25:47.655: E/EncodeAndMuxTest(16415): encoder output format changed: {csd-1=java.nio.ByteArrayBuffer[position=0,limit=8,capacity=8], height=720, mime=video/avc, csd-0=java.nio.ByteArrayBuffer[position=0,limit=18,capacity=18], what=1869968451, width=1280}
  12-17 18:25:47.655: E/EncodeAndMuxTest(16415): muxer defined muxer format: {csd-1=java.nio.ByteArrayBuffer[position=0,limit=8,capacity=8], height=720, mime=video/avc, csd-0=java.nio.ByteArrayBuffer[position=0,limit=18,capacity=18], what=1869968451, width=1280}
 12-17 18:25:47.655: I/MPEG4Writer(16415): limits: 2147483647/0 bytes/us, bit rate: -1 bps and the estimated moov size 3072 bytes
 12-17 18:25:47.655: E/EncodeAndMuxTest(16415): inputBufIndex=2
 12-17 18:25:47.795: E/EncodeAndMuxTest(16415): submitted frame 1 to enc
 12-17 18:25:47.825: E/EncodeAndMuxTest(16415): decoder configured (26 bytes){csd-0=java.nio.DirectByteBuffer[position=0,limit=26,capacity=692224], height=720, width=1280, mime=video/avc}
 12-17 18:25:47.855: E/EncodeAndMuxTest(16415): no output from decoder available
  12-17 18:25:47.855: E/EncodeAndMuxTest(16415): inputBufIndex=0
  12-17 18:25:47.976: E/EncodeAndMuxTest(16415): submitted frame 2 to enc
  12-17 18:25:48.136: E/EncodeAndMuxTest(16415): passed 3188 bytes to decoder
  12-17 18:25:48.176: E/EncodeAndMuxTest(16415): no output from decoder available
  12-17 18:25:48.176: E/EncodeAndMuxTest(16415): inputBufIndex=1
  12-17 18:25:48.296: E/EncodeAndMuxTest(16415): submitted frame 3 to enc
  12-17 18:25:48.296: E/EncodeAndMuxTest(16415): passed 1249 bytes to decoder
 12-17 18:25:48.326: E/EncodeAndMuxTest(16415): no output from decoder available
  12-17 18:25:48.326: E/EncodeAndMuxTest(16415): loop
  12-17 18:25:48.326: E/EncodeAndMuxTest(16415): inputBufIndex=2
   12-17 18:25:48.396: E/EncodeAndMuxTest(16415): submitted frame 4 to enc
   12-17 18:25:48.396: E/EncodeAndMuxTest(16415): passed 3085 bytes to decoder
  12-17 18:25:48.436: E/EncodeAndMuxTest(16415): no output from decoder available
  12-17 18:25:48.436: E/EncodeAndMuxTest(16415): inputBufIndex=0
   12-17 18:25:48.436: E/EncodeAndMuxTest(16415): sent input EOS (with zero-length frame)
  12-17 18:25:48.436: E/EncodeAndMuxTest(16415): passed 3056 bytes to decoder
    12-17 18:25:48.466: E/EncodeAndMuxTest(16415): no output from decoder available
   12-17 18:25:48.466: E/EncodeAndMuxTest(16415): passed 1085 bytes to decoder (EOS)
  12-17 18:25:48.476: E/EncodeAndMuxTest(16415): decoder output buffers changed
  12-17 18:25:48.496: E/EncodeAndMuxTest(16415): decoder output format changed: