使用Jcodec在Android上创建mp4文件

使用Jcodec在Android上创建mp4文件(相关标签:android、mp4、h.264、muxer、jcodec)。

我在Android上使用MediaRecorder和Jcodec编写mp4文件时遇到了一些问题,下面是我的代码

public class SequenceEncoder {
    private final static String CLASSTAG = SequenceEncoder.class.getSimpleName();

    // Channel the muxer writes the MP4 container to.
    private SeekableByteChannel ch;

    // Reusable NV12 conversion buffer; allocated lazily once the frame size is known.
    private byte[] yuv = null;

    // Most recent SPS/PPS parameter sets seen in the stream; written into the
    // sample entry (avcC) by finish().
    private ArrayList<ByteBuffer> spsList;
    private ArrayList<ByteBuffer> ppsList;

    private CompressedTrack outTrack;

    // Index of the next muxed video sample.
    private int frameNo;
    // Index of the next frame fed to the encoder; drives input timestamps.
    private long inputFrameNo;
    private MP4Muxer muxer;

    // Scratch lists reused on every frame to avoid per-frame allocation.
    ArrayList<ByteBuffer> spsListTmp = new ArrayList<ByteBuffer>();
    ArrayList<ByteBuffer> ppsListTmp = new ArrayList<ByteBuffer>();

    // Frames per second, used consistently for the MP4 track timescale,
    // the encoder configuration and input timestamps (was the magic 25
    // repeated in three places).
    private static final int FRAME_RATE = 25;

    // Hardware H.264 encoder; created on the first call to encodeImage().
    private MediaCodec mediaCodec = null;

    /**
     * Opens the output file and prepares an MP4 muxer with a single
     * H.264 video track at {@link #FRAME_RATE} fps. Call
     * {@link #encodeImage} once per frame and {@link #finish} once at
     * the end to finalize the container.
     *
     * @param out destination MP4 file
     * @throws IOException if the file cannot be opened for writing
     */
    public SequenceEncoder(File out) throws IOException {
        this.ch = NIOUtils.writableFileChannel(out);

        // Muxer that will store the encoded frames
        muxer = new MP4Muxer(ch, Brand.MP4);

        // Add video track to muxer
        outTrack = muxer.addTrackForCompressed(TrackType.VIDEO, FRAME_RATE);

        // Encoder extra data (SPS, PPS) to be stored in a special place of the MP4
        spsList = new ArrayList<ByteBuffer>();
        ppsList = new ArrayList<ByteBuffer>();
    }

    /**
     * Converts one RGBA frame to NV12, feeds it to the H.264 encoder and
     * muxes every encoded packet the codec has ready into the MP4 track.
     *
     * @param buffer RGBA pixel data backed by an accessible array
     *               (buffer.array() must succeed)
     * @param width  frame width in pixels; must stay constant across calls
     * @param height frame height in pixels; must stay constant across calls
     * @throws IOException if encoding or muxing fails
     */
    @SuppressWarnings("unchecked")
    public void encodeImage(ByteBuffer buffer, int width, int height) throws IOException {
        if (yuv == null) {
            int bufferSize = width * height * 3 / 2;

            yuv = new byte[bufferSize];

            int bitRate = bufferSize;
            String mimeType = "video/avc";

            // "video/avc"
            mediaCodec = MediaCodec.createEncoderByType(mimeType);
            MediaFormat mediaFormat = MediaFormat.createVideoFormat(mimeType, width, height);
            mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitRate); // 125000);
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, FRAME_RATE);
            // NV12 input: full Y plane followed by interleaved U/V samples.
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT,
                    MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar);
            mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 5);

            mediaCodec.configure(mediaFormat, null, null, MediaCodec.CONFIGURE_FLAG_ENCODE);
            mediaCodec.start();
        }

        byte[] rgba = buffer.array();

        // Convert RGBA image to NV12 (YUV420SemiPlanar)
        Rgba2Yuv420.convert(rgba, yuv, width, height);

        synchronized (mediaCodec) {
            try {
                ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
                ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();

                int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
                if (inputBufferIndex >= 0) {
                    ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
                    inputBuffer.clear();
                    inputBuffer.put(yuv);
                    // FIX: give the encoder a monotonically increasing
                    // presentation time (microseconds) instead of 0 for
                    // every frame.
                    long ptsUs = inputFrameNo * 1000000L / FRAME_RATE;
                    inputFrameNo++;
                    mediaCodec.queueInputBuffer(inputBufferIndex, 0, yuv.length, ptsUs, 0);
                }

                MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
                int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);

                while (outputBufferIndex >= 0) {
                    ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
                    // FIX: honor bufferInfo.offset/size — the valid data is not
                    // guaranteed to start at position 0 of the output buffer.
                    outputBuffer.position(bufferInfo.offset);
                    outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
                    byte[] outData = new byte[bufferInfo.size];
                    outputBuffer.get(outData);

                    ByteBuffer frameBuffer = ByteBuffer.wrap(outData);

                    spsListTmp.clear();
                    ppsListTmp.clear();

                    // Strips SPS/PPS NAL units out of the packet and converts
                    // Annex-B start codes to MP4 length prefixes.
                    H264Utils.encodeMOVPacket(frameBuffer, spsListTmp, ppsListTmp);

                    if (!spsListTmp.isEmpty())
                        spsList = (ArrayList<ByteBuffer>) spsListTmp.clone();
                    if (!ppsListTmp.isEmpty())
                        ppsList = (ArrayList<ByteBuffer>) ppsListTmp.clone();

                    // FIX: codec-config buffers carry only SPS/PPS — keep the
                    // parameter sets (captured above) but do not mux them as a
                    // video sample.
                    if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) == 0
                            && frameBuffer.remaining() > 0) {
                        // FIX: only flag genuine sync frames as key frames
                        // instead of hard-coding every sample as a key frame.
                        boolean isKeyFrame =
                                (bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0;
                        outTrack.addFrame(new MP4Packet(frameBuffer, frameNo, FRAME_RATE, 1,
                                frameNo, isKeyFrame, null, frameNo, 0));
                        frameNo++;
                    }

                    mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
                    outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
                }

                if (outputBufferIndex == MediaCodec.INFO_OUTPUT_BUFFERS_CHANGED) {
                    // The codec re-allocated its output buffers; refresh refs.
                    outputBuffers = mediaCodec.getOutputBuffers();
                }
                // INFO_OUTPUT_FORMAT_CHANGED / INFO_TRY_AGAIN_LATER need no action here.
            } catch (IOException e) {
                throw e;
            } catch (Exception e) {
                // FIX: the original empty catch silently swallowed every
                // encoder/muxer failure, producing corrupt files with no
                // diagnostics. Surface the failure with its cause attached.
                throw new IOException("Failed to encode frame " + frameNo, e);
            }
        }
    }

    /**
     * Stops and releases the encoder, writes the sample entry (SPS/PPS)
     * and the MP4 header, and closes the output channel. Subsequent calls
     * are no-ops once the channel is closed.
     *
     * @throws IOException if finalizing the container fails
     */
    public void finish() throws IOException {
        if (!ch.isOpen())
            return;

        if (mediaCodec != null) {
            mediaCodec.stop();
            mediaCodec.release();
            mediaCodec = null;
        }

        // Store the collected SPS/PPS in the track's avcC sample entry.
        outTrack.addSampleEntry(H264Utils.createMOVSampleEntry(spsList, ppsList));

        // Write MP4 header and finalize recording
        muxer.writeHeader();
        // FIX: close exactly once — the original closed the channel twice
        // (closeQuietly followed by an explicit ch.close()).
        NIOUtils.closeQuietly(ch);
    }
}
公共类SequenceEncoder{
私有最终静态字符串CLASSTAG=SequenceEncoder.class.getSimpleName();
私人频道;
私有字节[]yuv=null;
私有数组列表;
私有数组列表;
私人压缩跑道出口;
私有int框架编号;
私人MP4Muxer-muxer;
ArrayList spsListTmp=新的ArrayList();
ArrayList ppsListTmp=新的ArrayList();
//编码器
专用MediaCodec MediaCodec=null;
公共SequenceEncoder(文件输出)引发IOException{
this.ch=NIOUtils.writableFileChannel(out);
//存储编码帧的多路复用器
muxer=新的MP4Muxer(ch,品牌.MP4);
//将视频曲目添加到muxer
outTrack=muxer.addTrackForCompressed(TrackType.VIDEO,25);
//编码器额外数据(SPS、PPS)存储在
//MP4
spsList=newarraylist();
ppsList=newarraylist();
}
@抑制警告(“未选中”)
public void encodeImage(ByteBuffer缓冲区、int-width、int-height)引发IOException{
if(yuv==null){
int bufferSize=宽度*高度*3/2;
yuv=新字节[bufferSize];
int比特率=缓冲区大小;
整数帧率=25;
字符串mimeType=“视频/avc”;
//“视频/avc”
mediaCodec=mediaCodec.createEncoderByType(mimeType);
MediaFormat MediaFormat=MediaFormat.createVideoFormat(mimeType,width,height);
setInteger(mediaFormat.KEY_BIT_RATE,bitRate);//125000);
mediaFormat.setInteger(mediaFormat.KEY\u FRAME\u RATE,frameRate);
mediaFormat.setInteger(mediaFormat.KEY\u COLOR\u FORMAT,MediaCodecInfo.CodecCapabilities.COLOR\u formatyuv420semipular);
mediaFormat.setInteger(mediaFormat.KEY\u I\u FRAME\u INTERVAL,5);
配置(mediaFormat,null,null,mediaCodec.configure\u FLAG\u ENCODE);
mediaCodec.start();
}
字节[]rgba=buffer.array();
//将RGBA图像转换为NV12(YUV420半平面)
Rgba2Yuv420.转换(rgba、yuv、宽度、高度);
已同步(mediaCodec){
试一试{
ByteBuffer[]inputBuffers=mediaCodec.getInputBuffers();
ByteBuffer[]outputBuffers=mediaCodec.getOutputBuffers();
int-inputBufferIndex=mediaCodec.dequeueInputBuffer(-1);
如果(inputBufferIndex>=0){
ByteBuffer inputBuffer=inputBuffers[inputBufferIndex];
inputBuffer.clear();
inputBuffer.put(yuv);
mediaCodec.queueInputBuffer(inputBufferIndex,0,
yuv.length,0,0);
}
MediaCodec.BufferInfo BufferInfo=新的MediaCodec.BufferInfo();
int outputBufferIndex=mediaCodec.dequeueOutputBuffer(
bufferInfo,0);
而(outputBufferIndex>=0){
ByteBuffer outputBuffer=outputBuffers[outputBufferIndex];
byte[]outData=新字节[bufferInfo.size];
outputBuffer.get(outData);
ByteBuffer帧缓冲区=ByteBuffer.wrap(outData);
spsListTmp.clear();
ppsListTmp.clear();
H264Utils.encodeMOVPacket(帧缓冲区、spsListTmp、PPListTMP);
如果(!spsListTmp.isEmpty())
spsList=(ArrayList)spsListTmp.clone();
如果(!ppsListTmp.isEmpty())
ppsList=(ArrayList)ppsListTmp.clone();
outTrack.addFrame(新的MP4包)(帧缓冲区,帧编号,25,1,
frameNo,true,null,frameNo,0);
frameNo++;
mediaCodec.releaseOutputBuffer(outputBufferIndex,false);
outputBufferIndex=mediaCodec.dequeueOutputBuffer(
bufferInfo,0);
}
如果(outputBufferIndex<0)
开关(outputBufferIndex){
案例MediaCodec.INFO\u输出\u缓冲区\u已更改:
outputBuffers=mediaCodec.getOutputBuffers();
打破
案例MediaCodec.INFO\u输出\u格式\u已更改:
打破
case MediaCodec.INFO\u请稍后再试:
打破
违约:
打破
}
}捕获(例外e){
}
}
}
public void finish()引发IOException{
如果(!ch.isOpen())
返回;
如果(mediaCodec!=null){
mediaCodec.stop();
mediaCodec.release();
}
outTrack.addSampleEntry(H264Utils.createMOVSampleEntry(spsList,ppsList));
//写入MP4标题并完成录制
muxer.writeHeader();
NIOUtils.closes(ch);
ch.close();
}
}
正如我们所见,Android MediaCodec期望以Yuv420作为输入图像格式,所以我传入了相应格式的数据。然而得到的mp4文件已损坏且颜色不正确;当我用AVCon打开这个mp4文件时,看到输出文件中的颜色格式是yuv420p,问题可能出在这里?请建议如何修复此问题。


还有另一个问题,即如何将压缩音频流添加到muxer,尚未找到示例。

Yuv420半平面(NV12)格式下,一幅4x4图像的内存布局是:先排列全部16个Y样本,随后是交错存放的U/V样本,即 YYYYYYYYYYYYYYYY UVUVUVUV(而不是U、V各自成块)。在我按这种格式传入图像之后,就能在Android上用Jcodec和MediaCodec得到颜色正常的mp4文件。

关于音频,我没有答案。

Android 4.3(API 18)有两个可能有用的新功能

首先,
MediaCodec
类可以接受来自 Surface 的输入,因此任何可以解码到 Surface、或用 OpenGL ES 渲染到 Surface 的内容都可以直接录制下来,而无需费心处理