
Android: recording MPEG-TS with MediaRecorder


I am trying to record MPEG-2 TS video with the Android MediaRecorder class so that I can stream it to a server/socket.

Here is the code:

import java.io.File;
import java.io.FileDescriptor;
import java.io.FileOutputStream;

import android.app.Activity;
import android.content.Context;
import android.hardware.Camera;
import android.media.MediaRecorder;
import android.os.Bundle;
import android.os.Environment;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.widget.FrameLayout;

public class MediaRecorderDemo extends Activity
{
private final static String TAG = "MediaRecorderDemo";
Camera camera;
CameraPreview cameraPreview;
MediaRecorder mediaRecorder;
File outputFile = new File(Environment.getExternalStorageDirectory().getPath() + "/out1.ts");

@Override
public void onCreate(Bundle savedInstanceState)
{
    super.onCreate(savedInstanceState);
    setContentView(R.layout.media_recorder_demo_layout);

    camera = getCameraInstance();
    cameraPreview = new CameraPreview(this);
    FrameLayout preview = (FrameLayout) findViewById(R.id.camera_preview);
    preview.addView(cameraPreview);
}

private Camera getCameraInstance()
{
    final String FUNCTION = "getCameraInstance";

    Camera c = null;
    try
    {
        c = Camera.open();
    }
    catch(Exception e)
    {
        Log.e(TAG, FUNCTION + " : " + e.getMessage());
    }

    return c;
}

void initMediaRecorder()
{
    final String FUNCTION = "initMediaRecorder";
    FileDescriptor outputFileFD = null;
    try
    {
        outputFile.createNewFile();
        outputFileFD = new FileOutputStream(outputFile).getFD();
    }
    catch(Exception e)
    {
        Log.e(TAG, FUNCTION + " : " + e.getMessage());
    }

    mediaRecorder = new MediaRecorder();
    mediaRecorder.setOnErrorListener(new MediaRecorder.OnErrorListener() {
        final static String TAG = "MediaRecorder.onErrorListener";

        @Override
        public void onError(MediaRecorder mr, int what, int extra) {
            Log.e(TAG, "Error : " + what + " " + extra);
        }
    });

    camera.unlock();
    mediaRecorder.setPreviewDisplay(cameraPreview.getHolder().getSurface());
    mediaRecorder.setCamera(camera);
    mediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
    mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
    //mediaRecorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_LOW));
    mediaRecorder.setOutputFormat(8);   // 8 == hidden MediaRecorder.OutputFormat.OUTPUT_FORMAT_MPEG2TS
    Log.d(TAG, "File Exists : " + outputFile.exists());
    mediaRecorder.setOutputFile(outputFileFD);
    mediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
    mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
    //mediaRecorder.setVideoSize(640, 480);
    mediaRecorder.setMaxDuration(-1);
    //mediaRecorder.setVideoFrameRate(16);
    mediaRecorder.setVideoEncodingBitRate(1024 * 1024);

    try
    {
        mediaRecorder.prepare();
        Log.d(TAG, "MediaRecorder Prepared.");
        mediaRecorder.start();

    }
    catch(Exception e)
    {
        Log.e(TAG, FUNCTION + " : " + e.getMessage());
        //releaseMediaRecorder();
    }
}

void releaseMediaRecorder()
{
    final String FUNCTION = "releaseMediaRecorder";

    try
    {
        if(mediaRecorder != null)
        {
            mediaRecorder.stop();
            mediaRecorder.reset();
            mediaRecorder.release();
            mediaRecorder = null;
            camera.lock();
        }
    }

    catch(Exception e)
    {
        Log.e(TAG, FUNCTION + " : " + e.getMessage());
    }
}

void releaseCamera()
{
    final String FUNCTION = "releaseCamera";
    try
    {
        if(camera != null)
        {
            camera.stopPreview();
            camera.release();
        }
        camera = null;
    }
    catch(Exception e)
    {
        Log.e(TAG, FUNCTION + " : " + e.getMessage());
    }
}

@Override
public void onStart()
{
    super.onStart();
}

@Override
public void onPause()
{
    super.onPause();
}

@Override
public void onResume()
{
    super.onResume();
}

@Override
public void onStop()
{
    super.onStop();
}

@Override
public void onDestroy()
{
    super.onDestroy();
}

public class CameraPreview extends SurfaceView
{
    private final static String TAG = "CameraPreview"; 
    SurfaceHolder holder;
    boolean isPreviewDisplaySet;

    public CameraPreview(Context context)
    {
        this(context, (AttributeSet)null);

        this.holder = getHolder();
        this.holder.addCallback(new SurfaceHolderCallback());
    }

    public CameraPreview(Context context, AttributeSet attrSet)
    {
        this(context, attrSet, 0);
    }

    public CameraPreview(Context context, AttributeSet attrSet, int defStyle)
    {
        super(context, attrSet, defStyle);
    }

    private void releaseCamera()
    {
        if(camera != null)
        {
            camera.release();
            camera = null;
        }
    }

    private class SurfaceHolderCallback implements SurfaceHolder.Callback
    {
        @Override
        public void surfaceCreated(SurfaceHolder holder)
        {
            final String FUNCTION = "surfaceCreated";
            Log.d(TAG, "Surface Created.");

            try
            {
                camera.setPreviewDisplay(holder);
                camera.startPreview();

                initMediaRecorder();
                //mediaRecorder.start();
            }
            catch(Exception e)
            {
                Log.e(TAG, FUNCTION + " : " + e.getMessage());
            }
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder)
        {
            final String FUNCTION = "surfaceDestroyed";
            Log.d(TAG, "Surface Destroyed.");

            try
            {
                releaseMediaRecorder();
                releaseCamera();
            }
            catch(Exception e)
            {
                Log.e(TAG, FUNCTION + " : " + e.getMessage());
            } 
        }

        @Override
        public void surfaceChanged(SurfaceHolder holder, int format, int w, int h)
        {
            final String FUNCTION = "surfaceChanged";
            Log.d(TAG, "Surface Changed.");

            if(holder.getSurface() == null)
                return;

            try
            {
                camera.stopPreview();
            }
            catch(Exception e)
            {
                Log.e(TAG, FUNCTION + " : " + e.getMessage());
            }

            try
            {
                camera.setPreviewDisplay(holder);
                camera.startPreview();
            }
            catch(Exception e)
            {
                Log.e(TAG, FUNCTION + " : " + e.getMessage());
            }
        }
    }
}
}
Calling mediaRecorder.prepare() and mediaRecorder.start() raises no errors, and the camera preview is shown. But after a while the preview cuts out, the screen freezes, and an empty file is left at the output path. The issue has also been reported, but has not been fixed yet.

I have tried running the same application on a Galaxy Note N7000 with an Android ICS ROM, and also on a Samsung Galaxy Tab 2 P3100 with a custom Android 4.2 ROM, so it does not seem to be tied to the ROM or to a particular hardware configuration.

If I am missing something or doing something wrong, I would be glad (and certainly relieved) to know.


Thanks...
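As a side note on the streaming part: the code above writes to a local file, but MediaRecorder can instead be handed a socket-backed FileDescriptor, since MPEG-TS is a plain byte stream. A minimal sketch, not from the original post, with the helper name, host, and port as placeholders:

import java.io.IOException;
import java.net.Socket;

import android.media.MediaRecorder;
import android.os.ParcelFileDescriptor;

// Hypothetical helper: routes MediaRecorder output to a TCP socket instead
// of a file. Open the socket off the main thread, and keep the returned
// ParcelFileDescriptor around so it can be closed when recording stops.
class SocketOutput
{
    static ParcelFileDescriptor attach(MediaRecorder recorder, String host, int port)
            throws IOException
    {
        Socket socket = new Socket(host, port);
        ParcelFileDescriptor pfd = ParcelFileDescriptor.fromSocket(socket);
        recorder.setOutputFile(pfd.getFileDescriptor());   // instead of outputFileFD
        return pfd;
    }
}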

I ran into a problem similar to yours, though not on the same device. From my initial investigation, the camera's recording heap (buffers) in the HAL is not released properly when recording MPEG-TS. I am still not sure whether the YUV data ever reaches OMX, though. The deeper cause needs to be checked by each hardware vendor.
Hope it helps. :)

The solution is to apply a patch to the M2TS writer, build libstagefright.so, and push it to the device. In the application, also set the following:

recorder.setAudioSamplingRate(48000); 
recorder.setAudioEncodingBitRate(128000);
Otherwise it will not record the clip completely. I have not looked into why these parameters need to be set, though.
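For reference, a sketch of where those two calls sit in the question's initMediaRecorder() sequence, assuming the same hidden MPEG2TS output format (constant value 8) and the values already used above:

mediaRecorder.setAudioSource(MediaRecorder.AudioSource.DEFAULT);
mediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mediaRecorder.setOutputFormat(8);                      // hidden OUTPUT_FORMAT_MPEG2TS
mediaRecorder.setOutputFile(outputFileFD);
mediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
mediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
mediaRecorder.setAudioSamplingRate(48000);             // added per this answer
mediaRecorder.setAudioEncodingBitRate(128000);         // added per this answer
mediaRecorder.setVideoEncodingBitRate(1024 * 1024);
mediaRecorder.prepare();
mediaRecorder.start();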

The MPEG2TSWriter patch in libstagefright:

diff --git a/media/libstagefright/MPEG2TSWriter.cpp b/media/libstagefright/MPEG2TSWriter.cpp
index c9ed5bb..a42371f 100644
--- a/media/libstagefright/MPEG2TSWriter.cpp
+++ b/media/libstagefright/MPEG2TSWriter.cpp
@@ -411,6 +411,7 @@ void MPEG2TSWriter::SourceInfo::onMessageReceived(const sp<AMessage> &msg) {
                            (const uint8_t *)buffer->data()
                             + buffer->range_offset(),
                            buffer->range_length());
+                    readMore();
                 } else if (buffer->range_length() > 0) {
                     if (mStreamType == 0x0f) {
                         if (!appendAACFrames(buffer)) {

Does your logcat show any errors? If you have the logs, can you share them? Also, do you see this behavior only with MPEG-2 TS, or also when recording to, say, MPEG-4?

Currently the hidden API OUTPUT_FORMAT_MPEG2TS works only for video; if you encode both video and audio, the TS output is not correct. The workaround is to start two MediaRecorders, one handling video and one handling audio, and then multiplex the video and audio frames yourself (sketched below).

Did you ever find a fix or workaround for the problem above?

I don't see any missing code. Change the one line of code in media/libstagefright/MPEG2TSWriter.cpp and call the two APIs above for the AAC encoder settings.
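A rough sketch of that two-recorder workaround, assuming AAC_ADTS (API 16+) for the audio leg and placeholder file descriptors; the actual multiplexing of the two streams is not shown:

// Video-only recorder using the hidden MPEG2TS output format (constant 8).
camera.unlock();
MediaRecorder videoRecorder = new MediaRecorder();
videoRecorder.setCamera(camera);
videoRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
videoRecorder.setOutputFormat(8);                        // video-only TS
videoRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
videoRecorder.setOutputFile(videoFd);                    // placeholder FileDescriptor

// Separate audio-only recorder producing an AAC (ADTS) stream.
MediaRecorder audioRecorder = new MediaRecorder();
audioRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
audioRecorder.setOutputFormat(MediaRecorder.OutputFormat.AAC_ADTS);
audioRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
audioRecorder.setOutputFile(audioFd);                    // placeholder FileDescriptor

videoRecorder.prepare();
audioRecorder.prepare();
videoRecorder.start();
audioRecorder.start();
// The application then has to multiplex the two streams into a single TS itself.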