Warning: file_get_contents(/data/phpspider/zhask/data//catemap/4/video/2.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Android JavaCV-以纵向模式录制视频_Android_Video_Javacv - Fatal编程技术网

Android JavaCV-以纵向模式录制视频

Android JavaCV-以纵向模式录制视频,android,video,javacv,Android,Video,Javacv,我正在尝试使用JavaCV将实时流传输到RTMPnginx服务器。我用这个例子: 我想用这个例子在全屏人像模式下直播。我试着改变了一些事情,但是视频的结果是 这是我对布局、记录器和FFmpegFrameFilter的初始化: private int imageWidth = 720; private int imageHeight = 1280; private void initFilter() { filter = new FFmpegFrameFilter("trans

我正在尝试使用JavaCV将实时流传输到RTMPnginx服务器。我用这个例子:

我想用这个例子在全屏人像模式下直播。我试着改变了一些事情,但是视频的结果是

这是我对布局、记录器和FFmpegFrameFilter的初始化:

// Target portrait output resolution (width x height) for the stream.
// These dimensions size the NV21 Frame buffers in initRecorder(), the
// FFmpegFrameFilter input in initFilter(), and the FFmpegFrameRecorder.
// NOTE(review): the camera delivers landscape preview buffers; surfaceChanged()
// swaps these when picking a supported preview size — confirm the chosen
// preview size actually matches the allocated buffer size.
private int imageWidth = 720;
private int imageHeight = 1280;

private void initFilter() {
        // Rotate each incoming camera frame 90 degrees clockwise so the
        // landscape NV21 preview buffer becomes portrait before it reaches
        // the recorder.
        //
        // BUG FIX: the filter's input dimensions must match the Frame
        // objects pushed into it, which are allocated imageWidth x
        // imageHeight in initRecorder(). The original passed
        // imageWidth-1 / imageHeight-1, creating a size mismatch between
        // the NV21 buffers and the filter input.
        filter = new FFmpegFrameFilter("transpose=clock", imageWidth, imageHeight);
        filter.setPixelFormat(avutil.AV_PIX_FMT_NV21); // default camera preview format on Android
        try {
            filter.start();
        } catch (FrameFilter.Exception e) {
            // Best-effort: log and continue; pushes into an unstarted filter
            // will surface as FrameFilter.Exception in onPreviewFrame.
            e.printStackTrace();
        }
    }

private void initLayout() {
        /* Determine the device screen dimensions. */
        WindowManager wm = (WindowManager) getSystemService(Context.WINDOW_SERVICE);
        Display defaultDisplay = wm.getDefaultDisplay();
        screenWidth = defaultDisplay.getWidth();
        screenHeight = defaultDisplay.getHeight();

        /* Inflate the main activity layout and attach it full-screen. */
        LayoutInflater inflater = (LayoutInflater) getSystemService(Context.LAYOUT_INFLATER_SERVICE);
        topLayout = (FrameLayout) findViewById(R.id.record_layout);
        RelativeLayout preViewLayout = (RelativeLayout) inflater.inflate(R.layout.activity_main, null);
        topLayout.addView(preViewLayout, new RelativeLayout.LayoutParams(screenWidth, screenHeight));

        /* Open the camera and stack the live preview surface on top. */
        cameraDevice = Camera.open();
        Log.i(LOG_TAG, "cameara open");
        cameraView = new CameraView(this, cameraDevice);
        topLayout.addView(cameraView, new RelativeLayout.LayoutParams(screenWidth, screenHeight));
        Log.i(LOG_TAG, "cameara preview start: OK");
    }

private void initRecorder() {

        Log.w(LOG_TAG, "init recorder");

        // Frame storage: a ring buffer when recording a fixed length,
        // otherwise one reusable NV21 frame for live streaming.
        if (RECORD_LENGTH > 0) {
            imagesIndex = 0;
            int capacity = RECORD_LENGTH * frameRate;
            images = new Frame[capacity];
            timestamps = new long[capacity];
            for (int slot = 0; slot < capacity; slot++) {
                images[slot] = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
                timestamps[slot] = -1;
            }
        } else if (yuvImage == null) {
            yuvImage = new Frame(imageWidth, imageHeight, Frame.DEPTH_UBYTE, 2);
            Log.i(LOG_TAG, "create yuvImage");
        }

        Log.i(LOG_TAG, "ffmpeg_url: " + ffmpeg_link);

        // Single video stream into an FLV container (RTMP-friendly).
        recorder = new FFmpegFrameRecorder(ffmpeg_link, imageWidth, imageHeight, 1);
        recorder.setFormat("flv");
        recorder.setSampleRate(sampleAudioRateInHz);
        // Set in the surface changed method
        recorder.setFrameRate(frameRate);

        Log.i(LOG_TAG, "recorder initialize success");

        // Prepare (but do not start) the audio capture thread.
        audioRecordRunnable = new AudioRecordRunnable();
        audioThread = new Thread(audioRecordRunnable);
        runAudioThread = true;
    }
private int imageWidth=720;
专用int imageHeight=1280;
私有void initFilter(){
过滤器=新的FFmpegFrameFilter(“转置=时钟”,imageWidth-1,imageHeight-1);
filter.setPixelFormat(avutil.AV_PIX_FMT_NV21);//Android上的默认相机格式
试一试{
filter.start();
}catch(FrameFilter.e异常){
e、 printStackTrace();
}
}
私有void initLayout(){
/*获取屏幕的大小*/
Display Display=((WindowManager)getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();
screenWidth=display.getWidth();
screenHeight=display.getHeight();
RelativeLayout.LayoutParams layoutParam=null;
LayoutFlater myInflate=null;
myInflate=(LayoutFlater)getSystemService(Context.LAYOUT\u充气器\u服务);
topLayout=(框架布局)findViewById(R.id.record\u布局);
RelativeLayout preViewLayout=(RelativeLayout)myInflate.inflate(R.layout.activity_main,null);
layoutParam=新的RelativeLayout.LayoutParams(屏幕宽度、屏幕高度);
topLayout.addView(预览布局、布局参数);
/*添加相机视图*/
layoutParam=新的RelativeLayout.LayoutParams(屏幕宽度、屏幕高度);
cameraDevice=Camera.open();
Log.i(Log_标签,“Cameraa open”);
cameraView=新cameraView(此为cameraDevice);
topLayout.addView(cameraView、layoutParam);
Log.i(Log_标签,“Camera预览开始:OK”);
}
私有void initRecorder(){
Log.w(Log_标签,“init recorder”);
如果(记录长度>0){
imagesIndex=0;
图像=新帧[记录长度*帧速率];
时间戳=新长[images.length];
对于(int i=0;i
这是surfaceView的代码

class CameraView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {

        private SurfaceHolder mHolder;
        private Camera mCamera;

        /** Re-attach this view as the surface callback (used after a pause). */
        public void addCallback()
        {
            mHolder.addCallback(CameraView.this);
        }

        /** Detach this view from surface lifecycle events. */
        public void removeCallback()
        {
            mHolder.removeCallback(CameraView.this);
        }

        /**
         * Wires the supplied (already-opened) camera to this surface and
         * registers for both surface lifecycle and preview-frame callbacks.
         */
        public CameraView(Context context, Camera camera) {
            super(context);
            Log.w("camera","camera view");
            mCamera = camera;
            mHolder = getHolder();
            mHolder.addCallback(CameraView.this);
            // Required on pre-3.0 devices for camera preview surfaces.
            mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
            mCamera.setPreviewCallback(CameraView.this);
        }

        @Override
        public void surfaceCreated(SurfaceHolder holder) {
            Log.v("StreamActivity","Surface Created");
            if (!paused) {
                try {
                    stopPreview();
                    mCamera.setPreviewDisplay(holder);
                    // Rotate the on-screen preview only; this does NOT rotate
                    // the NV21 buffers delivered to onPreviewFrame.
                    mCamera.setDisplayOrientation(90);
                } catch (IOException exception) {
                    mCamera.release();
                    mCamera = null;
                }
            }
        }

        public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
            Log.v("StreamActivity", "surfaceChanged");
            stopPreview();

            Camera.Parameters camParams = mCamera.getParameters();
            List<Camera.Size> sizes = camParams.getSupportedPreviewSizes();
            // Sort the list in ascending order (by pixel count)
            Collections.sort(sizes, new Comparator<Camera.Size>() {

                public int compare(final Camera.Size a, final Camera.Size b) {
                    return a.width * a.height - b.width * b.height;
                }
            });
            // Supported preview sizes are landscape, so swap the portrait
            // target dimensions before matching.
            int imageWidth2 = imageHeight;
            int imageHeight2 = imageWidth;
            // Pick the first preview size that is equal or bigger, or pick the last (biggest) option if we cannot
            // reach the initial settings of imageWidth/imageHeight.
            for (int i = 0; i < sizes.size(); i++) {
                if ((sizes.get(i).width >= imageWidth2 && sizes.get(i).height >= imageHeight2) || i == sizes.size() - 1) {
                    imageWidth2 = sizes.get(i).width;
                    imageHeight2 = sizes.get(i).height;
                    Log.v(LOG_TAG, "Changed to supported resolution: " + imageWidth2 + "x" + imageHeight2);
                    break;
                }
            }

            // NOTE(review): if the chosen preview size differs from
            // imageHeight x imageWidth, the NV21 buffer put into yuvImage in
            // onPreviewFrame will not match the Frame's allocated size —
            // verify against the device's supported sizes.
            // camParams.set("rotation",90);

            camParams.setPreviewSize(imageWidth2, imageHeight2);
            camParams.setPreviewFrameRate(frameRate);
            mCamera.setParameters(camParams);

            // Set the holder (which might have changed) again
            try {
                mCamera.setPreviewDisplay(holder);
                mCamera.setPreviewCallback(CameraView.this);
                startPreview();
            } catch (Exception e) {
                Log.e(LOG_TAG, "Could not set preview display in surfaceChanged");
            }
        }

        @Override
        public void surfaceDestroyed(SurfaceHolder holder) {
            Log.v("StreamActivity","Surface Destroyed");
            try {
                // BUG FIX: the original called mHolder.addCallback(null),
                // which does not detach this view (and may NPE inside the
                // SurfaceHolder implementation). Remove the callback instead.
                mHolder.removeCallback(CameraView.this);
                mCamera.setPreviewCallback(null);
            } catch (RuntimeException e) {
                // The camera has probably just been released, ignore.
            }
        }

        /** Start the camera preview if it is not already running. */
        public void startPreview() {
            if (!isPreviewOn && mCamera != null) {
                isPreviewOn = true;
                mCamera.startPreview();
            }
        }

        /** Stop the camera preview if it is currently running. */
        public void stopPreview() {
            if (isPreviewOn && mCamera != null) {
                isPreviewOn = false;
                mCamera.stopPreview();
            }
        }

        @Override
        public void onPreviewFrame(byte[] data, Camera camera) {
            //Log.v("StreamActivity","onPreviewFrame");
            // Gate video on audio capture so both streams share startTime.
            if (audioRecord == null || audioRecord.getRecordingState() != AudioRecord.RECORDSTATE_RECORDING) {
                startTime = System.currentTimeMillis();
                return;
            }
            if (RECORD_LENGTH > 0) {
                int i = imagesIndex++ % images.length;
                yuvImage = images[i];
                // Microseconds since capture start (ms * 1000).
                timestamps[i] = 1000 * (System.currentTimeMillis() - startTime);
            }
            /* get video data */
            if (yuvImage != null && recording) {
                ((ByteBuffer)yuvImage.image[0].position(0)).put(data);

                if (RECORD_LENGTH <= 0) try {
                    Log.v(LOG_TAG,"Writing Frame");
                    long t = 1000 * (System.currentTimeMillis() - startTime);
                    // Timestamps must be monotonic for the recorder.
                    if (t > recorder.getTimestamp()) {
                        recorder.setTimestamp(t);
                    }

                    // Rotate to portrait via the transpose filter, then
                    // record every frame the filter produces.
                    filter.push(yuvImage);

                    Frame frame;
                    while ((frame = filter.pull()) != null) {
                        recorder.record(frame,avutil.AV_PIX_FMT_NV21);
                    }

                    //recorder.record(yuvImage);1
                } catch (FFmpegFrameRecorder.Exception e) {
                    Log.v(LOG_TAG, e.getMessage());
                    e.printStackTrace();
                } catch (FrameFilter.Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
类CameraView扩展了SurfaceView实现了SurfaceHolder.Callback、PreviewCallback{ 私人地勤人员; 私人摄像机麦卡梅拉; 公共void addCallback() { mHolder.addCallback(CameraView.this); } public void removeCallback() { mHolder.removeCallback(CameraView.this); } 公共摄影机视图(上下文、摄影机){ 超级(上下文); Log.w(“摄像机”、“摄像机视图”); mCamera=摄像机; mHolder=getHolder(); mHolder.addCallback(CameraView.this); mHolder.setType(SurfaceHolder.SURFACE\u TYPE\u PUSH\u缓冲区); setPreviewCallback(CameraView.this); } @凌驾 已创建的公共空白表面(表面持有人){ Log.v(“流活动”、“创建的表面”); 如果(!暂停){ 试一试{ 回顾(); mCamera.setPreviewDisplay(支架); mCamera.setDisplayOrientation(90); }捕获(IOException异常){ mCamera.release(); mCamera=null; } } } 公共空白表面更改(表面文件夹持有者、整型格式、整型宽度、整型高度){ Log.v(“StreamActivity”、“surfaceChanged”); 回顾(); Camera.Parameters camParams=mCamera.getParameters(); 列表大小=camParams.getSupportedPreviewSizes(); //按升序对列表排序 Collections.sort(大小、新比较器(){ 公共整数比较(最终相机。尺寸a,最终相机。尺寸b){ 返回a.width*a.height-b.width*b.height; } }); int imageWidth2=图像高度; int IMAGEHEIGHT 2=图像宽度; //选择第一个相等或更大的预览大小,如果无法选择,则选择最后一个(最大)选项 //达到imageWidth/imageHeight的初始设置。 对于(int i=0;i=imageWidth2&&size.get(i).height>=imageHeight2)| i==size.size()-1){ imageWidth2=大小。获取(i)。宽度; imageHeight2=尺寸。获取(i)。高度; Log.v(Log_标签,“更改为支持的分辨率:”+imageWidth2+“x”+imageHeight2); 打破 } } // //camParams.集(“旋转”,90); camParams.setPreviewSize(图像宽度2,图像高度2); camParams.setPreviewFrameRate(帧速率); mCamera.setParameters(camParams); //再次设置支架(可能已更改) 试一试{ mCamera.setPreviewDisplay(支架);