Warning: file_get_contents(/data/phpspider/zhask/data//catemap/9/ios/109.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
在Swift for iOS中使用AVFoundation录制视频_Ios_Swift_Video Capture_Avcapturemoviefileoutput_Avcaptureoutput - Fatal编程技术网

在Swift for iOS中使用AVFoundation录制视频

在Swift for iOS中使用AVFoundation录制视频,ios,swift,video-capture,avcapturemoviefileoutput,avcaptureoutput,Ios,Swift,Video Capture,Avcapturemoviefileoutput,Avcaptureoutput,我无法使用提供的代码录制视频。我正在使用为录制视频而创建的示例代码 具体地说,我无法编译这一行,但没有以下错误:“无法将“ViewController”类型的值转换为指定的“AVCaptureFileOutputRecordingDelegate”类型 var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self 该行位于iAction函数中: @IBAction func RecordButtonPressed(

我无法使用提供的代码录制视频。我正在使用为录制视频而创建的示例代码

具体地说,我无法编译这一行,但没有以下错误:“无法将“ViewController”类型的值转换为指定的“AVCaptureFileOutputRecordingDelegate”类型

var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self
该行位于iAction函数中:

    @IBAction func RecordButtonPressed(_ sender: Any) {

    var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self

    var videoFileOutput = AVCaptureMovieFileOutput()
    self.captureSession.addOutput(videoFileOutput)

    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let filePath = documentsURL.appendingPathComponent("temp")

    videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)

    RecordButton.setTitle("Stop", for: .normal);

}
其余代码如下所示:

import UIKit
import AVFoundation
import Darwin




class ViewController: UIViewController {



@IBOutlet weak var CameraView: UIImageView!

@IBOutlet weak var RecordButton: UIButton!

@IBOutlet weak var SelectFrButton: UIButton!

@IBOutlet weak var ISOslider: UISlider!

@IBOutlet weak var SSslider: UISlider!

@IBOutlet weak var ISOtextfield: UITextField!

@IBOutlet weak var SStextfield: UITextField!

@IBOutlet weak var TorchSlider: UISlider!

@IBOutlet weak var Torchtextfield: UITextField!

var captureSession = AVCaptureSession();
var DisplaySessionOutput = AVCaptureVideoDataOutput();
var SaveSessionOutput = AVCaptureMovieFileOutput();
var previewLayer = AVCaptureVideoPreviewLayer();
var CaptureDevice:AVCaptureDevice? = nil;
var CurrentTorchLevel:Float = 0.5;


override func viewDidLoad() {
    super.viewDidLoad()

    captureSession.sessionPreset = AVCaptureSessionPresetHigh
    // Loop through all the capture devices on this phone

    let deviceDiscoverySession = AVCaptureDeviceDiscoverySession(deviceTypes: [AVCaptureDeviceType.builtInDuoCamera, AVCaptureDeviceType.builtInTelephotoCamera,AVCaptureDeviceType.builtInWideAngleCamera], mediaType: AVMediaTypeVideo, position: AVCaptureDevicePosition.unspecified)

    for device in (deviceDiscoverySession?.devices)! {
        if(device.position == AVCaptureDevicePosition.back){
            do{

                try device.lockForConfiguration()


                device.setExposureModeCustomWithDuration(CMTimeMake(1, 30), iso: 50, completionHandler: { (time) in

                    // Set text and sliders to correct levels
                    self.ISOslider.maximumValue = (self.CaptureDevice?.activeFormat.maxISO)!;
                    self.ISOslider.minimumValue = (self.CaptureDevice?.activeFormat.minISO)!;

                    self.SSslider.maximumValue = Float((self.CaptureDevice?.activeFormat.maxExposureDuration.seconds)!);
                    self.SSslider.minimumValue = Float((self.CaptureDevice?.activeFormat.minExposureDuration.seconds)!);

                    self.ISOtextfield.text = device.iso.description;
                    self.ISOslider.setValue(device.iso, animated: false)

                    self.SStextfield.text = device.exposureDuration.seconds.description;
                    self.SSslider.setValue(Float(device.exposureDuration.seconds), animated: false);

                    self.TorchSlider.minimumValue = 0.01;
                    self.TorchSlider.maximumValue = 1;
                    self.TorchSlider.value = 0.5;
                    self.Torchtextfield.text = "0.5";
                })




                //Turn torch on

                if (device.torchMode == AVCaptureTorchMode.on) {
                    device.torchMode = AVCaptureTorchMode.off
                } else {
                    try device.setTorchModeOnWithLevel(1.0)

                }

                device.unlockForConfiguration();

                CaptureDevice = device;

                let input = try AVCaptureDeviceInput(device: CaptureDevice)
                if(captureSession.canAddInput(input)){
                    captureSession.addInput(input);

                    if(captureSession.canAddOutput(DisplaySessionOutput)){
                        captureSession.addOutput(DisplaySessionOutput);
                        previewLayer = AVCaptureVideoPreviewLayer(session: captureSession);
                        previewLayer.videoGravity = AVLayerVideoGravityResizeAspectFill;
                        previewLayer.connection.videoOrientation = AVCaptureVideoOrientation.portrait;
                        CameraView.layer.addSublayer(previewLayer);
                    }
                }
            }
            catch{
                print("exception!");
            }
        }
    }

    CameraView.transform = CGAffineTransform.init(scaleX: -1, y: -1);

    captureSession.startRunning()


}

    // Do any additional setup after loading the view, typically from a nib.


override func viewDidLayoutSubviews() {

    previewLayer.frame = CameraView.bounds

}


override func didReceiveMemoryWarning() {
    super.didReceiveMemoryWarning()
    // Dispose of any resources that can be recreated.
}


@IBAction func RecordButtonPressed(_ sender: Any) {

    var recordingDelegate:AVCaptureFileOutputRecordingDelegate? = self

    var videoFileOutput = AVCaptureMovieFileOutput()
    self.captureSession.addOutput(videoFileOutput)

    let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask)[0]
    let filePath = documentsURL.appendingPathComponent("temp")

    videoFileOutput.startRecording(toOutputFileURL: filePath, recordingDelegate: recordingDelegate)

    RecordButton.setTitle("Stop", for: .normal);

}

@IBAction func ISOvaluechanged(_ sender: Any) {

    SetVideoSettings(isolevel: ISOslider.value, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel)
}

@IBAction func SSvaluechanged(_ sender: Any) {

    let time = CMTimeMake(Int64(self.SSslider.value * 1000000),1000000);
    SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: time, TorchLevel: CurrentTorchLevel)
}

@IBAction func ISOtextchanged(_ sender: Any) {

}

@IBAction func SStextchanged(_ sender: Any) {

    //let time = CMTimeMake(Int64(exposurelevel * 100000),100000);

}


@IBAction func ChooseButtonPressed(_ sender: Any) {
}

func ShowAlert(AlertMessage: String) {

    let alertController = UIAlertController(title: "Alert", message: AlertMessage, preferredStyle: .alert)

    self.present(alertController, animated: true, completion:nil)

    let OKAction = UIAlertAction(title: "OK", style: .default) { (action:UIAlertAction) in
    }

    alertController.addAction(OKAction)

}

@IBAction func TorchSliderChanged(_ sender: Any) {

    CurrentTorchLevel = self.TorchSlider.value;
    SetVideoSettings(isolevel: AVCaptureISOCurrent, exposurelevel: AVCaptureExposureDurationCurrent, TorchLevel: CurrentTorchLevel);
}

func SetVideoSettings(isolevel: Float, exposurelevel: CMTime, TorchLevel: Float) {

    var newISOval = isolevel;
    var newSSval  = exposurelevel;
    let newTorchVal = TorchLevel;

    if(newISOval == FLT_MAX){
        // Pass through 0,0 for maintaining current SS.
    }

    else if(newISOval > (self.CaptureDevice?.activeFormat.maxISO)!) {

        newISOval = (self.CaptureDevice?.activeFormat.maxISO)!;
    }

    else if(newISOval < (self.CaptureDevice?.activeFormat.minISO)!) {

        newISOval = (self.CaptureDevice?.activeFormat.minISO)!;
    }

    if(newSSval.timescale == 0){
        // Pass through 0,0 for maintaining current SS.
    }

    else if(CMTimeCompare(newSSval, (self.CaptureDevice?.activeFormat.maxExposureDuration)!)  > 0) {

        newSSval = (self.CaptureDevice?.activeFormat.maxExposureDuration)!;
    }

    else if(CMTimeCompare(newSSval,(self.CaptureDevice?.activeFormat.minExposureDuration)!) < 0) {

        newSSval = (self.CaptureDevice?.activeFormat.minExposureDuration)!;
    }



      do {

        try self.CaptureDevice?.lockForConfiguration();

        try CaptureDevice?.setTorchModeOnWithLevel(newTorchVal);

        CaptureDevice?.setExposureModeCustomWithDuration(newSSval, iso: newISOval, completionHandler: { (time) in

            // Set text and sliders to correct levels
            self.ISOtextfield.text = self.CaptureDevice?.iso.description;
            self.ISOslider.setValue((self.CaptureDevice?.iso)!, animated: false)

            self.SStextfield.text = self.CaptureDevice?.exposureDuration.seconds.description;
            self.SSslider.setValue(Float((self.CaptureDevice?.exposureDuration.seconds)!), animated: false);

            self.TorchSlider.setValue(self.CurrentTorchLevel, animated: false);
            self.Torchtextfield.text = self.CurrentTorchLevel.description;

        })

        self.CaptureDevice?.unlockForConfiguration();

    }

    catch {
        ShowAlert(AlertMessage: "Unable to set camera settings");
        self.CaptureDevice?.unlockForConfiguration();


    }

}

func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
    return
}

func captureOutput(captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAtURL fileURL: NSURL!, fromConnections connections: [AnyObject]!) {
    return
}

}
导入UIKit
进口AVF基金会
导入达尔文
类ViewController:UIViewController{
@IBVAR摄像机视图:UIImageView!
@IBVAR记录按钮:UIButton!
@IBOUTLE弱var SelectFrButton:UIButton!
@IBVAR Isolader:UISlider!
@iBlider:UISlider!
@IBOutlet弱var ISOtextfield:UITextField!
@IBOutlet弱var SStextfield:UITextField!
@IBVAR火炬滑翔机:UISlider!
@IBEXTEXTFIELD:UITextField!
var captureSession=AVCaptureSession();
var DisplaySessionOutput=avcapturedevideodataoutput();
var SaveSessionOutput=AVCaptureMovieFileOutput();
var previewLayer=AVCaptureVideoPreviewLayer();
var CaptureDevice:AVCaptureDevice?=nil;
var CurrentTorchLevel:浮动=0.5;
重写func viewDidLoad(){
super.viewDidLoad()
captureSession.sessionPreset=AVCaptureSessionPresetHigh
//循环浏览此手机上的所有捕获设备
let deviceDiscoverySession=AVCaptureDeviceDiscoverySession(设备类型:[AVCaptureDeviceType.BuiltInDoocamera,AVCaptureDeviceType.BuiltInDoocamera,AVCaptureDeviceType.builtInWideAngleCamera],媒体类型:AVMediaTypeVideo,位置:AVCaptureDevicePosition.未指定)
对于(deviceDiscoverySession?.devices)中的设备{
if(device.position==AVCaptureDevicePosition.back){
做{
请尝试device.lockForConfiguration()
setExposureModeCustomWithDuration(CMTimeMake(1,30),iso:50,completionHandler:{(时间)in
//将文本和滑块设置为正确的级别
self.ISOslider.maximumValue=(self.CaptureDevice?.activeFormat.maxISO)!;
self.ISOslider.minimumValue=(self.CaptureDevice?.activeFormat.minISO)!;
self.SSslider.maximumValue=浮点((self.CaptureDevice?.activeFormat.maxExposureDuration.seconds)!);
self.SSslider.minimumValue=Float((self.CaptureDevice?.activeFormat.minExposureDuration.seconds)!);
self.ISOtextfield.text=device.iso.description;
self.ISOslider.setValue(device.iso,动画:false)
self.SStextfield.text=device.exposureDuration.seconds.description;
self.SSslider.setValue(Float(device.exposureDuration.seconds),动画:false);
自扭滑环最小值=0.01;
self.TorchSlider.maximumValue=1;
self.TorchSlider.value=0.5;
self.Torchtextfield.text=“0.5”;
})
//打开手电筒
if(device.torchMode==AVCaptureTorchMode.on){
device.torchMode=AVCaptureTorchMode.off
}否则{
请尝试使用device.SetTorchModelWithLevel(1.0)
}
device.unlockForConfiguration();
CaptureDevice=设备;
让输入=尝试AVCaptureDeviceInput(设备:CaptureDevice)
if(captureSession.canAddInput(输入)){
captureSession.addInput(输入);
if(captureSession.CanadOutput(DisplaySessionOutput)){
addOutput(DisplaySessionOutput);
previewLayer=AVCaptureVideoPreviewLayer(会话:captureSession);
previewLayer.videoGravity=AVLayerVideoGravityResizeAspectFill;
previewLayer.connection.videoOrientation=AVCaptureVideoOrientation.Grait;
CameraView.layer.addSublayer(预览层);
}
}
}
抓住{
打印(“例外!”);
}
}
}
CameraView.transform=CGAffineTransform.init(scaleX:-1,y:-1);
captureSession.startRunning()
}
//加载视图后,通常从nib执行任何其他设置。
重写func viewdilayoutsubviews(){
previewLayer.frame=CameraView.bounds
}
重写函数didReceiveMemoryWarning(){
超级。我收到了记忆警告()
//处置所有可以重新创建的资源。
}
@iAction func RecordButton已按下(\发送方:任意){
var recordingDelegate:AVCaptureFileOutputRecordingDelegate?=self
var videoFileOutput=AVCaptureMovieFileOutput()
self.captureSession.addOutput(videoFileOutput)
让documentsURL=FileManager.default.URL(对于:.documentDirectory,在:.userDomainMask中)[0]
让filePath=documentsURL.appendingPathComponent(“临时”)
videoFileOutput.startRecording(toOutputFileURL:filePath,recordingDelegate:recordingDelegate)
RecordButton.setTitle(“停止”,用于:。正常);
}
@iAction func ISOvaluechanged(\发送方:任意){
SetVideoSettings(isolevel:ISOslider.value,exposurelevel:AVCaptureExposureDurationCurrent,TorchLevel:CurrentTorchLevel)
}
@iAction func SSvaluechanged(\发送方:任意){
让时间=CMTimeMake(Int64(self.SSslider.value*1000000),1000000);
SetVideoSettings(isolevel:AVCaptureISOCurrent,曝光级别:时间,火炬级别:CurrentTorchLevel)
}
@iAction func ISOtextchanged(u发件人:任意){
}
@iAction func SStextchanged(uu发件人:任意){
//让时间=CMTimeMake(Int64(曝光级别*100000),100000);
}
@iAction func ChooseButtonPressed(uu发送方:任意){
}
func ShowAlert(AlertMessage:String){
让alertController=UIAlertController(标题:“警报”,消息:AlertMessage,首选样式:。警报)
self.present(alertController,动画:true,完成:nil)
让OKAction=UIAlertAction(标题:“OK”,
   class ViewController:UIViewController {
      //your methods as usual but remove the final two methods and add them to the extension that follows. Those methods are what will make you conform to AVCaptureFileOutputRecordingDelegate
   }

  extension ViewController: AVCaptureFileOutputRecordingDelegate {
    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {

}

func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {

}
  }
    class ViewController:UIViewController, AVCaptureFileOutputRecordingDelegate {
  //your methods as usual but you keep your final two methods this time 

    func capture(_ captureOutput: AVCaptureFileOutput!, didStartRecordingToOutputFileAt fileURL: URL!, fromConnections connections: [Any]!) {

}

func capture(_ captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAt outputFileURL: URL!, fromConnections connections: [Any]!, error: Error!) {

}
    }