
iOS: Record video with AVCaptureSession, add a CIFilter to it, and save it to the photo album


I want to build a custom video recorder in my app. I can already record a video and save it, but I want to apply a filter while the video is recording and save the video with the new filter to the photo album. This is my code for recording the video and saving it:

let captureSession = AVCaptureSession()
let fileOutput = AVCaptureMovieFileOutput()

func initVideoRecording() {
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print("error in audio")
    }

    // Note: this local session shadows the captureSession property above;
    // everything below is attached to the local one.
    let session = AVCaptureSession()
    session.beginConfiguration()
    session.sessionPreset = AVCaptureSessionPresetMedium

    let videoLayer = AVCaptureVideoPreviewLayer(session: session)
    videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    videoLayer.frame = myImage.bounds
    myImage.layer.addSublayer(videoLayer)

    let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    do {
        let input = try AVCaptureDeviceInput(device: backCamera)
        let audioInput = try AVCaptureDeviceInput(device: audio)
        session.addInput(input)
        session.addInput(audioInput)
    } catch {
        print("can't access camera")
        return
    }

    session.addOutput(fileOutput)
    session.commitConfiguration()
    session.startRunning()
}

@IBAction func recordFunc() {
    if fileOutput.recording {
        myButton.setTitle("record", forState: .Normal)
        fileOutput.stopRecording()
    } else {
        let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("\(getCurrentDate())-capturedvideo.mp4")
        fileOutput.startRecordingToOutputFileURL(fileUrl, recordingDelegate: self)
        myButton.setTitle("stop", forState: .Normal)
    }
}

func captureOutput(captureOutput: AVCaptureFileOutput!, didFinishRecordingToOutputFileAtURL outputFileURL: NSURL!, fromConnections connections: [AnyObject]!, error: NSError!) {
    // Save the recorded video to the photos album.
    UISaveVideoAtPathToSavedPhotosAlbum(outputFileURL.path!, self, "video:didFinishSavingWithError:contextInfo:", nil)
}
I tried using AVCaptureVideoDataOutput, and in its delegate I use this code:

func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
    connection.videoOrientation = AVCaptureVideoOrientation.Portrait

    let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
    let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

    let comicEffect = CIFilter(name: "CIComicEffect")
    comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)

    let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)

    // The filtered frame is only pushed to the preview image view;
    // nothing here writes it to a file.
    dispatch_async(dispatch_get_main_queue()) {
        self.myImage.image = filteredImage
    }
}
With this code the filter is only displayed in the preview; it is not recorded.

========== This is the solution to my question ==========
Please note that this code uses Swift 2 and Xcode 7.3.

let captureSession = AVCaptureSession()
let videoOutput = AVCaptureVideoDataOutput()
let audioOutput = AVCaptureAudioDataOutput()

var adapter: AVAssetWriterInputPixelBufferAdaptor!
var record = false
var videoWriter: AVAssetWriter!
var writerInput: AVAssetWriterInput!
var audioWriterInput: AVAssetWriterInput!
var lastPath = ""
var starTime = kCMTimeZero

var outputSize = CGSizeMake(UIScreen.mainScreen().bounds.width, UIScreen.mainScreen().bounds.height)

override func viewDidAppear(animated: Bool) {
    super.viewDidAppear(animated)
    video()
}

func video() {
    do {
        try AVAudioSession.sharedInstance().setCategory(AVAudioSessionCategoryRecord)
        try AVAudioSession.sharedInstance().setActive(true)
    } catch {
        print("error in audio")
    }

    captureSession.beginConfiguration()
    captureSession.sessionPreset = AVCaptureSessionPresetMedium

    let videoLayer = AVCaptureVideoPreviewLayer(session: captureSession)
    videoLayer.videoGravity = AVLayerVideoGravityResizeAspectFill
    //videoLayer.frame = myImage.bounds
    //myImage.layer.addSublayer(videoLayer)
    view.layer.addSublayer(videoLayer)

    let backCamera = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeVideo)
    let audio = AVCaptureDevice.defaultDeviceWithMediaType(AVMediaTypeAudio)
    do {
        let input = try AVCaptureDeviceInput(device: backCamera)
        let audioInput = try AVCaptureDeviceInput(device: audio)
        captureSession.addInput(input)
        captureSession.addInput(audioInput)
    } catch {
        print("can't access camera")
        return
    }

    // Both outputs share one serial queue, so the video and audio
    // callbacks never run concurrently.
    let queue = dispatch_queue_create("sample buffer delegate", DISPATCH_QUEUE_SERIAL)
    videoOutput.setSampleBufferDelegate(self, queue: queue)
    audioOutput.setSampleBufferDelegate(self, queue: queue)

    captureSession.addOutput(videoOutput)
    captureSession.addOutput(audioOutput)
    captureSession.commitConfiguration()

    captureSession.startRunning()
}


@IBAction func recordFunc() {
    if record {
        myButton.setTitle("record", forState: .Normal)
        record = false
        self.writerInput.markAsFinished()
        audioWriterInput.markAsFinished()
        self.videoWriter.finishWritingWithCompletionHandler { () -> Void in
            print("FINISHED!!!!!")
            UISaveVideoAtPathToSavedPhotosAlbum(self.lastPath, self, "video:didFinishSavingWithError:contextInfo:", nil)
        }
    } else {
        let fileUrl = NSURL(fileURLWithPath: NSTemporaryDirectory()).URLByAppendingPathComponent("\(getCurrentDate())-capturedvideo.MP4")

        lastPath = fileUrl.path!
        videoWriter = try? AVAssetWriter(URL: fileUrl, fileType: AVFileTypeMPEG4)

        let outputSettings = [AVVideoCodecKey : AVVideoCodecH264, AVVideoWidthKey : NSNumber(float: Float(outputSize.width)), AVVideoHeightKey : NSNumber(float: Float(outputSize.height))]

        writerInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: outputSettings)
        writerInput.expectsMediaDataInRealTime = true
        audioWriterInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: DejalActivityView.getAudioDictionary() as? [String:AnyObject])

        videoWriter.addInput(writerInput)
        videoWriter.addInput(audioWriterInput)

        adapter = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: DejalActivityView.getAdapterDictionary() as? [String:AnyObject])

        videoWriter.startWriting()
        videoWriter.startSessionAtSourceTime(starTime)

        record = true
        myButton.setTitle("stop", forState: .Normal)
    }
}

func getCurrentDate() -> String {
    let format = NSDateFormatter()
    format.dateFormat = "dd-MM-yyyy hh:mm:ss"
    format.locale = NSLocale(localeIdentifier: "en")
    return format.stringFromDate(NSDate())
}


extension newCustomCameraViewController: AVCaptureVideoDataOutputSampleBufferDelegate, AVCaptureAudioDataOutputSampleBufferDelegate {

    func captureOutput(captureOutput: AVCaptureOutput!, didOutputSampleBuffer sampleBuffer: CMSampleBuffer!, fromConnection connection: AVCaptureConnection!) {
        starTime = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

        if captureOutput == videoOutput {
            connection.videoOrientation = AVCaptureVideoOrientation.Portrait

            let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer)
            let cameraImage = CIImage(CVPixelBuffer: pixelBuffer!)

            let comicEffect = CIFilter(name: "CIHexagonalPixellate")
            comicEffect!.setValue(cameraImage, forKey: kCIInputImageKey)

            let filteredImage = UIImage(CIImage: comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)
            //let filteredImage = UIImage(CIImage: cameraImage)

            if self.record == true {
                // Note: this creates a new queue on every frame; reusing a
                // single stored serial queue would be cheaper.
                dispatch_sync(dispatch_queue_create("sample buffer append", DISPATCH_QUEUE_SERIAL), {
                    if self.record == true {
                        if self.writerInput.readyForMoreMediaData {
                            let bo = self.adapter.appendPixelBuffer(DejalActivityView.pixelBufferFromCGImage(self.convertCIImageToCGImage(comicEffect!.valueForKey(kCIOutputImageKey) as! CIImage)).takeRetainedValue() as CVPixelBufferRef, withPresentationTime: self.starTime)
                            print("video is \(bo)")
                        }
                    }
                })
            }

            dispatch_async(dispatch_get_main_queue()) {
                self.myImage.image = filteredImage
            }
        } else if captureOutput == audioOutput {
            if self.record == true {
                let bo = audioWriterInput.appendSampleBuffer(sampleBuffer)
                print("audio is \(bo)")
            }
        }
    }

    func convertCIImageToCGImage(inputImage: CIImage) -> CGImage! {
        // Note: creating a CIContext per frame is expensive; a single
        // shared context would perform better.
        let context: CIContext? = CIContext(options: nil)
        if context != nil {
            return context!.createCGImage(inputImage, fromRect: inputImage.extent)
        }
        return nil
    }

    func video(videoPath: NSString, didFinishSavingWithError error: NSError?, contextInfo info: AnyObject) {
        var title = "Success"
        var message = "Video was saved"

        if error != nil {
            title = "Error"
            message = "Video failed to save"
        }

        let alert = UIAlertController(title: title, message: message, preferredStyle: .Alert)
        alert.addAction(UIAlertAction(title: "OK", style: UIAlertActionStyle.Cancel, handler: nil))
        presentViewController(alert, animated: true, completion: nil)
    }
}
These methods from DejalActivityView are in Objective-C, and I was not able to convert them to Swift. If anyone can convert them, please edit my code and translate them:

+ (CVPixelBufferRef)pixelBufferFromCGImage:(CGImageRef)image size:(CGSize)size
{
    NSDictionary *options = [NSDictionary dictionaryWithObjectsAndKeys:
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGImageCompatibilityKey,
                             [NSNumber numberWithBool:YES], kCVPixelBufferCGBitmapContextCompatibilityKey, nil];
    CVPixelBufferRef pxbuffer = NULL;
    CVReturn status = CVPixelBufferCreate(kCFAllocatorDefault, size.width, size.height, kCVPixelFormatType_32ARGB, (__bridge CFDictionaryRef) options, &pxbuffer);
    // CVReturn status = CVPixelBufferPoolCreatePixelBuffer(NULL, adaptor.pixelBufferPool, &pxbuffer);

    NSParameterAssert(status == kCVReturnSuccess && pxbuffer != NULL);

    CVPixelBufferLockBaseAddress(pxbuffer, 0);
    void *pxdata = CVPixelBufferGetBaseAddress(pxbuffer);
    NSParameterAssert(pxdata != NULL);

    CGColorSpaceRef rgbColorSpace = CGColorSpaceCreateDeviceRGB();
    CGContextRef context = CGBitmapContextCreate(pxdata, size.width, size.height, 8, 4*size.width, rgbColorSpace, kCGImageAlphaPremultipliedFirst);
    NSParameterAssert(context);

    CGContextDrawImage(context, CGRectMake(0, 0, CGImageGetWidth(image), CGImageGetHeight(image)), image);

    CGColorSpaceRelease(rgbColorSpace);
    CGContextRelease(context);

    CVPixelBufferUnlockBaseAddress(pxbuffer, 0);

    return pxbuffer;
}

+(NSDictionary *)getAdapterDictionary{


    NSDictionary *sourcePixelBufferAttributesDictionary = [NSDictionary dictionaryWithObjectsAndKeys:
                                                           [NSNumber numberWithInt:kCVPixelFormatType_32ARGB], kCVPixelBufferPixelFormatTypeKey, nil];

    return sourcePixelBufferAttributesDictionary;
}

+(NSDictionary *) getAudioDictionary{
    AudioChannelLayout acl;
    bzero( &acl, sizeof(acl));
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono;


    NSDictionary* audioOutputSettings = nil;
    audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
                           [ NSNumber numberWithInt: kAudioFormatMPEG4AAC ], AVFormatIDKey,
                           //[ NSNumber numberWithInt: 16 ], AVEncoderBitDepthHintKey,
                           [ NSNumber numberWithFloat: 44100.0 ], AVSampleRateKey,
                           [ NSNumber numberWithInt: 1 ], AVNumberOfChannelsKey,
                           [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
                           nil ];
//    NSDictionary* audioOutputSettings = nil;
//        audioOutputSettings = [ NSDictionary dictionaryWithObjectsAndKeys:
//                               [ NSNumber numberWithInt: kAudioFormatMPEG4AAC_HE_V2 ], AVFormatIDKey,
//                               [ NSNumber numberWithFloat: 44100.0], AVSampleRateKey,
//                               [ NSData dataWithBytes: &acl length: sizeof( acl ) ], AVChannelLayoutKey,
//                               nil ];

    return audioOutputSettings;
}
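
For reference, an untested Swift 2 sketch of those three helpers might look like the following. The free-function names and the explicit size parameter are assumptions that mirror the Objective-C above; this is not a drop-in replacement for the real DejalActivityView class:

import AVFoundation
import CoreVideo

// Sketch: draw a CGImage into a newly created 32ARGB pixel buffer.
func pixelBufferFromCGImage(image: CGImage, size: CGSize) -> CVPixelBuffer? {
    let options = [
        kCVPixelBufferCGImageCompatibilityKey as String: true,
        kCVPixelBufferCGBitmapContextCompatibilityKey as String: true
    ]

    var pxbuffer: CVPixelBuffer? = nil
    let status = CVPixelBufferCreate(kCFAllocatorDefault,
                                     Int(size.width), Int(size.height),
                                     kCVPixelFormatType_32ARGB,
                                     options, &pxbuffer)
    guard let buffer = pxbuffer where status == kCVReturnSuccess else { return nil }

    CVPixelBufferLockBaseAddress(buffer, 0)
    let pxdata = CVPixelBufferGetBaseAddress(buffer)

    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()
    let context = CGBitmapContextCreate(pxdata,
                                        Int(size.width), Int(size.height), 8,
                                        4 * Int(size.width), rgbColorSpace,
                                        CGImageAlphaInfo.PremultipliedFirst.rawValue)
    CGContextDrawImage(context,
                       CGRectMake(0, 0, CGFloat(CGImageGetWidth(image)), CGFloat(CGImageGetHeight(image))),
                       image)

    CVPixelBufferUnlockBaseAddress(buffer, 0)
    return buffer
}

// Sketch: the adapter expects the same pixel format the buffer above uses.
func getAdapterDictionary() -> [String: AnyObject] {
    return [kCVPixelBufferPixelFormatTypeKey as String:
            NSNumber(unsignedInt: kCVPixelFormatType_32ARGB)]
}

// Sketch: mono AAC at 44.1 kHz, matching the Objective-C settings.
func getAudioDictionary() -> [String: AnyObject] {
    var acl = AudioChannelLayout()
    memset(&acl, 0, sizeof(AudioChannelLayout))
    acl.mChannelLayoutTag = kAudioChannelLayoutTag_Mono

    return [
        AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
        AVSampleRateKey: NSNumber(float: 44100.0),
        AVNumberOfChannelsKey: NSNumber(int: 1),
        AVChannelLayoutKey: NSData(bytes: &acl, length: sizeof(AudioChannelLayout))
    ]
}

Note that this Swift version returns a managed CVPixelBuffer?, so the .takeRetainedValue() call at the appendPixelBuffer site above would be dropped.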

You need to add an AVAssetWriter:

var videoRecorder: AVAssetWriter?

Then in the delegate callback:

let timeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)

if videoRecorder?.status == .Unknown {
    startRecordingTime = timeStamp
    videoRecorder?.startWriting()
    videoRecorder?.startSessionAtSourceTime(timeStamp)
}
You need to configure the recorder for each recording you want to make, and you also need to add the inputs to the recorder.

You may start running into problems, since you don't seem to have any of the queue setup you will need yet, but for reference this GitHub project is a very good resource.

Edit: additional information

You need to init() the writer and then add the inputs for the video/audio, as sketched below.
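
A rough Swift 2 sketch of that setup (prepareRecorder, the dimensions, and the audio settings are placeholder assumptions, not part of the answer):

import AVFoundation

// Sketch: create the writer and attach one video and one audio input.
func prepareRecorder(outputURL: NSURL) throws {
    let writer = try AVAssetWriter(URL: outputURL, fileType: AVFileTypeMPEG4)

    let videoSettings: [String: AnyObject] = [
        AVVideoCodecKey: AVVideoCodecH264,
        AVVideoWidthKey: 720,      // placeholder dimensions
        AVVideoHeightKey: 1280
    ]
    let videoInput = AVAssetWriterInput(mediaType: AVMediaTypeVideo, outputSettings: videoSettings)
    videoInput.expectsMediaDataInRealTime = true   // live capture, not file transcoding

    let audioSettings: [String: AnyObject] = [
        AVFormatIDKey: NSNumber(unsignedInt: kAudioFormatMPEG4AAC),
        AVSampleRateKey: 44100.0,
        AVNumberOfChannelsKey: 1
    ]
    let audioInput = AVAssetWriterInput(mediaType: AVMediaTypeAudio, outputSettings: audioSettings)
    audioInput.expectsMediaDataInRealTime = true

    if writer.canAddInput(videoInput) { writer.addInput(videoInput) }
    if writer.canAddInput(audioInput) { writer.addInput(audioInput) }

    videoRecorder = writer   // the property declared above
}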

Thanks for your answer, but I don't understand how I can use AVAssetWriter in my code.

It's not simple, but you clearly have a handle on the code you have so far, so you should be able to work out the rest from the link.

Your answer helped me a lot. I did some searching on Google and Stack Overflow, and I didn't understand anything in that link, but your answer was very useful, so I will mark it as accepted, and I will post the complete solution I found in the main question.

If I use this approach, the saved video's duration comes out as hours long.
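
For what it's worth, a common cause of absurd durations with AVAssetWriter (an observation, not something confirmed in this thread) is starting the writer session at a time far away from the presentation timestamps of the buffers appended afterwards; the movie then contains a huge empty gap. The status check in the answer suggests the usual remedy: start the session from the first appended buffer's own timestamp, for example:

// Hypothetical helper (sketch): call from the sample-buffer delegate
// before appending any media, instead of starting the session in recordFunc.
func startWriterSessionIfNeeded(sampleBuffer: CMSampleBuffer) {
    guard videoWriter.status == .Unknown else { return }
    let timeStamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
    videoWriter.startWriting()
    videoWriter.startSessionAtSourceTime(timeStamp) // not kCMTimeZero
}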