Warning: file_get_contents(/data/phpspider/zhask/data//catemap/8/swift/19.json): failed to open stream: No such file or directory in /data/phpspider/zhask/libs/function.php on line 167

Warning: Invalid argument supplied for foreach() in /data/phpspider/zhask/libs/tag.function.php on line 1116

Notice: Undefined index: in /data/phpspider/zhask/libs/function.php on line 180

Warning: array_chunk() expects parameter 1 to be array, null given in /data/phpspider/zhask/libs/function.php on line 181
Ios 拍照时的快速内存管理问题_Ios_Swift_Memory - Fatal编程技术网

Ios 拍照时的快速内存管理问题

Ios 拍照时的快速内存管理问题,ios,swift,memory,Ios,Swift,Memory,我正在制作一个iOS应用程序，可以拍照、制作回旋镖视频和制作GIF。对于回旋镖效果，我要做的是拍16张照片，把它们放进一个数组，然后再按相反的顺序把它们添加进去，总共得到31张照片（最后一张不重复）。我使用下文给出的完整代码将此数组转换为视频。

我正在制作一个iOS应用程序,可以拍照、制作回飞棒视频和制作GIF。对于回飞棒,我要做的是拍16张照片,然后把它们放在一个阵列上,然后再按相反的顺序添加它们,以显示回飞棒效果,总共有31张照片,因为我不重复最后一张。我使用以下代码将此阵列转换为视频:

/// Encodes `allImages` as an H.264 MP4 movie at `videoPath`.
/// - Parameters:
///   - allImages: Frames in presentation order.
///   - videoPath: Destination for the .mp4 — either a URL string ("file://…")
///     or a plain filesystem path.
///   - videoSize: Output dimensions in pixels.
///   - videoFPS: Frames per second of the output.
///   - completion: Invoked exactly once — `true` on success, `false` on any
///     failure (the original never reported failures, leaving callers hung).
/// - Returns: `false` if the writer could not even be configured; `true` once
///   the asynchronous writing session has been started.
func writeImagesAsMovie(_ allImages: [UIImage], videoPath: String, videoSize: CGSize, videoFPS: Int32, completion: @escaping (Bool) -> ()) -> Bool{

    // Accept both a URL string (what tirarBoomerang passes) and a raw path.
    let outputURL = URL(string: videoPath) ?? URL(fileURLWithPath: videoPath)

    // Configuration failures are recoverable — report them instead of the
    // original fatalError crashes.
    guard let assetWriter = try? AVAssetWriter(outputURL: outputURL, fileType: AVFileType.mp4) else {
        print("Error converting images to video: could not create AVAssetWriter")
        completion(false)
        return false
    }
    let outputSettings = [AVVideoCodecKey : AVVideoCodecType.h264, AVVideoWidthKey : NSNumber(value: Float(videoSize.width)), AVVideoHeightKey : NSNumber(value: Float(videoSize.height))] as [String : Any]
    guard assetWriter.canApply(outputSettings: outputSettings, forMediaType: AVMediaType.video) else {
        print("Error converting images to video: cannot apply the output settings")
        completion(false)
        return false
    }
    let writerInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: outputSettings)

    let sourcePixelBufferAttributesDictionary = [kCVPixelBufferPixelFormatTypeKey as String : NSNumber(value: kCVPixelFormatType_32ARGB), kCVPixelBufferWidthKey as String: NSNumber(value: Float(videoSize.width)), kCVPixelBufferHeightKey as String: NSNumber(value: Float(videoSize.height))]
    let pixelBufferAdaptor = AVAssetWriterInputPixelBufferAdaptor(assetWriterInput: writerInput, sourcePixelBufferAttributes: sourcePixelBufferAttributesDictionary)
    if assetWriter.canAdd(writerInput) {
        assetWriter.add(writerInput)
    }

    // Start writing session
    if assetWriter.startWriting() {
        assetWriter.startSession(atSourceTime: CMTime.zero)
        // The adaptor only vends a pixel buffer pool after startWriting().
        assert(pixelBufferAdaptor.pixelBufferPool != nil)
        let mediaQueue = DispatchQueue(label: "mediaInputQueue", attributes: [])

        var frameCount = 0
        let numImages = allImages.count

        // [weak self], not [unowned self]: the writer callback can outlive the
        // owning object, and an unowned capture would crash in that case.
        writerInput.requestMediaDataWhenReady(on: mediaQueue, using: { [weak self] () -> Void in
            guard let self = self else { return }

            // Append frames only while the input accepts more data; the
            // callback fires again when it becomes ready.
            while (writerInput.isReadyForMoreMediaData && frameCount < numImages) {
                // Frame i is presented at i / fps. The original added an extra
                // frameDuration to every frame after the first, shifting frame
                // 1 to 2/fps and stretching the whole clip by one frame.
                let presentationTime = CMTimeMake(value: Int64(frameCount), timescale: videoFPS)

                if !self.appendPixelBufferForImageAtURL(allImages[frameCount], pixelBufferAdaptor: pixelBufferAdaptor, presentationTime: presentationTime) {
                    print("Error converting images to video: AVAssetWriterInputPixelBufferAdapter failed to append pixel buffer")
                    completion(false)
                    return
                }

                frameCount += 1
            }

            // No more images to add? End video.
            if (frameCount >= numImages) {
                writerInput.markAsFinished()
                assetWriter.finishWriting {
                    if let error = assetWriter.error {
                        print("Error converting images to video: \(error)")
                        completion(false)
                    } else {
                        print("Converted images to movie @ \(videoPath)")
                        completion(true)
                    }
                }
            }
        })
    }

    return true
}


/// Creates an `AVAssetWriter` configured with a single H.264 video input.
/// - Parameters:
///   - path: Filesystem path of the output .mp4 file.
///   - size: Video dimensions in pixels.
/// - Returns: A writer with its video input already attached, or `nil` if the
///   writer could not be created or the input could not be added.
func createAssetWriter(_ path: String, size: CGSize) -> AVAssetWriter? {
    // AVAssetWriter requires a file URL, not a plain string.
    let pathURL = URL(fileURLWithPath: path)

    // Return new asset writer or nil
    do {
        // Create asset writer
        let newWriter = try AVAssetWriter(outputURL: pathURL, fileType: AVFileType.mp4)

        // Define settings for video input
        let videoSettings: [String : AnyObject] = [
            AVVideoCodecKey  : AVVideoCodecType.h264 as AnyObject,
            AVVideoWidthKey  : size.width as AnyObject,
            AVVideoHeightKey : size.height as AnyObject,
        ]

        // Add video input to writer
        let assetWriterVideoInput = AVAssetWriterInput(mediaType: AVMediaType.video, outputSettings: videoSettings)
        // add(_:) raises an Objective-C exception when the input cannot be
        // attached — guard with canAdd(_:) instead of crashing.
        guard newWriter.canAdd(assetWriterVideoInput) else {
            print("Error creating asset writer: cannot add video input")
            return nil
        }
        newWriter.add(assetWriterVideoInput)

        // Return writer
        print("Created asset writer for \(size.width)x\(size.height) video")
        return newWriter
    } catch {
        print("Error creating asset writer: \(error)")
        return nil
    }
}


/// Draws `image` into a pixel buffer from the adaptor's pool and appends it
/// at `presentationTime`.
/// - Returns: `true` if the buffer was successfully appended.
func appendPixelBufferForImageAtURL(_ image: UIImage, pixelBufferAdaptor: AVAssetWriterInputPixelBufferAdaptor, presentationTime: CMTime) -> Bool {
    var appendSucceeded = false

    // Drain per-frame temporaries so peak memory stays bounded while encoding.
    autoreleasepool {
        guard let pixelBufferPool = pixelBufferAdaptor.pixelBufferPool else {
            NSLog("Error: Failed to allocate pixel buffer from pool")
            return
        }

        // Use an inout optional instead of a manually managed
        // UnsafeMutablePointer. The original deallocated the pointer without
        // deinitializing it on the failure branch, and the manual pointer
        // dance is unnecessary here in the first place.
        var pixelBufferOut: CVPixelBuffer?
        let status: CVReturn = CVPixelBufferPoolCreatePixelBuffer(
            kCFAllocatorDefault,
            pixelBufferPool,
            &pixelBufferOut
        )

        if let pixelBuffer = pixelBufferOut, status == kCVReturnSuccess {
            fillPixelBufferFromImage(image, pixelBuffer: pixelBuffer)
            appendSucceeded = pixelBufferAdaptor.append(pixelBuffer, withPresentationTime: presentationTime)
        } else {
            NSLog("Error: Failed to allocate pixel buffer from pool")
        }
    }

    return appendSucceeded
}


/// Renders `image` into `pixelBuffer` (expected 32ARGB, sized by the pool
/// attributes set up in writeImagesAsMovie).
func fillPixelBufferFromImage(_ image: UIImage, pixelBuffer: CVPixelBuffer) {
    CVPixelBufferLockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0)))
    // Guarantee the unlock runs on every exit path, including early returns.
    defer { CVPixelBufferUnlockBaseAddress(pixelBuffer, CVPixelBufferLockFlags(rawValue: CVOptionFlags(0))) }

    let pixelData = CVPixelBufferGetBaseAddress(pixelBuffer)
    let rgbColorSpace = CGColorSpaceCreateDeviceRGB()

    // Create CGBitmapContext over the buffer's memory. Bail out (rather than
    // force-unwrap) if the parameters are rejected.
    guard let context = CGContext(
        data: pixelData,
        width: Int(image.size.width),
        height: Int(image.size.height),
        bitsPerComponent: 8,
        bytesPerRow: CVPixelBufferGetBytesPerRow(pixelBuffer),
        space: rgbColorSpace,
        bitmapInfo: CGImageAlphaInfo.premultipliedFirst.rawValue
        ) else {
        NSLog("Error: could not create CGContext for pixel buffer")
        return
    }

    // Prefer the image's backing CGImage; only fall back to the expensive
    // CIImage -> CGImage round trip when there is no CGImage. The original
    // unconditionally rendered through CIContext for every frame, allocating
    // a large intermediate bitmap each time (it also built an unused
    // NSCoder.string(for:) rect, removed here).
    var frame = image.cgImage
    if frame == nil, let ciImage = image.ciImage ?? CIImage(image: image) {
        frame = convertCIImageToCGImage(inputImage: ciImage)
    }
    guard let cgImage = frame else {
        NSLog("Error: could not obtain CGImage from UIImage")
        return
    }

    // Draw image into context
    context.draw(cgImage, in: CGRect(x: 0.0, y: 0.0, width: image.size.width, height: image.size.height))
}

/// Renders a `CIImage` into a `CGImage` over its full extent.
/// - Returns: The rendered image, or `nil` if Core Image fails to render.
///   (Return type kept as `CGImage!` for source compatibility with callers.)
func convertCIImageToCGImage(inputImage: CIImage) -> CGImage! {
    // CIContext(options:) is non-failable, so the original `context != nil`
    // check was always true (and produced a compiler warning); drop it.
    let context = CIContext(options: nil)
    return context.createCGImage(inputImage, from: inputImage.extent)
}
我不知道还有什么好尝试的,谢谢你的帮助

编辑

更仔细地检查仪器后,我意识到该应用程序将我拍摄的所有照片都保存在内存中。我将在处理完图像后立即删除对它们的所有引用。所以我假设AVCapturePhotoOutput将它们保存在内存中,并且从不释放它。有没有办法把它从记忆中清除

调用以生成回飞镖并使用writeImagesAsMovie函数的函数:

/// Processes one captured boomerang frame from `imageData`. When 16 frames
/// have been collected, mirrors them (31 frames total — the last frame is not
/// duplicated) and encodes the result to an MP4; otherwise triggers the next
/// photo capture.
func tirarBoomerang(imageData: Data){
    // NOTE(review): the contents of this snapshot context are never read —
    // the begin/render pair looks vestigial. It is now balanced with
    // UIGraphicsEndImageContext(); the original left a full-screen bitmap
    // context open on every call, a per-frame memory leak.
    UIGraphicsBeginImageContextWithOptions(previewView.frame.size, false, 0.0)
    previewView.superview!.layer.render(in: UIGraphicsGetCurrentContext()!)
    UIGraphicsEndImageContext()

    let foto = UIImage(data: imageData)!
    let cropedPhoto = cropToBounds(image: foto, width: Double(moldura.larguraFoto), height: Double(moldura.alturaFoto))
    let resultImage =  blendImages(cropedPhoto, molduraImagem!)
    fotosBoomerangArray.append(resultImage!)
    numFotosLabel.text = "\(fotosBoomerangArray.count)/16"

    if fotosBoomerangArray.count == 16 {
        // Append frames 14…0 in reverse (index 15 skipped so the turnaround
        // frame is not shown twice): 16 + 15 = 31 frames.
        // The autoreleasepool the original wrapped around this append was a
        // no-op — appending to a Swift array allocates nothing autoreleased.
        for (index, fotoBoomerang) in fotosBoomerangArray.enumerated().reversed() {
            if index == 15{
                continue
            }
            fotosBoomerangArray.append(fotoBoomerang)
        }

        SVProgressHUD.show(withStatus: "Aguarde")
        SVProgressHUD.setDefaultStyle(.dark)
        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
        let fileURL = documentsURL.appendingPathComponent("\(public_id!.description).mp4")

        let sizeVideo = CGSize(width: moldura.larguraMoldura, height: moldura.alturaMoldura)

        imageToVideo.writeImagesAsMovie(fotosBoomerangArray, videoPath: fileURL.absoluteString, videoSize: sizeVideo, videoFPS: 30) { [weak self](success) in
            DispatchQueue.main.async {
                SVProgressHUD.dismiss()
                // guard-unwrap instead of `self!` — force-unwrapping a weak
                // capture crashes if this controller was deallocated before
                // encoding finished.
                guard let self = self else { return }
                self.irParaPreview()
            }
        }

    }
    else {
        fotoNumero += 1
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        self.stillImageOutput!.capturePhoto(with: settings, delegate: self)
    }
}

我确实用过它,但正如我所说的,不清楚问题出在哪里。没有内存警告或内存泄漏。我更新了更多信息,你是对的,当我更仔细地查看仪器时,我意识到了问题。好的,首先,32 8MB的图像阵列是错误的。我会说:当你收集你的图像数据时,立即将其写入磁盘,这样你的内存中就不会同时有超过一个图像的数据。您只需要一个包含32个文件名的数组。现在执行例行程序,当您从磁盘加载每个图像时,防止缓存图像数据(使用ImageIO加载技术来实现这一点)。此外,像素缓冲池可能存在内存管理问题。
// NOTE(review): this is the question's verbatim copy of tirarBoomerang,
// quoted again in the edit — comments below flag its defects without
// changing the quoted code.
func tirarBoomerang(imageData: Data){
    // NOTE(review): this snapshot context is opened and rendered into but its
    // result is never read, and there is no matching UIGraphicsEndImageContext()
    // — a full-screen bitmap context leaks on every captured frame.
    UIGraphicsBeginImageContextWithOptions(previewView.frame.size, false, 0.0)
    previewView.superview!.layer.render(in: UIGraphicsGetCurrentContext()!)
    let foto = UIImage(data: imageData)!        
    let cropedPhoto = cropToBounds(image: foto, width: Double(moldura.larguraFoto), height: Double(moldura.alturaFoto))
    let resultImage =  blendImages(cropedPhoto, molduraImagem!)
    // Every processed frame is retained in this array — 31 full-size UIImages
    // by the end, which is the memory growth observed in Instruments.
    fotosBoomerangArray.append(resultImage!)        
    numFotosLabel.text = "\(fotosBoomerangArray.count)/16"
    if fotosBoomerangArray.count == 16 {
        // Mirror frames 14…0 onto the end (index 15 skipped so the turnaround
        // frame is not duplicated): 16 + 15 = 31 frames total.
        for (index, fotoBoomerang) in fotosBoomerangArray.enumerated().reversed() {
            if index == 15{
                continue
            }
            // NOTE(review): this autoreleasepool is a no-op — appending to a
            // Swift array creates no autoreleased objects.
            autoreleasepool {
                fotosBoomerangArray.append(fotoBoomerang)
            }
        }

        SVProgressHUD.show(withStatus: "Aguarde")
        SVProgressHUD.setDefaultStyle(.dark)
        let documentsURL = FileManager.default.urls(for: .documentDirectory, in: .userDomainMask).first!
        let fileURL = documentsURL.appendingPathComponent("\(public_id!.description).mp4")

        let sizeVideo = CGSize(width: moldura.larguraMoldura, height: moldura.alturaMoldura)

        // Note: writeImagesAsMovie receives a URL string (absoluteString),
        // which its URL(string:) call happens to accept.
        imageToVideo.writeImagesAsMovie(fotosBoomerangArray, videoPath: fileURL.absoluteString, videoSize: sizeVideo, videoFPS: 30) { [weak self](success) in
            DispatchQueue.main.async {
                SVProgressHUD.dismiss()
                // NOTE(review): `self!` force-unwraps a weak capture — this
                // crashes if the controller deallocates before encoding ends.
                self!.irParaPreview()
            }
        }

    }
    else {
        fotoNumero += 1
        // Request the next JPEG capture; the delegate callback feeds the next
        // frame back into this method.
        let settings = AVCapturePhotoSettings(format: [AVVideoCodecKey: AVVideoCodecType.jpeg])
        self.stillImageOutput!.capturePhoto(with: settings, delegate: self)
    }
}