NSProgressIndicator未在后台线程中更新-Swift 3
我已经建立了一个渲染器来处理图像,我想展示一下进度。虽然我似乎不知道如何更新用户界面。渲染完成后,ui将更新 这是我的设置,简化了:NSProgressIndicator未在后台线程中更新-Swift 3,swift,multithreading,Swift,Multithreading,我已经建立了一个渲染器来处理图像,我想展示一下进度。虽然我似乎不知道如何更新用户界面。渲染完成后,ui将更新 这是我的设置,简化了: DispatchQueue.global(qos: .background).async(execute: { for i in 0..<count { // CIImage manipulation and definition of progress DispatchQueue.main.async(execute
DispatchQueue.global(qos: .background).async(execute: {
for i in 0..<count {
// CIImage manipulation and definition of progress
DispatchQueue.main.async(execute: {
self.progress.doubleValue = progress
})
}
})
DispatchQueue.global(qos:.background).异步(执行:{
对于0中的i,请显示更多代码。您是否为进度指标设置了minValue
和maxValue
?进度的值是否正确地落在这些最小值和最大值之间?例如,macOS中的进度指标范围为0到100(而不是0.0和1.0,如果您来自iOS世界,您可能会想到这一点)。查看是否通过添加断点或通过添加打印语句调用代码。在for循环完成后,将调用代码get并更新ui。我将添加更多代码。所有打印(进度)直到结束时才会打印。。。
@IBAction func render_now(_ sender: Any) {
self.render_button.isEnabled = false
self.duration = self.player_x1?.currentItem?.asset.duration.seconds
let videoATrack: AVAssetTrack = (self.player_x1?.currentItem?.asset.tracks(withMediaType: AVMediaTypeVideo).last)!
self.fps = CMTimeScale(videoATrack.nominalFrameRate)
self.frame_count = Int(self.duration! * Double(self.fps!))
let asset_generator = AVAssetImageGenerator(asset: (self.player_x1?.currentItem?.asset)!)
var time_zero = kCMTimeZero
var asset_image_arr: [CGImage] = []
do {
for i in 0..<self.frame_count! {
let time = Double(i) / Double(self.fps!)
let asset_image = try asset_generator.copyCGImage(at: CMTime(seconds: time, preferredTimescale: self.fps!), actualTime: &time_zero)
asset_image_arr.append(asset_image)
}
} catch {print("err")}
let image_size = CGSize(width: asset_image_arr[0].width, height: asset_image_arr[0].height)
var ci_image_arr: [CIImage] = []
for i in 0..<self.frame_count! {
ci_image_arr.append(CIImage(cgImage: asset_image_arr[i], options: nil))
}
let x_factor = Int(NSDecimalNumber(decimal: pow(2, self.radio_index! + 1)))
let filter_brightness = CIFilter(name: "CIToneCurve")
let brightness = 1.0 / CGFloat(x_factor)
filter_brightness?.setValue(CIVector(x:0.0, y: 0.0), forKey: "inputPoint0")
filter_brightness?.setValue(CIVector(x:0.25, y: 0.25 * brightness), forKey: "inputPoint1")
filter_brightness?.setValue(CIVector(x:0.50, y: 0.5 * brightness), forKey: "inputPoint2")
filter_brightness?.setValue(CIVector(x:0.75, y: 0.75 * brightness), forKey: "inputPoint3")
filter_brightness?.setValue(CIVector(x:1.0, y: 1.0 * brightness), forKey: "inputPoint4")
let filter_add = CIFilter(name: "CIAdditionCompositing")
var images: [NSImage] = []
DispatchQueue.global(qos: .background).async(execute: {
for i in 0..<self.frame_count! / x_factor {
filter_brightness?.setValue(ci_image_arr[i * x_factor], forKey: kCIInputImageKey)
var ci_image: CIImage = (filter_brightness?.outputImage)!
for x in 1..<x_factor {
filter_brightness?.setValue(ci_image_arr[i * x_factor + x], forKey: kCIInputImageKey)
let ci_image_add: CIImage = (filter_brightness?.outputImage)!
filter_add?.setValue(ci_image, forKey: kCIInputImageKey)
filter_add?.setValue(ci_image_add, forKey: kCIInputBackgroundImageKey)
ci_image = (filter_add?.outputImage)!
}
let rep: NSCIImageRep = NSCIImageRep(ciImage: ci_image)
let nsImage: NSImage = NSImage(size: rep.size)
nsImage.addRepresentation(rep)
images.append(nsImage)
let progress = Double(i) / Double(self.frame_count! / x_factor - 1) * 100.0
DispatchQueue.main.async(execute: {
print(progress)
self.progress.doubleValue = progress
})
}
})
let desktopURL = FileManager.default.urls(for: .desktopDirectory, in: .userDomainMask).first!
let movie_out = MovieOut()
movie_out.writeImagesAsMovie(images, path: self.movie_out_x_urls[self.radio_index! - 1].path, size: image_size, fps: self.fps!) { () -> () in
let player_x = AVPlayer(url: self.movie_out_x_urls[self.radio_index! - 1])
self.player_view.player = player_x
self.view.addSubview(self.player_view)
self.save_button.isEnabled = true
}
}