Swift 4: Access Variables from DispatchQueue.main (Scope)
I have a CoreML image classification task that takes the "live stream" from the iOS device's [video] camera and runs in the background. Once an object has been recognized, and some other app logic has run, I want to update a UI label with some data.
Can someone explain how the closure passed to DispatchQueue.main.async(execute: { }) is able to access the variables I have been working with? I think this is essentially a scoping question.
The code I am currently using:
func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    processCameraBuffer(sampleBuffer: sampleBuffer)
}

func processCameraBuffer(sampleBuffer: CMSampleBuffer) {
    let coreMLModel = Inceptionv3()

    if let model = try? VNCoreMLModel(for: coreMLModel.model) {
        let request = VNCoreMLRequest(model: model, completionHandler: { (request, error) in
            if let results = request.results as? [VNClassificationObservation] {

                var counter = 0
                var otherVar = 0

                for item in results[0...9] {
                    if item.identifier.contains("something") {
                        print("some app logic goes on here")
                        otherVar += 10 - counter
                    }
                    counter += 1
                }

                switch otherVar {
                case _ where otherVar >= 10:
                    DispatchQueue.main.async(execute: {
                        let displayVarFormatted = String(format: "%.2f", otherVar / 65 * 100)
                        self.labelPrediction.text = "\(counter): \(displayVarFormatted)%"
                    })
                default:
                    DispatchQueue.main.async(execute: {
                        self.labelPrediction.text = "No result!"
                    })
                }
            }
        })

        if let pixelBuffer: CVPixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) {
            let handler = VNImageRequestHandler(cvPixelBuffer: pixelBuffer, options: [:])
            do {
                try handler.perform([request])
            } catch {
                print(error.localizedDescription)
            }
        }
    }
}
It is the self.labelPrediction.text = "" line inside the switch statement that is causing the problem. This var is currently always 0.
This is not a DispatchQueue problem. With processCameraBuffer(sampleBuffer:) written the way it is, your code updates the UI before the result is available.
To solve this, you need an escaping closure. Your functions should look like this:
func processCameraBuffer(sampleBuffer: CMSampleBuffer, completion: @escaping (Int, String) -> Void) {
    // 2.
    let request = VNCoreMLRequest(model: model, completionHandler: { (request, error) in
        // ... your existing classification logic (counter, otherVar) goes here ...
        DispatchQueue.main.async(execute: {
            // 3.
            let displayVarFormatted = String(format: "%.2f", otherVar / 65 * 100)
            completion(counter, displayVarFormatted)
        })
    })
    // ... perform the request with VNImageRequestHandler as before ...
}

func captureOutput(_ output: AVCaptureOutput, didOutput sampleBuffer: CMSampleBuffer, from connection: AVCaptureConnection) {
    // 1.
    processCameraBuffer(sampleBuffer: sampleBuffer) { counter, displayVarFormatted in
        /*
         This closure is executed when
         completion(counter, displayVarFormatted) is called.
         */
        // 4.
        self.labelPrediction.text = "\(counter): \(displayVarFormatted)%"
    }
}
From this point on, variable scope is not the problem; what you have to handle is the asynchronous task (a standalone sketch of the same pattern follows the list below):
1. The capture happens.
2. processCameraBuffer is called and the VNCoreMLRequest is performed.
3. You get the data through completion(), which executes processCameraBuffer's completion block.
4. The label is updated.
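To make that ordering concrete outside of Vision/CoreML, here is a minimal, self-contained sketch of the same escaping-completion pattern. The names fetchScore, counter and otherVar are hypothetical stand-ins, not part of the original code; the point is only that the label-update code runs inside the completion closure, i.e. after the result exists.

import Foundation

// Hypothetical stand-in for the Vision request: it produces its result
// later, on a background queue, and reports it through the escaping
// completion handler (steps 2 and 3 above).
func fetchScore(completion: @escaping (Int, String) -> Void) {
    DispatchQueue.global(qos: .userInitiated).async {
        let counter = 3        // pretend classification result
        let otherVar = 42.0    // a Double here, so the division below keeps its fraction
        let formatted = String(format: "%.2f", otherVar / 65 * 100)

        // Hop back to the main queue before anything touches UI state.
        DispatchQueue.main.async {
            completion(counter, formatted)   // step 3: hand the data back
        }
    }
}

// Step 1: kick off the work; step 4: consume the result inside the closure.
fetchScore { counter, formatted in
    // In a view controller, this is where self.labelPrediction.text would be set.
    print("\(counter): \(formatted)%")
    exit(EXIT_SUCCESS)   // only needed to end this command-line demo
}

dispatchMain()   // keeps the main queue running for the demo; an app's run loop already does this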