ARKit:在 Xcode 中跟踪头部上下运动
ARKit: Tracking Head Up and Down Movement in Xcode
我实现了以下代码,可以跟踪一个人的不同面部表情。但是,使用此代码,我无法在 Xcode 中使用 ARKit 跟踪人的头部上下运动。如果有人能告诉我如何在 Xcode 中通过 ARKit 跟踪人的头部上下运动,我将不胜感激!
import UIKit
import SceneKit
import ARKit
/// Tracks a user's facial expressions with ARKit face tracking and shows a
/// textual description of the detected expressions in `faceLabel`.
class ViewController: UIViewController, ARSCNViewDelegate {
    @IBOutlet var sceneView: ARSCNView!
    @IBOutlet weak var faceLabel: UILabel!
    @IBOutlet weak var labelView: UIView!

    /// Human-readable summary of the most recently detected expressions.
    /// Rewritten on the SceneKit render thread by `expression(anchor:)`.
    var analysis = ""

    override func viewDidLoad() {
        super.viewDidLoad()
        labelView.layer.cornerRadius = 10
        sceneView.delegate = self
        sceneView.showsStatistics = true
        // Face tracking requires TrueDepth-camera hardware; fail fast on
        // unsupported devices instead of erroring when the session starts.
        guard ARFaceTrackingConfiguration.isSupported else {
            fatalError("Face tracking is not supported on this device")
        }
    }

    override func viewWillAppear(_ animated: Bool) {
        super.viewWillAppear(animated)
        // Create and run a face-tracking session configuration.
        let configuration = ARFaceTrackingConfiguration()
        sceneView.session.run(configuration)
    }

    override func viewWillDisappear(_ animated: Bool) {
        super.viewWillDisappear(animated)
        // Pause the session while the view is off screen.
        sceneView.session.pause()
    }

    // MARK: - ARSCNViewDelegate

    /// Supplies a wireframe face-mesh node for each newly detected face anchor.
    func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
        // `sceneView.device` is nil only on hardware without Metal support,
        // which the face-tracking guard above already excludes.
        let faceMesh = ARSCNFaceGeometry(device: sceneView.device!)
        let node = SCNNode(geometry: faceMesh)
        node.geometry?.firstMaterial?.fillMode = .lines
        return node
    }

    /// Called on the SceneKit render thread whenever the face anchor updates.
    func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
        if let faceAnchor = anchor as? ARFaceAnchor, let faceGeometry = node.geometry as? ARSCNFaceGeometry {
            faceGeometry.update(from: faceAnchor.geometry)
            expression(anchor: faceAnchor)
            // Capture the freshly computed text by value before hopping to the
            // main queue; reading `self.analysis` inside the async block would
            // race with the next render-thread update.
            let text = self.analysis
            DispatchQueue.main.async {
                self.faceLabel.text = text
            }
        }
    }

    /// Rebuilds `analysis` from the anchor's blend-shape coefficients
    /// (each coefficient ranges from 0.0, neutral, to 1.0, fully expressed).
    func expression(anchor: ARFaceAnchor) {
        let smileLeft = anchor.blendShapes[.mouthSmileLeft]
        let smileRight = anchor.blendShapes[.mouthSmileRight]
        let cheekPuff = anchor.blendShapes[.cheekPuff]
        let tongue = anchor.blendShapes[.tongueOut]
        self.analysis = ""
        // Combined left+right smile coefficient above 0.9 counts as a smile.
        if ((smileLeft?.decimalValue ?? 0.0) + (smileRight?.decimalValue ?? 0.0)) > 0.9 {
            self.analysis += "You are smiling. "
        }
        // Explicit parentheses: `??` binds tighter than `>` in Swift, so this
        // is the same parse as before, just unambiguous to the reader.
        if (cheekPuff?.decimalValue ?? 0.0) > 0.1 {
            self.analysis += "Your cheeks are puffed. "
        }
        if (tongue?.decimalValue ?? 0.0) > 0.1 {
            self.analysis += "Don't stick your tongue out! "
        }
    }
}
实例属性 blendShapes
是一个包含 52 个表示面部表情的系数的字典。点头并不属于可检测的面部表情,所以实现点头检测最快的方法是检查 ARFaceAnchor
的方向(或其对应节点的方向)。
/// Detects a downward head tilt using two independent signals:
/// the anchor's gaze estimate and the face node's pitch.
func renderer(_ renderer: SCNSceneRenderer,
              didUpdate node: SCNNode,
              for anchor: ARAnchor) {
    // `is` check replaces the original unused `faceGeo` binding (it only
    // gated execution and produced an unused-variable warning).
    guard let faceAnchor = anchor as? ARFaceAnchor,
          node.geometry is ARSCNFaceGeometry else { return }
    // Gaze estimate: a non-positive y component of lookAtPoint (face
    // coordinate space) means the user is looking level or downward.
    if faceAnchor.lookAtPoint.y <= 0 {
        print("A head is...")
    }
    // `orientation` is a quaternion (SCNQuaternion), so its `x` component is
    // not an angle; `eulerAngles.x` is the rotation about the x axis in
    // radians, i.e. the head pitch we actually want to threshold.
    if node.eulerAngles.x >= Float.pi / 32 {
        print("A head is...")
    }
}
我实现了以下代码,可以跟踪一个人的不同面部表情。但是,使用此代码,我无法在 Xcode 中使用 ARKit 跟踪人的头部上下运动。如果有人能告诉我如何在 Xcode 中通过 ARKit 跟踪人的头部上下运动,我将不胜感激!
import UIKit
import SceneKit
import ARKit
// Tracks a user's facial expressions with ARKit face tracking and shows a
// textual description of the detected expressions in `faceLabel`.
class ViewController: UIViewController, ARSCNViewDelegate {
@IBOutlet var sceneView: ARSCNView!
@IBOutlet weak var faceLabel: UILabel!
@IBOutlet weak var labelView: UIView!
// Text describing the most recently detected expressions; written by
// `expression(anchor:)` on the render thread, displayed on the main queue.
var analysis = ""
override func viewDidLoad() {
super.viewDidLoad()
labelView.layer.cornerRadius = 10
sceneView.delegate = self
sceneView.showsStatistics = true
// Face tracking requires TrueDepth-camera hardware; stop early if absent.
guard ARFaceTrackingConfiguration.isSupported else {
fatalError("Face tracking is not supported on this device")
}
}
override func viewWillAppear(_ animated: Bool) {
super.viewWillAppear(animated)
// Create a session configuration
let configuration = ARFaceTrackingConfiguration()
// Run the view's session
sceneView.session.run(configuration)
}
override func viewWillDisappear(_ animated: Bool) {
super.viewWillDisappear(animated)
// Pause the view's session
sceneView.session.pause()
}
// MARK: - ARSCNViewDelegate
// Supplies a wireframe face-mesh node for each newly detected face anchor.
func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
// NOTE(review): `sceneView.device!` assumes Metal is available — presumably
// safe on face-tracking-capable devices, but worth confirming.
let faceMesh = ARSCNFaceGeometry(device: sceneView.device!)
let node = SCNNode(geometry: faceMesh)
node.geometry?.firstMaterial?.fillMode = .lines
return node
}
// Called on the SceneKit render thread whenever the face anchor updates.
func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
if let faceAnchor = anchor as? ARFaceAnchor, let faceGeometry = node.geometry as? ARSCNFaceGeometry {
faceGeometry.update(from: faceAnchor.geometry)
expression(anchor: faceAnchor)
// NOTE(review): `self.analysis` is read on the main queue while the render
// thread may be rewriting it — consider capturing it by value first.
DispatchQueue.main.async {
self.faceLabel.text = self.analysis
}
}
}
// Rebuilds `analysis` from the anchor's blend-shape coefficients.
func expression(anchor: ARFaceAnchor) {
let smileLeft = anchor.blendShapes[.mouthSmileLeft]
let smileRight = anchor.blendShapes[.mouthSmileRight]
let cheekPuff = anchor.blendShapes[.cheekPuff]
let tongue = anchor.blendShapes[.tongueOut]
self.analysis = ""
// Combined left+right smile coefficient above 0.9 counts as a smile.
if ((smileLeft?.decimalValue ?? 0.0) + (smileRight?.decimalValue ?? 0.0)) > 0.9 {
self.analysis += "You are smiling. "
}
// `??` binds tighter than `>`, so this compares (value ?? 0.0) > 0.1.
if cheekPuff?.decimalValue ?? 0.0 > 0.1 {
self.analysis += "Your cheeks are puffed. "
}
if tongue?.decimalValue ?? 0.0 > 0.1 {
self.analysis += "Don't stick your tongue out! "
}
}
}
实例属性 blendShapes
是一个包含 52 个表示面部表情的系数的字典。点头并不属于可检测的面部表情,所以实现点头检测最快的方法是检查 ARFaceAnchor
的方向(或其对应节点的方向)。
/// Prints when the tracked head appears to tilt downward, judged two ways:
/// by the anchor's gaze estimate and by the node's orientation component.
func renderer(_ renderer: SCNSceneRenderer,
              didUpdate node: SCNNode,
              for anchor: ARAnchor) {
    // Proceed only for a face anchor whose node carries face geometry,
    // exactly as the original nested `if let` required.
    guard let faceAnchor = anchor as? ARFaceAnchor,
          node.geometry as? ARSCNFaceGeometry != nil else {
        return
    }
    // Gaze estimate: non-positive y means looking level or downward.
    let gazeIsDownward = faceAnchor.lookAtPoint.y <= 0
    if gazeIsDownward {
        print("A head is...")
    }
    // Same threshold comparison as the original on the orientation's x
    // component. NOTE(review): `orientation` is a quaternion, so x is not an
    // angle — preserved as-is here; verify against the intended behavior.
    let pitchSignal = node.orientation.x
    if pitchSignal >= Float.pi / 32 {
        print("A head is...")
    }
}