
Commit dce3682

Author: Cesar Vargas Casaseca

Better format, add debug mode

1 parent f129ff2

3 files changed (+43, -17)

Sample/Sample/ViewController.swift

Lines changed: 10 additions & 0 deletions
@@ -59,6 +59,16 @@ class ViewController: UIViewController {
         facialExpressionDetectorViewController.didMove(toParent: self)
     }
 
+    private func changeFacialExpressionMinimumValidCoefficient() {
+        let changingAnalyzer = FacialExpressionAnalyzer(facialExpression: FacialExpression.mouthSmileLeft, blendShapeLocation: .mouthSmileLeft, minimumValidCoefficient: 0.2)
+
+        guard let index = facialExpressionDetectorViewController.analyzers.firstIndex(where: { $0.facialExpression == FacialExpression.mouthSmileLeft }) else {
+            return
+        }
+
+        facialExpressionDetectorViewController.analyzers[index] = changingAnalyzer
+    }
+
     private func addMoreFacialExpressionsToBeDetected() {
         facialExpressionDetectorViewController.analyzers.append(FacialExpressionAnalyzer(facialExpression: FacialExpression.eyeWideLeft, blendShapeLocation: .eyeWideLeft, minimumValidCoefficient: 0.6))
     }
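The sample swaps the analyzer wholesale rather than mutating it, since `FacialExpressionAnalyzer`'s stored properties are immutable. The same replace-in-place pattern works for any tracked expression; below is a minimal sketch under the same assumptions as the sample (the `relaxEyeWideDetection` name and the 0.4 threshold are illustrative, and it presumes the `eyeWideLeft` analyzer appended in `addMoreFacialExpressionsToBeDetected` is already present):

    // Hypothetical tweak: make eyeWideLeft easier to trigger than the 0.6 used above.
    private func relaxEyeWideDetection() {
        let relaxedAnalyzer = FacialExpressionAnalyzer(facialExpression: FacialExpression.eyeWideLeft, blendShapeLocation: .eyeWideLeft, minimumValidCoefficient: 0.4)

        // Locate the existing analyzer for this expression, if any.
        guard let index = facialExpressionDetectorViewController.analyzers.firstIndex(where: { $0.facialExpression == FacialExpression.eyeWideLeft }) else {
            return
        }

        // Replace it with the lower-threshold analyzer.
        facialExpressionDetectorViewController.analyzers[index] = relaxedAnalyzer
    }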

Sources/Wink/FacialExpression.swift

Lines changed: 2 additions & 2 deletions
@@ -31,8 +31,8 @@ extension FacialExpression {
     static let test = FacialExpression(rawValue: "")
 }
 
-public struct FacialExpressionAnalyzer {
-    let facialExpression: FacialExpression
+public struct FacialExpressionAnalyzer: Equatable {
+    public let facialExpression: FacialExpression
     let blendShapeLocation: ARFaceAnchor.BlendShapeLocation
     let minimumValidCoefficient: Decimal
 
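Two notes on this change: the `Equatable` conformance is synthesized member by member (assuming `FacialExpression` is itself `Equatable`, which the sample's `==` comparison implies), and making `facialExpression` public is what lets client code such as the sample's `firstIndex(where:)` inspect it from outside the module. A minimal sketch of both, with illustrative values and the same initializer the Sample target uses:

    let strict = FacialExpressionAnalyzer(facialExpression: .mouthSmileLeft, blendShapeLocation: .mouthSmileLeft, minimumValidCoefficient: 0.5)
    let relaxed = FacialExpressionAnalyzer(facialExpression: .mouthSmileLeft, blendShapeLocation: .mouthSmileLeft, minimumValidCoefficient: 0.2)

    strict == relaxed                                   // false: the coefficients differ
    strict.facialExpression == relaxed.facialExpression // true: the property is now readable outside the module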

Sources/Wink/FacialExpressionDetectorViewController.swift

Lines changed: 31 additions & 15 deletions
@@ -10,12 +10,16 @@ import UIKit
 import ARKit
 import Combine
 
-public class FacialExpressionDetectorViewController: UIViewController, ARSCNViewDelegate {
+public class FacialExpressionDetectorViewController: UIViewController {
     var sceneView: ARSCNView!
 
+    /// Array of `FacialExpressionAnalyzer` responsible for detecting the supported expressions
     public var analyzers = DefaultFacialExpressionAnalyzersProvider().defaultFacialExpressionAnalyzers()
-
+    /// When enabled, debug elements such as node lines and statistics are shown in the camera view. Defaults to `false`
+    public var debugMode = false
+    /// This publisher is updated with a new array of `FacialExpression` each time they are retrieved
     lazy public var facialExpressionPublisher: AnyPublisher<[FacialExpression], Never> = facialExpressionSubject.eraseToAnyPublisher()
+
     private let facialExpressionSubject: PassthroughSubject<[FacialExpression], Never> = PassthroughSubject<[FacialExpression], Never>()
 
     public override func viewDidLoad() {
@@ -29,7 +33,10 @@ public class FacialExpressionDetectorViewController: UIViewController, ARSCNView
         adjustSceneViewConstraints()
 
         sceneView.delegate = self
-        sceneView.showsStatistics = true
+
+        if debugMode {
+            sceneView.showsStatistics = true
+        }
     }
 
     private func checkFaceTrackingSupport() {
@@ -59,19 +66,7 @@ public class FacialExpressionDetectorViewController: UIViewController, ARSCNView
         sceneView.session.pause()
     }
 
-    public func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
-        let faceMesh = ARSCNFaceGeometry(device: sceneView.device!)
-        let node = SCNNode(geometry: faceMesh)
-        node.geometry?.firstMaterial?.fillMode = .lines
-        return node
-    }
 
-    public func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
-        if let faceAnchor = anchor as? ARFaceAnchor, let faceGeometry = node.geometry as? ARSCNFaceGeometry {
-            faceGeometry.update(from: faceAnchor.geometry)
-            detectFacialExpression(from: faceAnchor)
-        }
-    }
 
     func detectFacialExpression(from anchor: ARFaceAnchor) {
         let facialExpressions: [FacialExpression] = analyzers.compactMap {
@@ -83,3 +78,24 @@ public class FacialExpressionDetectorViewController: UIViewController, ARSCNView
         facialExpressionSubject.send(facialExpressions)
     }
 }
+
+extension FacialExpressionDetectorViewController: ARSCNViewDelegate {
+    public func renderer(_ renderer: SCNSceneRenderer, nodeFor anchor: ARAnchor) -> SCNNode? {
+        let faceMesh = ARSCNFaceGeometry(device: sceneView.device!)
+        let node = SCNNode(geometry: faceMesh)
+
+        if debugMode {
+            node.geometry?.firstMaterial?.fillMode = .lines
+
+        }
+
+        return node
+    }
+
+    public func renderer(_ renderer: SCNSceneRenderer, didUpdate node: SCNNode, for anchor: ARAnchor) {
+        if let faceAnchor = anchor as? ARFaceAnchor, let faceGeometry = node.geometry as? ARSCNFaceGeometry {
+            faceGeometry.update(from: faceAnchor.geometry)
+            detectFacialExpression(from: faceAnchor)
+        }
+    }
+}
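With the delegate methods grouped in an extension, `debugMode` now gates both the statistics overlay (set in `viewDidLoad`) and the wireframe face mesh (set in `renderer(_:nodeFor:)`). Since the flag is only read in those two places, a host should set it before the detector's view loads. A minimal consumer sketch; `HostViewController` is hypothetical, and the embedding mirrors the Sample app:

    import Combine
    import UIKit
    import Wink

    final class HostViewController: UIViewController {
        private let detector = FacialExpressionDetectorViewController()
        private var cancellables = Set<AnyCancellable>()

        override func viewDidLoad() {
            super.viewDidLoad()

            // Set before the detector's viewDidLoad runs, since
            // showsStatistics is only configured there.
            detector.debugMode = true

            addChild(detector)
            view.addSubview(detector.view)
            detector.didMove(toParent: self)

            // Receives the expressions whose blend-shape coefficient cleared
            // the corresponding analyzer's minimumValidCoefficient.
            detector.facialExpressionPublisher
                .sink { expressions in
                    print("Detected:", expressions)
                }
                .store(in: &cancellables)
        }
    }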
