general: cleanup for manual var tuning
Gaze/Constants/EyeTrackingConstants.swift (new file, 44 lines)
@@ -0,0 +1,44 @@
+//
+//  EyeTrackingConstants.swift
+//  Gaze
+//
+//  Created by Mike Freno on 1/14/26.
+//
+
+import Foundation
+
+enum EyeTrackingConstants {
+    // MARK: - Logging
+    /// Interval between log messages in seconds
+    static let logInterval: TimeInterval = 0.5
+
+    // MARK: - Eye Closure Detection
+    /// Threshold for eye closure (smaller value means eye must be more closed to trigger)
+    /// Range: 0.0 to 1.0 (approximate eye opening ratio)
+    static let eyeClosedThreshold: CGFloat = 0.02
+
+    // MARK: - Face Pose Thresholds
+    /// Maximum yaw (left/right head turn) in radians before considering user looking away
+    /// 0.20 radians ≈ 11.5 degrees (Tightened from 0.35)
+    static let yawThreshold: Double = 0.20
+
+    /// Pitch threshold for looking UP (above screen).
+    /// Since camera is at top, looking at screen is negative pitch.
+    /// Values > 0.1 imply looking straight ahead or up (away from screen).
+    static let pitchUpThreshold: Double = 0.1
+
+    /// Pitch threshold for looking DOWN (at keyboard/lap).
+    /// Values < -0.45 imply looking too far down.
+    static let pitchDownThreshold: Double = -0.45
+
+    // MARK: - Pupil Tracking Thresholds
+    /// Minimum horizontal pupil ratio (0.0 = right edge, 1.0 = left edge)
+    /// Values below this are considered looking right (camera view)
+    /// Tightened from 0.25 to 0.35
+    static let minPupilRatio: Double = 0.35
+
+    /// Maximum horizontal pupil ratio
+    /// Values above this are considered looking left (camera view)
+    /// Tightened from 0.75 to 0.65
+    static let maxPupilRatio: Double = 0.65
+}
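The thresholds above feed the pose check in EyeTrackingService.detectLookingAway further down. As a sanity check on the numbers (0.20 rad × 180/π ≈ 11.5°), here is a minimal sketch, not part of the commit, of how they combine; poseLookingAway(yaw:pitch:) is a hypothetical free function mirroring the service's logic:

import Foundation

// Sketch (not part of this commit) of how the pose thresholds combine.
func poseLookingAway(yaw: Double, pitch: Double) -> Bool {
    // Head turned too far left or right: |yaw| beyond 0.20 rad (~11.5°).
    let yawAway = abs(yaw) > EyeTrackingConstants.yawThreshold
    // Camera sits above the screen, so an on-screen gaze pitches negative:
    // pitch > 0.1 reads as "above screen", pitch < -0.45 as "keyboard/lap".
    let pitchAway = pitch > EyeTrackingConstants.pitchUpThreshold
        || pitch < EyeTrackingConstants.pitchDownThreshold
    return yawAway || pitchAway
}

// A slight downward glance stays on screen; a strong head turn does not:
// poseLookingAway(yaw: 0.05, pitch: -0.20)  // false
// poseLookingAway(yaw: 0.30, pitch: -0.20)  // true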
EyeTrackingService.swift
@@ -12,72 +12,73 @@ import Vision
 @MainActor
 class EyeTrackingService: NSObject, ObservableObject {
     static let shared = EyeTrackingService()
 
     @Published var isEyeTrackingActive = false
     @Published var isEyesClosed = false
     @Published var userLookingAtScreen = true
     @Published var faceDetected = false
 
     private var captureSession: AVCaptureSession?
     private var videoOutput: AVCaptureVideoDataOutput?
-    private let videoDataOutputQueue = DispatchQueue(label: "com.gaze.videoDataOutput", qos: .userInitiated)
+    private let videoDataOutputQueue = DispatchQueue(
+        label: "com.gaze.videoDataOutput", qos: .userInitiated)
     private var _previewLayer: AVCaptureVideoPreviewLayer?
 
     // Logging throttle
     private var lastLogTime: Date = .distantPast
-    private let logInterval: TimeInterval = 0.5 // Log every 0.5 seconds
+    private let logInterval: TimeInterval = EyeTrackingConstants.logInterval
 
     var previewLayer: AVCaptureVideoPreviewLayer? {
         guard let session = captureSession else {
             _previewLayer = nil
             return nil
         }
 
         // Reuse existing layer if session hasn't changed
         if let existing = _previewLayer, existing.session === session {
             return existing
         }
 
         // Create new layer only when session changes
         let layer = AVCaptureVideoPreviewLayer(session: session)
         layer.videoGravity = .resizeAspectFill
         _previewLayer = layer
         return layer
     }
 
     private override init() {
         super.init()
     }
 
     func startEyeTracking() async throws {
         print("👁️ startEyeTracking called")
         guard !isEyeTrackingActive else {
             print("⚠️ Eye tracking already active")
             return
         }
 
         let cameraService = CameraAccessService.shared
         print("👁️ Camera authorized: \(cameraService.isCameraAuthorized)")
 
         if !cameraService.isCameraAuthorized {
             print("👁️ Requesting camera access...")
             try await cameraService.requestCameraAccess()
         }
 
         guard cameraService.isCameraAuthorized else {
             print("❌ Camera access denied")
             throw CameraAccessError.accessDenied
         }
 
         print("👁️ Setting up capture session...")
         try await setupCaptureSession()
 
         print("👁️ Starting capture session...")
         captureSession?.startRunning()
         isEyeTrackingActive = true
         print("✓ Eye tracking active")
     }
 
     func stopEyeTracking() {
         captureSession?.stopRunning()
         captureSession = nil
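The lastLogTime/logInterval pair in the hunk above implements a simple time-based throttle so per-frame Vision callbacks don't flood the console. A self-contained sketch of that pattern, assuming only the names from the diff (the LogThrottle wrapper itself is illustrative):

import Foundation

// Illustrative wrapper around the throttle the service keeps inline.
struct LogThrottle {
    private(set) var lastLogTime: Date = .distantPast
    let logInterval: TimeInterval = EyeTrackingConstants.logInterval

    // Returns true at most once per `logInterval` seconds.
    mutating func shouldLog(now: Date = Date()) -> Bool {
        guard now.timeIntervalSince(lastLogTime) >= logInterval else { return false }
        lastLogTime = now
        return true
    }
}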
@@ -88,194 +89,165 @@ class EyeTrackingService: NSObject, ObservableObject {
         userLookingAtScreen = true
         faceDetected = false
     }
 
     private func setupCaptureSession() async throws {
         let session = AVCaptureSession()
         session.sessionPreset = .vga640x480
 
         guard let videoDevice = AVCaptureDevice.default(for: .video) else {
             throw EyeTrackingError.noCamera
         }
 
         let videoInput = try AVCaptureDeviceInput(device: videoDevice)
         guard session.canAddInput(videoInput) else {
             throw EyeTrackingError.cannotAddInput
         }
         session.addInput(videoInput)
 
         let output = AVCaptureVideoDataOutput()
-        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
+        output.videoSettings = [
+            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
+        ]
         output.setSampleBufferDelegate(self, queue: videoDataOutputQueue)
         output.alwaysDiscardsLateVideoFrames = true
 
         guard session.canAddOutput(output) else {
             throw EyeTrackingError.cannotAddOutput
         }
         session.addOutput(output)
 
         self.captureSession = session
         self.videoOutput = output
     }
 
     private func processFaceObservations(_ observations: [VNFaceObservation]?) {
-        let shouldLog = Date().timeIntervalSince(lastLogTime) >= logInterval
-
-        if shouldLog {
-            print("🔍 Processing face observations...")
-        }
-
         guard let observations = observations, !observations.isEmpty else {
-            if shouldLog {
-                print("❌ No faces detected")
-            }
             faceDetected = false
             userLookingAtScreen = false
             return
         }
 
         faceDetected = true
         let face = observations.first!
 
-        if shouldLog {
-            print("✅ Face detected. Bounding box: \(face.boundingBox)")
-        }
-
         guard let landmarks = face.landmarks else {
-            if shouldLog {
-                print("❌ No face landmarks detected")
-            }
             return
         }
 
         // Check eye closure
         if let leftEye = landmarks.leftEye,
-            let rightEye = landmarks.rightEye {
-            let eyesClosed = detectEyesClosed(leftEye: leftEye, rightEye: rightEye, shouldLog: shouldLog)
+            let rightEye = landmarks.rightEye
+        {
+            let eyesClosed = detectEyesClosed(
+                leftEye: leftEye, rightEye: rightEye, shouldLog: false)
             self.isEyesClosed = eyesClosed
         }
 
         // Check gaze direction
-        let lookingAway = detectLookingAway(face: face, landmarks: landmarks, shouldLog: shouldLog)
+        let lookingAway = detectLookingAway(face: face, landmarks: landmarks, shouldLog: false)
         userLookingAtScreen = !lookingAway
-
-        if shouldLog {
-            lastLogTime = Date()
-        }
     }
 
-    private func detectEyesClosed(leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D, shouldLog: Bool) -> Bool {
+    private func detectEyesClosed(
+        leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D, shouldLog: Bool
+    ) -> Bool {
         guard leftEye.pointCount >= 2, rightEye.pointCount >= 2 else {
-            if shouldLog {
-                print("⚠️ Eye landmarks insufficient for eye closure detection")
-            }
             return false
         }
 
         let leftEyeHeight = calculateEyeHeight(leftEye, shouldLog: shouldLog)
         let rightEyeHeight = calculateEyeHeight(rightEye, shouldLog: shouldLog)
 
-        let closedThreshold: CGFloat = 0.02
+        let closedThreshold = EyeTrackingConstants.eyeClosedThreshold
 
         let isClosed = leftEyeHeight < closedThreshold && rightEyeHeight < closedThreshold
 
-        if shouldLog {
-            print("👁️ Eye closure detection - Left: \(leftEyeHeight) < \(closedThreshold) = \(leftEyeHeight < closedThreshold), Right: \(rightEyeHeight) < \(closedThreshold) = \(rightEyeHeight < closedThreshold)")
-            print("👁️ Eyes closed: \(isClosed)")
-        }
-
         return isClosed
     }
 
     private func calculateEyeHeight(_ eye: VNFaceLandmarkRegion2D, shouldLog: Bool) -> CGFloat {
         let points = eye.normalizedPoints
         guard points.count >= 2 else { return 0 }
 
         let yValues = points.map { $0.y }
         let maxY = yValues.max() ?? 0
         let minY = yValues.min() ?? 0
 
         let height = abs(maxY - minY)
 
-        if shouldLog {
-            print("📏 Eye height: \(height)")
-        }
-
         return height
     }
 
-    private func detectLookingAway(face: VNFaceObservation, landmarks: VNFaceLandmarks2D, shouldLog: Bool) -> Bool {
+    private func detectLookingAway(
+        face: VNFaceObservation, landmarks: VNFaceLandmarks2D, shouldLog: Bool
+    ) -> Bool {
         // 1. Face Pose Check (Yaw & Pitch)
         let yaw = face.yaw?.doubleValue ?? 0.0
         let pitch = face.pitch?.doubleValue ?? 0.0
 
-        let yawThreshold = 0.35 // ~20 degrees
-        let pitchThreshold = 0.4 // ~23 degrees
-        let poseLookingAway = abs(yaw) > yawThreshold || abs(pitch) > pitchThreshold
+        let yawThreshold = EyeTrackingConstants.yawThreshold
+        // Pitch check:
+        // - Camera at top = looking at screen is negative pitch
+        // - Looking above screen (straight ahead) is ~0 or positive -> Look Away
+        // - Looking at keyboard/lap is very negative -> Look Away
+        let pitchLookingAway = pitch > EyeTrackingConstants.pitchUpThreshold || pitch < EyeTrackingConstants.pitchDownThreshold
+
+        let poseLookingAway = abs(yaw) > yawThreshold || pitchLookingAway
 
         // 2. Eye Gaze Check (Pupil Position)
         var eyesLookingAway = false
 
         if let leftEye = landmarks.leftEye,
             let rightEye = landmarks.rightEye,
             let leftPupil = landmarks.leftPupil,
-            let rightPupil = landmarks.rightPupil {
+            let rightPupil = landmarks.rightPupil
+        {
 
             let leftRatio = calculatePupilHorizontalRatio(eye: leftEye, pupil: leftPupil)
             let rightRatio = calculatePupilHorizontalRatio(eye: rightEye, pupil: rightPupil)
 
             // Normal range for "looking center" is roughly 0.3 to 0.7
             // (0.0 = extreme right, 1.0 = extreme left relative to face)
             // Note: Camera is mirrored, so logic might be inverted
 
-            let minRatio = 0.25
-            let maxRatio = 0.75
+            let minRatio = EyeTrackingConstants.minPupilRatio
+            let maxRatio = EyeTrackingConstants.maxPupilRatio
 
             let leftLookingAway = leftRatio < minRatio || leftRatio > maxRatio
             let rightLookingAway = rightRatio < minRatio || rightRatio > maxRatio
 
             // Consider looking away if BOTH eyes are off-center
             eyesLookingAway = leftLookingAway && rightLookingAway
-
-            if shouldLog {
-                print("👁️ Pupil Ratios - Left: \(String(format: "%.2f", leftRatio)), Right: \(String(format: "%.2f", rightRatio))")
-                print("👁️ Eyes Looking Away: \(eyesLookingAway)")
-            }
         }
 
         let isLookingAway = poseLookingAway || eyesLookingAway
 
-        if shouldLog {
-            print("📊 Gaze detection - Yaw: \(yaw), Pitch: \(pitch)")
-            print("📉 Thresholds - Yaw: \(yawThreshold), Pitch: \(pitchThreshold)")
-            print("🎯 Looking away: \(isLookingAway) (Pose: \(poseLookingAway), Eyes: \(eyesLookingAway))")
-            print("👀 User looking at screen: \(!isLookingAway)")
-        }
-
         return isLookingAway
     }
 
-    private func calculatePupilHorizontalRatio(eye: VNFaceLandmarkRegion2D, pupil: VNFaceLandmarkRegion2D) -> Double {
+    private func calculatePupilHorizontalRatio(
+        eye: VNFaceLandmarkRegion2D, pupil: VNFaceLandmarkRegion2D
+    ) -> Double {
         let eyePoints = eye.normalizedPoints
         let pupilPoints = pupil.normalizedPoints
 
         guard !eyePoints.isEmpty, !pupilPoints.isEmpty else { return 0.5 }
 
         // Get eye horizontal bounds
         let eyeMinX = eyePoints.map { $0.x }.min() ?? 0
         let eyeMaxX = eyePoints.map { $0.x }.max() ?? 0
         let eyeWidth = eyeMaxX - eyeMinX
 
         guard eyeWidth > 0 else { return 0.5 }
 
         // Get pupil center X
         let pupilCenterX = pupilPoints.map { $0.x }.reduce(0, +) / Double(pupilPoints.count)
 
         // Calculate ratio (0.0 to 1.0)
         // 0.0 = Right side of eye (camera view)
         // 1.0 = Left side of eye (camera view)
         let ratio = (pupilCenterX - eyeMinX) / eyeWidth
 
         return ratio
     }
 }
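The pupil math above boils down to ratio = (pupilCenterX - eyeMinX) / eyeWidth. A short worked example with made-up landmark coordinates shows how a ratio lands against the new 0.35/0.65 bounds:

import CoreGraphics

// Worked example of the pupil-ratio math; the point values are invented.
let eyePoints: [CGPoint] = [CGPoint(x: 0.30, y: 0.50), CGPoint(x: 0.70, y: 0.52)]
let pupilPoints: [CGPoint] = [CGPoint(x: 0.56, y: 0.51)]

let eyeMinX = eyePoints.map(\.x).min()!                 // 0.30
let eyeWidth = eyePoints.map(\.x).max()! - eyeMinX      // 0.40
let pupilCenterX = pupilPoints.map(\.x).reduce(0, +) / CGFloat(pupilPoints.count)  // 0.56

let ratio = (pupilCenterX - eyeMinX) / eyeWidth  // (0.56 - 0.30) / 0.40 = 0.65

// With minPupilRatio = 0.35 and maxPupilRatio = 0.65, 0.65 sits exactly on the
// boundary: it is not strictly greater than maxPupilRatio, so this eye still
// counts as centered; anything beyond reads as looking left in the camera view.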
@@ -291,28 +263,28 @@ extension EyeTrackingService: AVCaptureVideoDataOutputSampleBufferDelegate {
         guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
             return
         }
 
         let request = VNDetectFaceLandmarksRequest { [weak self] request, error in
             guard let self = self else { return }
 
             if let error = error {
                 print("Face detection error: \(error)")
                 return
             }
 
             Task { @MainActor in
                 self.processFaceObservations(request.results as? [VNFaceObservation])
             }
         }
 
         request.revision = VNDetectFaceLandmarksRequestRevision3
 
         let imageRequestHandler = VNImageRequestHandler(
             cvPixelBuffer: pixelBuffer,
             orientation: .leftMirrored,
             options: [:]
         )
 
         do {
             try imageRequestHandler.perform([request])
         } catch {
@@ -328,7 +300,7 @@ enum EyeTrackingError: Error, LocalizedError {
     case cannotAddInput
     case cannotAddOutput
     case visionRequestFailed
 
     var errorDescription: String? {
         switch self {
         case .noCamera:
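For context beyond the diff, a caller would drive the service roughly as below. GazeSessionController is hypothetical; the shared singleton, startEyeTracking(), and stopEyeTracking() come from the code above.

import Combine

// Usage sketch (not part of this commit).
@MainActor
final class GazeSessionController {
    func begin() async {
        do {
            try await EyeTrackingService.shared.startEyeTracking()
        } catch {
            // CameraAccessError / EyeTrackingError surface here via LocalizedError.
            print("Eye tracking failed to start: \(error.localizedDescription)")
        }
    }

    func end() {
        EyeTrackingService.shared.stopEyeTracking()
    }
}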