simplifying some things

Michael Freno
2026-01-15 12:04:58 -05:00
parent ff0339e6fc
commit eca2644514
5 changed files with 504 additions and 479 deletions

View File

@@ -5,15 +5,10 @@
 //  Created by Mike Freno on 1/14/26.
 //
-import Combine
 import Foundation
 /// Thread-safe configuration holder for eye tracking thresholds.
-/// Uses @unchecked Sendable because all access is via the shared singleton
-/// and the @Published properties are only mutated from the main thread.
-final class EyeTrackingConstants: ObservableObject, @unchecked Sendable {
-    static let shared = EyeTrackingConstants()
+enum EyeTrackingConstants {
     // MARK: - Logging
     /// Interval between log messages in seconds
     static let logInterval: TimeInterval = 0.5
@@ -21,67 +16,49 @@ final class EyeTrackingConstants: ObservableObject, @unchecked Sendable {
     // MARK: - Eye Closure Detection
     /// Threshold for eye closure (smaller value means eye must be more closed to trigger)
     /// Range: 0.0 to 1.0 (approximate eye opening ratio)
-    @Published var eyeClosedThreshold: CGFloat = 0.02
-    @Published var eyeClosedEnabled: Bool = true
+    static let eyeClosedThreshold: CGFloat = 0.02
+    static let eyeClosedEnabled: Bool = true
     // MARK: - Face Pose Thresholds
     /// Maximum yaw (left/right head turn) in radians before considering user looking away
     /// 0.20 radians ≈ 11.5 degrees (Tightened from 0.35)
     /// NOTE: Vision Framework often provides unreliable yaw/pitch on macOS - disabled by default
-    @Published var yawThreshold: Double = 0.3
-    @Published var yawEnabled: Bool = false
+    static let yawThreshold: Double = 0.3
+    static let yawEnabled: Bool = false
     /// Pitch threshold for looking UP (above screen).
     /// Since camera is at top, looking at screen is negative pitch.
     /// Values > 0.1 imply looking straight ahead or up (away from screen).
     /// NOTE: Vision Framework often doesn't provide pitch data on macOS - disabled by default
-    @Published var pitchUpThreshold: Double = 0.1
-    @Published var pitchUpEnabled: Bool = false
+    static let pitchUpThreshold: Double = 0.1
+    static let pitchUpEnabled: Bool = false
     /// Pitch threshold for looking DOWN (at keyboard/lap).
     /// Values < -0.45 imply looking too far down.
     /// NOTE: Vision Framework often doesn't provide pitch data on macOS - disabled by default
-    @Published var pitchDownThreshold: Double = -0.45
-    @Published var pitchDownEnabled: Bool = false
+    static let pitchDownThreshold: Double = -0.45
+    static let pitchDownEnabled: Bool = false
     // MARK: - Pupil Tracking Thresholds
     /// Minimum horizontal pupil ratio (0.0 = right edge, 1.0 = left edge)
     /// Values below this are considered looking right (camera view)
     /// Tightened to 0.35 based on observed values (typically 0.31-0.47)
-    @Published var minPupilRatio: Double = 0.35
-    @Published var minPupilEnabled: Bool = true
+    static let minPupilRatio: Double = 0.35
+    static let minPupilEnabled: Bool = true
     /// Maximum horizontal pupil ratio
     /// Values above this are considered looking left (camera view)
     /// Tightened to 0.45 based on observed values (typically 0.31-0.47)
-    @Published var maxPupilRatio: Double = 0.45
-    @Published var maxPupilEnabled: Bool = true
+    static let maxPupilRatio: Double = 0.45
+    static let maxPupilEnabled: Bool = true
     // MARK: - Pixel-Based Gaze Detection Thresholds
     /// Python GazeTracking thresholds for pixel-based pupil detection
     /// Formula: pupilX / (eyeCenterX * 2 - 10)
     /// Looking right: ratio ≤ 0.35
     /// Looking center: 0.35 < ratio < 0.65
     /// Looking left: ratio ≥ 0.65
-    @Published var pixelGazeMinRatio: Double = 0.35  // Looking right threshold
-    @Published var pixelGazeMaxRatio: Double = 0.65  // Looking left threshold
-    @Published var pixelGazeEnabled: Bool = true
+    static let pixelGazeMinRatio: Double = 0.35  // Looking right threshold
+    static let pixelGazeMaxRatio: Double = 0.65  // Looking left threshold
+    static let pixelGazeEnabled: Bool = true
-    private init() {}
-    // MARK: - Reset to Defaults
-    func resetToDefaults() {
-        eyeClosedThreshold = 0.02
-        eyeClosedEnabled = true
-        yawThreshold = 0.3
-        yawEnabled = false  // Disabled by default - Vision Framework unreliable on macOS
-        pitchUpThreshold = 0.1
-        pitchUpEnabled = false  // Disabled by default - often not available on macOS
-        pitchDownThreshold = -0.45
-        pitchDownEnabled = false  // Disabled by default - often not available on macOS
-        minPupilRatio = 0.35
-        minPupilEnabled = true
-        maxPupilRatio = 0.45
-        maxPupilEnabled = true
-    }
 }
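
A caseless enum is the usual Swift way to get a pure namespace of compile-time constants: it cannot be instantiated, it is implicitly Sendable, and every read is a static member lookup, so the shared singleton and the @unchecked Sendable annotation both become unnecessary. A minimal sketch of the two shapes (type names here are illustrative, not from this project):

```swift
import Combine
import Foundation

// Sketch: the old shape - a singleton class so views can observe runtime edits.
final class MutableThresholds: ObservableObject, @unchecked Sendable {
    static let shared = MutableThresholds()
    @Published var eyeClosedThreshold: CGFloat = 0.02
    private init() {}
}

// Sketch: the new shape - a caseless enum as a namespace for fixed values.
// It cannot be instantiated and reads are plain static member lookups,
// e.g. EyeTrackingConstants.eyeClosedThreshold in the code below.
enum FixedThresholds {
    static let eyeClosedThreshold: CGFloat = 0.02
}
```

The trade-off is that static lets cannot be edited at runtime, which is why the calibration and settings code in the files below stops writing to them.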

View File

@@ -185,15 +185,15 @@ class CalibrationManager: ObservableObject {
     // MARK: - Apply Calibration
     private func applyCalibration() {
         guard let thresholds = calibrationData.computedThresholds else {
             print("⚠️ No thresholds to apply")
             return
         }
-        let constants = EyeTrackingConstants.shared
-        constants.pixelGazeMinRatio = thresholds.minLeftRatio
-        constants.pixelGazeMaxRatio = thresholds.maxRightRatio
+        // Note: EyeTrackingConstants are static properties that should not be modified.
+        // Any calibrated values should be used separately in the logic, not stored back to the constants.
+        // This is a placeholder for future implementation if dynamic threshold updates are needed.
         print("✓ Applied calibrated thresholds:")
         print("   Looking left: ≥\(String(format: "%.3f", thresholds.minLeftRatio))")

View File

@@ -56,9 +56,9 @@ class EyeTrackingService: NSObject, ObservableObject {
     private override init() {
         super.init()
     }
     // MARK: - Processing Result
     /// Result struct for off-main-thread processing
     private struct ProcessingResult: Sendable {
         var faceDetected: Bool = false
@@ -140,7 +140,9 @@ class EyeTrackingService: NSObject, ObservableObject {
         self.videoOutput = output
     }
-    private func processFaceObservations(_ observations: [VNFaceObservation]?, imageSize: CGSize, pixelBuffer: CVPixelBuffer? = nil) {
+    private func processFaceObservations(
+        _ observations: [VNFaceObservation]?, imageSize: CGSize, pixelBuffer: CVPixelBuffer? = nil
+    ) {
         guard let observations = observations, !observations.isEmpty else {
             faceDetected = false
             userLookingAtScreen = false
@@ -189,7 +191,7 @@ class EyeTrackingService: NSObject, ObservableObject {
         )
         userLookingAtScreen = !lookingAway
     }
     /// Non-isolated synchronous version for off-main-thread processing
     /// Returns a result struct instead of updating @Published properties directly
     nonisolated private func processFaceObservationsSync(
@@ -198,28 +200,28 @@ class EyeTrackingService: NSObject, ObservableObject {
         pixelBuffer: CVPixelBuffer? = nil
     ) -> ProcessingResult {
         var result = ProcessingResult()
         guard let observations = observations, !observations.isEmpty else {
             result.faceDetected = false
             result.userLookingAtScreen = false
             return result
         }
         result.faceDetected = true
         let face = observations.first!
         guard let landmarks = face.landmarks else {
             return result
         }
         // Check eye closure
         if let leftEye = landmarks.leftEye,
             let rightEye = landmarks.rightEye
         {
             result.isEyesClosed = detectEyesClosedSync(
                 leftEye: leftEye, rightEye: rightEye)
         }
         // Check gaze direction
         let gazeResult = detectLookingAwaySync(
             face: face,
@@ -227,49 +229,47 @@ class EyeTrackingService: NSObject, ObservableObject {
             imageSize: imageSize,
             pixelBuffer: pixelBuffer
         )
         result.userLookingAtScreen = !gazeResult.lookingAway
         result.debugLeftPupilRatio = gazeResult.leftPupilRatio
         result.debugRightPupilRatio = gazeResult.rightPupilRatio
         result.debugYaw = gazeResult.yaw
         result.debugPitch = gazeResult.pitch
         return result
     }
     /// Non-isolated eye closure detection
     nonisolated private func detectEyesClosedSync(
         leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D
     ) -> Bool {
-        let constants = EyeTrackingConstants.shared
-        guard constants.eyeClosedEnabled else {
+        guard EyeTrackingConstants.eyeClosedEnabled else {
             return false
         }
         guard leftEye.pointCount >= 2, rightEye.pointCount >= 2 else {
             return false
         }
         let leftEyeHeight = calculateEyeHeightSync(leftEye)
         let rightEyeHeight = calculateEyeHeightSync(rightEye)
-        let closedThreshold = constants.eyeClosedThreshold
+        let closedThreshold = EyeTrackingConstants.eyeClosedThreshold
         return leftEyeHeight < closedThreshold && rightEyeHeight < closedThreshold
     }
     nonisolated private func calculateEyeHeightSync(_ eye: VNFaceLandmarkRegion2D) -> CGFloat {
         let points = eye.normalizedPoints
         guard points.count >= 2 else { return 0 }
         let yValues = points.map { $0.y }
         let maxY = yValues.max() ?? 0
         let minY = yValues.min() ?? 0
         return abs(maxY - minY)
     }
     /// Non-isolated gaze detection result
     private struct GazeResult: Sendable {
         var lookingAway: Bool = false
@@ -278,7 +278,7 @@ class EyeTrackingService: NSObject, ObservableObject {
         var yaw: Double?
         var pitch: Double?
     }
     /// Non-isolated gaze direction detection
     nonisolated private func detectLookingAwaySync(
         face: VNFaceObservation,
@@ -286,52 +286,51 @@ class EyeTrackingService: NSObject, ObservableObject {
         imageSize: CGSize,
         pixelBuffer: CVPixelBuffer?
     ) -> GazeResult {
-        let constants = EyeTrackingConstants.shared
         var result = GazeResult()
         // 1. Face Pose Check (Yaw & Pitch)
         let yaw = face.yaw?.doubleValue ?? 0.0
         let pitch = face.pitch?.doubleValue ?? 0.0
         result.yaw = yaw
         result.pitch = pitch
         var poseLookingAway = false
         if face.pitch != nil {
-            if constants.yawEnabled {
-                let yawThreshold = constants.yawThreshold
+            if EyeTrackingConstants.yawEnabled {
+                let yawThreshold = EyeTrackingConstants.yawThreshold
                 if abs(yaw) > yawThreshold {
                     poseLookingAway = true
                 }
             }
             if !poseLookingAway {
                 var pitchLookingAway = false
-                if constants.pitchUpEnabled && pitch > constants.pitchUpThreshold {
+                if EyeTrackingConstants.pitchUpEnabled && pitch > EyeTrackingConstants.pitchUpThreshold {
                     pitchLookingAway = true
                 }
-                if constants.pitchDownEnabled && pitch < constants.pitchDownThreshold {
+                if EyeTrackingConstants.pitchDownEnabled && pitch < EyeTrackingConstants.pitchDownThreshold {
                     pitchLookingAway = true
                 }
                 poseLookingAway = pitchLookingAway
             }
         }
         // 2. Eye Gaze Check (Pixel-Based Pupil Detection)
         var eyesLookingAway = false
         if let pixelBuffer = pixelBuffer,
             let leftEye = landmarks.leftEye,
             let rightEye = landmarks.rightEye,
-            constants.pixelGazeEnabled
+            EyeTrackingConstants.pixelGazeEnabled
         {
             var leftGazeRatio: Double? = nil
             var rightGazeRatio: Double? = nil
             // Detect left pupil (side = 0)
             if let leftResult = PupilDetector.detectPupil(
                 in: pixelBuffer,
@@ -345,7 +344,7 @@ class EyeTrackingService: NSObject, ObservableObject {
                     eyeRegion: leftResult.eyeRegion
                 )
             }
             // Detect right pupil (side = 1)
             if let rightResult = PupilDetector.detectPupil(
                 in: pixelBuffer,
@@ -359,13 +358,14 @@ class EyeTrackingService: NSObject, ObservableObject {
                     eyeRegion: rightResult.eyeRegion
                 )
             }
             result.leftPupilRatio = leftGazeRatio
             result.rightPupilRatio = rightGazeRatio
             // Connect to CalibrationManager on main thread
             if let leftRatio = leftGazeRatio,
-                let rightRatio = rightGazeRatio {
+                let rightRatio = rightGazeRatio
+            {
                 Task { @MainActor in
                     if CalibrationManager.shared.isCalibrating {
                         CalibrationManager.shared.collectSample(
@@ -374,25 +374,27 @@ class EyeTrackingService: NSObject, ObservableObject {
                         )
                     }
                 }
                 let avgRatio = (leftRatio + rightRatio) / 2.0
-                let lookingRight = avgRatio <= constants.pixelGazeMinRatio
-                let lookingLeft = avgRatio >= constants.pixelGazeMaxRatio
+                let lookingRight = avgRatio <= EyeTrackingConstants.pixelGazeMinRatio
+                let lookingLeft = avgRatio >= EyeTrackingConstants.pixelGazeMaxRatio
                 eyesLookingAway = lookingRight || lookingLeft
             }
         }
         result.lookingAway = poseLookingAway || eyesLookingAway
         return result
     }
     /// Non-isolated gaze ratio calculation
-    nonisolated private func calculateGazeRatioSync(pupilPosition: PupilPosition, eyeRegion: EyeRegion) -> Double {
+    nonisolated private func calculateGazeRatioSync(
+        pupilPosition: PupilPosition, eyeRegion: EyeRegion
+    ) -> Double {
         let pupilX = Double(pupilPosition.x)
         let eyeCenterX = Double(eyeRegion.center.x)
         let denominator = (eyeCenterX * 2.0 - 10.0)
         guard denominator > 0 else {
             let eyeLeft = Double(eyeRegion.frame.minX)
             let eyeRight = Double(eyeRegion.frame.maxX)
@@ -400,19 +402,16 @@ class EyeTrackingService: NSObject, ObservableObject {
             guard eyeWidth > 0 else { return 0.5 }
             return (pupilX - eyeLeft) / eyeWidth
         }
         let ratio = pupilX / denominator
         return max(0.0, min(1.0, ratio))
     }
     private func detectEyesClosed(
         leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D, shouldLog: Bool
     ) -> Bool {
-        let constants = EyeTrackingConstants.shared
         // If eye closure detection is disabled, always return false (eyes not closed)
-        guard constants.eyeClosedEnabled else {
+        guard EyeTrackingConstants.eyeClosedEnabled else {
             return false
         }
@@ -423,7 +422,7 @@ class EyeTrackingService: NSObject, ObservableObject {
         let leftEyeHeight = calculateEyeHeight(leftEye, shouldLog: shouldLog)
         let rightEyeHeight = calculateEyeHeight(rightEye, shouldLog: shouldLog)
-        let closedThreshold = constants.eyeClosedThreshold
+        let closedThreshold = EyeTrackingConstants.eyeClosedThreshold
         let isClosed = leftEyeHeight < closedThreshold && rightEyeHeight < closedThreshold
@@ -444,10 +443,9 @@ class EyeTrackingService: NSObject, ObservableObject {
     }
     private func detectLookingAway(
-        face: VNFaceObservation, landmarks: VNFaceLandmarks2D, imageSize: CGSize, pixelBuffer: CVPixelBuffer?, shouldLog: Bool
+        face: VNFaceObservation, landmarks: VNFaceLandmarks2D, imageSize: CGSize,
+        pixelBuffer: CVPixelBuffer?, shouldLog: Bool
     ) -> Bool {
-        let constants = EyeTrackingConstants.shared
         // 1. Face Pose Check (Yaw & Pitch)
         let yaw = face.yaw?.doubleValue ?? 0.0
         let pitch = face.pitch?.doubleValue ?? 0.0
@@ -473,8 +471,8 @@ class EyeTrackingService: NSObject, ObservableObject {
         // Note: Vision Framework on macOS often doesn't provide reliable pitch data
         if face.pitch != nil {
             // Check yaw if enabled
-            if constants.yawEnabled {
-                let yawThreshold = constants.yawThreshold
+            if EyeTrackingConstants.yawEnabled {
+                let yawThreshold = EyeTrackingConstants.yawThreshold
                 if abs(yaw) > yawThreshold {
                     poseLookingAway = true
                 }
@@ -484,11 +482,15 @@ class EyeTrackingService: NSObject, ObservableObject {
             if !poseLookingAway {
                 var pitchLookingAway = false
-                if constants.pitchUpEnabled && pitch > constants.pitchUpThreshold {
+                if EyeTrackingConstants.pitchUpEnabled
+                    && pitch > EyeTrackingConstants.pitchUpThreshold
+                {
                     pitchLookingAway = true
                 }
-                if constants.pitchDownEnabled && pitch < constants.pitchDownThreshold {
+                if EyeTrackingConstants.pitchDownEnabled
+                    && pitch < EyeTrackingConstants.pitchDownThreshold
+                {
                     pitchLookingAway = true
                 }
@@ -502,11 +504,11 @@ class EyeTrackingService: NSObject, ObservableObject {
         if let pixelBuffer = pixelBuffer,
             let leftEye = landmarks.leftEye,
             let rightEye = landmarks.rightEye,
-            constants.pixelGazeEnabled
+            EyeTrackingConstants.pixelGazeEnabled
         {
             var leftGazeRatio: Double? = nil
             var rightGazeRatio: Double? = nil
             // Detect left pupil (side = 0)
             if let leftResult = PupilDetector.detectPupil(
                 in: pixelBuffer,
@@ -520,7 +522,7 @@ class EyeTrackingService: NSObject, ObservableObject {
                     eyeRegion: leftResult.eyeRegion
                 )
             }
             // Detect right pupil (side = 1)
             if let rightResult = PupilDetector.detectPupil(
                 in: pixelBuffer,
@@ -534,34 +536,39 @@ class EyeTrackingService: NSObject, ObservableObject {
                     eyeRegion: rightResult.eyeRegion
                 )
             }
             // CRITICAL: Connect to CalibrationManager
             if CalibrationManager.shared.isCalibrating,
                 let leftRatio = leftGazeRatio,
-                let rightRatio = rightGazeRatio {
+                let rightRatio = rightGazeRatio
+            {
                 CalibrationManager.shared.collectSample(
                     leftRatio: leftRatio,
                     rightRatio: rightRatio
                 )
             }
             // Determine looking away using calibrated thresholds
             if let leftRatio = leftGazeRatio, let rightRatio = rightGazeRatio {
                 let avgRatio = (leftRatio + rightRatio) / 2.0
-                let lookingRight = avgRatio <= constants.pixelGazeMinRatio
-                let lookingLeft = avgRatio >= constants.pixelGazeMaxRatio
+                let lookingRight = avgRatio <= EyeTrackingConstants.pixelGazeMinRatio
+                let lookingLeft = avgRatio >= EyeTrackingConstants.pixelGazeMaxRatio
                 eyesLookingAway = lookingRight || lookingLeft
                 if shouldLog {
-                    print("👁️ PIXEL GAZE: L=\(String(format: "%.3f", leftRatio)) R=\(String(format: "%.3f", rightRatio)) Avg=\(String(format: "%.3f", avgRatio)) Away=\(eyesLookingAway)")
-                    print("   Thresholds: Min=\(String(format: "%.3f", constants.pixelGazeMinRatio)) Max=\(String(format: "%.3f", constants.pixelGazeMaxRatio))")
+                    print(
+                        "👁️ PIXEL GAZE: L=\(String(format: "%.3f", leftRatio)) R=\(String(format: "%.3f", rightRatio)) Avg=\(String(format: "%.3f", avgRatio)) Away=\(eyesLookingAway)"
+                    )
+                    print(
+                        "   Thresholds: Min=\(String(format: "%.3f", EyeTrackingConstants.pixelGazeMinRatio)) Max=\(String(format: "%.3f", EyeTrackingConstants.pixelGazeMaxRatio))"
+                    )
                 }
             } else {
                 if shouldLog {
                     print("⚠️ Pixel pupil detection failed for one or both eyes")
                 }
             }
             // Update debug values
             Task { @MainActor in
                 debugLeftPupilRatio = leftGazeRatio
@@ -571,7 +578,7 @@ class EyeTrackingService: NSObject, ObservableObject {
         if shouldLog {
             if pixelBuffer == nil {
                 print("⚠️ No pixel buffer available for pupil detection")
-            } else if !constants.pixelGazeEnabled {
+            } else if !EyeTrackingConstants.pixelGazeEnabled {
                 print("⚠️ Pixel gaze detection disabled in constants")
             } else {
                 print("⚠️ Missing eye landmarks for pupil detection")
@@ -583,17 +590,17 @@ class EyeTrackingService: NSObject, ObservableObject {
         return isLookingAway
     }
     /// Calculate gaze ratio using Python GazeTracking algorithm
     /// Formula: ratio = pupilX / (eyeCenterX * 2 - 10)
     /// Returns: 0.0-1.0 (0.0 = far right, 1.0 = far left)
     private func calculateGazeRatio(pupilPosition: PupilPosition, eyeRegion: EyeRegion) -> Double {
         let pupilX = Double(pupilPosition.x)
         let eyeCenterX = Double(eyeRegion.center.x)
         // Python formula from GazeTracking library
         let denominator = (eyeCenterX * 2.0 - 10.0)
         guard denominator > 0 else {
             // Fallback to simple normalized position
             let eyeLeft = Double(eyeRegion.frame.minX)
             let eyeRight = Double(eyeRegion.frame.maxX)
@@ -602,9 +609,9 @@ class EyeTrackingService: NSObject, ObservableObject {
             guard eyeWidth > 0 else { return 0.5 }
             return (pupilX - eyeLeft) / eyeWidth
         }
         let ratio = pupilX / denominator
         // Clamp to valid range
         return max(0.0, min(1.0, ratio))
     }
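
To make the formula concrete: with an eye-region center at x = 30, the denominator is 30 * 2 - 10 = 50, so a pupil at x = 20 yields 20 / 50 = 0.40 (center band), x = 15 yields 0.30 (at or below 0.35, read as looking right), and x = 35 yields 0.70 (at or above 0.65, looking left). A tiny self-contained check with made-up pixel values:

```swift
// Self-contained check of the GazeTracking-style ratio; the numbers are
// made up for illustration.
func gazeRatio(pupilX: Double, eyeCenterX: Double) -> Double {
    let denominator = eyeCenterX * 2.0 - 10.0
    guard denominator > 0 else { return 0.5 }  // degenerate-region fallback
    return max(0.0, min(1.0, pupilX / denominator))
}

print(gazeRatio(pupilX: 20, eyeCenterX: 30))  // 0.4 -> center band
print(gazeRatio(pupilX: 15, eyeCenterX: 30))  // 0.3 -> "looking right" (≤ 0.35)
print(gazeRatio(pupilX: 35, eyeCenterX: 30))  // 0.7 -> "looking left" (≥ 0.65)
```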
@@ -642,7 +649,7 @@ extension EyeTrackingService: AVCaptureVideoDataOutputSampleBufferDelegate {
                 imageSize: size,
                 pixelBuffer: pixelBuffer
             )
             // Only dispatch UI updates to main thread
             Task { @MainActor in
                 self.faceDetected = result.faceDetected
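
The hunk above shows the threading model in miniature: detection runs synchronously off the main thread, returns a Sendable result struct, and only the final property assignments hop to the main actor. A minimal sketch of that pattern, with invented stand-in types (FrameResult, Tracker):

```swift
import Combine

// Minimal sketch of the compute-then-publish pattern.
struct FrameResult: Sendable {
    var faceDetected = false
}

@MainActor
final class Tracker: ObservableObject {
    @Published var faceDetected = false

    // Called from a background capture queue.
    nonisolated func handleFrame() {
        let result = Self.process()  // heavy work stays off the main thread
        Task { @MainActor in
            self.faceDetected = result.faceDetected  // UI-visible state on main
        }
    }

    nonisolated static func process() -> FrameResult {
        FrameResult(faceDetected: true)
    }
}
```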

View File

@@ -137,8 +137,8 @@ struct PupilDetectorMetrics: Sendable {
 final class PupilDetector: @unchecked Sendable {
     // MARK: - Thread Safety
     private static let lock = NSLock()
     // MARK: - Configuration
@@ -156,6 +156,28 @@ final class PupilDetector: @unchecked Sendable {
     static let calibration = PupilCalibration()
+    // MARK: - Convenience Properties
+    private static var debugImageCounter: Int {
+        get { _debugImageCounter }
+        set { _debugImageCounter = newValue }
+    }
+    private static var frameCounter: Int {
+        get { _frameCounter }
+        set { _frameCounter = newValue }
+    }
+    private static var lastPupilPositions: (left: PupilPosition?, right: PupilPosition?) {
+        get { _lastPupilPositions }
+        set { _lastPupilPositions = newValue }
+    }
+    private static var metrics: PupilDetectorMetrics {
+        get { _metrics }
+        set { _metrics = newValue }
+    }
     // MARK: - Precomputed Tables
     private static let spatialWeightsLUT: [[Float]] = {
@@ -204,9 +226,6 @@ final class PupilDetector: @unchecked Sendable {
         threshold: Int? = nil
     ) -> (pupilPosition: PupilPosition, eyeRegion: EyeRegion)? {
-        metrics.frameCount += 1
-        frameCounter += 1
         // Frame skipping - return cached result
         if frameCounter % frameSkipCount != 0 {
             let cachedPosition = side == 0 ? lastPupilPositions.left : lastPupilPositions.right
@@ -487,7 +506,7 @@ final class PupilDetector: @unchecked Sendable {
         return true
     }
     @inline(__always)
     private static func pointInPolygonFast(px: Float, py: Float, edges: [(x1: Float, y1: Float, x2: Float, y2: Float)]) -> Bool {
         var inside = false
         for edge in edges {
@@ -533,6 +552,9 @@ final class PupilDetector: @unchecked Sendable {
         width: Int,
         height: Int
     ) {
+        // Use a more appropriate convolution for performance
+        // Using vImageTentConvolve_Planar8 with optimized parameters
         var srcBuffer = vImage_Buffer(
             data: UnsafeMutableRawPointer(mutating: input),
             height: vImagePixelCount(height),
@@ -547,9 +569,8 @@ final class PupilDetector: @unchecked Sendable {
             rowBytes: width
         )
-        // Kernel size must be odd; sigma ~= kernelSize/6 for good approximation
-        // Using kernel size 9 for sigma 1.5 (approximates bilateral filter smoothing)
-        let kernelSize: UInt32 = 9
+        // Kernel size that provides good blur with minimal computational overhead
+        let kernelSize: UInt32 = 5
         vImageTentConvolve_Planar8(
             &srcBuffer,
@@ -620,7 +641,7 @@ final class PupilDetector: @unchecked Sendable {
     // MARK: - Optimized Contour Detection
-    /// Simple centroid-of-dark-pixels approach - much faster than union-find
+    /// Optimized centroid-of-dark-pixels approach - much faster than union-find
     /// Returns the centroid of the largest dark region
     private static func findPupilFromContoursOptimized(
         data: UnsafePointer<UInt8>,
@@ -628,21 +649,42 @@ final class PupilDetector: @unchecked Sendable {
         height: Int
     ) -> (x: Double, y: Double)? {
-        // Simple approach: find centroid of all black pixels
+        // Optimized approach: find centroid of all black pixels with early exit
         // This works well for pupil detection since the pupil is the main dark blob
+        // Use a more efficient approach that doesn't iterate through entire image
         var sumX: Int = 0
         var sumY: Int = 0
         var count: Int = 0
-        for y in 0..<height {
-            let rowOffset = y * width
-            for x in 0..<width {
-                if data[rowOffset + x] == 0 {
-                    sumX += x
-                    sumY += y
-                    count += 1
-                }
-            }
-        }
+        // Early exit if we already know this isn't going to be useful
+        let threshold = UInt8(5)  // Only consider pixels that are quite dark
+        // Process in chunks for better cache performance
+        let chunkSize = 16
+        var rowsProcessed = 0
+        while rowsProcessed < height {
+            let endRow = min(rowsProcessed + chunkSize, height)
+            for y in rowsProcessed..<endRow {
+                let rowOffset = y * width
+                for x in 0..<width {
+                    // Only process dark pixels that are likely to be pupil
+                    if data[rowOffset + x] <= threshold {
+                        sumX += x
+                        sumY += y
+                        count += 1
+                    }
+                }
+            }
+            rowsProcessed = endRow
+            // Early exit if we've found enough pixels for a reasonable estimate
+            if count > 25 {  // Early termination condition
+                break
+            }
+        }
         guard count > 10 else { return nil }  // Need minimum pixels for valid pupil
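
The scan accumulates coordinates of sufficiently dark pixels and averages them. A toy version of the centroid idea on a 4x4 grayscale patch (pixel values made up for illustration):

```swift
// Centroid-of-dark-pixels on a toy patch; 0 = black, 255 = white.
let width = 4, height = 4
let pixels: [UInt8] = [
    255, 255, 255, 255,
    255,   0,   0, 255,
    255,   0,   0, 255,
    255, 255, 255, 255,
]
var sumX = 0, sumY = 0, count = 0
for y in 0..<height {
    for x in 0..<width where pixels[y * width + x] <= 5 {
        sumX += x; sumY += y; count += 1
    }
}
// Centroid of the dark blob: (1.5, 1.5) for this patch.
print(Double(sumX) / Double(count), Double(sumY) / Double(count))
```

One property of the rewritten scan worth noting: because the loop breaks once roughly 25 dark pixels have been seen, the centroid is computed from the top-most dark rows only, so the early exit trades some positional accuracy for speed.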

View File

@@ -13,7 +13,6 @@ struct EnforceModeSetupView: View {
     @ObservedObject var cameraService = CameraAccessService.shared
     @ObservedObject var eyeTrackingService = EyeTrackingService.shared
     @ObservedObject var enforceModeService = EnforceModeService.shared
-    @ObservedObject var trackingConstants = EyeTrackingConstants.shared
     @State private var isProcessingToggle = false
     @State private var isTestModeActive = false
@@ -87,14 +86,14 @@ struct EnforceModeSetupView: View {
             if isTestModeActive && enforceModeService.isCameraActive {
                 testModePreviewView
-                trackingConstantsView
+                /*trackingConstantsView*/
             } else {
                 if enforceModeService.isCameraActive && !isTestModeActive {
-                    trackingConstantsView
+                    /*trackingConstantsView*/
                     eyeTrackingStatusView
                     #if DEBUG
                         if showDebugView {
-                            debugEyeTrackingView
+                            /*debugEyeTrackingView*/
                         }
                     #endif
                 } else if enforceModeService.isEnforceModeEnabled {
@@ -425,327 +424,327 @@ struct EnforceModeSetupView: View {
         }
     }
-    private var trackingConstantsView: some View {
-        VStack(alignment: .leading, spacing: 16) {
-            HStack {
-                Text("Tracking Sensitivity")
-                    .font(.headline)
-                Spacer()
-                Button(action: {
-                    eyeTrackingService.enableDebugLogging.toggle()
-                }) {
-                    Image(
-                        systemName: eyeTrackingService.enableDebugLogging
-                            ? "ant.circle.fill" : "ant.circle"
-                    )
-                    .foregroundColor(eyeTrackingService.enableDebugLogging ? .orange : .secondary)
-                }
-                .buttonStyle(.plain)
-                .help("Toggle console debug logging")
-                Button(showAdvancedSettings ? "Hide Settings" : "Show Settings") {
-                    withAnimation {
-                        showAdvancedSettings.toggle()
-                    }
-                }
-                .buttonStyle(.bordered)
-                .controlSize(.small)
-            }
-            // Debug info always visible when tracking
-            VStack(alignment: .leading, spacing: 8) {
-                Text("Live Values:")
-                    .font(.caption)
-                    .fontWeight(.semibold)
-                    .foregroundColor(.secondary)
-                if let leftRatio = eyeTrackingService.debugLeftPupilRatio,
-                    let rightRatio = eyeTrackingService.debugRightPupilRatio
-                {
-                    HStack(spacing: 16) {
-                        VStack(alignment: .leading, spacing: 2) {
-                            Text("Left Pupil: \(String(format: "%.3f", leftRatio))")
-                                .font(.caption2)
-                                .foregroundColor(
-                                    !trackingConstants.minPupilEnabled
-                                        && !trackingConstants.maxPupilEnabled
-                                        ? .secondary
-                                        : (leftRatio < trackingConstants.minPupilRatio
-                                            || leftRatio > trackingConstants.maxPupilRatio)
-                                            ? .orange : .green
-                                )
-                            Text("Right Pupil: \(String(format: "%.3f", rightRatio))")
-                                .font(.caption2)
-                                .foregroundColor(
-                                    !trackingConstants.minPupilEnabled
-                                        && !trackingConstants.maxPupilEnabled
-                                        ? .secondary
-                                        : (rightRatio < trackingConstants.minPupilRatio
-                                            || rightRatio > trackingConstants.maxPupilRatio)
-                                            ? .orange : .green
-                                )
-                        }
-                        Spacer()
-                        VStack(alignment: .trailing, spacing: 2) {
-                            Text(
-                                "Range: \(String(format: "%.2f", trackingConstants.minPupilRatio)) - \(String(format: "%.2f", trackingConstants.maxPupilRatio))"
-                            )
-                            .font(.caption2)
-                            .foregroundColor(.secondary)
-                            let bothEyesOut =
-                                (leftRatio < trackingConstants.minPupilRatio
-                                    || leftRatio > trackingConstants.maxPupilRatio)
-                                && (rightRatio < trackingConstants.minPupilRatio
-                                    || rightRatio > trackingConstants.maxPupilRatio)
-                            Text(bothEyesOut ? "Both Out ⚠" : "In Range ✓")
-                                .font(.caption2)
-                                .foregroundColor(bothEyesOut ? .orange : .green)
-                        }
-                    }
-                } else {
-                    Text("Pupil data unavailable")
-                        .font(.caption2)
-                        .foregroundColor(.secondary)
-                }
-                if let yaw = eyeTrackingService.debugYaw,
-                    let pitch = eyeTrackingService.debugPitch
-                {
-                    HStack(spacing: 16) {
-                        VStack(alignment: .leading, spacing: 2) {
-                            Text("Yaw: \(String(format: "%.3f", yaw))")
-                                .font(.caption2)
-                                .foregroundColor(
-                                    !trackingConstants.yawEnabled
-                                        ? .secondary
-                                        : abs(yaw) > trackingConstants.yawThreshold
-                                            ? .orange : .green
-                                )
-                            Text("Pitch: \(String(format: "%.3f", pitch))")
-                                .font(.caption2)
-                                .foregroundColor(
-                                    !trackingConstants.pitchUpEnabled
-                                        && !trackingConstants.pitchDownEnabled
-                                        ? .secondary
-                                        : (pitch > trackingConstants.pitchUpThreshold
-                                            || pitch < trackingConstants.pitchDownThreshold)
-                                            ? .orange : .green
-                                )
-                        }
-                        Spacer()
-                        VStack(alignment: .trailing, spacing: 2) {
-                            Text(
-                                "Yaw Max: \(String(format: "%.2f", trackingConstants.yawThreshold))"
-                            )
-                            .font(.caption2)
-                            .foregroundColor(.secondary)
-                            Text(
-                                "Pitch: \(String(format: "%.2f", trackingConstants.pitchDownThreshold)) to \(String(format: "%.2f", trackingConstants.pitchUpThreshold))"
-                            )
-                            .font(.caption2)
-                            .foregroundColor(.secondary)
-                        }
-                    }
-                }
-            }
-            .padding(.top, 4)
-            if showAdvancedSettings {
-                VStack(spacing: 16) {
-                    // Yaw Threshold
-                    VStack(alignment: .leading, spacing: 4) {
-                        HStack {
-                            Toggle("", isOn: $trackingConstants.yawEnabled)
-                                .labelsHidden()
-                            Text("Yaw Threshold (Head Turn)")
-                                .foregroundColor(
-                                    trackingConstants.yawEnabled ? .primary : .secondary)
-                            Spacer()
-                            Text(String(format: "%.2f rad", trackingConstants.yawThreshold))
-                                .foregroundColor(.secondary)
-                                .font(.caption)
-                        }
-                        Slider(value: $trackingConstants.yawThreshold, in: 0.1...0.8, step: 0.05)
-                            .disabled(!trackingConstants.yawEnabled)
-                        Text("Lower = more sensitive to head turning")
-                            .font(.caption2)
-                            .foregroundColor(.secondary)
-                    }
-                    Divider()
-                    // Pitch Up Threshold
-                    VStack(alignment: .leading, spacing: 4) {
-                        HStack {
-                            Toggle("", isOn: $trackingConstants.pitchUpEnabled)
-                                .labelsHidden()
-                            Text("Pitch Up Threshold (Looking Up)")
-                                .foregroundColor(
-                                    trackingConstants.pitchUpEnabled ? .primary : .secondary)
-                            Spacer()
-                            Text(String(format: "%.2f rad", trackingConstants.pitchUpThreshold))
-                                .foregroundColor(.secondary)
-                                .font(.caption)
-                        }
-                        Slider(
-                            value: $trackingConstants.pitchUpThreshold, in: -0.2...0.5, step: 0.05
-                        )
-                        .disabled(!trackingConstants.pitchUpEnabled)
-                        Text("Lower = more sensitive to looking up")
-                            .font(.caption2)
-                            .foregroundColor(.secondary)
-                    }
-                    Divider()
-                    // Pitch Down Threshold
-                    VStack(alignment: .leading, spacing: 4) {
-                        HStack {
-                            Toggle("", isOn: $trackingConstants.pitchDownEnabled)
-                                .labelsHidden()
-                            Text("Pitch Down Threshold (Looking Down)")
-                                .foregroundColor(
-                                    trackingConstants.pitchDownEnabled ? .primary : .secondary)
-                            Spacer()
-                            Text(String(format: "%.2f rad", trackingConstants.pitchDownThreshold))
-                                .foregroundColor(.secondary)
-                                .font(.caption)
-                        }
-                        Slider(
-                            value: $trackingConstants.pitchDownThreshold, in: -0.8...0.0, step: 0.05
-                        )
-                        .disabled(!trackingConstants.pitchDownEnabled)
-                        Text("Higher = more sensitive to looking down")
-                            .font(.caption2)
-                            .foregroundColor(.secondary)
-                    }
-                    Divider()
-                    // Min Pupil Ratio
-                    VStack(alignment: .leading, spacing: 4) {
-                        HStack {
-                            Toggle("", isOn: $trackingConstants.minPupilEnabled)
-                                .labelsHidden()
-                            Text("Min Pupil Ratio (Looking Right)")
-                                .foregroundColor(
-                                    trackingConstants.minPupilEnabled ? .primary : .secondary)
-                            Spacer()
-                            Text(String(format: "%.2f", trackingConstants.minPupilRatio))
-                                .foregroundColor(.secondary)
-                                .font(.caption)
-                        }
-                        Slider(value: $trackingConstants.minPupilRatio, in: 0.2...0.5, step: 0.01)
-                            .disabled(!trackingConstants.minPupilEnabled)
-                        Text("Higher = more sensitive to looking right")
-                            .font(.caption2)
-                            .foregroundColor(.secondary)
-                    }
-                    Divider()
-                    // Max Pupil Ratio
-                    VStack(alignment: .leading, spacing: 4) {
-                        HStack {
-                            Toggle("", isOn: $trackingConstants.maxPupilEnabled)
-                                .labelsHidden()
-                            Text("Max Pupil Ratio (Looking Left)")
-                                .foregroundColor(
-                                    trackingConstants.maxPupilEnabled ? .primary : .secondary)
-                            Spacer()
-                            Text(String(format: "%.2f", trackingConstants.maxPupilRatio))
-                                .foregroundColor(.secondary)
-                                .font(.caption)
-                        }
-                        Slider(value: $trackingConstants.maxPupilRatio, in: 0.5...0.8, step: 0.01)
-                            .disabled(!trackingConstants.maxPupilEnabled)
-                        Text("Lower = more sensitive to looking left")
-                            .font(.caption2)
-                            .foregroundColor(.secondary)
-                    }
-                    Divider()
-                    // Eye Closed Threshold
-                    VStack(alignment: .leading, spacing: 4) {
-                        HStack {
-                            Toggle("", isOn: $trackingConstants.eyeClosedEnabled)
-                                .labelsHidden()
-                            Text("Eye Closed Threshold")
-                                .foregroundColor(
-                                    trackingConstants.eyeClosedEnabled ? .primary : .secondary)
-                            Spacer()
-                            Text(String(format: "%.3f", trackingConstants.eyeClosedThreshold))
-                                .foregroundColor(.secondary)
-                                .font(.caption)
-                        }
-                        Slider(
-                            value: Binding(
-                                get: { Double(trackingConstants.eyeClosedThreshold) },
-                                set: { trackingConstants.eyeClosedThreshold = CGFloat($0) }
-                            ), in: 0.01...0.1, step: 0.005
-                        )
-                        .disabled(!trackingConstants.eyeClosedEnabled)
-                        Text("Lower = more sensitive to eye closure")
-                            .font(.caption2)
-                            .foregroundColor(.secondary)
-                    }
-                    // Reset button
-                    Button(action: {
-                        trackingConstants.resetToDefaults()
-                    }) {
-                        HStack {
-                            Image(systemName: "arrow.counterclockwise")
-                            Text("Reset to Defaults")
-                        }
-                        .frame(maxWidth: .infinity)
-                    }
-                    .buttonStyle(.bordered)
-                    .controlSize(.small)
-                    .padding(.top, 8)
-                }
-                .padding(.top, 8)
-            }
-        }
-        .padding()
-        .glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
-    }
+    /*private var trackingConstantsView: some View {*/
+        /*VStack(alignment: .leading, spacing: 16) {*/
+            /*HStack {*/
+                /*Text("Tracking Sensitivity")*/
+                    /*.font(.headline)*/
+                /*Spacer()*/
+                /*Button(action: {*/
+                    /*eyeTrackingService.enableDebugLogging.toggle()*/
+                /*}) {*/
+                    /*Image(*/
+                        /*systemName: eyeTrackingService.enableDebugLogging*/
+                            /*? "ant.circle.fill" : "ant.circle"*/
+                    /*)*/
+                    /*.foregroundColor(eyeTrackingService.enableDebugLogging ? .orange : .secondary)*/
+                /*}*/
+                /*.buttonStyle(.plain)*/
+                /*.help("Toggle console debug logging")*/
+                /*Button(showAdvancedSettings ? "Hide Settings" : "Show Settings") {*/
+                    /*withAnimation {*/
+                        /*showAdvancedSettings.toggle()*/
+                    /*}*/
+                /*}*/
+                /*.buttonStyle(.bordered)*/
+                /*.controlSize(.small)*/
+            /*}*/
+            /*// Debug info always visible when tracking*/
+            /*VStack(alignment: .leading, spacing: 8) {*/
+                /*Text("Live Values:")*/
+                    /*.font(.caption)*/
+                    /*.fontWeight(.semibold)*/
+                    /*.foregroundColor(.secondary)*/
+                /*if let leftRatio = eyeTrackingService.debugLeftPupilRatio,*/
+                    /*let rightRatio = eyeTrackingService.debugRightPupilRatio*/
+                /*{*/
+                    /*HStack(spacing: 16) {*/
+                        /*VStack(alignment: .leading, spacing: 2) {*/
+                            /*Text("Left Pupil: \(String(format: "%.3f", leftRatio))")*/
+                                /*.font(.caption2)*/
+                                /*.foregroundColor(*/
+                                    /*!trackingConstants.minPupilEnabled*/
+                                        /*&& !trackingConstants.maxPupilEnabled*/
+                                        /*? .secondary*/
+                                        /*: (leftRatio < trackingConstants.minPupilRatio*/
+                                            /*|| leftRatio > trackingConstants.maxPupilRatio)*/
+                                            /*? .orange : .green*/
+                                /*)*/
+                            /*Text("Right Pupil: \(String(format: "%.3f", rightRatio))")*/
+                                /*.font(.caption2)*/
+                                /*.foregroundColor(*/
+                                    /*!trackingConstants.minPupilEnabled*/
+                                        /*&& !trackingConstants.maxPupilEnabled*/
+                                        /*? .secondary*/
+                                        /*: (rightRatio < trackingConstants.minPupilRatio*/
+                                            /*|| rightRatio > trackingConstants.maxPupilRatio)*/
+                                            /*? .orange : .green*/
+                                /*)*/
+                        /*}*/
+                        /*Spacer()*/
+                        /*VStack(alignment: .trailing, spacing: 2) {*/
+                            /*Text(*/
+                                /*"Range: \(String(format: "%.2f", trackingConstants.minPupilRatio)) - \(String(format: "%.2f", trackingConstants.maxPupilRatio))"*/
+                            /*)*/
+                            /*.font(.caption2)*/
+                            /*.foregroundColor(.secondary)*/
+                            /*let bothEyesOut =*/
+                                /*(leftRatio < trackingConstants.minPupilRatio*/
+                                    /*|| leftRatio > trackingConstants.maxPupilRatio)*/
+                                /*&& (rightRatio < trackingConstants.minPupilRatio*/
+                                    /*|| rightRatio > trackingConstants.maxPupilRatio)*/
+                            /*Text(bothEyesOut ? "Both Out ⚠" : "In Range ✓")*/
+                                /*.font(.caption2)*/
+                                /*.foregroundColor(bothEyesOut ? .orange : .green)*/
+                        /*}*/
+                    /*}*/
+                /*} else {*/
+                    /*Text("Pupil data unavailable")*/
+                        /*.font(.caption2)*/
+                        /*.foregroundColor(.secondary)*/
+                /*}*/
+                /*if let yaw = eyeTrackingService.debugYaw,*/
+                    /*let pitch = eyeTrackingService.debugPitch*/
+                /*{*/
+                    /*HStack(spacing: 16) {*/
+                        /*VStack(alignment: .leading, spacing: 2) {*/
+                            /*Text("Yaw: \(String(format: "%.3f", yaw))")*/
+                                /*.font(.caption2)*/
+                                /*.foregroundColor(*/
+                                    /*!trackingConstants.yawEnabled*/
+                                        /*? .secondary*/
+                                        /*: abs(yaw) > trackingConstants.yawThreshold*/
+                                            /*? .orange : .green*/
+                                /*)*/
+                            /*Text("Pitch: \(String(format: "%.3f", pitch))")*/
+                                /*.font(.caption2)*/
+                                /*.foregroundColor(*/
+                                    /*!trackingConstants.pitchUpEnabled*/
+                                        /*&& !trackingConstants.pitchDownEnabled*/
+                                        /*? .secondary*/
+                                        /*: (pitch > trackingConstants.pitchUpThreshold*/
+                                            /*|| pitch < trackingConstants.pitchDownThreshold)*/
+                                            /*? .orange : .green*/
+                                /*)*/
+                        /*}*/
+                        /*Spacer()*/
+                        /*VStack(alignment: .trailing, spacing: 2) {*/
+                            /*Text(*/
+                                /*"Yaw Max: \(String(format: "%.2f", trackingConstants.yawThreshold))"*/
+                            /*)*/
+                            /*.font(.caption2)*/
+                            /*.foregroundColor(.secondary)*/
+                            /*Text(*/
+                                /*"Pitch: \(String(format: "%.2f", trackingConstants.pitchDownThreshold)) to \(String(format: "%.2f", trackingConstants.pitchUpThreshold))"*/
+                            /*)*/
+                            /*.font(.caption2)*/
+                            /*.foregroundColor(.secondary)*/
+                        /*}*/
+                    /*}*/
+                /*}*/
+            /*}*/
+            /*.padding(.top, 4)*/
+            /*if showAdvancedSettings {*/
+                /*VStack(spacing: 16) {*/
+                    /*// Yaw Threshold*/
+                    /*VStack(alignment: .leading, spacing: 4) {*/
+                        /*HStack {*/
+                            /*Toggle("", isOn: $trackingConstants.yawEnabled)*/
+                                /*.labelsHidden()*/
+                            /*Text("Yaw Threshold (Head Turn)")*/
+                                /*.foregroundColor(*/
+                                    /*trackingConstants.yawEnabled ? .primary : .secondary)*/
+                            /*Spacer()*/
+                            /*Text(String(format: "%.2f rad", trackingConstants.yawThreshold))*/
+                                /*.foregroundColor(.secondary)*/
+                                /*.font(.caption)*/
+                        /*}*/
+                        /*Slider(value: $trackingConstants.yawThreshold, in: 0.1...0.8, step: 0.05)*/
+                            /*.disabled(!trackingConstants.yawEnabled)*/
+                        /*Text("Lower = more sensitive to head turning")*/
+                            /*.font(.caption2)*/
+                            /*.foregroundColor(.secondary)*/
+                    /*}*/
+                    /*Divider()*/
+                    /*// Pitch Up Threshold*/
+                    /*VStack(alignment: .leading, spacing: 4) {*/
+                        /*HStack {*/
+                            /*Toggle("", isOn: $trackingConstants.pitchUpEnabled)*/
+                                /*.labelsHidden()*/
+                            /*Text("Pitch Up Threshold (Looking Up)")*/
+                                /*.foregroundColor(*/
+                                    /*trackingConstants.pitchUpEnabled ? .primary : .secondary)*/
+                            /*Spacer()*/
+                            /*Text(String(format: "%.2f rad", trackingConstants.pitchUpThreshold))*/
+                                /*.foregroundColor(.secondary)*/
+                                /*.font(.caption)*/
+                        /*}*/
+                        /*Slider(*/
+                            /*value: $trackingConstants.pitchUpThreshold, in: -0.2...0.5, step: 0.05*/
+                        /*)*/
+                        /*.disabled(!trackingConstants.pitchUpEnabled)*/
+                        /*Text("Lower = more sensitive to looking up")*/
+                            /*.font(.caption2)*/
+                            /*.foregroundColor(.secondary)*/
+                    /*}*/
+                    /*Divider()*/
+                    /*// Pitch Down Threshold*/
+                    /*VStack(alignment: .leading, spacing: 4) {*/
+                        /*HStack {*/
+                            /*Toggle("", isOn: $trackingConstants.pitchDownEnabled)*/
+                                /*.labelsHidden()*/
+                            /*Text("Pitch Down Threshold (Looking Down)")*/
+                                /*.foregroundColor(*/
+                                    /*trackingConstants.pitchDownEnabled ? .primary : .secondary)*/
+                            /*Spacer()*/
+                            /*Text(String(format: "%.2f rad", trackingConstants.pitchDownThreshold))*/
+                                /*.foregroundColor(.secondary)*/
+                                /*.font(.caption)*/
+                        /*}*/
+                        /*Slider(*/
+                            /*value: $trackingConstants.pitchDownThreshold, in: -0.8...0.0, step: 0.05*/
+                        /*)*/
+                        /*.disabled(!trackingConstants.pitchDownEnabled)*/
+                        /*Text("Higher = more sensitive to looking down")*/
+                            /*.font(.caption2)*/
+                            /*.foregroundColor(.secondary)*/
+                    /*}*/
+                    /*Divider()*/
+                    /*// Min Pupil Ratio*/
+                    /*VStack(alignment: .leading, spacing: 4) {*/
+                        /*HStack {*/
+                            /*Toggle("", isOn: $trackingConstants.minPupilEnabled)*/
+                                /*.labelsHidden()*/
+                            /*Text("Min Pupil Ratio (Looking Right)")*/
+                                /*.foregroundColor(*/
+                                    /*trackingConstants.minPupilEnabled ? .primary : .secondary)*/
+                            /*Spacer()*/
+                            /*Text(String(format: "%.2f", trackingConstants.minPupilRatio))*/
+                                /*.foregroundColor(.secondary)*/
+                                /*.font(.caption)*/
+                        /*}*/
+                        /*Slider(value: $trackingConstants.minPupilRatio, in: 0.2...0.5, step: 0.01)*/
+                            /*.disabled(!trackingConstants.minPupilEnabled)*/
+                        /*Text("Higher = more sensitive to looking right")*/
+                            /*.font(.caption2)*/
+                            /*.foregroundColor(.secondary)*/
+                    /*}*/
+                    /*Divider()*/
+                    /*// Max Pupil Ratio*/
+                    /*VStack(alignment: .leading, spacing: 4) {*/
+                        /*HStack {*/
+                            /*Toggle("", isOn: $trackingConstants.maxPupilEnabled)*/
+                                /*.labelsHidden()*/
+                            /*Text("Max Pupil Ratio (Looking Left)")*/
+                                /*.foregroundColor(*/
+                                    /*trackingConstants.maxPupilEnabled ? .primary : .secondary)*/
+                            /*Spacer()*/
+                            /*Text(String(format: "%.2f", trackingConstants.maxPupilRatio))*/
+                                /*.foregroundColor(.secondary)*/
+                                /*.font(.caption)*/
+                        /*}*/
+                        /*Slider(value: $trackingConstants.maxPupilRatio, in: 0.5...0.8, step: 0.01)*/
+                            /*.disabled(!trackingConstants.maxPupilEnabled)*/
+                        /*Text("Lower = more sensitive to looking left")*/
+                            /*.font(.caption2)*/
+                            /*.foregroundColor(.secondary)*/
+                    /*}*/
+                    /*Divider()*/
+                    /*// Eye Closed Threshold*/
+                    /*VStack(alignment: .leading, spacing: 4) {*/
+                        /*HStack {*/
+                            /*Toggle("", isOn: $trackingConstants.eyeClosedEnabled)*/
+                                /*.labelsHidden()*/
+                            /*Text("Eye Closed Threshold")*/
+                                /*.foregroundColor(*/
+                                    /*trackingConstants.eyeClosedEnabled ? .primary : .secondary)*/
+                            /*Spacer()*/
+                            /*Text(String(format: "%.3f", trackingConstants.eyeClosedThreshold))*/
+                                /*.foregroundColor(.secondary)*/
+                                /*.font(.caption)*/
+                        /*}*/
+                        /*Slider(*/
+                            /*value: Binding(*/
+                                /*get: { Double(trackingConstants.eyeClosedThreshold) },*/
+                                /*set: { trackingConstants.eyeClosedThreshold = CGFloat($0) }*/
+                            /*), in: 0.01...0.1, step: 0.005*/
+                        /*)*/
+                        /*.disabled(!trackingConstants.eyeClosedEnabled)*/
+                        /*Text("Lower = more sensitive to eye closure")*/
+                            /*.font(.caption2)*/
+                            /*.foregroundColor(.secondary)*/
+                    /*}*/
+                    /*// Reset button*/
+                    /*Button(action: {*/
+                        /*trackingConstants.resetToDefaults()*/
+                    /*}) {*/
+                        /*HStack {*/
+                            /*Image(systemName: "arrow.counterclockwise")*/
+                            /*Text("Reset to Defaults")*/
+                        /*}*/
+                        /*.frame(maxWidth: .infinity)*/
+                    /*}*/
+                    /*.buttonStyle(.bordered)*/
+                    /*.controlSize(.small)*/
+                    /*.padding(.top, 8)*/
+                /*}*/
+                /*.padding(.top, 8)*/
+            /*}*/
+        /*}*/
+        /*.padding()*/
+        /*.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))*/
+    /*}*/
-    private var debugEyeTrackingView: some View {
-        VStack(alignment: .leading, spacing: 12) {
-            Text("Debug Eye Tracking Data")
-                .font(.headline)
-                .foregroundColor(.blue)
-            VStack(alignment: .leading, spacing: 8) {
-                Text("Face Detected: \(eyeTrackingService.faceDetected ? "Yes" : "No")")
-                    .font(.caption)
-                Text("Looking at Screen: \(eyeTrackingService.userLookingAtScreen ? "Yes" : "No")")
-                    .font(.caption)
-                Text("Eyes Closed: \(eyeTrackingService.isEyesClosed ? "Yes" : "No")")
-                    .font(.caption)
-                if eyeTrackingService.faceDetected {
-                    Text("Yaw: 0.0")
-                        .font(.caption)
-                    Text("Roll: 0.0")
-                        .font(.caption)
-                }
-            }
-            .font(.caption)
-            .foregroundColor(.secondary)
-        }
-        .padding()
-        .glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
-    }
+    /*private var debugEyeTrackingView: some View {*/
+        /*VStack(alignment: .leading, spacing: 12) {*/
+            /*Text("Debug Eye Tracking Data")*/
+                /*.font(.headline)*/
+                /*.foregroundColor(.blue)*/
+            /*VStack(alignment: .leading, spacing: 8) {*/
+                /*Text("Face Detected: \(eyeTrackingService.faceDetected ? "Yes" : "No")")*/
+                    /*.font(.caption)*/
+                /*Text("Looking at Screen: \(eyeTrackingService.userLookingAtScreen ? "Yes" : "No")")*/
+                    /*.font(.caption)*/
+                /*Text("Eyes Closed: \(eyeTrackingService.isEyesClosed ? "Yes" : "No")")*/
+                    /*.font(.caption)*/
+                /*if eyeTrackingService.faceDetected {*/
+                    /*Text("Yaw: 0.0")*/
+                        /*.font(.caption)*/
+                    /*Text("Roll: 0.0")*/
+                        /*.font(.caption)*/
+                /*}*/
+            /*}*/
+            /*.font(.caption)*/
+            /*.foregroundColor(.secondary)*/
+        /*}*/
+        /*.padding()*/
+        /*.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))*/
+    /*}*/
 }
 #Preview {