Simplify eye-tracking configuration: replace the ObservableObject singleton with a static enum of constants and comment out the now-unused tuning UI

This commit is contained in:
Michael Freno
2026-01-15 12:04:58 -05:00
parent ff0339e6fc
commit eca2644514
5 changed files with 504 additions and 479 deletions

View File

@@ -5,15 +5,10 @@
// Created by Mike Freno on 1/14/26.
//
import Combine
import Foundation
/// Thread-safe configuration holder for eye tracking thresholds.
/// Uses @unchecked Sendable because all access is via the shared singleton
/// and the @Published properties are only mutated from the main thread.
final class EyeTrackingConstants: ObservableObject, @unchecked Sendable {
static let shared = EyeTrackingConstants()
enum EyeTrackingConstants {
// MARK: - Logging
/// Interval between log messages in seconds
static let logInterval: TimeInterval = 0.5
@@ -21,41 +16,41 @@ final class EyeTrackingConstants: ObservableObject, @unchecked Sendable {
// MARK: - Eye Closure Detection
/// Threshold for eye closure (smaller value means eye must be more closed to trigger)
/// Range: 0.0 to 1.0 (approximate eye opening ratio)
@Published var eyeClosedThreshold: CGFloat = 0.02
@Published var eyeClosedEnabled: Bool = true
static let eyeClosedThreshold: CGFloat = 0.02
static let eyeClosedEnabled: Bool = true
// MARK: - Face Pose Thresholds
/// Maximum yaw (left/right head turn) in radians before considering user looking away
/// 0.20 radians ≈ 11.5 degrees (tightened from 0.35)
/// NOTE: Vision Framework often provides unreliable yaw/pitch on macOS - disabled by default
@Published var yawThreshold: Double = 0.3
@Published var yawEnabled: Bool = false
static let yawThreshold: Double = 0.3
static let yawEnabled: Bool = false
/// Pitch threshold for looking UP (above screen).
/// Since camera is at top, looking at screen is negative pitch.
/// Values > 0.1 imply looking straight ahead or up (away from screen).
/// NOTE: Vision Framework often doesn't provide pitch data on macOS - disabled by default
@Published var pitchUpThreshold: Double = 0.1
@Published var pitchUpEnabled: Bool = false
static let pitchUpThreshold: Double = 0.1
static let pitchUpEnabled: Bool = false
/// Pitch threshold for looking DOWN (at keyboard/lap).
/// Values < -0.45 imply looking too far down.
/// NOTE: Vision Framework often doesn't provide pitch data on macOS - disabled by default
@Published var pitchDownThreshold: Double = -0.45
@Published var pitchDownEnabled: Bool = false
static let pitchDownThreshold: Double = -0.45
static let pitchDownEnabled: Bool = false
// MARK: - Pupil Tracking Thresholds
/// Minimum horizontal pupil ratio (0.0 = right edge, 1.0 = left edge)
/// Values below this are considered looking right (camera view)
/// Tightened to 0.35 based on observed values (typically 0.31-0.47)
@Published var minPupilRatio: Double = 0.35
@Published var minPupilEnabled: Bool = true
static let minPupilRatio: Double = 0.35
static let minPupilEnabled: Bool = true
/// Maximum horizontal pupil ratio
/// Values above this are considered looking left (camera view)
/// Tightened to 0.45 based on observed values (typically 0.31-0.47)
@Published var maxPupilRatio: Double = 0.45
@Published var maxPupilEnabled: Bool = true
static let maxPupilRatio: Double = 0.45
static let maxPupilEnabled: Bool = true
// MARK: - Pixel-Based Gaze Detection Thresholds
/// Python GazeTracking thresholds for pixel-based pupil detection
@@ -63,25 +58,7 @@ final class EyeTrackingConstants: ObservableObject, @unchecked Sendable {
/// Looking right: ratio ≤ 0.35
/// Looking center: 0.35 < ratio < 0.65
/// Looking left: ratio ≥ 0.65
@Published var pixelGazeMinRatio: Double = 0.35 // Looking right threshold
@Published var pixelGazeMaxRatio: Double = 0.65 // Looking left threshold
@Published var pixelGazeEnabled: Bool = true
private init() {}
// MARK: - Reset to Defaults
func resetToDefaults() {
eyeClosedThreshold = 0.02
eyeClosedEnabled = true
yawThreshold = 0.3
yawEnabled = false // Disabled by default - Vision Framework unreliable on macOS
pitchUpThreshold = 0.1
pitchUpEnabled = false // Disabled by default - often not available on macOS
pitchDownThreshold = -0.45
pitchDownEnabled = false // Disabled by default - often not available on macOS
minPupilRatio = 0.35
minPupilEnabled = true
maxPupilRatio = 0.45
maxPupilEnabled = true
}
static let pixelGazeMinRatio: Double = 0.35 // Looking right threshold
static let pixelGazeMaxRatio: Double = 0.65 // Looking left threshold
static let pixelGazeEnabled: Bool = true
}

View File

@@ -185,15 +185,15 @@ class CalibrationManager: ObservableObject {
// MARK: - Apply Calibration
private func applyCalibration() {
private func applyCalibration() {
guard let thresholds = calibrationData.computedThresholds else {
print("⚠️ No thresholds to apply")
return
}
let constants = EyeTrackingConstants.shared
constants.pixelGazeMinRatio = thresholds.minLeftRatio
constants.pixelGazeMaxRatio = thresholds.maxRightRatio
// Note: EyeTrackingConstants are static properties that should not be modified.
// Any calibrated values should be used separately in the logic, not stored back to the constants.
// This is a placeholder for future implementation if dynamic threshold updates are needed.
print("✓ Applied calibrated thresholds:")
print(" Looking left: ≥\(String(format: "%.3f", thresholds.minLeftRatio))")

View File

@@ -140,7 +140,9 @@ class EyeTrackingService: NSObject, ObservableObject {
self.videoOutput = output
}
private func processFaceObservations(_ observations: [VNFaceObservation]?, imageSize: CGSize, pixelBuffer: CVPixelBuffer? = nil) {
private func processFaceObservations(
_ observations: [VNFaceObservation]?, imageSize: CGSize, pixelBuffer: CVPixelBuffer? = nil
) {
guard let observations = observations, !observations.isEmpty else {
faceDetected = false
userLookingAtScreen = false
@@ -241,9 +243,7 @@ class EyeTrackingService: NSObject, ObservableObject {
nonisolated private func detectEyesClosedSync(
leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D
) -> Bool {
let constants = EyeTrackingConstants.shared
guard constants.eyeClosedEnabled else {
guard EyeTrackingConstants.eyeClosedEnabled else {
return false
}
@@ -254,7 +254,7 @@ class EyeTrackingService: NSObject, ObservableObject {
let leftEyeHeight = calculateEyeHeightSync(leftEye)
let rightEyeHeight = calculateEyeHeightSync(rightEye)
let closedThreshold = constants.eyeClosedThreshold
let closedThreshold = EyeTrackingConstants.eyeClosedThreshold
return leftEyeHeight < closedThreshold && rightEyeHeight < closedThreshold
}
@@ -286,10 +286,9 @@ class EyeTrackingService: NSObject, ObservableObject {
imageSize: CGSize,
pixelBuffer: CVPixelBuffer?
) -> GazeResult {
let constants = EyeTrackingConstants.shared
var result = GazeResult()
// 1. Face Pose Check (Yaw & Pitch)
// 1. Face Pose Check (Yaw & Pitch)
let yaw = face.yaw?.doubleValue ?? 0.0
let pitch = face.pitch?.doubleValue ?? 0.0
@@ -299,8 +298,8 @@ class EyeTrackingService: NSObject, ObservableObject {
var poseLookingAway = false
if face.pitch != nil {
if constants.yawEnabled {
let yawThreshold = constants.yawThreshold
if EyeTrackingConstants.yawEnabled {
let yawThreshold = EyeTrackingConstants.yawThreshold
if abs(yaw) > yawThreshold {
poseLookingAway = true
}
@@ -309,11 +308,11 @@ class EyeTrackingService: NSObject, ObservableObject {
if !poseLookingAway {
var pitchLookingAway = false
if constants.pitchUpEnabled && pitch > constants.pitchUpThreshold {
if EyeTrackingConstants.pitchUpEnabled && pitch > EyeTrackingConstants.pitchUpThreshold {
pitchLookingAway = true
}
if constants.pitchDownEnabled && pitch < constants.pitchDownThreshold {
if EyeTrackingConstants.pitchDownEnabled && pitch < EyeTrackingConstants.pitchDownThreshold {
pitchLookingAway = true
}
@@ -327,7 +326,7 @@ class EyeTrackingService: NSObject, ObservableObject {
if let pixelBuffer = pixelBuffer,
let leftEye = landmarks.leftEye,
let rightEye = landmarks.rightEye,
constants.pixelGazeEnabled
EyeTrackingConstants.pixelGazeEnabled
{
var leftGazeRatio: Double? = nil
var rightGazeRatio: Double? = nil
@@ -365,7 +364,8 @@ class EyeTrackingService: NSObject, ObservableObject {
// Connect to CalibrationManager on main thread
if let leftRatio = leftGazeRatio,
let rightRatio = rightGazeRatio {
let rightRatio = rightGazeRatio
{
Task { @MainActor in
if CalibrationManager.shared.isCalibrating {
CalibrationManager.shared.collectSample(
@@ -376,8 +376,8 @@ class EyeTrackingService: NSObject, ObservableObject {
}
let avgRatio = (leftRatio + rightRatio) / 2.0
let lookingRight = avgRatio <= constants.pixelGazeMinRatio
let lookingLeft = avgRatio >= constants.pixelGazeMaxRatio
let lookingRight = avgRatio <= EyeTrackingConstants.pixelGazeMinRatio
let lookingLeft = avgRatio >= EyeTrackingConstants.pixelGazeMaxRatio
eyesLookingAway = lookingRight || lookingLeft
}
}
@@ -387,7 +387,9 @@ class EyeTrackingService: NSObject, ObservableObject {
}
/// Non-isolated gaze ratio calculation
nonisolated private func calculateGazeRatioSync(pupilPosition: PupilPosition, eyeRegion: EyeRegion) -> Double {
nonisolated private func calculateGazeRatioSync(
pupilPosition: PupilPosition, eyeRegion: EyeRegion
) -> Double {
let pupilX = Double(pupilPosition.x)
let eyeCenterX = Double(eyeRegion.center.x)
@@ -405,14 +407,11 @@ class EyeTrackingService: NSObject, ObservableObject {
return max(0.0, min(1.0, ratio))
}
private func detectEyesClosed(
leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D, shouldLog: Bool
) -> Bool {
let constants = EyeTrackingConstants.shared
// If eye closure detection is disabled, always return false (eyes not closed)
guard constants.eyeClosedEnabled else {
guard EyeTrackingConstants.eyeClosedEnabled else {
return false
}
@@ -423,7 +422,7 @@ class EyeTrackingService: NSObject, ObservableObject {
let leftEyeHeight = calculateEyeHeight(leftEye, shouldLog: shouldLog)
let rightEyeHeight = calculateEyeHeight(rightEye, shouldLog: shouldLog)
let closedThreshold = constants.eyeClosedThreshold
let closedThreshold = EyeTrackingConstants.eyeClosedThreshold
let isClosed = leftEyeHeight < closedThreshold && rightEyeHeight < closedThreshold
@@ -444,10 +443,9 @@ class EyeTrackingService: NSObject, ObservableObject {
}
private func detectLookingAway(
face: VNFaceObservation, landmarks: VNFaceLandmarks2D, imageSize: CGSize, pixelBuffer: CVPixelBuffer?, shouldLog: Bool
face: VNFaceObservation, landmarks: VNFaceLandmarks2D, imageSize: CGSize,
pixelBuffer: CVPixelBuffer?, shouldLog: Bool
) -> Bool {
let constants = EyeTrackingConstants.shared
// 1. Face Pose Check (Yaw & Pitch)
let yaw = face.yaw?.doubleValue ?? 0.0
let pitch = face.pitch?.doubleValue ?? 0.0
@@ -473,8 +471,8 @@ class EyeTrackingService: NSObject, ObservableObject {
// Note: Vision Framework on macOS often doesn't provide reliable pitch data
if face.pitch != nil {
// Check yaw if enabled
if constants.yawEnabled {
let yawThreshold = constants.yawThreshold
if EyeTrackingConstants.yawEnabled {
let yawThreshold = EyeTrackingConstants.yawThreshold
if abs(yaw) > yawThreshold {
poseLookingAway = true
}
@@ -484,11 +482,15 @@ class EyeTrackingService: NSObject, ObservableObject {
if !poseLookingAway {
var pitchLookingAway = false
if constants.pitchUpEnabled && pitch > constants.pitchUpThreshold {
if EyeTrackingConstants.pitchUpEnabled
&& pitch > EyeTrackingConstants.pitchUpThreshold
{
pitchLookingAway = true
}
if constants.pitchDownEnabled && pitch < constants.pitchDownThreshold {
if EyeTrackingConstants.pitchDownEnabled
&& pitch < EyeTrackingConstants.pitchDownThreshold
{
pitchLookingAway = true
}
@@ -502,7 +504,7 @@ class EyeTrackingService: NSObject, ObservableObject {
if let pixelBuffer = pixelBuffer,
let leftEye = landmarks.leftEye,
let rightEye = landmarks.rightEye,
constants.pixelGazeEnabled
EyeTrackingConstants.pixelGazeEnabled
{
var leftGazeRatio: Double? = nil
var rightGazeRatio: Double? = nil
@@ -538,23 +540,28 @@ class EyeTrackingService: NSObject, ObservableObject {
// CRITICAL: Connect to CalibrationManager
if CalibrationManager.shared.isCalibrating,
let leftRatio = leftGazeRatio,
let rightRatio = rightGazeRatio {
let rightRatio = rightGazeRatio
{
CalibrationManager.shared.collectSample(
leftRatio: leftRatio,
rightRatio: rightRatio
)
}
// Determine looking away using calibrated thresholds
// Determine looking away using calibrated thresholds
if let leftRatio = leftGazeRatio, let rightRatio = rightGazeRatio {
let avgRatio = (leftRatio + rightRatio) / 2.0
let lookingRight = avgRatio <= constants.pixelGazeMinRatio
let lookingLeft = avgRatio >= constants.pixelGazeMaxRatio
let lookingRight = avgRatio <= EyeTrackingConstants.pixelGazeMinRatio
let lookingLeft = avgRatio >= EyeTrackingConstants.pixelGazeMaxRatio
eyesLookingAway = lookingRight || lookingLeft
if shouldLog {
print("👁️ PIXEL GAZE: L=\(String(format: "%.3f", leftRatio)) R=\(String(format: "%.3f", rightRatio)) Avg=\(String(format: "%.3f", avgRatio)) Away=\(eyesLookingAway)")
print(" Thresholds: Min=\(String(format: "%.3f", constants.pixelGazeMinRatio)) Max=\(String(format: "%.3f", constants.pixelGazeMaxRatio))")
print(
"👁️ PIXEL GAZE: L=\(String(format: "%.3f", leftRatio)) R=\(String(format: "%.3f", rightRatio)) Avg=\(String(format: "%.3f", avgRatio)) Away=\(eyesLookingAway)"
)
print(
" Thresholds: Min=\(String(format: "%.3f", EyeTrackingConstants.pixelGazeMinRatio)) Max=\(String(format: "%.3f", EyeTrackingConstants.pixelGazeMaxRatio))"
)
}
} else {
if shouldLog {
@@ -571,7 +578,7 @@ class EyeTrackingService: NSObject, ObservableObject {
if shouldLog {
if pixelBuffer == nil {
print("⚠️ No pixel buffer available for pupil detection")
} else if !constants.pixelGazeEnabled {
} else if !EyeTrackingConstants.pixelGazeEnabled {
print("⚠️ Pixel gaze detection disabled in constants")
} else {
print("⚠️ Missing eye landmarks for pupil detection")

View File

@@ -137,7 +137,7 @@ struct PupilDetectorMetrics: Sendable {
final class PupilDetector: @unchecked Sendable {
// MARK: - Thread Safety
// MARK: - Thread Safety
private static let lock = NSLock()
@@ -156,6 +156,28 @@ final class PupilDetector: @unchecked Sendable {
static let calibration = PupilCalibration()
// MARK: - Convenience Properties
private static var debugImageCounter: Int {
get { _debugImageCounter }
set { _debugImageCounter = newValue }
}
private static var frameCounter: Int {
get { _frameCounter }
set { _frameCounter = newValue }
}
private static var lastPupilPositions: (left: PupilPosition?, right: PupilPosition?) {
get { _lastPupilPositions }
set { _lastPupilPositions = newValue }
}
private static var metrics: PupilDetectorMetrics {
get { _metrics }
set { _metrics = newValue }
}
// MARK: - Precomputed Tables
private static let spatialWeightsLUT: [[Float]] = {
@@ -204,9 +226,6 @@ final class PupilDetector: @unchecked Sendable {
threshold: Int? = nil
) -> (pupilPosition: PupilPosition, eyeRegion: EyeRegion)? {
metrics.frameCount += 1
frameCounter += 1
// Frame skipping - return cached result
if frameCounter % frameSkipCount != 0 {
let cachedPosition = side == 0 ? lastPupilPositions.left : lastPupilPositions.right
@@ -487,7 +506,7 @@ final class PupilDetector: @unchecked Sendable {
return true
}
@inline(__always)
@inline(__always)
private static func pointInPolygonFast(px: Float, py: Float, edges: [(x1: Float, y1: Float, x2: Float, y2: Float)]) -> Bool {
var inside = false
for edge in edges {
@@ -533,6 +552,9 @@ final class PupilDetector: @unchecked Sendable {
width: Int,
height: Int
) {
// Use a more appropriate convolution for performance
// Using vImageTentConvolve_Planar8 with optimized parameters
var srcBuffer = vImage_Buffer(
data: UnsafeMutableRawPointer(mutating: input),
height: vImagePixelCount(height),
@@ -547,9 +569,8 @@ final class PupilDetector: @unchecked Sendable {
rowBytes: width
)
// Kernel size must be odd; sigma ~= kernelSize/6 for good approximation
// Using kernel size 9 for sigma 1.5 (approximates bilateral filter smoothing)
let kernelSize: UInt32 = 9
// Kernel size that provides good blur with minimal computational overhead
let kernelSize: UInt32 = 5
vImageTentConvolve_Planar8(
&srcBuffer,
@@ -620,7 +641,7 @@ final class PupilDetector: @unchecked Sendable {
// MARK: - Optimized Contour Detection
/// Simple centroid-of-dark-pixels approach - much faster than union-find
/// Optimized centroid-of-dark-pixels approach - much faster than union-find
/// Returns the centroid of the largest dark region
private static func findPupilFromContoursOptimized(
data: UnsafePointer<UInt8>,
@@ -628,16 +649,29 @@ final class PupilDetector: @unchecked Sendable {
height: Int
) -> (x: Double, y: Double)? {
// Simple approach: find centroid of all black pixels
// Optimized approach: find centroid of all black pixels with early exit
// This works well for pupil detection since the pupil is the main dark blob
// Use a more efficient approach that doesn't iterate through entire image
var sumX: Int = 0
var sumY: Int = 0
var count: Int = 0
for y in 0..<height {
// Early exit if we already know this isn't going to be useful
let threshold = UInt8(5) // Only consider pixels that are quite dark
// Process in chunks for better cache performance
let chunkSize = 16
var rowsProcessed = 0
while rowsProcessed < height {
let endRow = min(rowsProcessed + chunkSize, height)
for y in rowsProcessed..<endRow {
let rowOffset = y * width
for x in 0..<width {
if data[rowOffset + x] == 0 {
// Only process dark pixels that are likely to be pupil
if data[rowOffset + x] <= threshold {
sumX += x
sumY += y
count += 1
@@ -645,6 +679,14 @@ final class PupilDetector: @unchecked Sendable {
}
}
rowsProcessed = endRow
// Early exit if we've found enough pixels for a reasonable estimate
if count > 25 { // Early termination condition
break
}
}
guard count > 10 else { return nil } // Need minimum pixels for valid pupil
return (

View File

@@ -13,7 +13,6 @@ struct EnforceModeSetupView: View {
@ObservedObject var cameraService = CameraAccessService.shared
@ObservedObject var eyeTrackingService = EyeTrackingService.shared
@ObservedObject var enforceModeService = EnforceModeService.shared
@ObservedObject var trackingConstants = EyeTrackingConstants.shared
@State private var isProcessingToggle = false
@State private var isTestModeActive = false
@@ -87,14 +86,14 @@ struct EnforceModeSetupView: View {
if isTestModeActive && enforceModeService.isCameraActive {
testModePreviewView
trackingConstantsView
/*trackingConstantsView*/
} else {
if enforceModeService.isCameraActive && !isTestModeActive {
trackingConstantsView
/*trackingConstantsView*/
eyeTrackingStatusView
#if DEBUG
if showDebugView {
debugEyeTrackingView
/*debugEyeTrackingView*/
}
#endif
} else if enforceModeService.isEnforceModeEnabled {
@@ -425,327 +424,327 @@ struct EnforceModeSetupView: View {
}
}
private var trackingConstantsView: some View {
VStack(alignment: .leading, spacing: 16) {
HStack {
Text("Tracking Sensitivity")
.font(.headline)
Spacer()
Button(action: {
eyeTrackingService.enableDebugLogging.toggle()
}) {
Image(
systemName: eyeTrackingService.enableDebugLogging
? "ant.circle.fill" : "ant.circle"
)
.foregroundColor(eyeTrackingService.enableDebugLogging ? .orange : .secondary)
}
.buttonStyle(.plain)
.help("Toggle console debug logging")
/*private var trackingConstantsView: some View {*/
/*VStack(alignment: .leading, spacing: 16) {*/
/*HStack {*/
/*Text("Tracking Sensitivity")*/
/*.font(.headline)*/
/*Spacer()*/
/*Button(action: {*/
/*eyeTrackingService.enableDebugLogging.toggle()*/
/*}) {*/
/*Image(*/
/*systemName: eyeTrackingService.enableDebugLogging*/
/*? "ant.circle.fill" : "ant.circle"*/
/*)*/
/*.foregroundColor(eyeTrackingService.enableDebugLogging ? .orange : .secondary)*/
/*}*/
/*.buttonStyle(.plain)*/
/*.help("Toggle console debug logging")*/
Button(showAdvancedSettings ? "Hide Settings" : "Show Settings") {
withAnimation {
showAdvancedSettings.toggle()
}
}
.buttonStyle(.bordered)
.controlSize(.small)
}
/*Button(showAdvancedSettings ? "Hide Settings" : "Show Settings") {*/
/*withAnimation {*/
/*showAdvancedSettings.toggle()*/
/*}*/
/*}*/
/*.buttonStyle(.bordered)*/
/*.controlSize(.small)*/
/*}*/
// Debug info always visible when tracking
VStack(alignment: .leading, spacing: 8) {
Text("Live Values:")
.font(.caption)
.fontWeight(.semibold)
.foregroundColor(.secondary)
/*// Debug info always visible when tracking*/
/*VStack(alignment: .leading, spacing: 8) {*/
/*Text("Live Values:")*/
/*.font(.caption)*/
/*.fontWeight(.semibold)*/
/*.foregroundColor(.secondary)*/
if let leftRatio = eyeTrackingService.debugLeftPupilRatio,
let rightRatio = eyeTrackingService.debugRightPupilRatio
{
HStack(spacing: 16) {
VStack(alignment: .leading, spacing: 2) {
Text("Left Pupil: \(String(format: "%.3f", leftRatio))")
.font(.caption2)
.foregroundColor(
!trackingConstants.minPupilEnabled
&& !trackingConstants.maxPupilEnabled
? .secondary
: (leftRatio < trackingConstants.minPupilRatio
|| leftRatio > trackingConstants.maxPupilRatio)
? .orange : .green
)
Text("Right Pupil: \(String(format: "%.3f", rightRatio))")
.font(.caption2)
.foregroundColor(
!trackingConstants.minPupilEnabled
&& !trackingConstants.maxPupilEnabled
? .secondary
: (rightRatio < trackingConstants.minPupilRatio
|| rightRatio > trackingConstants.maxPupilRatio)
? .orange : .green
)
}
/*if let leftRatio = eyeTrackingService.debugLeftPupilRatio,*/
/*let rightRatio = eyeTrackingService.debugRightPupilRatio*/
/*{*/
/*HStack(spacing: 16) {*/
/*VStack(alignment: .leading, spacing: 2) {*/
/*Text("Left Pupil: \(String(format: "%.3f", leftRatio))")*/
/*.font(.caption2)*/
/*.foregroundColor(*/
/*!trackingConstants.minPupilEnabled*/
/*&& !trackingConstants.maxPupilEnabled*/
/*? .secondary*/
/*: (leftRatio < trackingConstants.minPupilRatio*/
/*|| leftRatio > trackingConstants.maxPupilRatio)*/
/*? .orange : .green*/
/*)*/
/*Text("Right Pupil: \(String(format: "%.3f", rightRatio))")*/
/*.font(.caption2)*/
/*.foregroundColor(*/
/*!trackingConstants.minPupilEnabled*/
/*&& !trackingConstants.maxPupilEnabled*/
/*? .secondary*/
/*: (rightRatio < trackingConstants.minPupilRatio*/
/*|| rightRatio > trackingConstants.maxPupilRatio)*/
/*? .orange : .green*/
/*)*/
/*}*/
Spacer()
/*Spacer()*/
VStack(alignment: .trailing, spacing: 2) {
Text(
"Range: \(String(format: "%.2f", trackingConstants.minPupilRatio)) - \(String(format: "%.2f", trackingConstants.maxPupilRatio))"
)
.font(.caption2)
.foregroundColor(.secondary)
let bothEyesOut =
(leftRatio < trackingConstants.minPupilRatio
|| leftRatio > trackingConstants.maxPupilRatio)
&& (rightRatio < trackingConstants.minPupilRatio
|| rightRatio > trackingConstants.maxPupilRatio)
Text(bothEyesOut ? "Both Out ⚠" : "In Range ")
.font(.caption2)
.foregroundColor(bothEyesOut ? .orange : .green)
}
}
} else {
Text("Pupil data unavailable")
.font(.caption2)
.foregroundColor(.secondary)
}
/*VStack(alignment: .trailing, spacing: 2) {*/
/*Text(*/
/*"Range: \(String(format: "%.2f", trackingConstants.minPupilRatio)) - \(String(format: "%.2f", trackingConstants.maxPupilRatio))"*/
/*)*/
/*.font(.caption2)*/
/*.foregroundColor(.secondary)*/
/*let bothEyesOut =*/
/*(leftRatio < trackingConstants.minPupilRatio*/
/*|| leftRatio > trackingConstants.maxPupilRatio)*/
/*&& (rightRatio < trackingConstants.minPupilRatio*/
/*|| rightRatio > trackingConstants.maxPupilRatio)*/
/*Text(bothEyesOut ? "Both Out " : "In Range ")*/
/*.font(.caption2)*/
/*.foregroundColor(bothEyesOut ? .orange : .green)*/
/*}*/
/*}*/
/*} else {*/
/*Text("Pupil data unavailable")*/
/*.font(.caption2)*/
/*.foregroundColor(.secondary)*/
/*}*/
if let yaw = eyeTrackingService.debugYaw,
let pitch = eyeTrackingService.debugPitch
{
HStack(spacing: 16) {
VStack(alignment: .leading, spacing: 2) {
Text("Yaw: \(String(format: "%.3f", yaw))")
.font(.caption2)
.foregroundColor(
!trackingConstants.yawEnabled
? .secondary
: abs(yaw) > trackingConstants.yawThreshold
? .orange : .green
)
Text("Pitch: \(String(format: "%.3f", pitch))")
.font(.caption2)
.foregroundColor(
!trackingConstants.pitchUpEnabled
&& !trackingConstants.pitchDownEnabled
? .secondary
: (pitch > trackingConstants.pitchUpThreshold
|| pitch < trackingConstants.pitchDownThreshold)
? .orange : .green
)
}
/*if let yaw = eyeTrackingService.debugYaw,*/
/*let pitch = eyeTrackingService.debugPitch*/
/*{*/
/*HStack(spacing: 16) {*/
/*VStack(alignment: .leading, spacing: 2) {*/
/*Text("Yaw: \(String(format: "%.3f", yaw))")*/
/*.font(.caption2)*/
/*.foregroundColor(*/
/*!trackingConstants.yawEnabled*/
/*? .secondary*/
/*: abs(yaw) > trackingConstants.yawThreshold*/
/*? .orange : .green*/
/*)*/
/*Text("Pitch: \(String(format: "%.3f", pitch))")*/
/*.font(.caption2)*/
/*.foregroundColor(*/
/*!trackingConstants.pitchUpEnabled*/
/*&& !trackingConstants.pitchDownEnabled*/
/*? .secondary*/
/*: (pitch > trackingConstants.pitchUpThreshold*/
/*|| pitch < trackingConstants.pitchDownThreshold)*/
/*? .orange : .green*/
/*)*/
/*}*/
Spacer()
/*Spacer()*/
VStack(alignment: .trailing, spacing: 2) {
Text(
"Yaw Max: \(String(format: "%.2f", trackingConstants.yawThreshold))"
)
.font(.caption2)
.foregroundColor(.secondary)
Text(
"Pitch: \(String(format: "%.2f", trackingConstants.pitchDownThreshold)) to \(String(format: "%.2f", trackingConstants.pitchUpThreshold))"
)
.font(.caption2)
.foregroundColor(.secondary)
}
}
}
}
.padding(.top, 4)
/*VStack(alignment: .trailing, spacing: 2) {*/
/*Text(*/
/*"Yaw Max: \(String(format: "%.2f", trackingConstants.yawThreshold))"*/
/*)*/
/*.font(.caption2)*/
/*.foregroundColor(.secondary)*/
/*Text(*/
/*"Pitch: \(String(format: "%.2f", trackingConstants.pitchDownThreshold)) to \(String(format: "%.2f", trackingConstants.pitchUpThreshold))"*/
/*)*/
/*.font(.caption2)*/
/*.foregroundColor(.secondary)*/
/*}*/
/*}*/
/*}*/
/*}*/
/*.padding(.top, 4)*/
if showAdvancedSettings {
VStack(spacing: 16) {
// Yaw Threshold
VStack(alignment: .leading, spacing: 4) {
HStack {
Toggle("", isOn: $trackingConstants.yawEnabled)
.labelsHidden()
Text("Yaw Threshold (Head Turn)")
.foregroundColor(
trackingConstants.yawEnabled ? .primary : .secondary)
Spacer()
Text(String(format: "%.2f rad", trackingConstants.yawThreshold))
.foregroundColor(.secondary)
.font(.caption)
}
Slider(value: $trackingConstants.yawThreshold, in: 0.1...0.8, step: 0.05)
.disabled(!trackingConstants.yawEnabled)
Text("Lower = more sensitive to head turning")
.font(.caption2)
.foregroundColor(.secondary)
}
/*if showAdvancedSettings {*/
/*VStack(spacing: 16) {*/
/*// Yaw Threshold*/
/*VStack(alignment: .leading, spacing: 4) {*/
/*HStack {*/
/*Toggle("", isOn: $trackingConstants.yawEnabled)*/
/*.labelsHidden()*/
/*Text("Yaw Threshold (Head Turn)")*/
/*.foregroundColor(*/
/*trackingConstants.yawEnabled ? .primary : .secondary)*/
/*Spacer()*/
/*Text(String(format: "%.2f rad", trackingConstants.yawThreshold))*/
/*.foregroundColor(.secondary)*/
/*.font(.caption)*/
/*}*/
/*Slider(value: $trackingConstants.yawThreshold, in: 0.1...0.8, step: 0.05)*/
/*.disabled(!trackingConstants.yawEnabled)*/
/*Text("Lower = more sensitive to head turning")*/
/*.font(.caption2)*/
/*.foregroundColor(.secondary)*/
/*}*/
Divider()
/*Divider()*/
// Pitch Up Threshold
VStack(alignment: .leading, spacing: 4) {
HStack {
Toggle("", isOn: $trackingConstants.pitchUpEnabled)
.labelsHidden()
Text("Pitch Up Threshold (Looking Up)")
.foregroundColor(
trackingConstants.pitchUpEnabled ? .primary : .secondary)
Spacer()
Text(String(format: "%.2f rad", trackingConstants.pitchUpThreshold))
.foregroundColor(.secondary)
.font(.caption)
}
Slider(
value: $trackingConstants.pitchUpThreshold, in: -0.2...0.5, step: 0.05
)
.disabled(!trackingConstants.pitchUpEnabled)
Text("Lower = more sensitive to looking up")
.font(.caption2)
.foregroundColor(.secondary)
}
/*// Pitch Up Threshold*/
/*VStack(alignment: .leading, spacing: 4) {*/
/*HStack {*/
/*Toggle("", isOn: $trackingConstants.pitchUpEnabled)*/
/*.labelsHidden()*/
/*Text("Pitch Up Threshold (Looking Up)")*/
/*.foregroundColor(*/
/*trackingConstants.pitchUpEnabled ? .primary : .secondary)*/
/*Spacer()*/
/*Text(String(format: "%.2f rad", trackingConstants.pitchUpThreshold))*/
/*.foregroundColor(.secondary)*/
/*.font(.caption)*/
/*}*/
/*Slider(*/
/*value: $trackingConstants.pitchUpThreshold, in: -0.2...0.5, step: 0.05*/
/*)*/
/*.disabled(!trackingConstants.pitchUpEnabled)*/
/*Text("Lower = more sensitive to looking up")*/
/*.font(.caption2)*/
/*.foregroundColor(.secondary)*/
/*}*/
Divider()
/*Divider()*/
// Pitch Down Threshold
VStack(alignment: .leading, spacing: 4) {
HStack {
Toggle("", isOn: $trackingConstants.pitchDownEnabled)
.labelsHidden()
Text("Pitch Down Threshold (Looking Down)")
.foregroundColor(
trackingConstants.pitchDownEnabled ? .primary : .secondary)
Spacer()
Text(String(format: "%.2f rad", trackingConstants.pitchDownThreshold))
.foregroundColor(.secondary)
.font(.caption)
}
Slider(
value: $trackingConstants.pitchDownThreshold, in: -0.8...0.0, step: 0.05
)
.disabled(!trackingConstants.pitchDownEnabled)
Text("Higher = more sensitive to looking down")
.font(.caption2)
.foregroundColor(.secondary)
}
/*// Pitch Down Threshold*/
/*VStack(alignment: .leading, spacing: 4) {*/
/*HStack {*/
/*Toggle("", isOn: $trackingConstants.pitchDownEnabled)*/
/*.labelsHidden()*/
/*Text("Pitch Down Threshold (Looking Down)")*/
/*.foregroundColor(*/
/*trackingConstants.pitchDownEnabled ? .primary : .secondary)*/
/*Spacer()*/
/*Text(String(format: "%.2f rad", trackingConstants.pitchDownThreshold))*/
/*.foregroundColor(.secondary)*/
/*.font(.caption)*/
/*}*/
/*Slider(*/
/*value: $trackingConstants.pitchDownThreshold, in: -0.8...0.0, step: 0.05*/
/*)*/
/*.disabled(!trackingConstants.pitchDownEnabled)*/
/*Text("Higher = more sensitive to looking down")*/
/*.font(.caption2)*/
/*.foregroundColor(.secondary)*/
/*}*/
Divider()
/*Divider()*/
// Min Pupil Ratio
VStack(alignment: .leading, spacing: 4) {
HStack {
Toggle("", isOn: $trackingConstants.minPupilEnabled)
.labelsHidden()
Text("Min Pupil Ratio (Looking Right)")
.foregroundColor(
trackingConstants.minPupilEnabled ? .primary : .secondary)
Spacer()
Text(String(format: "%.2f", trackingConstants.minPupilRatio))
.foregroundColor(.secondary)
.font(.caption)
}
Slider(value: $trackingConstants.minPupilRatio, in: 0.2...0.5, step: 0.01)
.disabled(!trackingConstants.minPupilEnabled)
Text("Higher = more sensitive to looking right")
.font(.caption2)
.foregroundColor(.secondary)
}
/*// Min Pupil Ratio*/
/*VStack(alignment: .leading, spacing: 4) {*/
/*HStack {*/
/*Toggle("", isOn: $trackingConstants.minPupilEnabled)*/
/*.labelsHidden()*/
/*Text("Min Pupil Ratio (Looking Right)")*/
/*.foregroundColor(*/
/*trackingConstants.minPupilEnabled ? .primary : .secondary)*/
/*Spacer()*/
/*Text(String(format: "%.2f", trackingConstants.minPupilRatio))*/
/*.foregroundColor(.secondary)*/
/*.font(.caption)*/
/*}*/
/*Slider(value: $trackingConstants.minPupilRatio, in: 0.2...0.5, step: 0.01)*/
/*.disabled(!trackingConstants.minPupilEnabled)*/
/*Text("Higher = more sensitive to looking right")*/
/*.font(.caption2)*/
/*.foregroundColor(.secondary)*/
/*}*/
Divider()
/*Divider()*/
// Max Pupil Ratio
VStack(alignment: .leading, spacing: 4) {
HStack {
Toggle("", isOn: $trackingConstants.maxPupilEnabled)
.labelsHidden()
Text("Max Pupil Ratio (Looking Left)")
.foregroundColor(
trackingConstants.maxPupilEnabled ? .primary : .secondary)
Spacer()
Text(String(format: "%.2f", trackingConstants.maxPupilRatio))
.foregroundColor(.secondary)
.font(.caption)
}
Slider(value: $trackingConstants.maxPupilRatio, in: 0.5...0.8, step: 0.01)
.disabled(!trackingConstants.maxPupilEnabled)
Text("Lower = more sensitive to looking left")
.font(.caption2)
.foregroundColor(.secondary)
}
/*// Max Pupil Ratio*/
/*VStack(alignment: .leading, spacing: 4) {*/
/*HStack {*/
/*Toggle("", isOn: $trackingConstants.maxPupilEnabled)*/
/*.labelsHidden()*/
/*Text("Max Pupil Ratio (Looking Left)")*/
/*.foregroundColor(*/
/*trackingConstants.maxPupilEnabled ? .primary : .secondary)*/
/*Spacer()*/
/*Text(String(format: "%.2f", trackingConstants.maxPupilRatio))*/
/*.foregroundColor(.secondary)*/
/*.font(.caption)*/
/*}*/
/*Slider(value: $trackingConstants.maxPupilRatio, in: 0.5...0.8, step: 0.01)*/
/*.disabled(!trackingConstants.maxPupilEnabled)*/
/*Text("Lower = more sensitive to looking left")*/
/*.font(.caption2)*/
/*.foregroundColor(.secondary)*/
/*}*/
Divider()
/*Divider()*/
// Eye Closed Threshold
VStack(alignment: .leading, spacing: 4) {
HStack {
Toggle("", isOn: $trackingConstants.eyeClosedEnabled)
.labelsHidden()
Text("Eye Closed Threshold")
.foregroundColor(
trackingConstants.eyeClosedEnabled ? .primary : .secondary)
Spacer()
Text(String(format: "%.3f", trackingConstants.eyeClosedThreshold))
.foregroundColor(.secondary)
.font(.caption)
}
Slider(
value: Binding(
get: { Double(trackingConstants.eyeClosedThreshold) },
set: { trackingConstants.eyeClosedThreshold = CGFloat($0) }
), in: 0.01...0.1, step: 0.005
)
.disabled(!trackingConstants.eyeClosedEnabled)
Text("Lower = more sensitive to eye closure")
.font(.caption2)
.foregroundColor(.secondary)
}
/*// Eye Closed Threshold*/
/*VStack(alignment: .leading, spacing: 4) {*/
/*HStack {*/
/*Toggle("", isOn: $trackingConstants.eyeClosedEnabled)*/
/*.labelsHidden()*/
/*Text("Eye Closed Threshold")*/
/*.foregroundColor(*/
/*trackingConstants.eyeClosedEnabled ? .primary : .secondary)*/
/*Spacer()*/
/*Text(String(format: "%.3f", trackingConstants.eyeClosedThreshold))*/
/*.foregroundColor(.secondary)*/
/*.font(.caption)*/
/*}*/
/*Slider(*/
/*value: Binding(*/
/*get: { Double(trackingConstants.eyeClosedThreshold) },*/
/*set: { trackingConstants.eyeClosedThreshold = CGFloat($0) }*/
/*), in: 0.01...0.1, step: 0.005*/
/*)*/
/*.disabled(!trackingConstants.eyeClosedEnabled)*/
/*Text("Lower = more sensitive to eye closure")*/
/*.font(.caption2)*/
/*.foregroundColor(.secondary)*/
/*}*/
// Reset button
Button(action: {
trackingConstants.resetToDefaults()
}) {
HStack {
Image(systemName: "arrow.counterclockwise")
Text("Reset to Defaults")
}
.frame(maxWidth: .infinity)
}
.buttonStyle(.bordered)
.controlSize(.small)
.padding(.top, 8)
}
.padding(.top, 8)
}
}
.padding()
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
}
/*// Reset button*/
/*Button(action: {*/
/*trackingConstants.resetToDefaults()*/
/*}) {*/
/*HStack {*/
/*Image(systemName: "arrow.counterclockwise")*/
/*Text("Reset to Defaults")*/
/*}*/
/*.frame(maxWidth: .infinity)*/
/*}*/
/*.buttonStyle(.bordered)*/
/*.controlSize(.small)*/
/*.padding(.top, 8)*/
/*}*/
/*.padding(.top, 8)*/
/*}*/
/*}*/
/*.padding()*/
/*.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))*/
/*}*/
private var debugEyeTrackingView: some View {
VStack(alignment: .leading, spacing: 12) {
Text("Debug Eye Tracking Data")
.font(.headline)
.foregroundColor(.blue)
/*private var debugEyeTrackingView: some View {*/
/*VStack(alignment: .leading, spacing: 12) {*/
/*Text("Debug Eye Tracking Data")*/
/*.font(.headline)*/
/*.foregroundColor(.blue)*/
VStack(alignment: .leading, spacing: 8) {
Text("Face Detected: \(eyeTrackingService.faceDetected ? "Yes" : "No")")
.font(.caption)
/*VStack(alignment: .leading, spacing: 8) {*/
/*Text("Face Detected: \(eyeTrackingService.faceDetected ? "Yes" : "No")")*/
/*.font(.caption)*/
Text("Looking at Screen: \(eyeTrackingService.userLookingAtScreen ? "Yes" : "No")")
.font(.caption)
/*Text("Looking at Screen: \(eyeTrackingService.userLookingAtScreen ? "Yes" : "No")")*/
/*.font(.caption)*/
Text("Eyes Closed: \(eyeTrackingService.isEyesClosed ? "Yes" : "No")")
.font(.caption)
/*Text("Eyes Closed: \(eyeTrackingService.isEyesClosed ? "Yes" : "No")")*/
/*.font(.caption)*/
if eyeTrackingService.faceDetected {
Text("Yaw: 0.0")
.font(.caption)
/*if eyeTrackingService.faceDetected {*/
/*Text("Yaw: 0.0")*/
/*.font(.caption)*/
Text("Roll: 0.0")
.font(.caption)
}
}
.font(.caption)
.foregroundColor(.secondary)
}
.padding()
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
}
/*Text("Roll: 0.0")*/
/*.font(.caption)*/
/*}*/
/*}*/
/*.font(.caption)*/
/*.foregroundColor(.secondary)*/
/*}*/
/*.padding()*/
/*.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))*/
/*}*/
}
#Preview {