turns out - vision framework ain't good enough for this
@@ -16,7 +16,6 @@ class AppDelegate: NSObject, NSApplicationDelegate, ObservableObject {
private var updateManager: UpdateManager?
private var overlayReminderWindowController: NSWindowController?
private var subtleReminderWindowController: NSWindowController?
- private var settingsWindowController: NSWindowController?
private var cancellables = Set<AnyCancellable>()
private var hasStartedTimers = false

@@ -271,120 +270,29 @@ class AppDelegate: NSObject, NSApplicationDelegate, ObservableObject {
subtleReminderWindowController = nil
}

// Public method to open settings window
func openSettings(tab: Int = 0) {
- // Post notification to close menu bar popover
- NotificationCenter.default.post(name: Notification.Name("CloseMenuBarPopover"), object: nil)
+ handleMenuDismissal()

- // Dismiss overlay reminders to prevent them from blocking settings window
- // Overlay reminders are at .floating level which would sit above settings
- dismissOverlayReminder()

// Small delay to allow menu bar to close before opening settings
DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { [weak self] in
- self?.openSettingsWindow(tab: tab)
+ guard let self else { return }
+ SettingsWindowPresenter.shared.show(settingsManager: self.settingsManager, initialTab: tab)
}
}

// Public method to reopen onboarding window
func openOnboarding() {
- NotificationCenter.default.post(name: Notification.Name("CloseMenuBarPopover"), object: nil)
-
- // Dismiss overlay reminders to prevent blocking onboarding window
- dismissOverlayReminder()
+ handleMenuDismissal()

DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { [weak self] in
- guard let self = self else { return }
-
- if self.activateWindow(withIdentifier: WindowIdentifiers.onboarding) {
- return
- }
-
- let window = NSWindow(
- contentRect: NSRect(x: 0, y: 0, width: 700, height: 700),
- styleMask: [.titled, .closable, .miniaturizable, .fullSizeContentView],
- backing: .buffered,
- defer: false
- )
-
- window.identifier = WindowIdentifiers.onboarding
- window.titleVisibility = .hidden
- window.titlebarAppearsTransparent = true
- window.center()
- window.isReleasedWhenClosed = true
- window.contentView = NSHostingView(
- rootView: OnboardingContainerView(settingsManager: self.settingsManager)
- )
-
- window.makeKeyAndOrderFront(nil)
- NSApp.activate(ignoringOtherApps: true)
+ guard let self else { return }
+ OnboardingWindowPresenter.shared.show(settingsManager: self.settingsManager)
}
}

- private func openSettingsWindow(tab: Int) {
- if let existingWindow = findWindow(withIdentifier: WindowIdentifiers.settings) {
- NotificationCenter.default.post(
- name: Notification.Name("SwitchToSettingsTab"),
- object: tab
- )
- existingWindow.makeKeyAndOrderFront(nil)
- NSApp.activate(ignoringOtherApps: true)
- return
+ private func handleMenuDismissal() {
+ NotificationCenter.default.post(name: Notification.Name("CloseMenuBarPopover"), object: nil)
+ dismissOverlayReminder()
+ }

- let window = NSWindow(
- contentRect: NSRect(x: 0, y: 0, width: 700, height: 700),
- styleMask: [.titled, .closable, .miniaturizable, .resizable, .fullSizeContentView],
- backing: .buffered,
- defer: false
- )
-
- window.identifier = WindowIdentifiers.settings
- window.titleVisibility = .hidden
- window.titlebarAppearsTransparent = true
- window.toolbarStyle = .unified
- window.toolbar = NSToolbar()
- window.center()
- window.setFrameAutosaveName("SettingsWindow")
- window.isReleasedWhenClosed = false
-
- window.contentView = NSHostingView(
- rootView: SettingsWindowView(settingsManager: settingsManager, initialTab: tab)
- )
-
- let windowController = NSWindowController(window: window)
- windowController.showWindow(nil)
-
- settingsWindowController = windowController
-
- NSApp.activate(ignoringOtherApps: true)
-
- NotificationCenter.default.addObserver(
- self,
- selector: #selector(settingsWindowWillCloseNotification(_:)),
- name: NSWindow.willCloseNotification,
- object: window
- )
- }
-
- @objc private func settingsWindowWillCloseNotification(_ notification: Notification) {
- settingsWindowController = nil
- }
-
- /// Finds a window by its identifier
- private func findWindow(withIdentifier identifier: NSUserInterfaceItemIdentifier) -> NSWindow? {
- return NSApplication.shared.windows.first { $0.identifier == identifier }
- }
-
- /// Brings window to front if it exists, returns true if found
- private func activateWindow(withIdentifier identifier: NSUserInterfaceItemIdentifier) -> Bool {
- guard let window = findWindow(withIdentifier: identifier) else {
- return false
- }
- window.makeKeyAndOrderFront(nil)
- NSApp.activate(ignoringOtherApps: true)
- return true
- }
}

// Custom window class that can become key to receive keyboard events

@@ -5,9 +5,12 @@
// Created by Mike Freno on 1/14/26.
//

+ import Combine
import Foundation

- enum EyeTrackingConstants {
+ class EyeTrackingConstants: ObservableObject {
+ static let shared = EyeTrackingConstants()

// MARK: - Logging
/// Interval between log messages in seconds
static let logInterval: TimeInterval = 0.5
@@ -15,29 +18,57 @@ enum EyeTrackingConstants {
// MARK: - Eye Closure Detection
/// Threshold for eye closure (smaller value means eye must be more closed to trigger)
/// Range: 0.0 to 1.0 (approximate eye opening ratio)
- static let eyeClosedThreshold: CGFloat = 0.02
+ @Published var eyeClosedThreshold: CGFloat = 0.02
+ @Published var eyeClosedEnabled: Bool = true

// MARK: - Face Pose Thresholds
/// Maximum yaw (left/right head turn) in radians before considering user looking away
- /// 0.20 radians ≈ 11.5 degrees (Tightened from 0.35)
- static let yawThreshold: Double = 0.2
+ /// NOTE: Vision Framework often provides unreliable yaw/pitch on macOS - disabled by default
+ @Published var yawThreshold: Double = 0.3
+ @Published var yawEnabled: Bool = false

/// Pitch threshold for looking UP (above screen).
/// Since camera is at top, looking at screen is negative pitch.
/// Values > 0.1 imply looking straight ahead or up (away from screen).
- static let pitchUpThreshold: Double = 0.1
+ /// NOTE: Vision Framework often doesn't provide pitch data on macOS - disabled by default
+ @Published var pitchUpThreshold: Double = 0.1
+ @Published var pitchUpEnabled: Bool = false

/// Pitch threshold for looking DOWN (at keyboard/lap).
/// Values < -0.45 imply looking too far down.
- static let pitchDownThreshold: Double = -0.45
+ /// NOTE: Vision Framework often doesn't provide pitch data on macOS - disabled by default
+ @Published var pitchDownThreshold: Double = -0.45
+ @Published var pitchDownEnabled: Bool = false

// MARK: - Pupil Tracking Thresholds
/// Minimum horizontal pupil ratio (0.0 = right edge, 1.0 = left edge)
/// Values below this are considered looking right (camera view)
- static let minPupilRatio: Double = 0.40
+ /// Tightened to 0.35 based on observed values (typically 0.31-0.47)
+ @Published var minPupilRatio: Double = 0.35
+ @Published var minPupilEnabled: Bool = true

/// Maximum horizontal pupil ratio
/// Values above this are considered looking left (camera view)
- /// Tightened from 0.75 to 0.65
- static let maxPupilRatio: Double = 0.6
+ /// Tightened to 0.45 based on observed values (typically 0.31-0.47)
+ @Published var maxPupilRatio: Double = 0.45
+ @Published var maxPupilEnabled: Bool = true

+ private init() {}
+
+ // MARK: - Reset to Defaults
+ func resetToDefaults() {
+ eyeClosedThreshold = 0.02
+ eyeClosedEnabled = true
+ yawThreshold = 0.3
+ yawEnabled = false // Disabled by default - Vision Framework unreliable on macOS
+ pitchUpThreshold = 0.1
+ pitchUpEnabled = false // Disabled by default - often not available on macOS
+ pitchDownThreshold = -0.45
+ pitchDownEnabled = false // Disabled by default - often not available on macOS
+ minPupilRatio = 0.35
+ minPupilEnabled = true
+ maxPupilRatio = 0.45
+ maxPupilEnabled = true
+ }
}
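Because EyeTrackingConstants is now an ObservableObject singleton with @Published thresholds rather than an enum of statics, a settings pane can bind controls directly to the shared instance. A minimal sketch of such a binding; the view and its labels are illustrative, not part of this commit:

import SwiftUI

// Illustrative settings row; only EyeTrackingConstants itself comes from the commit.
struct ThresholdSliderView: View {
    @ObservedObject var constants = EyeTrackingConstants.shared

    var body: some View {
        VStack(alignment: .leading, spacing: 8) {
            Toggle("Eye closure detection", isOn: $constants.eyeClosedEnabled)
            // eyeClosedThreshold is a CGFloat used in a small 0.0...0.1 working range
            Slider(value: $constants.eyeClosedThreshold, in: 0.0...0.1)
            Text("Closed-eye threshold: \(constants.eyeClosedThreshold, specifier: "%.3f")")
            Button("Reset to defaults") { constants.resetToDefaults() }
        }
    }
}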
Gaze/Protocols/EnforceModeProviding.swift (new file, 57 lines)
@@ -0,0 +1,57 @@
//
// EnforceModeProviding.swift
// Gaze
//
// Protocol abstraction for EnforceModeService to enable dependency injection and testing.
//

import Combine
import Foundation

/// Protocol that defines the interface for enforce mode functionality.
@MainActor
protocol EnforceModeProviding: AnyObject, ObservableObject {
/// Whether enforce mode is currently enabled
var isEnforceModeEnabled: Bool { get }

/// Whether the camera is currently active
var isCameraActive: Bool { get }

/// Whether the user has complied with the break
var userCompliedWithBreak: Bool { get }

/// Whether we're in test mode
var isTestMode: Bool { get }

/// Enables enforce mode (may request camera permission)
func enableEnforceMode() async

/// Disables enforce mode
func disableEnforceMode()

/// Sets the timer engine reference
func setTimerEngine(_ engine: TimerEngine)

/// Checks if a break should be enforced for the given timer
func shouldEnforceBreak(for timerIdentifier: TimerIdentifier) -> Bool

/// Starts the camera for lookaway timer
func startCameraForLookawayTimer(secondsRemaining: Int) async

/// Stops the camera
func stopCamera()

/// Checks if user is complying with the break
func checkUserCompliance()

/// Handles reminder dismissal
func handleReminderDismissed()

/// Starts test mode
func startTestMode() async

/// Stops test mode
func stopTestMode()
}

extension EnforceModeService: EnforceModeProviding {}
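Because EnforceModeProviding only requires readable state plus a few entry points, a test double can stay small. A possible mock, sketched as an assumption about how a test target might conform; the class below is not in the commit:

import Combine

@MainActor
final class MockEnforceModeService: EnforceModeProviding {
    private(set) var isEnforceModeEnabled = false
    private(set) var isCameraActive = false
    var userCompliedWithBreak = true
    private(set) var isTestMode = false

    func enableEnforceMode() async { isEnforceModeEnabled = true }
    func disableEnforceMode() { isEnforceModeEnabled = false }
    func setTimerEngine(_ engine: TimerEngine) {} // no-op in tests
    func shouldEnforceBreak(for timerIdentifier: TimerIdentifier) -> Bool { isEnforceModeEnabled }
    func startCameraForLookawayTimer(secondsRemaining: Int) async { isCameraActive = true }
    func stopCamera() { isCameraActive = false }
    func checkUserCompliance() {}
    func handleReminderDismissed() {}
    func startTestMode() async { isTestMode = true }
    func stopTestMode() { isTestMode = false }
}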
Gaze/Protocols/SettingsProviding.swift (new file, 48 lines)
@@ -0,0 +1,48 @@
//
// SettingsProviding.swift
// Gaze
//
// Protocol abstraction for SettingsManager to enable dependency injection and testing.
//

import Combine
import Foundation

/// Protocol that defines the interface for managing application settings.
/// This abstraction allows for dependency injection and easy mocking in tests.
@MainActor
protocol SettingsProviding: AnyObject, ObservableObject {
/// The current application settings
var settings: AppSettings { get set }

/// Publisher for observing settings changes
var settingsPublisher: Published<AppSettings>.Publisher { get }

/// Retrieves the timer configuration for a specific timer type
func timerConfiguration(for type: TimerType) -> TimerConfiguration

/// Updates the timer configuration for a specific timer type
func updateTimerConfiguration(for type: TimerType, configuration: TimerConfiguration)

/// Returns all timer configurations
func allTimerConfigurations() -> [TimerType: TimerConfiguration]

/// Saves settings to persistent storage
func save()

/// Forces immediate save
func saveImmediately()

/// Loads settings from persistent storage
func load()

/// Resets settings to default values
func resetToDefaults()
}

/// Extension to provide the publisher for SettingsManager
extension SettingsManager: SettingsProviding {
var settingsPublisher: Published<AppSettings>.Publisher {
$settings
}
}
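TimerEngine and ServiceContainer (below) now accept `any SettingsProviding`, so tests can inject canned settings. A hedged sketch of what a mock might look like; it assumes AppSettings exposes the `.defaults` value that ServiceContainer.forTesting references, and that TimerConfiguration can be built with a parameterless initializer:

import Combine

@MainActor
final class MockSettingsManager: SettingsProviding {
    @Published var settings: AppSettings = .defaults
    var settingsPublisher: Published<AppSettings>.Publisher { $settings }

    // In-memory store instead of persistence.
    private var configurations: [TimerType: TimerConfiguration] = [:]

    func timerConfiguration(for type: TimerType) -> TimerConfiguration {
        configurations[type] ?? TimerConfiguration() // assumed default initializer
    }
    func updateTimerConfiguration(for type: TimerType, configuration: TimerConfiguration) {
        configurations[type] = configuration
    }
    func allTimerConfigurations() -> [TimerType: TimerConfiguration] { configurations }
    func save() {}
    func saveImmediately() {}
    func load() {}
    func resetToDefaults() { settings = .defaults }
}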
Gaze/Protocols/SmartModeProviding.swift (new file, 52 lines)
@@ -0,0 +1,52 @@
//
// SmartModeProviding.swift
// Gaze
//
// Protocols for Smart Mode services (Fullscreen Detection, Idle Monitoring).
//

import Combine
import Foundation

/// Protocol for fullscreen detection functionality
@MainActor
protocol FullscreenDetectionProviding: AnyObject, ObservableObject {
/// Whether a fullscreen app is currently active
var isFullscreenActive: Bool { get }

/// Publisher for fullscreen state changes
var isFullscreenActivePublisher: Published<Bool>.Publisher { get }

/// Forces an immediate state update
func forceUpdate()
}

/// Protocol for idle monitoring functionality
@MainActor
protocol IdleMonitoringProviding: AnyObject, ObservableObject {
/// Whether the user is currently idle
var isIdle: Bool { get }

/// Publisher for idle state changes
var isIdlePublisher: Published<Bool>.Publisher { get }

/// Updates the idle threshold
func updateThreshold(minutes: Int)

/// Forces an immediate state update
func forceUpdate()
}

// MARK: - Extensions for conformance

extension FullscreenDetectionService: FullscreenDetectionProviding {
var isFullscreenActivePublisher: Published<Bool>.Publisher {
$isFullscreenActive
}
}

extension IdleMonitoringService: IdleMonitoringProviding {
var isIdlePublisher: Published<Bool>.Publisher {
$isIdle
}
}
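The publisher requirements are what let consumers observe either the real service or a mock through Combine without knowing which is behind the protocol. A brief illustrative subscription; the function is an example, not part of the commit:

import Combine

@MainActor
func observeIdle(_ monitor: some IdleMonitoringProviding, storeIn cancellables: inout Set<AnyCancellable>) {
    monitor.isIdlePublisher
        .removeDuplicates()
        .sink { isIdle in
            // Mirrors what TimerEngine.handleIdleChange does with this signal.
            print(isIdle ? "user idle - pause timers" : "user active - resume timers")
        }
        .store(in: &cancellables)
}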
@@ -168,7 +168,8 @@ class EnforceModeService: ObservableObject {
private func startFaceDetectionTimer() {
stopFaceDetectionTimer()
// Check every 1 second
- faceDetectionTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { [weak self] _ in
+ faceDetectionTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) {
+ [weak self] _ in
Task { @MainActor [weak self] in
self?.checkFaceDetectionTimeout()
}
@@ -190,7 +191,9 @@ class EnforceModeService: ObservableObject {

// If person has not been detected for too long, temporarily disable enforce mode
if timeSinceLastDetection > faceDetectionTimeout {
- print("⏰ Person not detected for \(faceDetectionTimeout)s. Temporarily disabling enforce mode.")
+ print(
+ "⏰ Person not detected for \(faceDetectionTimeout)s. Temporarily disabling enforce mode."
+ )
disableEnforceMode()
}
}
@@ -230,3 +233,4 @@ class EnforceModeService: ObservableObject {
isTestMode = false
}
}

@@ -8,6 +8,7 @@
import AVFoundation
import Combine
import Vision
+ import simd

@MainActor
class EyeTrackingService: NSObject, ObservableObject {
@@ -18,6 +19,16 @@ class EyeTrackingService: NSObject, ObservableObject {
@Published var userLookingAtScreen = true
@Published var faceDetected = false

+ // Debug properties for UI display
+ @Published var debugLeftPupilRatio: Double?
+ @Published var debugRightPupilRatio: Double?
+ @Published var debugYaw: Double?
+ @Published var debugPitch: Double?
+ @Published var enableDebugLogging: Bool = false
+
+ // Throttle for debug logging
+ private var lastDebugLogTime: Date = .distantPast
+
private var captureSession: AVCaptureSession?
private var videoOutput: AVCaptureVideoDataOutput?
private let videoDataOutputQueue = DispatchQueue(
@@ -116,7 +127,7 @@ class EyeTrackingService: NSObject, ObservableObject {
self.videoOutput = output
}

- private func processFaceObservations(_ observations: [VNFaceObservation]?) {
+ private func processFaceObservations(_ observations: [VNFaceObservation]?, imageSize: CGSize) {
guard let observations = observations, !observations.isEmpty else {
faceDetected = false
userLookingAtScreen = false
@@ -126,10 +137,26 @@ class EyeTrackingService: NSObject, ObservableObject {
faceDetected = true
let face = observations.first!

+ if enableDebugLogging {
+ print("👁️ Face observation - boundingBox: \(face.boundingBox)")
+ print(
+ "👁️ Yaw: \(face.yaw?.doubleValue ?? 999), Pitch: \(face.pitch?.doubleValue ?? 999), Roll: \(face.roll?.doubleValue ?? 999)"
+ )
+ }

guard let landmarks = face.landmarks else {
+ if enableDebugLogging {
+ print("👁️ No landmarks available")
+ }
return
}

+ if enableDebugLogging {
+ print(
+ "👁️ Landmarks - leftEye: \(landmarks.leftEye != nil), rightEye: \(landmarks.rightEye != nil), leftPupil: \(landmarks.leftPupil != nil), rightPupil: \(landmarks.rightPupil != nil)"
+ )
+ }

// Check eye closure
if let leftEye = landmarks.leftEye,
let rightEye = landmarks.rightEye
@@ -140,13 +167,25 @@ class EyeTrackingService: NSObject, ObservableObject {
}

// Check gaze direction
- let lookingAway = detectLookingAway(face: face, landmarks: landmarks, shouldLog: false)
+ let lookingAway = detectLookingAway(
+ face: face,
+ landmarks: landmarks,
+ imageSize: imageSize,
+ shouldLog: enableDebugLogging
+ )
userLookingAtScreen = !lookingAway
}

private func detectEyesClosed(
leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D, shouldLog: Bool
) -> Bool {
+ let constants = EyeTrackingConstants.shared
+
+ // If eye closure detection is disabled, always return false (eyes not closed)
+ guard constants.eyeClosedEnabled else {
+ return false
+ }
+
guard leftEye.pointCount >= 2, rightEye.pointCount >= 2 else {
return false
}
@@ -154,7 +193,7 @@ class EyeTrackingService: NSObject, ObservableObject {
let leftEyeHeight = calculateEyeHeight(leftEye, shouldLog: shouldLog)
let rightEyeHeight = calculateEyeHeight(rightEye, shouldLog: shouldLog)

- let closedThreshold = EyeTrackingConstants.eyeClosedThreshold
+ let closedThreshold = constants.eyeClosedThreshold

let isClosed = leftEyeHeight < closedThreshold && rightEyeHeight < closedThreshold

@@ -175,22 +214,57 @@ class EyeTrackingService: NSObject, ObservableObject {
}

private func detectLookingAway(
- face: VNFaceObservation, landmarks: VNFaceLandmarks2D, shouldLog: Bool
+ face: VNFaceObservation, landmarks: VNFaceLandmarks2D, imageSize: CGSize, shouldLog: Bool
) -> Bool {
+ let constants = EyeTrackingConstants.shared

// 1. Face Pose Check (Yaw & Pitch)
let yaw = face.yaw?.doubleValue ?? 0.0
let pitch = face.pitch?.doubleValue ?? 0.0
+ let roll = face.roll?.doubleValue ?? 0.0

- let yawThreshold = EyeTrackingConstants.yawThreshold
- // Pitch check:
- // - Camera at top = looking at screen is negative pitch
- // - Looking above screen (straight ahead) is ~0 or positive -> Look Away
- // - Looking at keyboard/lap is very negative -> Look Away
- let pitchLookingAway =
- pitch > EyeTrackingConstants.pitchUpThreshold
- || pitch < EyeTrackingConstants.pitchDownThreshold
+ // Debug logging
+ if shouldLog {
+ print("👁️ Face Pose - Yaw: \(yaw), Pitch: \(pitch), Roll: \(roll)")
+ print(
+ "👁️ Face available data - hasYaw: \(face.yaw != nil), hasPitch: \(face.pitch != nil), hasRoll: \(face.roll != nil)"
+ )
+ }

- let poseLookingAway = abs(yaw) > yawThreshold || pitchLookingAway
+ // Update debug values
+ Task { @MainActor in
+ debugYaw = yaw
+ debugPitch = pitch
+ }
+
+ var poseLookingAway = false
+
+ // Only use yaw/pitch if they're actually available and enabled
+ // Note: Vision Framework on macOS often doesn't provide reliable pitch data
+ if face.pitch != nil {
+ // Check yaw if enabled
+ if constants.yawEnabled {
+ let yawThreshold = constants.yawThreshold
+ if abs(yaw) > yawThreshold {
+ poseLookingAway = true
+ }
+ }
+
+ // Check pitch if either threshold is enabled
+ if !poseLookingAway {
+ var pitchLookingAway = false
+
+ if constants.pitchUpEnabled && pitch > constants.pitchUpThreshold {
+ pitchLookingAway = true
+ }
+
+ if constants.pitchDownEnabled && pitch < constants.pitchDownThreshold {
+ pitchLookingAway = true
+ }
+
+ poseLookingAway = pitchLookingAway
+ }
+ }

// 2. Eye Gaze Check (Pupil Position)
var eyesLookingAway = false
@@ -201,21 +275,91 @@ class EyeTrackingService: NSObject, ObservableObject {
let rightPupil = landmarks.rightPupil
{

- let leftRatio = calculatePupilHorizontalRatio(eye: leftEye, pupil: leftPupil)
- let rightRatio = calculatePupilHorizontalRatio(eye: rightEye, pupil: rightPupil)
+ // NEW: Use inter-eye distance method
+ let gazeOffsets = calculateGazeUsingInterEyeDistance(
+ leftEye: leftEye,
+ rightEye: rightEye,
+ leftPupil: leftPupil,
+ rightPupil: rightPupil,
+ imageSize: imageSize,
+ faceBoundingBox: face.boundingBox
+ )
+
+ let leftRatio = calculatePupilHorizontalRatio(
+ eye: leftEye,
+ pupil: leftPupil,
+ imageSize: imageSize,
+ faceBoundingBox: face.boundingBox
+ )
+ let rightRatio = calculatePupilHorizontalRatio(
+ eye: rightEye,
+ pupil: rightPupil,
+ imageSize: imageSize,
+ faceBoundingBox: face.boundingBox
+ )

+ // Debug logging
+ if shouldLog {
+ print(
+ "👁️ Pupil Ratios (OLD METHOD) - Left: \(String(format: "%.3f", leftRatio)), Right: \(String(format: "%.3f", rightRatio))"
+ )
+ print(
+ "👁️ Gaze Offsets (NEW METHOD) - Left: \(String(format: "%.3f", gazeOffsets.leftGaze)), Right: \(String(format: "%.3f", gazeOffsets.rightGaze))"
+ )
+ print(
+ "👁️ Thresholds - Min: \(constants.minPupilRatio), Max: \(constants.maxPupilRatio)"
+ )
+ }
+
+ // Update debug values
+ Task { @MainActor in
+ debugLeftPupilRatio = leftRatio
+ debugRightPupilRatio = rightRatio
+ }

// Normal range for "looking center" is roughly 0.3 to 0.7
// (0.0 = extreme right, 1.0 = extreme left relative to face)
// Note: Camera is mirrored, so logic might be inverted

- let minRatio = EyeTrackingConstants.minPupilRatio
- let maxRatio = EyeTrackingConstants.maxPupilRatio
+ var leftLookingAway = false
+ var rightLookingAway = false

- let leftLookingAway = leftRatio < minRatio || leftRatio > maxRatio
- let rightLookingAway = rightRatio < minRatio || rightRatio > maxRatio
+ // Check min pupil ratio if enabled
+ /*if constants.minPupilEnabled {*/
+ /*let minRatio = constants.minPupilRatio*/
+ /*if leftRatio < minRatio {*/
+ /*leftLookingAway = true*/
+ /*}*/
+ /*if rightRatio < minRatio {*/
+ /*rightLookingAway = true*/
+ /*}*/
+ /*}*/

- // Consider looking away if BOTH eyes are off-center
- eyesLookingAway = leftLookingAway && rightLookingAway
+ /*// Check max pupil ratio if enabled*/
+ /*if constants.maxPupilEnabled {*/
+ /*let maxRatio = constants.maxPupilRatio*/
+ /*if leftRatio > maxRatio {*/
+ /*leftLookingAway = true*/
+ /*}*/
+ /*if rightRatio > maxRatio {*/
+ /*rightLookingAway = true*/
+ /*}*/
+ /*}*/

+ // Consider looking away if EITHER eye is off-center
+ // Changed from AND to OR logic because requiring both eyes makes detection too restrictive
+ // This is more sensitive but also more reliable for detecting actual looking away
+ eyesLookingAway = leftLookingAway || rightLookingAway
+
+ if shouldLog {
+ print(
+ "👁️ Looking Away - Left: \(leftLookingAway), Right: \(rightLookingAway), Either: \(eyesLookingAway)"
+ )
+ }
+ } else {
+ if shouldLog {
+ print("👁️ Missing pupil or eye landmarks!")
+ }
}

let isLookingAway = poseLookingAway || eyesLookingAway
@@ -224,33 +368,227 @@ class EyeTrackingService: NSObject, ObservableObject {
}

private func calculatePupilHorizontalRatio(
- eye: VNFaceLandmarkRegion2D, pupil: VNFaceLandmarkRegion2D
+ eye: VNFaceLandmarkRegion2D,
+ pupil: VNFaceLandmarkRegion2D,
+ imageSize: CGSize,
+ faceBoundingBox: CGRect
) -> Double {
// Use normalizedPoints which are already normalized to face bounding box
let eyePoints = eye.normalizedPoints
let pupilPoints = pupil.normalizedPoints

+ // Throttle debug logging to every 0.5 seconds
+ let now = Date()
+ let shouldLog = now.timeIntervalSince(lastDebugLogTime) >= 0.5
+
+ if shouldLog {
+ lastDebugLogTime = now
+
+ print("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━")
+ print("📊 EYE TRACKING DEBUG DATA")
+ print("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━")
+
+ print("\n🖼️ IMAGE SIZE:")
+ print(" Width: \(imageSize.width), Height: \(imageSize.height)")
+
+ print("\n📦 FACE BOUNDING BOX (normalized):")
+ print(" Origin: (\(faceBoundingBox.origin.x), \(faceBoundingBox.origin.y))")
+ print(" Size: (\(faceBoundingBox.size.width), \(faceBoundingBox.size.height))")
+
+ print("\n👁️ EYE LANDMARK POINTS (normalized to face bounding box - from Vision):")
+ print(" Count: \(eyePoints.count)")
+ let eyeMinX = eyePoints.min(by: { $0.x < $1.x })?.x ?? 0
+ let eyeMaxX = eyePoints.max(by: { $0.x < $1.x })?.x ?? 0
+ for (index, point) in eyePoints.enumerated() {
+ var marker = ""
+ if abs(point.x - eyeMinX) < 0.0001 {
+ marker = " ← LEFTMOST (inner corner)"
+ } else if abs(point.x - eyeMaxX) < 0.0001 {
+ marker = " ← RIGHTMOST (outer corner)"
+ }
+ if index == 0 {
+ marker += " [FIRST]"
+ } else if index == eyePoints.count - 1 {
+ marker += " [LAST]"
+ }
+ print(
+ " [\(index)]: (\(String(format: "%.4f", point.x)), \(String(format: "%.4f", point.y)))\(marker)"
+ )
+ }
+
+ print("\n👁️ PUPIL LANDMARK POINTS (normalized to face bounding box - from Vision):")
+ print(" Count: \(pupilPoints.count)")
+ for (index, point) in pupilPoints.enumerated() {
+ print(
+ " [\(index)]: (\(String(format: "%.4f", point.x)), \(String(format: "%.4f", point.y)))"
+ )
+ }
+
+ if let minPoint = eyePoints.min(by: { $0.x < $1.x }),
+ let maxPoint = eyePoints.max(by: { $0.x < $1.x })
+ {
+ let eyeMinX = minPoint.x
+ let eyeMaxX = maxPoint.x
+ let eyeWidth = eyeMaxX - eyeMinX
+ let pupilCenterX = pupilPoints.map { $0.x }.reduce(0, +) / Double(pupilPoints.count)
+ let ratio = (pupilCenterX - eyeMinX) / eyeWidth
+
+ print("\n📏 CALCULATIONS:")
+ print(" Eye MinX: \(String(format: "%.4f", eyeMinX))")
+ print(" Eye MaxX: \(String(format: "%.4f", eyeMaxX))")
+ print(" Eye Width: \(String(format: "%.4f", eyeWidth))")
+
+ // Analyze different point pairs to find better eye width
+ if eyePoints.count >= 6 {
+ let cornerWidth = eyePoints[5].x - eyePoints[0].x
+ print(" Corner-to-Corner Width [0→5]: \(String(format: "%.4f", cornerWidth))")
+
+ // Try middle points too
+ if eyePoints.count >= 4 {
+ let midWidth = eyePoints[3].x - eyePoints[0].x
+ print(" Point [0→3] Width: \(String(format: "%.4f", midWidth))")
+ }
+ }
+
+ print(" Pupil Center X: \(String(format: "%.4f", pupilCenterX))")
+ print(" Pupil Min X: \(String(format: "%.4f", pupilPoints.min(by: { $0.x < $1.x })?.x ?? 0))")
+ print(" Pupil Max X: \(String(format: "%.4f", pupilPoints.max(by: { $0.x < $1.x })?.x ?? 0))")
+ print(" Final Ratio (current method): \(String(format: "%.4f", ratio))")
+
+ // Calculate alternate ratios
+ if eyePoints.count >= 6 {
+ let cornerWidth = eyePoints[5].x - eyePoints[0].x
+ if cornerWidth > 0 {
+ let cornerRatio = (pupilCenterX - eyePoints[0].x) / cornerWidth
+ print(" Alternate Ratio (using corners [0→5]): \(String(format: "%.4f", cornerRatio))")
+ }
+ }
+ }
+
+ print("\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n")
+ }
+
guard !eyePoints.isEmpty, !pupilPoints.isEmpty else { return 0.5 }

// Get eye horizontal bounds
- let eyeMinX = eyePoints.map { $0.x }.min() ?? 0
- let eyeMaxX = eyePoints.map { $0.x }.max() ?? 0
+ guard let minPoint = eyePoints.min(by: { $0.x < $1.x }),
+ let maxPoint = eyePoints.max(by: { $0.x < $1.x })
+ else {
+ return 0.5
+ }
+
+ let eyeMinX = minPoint.x
+ let eyeMaxX = maxPoint.x
let eyeWidth = eyeMaxX - eyeMinX

guard eyeWidth > 0 else { return 0.5 }

// Get pupil center X
let pupilCenterX = pupilPoints.map { $0.x }.reduce(0, +) / Double(pupilPoints.count)

- // Calculate ratio (0.0 to 1.0)
- // 0.0 = Right side of eye (camera view)
- // 1.0 = Left side of eye (camera view)
+ // Calculate ratio (0.0 to 1.0) - already normalized to face bounding box by Vision
let ratio = (pupilCenterX - eyeMinX) / eyeWidth

return ratio
}

+ /// NEW APPROACH: Calculate gaze using inter-eye distance as reference
+ /// This works around Vision's limitation that eye landmarks only track the iris, not true eye corners
+ private func calculateGazeUsingInterEyeDistance(
+ leftEye: VNFaceLandmarkRegion2D,
+ rightEye: VNFaceLandmarkRegion2D,
+ leftPupil: VNFaceLandmarkRegion2D,
+ rightPupil: VNFaceLandmarkRegion2D,
+ imageSize: CGSize,
+ faceBoundingBox: CGRect
+ ) -> (leftGaze: Double, rightGaze: Double) {
+
+ // CRITICAL: Convert from face-normalized coordinates to image coordinates
+ // normalizedPoints are relative to face bounding box, not stable for gaze tracking
+
+ // Helper to convert face-normalized point to image coordinates
+ func toImageCoords(_ point: CGPoint) -> CGPoint {
+ // Face bounding box origin is in Vision coordinates (bottom-left origin)
+ let imageX = faceBoundingBox.origin.x + point.x * faceBoundingBox.width
+ let imageY = faceBoundingBox.origin.y + point.y * faceBoundingBox.height
+ return CGPoint(x: imageX, y: imageY)
+ }

- // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate
+ // Convert all points to image space
+ let leftEyePointsImg = leftEye.normalizedPoints.map { toImageCoords($0) }
+ let rightEyePointsImg = rightEye.normalizedPoints.map { toImageCoords($0) }
+ let leftPupilPointsImg = leftPupil.normalizedPoints.map { toImageCoords($0) }
+ let rightPupilPointsImg = rightPupil.normalizedPoints.map { toImageCoords($0) }
+
+ // Calculate eye centers (average of all iris boundary points)
+ let leftEyeCenterX = leftEyePointsImg.map { $0.x }.reduce(0, +) / Double(leftEyePointsImg.count)
+ let rightEyeCenterX = rightEyePointsImg.map { $0.x }.reduce(0, +) / Double(rightEyePointsImg.count)
+
+ // Calculate pupil centers
+ let leftPupilX = leftPupilPointsImg.map { $0.x }.reduce(0, +) / Double(leftPupilPointsImg.count)
+ let rightPupilX = rightPupilPointsImg.map { $0.x }.reduce(0, +) / Double(rightPupilPointsImg.count)
+
+ // Inter-eye distance (the distance between eye centers) - should be stable now
+ let interEyeDistance = abs(rightEyeCenterX - leftEyeCenterX)
+
+ // Estimate iris width as a fraction of inter-eye distance
+ // Typical human: inter-pupil distance ~63mm, iris width ~12mm → ratio ~1/5
+ let irisWidth = interEyeDistance / 5.0
+
+ // Calculate gaze offset for each eye (positive = looking right, negative = looking left)
+ let leftGazeOffset = (leftPupilX - leftEyeCenterX) / irisWidth
+ let rightGazeOffset = (rightPupilX - rightEyeCenterX) / irisWidth
+
+ // Throttle debug logging
+ let now = Date()
+ let shouldLog = now.timeIntervalSince(lastDebugLogTime) >= 0.5
+
+ if shouldLog {
+ lastDebugLogTime = now
+
+ print("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━")
+ print("📊 INTER-EYE DISTANCE GAZE (IMAGE COORDS)")
+ print("━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━")
+
+ print("\n🖼️ IMAGE SPACE:")
+ print(" Image Size: \(Int(imageSize.width)) x \(Int(imageSize.height))")
+ print(" Face Box: x=\(String(format: "%.3f", faceBoundingBox.origin.x)) w=\(String(format: "%.3f", faceBoundingBox.width))")
+
+ print("\n👁️ EYE CENTERS (image coords):")
+ print(" Left Eye Center X: \(String(format: "%.4f", leftEyeCenterX)) (\(Int(leftEyeCenterX * imageSize.width))px)")
+ print(" Right Eye Center X: \(String(format: "%.4f", rightEyeCenterX)) (\(Int(rightEyeCenterX * imageSize.width))px)")
+ print(" Inter-Eye Distance: \(String(format: "%.4f", interEyeDistance)) (\(Int(interEyeDistance * imageSize.width))px)")
+ print(" Estimated Iris Width: \(String(format: "%.4f", irisWidth)) (\(Int(irisWidth * imageSize.width))px)")
+
+ print("\n👁️ PUPIL POSITIONS (image coords):")
+ print(" Left Pupil X: \(String(format: "%.4f", leftPupilX)) (\(Int(leftPupilX * imageSize.width))px)")
+ print(" Right Pupil X: \(String(format: "%.4f", rightPupilX)) (\(Int(rightPupilX * imageSize.width))px)")
+
+ print("\n📏 PUPIL OFFSETS FROM EYE CENTER:")
+ print(" Left Offset: \(String(format: "%.4f", leftPupilX - leftEyeCenterX)) (\(Int((leftPupilX - leftEyeCenterX) * imageSize.width))px)")
+ print(" Right Offset: \(String(format: "%.4f", rightPupilX - rightEyeCenterX)) (\(Int((rightPupilX - rightEyeCenterX) * imageSize.width))px)")
+
+ print("\n📏 GAZE OFFSETS (normalized to iris width):")
+ print(" Left Gaze Offset: \(String(format: "%.4f", leftGazeOffset)) (0=center, +right, -left)")
+ print(" Right Gaze Offset: \(String(format: "%.4f", rightGazeOffset)) (0=center, +right, -left)")
+ print(" Average Gaze: \(String(format: "%.4f", (leftGazeOffset + rightGazeOffset) / 2))")
+
+ // Interpretation
+ let avgGaze = (leftGazeOffset + rightGazeOffset) / 2
+ var interpretation = ""
+ if avgGaze < -0.5 {
+ interpretation = "Looking LEFT"
+ } else if avgGaze > 0.5 {
+ interpretation = "Looking RIGHT"
+ } else {
+ interpretation = "Looking CENTER"
+ }
+ print(" Interpretation: \(interpretation)")
+
+ print("\n━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\n")
+ }
+
+ return (leftGazeOffset, rightGazeOffset)
+ }
}
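A worked example of the inter-eye-distance arithmetic above, using assumed values rather than a captured frame:

// Assumed eye centers in Vision's normalized image coordinates:
let leftEyeCenterX = 0.42
let rightEyeCenterX = 0.58
let interEyeDistance = rightEyeCenterX - leftEyeCenterX   // 0.16
let irisWidth = interEyeDistance / 5.0                    // 0.032

// A left pupil sitting 0.016 to the right of its eye center:
let leftPupilX = 0.436
let leftGazeOffset = (leftPupilX - leftEyeCenterX) / irisWidth // 0.5 iris-widths

// Under the debug interpretation above, an average gaze of 0.5 sits exactly on
// the boundary between "Looking CENTER" and "Looking RIGHT".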
extension EyeTrackingService: AVCaptureVideoDataOutputSampleBufferDelegate {
nonisolated func captureOutput(
@@ -270,13 +608,27 @@ extension EyeTrackingService: AVCaptureVideoDataOutputSampleBufferDelegate {
return
}

+ let size = CGSize(
+ width: CVPixelBufferGetWidth(pixelBuffer),
+ height: CVPixelBufferGetHeight(pixelBuffer)
+ )
+
Task { @MainActor in
- self.processFaceObservations(request.results as? [VNFaceObservation])
+ self.processFaceObservations(
+ request.results as? [VNFaceObservation],
+ imageSize: size
+ )
}
}

+ // Use revision 3 which includes more detailed landmarks including pupils
+ request.revision = VNDetectFaceLandmarksRequestRevision3
+
+ // Enable constellation points which may help with pose estimation
+ if #available(macOS 14.0, *) {
+ request.constellation = .constellation76Points
+ }
+
let imageRequestHandler = VNImageRequestHandler(
cvPixelBuffer: pixelBuffer,
orientation: .leftMirrored,

Gaze/Services/PupilDetector.swift (new file, 266 lines)
@@ -0,0 +1,266 @@
//
// PupilDetector.swift
// Gaze
//
// Created by Mike Freno on 1/15/26.
//
// Pixel-based pupil detection translated from Python GazeTracking library
// Original: https://github.com/antoinelame/GazeTracking
//

import CoreImage
import Vision
import Accelerate

struct PupilPosition {
let x: CGFloat
let y: CGFloat
}

struct EyeRegion {
let frame: CGRect // Bounding box of the eye in image coordinates
let center: CGPoint // Center point of the eye region
}

class PupilDetector {

/// Detects pupil position within an isolated eye region using pixel-based analysis
/// - Parameters:
///   - pixelBuffer: The camera frame pixel buffer
///   - eyeLandmarks: Vision eye landmarks (6 points around iris)
///   - faceBoundingBox: Face bounding box from Vision
///   - imageSize: Size of the camera frame
/// - Returns: Pupil position relative to eye region, or nil if detection fails
static func detectPupil(
in pixelBuffer: CVPixelBuffer,
eyeLandmarks: VNFaceLandmarkRegion2D,
faceBoundingBox: CGRect,
imageSize: CGSize
) -> (pupilPosition: PupilPosition, eyeRegion: EyeRegion)? {

// Step 1: Convert Vision landmarks to pixel coordinates
let eyePoints = landmarksToPixelCoordinates(
landmarks: eyeLandmarks,
faceBoundingBox: faceBoundingBox,
imageSize: imageSize
)

guard eyePoints.count >= 6 else { return nil }

// Step 2: Create eye region bounding box
guard let eyeRegion = createEyeRegion(from: eyePoints, imageSize: imageSize) else {
return nil
}

// Step 3: Extract and process eye region from pixel buffer
guard let eyeImage = extractEyeRegion(
from: pixelBuffer,
region: eyeRegion.frame,
mask: eyePoints
) else {
return nil
}

// Step 4: Process image to isolate pupil (bilateral filter + threshold)
guard let processedImage = processEyeImage(eyeImage) else {
return nil
}

// Step 5: Find pupil using contour detection
guard let pupilPosition = findPupilCentroid(in: processedImage) else {
return nil
}

return (pupilPosition, eyeRegion)
}

// MARK: - Step 1: Convert Landmarks to Pixel Coordinates

private static func landmarksToPixelCoordinates(
landmarks: VNFaceLandmarkRegion2D,
faceBoundingBox: CGRect,
imageSize: CGSize
) -> [CGPoint] {
return landmarks.normalizedPoints.map { point in
// Vision coordinates are normalized to face bounding box
let imageX = (faceBoundingBox.origin.x + point.x * faceBoundingBox.width) * imageSize.width
let imageY = (faceBoundingBox.origin.y + point.y * faceBoundingBox.height) * imageSize.height
return CGPoint(x: imageX, y: imageY)
}
}

// MARK: - Step 2: Create Eye Region

private static func createEyeRegion(from points: [CGPoint], imageSize: CGSize) -> EyeRegion? {
guard !points.isEmpty else { return nil }

let margin: CGFloat = 5
let minX = points.map { $0.x }.min()! - margin
let maxX = points.map { $0.x }.max()! + margin
let minY = points.map { $0.y }.min()! - margin
let maxY = points.map { $0.y }.max()! + margin

// Clamp to image bounds
let clampedMinX = max(0, minX)
let clampedMaxX = min(imageSize.width, maxX)
let clampedMinY = max(0, minY)
let clampedMaxY = min(imageSize.height, maxY)

let frame = CGRect(
x: clampedMinX,
y: clampedMinY,
width: clampedMaxX - clampedMinX,
height: clampedMaxY - clampedMinY
)

let center = CGPoint(
x: frame.width / 2,
y: frame.height / 2
)

return EyeRegion(frame: frame, center: center)
}

// MARK: - Step 3: Extract Eye Region

private static func extractEyeRegion(
from pixelBuffer: CVPixelBuffer,
region: CGRect,
mask: [CGPoint]
) -> CIImage? {

let ciImage = CIImage(cvPixelBuffer: pixelBuffer)

// Convert to grayscale
let grayscaleImage = ciImage.applyingFilter("CIPhotoEffectNoir")

// Crop to eye region
let croppedImage = grayscaleImage.cropped(to: region)

return croppedImage
}

// MARK: - Step 4: Process Eye Image

private static func processEyeImage(_ image: CIImage) -> CIImage? {
// Apply bilateral filter (preserves edges while smoothing)
// CIBilateralFilter approximation: use CIMedianFilter + morphology
var processed = image

// 1. Median filter (reduces noise while preserving edges)
processed = processed.applyingFilter("CIMedianFilter")

// 2. Morphological erosion (makes dark regions larger - approximates cv2.erode)
// Use CIMorphologyMinimum with small radius
processed = processed.applyingFilter("CIMorphologyMinimum", parameters: [
kCIInputRadiusKey: 2.0
])

// 3. Threshold to binary (black/white)
// Use CIColorControls to increase contrast, then threshold
processed = processed.applyingFilter("CIColorControls", parameters: [
kCIInputContrastKey: 2.0,
kCIInputBrightnessKey: -0.3
])

// Apply color threshold to make it binary
processed = processed.applyingFilter("CIColorThreshold", parameters: [
"inputThreshold": 0.5
])

return processed
}

// MARK: - Step 5: Find Pupil Centroid

private static func findPupilCentroid(in image: CIImage) -> PupilPosition? {
let context = CIContext()

// Convert CIImage to CGImage for contour detection
guard let cgImage = context.createCGImage(image, from: image.extent) else {
return nil
}

// Convert to vImage buffer for processing
guard let (width, height, data) = cgImageToGrayscaleData(cgImage) else {
return nil
}

// Find connected components (contours)
guard let (centroidX, centroidY) = findLargestDarkRegionCentroid(
data: data,
width: width,
height: height
) else {
return nil
}

return PupilPosition(x: CGFloat(centroidX), y: CGFloat(centroidY))
}

// MARK: - Helper: Convert CGImage to Grayscale Data

private static func cgImageToGrayscaleData(_ cgImage: CGImage) -> (width: Int, height: Int, data: [UInt8])? {
let width = cgImage.width
let height = cgImage.height

var data = [UInt8](repeating: 0, count: width * height)

guard let context = CGContext(
data: &data,
width: width,
height: height,
bitsPerComponent: 8,
bytesPerRow: width,
space: CGColorSpaceCreateDeviceGray(),
bitmapInfo: CGImageAlphaInfo.none.rawValue
) else {
return nil
}

context.draw(cgImage, in: CGRect(x: 0, y: 0, width: width, height: height))

return (width, height, data)
}

// MARK: - Helper: Find Centroid of Largest Dark Region

private static func findLargestDarkRegionCentroid(
data: [UInt8],
width: Int,
height: Int
) -> (x: Double, y: Double)? {

// Calculate image moments to find centroid
// m00 = sum of all pixels (area)
// m10 = sum of (x * pixel_value)
// m01 = sum of (y * pixel_value)
// centroid_x = m10 / m00
// centroid_y = m01 / m00

var m00: Double = 0
var m10: Double = 0
var m01: Double = 0

for y in 0..<height {
for x in 0..<width {
let index = y * width + x
let pixelValue = 255 - Int(data[index]) // Invert: we want dark regions

if pixelValue > 128 { // Only count dark pixels
let weight = Double(pixelValue)
m00 += weight
m10 += Double(x) * weight
m01 += Double(y) * weight
}
}
}

guard m00 > 0 else { return nil }

let centroidX = m10 / m00
let centroidY = m01 / m00

return (centroidX, centroidY)
}
}
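A sketch of how the detector might be driven from the capture pipeline; the wrapper function and its inputs are assumptions, only PupilDetector.detectPupil's signature comes from this file:

import Vision

func handleFace(_ face: VNFaceObservation, pixelBuffer: CVPixelBuffer, imageSize: CGSize) {
    guard let leftEye = face.landmarks?.leftEye else { return }

    if let result = PupilDetector.detectPupil(
        in: pixelBuffer,
        eyeLandmarks: leftEye,
        faceBoundingBox: face.boundingBox,
        imageSize: imageSize
    ) {
        // pupilPosition is relative to the cropped eye region, so comparing it
        // to the region's center gives a horizontal offset in pixels.
        let dx = result.pupilPosition.x - result.eyeRegion.center.x
        print("Pupil offset from eye center: \(dx)px")
    }
}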
Gaze/Services/ServiceContainer.swift (new file, 110 lines)
@@ -0,0 +1,110 @@
//
// ServiceContainer.swift
// Gaze
//
// Dependency injection container for managing service instances.
//

import Foundation

/// A simple dependency injection container for managing service instances.
/// Supports both production and test configurations.
@MainActor
final class ServiceContainer {

/// Shared instance for production use
static let shared = ServiceContainer()

/// The settings manager instance
private(set) var settingsManager: any SettingsProviding

/// The enforce mode service instance
private(set) var enforceModeService: EnforceModeService

/// The timer engine instance (created lazily)
private var _timerEngine: TimerEngine?

/// The fullscreen detection service
private(set) var fullscreenService: FullscreenDetectionService?

/// The idle monitoring service
private(set) var idleService: IdleMonitoringService?

/// The usage tracking service
private(set) var usageTrackingService: UsageTrackingService?

/// Whether this container is configured for testing
let isTestEnvironment: Bool

/// Creates a production container with real services
private init() {
self.isTestEnvironment = false
self.settingsManager = SettingsManager.shared
self.enforceModeService = EnforceModeService.shared
}

/// Creates a test container with injectable dependencies
/// - Parameters:
///   - settingsManager: The settings manager to use (defaults to MockSettingsManager in tests)
///   - enforceModeService: The enforce mode service to use
init(
settingsManager: any SettingsProviding,
enforceModeService: EnforceModeService? = nil
) {
self.isTestEnvironment = true
self.settingsManager = settingsManager
self.enforceModeService = enforceModeService ?? EnforceModeService.shared
}

/// Gets or creates the timer engine
var timerEngine: TimerEngine {
if let engine = _timerEngine {
return engine
}
let engine = TimerEngine(
settingsManager: settingsManager,
enforceModeService: enforceModeService
)
_timerEngine = engine
return engine
}

/// Sets up smart mode services
func setupSmartModeServices() {
let settings = settingsManager.settings

fullscreenService = FullscreenDetectionService()
idleService = IdleMonitoringService(
idleThresholdMinutes: settings.smartMode.idleThresholdMinutes
)
usageTrackingService = UsageTrackingService(
resetThresholdMinutes: settings.smartMode.usageResetAfterMinutes
)

// Connect idle service to usage tracking
if let idleService = idleService {
usageTrackingService?.setupIdleMonitoring(idleService)
}

// Connect services to timer engine
timerEngine.setupSmartMode(
fullscreenService: fullscreenService,
idleService: idleService
)
}

/// Resets the container for testing purposes
func reset() {
_timerEngine?.stop()
_timerEngine = nil
fullscreenService = nil
idleService = nil
usageTrackingService = nil
}

/// Creates a new container configured for testing
static func forTesting(settings: AppSettings = .defaults) -> ServiceContainer {
// We need to create this at runtime in tests using MockSettingsManager
fatalError("Use init(settingsManager:) directly in tests")
}
}
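Putting the pieces together, a test can build an isolated TimerEngine through the injectable initializer (MockSettingsManager is the hypothetical double sketched earlier):

@MainActor
func makeTestTimerEngine() -> TimerEngine {
    let container = ServiceContainer(settingsManager: MockSettingsManager())
    // Lazily constructed with the injected settings provider.
    return container.timerEngine
}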
@@ -14,7 +14,7 @@ class TimerEngine: ObservableObject {
@Published var activeReminder: ReminderEvent?

private var timerSubscription: AnyCancellable?
- private let settingsManager: SettingsManager
+ private let settingsProvider: any SettingsProviding
private var sleepStartTime: Date?

// For enforce mode integration
@@ -25,9 +25,9 @@ class TimerEngine: ObservableObject {
private var idleService: IdleMonitoringService?
private var cancellables = Set<AnyCancellable>()

- init(settingsManager: SettingsManager) {
- self.settingsManager = settingsManager
- self.enforceModeService = EnforceModeService.shared
+ init(settingsManager: any SettingsProviding, enforceModeService: EnforceModeService? = nil) {
+ self.settingsProvider = settingsManager
+ self.enforceModeService = enforceModeService ?? EnforceModeService.shared

Task { @MainActor in
self.enforceModeService?.setTimerEngine(self)
@@ -61,7 +61,7 @@ class TimerEngine: ObservableObject {
}

private func handleFullscreenChange(isFullscreen: Bool) {
- guard settingsManager.settings.smartMode.autoPauseOnFullscreen else { return }
+ guard settingsProvider.settings.smartMode.autoPauseOnFullscreen else { return }

if isFullscreen {
pauseAllTimers(reason: .fullscreen)
@@ -73,7 +73,7 @@ class TimerEngine: ObservableObject {
}

private func handleIdleChange(isIdle: Bool) {
- guard settingsManager.settings.smartMode.autoPauseOnIdle else { return }
+ guard settingsProvider.settings.smartMode.autoPauseOnIdle else { return }

if isIdle {
pauseAllTimers(reason: .idle)
@@ -114,7 +114,7 @@ class TimerEngine: ObservableObject {

// Add built-in timers
for timerType in TimerType.allCases {
- let config = settingsManager.timerConfiguration(for: timerType)
+ let config = settingsProvider.timerConfiguration(for: timerType)
if config.enabled {
let identifier = TimerIdentifier.builtIn(timerType)
newStates[identifier] = TimerState(
@@ -127,7 +127,7 @@ class TimerEngine: ObservableObject {
}

// Add user timers
- for userTimer in settingsManager.settings.userTimers where userTimer.enabled {
+ for userTimer in settingsProvider.settings.userTimers where userTimer.enabled {
let identifier = TimerIdentifier.user(id: userTimer.id)
newStates[identifier] = TimerState(
identifier: identifier,
@@ -159,7 +159,7 @@ class TimerEngine: ObservableObject {

// Update built-in timers
for timerType in TimerType.allCases {
- let config = settingsManager.timerConfiguration(for: timerType)
+ let config = settingsProvider.timerConfiguration(for: timerType)
let identifier = TimerIdentifier.builtIn(timerType)

if config.enabled {
@@ -191,7 +191,7 @@ class TimerEngine: ObservableObject {
}

// Update user timers
- for userTimer in settingsManager.settings.userTimers {
+ for userTimer in settingsProvider.settings.userTimers {
let identifier = TimerIdentifier.user(id: userTimer.id)
let newIntervalSeconds = userTimer.intervalMinutes * 60

@@ -269,10 +269,10 @@ class TimerEngine: ObservableObject {
let intervalSeconds: Int
switch identifier {
case .builtIn(let type):
- let config = settingsManager.timerConfiguration(for: type)
+ let config = settingsProvider.timerConfiguration(for: type)
intervalSeconds = config.intervalSeconds
case .user(let id):
- guard let userTimer = settingsManager.settings.userTimers.first(where: { $0.id == id }) else { return }
+ guard let userTimer = settingsProvider.settings.userTimers.first(where: { $0.id == id }) else { return }
intervalSeconds = userTimer.intervalMinutes * 60
}

@@ -335,14 +335,14 @@ class TimerEngine: ObservableObject {
switch type {
case .lookAway:
activeReminder = .lookAwayTriggered(
- countdownSeconds: settingsManager.settings.lookAwayCountdownSeconds)
+ countdownSeconds: settingsProvider.settings.lookAwayCountdownSeconds)
case .blink:
activeReminder = .blinkTriggered
case .posture:
activeReminder = .postureTriggered
}
case .user(let id):
- if let userTimer = settingsManager.settings.userTimers.first(where: { $0.id == id }) {
+ if let userTimer = settingsProvider.settings.userTimers.first(where: { $0.id == id }) {
activeReminder = .userTimerTriggered(userTimer)
}
}

@@ -7,6 +7,108 @@

import SwiftUI

@MainActor
final class SettingsWindowPresenter {
    static let shared = SettingsWindowPresenter()

    // Strong reference, cleared by the willClose observer below. A weak
    // reference would let the controller deallocate as soon as show()
    // returns, so focusExistingWindow would never find the open window.
    private var windowController: NSWindowController?
    private var closeObserver: NSObjectProtocol?

    func show(settingsManager: SettingsManager, initialTab: Int = 0) {
        if focusExistingWindow(tab: initialTab) {
            return
        }
        createWindow(settingsManager: settingsManager, initialTab: initialTab)
    }

    func focus(tab: Int) {
        _ = focusExistingWindow(tab: tab)
    }

    func close() {
        windowController?.close()
        windowController = nil
        removeCloseObserver()
    }

    @discardableResult
    private func focusExistingWindow(tab: Int?) -> Bool {
        guard let window = windowController?.window else {
            windowController = nil
            return false
        }

        if let tab {
            NotificationCenter.default.post(
                name: Notification.Name("SwitchToSettingsTab"),
                object: tab
            )
        }

        window.makeKeyAndOrderFront(nil)
        NSApp.activate(ignoringOtherApps: true)
        return true
    }

    private func createWindow(settingsManager: SettingsManager, initialTab: Int) {
        let window = NSWindow(
            contentRect: NSRect(x: 0, y: 0, width: 700, height: 700),
            styleMask: [.titled, .closable, .miniaturizable, .resizable, .fullSizeContentView],
            backing: .buffered,
            defer: false
        )

        window.identifier = WindowIdentifiers.settings
        window.titleVisibility = .hidden
        window.titlebarAppearsTransparent = true
        window.toolbarStyle = .unified
        window.toolbar = NSToolbar()
        window.center()
        window.setFrameAutosaveName("SettingsWindow")
        window.isReleasedWhenClosed = false

        let contentView = SettingsWindowView(
            settingsManager: settingsManager,
            initialTab: initialTab
        )
        window.contentView = NSHostingView(rootView: contentView)

        let controller = NSWindowController(window: window)
        controller.showWindow(nil)

        window.makeKeyAndOrderFront(nil)
        NSApp.activate(ignoringOtherApps: true)

        windowController = controller

        removeCloseObserver()
        closeObserver = NotificationCenter.default.addObserver(
            forName: NSWindow.willCloseNotification,
            object: window,
            queue: .main
        ) { [weak self] _ in
            Task { @MainActor [weak self] in
                self?.windowController = nil
                self?.removeCloseObserver()
            }
        }
    }

    private func removeCloseObserver() {
        if let closeObserver {
            NotificationCenter.default.removeObserver(closeObserver)
            self.closeObserver = nil
        }
    }

    deinit {
        // Remove the token directly; spawning a Task that captures self
        // from deinit would outlive the deallocation it runs during.
        if let closeObserver {
            NotificationCenter.default.removeObserver(closeObserver)
        }
    }
}

struct SettingsWindowView: View {
    @ObservedObject var settingsManager: SettingsManager
    @State private var selectedSection: SettingsSection
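The presenter keys its single-window behavior off a shared identifier plus the retained controller that the willClose observer clears, so window lifetime is tracked in one place instead of being rediscovered by scanning NSApp.windows. The WindowIdentifiers namespace is not in this diff; a hypothetical sketch consistent with the two members used here and in AppDelegate (the raw string values are guesses):

import AppKit

// Hypothetical sketch of WindowIdentifiers; only the member names are
// grounded in this diff, the raw values are placeholders.
enum WindowIdentifiers {
    static let settings = NSUserInterfaceItemIdentifier("SettingsWindow")
    static let onboarding = NSUserInterfaceItemIdentifier("OnboardingWindow")
}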
@@ -13,15 +13,18 @@ struct EnforceModeSetupView: View {
    @ObservedObject var cameraService = CameraAccessService.shared
    @ObservedObject var eyeTrackingService = EyeTrackingService.shared
    @ObservedObject var enforceModeService = EnforceModeService.shared
    @ObservedObject var trackingConstants = EyeTrackingConstants.shared

    @State private var isProcessingToggle = false
    @State private var isTestModeActive = false
    @State private var cachedPreviewLayer: AVCaptureVideoPreviewLayer?
    @State private var showDebugView = false
    @State private var isViewActive = false
    @State private var showAdvancedSettings = false

    var body: some View {
        VStack(spacing: 0) {
            ScrollView {
                VStack(spacing: 16) {
                    Image(systemName: "video.fill")
                        .font(.system(size: 60))

@@ -81,8 +84,10 @@ struct EnforceModeSetupView: View {

                    if isTestModeActive && enforceModeService.isCameraActive {
                        testModePreviewView
                        trackingConstantsView
                    } else {
                        if enforceModeService.isCameraActive && !isTestModeActive {
                            trackingConstantsView
                            eyeTrackingStatusView
                            #if DEBUG
                            if showDebugView {

@@ -100,6 +105,7 @@ struct EnforceModeSetupView: View {

                    Spacer()
                }
            }
            .frame(maxWidth: .infinity, maxHeight: .infinity)
            .padding()
            .background(.clear)
@@ -163,35 +169,35 @@ struct EnforceModeSetupView: View {
            }
        }

        VStack(alignment: .leading, spacing: 12) {
            Text("Live Tracking Status")
                .font(.headline)
        /*VStack(alignment: .leading, spacing: 12) {*/
            /*Text("Live Tracking Status")*/
                /*.font(.headline)*/

            HStack(spacing: 20) {
                statusIndicator(
                    title: "Face Detected",
                    isActive: eyeTrackingService.faceDetected,
                    icon: "person.fill"
                )
            /*HStack(spacing: 20) {*/
                /*statusIndicator(*/
                    /*title: "Face Detected",*/
                    /*isActive: eyeTrackingService.faceDetected,*/
                    /*icon: "person.fill"*/
                /*)*/

                statusIndicator(
                    title: "Looking Away",
                    isActive: !eyeTrackingService.userLookingAtScreen,
                    icon: "arrow.turn.up.right"
                )
            }
                /*statusIndicator(*/
                    /*title: "Looking Away",*/
                    /*isActive: !eyeTrackingService.userLookingAtScreen,*/
                    /*icon: "arrow.turn.up.right"*/
                /*)*/
            /*}*/

            Text(
                lookingAway
                    ? "✓ Break compliance detected" : "⚠️ Please look away from screen"
            )
            .font(.caption)
            .foregroundColor(lookingAway ? .green : .orange)
            .frame(maxWidth: .infinity, alignment: .center)
            .padding(.top, 4)
        }
        .padding()
        .glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
            /*Text(*/
                /*lookingAway*/
                    /*? "✓ Break compliance detected" : "⚠️ Please look away from screen"*/
            /*)*/
            /*.font(.caption)*/
            /*.foregroundColor(lookingAway ? .green : .orange)*/
            /*.frame(maxWidth: .infinity, alignment: .center)*/
            /*.padding(.top, 4)*/
        /*}*/
        /*.padding()*/
        /*.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))*/
        }
    }
}
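The compliance label above reads a lookingAway flag that this hunk never defines. Given that the "Looking Away" status indicator uses !eyeTrackingService.userLookingAtScreen, the flag is presumably just that inversion, along the lines of this hypothetical helper:

// Hypothetical helper backing the compliance label; mirrors the
// inversion used by the "Looking Away" status indicator above.
private var lookingAway: Bool {
    !eyeTrackingService.userLookingAtScreen
}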
@@ -357,6 +363,269 @@ struct EnforceModeSetupView: View {
        }
    }

    private var trackingConstantsView: some View {
        VStack(alignment: .leading, spacing: 16) {
            HStack {
                Text("Tracking Sensitivity")
                    .font(.headline)
                Spacer()
                Button(action: {
                    eyeTrackingService.enableDebugLogging.toggle()
                }) {
                    Image(systemName: eyeTrackingService.enableDebugLogging ? "ant.circle.fill" : "ant.circle")
                        .foregroundColor(eyeTrackingService.enableDebugLogging ? .orange : .secondary)
                }
                .buttonStyle(.plain)
                .help("Toggle console debug logging")

                Button(showAdvancedSettings ? "Hide Settings" : "Show Settings") {
                    withAnimation {
                        showAdvancedSettings.toggle()
                    }
                }
                .buttonStyle(.bordered)
                .controlSize(.small)
            }

            // Debug info always visible when tracking
            VStack(alignment: .leading, spacing: 8) {
                Text("Live Values:")
                    .font(.caption)
                    .fontWeight(.semibold)
                    .foregroundColor(.secondary)

                if let leftRatio = eyeTrackingService.debugLeftPupilRatio,
                    let rightRatio = eyeTrackingService.debugRightPupilRatio {
                    HStack(spacing: 16) {
                        VStack(alignment: .leading, spacing: 2) {
                            Text("Left Pupil: \(String(format: "%.3f", leftRatio))")
                                .font(.caption2)
                                .foregroundColor(
                                    !trackingConstants.minPupilEnabled && !trackingConstants.maxPupilEnabled ? .secondary :
                                    (leftRatio < trackingConstants.minPupilRatio || leftRatio > trackingConstants.maxPupilRatio) ? .orange : .green
                                )
                            Text("Right Pupil: \(String(format: "%.3f", rightRatio))")
                                .font(.caption2)
                                .foregroundColor(
                                    !trackingConstants.minPupilEnabled && !trackingConstants.maxPupilEnabled ? .secondary :
                                    (rightRatio < trackingConstants.minPupilRatio || rightRatio > trackingConstants.maxPupilRatio) ? .orange : .green
                                )
                        }

                        Spacer()

                        VStack(alignment: .trailing, spacing: 2) {
                            Text("Range: \(String(format: "%.2f", trackingConstants.minPupilRatio)) - \(String(format: "%.2f", trackingConstants.maxPupilRatio))")
                                .font(.caption2)
                                .foregroundColor(.secondary)
                            let bothEyesOut = (leftRatio < trackingConstants.minPupilRatio || leftRatio > trackingConstants.maxPupilRatio) &&
                                (rightRatio < trackingConstants.minPupilRatio || rightRatio > trackingConstants.maxPupilRatio)
                            Text(bothEyesOut ? "Both Out ⚠️" : "In Range ✓")
                                .font(.caption2)
                                .foregroundColor(bothEyesOut ? .orange : .green)
                        }
                    }
                } else {
                    Text("Pupil data unavailable")
                        .font(.caption2)
                        .foregroundColor(.secondary)
                }

                if let yaw = eyeTrackingService.debugYaw,
                    let pitch = eyeTrackingService.debugPitch {
                    HStack(spacing: 16) {
                        VStack(alignment: .leading, spacing: 2) {
                            Text("Yaw: \(String(format: "%.3f", yaw))")
                                .font(.caption2)
                                .foregroundColor(
                                    !trackingConstants.yawEnabled ? .secondary :
                                    abs(yaw) > trackingConstants.yawThreshold ? .orange : .green
                                )
                            Text("Pitch: \(String(format: "%.3f", pitch))")
                                .font(.caption2)
                                .foregroundColor(
                                    !trackingConstants.pitchUpEnabled && !trackingConstants.pitchDownEnabled ? .secondary :
                                    (pitch > trackingConstants.pitchUpThreshold || pitch < trackingConstants.pitchDownThreshold) ? .orange : .green
                                )
                        }

                        Spacer()

                        VStack(alignment: .trailing, spacing: 2) {
                            Text("Yaw Max: \(String(format: "%.2f", trackingConstants.yawThreshold))")
                                .font(.caption2)
                                .foregroundColor(.secondary)
                            Text("Pitch: \(String(format: "%.2f", trackingConstants.pitchDownThreshold)) to \(String(format: "%.2f", trackingConstants.pitchUpThreshold))")
                                .font(.caption2)
                                .foregroundColor(.secondary)
                        }
                    }
                }
            }
            .padding(.top, 4)

            if showAdvancedSettings {
                VStack(spacing: 16) {
                    // Yaw Threshold
                    VStack(alignment: .leading, spacing: 4) {
                        HStack {
                            Toggle("", isOn: $trackingConstants.yawEnabled)
                                .labelsHidden()
                            Text("Yaw Threshold (Head Turn)")
                                .foregroundColor(
                                    trackingConstants.yawEnabled ? .primary : .secondary)
                            Spacer()
                            Text(String(format: "%.2f rad", trackingConstants.yawThreshold))
                                .foregroundColor(.secondary)
                                .font(.caption)
                        }
                        Slider(value: $trackingConstants.yawThreshold, in: 0.1...0.8, step: 0.05)
                            .disabled(!trackingConstants.yawEnabled)
                        Text("Lower = more sensitive to head turning")
                            .font(.caption2)
                            .foregroundColor(.secondary)
                    }

                    Divider()

                    // Pitch Up Threshold
                    VStack(alignment: .leading, spacing: 4) {
                        HStack {
                            Toggle("", isOn: $trackingConstants.pitchUpEnabled)
                                .labelsHidden()
                            Text("Pitch Up Threshold (Looking Up)")
                                .foregroundColor(
                                    trackingConstants.pitchUpEnabled ? .primary : .secondary)
                            Spacer()
                            Text(String(format: "%.2f rad", trackingConstants.pitchUpThreshold))
                                .foregroundColor(.secondary)
                                .font(.caption)
                        }
                        Slider(
                            value: $trackingConstants.pitchUpThreshold, in: -0.2...0.5, step: 0.05
                        )
                        .disabled(!trackingConstants.pitchUpEnabled)
                        Text("Lower = more sensitive to looking up")
                            .font(.caption2)
                            .foregroundColor(.secondary)
                    }

                    Divider()

                    // Pitch Down Threshold
                    VStack(alignment: .leading, spacing: 4) {
                        HStack {
                            Toggle("", isOn: $trackingConstants.pitchDownEnabled)
                                .labelsHidden()
                            Text("Pitch Down Threshold (Looking Down)")
                                .foregroundColor(
                                    trackingConstants.pitchDownEnabled ? .primary : .secondary)
                            Spacer()
                            Text(String(format: "%.2f rad", trackingConstants.pitchDownThreshold))
                                .foregroundColor(.secondary)
                                .font(.caption)
                        }
                        Slider(
                            value: $trackingConstants.pitchDownThreshold, in: -0.8...0.0, step: 0.05
                        )
                        .disabled(!trackingConstants.pitchDownEnabled)
                        Text("Higher = more sensitive to looking down")
                            .font(.caption2)
                            .foregroundColor(.secondary)
                    }

                    Divider()

                    // Min Pupil Ratio
                    VStack(alignment: .leading, spacing: 4) {
                        HStack {
                            Toggle("", isOn: $trackingConstants.minPupilEnabled)
                                .labelsHidden()
                            Text("Min Pupil Ratio (Looking Right)")
                                .foregroundColor(
                                    trackingConstants.minPupilEnabled ? .primary : .secondary)
                            Spacer()
                            Text(String(format: "%.2f", trackingConstants.minPupilRatio))
                                .foregroundColor(.secondary)
                                .font(.caption)
                        }
                        Slider(value: $trackingConstants.minPupilRatio, in: 0.2...0.5, step: 0.01)
                            .disabled(!trackingConstants.minPupilEnabled)
                        Text("Higher = more sensitive to looking right")
                            .font(.caption2)
                            .foregroundColor(.secondary)
                    }

                    Divider()

                    // Max Pupil Ratio
                    VStack(alignment: .leading, spacing: 4) {
                        HStack {
                            Toggle("", isOn: $trackingConstants.maxPupilEnabled)
                                .labelsHidden()
                            Text("Max Pupil Ratio (Looking Left)")
                                .foregroundColor(
                                    trackingConstants.maxPupilEnabled ? .primary : .secondary)
                            Spacer()
                            Text(String(format: "%.2f", trackingConstants.maxPupilRatio))
                                .foregroundColor(.secondary)
                                .font(.caption)
                        }
                        Slider(value: $trackingConstants.maxPupilRatio, in: 0.5...0.8, step: 0.01)
                            .disabled(!trackingConstants.maxPupilEnabled)
                        Text("Lower = more sensitive to looking left")
                            .font(.caption2)
                            .foregroundColor(.secondary)
                    }

                    Divider()

                    // Eye Closed Threshold
                    VStack(alignment: .leading, spacing: 4) {
                        HStack {
                            Toggle("", isOn: $trackingConstants.eyeClosedEnabled)
                                .labelsHidden()
                            Text("Eye Closed Threshold")
                                .foregroundColor(
                                    trackingConstants.eyeClosedEnabled ? .primary : .secondary)
                            Spacer()
                            Text(String(format: "%.3f", trackingConstants.eyeClosedThreshold))
                                .foregroundColor(.secondary)
                                .font(.caption)
                        }
                        Slider(
                            value: Binding(
                                get: { Double(trackingConstants.eyeClosedThreshold) },
                                set: { trackingConstants.eyeClosedThreshold = CGFloat($0) }
                            ), in: 0.01...0.1, step: 0.005
                        )
                        .disabled(!trackingConstants.eyeClosedEnabled)
                        Text("Lower = more sensitive to eye closure")
                            .font(.caption2)
                            .foregroundColor(.secondary)
                    }

                    // Reset button
                    Button(action: {
                        trackingConstants.resetToDefaults()
                    }) {
                        HStack {
                            Image(systemName: "arrow.counterclockwise")
                            Text("Reset to Defaults")
                        }
                        .frame(maxWidth: .infinity)
                    }
                    .buttonStyle(.bordered)
                    .controlSize(.small)
                    .padding(.top, 8)
                }
                .padding(.top, 8)
            }
        }
        .padding()
        .glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
    }

    private var debugEyeTrackingView: some View {
        VStack(alignment: .leading, spacing: 12) {
            Text("Debug Eye Tracking Data")
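The sliders above bind directly to EyeTrackingConstants.shared, whose definition is outside this diff. From the bindings and the debug coloring it is presumably an observable singleton roughly like the sketch below; member names and types come from the call sites, and the default values are placeholders picked from the slider ranges:

import SwiftUI

// Sketch of EyeTrackingConstants as implied by the bindings above.
// Defaults are placeholders; only names and types are grounded.
@MainActor
final class EyeTrackingConstants: ObservableObject {
    static let shared = EyeTrackingConstants()

    @Published var yawEnabled = true
    @Published var yawThreshold: Double = 0.35          // radians of head turn

    @Published var pitchUpEnabled = true
    @Published var pitchUpThreshold: Double = 0.20      // radians, looking up
    @Published var pitchDownEnabled = true
    @Published var pitchDownThreshold: Double = -0.30   // radians, looking down

    @Published var minPupilEnabled = true
    @Published var minPupilRatio: Double = 0.35         // pupil position, looking right
    @Published var maxPupilEnabled = true
    @Published var maxPupilRatio: Double = 0.65         // pupil position, looking left

    @Published var eyeClosedEnabled = true
    @Published var eyeClosedThreshold: CGFloat = 0.03

    func resetToDefaults() {
        yawEnabled = true; yawThreshold = 0.35
        pitchUpEnabled = true; pitchUpThreshold = 0.20
        pitchDownEnabled = true; pitchDownThreshold = -0.30
        minPupilEnabled = true; minPupilRatio = 0.35
        maxPupilEnabled = true; maxPupilRatio = 0.65
        eyeClosedEnabled = true; eyeClosedThreshold = 0.03
    }
}

The debug coloring also pins down the decision each threshold feeds: a reading counts as looking away when |yaw| exceeds yawThreshold, when pitch leaves the band from pitchDownThreshold to pitchUpThreshold, or when both pupil ratios fall outside the min/max range, with each test skipped while its toggle is disabled.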
GazeTests/Mocks/MockSettingsManager.swift (new file, 76 lines)
@@ -0,0 +1,76 @@
//
//  MockSettingsManager.swift
//  GazeTests
//
//  A mock implementation of SettingsProviding for isolated unit testing.
//

import Combine
import Foundation

@testable import Gaze

/// A mock implementation of SettingsProviding that doesn't use UserDefaults.
/// This allows tests to run in complete isolation without affecting
/// the shared singleton or persisting data.
@MainActor
final class MockSettingsManager: ObservableObject, SettingsProviding {
    @Published var settings: AppSettings

    var settingsPublisher: Published<AppSettings>.Publisher {
        $settings
    }

    private let timerConfigKeyPaths: [TimerType: WritableKeyPath<AppSettings, TimerConfiguration>] = [
        .lookAway: \.lookAwayTimer,
        .blink: \.blinkTimer,
        .posture: \.postureTimer,
    ]

    /// Track method calls for verification in tests
    var saveCallCount = 0
    var loadCallCount = 0
    var resetToDefaultsCallCount = 0

    init(settings: AppSettings = .defaults) {
        self.settings = settings
    }

    func timerConfiguration(for type: TimerType) -> TimerConfiguration {
        guard let keyPath = timerConfigKeyPaths[type] else {
            preconditionFailure("Unknown timer type: \(type)")
        }
        return settings[keyPath: keyPath]
    }

    func updateTimerConfiguration(for type: TimerType, configuration: TimerConfiguration) {
        guard let keyPath = timerConfigKeyPaths[type] else {
            preconditionFailure("Unknown timer type: \(type)")
        }
        settings[keyPath: keyPath] = configuration
    }

    func allTimerConfigurations() -> [TimerType: TimerConfiguration] {
        var configs: [TimerType: TimerConfiguration] = [:]
        for (type, keyPath) in timerConfigKeyPaths {
            configs[type] = settings[keyPath: keyPath]
        }
        return configs
    }

    func save() {
        saveCallCount += 1
    }

    func saveImmediately() {
        saveCallCount += 1
    }

    func load() {
        loadCallCount += 1
    }

    func resetToDefaults() {
        resetToDefaultsCallCount += 1
        settings = .defaults
    }
}
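With SettingsProviding injected, tests can exercise settings-dependent code without touching UserDefaults or the shared singleton. A usage sketch under stated assumptions: the XCTest test class, the mutability of TimerConfiguration.intervalSeconds, and a TimerEngine(settingsProvider:) initializer are inferred from the rename above, not shown in this diff:

import XCTest
@testable import Gaze

@MainActor
final class MockSettingsManagerTests: XCTestCase {
    func testMockCountsCallsAndStaysIsolated() {
        // The mock stands in wherever a SettingsProviding is expected,
        // e.g. a hypothetical TimerEngine(settingsProvider: mock).
        let mock = MockSettingsManager()

        // Assumes TimerConfiguration.intervalSeconds is a settable var.
        var config = mock.timerConfiguration(for: .lookAway)
        config.intervalSeconds = 1200
        mock.updateTimerConfiguration(for: .lookAway, configuration: config)
        XCTAssertEqual(mock.timerConfiguration(for: .lookAway).intervalSeconds, 1200)

        // Call counters let tests verify persistence behavior without I/O.
        mock.save()
        mock.resetToDefaults()
        XCTAssertEqual(mock.saveCallCount, 1)
        XCTAssertEqual(mock.resetToDefaultsCallCount, 1)
    }
}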