general: fix build issues and an eye-tracking UI freeze

This commit is contained in:
Michael Freno
2026-01-15 09:05:50 -05:00
parent 67d52cc333
commit 429d4ff32e
8 changed files with 831 additions and 458 deletions

View File

@@ -30,7 +30,6 @@ class AppDelegate: NSObject, NSApplicationDelegate, ObservableObject {
timerEngine = TimerEngine(settingsManager: settingsManager) timerEngine = TimerEngine(settingsManager: settingsManager)
// Initialize Smart Mode services
setupSmartModeServices() setupSmartModeServices()
// Initialize update manager after onboarding is complete // Initialize update manager after onboarding is complete
@@ -42,7 +41,6 @@ class AppDelegate: NSObject, NSApplicationDelegate, ObservableObject {
observeSettingsChanges() observeSettingsChanges()
// Start timers if onboarding is complete
if settingsManager.settings.hasCompletedOnboarding { if settingsManager.settings.hasCompletedOnboarding {
startTimers() startTimers()
} }
@@ -57,7 +55,6 @@ class AppDelegate: NSObject, NSApplicationDelegate, ObservableObject {
resetThresholdMinutes: settingsManager.settings.smartMode.usageResetAfterMinutes resetThresholdMinutes: settingsManager.settings.smartMode.usageResetAfterMinutes
) )
// Connect idle service to usage tracking
if let idleService = idleService { if let idleService = idleService {
usageTrackingService?.setupIdleMonitoring(idleService) usageTrackingService?.setupIdleMonitoring(idleService)
} }
@@ -275,7 +272,8 @@ class AppDelegate: NSObject, NSApplicationDelegate, ObservableObject {
DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { [weak self] in DispatchQueue.main.asyncAfter(deadline: .now() + 0.1) { [weak self] in
guard let self else { return } guard let self else { return }
SettingsWindowPresenter.shared.show(settingsManager: self.settingsManager, initialTab: tab) SettingsWindowPresenter.shared.show(
settingsManager: self.settingsManager, initialTab: tab)
} }
} }
@@ -295,7 +293,6 @@ class AppDelegate: NSObject, NSApplicationDelegate, ObservableObject {
} }
// Custom window class that can become key to receive keyboard events
class KeyableWindow: NSWindow { class KeyableWindow: NSWindow {
override var canBecomeKey: Bool { override var canBecomeKey: Bool {
return true return true
@@ -306,7 +303,6 @@ class KeyableWindow: NSWindow {
} }
} }
// Non-key window class for subtle reminders that don't steal focus
class NonKeyWindow: NSWindow { class NonKeyWindow: NSWindow {
override var canBecomeKey: Bool { override var canBecomeKey: Bool {
return false return false

View File

@@ -233,4 +233,3 @@ class EnforceModeService: ObservableObject {
isTestMode = false isTestMode = false
} }
} }

View File

@@ -57,6 +57,19 @@ class EyeTrackingService: NSObject, ObservableObject {
super.init() super.init()
} }
// MARK: - Processing Result
/// Result struct for off-main-thread processing
/// Carries everything the frame callback computes so the main actor can
/// copy it into the service's @Published properties in a single hop.
private struct ProcessingResult {
var faceDetected: Bool = false
var isEyesClosed: Bool = false
var userLookingAtScreen: Bool = true  // optimistic default: treated as "looking" until a check says otherwise
var debugLeftPupilRatio: Double?  // horizontal pupil ratio, left eye; nil when pupil detection didn't run/succeed
var debugRightPupilRatio: Double?  // horizontal pupil ratio, right eye; nil when pupil detection didn't run/succeed
var debugYaw: Double?  // face yaw from Vision (nil until a face is processed)
var debugPitch: Double?  // face pitch from Vision (nil until a face is processed)
}
func startEyeTracking() async throws { func startEyeTracking() async throws {
print("👁️ startEyeTracking called") print("👁️ startEyeTracking called")
guard !isEyeTrackingActive else { guard !isEyeTrackingActive else {
@@ -177,6 +190,221 @@ class EyeTrackingService: NSObject, ObservableObject {
userLookingAtScreen = !lookingAway userLookingAtScreen = !lookingAway
} }
/// Non-isolated synchronous version for off-main-thread processing.
/// Returns a result struct instead of updating @Published properties directly,
/// so the caller (the capture delegate) can hop to the main actor exactly once.
///
/// - Parameters:
///   - observations: Vision face observations (nil/empty means no face in frame).
///   - imageSize: Pixel dimensions of the captured frame.
///   - pixelBuffer: Raw frame, required for pixel-based pupil detection; optional.
/// - Returns: A `ProcessingResult` snapshot of face/eye/gaze state plus debug values.
nonisolated private func processFaceObservationsSync(
    _ observations: [VNFaceObservation]?,
    imageSize: CGSize,
    pixelBuffer: CVPixelBuffer? = nil
) -> ProcessingResult {
    var result = ProcessingResult()
    // No face in frame: report "not detected" and treat the user as away.
    // `observations?.first` covers both the nil and the empty-array cases,
    // replacing the previous force-unwrap of `observations.first!`.
    guard let face = observations?.first else {
        result.faceDetected = false
        result.userLookingAtScreen = false
        return result
    }
    result.faceDetected = true
    // Without landmarks we cannot judge eyes or gaze; keep the struct's
    // optimistic defaults (eyes open, looking at screen).
    guard let landmarks = face.landmarks else {
        return result
    }
    // Eye-closure check needs both eye landmark regions.
    if let leftEye = landmarks.leftEye,
        let rightEye = landmarks.rightEye
    {
        result.isEyesClosed = detectEyesClosedSync(
            leftEye: leftEye, rightEye: rightEye)
    }
    // Gaze direction from head pose and (when a frame is available)
    // pixel-based pupil tracking.
    let gazeResult = detectLookingAwaySync(
        face: face,
        landmarks: landmarks,
        imageSize: imageSize,
        pixelBuffer: pixelBuffer
    )
    result.userLookingAtScreen = !gazeResult.lookingAway
    result.debugLeftPupilRatio = gazeResult.leftPupilRatio
    result.debugRightPupilRatio = gazeResult.rightPupilRatio
    result.debugYaw = gazeResult.yaw
    result.debugPitch = gazeResult.pitch
    return result
}
/// Non-isolated eye closure detection.
/// Both eyes must be below the configured height threshold to count as closed,
/// so a wink is not reported as "eyes closed".
nonisolated private func detectEyesClosedSync(
    leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D
) -> Bool {
    let constants = EyeTrackingConstants.shared
    // Feature flag plus a minimum landmark count; failing either means "open".
    guard constants.eyeClosedEnabled,
        leftEye.pointCount >= 2,
        rightEye.pointCount >= 2
    else {
        return false
    }
    let threshold = constants.eyeClosedThreshold
    return calculateEyeHeightSync(leftEye) < threshold
        && calculateEyeHeightSync(rightEye) < threshold
}
/// Vertical spread of an eye's landmarks in normalized coordinates.
/// Serves as a crude openness measure: a smaller spread means a more closed lid.
nonisolated private func calculateEyeHeightSync(_ eye: VNFaceLandmarkRegion2D) -> CGFloat {
    let ys = eye.normalizedPoints.map(\.y)
    // Fewer than two landmarks gives no spread to measure.
    guard ys.count >= 2, let top = ys.max(), let bottom = ys.min() else {
        return 0
    }
    return abs(top - bottom)
}
/// Non-isolated gaze detection result
/// Aggregates the outcome of `detectLookingAwaySync` so it can be produced
/// off the main thread and copied into published state later.
private struct GazeResult {
var lookingAway: Bool = false  // true when either the pose check or the pupil check flags the user as away
var leftPupilRatio: Double?  // horizontal pupil ratio for the left eye; nil when pupil detection didn't produce one
var rightPupilRatio: Double?  // horizontal pupil ratio for the right eye; nil when pupil detection didn't produce one
var yaw: Double?  // face yaw (units as reported by Vision; 0.0 substituted when Vision gives none)
var pitch: Double?  // face pitch (units as reported by Vision; 0.0 substituted when Vision gives none)
}
/// Non-isolated gaze direction detection
/// Combines two independent signals: (1) head pose via yaw/pitch thresholds and
/// (2) pixel-based pupil position, which only runs when a pixel buffer is
/// supplied. Safe to call off the main thread; the only main-actor work is a
/// fire-and-forget calibration-sample hop.
nonisolated private func detectLookingAwaySync(
face: VNFaceObservation,
landmarks: VNFaceLandmarks2D,
imageSize: CGSize,
pixelBuffer: CVPixelBuffer?
) -> GazeResult {
let constants = EyeTrackingConstants.shared
var result = GazeResult()
// 1. Face Pose Check (Yaw & Pitch)
// Missing yaw/pitch fall back to 0 so the debug fields are always populated.
let yaw = face.yaw?.doubleValue ?? 0.0
let pitch = face.pitch?.doubleValue ?? 0.0
result.yaw = yaw
result.pitch = pitch
var poseLookingAway = false
// NOTE(review): the whole pose check (including yaw) is gated on pitch being
// non-nil, so a face with yaw data but no pitch data skips it entirely —
// confirm this gating is intentional.
if face.pitch != nil {
if constants.yawEnabled {
let yawThreshold = constants.yawThreshold
if abs(yaw) > yawThreshold {
poseLookingAway = true
}
}
// Pitch is only consulted when yaw did not already flag the pose.
if !poseLookingAway {
var pitchLookingAway = false
// Looking up and looking down have independent toggles and thresholds.
if constants.pitchUpEnabled && pitch > constants.pitchUpThreshold {
pitchLookingAway = true
}
if constants.pitchDownEnabled && pitch < constants.pitchDownThreshold {
pitchLookingAway = true
}
poseLookingAway = pitchLookingAway
}
}
// 2. Eye Gaze Check (Pixel-Based Pupil Detection)
// Needs the raw frame, both eye landmark regions, and the feature flag on.
var eyesLookingAway = false
if let pixelBuffer = pixelBuffer,
let leftEye = landmarks.leftEye,
let rightEye = landmarks.rightEye,
constants.pixelGazeEnabled
{
var leftGazeRatio: Double? = nil
var rightGazeRatio: Double? = nil
// Detect left pupil (side = 0)
if let leftResult = PupilDetector.detectPupil(
in: pixelBuffer,
eyeLandmarks: leftEye,
faceBoundingBox: face.boundingBox,
imageSize: imageSize,
side: 0
) {
leftGazeRatio = calculateGazeRatioSync(
pupilPosition: leftResult.pupilPosition,
eyeRegion: leftResult.eyeRegion
)
}
// Detect right pupil (side = 1)
if let rightResult = PupilDetector.detectPupil(
in: pixelBuffer,
eyeLandmarks: rightEye,
faceBoundingBox: face.boundingBox,
imageSize: imageSize,
side: 1
) {
rightGazeRatio = calculateGazeRatioSync(
pupilPosition: rightResult.pupilPosition,
eyeRegion: rightResult.eyeRegion
)
}
result.leftPupilRatio = leftGazeRatio
result.rightPupilRatio = rightGazeRatio
// Connect to CalibrationManager on main thread.
// Fire-and-forget so calibration sampling never blocks frame processing;
// the gaze decision below does not depend on the Task's outcome.
if let leftRatio = leftGazeRatio,
let rightRatio = rightGazeRatio {
Task { @MainActor in
if CalibrationManager.shared.isCalibrating {
CalibrationManager.shared.collectSample(
leftRatio: leftRatio,
rightRatio: rightRatio
)
}
}
// Average both eyes; a ratio outside [pixelGazeMinRatio, pixelGazeMaxRatio]
// means the gaze left the configured horizontal band.
let avgRatio = (leftRatio + rightRatio) / 2.0
let lookingRight = avgRatio <= constants.pixelGazeMinRatio
let lookingLeft = avgRatio >= constants.pixelGazeMaxRatio
eyesLookingAway = lookingRight || lookingLeft
}
}
// Either signal alone is enough to count as looking away.
result.lookingAway = poseLookingAway || eyesLookingAway
return result
}
/// Non-isolated gaze ratio calculation
/// Maps a pupil's x coordinate to a clamped 0…1 horizontal position.
/// NOTE(review): the `(center * 2 - 10)` denominator looks like a ported
/// heuristic (presumably 10 is a pixel margin from the eye-region crop) —
/// TODO confirm it matches PupilDetector's region padding.
nonisolated private func calculateGazeRatioSync(pupilPosition: PupilPosition, eyeRegion: EyeRegion) -> Double {
let pupilX = Double(pupilPosition.x)
let eyeCenterX = Double(eyeRegion.center.x)
let denominator = (eyeCenterX * 2.0 - 10.0)
// Degenerate/tiny eye region: fall back to plain linear interpolation
// across the eye frame instead of the heuristic above.
guard denominator > 0 else {
let eyeLeft = Double(eyeRegion.frame.minX)
let eyeRight = Double(eyeRegion.frame.maxX)
let eyeWidth = eyeRight - eyeLeft
guard eyeWidth > 0 else { return 0.5 }  // zero-width frame: report centered
// NOTE(review): unlike the main path below, this fallback is not clamped
// to 0…1 — confirm downstream threshold checks tolerate that.
return (pupilX - eyeLeft) / eyeWidth
}
let ratio = pupilX / denominator
// Clamp so threshold comparisons always see a valid ratio.
return max(0.0, min(1.0, ratio))
}
private func detectEyesClosed( private func detectEyesClosed(
leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D, shouldLog: Bool leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D, shouldLog: Bool
@@ -406,12 +634,24 @@ extension EyeTrackingService: AVCaptureVideoDataOutputSampleBufferDelegate {
height: CVPixelBufferGetHeight(pixelBuffer) height: CVPixelBufferGetHeight(pixelBuffer)
) )
// Process face observations on the video queue (not main thread)
// to avoid UI freezes from heavy pupil detection
let observations = request.results as? [VNFaceObservation]
let result = self.processFaceObservationsSync(
observations,
imageSize: size,
pixelBuffer: pixelBuffer
)
// Only dispatch UI updates to main thread
Task { @MainActor in Task { @MainActor in
self.processFaceObservations( self.faceDetected = result.faceDetected
request.results as? [VNFaceObservation], self.isEyesClosed = result.isEyesClosed
imageSize: size, self.userLookingAtScreen = result.userLookingAtScreen
pixelBuffer: pixelBuffer self.debugLeftPupilRatio = result.debugLeftPupilRatio
) self.debugRightPupilRatio = result.debugRightPupilRatio
self.debugYaw = result.debugYaw
self.debugPitch = result.debugPitch
} }
} }

View File

@@ -71,6 +71,7 @@ final class FullscreenDetectionService: ObservableObject {
private let permissionManager: ScreenCapturePermissionManaging private let permissionManager: ScreenCapturePermissionManaging
private let environmentProvider: FullscreenEnvironmentProviding private let environmentProvider: FullscreenEnvironmentProviding
// This initializer is only for use within main actor contexts
init( init(
permissionManager: ScreenCapturePermissionManaging = ScreenCapturePermissionManager.shared, permissionManager: ScreenCapturePermissionManaging = ScreenCapturePermissionManager.shared,
environmentProvider: FullscreenEnvironmentProviding = SystemFullscreenEnvironmentProvider() environmentProvider: FullscreenEnvironmentProviding = SystemFullscreenEnvironmentProvider()
@@ -80,6 +81,19 @@ final class FullscreenDetectionService: ObservableObject {
setupObservers() setupObservers()
} }
// Factory method to safely create instances from non-main actor contexts.
/// Hops onto the main actor to run the initializer (which is intended for
/// main-actor use only), so background callers can still obtain an instance.
static func create(
    permissionManager: ScreenCapturePermissionManaging = ScreenCapturePermissionManager.shared,
    environmentProvider: FullscreenEnvironmentProviding = SystemFullscreenEnvironmentProvider()
) async -> FullscreenDetectionService {
    await MainActor.run {
        FullscreenDetectionService(
            permissionManager: permissionManager,
            environmentProvider: environmentProvider
        )
    }
}
deinit { deinit {
let notificationCenter = NSWorkspace.shared.notificationCenter let notificationCenter = NSWorkspace.shared.notificationCenter
observers.forEach { notificationCenter.removeObserver($0) } observers.forEach { notificationCenter.removeObserver($0) }

File diff suppressed because it is too large Load Diff

View File

@@ -73,24 +73,26 @@ final class ServiceContainer {
func setupSmartModeServices() { func setupSmartModeServices() {
let settings = settingsManager.settings let settings = settingsManager.settings
fullscreenService = FullscreenDetectionService() Task { @MainActor in
idleService = IdleMonitoringService( fullscreenService = await FullscreenDetectionService.create()
idleThresholdMinutes: settings.smartMode.idleThresholdMinutes idleService = IdleMonitoringService(
) idleThresholdMinutes: settings.smartMode.idleThresholdMinutes
usageTrackingService = UsageTrackingService( )
resetThresholdMinutes: settings.smartMode.usageResetAfterMinutes usageTrackingService = UsageTrackingService(
) resetThresholdMinutes: settings.smartMode.usageResetAfterMinutes
)
// Connect idle service to usage tracking // Connect idle service to usage tracking
if let idleService = idleService { if let idleService = idleService {
usageTrackingService?.setupIdleMonitoring(idleService) usageTrackingService?.setupIdleMonitoring(idleService)
}
// Connect services to timer engine
timerEngine.setupSmartMode(
fullscreenService: fullscreenService,
idleService: idleService
)
} }
// Connect services to timer engine
timerEngine.setupSmartMode(
fullscreenService: fullscreenService,
idleService: idleService
)
} }
/// Resets the container for testing purposes /// Resets the container for testing purposes

View File

@@ -206,11 +206,7 @@ struct SettingsWindowView: View {
SettingsWindowPresenter.shared.close() SettingsWindowPresenter.shared.close()
DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) { DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
self.settingsManager.settings.hasCompletedOnboarding = false OnboardingWindowPresenter.shared.show(settingsManager: self.settingsManager)
DispatchQueue.main.asyncAfter(deadline: .now() + 0.2) {
OnboardingWindowPresenter.shared.show(settingsManager: self.settingsManager)
}
} }
} }
#endif #endif