general: it's the final countdown
@@ -54,13 +54,14 @@ enum EyeTrackingConstants: Sendable {
static let maxPupilEnabled: Bool = true

// MARK: - Pixel-Based Gaze Detection Thresholds
/// Python GazeTracking thresholds for pixel-based pupil detection
/// Formula: pupilX / (eyeCenterX * 2 - 10)
/// Looking right: ratio ≤ 0.35
/// Looking center: 0.35 < ratio < 0.65
/// Looking left: ratio ≥ 0.65
static let pixelGazeMinRatio: Double = 0.35 // Looking right threshold
static let pixelGazeMaxRatio: Double = 0.65 // Looking left threshold
/// Thresholds for pupil-based gaze detection
/// Based on video test data:
/// - Looking at screen (center): H ≈ 0.20-0.50
/// - Looking left (away): H ≈ 0.50+
/// - Looking right (away): H ≈ 0.20-
/// Coordinate system: Lower values = right, Higher values = left
static let pixelGazeMinRatio: Double = 0.20 // Below this = looking right (away)
static let pixelGazeMaxRatio: Double = 0.50 // Above this = looking left (away)
static let pixelGazeEnabled: Bool = true

// MARK: - Screen Boundary Detection (New)
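A minimal sketch of how these new constants classify a single frame; the `classify` helper and its inputs are illustrative, not part of this change:

enum PixelGaze {
    case lookingRight, center, lookingLeft

    // Mirrors the Python GazeTracking formula quoted in the doc comment above.
    static func classify(pupilX: Double, eyeCenterX: Double) -> PixelGaze {
        let ratio = pupilX / (eyeCenterX * 2 - 10)
        if ratio <= 0.20 { return .lookingRight }  // pixelGazeMinRatio
        if ratio >= 0.50 { return .lookingLeft }   // pixelGazeMaxRatio
        return .center
    }
}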
@@ -121,23 +121,29 @@ struct GazeThresholds: Codable {
let referenceFaceWidth: Double // Average face width during calibration

var isValid: Bool {
// Basic sanity checks
return maxRightRatio < minLeftRatio &&
minUpRatio < maxDownRatio &&
screenRightBound < screenLeftBound && // Assuming lower ratio = right
screenTopBound < screenBottomBound // Assuming lower ratio = up
// Just check that we have reasonable values (not NaN or infinite)
let values = [minLeftRatio, maxRightRatio, minUpRatio, maxDownRatio,
screenLeftBound, screenRightBound, screenTopBound, screenBottomBound]
return values.allSatisfy { $0.isFinite }
}

/// Default thresholds based on video test data:
/// - Center (looking at screen): H ≈ 0.29-0.35
/// - Screen left edge: H ≈ 0.45-0.50
/// - Looking away left: H ≈ 0.55+
/// - Screen right edge: H ≈ 0.20-0.25
/// - Looking away right: H ≈ 0.15-
/// Coordinate system: Lower H = right, Higher H = left
static var defaultThresholds: GazeThresholds {
GazeThresholds(
minLeftRatio: 0.65,
maxRightRatio: 0.35,
minUpRatio: 0.40,
maxDownRatio: 0.60,
screenLeftBound: 0.60,
screenRightBound: 0.40,
screenTopBound: 0.45,
screenBottomBound: 0.55,
minLeftRatio: 0.55, // Beyond this = looking left (away)
maxRightRatio: 0.15, // Below this = looking right (away)
minUpRatio: 0.30, // Below this = looking up (away)
maxDownRatio: 0.60, // Above this = looking down (away)
screenLeftBound: 0.50, // Left edge of screen
screenRightBound: 0.20, // Right edge of screen
screenTopBound: 0.35, // Top edge of screen
screenBottomBound: 0.55, // Bottom edge of screen
referenceFaceWidth: 0.0 // 0.0 means unused/uncalibrated
)
}
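An illustrative check of the new defaults (the sample values are made up, not from the commit): a horizontal ratio of 0.58 exceeds minLeftRatio (0.55) and so counts as looking away left, while 0.33 falls inside the 0.20...0.50 screen band.

let t = GazeThresholds.defaultThresholds
let h = 0.58
let isAwayLeft = h > t.minLeftRatio    // true  (0.58 > 0.55)
let isAwayRight = h < t.maxRightRatio  // false (0.58 > 0.15)
// On-screen horizontally means between the right and left bounds:
let onScreenH = (t.screenRightBound...t.screenLeftBound).contains(0.33) // true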
@@ -187,60 +193,154 @@ struct CalibrationData: Codable {
}

mutating func calculateThresholds() {
// We need Center, Left, Right, Up, Down samples for a full calibration
// Fallback: If corners (TopLeft, etc.) are available, use them to reinforce bounds
// Calibration uses actual measured gaze ratios from the user looking at different
// screen positions. The face width during calibration serves as a reference for
// distance-based normalization during live tracking.
//
// Coordinate system (based on video testing):
// Horizontal: 0.0 = far right, 1.0 = far left
// Vertical: 0.0 = top, 1.0 = bottom
// Center (looking at screen) typically: H ≈ 0.29-0.35

let centerH = averageRatio(for: .center) ?? 0.5
let centerV = averageVerticalRatio(for: .center) ?? 0.5
// 1. Get center reference point
let centerH = averageRatio(for: .center)
let centerV = averageVerticalRatio(for: .center)
let centerFaceWidth = averageFaceWidth(for: .center)

// 1. Horizontal Bounds
// If specific Left/Right steps missing, try corners
let leftH = averageRatio(for: .left) ?? averageRatio(for: .topLeft) ?? averageRatio(for: .bottomLeft) ?? (centerH + 0.15)
let rightH = averageRatio(for: .right) ?? averageRatio(for: .topRight) ?? averageRatio(for: .bottomRight) ?? (centerH - 0.15)
guard let cH = centerH else {
print("⚠️ No center calibration data, using defaults")
self.computedThresholds = GazeThresholds.defaultThresholds
return
}

// 2. Vertical Bounds
let upV = averageVerticalRatio(for: .up) ?? averageVerticalRatio(for: .topLeft) ?? averageVerticalRatio(for: .topRight) ?? (centerV - 0.15)
let downV = averageVerticalRatio(for: .down) ?? averageVerticalRatio(for: .bottomLeft) ?? averageVerticalRatio(for: .bottomRight) ?? (centerV + 0.15)
let cV = centerV ?? 0.45 // Default vertical center

// 3. Face Width Reference (average of all center samples)
let refFaceWidth = averageFaceWidth(for: .center) ?? 0.0
print("📊 Calibration data collected:")
print(" Center H: \(String(format: "%.3f", cH)), V: \(String(format: "%.3f", cV))")

// 4. Compute Boundaries with Margin
// "Screen Bound" is exactly where the user looked.
// We set thresholds slightly BEYOND that to detect "Looking Away".
// 2. Get horizontal screen bounds from left/right calibration points
// These represent where the user looked when targeting screen edges
// Use farLeft/farRight for "beyond screen" thresholds, left/right for screen bounds

// Note: Assuming standard coordinates where:
// Horizontal: 0.0 (Right) -> 1.0 (Left)
// Vertical: 0.0 (Up) -> 1.0 (Down)
// Screen bounds (where user looked at screen edges)
let screenLeftH = averageRatio(for: .left)
?? averageRatio(for: .topLeft)
?? averageRatio(for: .bottomLeft)
let screenRightH = averageRatio(for: .right)
?? averageRatio(for: .topRight)
?? averageRatio(for: .bottomRight)

// Thresholds for "Looking Away"
// Looking Left = Ratio > Screen Left Edge
let lookLeftThreshold = leftH + 0.05
// Looking Right = Ratio < Screen Right Edge
let lookRightThreshold = rightH - 0.05
// Far bounds (where user looked beyond screen - for "looking away" threshold)
let farLeftH = averageRatio(for: .farLeft)
let farRightH = averageRatio(for: .farRight)

// Looking Up = Ratio < Screen Top Edge
let lookUpThreshold = upV - 0.05
// Looking Down = Ratio > Screen Bottom Edge
let lookDownThreshold = downV + 0.05
// 3. Calculate horizontal thresholds
// If we have farLeft/farRight, use the midpoint between screen edge and far as threshold
// Otherwise, extend screen bounds by a margin

let leftBound: Double
let rightBound: Double
let lookLeftThreshold: Double
let lookRightThreshold: Double

if let sLeft = screenLeftH {
leftBound = sLeft
// If we have farLeft, threshold is midpoint; otherwise extend by margin
if let fLeft = farLeftH {
lookLeftThreshold = (sLeft + fLeft) / 2.0
} else {
// Extend beyond screen by ~50% of center-to-edge distance
let edgeDistance = sLeft - cH
lookLeftThreshold = sLeft + edgeDistance * 0.5
}
} else {
// No left calibration - estimate based on center
leftBound = cH + 0.15
lookLeftThreshold = cH + 0.20
}

if let sRight = screenRightH {
rightBound = sRight
if let fRight = farRightH {
lookRightThreshold = (sRight + fRight) / 2.0
} else {
let edgeDistance = cH - sRight
lookRightThreshold = sRight - edgeDistance * 0.5
}
} else {
rightBound = cH - 0.15
lookRightThreshold = cH - 0.20
}

// 4. Get vertical screen bounds
let screenTopV = averageVerticalRatio(for: .up)
?? averageVerticalRatio(for: .topLeft)
?? averageVerticalRatio(for: .topRight)
let screenBottomV = averageVerticalRatio(for: .down)
?? averageVerticalRatio(for: .bottomLeft)
?? averageVerticalRatio(for: .bottomRight)

let topBound: Double
let bottomBound: Double
let lookUpThreshold: Double
let lookDownThreshold: Double

if let sTop = screenTopV {
topBound = sTop
let edgeDistance = cV - sTop
lookUpThreshold = sTop - edgeDistance * 0.5
} else {
topBound = cV - 0.10
lookUpThreshold = cV - 0.15
}

if let sBottom = screenBottomV {
bottomBound = sBottom
let edgeDistance = sBottom - cV
lookDownThreshold = sBottom + edgeDistance * 0.5
} else {
bottomBound = cV + 0.10
lookDownThreshold = cV + 0.15
}

// 5. Reference face width for distance normalization
// Average face width from all calibration steps gives a good reference
let allFaceWidths = CalibrationStep.allCases.compactMap { averageFaceWidth(for: $0) }
let refFaceWidth = allFaceWidths.isEmpty ? 0.0 : allFaceWidths.reduce(0.0, +) / Double(allFaceWidths.count)

// 6. Create thresholds
let thresholds = GazeThresholds(
minLeftRatio: lookLeftThreshold,
maxRightRatio: lookRightThreshold,
minUpRatio: lookUpThreshold,
maxDownRatio: lookDownThreshold,
screenLeftBound: leftH,
screenRightBound: rightH,
screenTopBound: upV,
screenBottomBound: downV,
screenLeftBound: leftBound,
screenRightBound: rightBound,
screenTopBound: topBound,
screenBottomBound: bottomBound,
referenceFaceWidth: refFaceWidth
)

self.computedThresholds = thresholds

print("✓ Calibration thresholds calculated:")
print(" H-Range: \(String(format: "%.3f", rightH)) to \(String(format: "%.3f", leftH))")
print(" V-Range: \(String(format: "%.3f", upV)) to \(String(format: "%.3f", downV))")
print(" Center: H=\(String(format: "%.3f", cH)), V=\(String(format: "%.3f", cV))")
print(" Screen H-Range: \(String(format: "%.3f", rightBound)) to \(String(format: "%.3f", leftBound))")
print(" Screen V-Range: \(String(format: "%.3f", topBound)) to \(String(format: "%.3f", bottomBound))")
print(" Away Thresholds: L≥\(String(format: "%.3f", lookLeftThreshold)), R≤\(String(format: "%.3f", lookRightThreshold))")
print(" Away Thresholds: U≤\(String(format: "%.3f", lookUpThreshold)), D≥\(String(format: "%.3f", lookDownThreshold))")
print(" Ref Face Width: \(String(format: "%.3f", refFaceWidth))")

// Log per-step data for debugging
print(" Per-step data:")
for step in CalibrationStep.allCases {
if let h = averageRatio(for: step) {
let v = averageVerticalRatio(for: step) ?? -1
let fw = averageFaceWidth(for: step) ?? -1
let count = getSamples(for: step).count
print(" \(step.rawValue): H=\(String(format: "%.3f", h)), V=\(String(format: "%.3f", v)), FW=\(String(format: "%.3f", fw)), samples=\(count)")
}
}
}
}
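A worked instance of the midpoint rule above, with illustrative numbers (none of these values come from the commit):

// Say calibration measured: center H = 0.32, screen-left edge H = 0.48,
// and (when available) farLeft H = 0.60.
let cHExample = 0.32
let sLeftExample = 0.48
let fLeftExample = 0.60
// With farLeft: threshold is the midpoint between screen edge and beyond-screen.
let withFar = (sLeftExample + fLeftExample) / 2.0                 // 0.54
// Without farLeft: extend the edge by 50% of the center-to-edge distance.
let withoutFar = sLeftExample + (sLeftExample - cHExample) * 0.5  // 0.56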
@@ -15,6 +15,7 @@ class CalibrationManager: ObservableObject {
// MARK: - Published Properties

@Published var isCalibrating = false
@Published var isCollectingSamples = false // True when actively collecting (after countdown)
@Published var currentStep: CalibrationStep?
@Published var currentStepIndex = 0
@Published var samplesCollected = 0
@@ -22,9 +23,8 @@ class CalibrationManager: ObservableObject {

// MARK: - Configuration

private let samplesPerStep = 20 // Collect 20 samples per calibration point (~1 second at 30fps)
private let samplesPerStep = 30 // Collect 30 samples per calibration point (~1 second at 30fps)
private let userDefaultsKey = "eyeTrackingCalibration"
private let calibrationValidityDays = 30

// Calibration sequence (9 steps)
private let calibrationSteps: [CalibrationStep] = [
@@ -50,14 +50,27 @@ class CalibrationManager: ObservableObject {
func startCalibration() {
print("🎯 Starting calibration...")
isCalibrating = true
isCollectingSamples = false
currentStepIndex = 0
currentStep = calibrationSteps[0]
samplesCollected = 0
calibrationData = CalibrationData()
}

/// Reset state for a new calibration attempt (clears isComplete flag from previous calibration)
func resetForNewCalibration() {
print("🔄 Resetting for new calibration...")
calibrationData = CalibrationData()
}

func startCollectingSamples() {
guard isCalibrating, currentStep != nil else { return }
print("📊 Started collecting samples for step: \(currentStep?.displayName ?? "unknown")")
isCollectingSamples = true
}

func collectSample(leftRatio: Double?, rightRatio: Double?, leftVertical: Double? = nil, rightVertical: Double? = nil, faceWidthRatio: Double? = nil) {
guard isCalibrating, let step = currentStep else { return }
guard isCalibrating, isCollectingSamples, let step = currentStep else { return }

let sample = GazeSample(
leftRatio: leftRatio,
@@ -76,6 +89,7 @@ class CalibrationManager: ObservableObject {
}

private func advanceToNextStep() {
isCollectingSamples = false
currentStepIndex += 1

if currentStepIndex < calibrationSteps.count {
@@ -108,6 +122,7 @@ class CalibrationManager: ObservableObject {
applyCalibration()

isCalibrating = false
isCollectingSamples = false
currentStep = nil
currentStepIndex = 0
samplesCollected = 0
@@ -118,6 +133,7 @@ class CalibrationManager: ObservableObject {
func cancelCalibration() {
print("❌ Calibration cancelled")
isCalibrating = false
isCollectingSamples = false
currentStep = nil
currentStepIndex = 0
samplesCollected = 0
@@ -183,15 +199,7 @@ class CalibrationManager: ObservableObject {
thresholds.isValid else {
return false
}

// Check if calibration is not too old
let daysSinceCalibration = Calendar.current.dateComponents(
[.day],
from: calibrationData.calibrationDate,
to: Date()
).day ?? 0

return daysSinceCalibration < calibrationValidityDays
return true
}

func needsRecalibration() -> Bool {
Gaze/Services/CalibrationWindowManager.swift (new file, 54 lines)
@@ -0,0 +1,54 @@
//
// CalibrationWindowManager.swift
// Gaze
//
// Manages the fullscreen calibration overlay window.
//

import AppKit
import SwiftUI

@MainActor
final class CalibrationWindowManager {
static let shared = CalibrationWindowManager()

private var windowController: NSWindowController?

private init() {}

func showCalibrationOverlay() {
guard let screen = NSScreen.main else { return }

let window = KeyableWindow(
contentRect: screen.frame,
styleMask: [.borderless, .fullSizeContentView],
backing: .buffered,
defer: false
)

window.level = .screenSaver
window.isOpaque = true
window.backgroundColor = .black
window.collectionBehavior = [.canJoinAllSpaces, .fullScreenAuxiliary]
window.acceptsMouseMovedEvents = true
window.ignoresMouseEvents = false

let overlayView = CalibrationOverlayView {
self.dismissCalibrationOverlay()
}
window.contentView = NSHostingView(rootView: overlayView)

windowController = NSWindowController(window: window)
windowController?.showWindow(nil)
window.makeKeyAndOrderFront(nil)
NSApp.activate(ignoringOtherApps: true)

print("🎯 Calibration overlay window opened")
}

func dismissCalibrationOverlay() {
windowController?.close()
windowController = nil
print("🎯 Calibration overlay window closed")
}
}
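KeyableWindow is referenced here but not included in this commit. Borderless NSWindows refuse key status by default, which would break the Escape shortcut and button focus in the overlay, so presumably it is a small subclass along these lines (an assumption, not code from the diff):

import AppKit

// Assumed shape of KeyableWindow: opt a borderless window back into
// becoming key/main so keyboard shortcuts and focus work in the overlay.
final class KeyableWindow: NSWindow {
    override var canBecomeKey: Bool { true }
    override var canBecomeMain: Bool { true }
}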
@@ -494,27 +494,36 @@ class EyeTrackingService: NSObject, ObservableObject {
if let thresholds = CalibrationState.shared.thresholds,
CalibrationState.shared.isComplete {

// 1. Distance Scaling
// 1. Distance Scaling using face width as proxy
// When user is farther from screen, face appears smaller and eye movements
// (in ratio terms) compress toward center. We scale to compensate.
let currentFaceWidth = face.boundingBox.width
let refFaceWidth = thresholds.referenceFaceWidth

var distanceScale = 1.0
if refFaceWidth > 0 && currentFaceWidth > 0 {
distanceScale = refFaceWidth / currentFaceWidth
distanceScale = 1.0 + (distanceScale - 1.0) * EyeTrackingConstants.distanceSensitivity
// ratio > 1 means user is farther than calibration distance
// ratio < 1 means user is closer than calibration distance
let rawScale = refFaceWidth / currentFaceWidth
// Apply sensitivity factor and clamp to reasonable range
distanceScale = 1.0 + (rawScale - 1.0) * EyeTrackingConstants.distanceSensitivity
distanceScale = max(0.5, min(2.0, distanceScale)) // Clamp to 0.5x - 2x
}

// 2. Normalize Gaze
// 2. Calculate calibrated center point
let centerH = (thresholds.screenLeftBound + thresholds.screenRightBound) / 2.0
let centerV = (thresholds.screenTopBound + thresholds.screenBottomBound) / 2.0

// 3. Normalize gaze relative to center, scaled for distance
// When farther away, eye movements are smaller, so we amplify them
let deltaH = (avgH - centerH) * distanceScale
let deltaV = (avgV - centerV) * distanceScale

let normalizedH = centerH + deltaH
let normalizedV = centerV + deltaV

// 3. Boundary Check
// 4. Boundary Check - compare against screen bounds
// Looking away = gaze is beyond the calibrated screen edges
let margin = EyeTrackingConstants.boundaryForgivenessMargin

let isLookingLeft = normalizedH > (thresholds.screenLeftBound + margin)
@@ -525,7 +534,7 @@ class EyeTrackingService: NSObject, ObservableObject {
eyesLookingAway = isLookingLeft || isLookingRight || isLookingUp || isLookingDown

} else {
// Fallback to default constants
// Fallback to default constants (no calibration)
let lookingRight = avgH <= EyeTrackingConstants.pixelGazeMinRatio
let lookingLeft = avgH >= EyeTrackingConstants.pixelGazeMaxRatio
eyesLookingAway = lookingRight || lookingLeft
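A worked example of the distance scaling above (illustrative numbers; the 0.8 sensitivity is a stand-in for EyeTrackingConstants.distanceSensitivity, whose real value is not shown in this diff):

// Calibrated face width 0.30, current 0.24 → the user moved farther back.
let refWidth = 0.30
let curWidth = 0.24
let rawScale = refWidth / curWidth                         // 1.25 (farther away)
let sensitivity = 0.8                                      // assumed value
var scale = 1.0 + (rawScale - 1.0) * sensitivity           // 1.20
scale = max(0.5, min(2.0, scale))                          // already in range
// A gaze delta of 0.10 from the calibrated center is amplified to 0.12
// before the screen-bound comparison.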
@@ -44,12 +44,15 @@ enum GazeDirection: String, Sendable, CaseIterable {
case downRight = "↘"

/// Thresholds for direction detection
/// Horizontal: 0.0 = looking right (from camera POV), 1.0 = looking left
/// Based on actual video test data:
/// - Looking at screen (center): H ≈ 0.29-0.35
/// - Looking left (away): H ≈ 0.62-0.70
/// Horizontal: Lower values = center/right, Higher values = left
/// Vertical: 0.0 = looking up, 1.0 = looking down
private static let horizontalLeftThreshold = 0.55 // Above this = looking left
private static let horizontalRightThreshold = 0.45 // Below this = looking right
private static let verticalUpThreshold = 0.40 // Below this = looking up
private static let verticalDownThreshold = 0.60 // Above this = looking down
private static let horizontalLeftThreshold = 0.50 // Above this = looking left (away)
private static let horizontalRightThreshold = 0.20 // Below this = looking right
private static let verticalUpThreshold = 0.35 // Below this = looking up
private static let verticalDownThreshold = 0.55 // Above this = looking down

static func from(horizontal: Double, vertical: Double) -> GazeDirection {
let isLeft = horizontal > horizontalLeftThreshold
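The hunk cuts off after isLeft. A plausible completion of from(horizontal:vertical:) under the same threshold scheme, for reference only — the case names other than downRight, and the way axes combine into diagonals, are assumptions, not code from this commit:

static func from(horizontal: Double, vertical: Double) -> GazeDirection {
    let isLeft = horizontal > horizontalLeftThreshold    // > 0.50
    let isRight = horizontal < horizontalRightThreshold  // < 0.20
    let isUp = vertical < verticalUpThreshold            // < 0.35
    let isDown = vertical > verticalDownThreshold        // > 0.55

    // Combine the two axes; diagonals take precedence over single axes.
    switch (isLeft, isRight, isUp, isDown) {
    case (true, _, true, _): return .upLeft
    case (true, _, _, true): return .downLeft
    case (_, true, true, _): return .upRight
    case (_, true, _, true): return .downRight
    case (true, _, _, _): return .left
    case (_, true, _, _): return .right
    case (_, _, true, _): return .up
    case (_, _, _, true): return .down
    default: return .center
    }
}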
Gaze/Views/Components/CalibrationOverlayView.swift (new file, 449 lines)
@@ -0,0 +1,449 @@
//
// CalibrationOverlayView.swift
// Gaze
//
// Fullscreen overlay view for eye tracking calibration targets.
//

import SwiftUI
import Combine
import AVFoundation

struct CalibrationOverlayView: View {
@StateObject private var calibrationManager = CalibrationManager.shared
@StateObject private var eyeTrackingService = EyeTrackingService.shared
@StateObject private var viewModel = CalibrationOverlayViewModel()

let onDismiss: () -> Void

var body: some View {
GeometryReader { geometry in
ZStack {
Color.black.ignoresSafeArea()

// Camera preview at 50% opacity (mirrored for natural feel)
if let previewLayer = eyeTrackingService.previewLayer {
CameraPreviewView(previewLayer: previewLayer, borderColor: .clear)
.scaleEffect(x: -1, y: 1)
.opacity(0.5)
.ignoresSafeArea()
}

if let error = viewModel.showError {
errorView(error)
} else if !viewModel.cameraStarted {
startingCameraView
} else if calibrationManager.isCalibrating {
calibrationContentView(screenSize: geometry.size)
} else if viewModel.calibrationStarted && calibrationManager.calibrationData.isComplete {
// Only show completion if we started calibration this session AND it completed
completionView
} else if viewModel.calibrationStarted {
// Calibration was started but not yet complete - show content
calibrationContentView(screenSize: geometry.size)
}
}
}
.task {
await viewModel.startCamera(eyeTrackingService: eyeTrackingService, calibrationManager: calibrationManager)
}
.onDisappear {
viewModel.cleanup(eyeTrackingService: eyeTrackingService, calibrationManager: calibrationManager)
}
.onChange(of: calibrationManager.currentStep) { oldStep, newStep in
if newStep != nil && oldStep != newStep {
viewModel.startStepCountdown(calibrationManager: calibrationManager)
}
}
}

// MARK: - Starting Camera View

private var startingCameraView: some View {
VStack(spacing: 20) {
ProgressView()
.scaleEffect(2)
.tint(.white)

Text("Starting camera...")
.font(.title2)
.foregroundStyle(.white)
}
}

// MARK: - Error View

private func errorView(_ message: String) -> some View {
VStack(spacing: 20) {
Image(systemName: "exclamationmark.triangle.fill")
.font(.system(size: 60))
.foregroundStyle(.orange)

Text("Camera Error")
.font(.title)
.foregroundStyle(.white)

Text(message)
.font(.body)
.foregroundStyle(.gray)
.multilineTextAlignment(.center)

Button("Close") {
onDismiss()
}
.buttonStyle(.borderedProminent)
.padding(.top, 20)
}
.padding(40)
}

// MARK: - Calibration Content

private func calibrationContentView(screenSize: CGSize) -> some View {
ZStack {
VStack {
progressBar
Spacer()
}

if let step = calibrationManager.currentStep {
calibrationTarget(for: step, screenSize: screenSize)
}

VStack {
Spacer()
HStack {
cancelButton
Spacer()
if !calibrationManager.isCollectingSamples {
skipButton
}
}
.padding(.horizontal, 40)
.padding(.bottom, 40)
}

// Face detection indicator
VStack {
HStack {
Spacer()
faceDetectionIndicator
}
Spacer()
}
}
}

// MARK: - Progress Bar

private var progressBar: some View {
VStack(spacing: 10) {
HStack {
Text("Calibrating...")
.foregroundStyle(.white)
Spacer()
Text(calibrationManager.progressText)
.foregroundStyle(.white.opacity(0.7))
}

ProgressView(value: calibrationManager.progress)
.progressViewStyle(.linear)
.tint(.blue)
}
.padding()
.background(Color.black.opacity(0.7))
}

// MARK: - Face Detection Indicator

private var faceDetectionIndicator: some View {
HStack(spacing: 8) {
Circle()
.fill(viewModel.stableFaceDetected ? Color.green : Color.red)
.frame(width: 12, height: 12)

Text(viewModel.stableFaceDetected ? "Face detected" : "No face detected")
.font(.caption)
.foregroundStyle(.white.opacity(0.8))
}
.padding(.horizontal, 16)
.padding(.vertical, 10)
.background(Color.black.opacity(0.7))
.cornerRadius(20)
.padding()
.animation(.easeInOut(duration: 0.3), value: viewModel.stableFaceDetected)
}

// MARK: - Calibration Target

@ViewBuilder
private func calibrationTarget(for step: CalibrationStep, screenSize: CGSize) -> some View {
let position = targetPosition(for: step, screenSize: screenSize)

VStack(spacing: 20) {
ZStack {
// Outer ring (pulsing when counting down)
Circle()
.stroke(Color.blue.opacity(0.3), lineWidth: 3)
.frame(width: 100, height: 100)
.scaleEffect(viewModel.isCountingDown ? 1.2 : 1.0)
.animation(
viewModel.isCountingDown
? .easeInOut(duration: 0.6).repeatForever(autoreverses: true)
: .default,
value: viewModel.isCountingDown)

// Progress ring when collecting
if calibrationManager.isCollectingSamples {
Circle()
.trim(from: 0, to: CGFloat(calibrationManager.samplesCollected) / 30.0)
.stroke(Color.green, lineWidth: 4)
.frame(width: 90, height: 90)
.rotationEffect(.degrees(-90))
.animation(.linear(duration: 0.1), value: calibrationManager.samplesCollected)
}

// Inner circle
Circle()
.fill(calibrationManager.isCollectingSamples ? Color.green : Color.blue)
.frame(width: 60, height: 60)
.animation(.easeInOut(duration: 0.3), value: calibrationManager.isCollectingSamples)

// Countdown number or collecting indicator
if viewModel.isCountingDown && viewModel.countdownValue > 0 {
Text("\(viewModel.countdownValue)")
.font(.system(size: 36, weight: .bold))
.foregroundStyle(.white)
} else if calibrationManager.isCollectingSamples {
Image(systemName: "eye.fill")
.font(.system(size: 24, weight: .bold))
.foregroundStyle(.white)
}
}

Text(instructionText(for: step))
.font(.title2)
.foregroundStyle(.white)
.padding(.horizontal, 40)
.padding(.vertical, 15)
.background(Color.black.opacity(0.7))
.cornerRadius(10)
}
.position(position)
}

private func instructionText(for step: CalibrationStep) -> String {
if viewModel.isCountingDown && viewModel.countdownValue > 0 {
return "Get ready..."
} else if calibrationManager.isCollectingSamples {
return "Look at the target"
} else {
return step.instructionText
}
}

// MARK: - Buttons

private var skipButton: some View {
Button {
viewModel.skipCurrentStep(calibrationManager: calibrationManager)
} label: {
Text("Skip")
.foregroundStyle(.white)
.padding(.horizontal, 20)
.padding(.vertical, 10)
.background(Color.white.opacity(0.2))
.cornerRadius(8)
}
.buttonStyle(.plain)
}

private var cancelButton: some View {
Button {
viewModel.cleanup(eyeTrackingService: eyeTrackingService, calibrationManager: calibrationManager)
onDismiss()
} label: {
HStack(spacing: 6) {
Image(systemName: "xmark")
Text("Cancel")
}
.foregroundStyle(.white.opacity(0.7))
.padding(.horizontal, 20)
.padding(.vertical, 10)
.background(Color.white.opacity(0.1))
.cornerRadius(8)
}
.buttonStyle(.plain)
.keyboardShortcut(.escape, modifiers: [])
}

// MARK: - Completion View

private var completionView: some View {
VStack(spacing: 30) {
Image(systemName: "checkmark.circle.fill")
.font(.system(size: 80))
.foregroundStyle(.green)

Text("Calibration Complete!")
.font(.largeTitle)
.foregroundStyle(.white)
.fontWeight(.bold)

Text("Your eye tracking has been calibrated successfully.")
.font(.title3)
.foregroundStyle(.gray)

Button("Done") {
onDismiss()
}
.buttonStyle(.borderedProminent)
.keyboardShortcut(.return, modifiers: [])
.padding(.top, 20)
}
.onAppear {
DispatchQueue.main.asyncAfter(deadline: .now() + 2.5) {
onDismiss()
}
}
}

// MARK: - Helper Methods

private func targetPosition(for step: CalibrationStep, screenSize: CGSize) -> CGPoint {
let width = screenSize.width
let height = screenSize.height

let centerX = width / 2
let centerY = height / 2
let marginX: CGFloat = 150
let marginY: CGFloat = 120

switch step {
case .center:
return CGPoint(x: centerX, y: centerY)
case .left:
return CGPoint(x: centerX - width / 4, y: centerY)
case .right:
return CGPoint(x: centerX + width / 4, y: centerY)
case .farLeft:
return CGPoint(x: marginX, y: centerY)
case .farRight:
return CGPoint(x: width - marginX, y: centerY)
case .up:
return CGPoint(x: centerX, y: marginY)
case .down:
return CGPoint(x: centerX, y: height - marginY)
case .topLeft:
return CGPoint(x: marginX, y: marginY)
case .topRight:
return CGPoint(x: width - marginX, y: marginY)
case .bottomLeft:
return CGPoint(x: marginX, y: height - marginY)
case .bottomRight:
return CGPoint(x: width - marginX, y: height - marginY)
}
}
}

// MARK: - ViewModel

@MainActor
class CalibrationOverlayViewModel: ObservableObject {
@Published var countdownValue = 1
@Published var isCountingDown = false
@Published var cameraStarted = false
@Published var showError: String?
@Published var calibrationStarted = false
@Published var stableFaceDetected = false // Debounced face detection

private var countdownTask: Task<Void, Never>?
private var faceDetectionCancellable: AnyCancellable?
private var lastFaceDetectedTime: Date = .distantPast
private let faceDetectionDebounce: TimeInterval = 0.5 // 500ms debounce

func startCamera(eyeTrackingService: EyeTrackingService, calibrationManager: CalibrationManager) async {
do {
try await eyeTrackingService.startEyeTracking()
cameraStarted = true

// Set up debounced face detection
setupFaceDetectionObserver(eyeTrackingService: eyeTrackingService)

// Small delay to let camera stabilize
try? await Task.sleep(nanoseconds: 500_000_000)

// Reset any previous calibration data before starting fresh
calibrationManager.resetForNewCalibration()
calibrationManager.startCalibration()
calibrationStarted = true
startStepCountdown(calibrationManager: calibrationManager)
} catch {
showError = "Failed to start camera: \(error.localizedDescription)"
}
}

private func setupFaceDetectionObserver(eyeTrackingService: EyeTrackingService) {
faceDetectionCancellable = eyeTrackingService.$faceDetected
.receive(on: DispatchQueue.main)
.sink { [weak self] detected in
guard let self = self else { return }

if detected {
// Face detected - update immediately
self.lastFaceDetectedTime = Date()
self.stableFaceDetected = true
} else {
// Face lost - only update after debounce period
let timeSinceLastDetection = Date().timeIntervalSince(self.lastFaceDetectedTime)
if timeSinceLastDetection > self.faceDetectionDebounce {
self.stableFaceDetected = false
}
}
}
}

func cleanup(eyeTrackingService: EyeTrackingService, calibrationManager: CalibrationManager) {
countdownTask?.cancel()
countdownTask = nil
faceDetectionCancellable?.cancel()
faceDetectionCancellable = nil
isCountingDown = false

if calibrationManager.isCalibrating {
calibrationManager.cancelCalibration()
}

eyeTrackingService.stopEyeTracking()
}

func skipCurrentStep(calibrationManager: CalibrationManager) {
countdownTask?.cancel()
countdownTask = nil
isCountingDown = false
calibrationManager.skipStep()
}

func startStepCountdown(calibrationManager: CalibrationManager) {
countdownTask?.cancel()
countdownTask = nil
countdownValue = 1
isCountingDown = true

countdownTask = Task { @MainActor in
// Just 1 second countdown
try? await Task.sleep(for: .seconds(1))
if Task.isCancelled { return }

// Done counting, start collecting
isCountingDown = false
countdownValue = 0
calibrationManager.startCollectingSamples()
}
}
}

#Preview {
CalibrationOverlayView(onDismiss: {})
}
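The hand-rolled debounce in setupFaceDetectionObserver passes face-detected events through immediately but only drops to false once a face-lost event arrives outside the 0.5 s window. The same intent can be expressed with Combine operators — a sketch, not code from this commit, and not byte-for-byte equivalent since .debounce waits for the stream to go quiet:

import Combine
import Foundation

// Sketch: forward `true` immediately, accept `false` only after it has been
// stable for 0.5 s. `faceDetected` stands in for the service's @Published
// property observed above.
func stableFaceSignal(from faceDetected: AnyPublisher<Bool, Never>) -> AnyPublisher<Bool, Never> {
    let immediateTrue = faceDetected.filter { $0 }
    let debouncedFalse = faceDetected
        .debounce(for: .seconds(0.5), scheduler: DispatchQueue.main)
        .filter { !$0 }
    return immediateTrue
        .merge(with: debouncedFalse)
        .removeDuplicates()
        .eraseToAnyPublisher()
}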
@@ -11,21 +11,12 @@ struct EyeTrackingCalibrationView: View {
@StateObject private var calibrationManager = CalibrationManager.shared
@Environment(\.dismiss) private var dismiss

@State private var countdownValue = 3
@State private var isCountingDown = false

var body: some View {
ZStack {
// Full-screen black background
Color.black.ignoresSafeArea()

if calibrationManager.isCalibrating {
calibrationContentView
} else {
introductionScreenView
}
}
.frame(minWidth: 800, minHeight: 600)
.frame(minWidth: 600, minHeight: 500)
}

// MARK: - Introduction Screen
@@ -77,7 +68,7 @@ struct EyeTrackingCalibrationView: View {
.keyboardShortcut(.escape, modifiers: [])

Button("Start Calibration") {
startCalibration()
startFullscreenCalibration()
}
.keyboardShortcut(.return, modifiers: [])
.buttonStyle(.borderedProminent)
@@ -88,168 +79,14 @@ struct EyeTrackingCalibrationView: View {
.frame(maxWidth: 600)
}

// MARK: - Calibration Content
// MARK: - Actions

private var calibrationContentView: some View {
ZStack {
// Progress indicator at top
VStack {
progressBar
Spacer()
}
private func startFullscreenCalibration() {
dismiss()

// Calibration target
if let step = calibrationManager.currentStep {
calibrationTarget(for: step)
}

// Skip button at bottom
VStack {
Spacer()
skipButton
}
}
}

// MARK: - Progress Bar

private var progressBar: some View {
VStack(spacing: 10) {
HStack {
Text("Calibrating...")
.foregroundStyle(.white)
Spacer()
Text(calibrationManager.progressText)
.foregroundStyle(.white.opacity(0.7))
}

ProgressView(value: calibrationManager.progress)
.progressViewStyle(.linear)
.tint(.blue)
}
.padding()
.background(Color.black.opacity(0.5))
}

// MARK: - Calibration Target

@ViewBuilder
private func calibrationTarget(for step: CalibrationStep) -> some View {
let position = targetPosition(for: step)

VStack(spacing: 20) {
// Target circle with countdown
ZStack {
// Outer ring (pulsing)
Circle()
.stroke(Color.blue.opacity(0.3), lineWidth: 3)
.frame(width: 100, height: 100)
.scaleEffect(isCountingDown ? 1.2 : 1.0)
.animation(
.easeInOut(duration: 0.6).repeatForever(autoreverses: true),
value: isCountingDown)

// Inner circle
Circle()
.fill(Color.blue)
.frame(width: 60, height: 60)

// Countdown number or checkmark
if isCountingDown && countdownValue > 0 {
Text("\(countdownValue)")
.font(.system(size: 36, weight: .bold))
.foregroundStyle(.white)
} else if calibrationManager.samplesCollected > 0 {
Image(systemName: "checkmark")
.font(.system(size: 30, weight: .bold))
.foregroundStyle(.white)
}
}

// Instruction text
Text(step.instructionText)
.font(.title2)
.foregroundStyle(.white)
.padding(.horizontal, 40)
.padding(.vertical, 15)
.background(Color.black.opacity(0.7))
.cornerRadius(10)
}
.position(position)
.onAppear {
startStepCountdown()
}
}

// MARK: - Skip Button

private var skipButton: some View {
Button {
calibrationManager.skipStep()
} label: {
Text("Skip this position")
.foregroundStyle(.white)
.padding(.horizontal, 20)
.padding(.vertical, 10)
.background(Color.white.opacity(0.2))
.cornerRadius(8)
}
.padding(.bottom, 40)
}

// MARK: - Helper Methods

private func startCalibration() {
calibrationManager.startCalibration()
}

private func startStepCountdown() {
countdownValue = 3
isCountingDown = true

// Countdown 3, 2, 1
Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { timer in
if countdownValue > 0 {
countdownValue -= 1
} else {
timer.invalidate()
isCountingDown = false
}
}
}

private func targetPosition(for step: CalibrationStep) -> CGPoint {
let screenBounds = NSScreen.main?.frame ?? CGRect(x: 0, y: 0, width: 1920, height: 1080)
let width = screenBounds.width
let height = screenBounds.height

let centerX = width / 2
let centerY = height / 2
let margin: CGFloat = 150

switch step {
case .center:
return CGPoint(x: centerX, y: centerY)
case .left:
return CGPoint(x: centerX - width / 4, y: centerY)
case .right:
return CGPoint(x: centerX + width / 4, y: centerY)
case .farLeft:
return CGPoint(x: margin, y: centerY)
case .farRight:
return CGPoint(x: width - margin, y: centerY)
case .up:
return CGPoint(x: centerX, y: margin)
case .down:
return CGPoint(x: centerX, y: height - margin)
case .topLeft:
return CGPoint(x: margin, y: margin)
case .topRight:
return CGPoint(x: width - margin, y: margin)
case .bottomLeft:
return CGPoint(x: margin, y: height - margin)
case .bottomRight:
return CGPoint(x: width - margin, y: height - margin)
// Small delay to allow sheet dismissal animation
DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) {
CalibrationWindowManager.shared.showCalibrationOverlay()
}
}
}
@@ -50,7 +50,7 @@ final class SettingsWindowPresenter {

private func createWindow(settingsManager: SettingsManager, initialTab: Int) {
let window = NSWindow(
contentRect: NSRect(x: 0, y: 0, width: 700, height: 700),
contentRect: NSRect(x: 0, y: 0, width: 1000, height: 900),
styleMask: [.titled, .closable, .miniaturizable, .resizable, .fullSizeContentView],
backing: .buffered,
defer: false
@@ -85,7 +85,8 @@ final class SettingsWindowPresenter {
Task { @MainActor [weak self] in
self?.windowController = nil
self?.removeCloseObserver()
NotificationCenter.default.post(name: Notification.Name("SettingsWindowDidClose"), object: nil)
NotificationCenter.default.post(
name: Notification.Name("SettingsWindowDidClose"), object: nil)
}
}
}
@@ -125,9 +126,13 @@ struct SettingsWindowView: View {
detailView(for: selectedSection)
}
}
.onReceive(NotificationCenter.default.publisher(for: Notification.Name("SwitchToSettingsTab"))) { notification in
.onReceive(
NotificationCenter.default.publisher(
for: Notification.Name("SwitchToSettingsTab"))
) { notification in
if let tab = notification.object as? Int,
let section = SettingsSection(rawValue: tab) {
let section = SettingsSection(rawValue: tab)
{
selectedSection = section
}
}
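For context, the SwitchToSettingsTab notification handled above carries the raw tab index as its object; a sender would look something like this (illustrative, not part of the commit):

// The posted Int must match a SettingsSection rawValue,
// otherwise the onReceive handler above ignores it.
NotificationCenter.default.post(
    name: Notification.Name("SwitchToSettingsTab"),
    object: 2  // e.g. the section whose rawValue is 2
)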
@@ -27,30 +27,12 @@ struct MenuBarContentWrapper: View {
}
}

// Hover button style for menubar items
struct MenuBarButtonStyle: ButtonStyle {
func makeBody(configuration: Configuration) -> some View {
configuration.label
.background(
RoundedRectangle(cornerRadius: 6)
.fill(
configuration.isPressed
? Color.accentColor.opacity(0.2) : Color.gray.opacity(0.1)
)
.opacity(configuration.isPressed ? 1 : 0)
)
.contentShape(Rectangle())
.animation(.easeInOut(duration: 0.1), value: configuration.isPressed)
}
}

struct MenuBarHoverButtonStyle: ButtonStyle {
@Environment(\.colorScheme) private var colorScheme
@State private var isHovered = false

func makeBody(configuration: Configuration) -> some View {
configuration.label
.foregroundStyle(isHovered ? .white : .primary)
.glassEffectIfAvailable(
isHovered
? GlassStyle.regular.tint(.accentColor).interactive()
@@ -64,7 +46,6 @@ struct MenuBarHoverButtonStyle: ButtonStyle {
}
.scaleEffect(configuration.isPressed ? 0.98 : 1.0)
.animation(.easeInOut(duration: 0.1), value: isHovered)
.animation(.easeInOut(duration: 0.05), value: configuration.isPressed)
}
}
@@ -73,29 +73,17 @@ struct EnforceModeSetupView: View {

if enforceModeService.isEnforceModeEnabled {
testModeButton
calibrationSection
}

if isTestModeActive && enforceModeService.isCameraActive {
testModePreviewView
trackingConstantsView
} else {
if enforceModeService.isCameraActive && !isTestModeActive {
trackingConstantsView
} else if enforceModeService.isCameraActive && !isTestModeActive {
eyeTrackingStatusView
#if DEBUG
if showDebugView {
debugEyeTrackingView
trackingConstantsView
}
#endif
} else if enforceModeService.isEnforceModeEnabled {
cameraPendingView
}

privacyInfoView
}
}
}

Spacer()
}
@@ -333,26 +321,6 @@ struct EnforceModeSetupView: View {
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
}

private var cameraPendingView: some View {
HStack {
Image(systemName: "timer")
.font(.title2)
.foregroundStyle(.orange)

VStack(alignment: .leading, spacing: 4) {
Text("Camera Ready")
.font(.headline)
Text("Will activate 3 seconds before lookaway reminder")
.font(.caption)
.foregroundStyle(.secondary)
}

Spacer()
}
.padding()
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
}

private func statusIndicator(title: String, isActive: Bool, icon: String) -> some View {
VStack(spacing: 8) {
Image(systemName: icon)
@@ -18,7 +18,7 @@ final class VideoGazeTests: XCTestCase {
logLines.append(message)
}

/// Process the outer video and log gaze detection results
/// Process the outer video (looking away from screen) - should detect "looking away"
func testOuterVideoGazeDetection() async throws {
logLines = []

@@ -27,10 +27,19 @@ final class VideoGazeTests: XCTestCase {
XCTFail("Video file not found at: \(projectPath)")
return
}
try await processVideo(at: URL(fileURLWithPath: projectPath))
let stats = try await processVideo(at: URL(fileURLWithPath: projectPath), expectLookingAway: true)

// For outer video, most frames should detect gaze outside center
let nonCenterRatio = Double(stats.nonCenterFrames) / Double(max(1, stats.pupilDetectedFrames))
log("🎯 OUTER video: \(String(format: "%.1f%%", nonCenterRatio * 100)) frames detected as non-center (expected: >50%)")
log(" H-range: \(String(format: "%.3f", stats.minH)) to \(String(format: "%.3f", stats.maxH))")
log(" V-range: \(String(format: "%.3f", stats.minV)) to \(String(format: "%.3f", stats.maxV))")

// At least 50% should be detected as non-center when looking away
XCTAssertGreaterThan(nonCenterRatio, 0.5, "Looking away video should have >50% non-center detections. Log:\n\(logLines.joined(separator: "\n"))")
}

/// Process the inner video and log gaze detection results
/// Process the inner video (looking at screen) - should detect "looking at screen"
func testInnerVideoGazeDetection() async throws {
logLines = []

@@ -39,12 +48,36 @@ final class VideoGazeTests: XCTestCase {
XCTFail("Video file not found at: \(projectPath)")
return
}
try await processVideo(at: URL(fileURLWithPath: projectPath))
let stats = try await processVideo(at: URL(fileURLWithPath: projectPath), expectLookingAway: false)

// For inner video, most frames should detect gaze at center
let centerRatio = Double(stats.centerFrames) / Double(max(1, stats.pupilDetectedFrames))
log("🎯 INNER video: \(String(format: "%.1f%%", centerRatio * 100)) frames detected as center (expected: >50%)")
log(" H-range: \(String(format: "%.3f", stats.minH)) to \(String(format: "%.3f", stats.maxH))")
log(" V-range: \(String(format: "%.3f", stats.minV)) to \(String(format: "%.3f", stats.maxV))")

// At least 50% should be detected as center when looking at screen
XCTAssertGreaterThan(centerRatio, 0.5, "Looking at screen video should have >50% center detections. Log:\n\(logLines.joined(separator: "\n"))")
}

private func processVideo(at url: URL) async throws {
struct VideoStats {
var totalFrames = 0
var faceDetectedFrames = 0
var pupilDetectedFrames = 0
var centerFrames = 0
var nonCenterFrames = 0
var minH = Double.greatestFiniteMagnitude
var maxH = -Double.greatestFiniteMagnitude
var minV = Double.greatestFiniteMagnitude
var maxV = -Double.greatestFiniteMagnitude
}

private func processVideo(at url: URL, expectLookingAway: Bool) async throws -> VideoStats {
var stats = VideoStats()

log("\n" + String(repeating: "=", count: 60))
log("Processing video: \(url.lastPathComponent)")
log("Expected behavior: \(expectLookingAway ? "LOOKING AWAY (non-center)" : "LOOKING AT SCREEN (center)")")
log(String(repeating: "=", count: 60))

let asset = AVURLAsset(url: url)
@@ -54,7 +87,7 @@ final class VideoGazeTests: XCTestCase {

guard let track = try await asset.loadTracks(withMediaType: .video).first else {
XCTFail("No video track found")
return
return stats
}

let size = try await track.load(.naturalSize)
@@ -83,10 +116,6 @@ final class VideoGazeTests: XCTestCase {
PupilDetector.frameSkipCount = 1
defer { PupilDetector.frameSkipCount = originalFrameSkip }

var totalFrames = 0
var faceDetectedFrames = 0
var pupilDetectedFrames = 0

while let sampleBuffer = trackOutput.copyNextSampleBuffer() {
defer {
frameIndex += 1
@@ -98,7 +127,7 @@ final class VideoGazeTests: XCTestCase {
continue
}

totalFrames += 1
stats.totalFrames += 1

guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
continue
@@ -128,7 +157,7 @@ final class VideoGazeTests: XCTestCase {
continue
}

faceDetectedFrames += 1
stats.faceDetectedFrames += 1

let imageSize = CGSize(
width: CVPixelBufferGetWidth(pixelBuffer),
@@ -165,10 +194,22 @@ final class VideoGazeTests: XCTestCase {

if let lh = leftHRatio, let rh = rightHRatio,
let lv = leftVRatio, let rv = rightVRatio {
pupilDetectedFrames += 1
stats.pupilDetectedFrames += 1
let avgH = (lh + rh) / 2.0
let avgV = (lv + rv) / 2.0

// Track min/max ranges
stats.minH = min(stats.minH, avgH)
stats.maxH = max(stats.maxH, avgH)
stats.minV = min(stats.minV, avgV)
stats.maxV = max(stats.maxV, avgV)

let direction = GazeDirection.from(horizontal: avgH, vertical: avgV)
if direction == .center {
stats.centerFrames += 1
} else {
stats.nonCenterFrames += 1
}
log(String(format: "%5d | %5.1fs | YES | %.2f / %.2f | %.2f / %.2f | %@ %@",
frameIndex, timeSeconds, lh, rh, lv, rv, direction.rawValue, String(describing: direction)))
} else {
@@ -177,8 +218,11 @@ final class VideoGazeTests: XCTestCase {
}

log(String(repeating: "=", count: 75))
log("Summary: \(totalFrames) frames sampled, \(faceDetectedFrames) with face, \(pupilDetectedFrames) with pupils")
log("Summary: \(stats.totalFrames) frames sampled, \(stats.faceDetectedFrames) with face, \(stats.pupilDetectedFrames) with pupils")
log("Center frames: \(stats.centerFrames), Non-center: \(stats.nonCenterFrames)")
log("Processing complete\n")

return stats
}

private func calculateHorizontalRatio(pupilPosition: PupilPosition, eyeRegion: EyeRegion) -> Double {