diff --git a/Gaze/Constants/AdaptiveLayout.swift b/Gaze/Constants/AdaptiveLayout.swift
index fac09f0..dff916c 100644
--- a/Gaze/Constants/AdaptiveLayout.swift
+++ b/Gaze/Constants/AdaptiveLayout.swift
@@ -2,7 +2,7 @@
 //  AdaptiveLayout.swift
 //  Gaze
 //
-//  Created by Claude on 1/19/26.
+//  Created by Mike Freno on 1/19/26.
 //
 
 import SwiftUI
@@ -13,29 +13,29 @@ enum AdaptiveLayout {
     enum Window {
         static let minWidth: CGFloat = 700
         #if APPSTORE
-        static let minHeight: CGFloat = 500
+            static let minHeight: CGFloat = 500
         #else
-        static let minHeight: CGFloat = 600
+            static let minHeight: CGFloat = 600
         #endif
-        
+
         static let defaultWidth: CGFloat = 900
         #if APPSTORE
-        static let defaultHeight: CGFloat = 650
+            static let defaultHeight: CGFloat = 650
         #else
-        static let defaultHeight: CGFloat = 800
+            static let defaultHeight: CGFloat = 800
         #endif
     }
-    
+
     /// Content area constraints
     enum Content {
         /// Maximum width for content cards/sections
         static let maxWidth: CGFloat = 560
-        /// Minimum width for content cards/sections 
+        /// Minimum width for content cards/sections
        static let minWidth: CGFloat = 400
        /// Ideal width for onboarding/welcome cards
        static let idealCardWidth: CGFloat = 520
    }
-    
+
    /// Font sizes that scale based on available space
    enum Font {
        static let heroIcon: CGFloat = 60
@@ -45,7 +45,7 @@ enum AdaptiveLayout {
         static let cardIcon: CGFloat = 32
         static let cardIconSmall: CGFloat = 28
     }
-    
+
     /// Spacing values
     enum Spacing {
         static let standard: CGFloat = 20
@@ -53,7 +53,7 @@ enum AdaptiveLayout {
         static let section: CGFloat = 30
         static let sectionCompact: CGFloat = 20
     }
-    
+
     /// Card dimensions for swipeable cards
     enum Card {
         static let maxWidth: CGFloat = 520
@@ -81,11 +81,11 @@ extension EnvironmentValues {
 struct AdaptiveContainerModifier: ViewModifier {
     @State private var isCompact = false
     let compactThreshold: CGFloat
-    
+
     init(compactThreshold: CGFloat = 600) {
         self.compactThreshold = compactThreshold
     }
-    
+
     func body(content: Content) -> some View {
         GeometryReader { geometry in
             content
diff --git a/Gaze/Constants/TestingEnvironment.swift b/Gaze/Constants/TestingEnvironment.swift
index ae42e0a..bd50cee 100644
--- a/Gaze/Constants/TestingEnvironment.swift
+++ b/Gaze/Constants/TestingEnvironment.swift
@@ -2,7 +2,7 @@
 //  TestingEnvironment.swift
 //  Gaze
 //
-//  Created by OpenCode on 1/13/26.
+//  Created by Mike Freno on 1/13/26.
 //
 
 import Foundation
@@ -13,26 +13,27 @@ enum TestingEnvironment {
     static var isUITesting: Bool {
         return ProcessInfo.processInfo.arguments.contains("--ui-testing")
     }
-    
+
     /// Check if app should skip onboarding
     static var shouldSkipOnboarding: Bool {
         return ProcessInfo.processInfo.arguments.contains("--skip-onboarding")
     }
-    
+
     /// Check if app should reset onboarding
     static var shouldResetOnboarding: Bool {
         return ProcessInfo.processInfo.arguments.contains("--reset-onboarding")
     }
-    
+
     /// Check if running in any test mode (unit tests or UI tests)
     static var isAnyTestMode: Bool {
-        return isUITesting || ProcessInfo.processInfo.environment["XCTestConfigurationFilePath"] != nil
+        return isUITesting
+            || ProcessInfo.processInfo.environment["XCTestConfigurationFilePath"] != nil
     }
-    
+
     #if DEBUG
-    /// Check if dev triggers should be visible
-    static var shouldShowDevTriggers: Bool {
-        return isUITesting || isAnyTestMode
-    }
+        /// Check if dev triggers should be visible
+        static var shouldShowDevTriggers: Bool {
+            return isUITesting || isAnyTestMode
+        }
     #endif
 }
diff --git a/Gaze/Services/Permissions/ScreenCapturePermissionManager.swift b/Gaze/Services/Permissions/ScreenCapturePermissionManager.swift
index 4018aa8..75b8c3c 100644
--- a/Gaze/Services/Permissions/ScreenCapturePermissionManager.swift
+++ b/Gaze/Services/Permissions/ScreenCapturePermissionManager.swift
@@ -2,7 +2,7 @@
 //  ScreenCapturePermissionManager.swift
 //  Gaze
 //
-//  Created by ChatGPT on 1/14/26.
+//  Created by Mike Freno on 1/14/26.
 //
 
 import AppKit
diff --git a/Gaze/Views/Components/GazeOverlayView.swift b/Gaze/Views/Components/GazeOverlayView.swift
index da038c7..f138cfb 100644
--- a/Gaze/Views/Components/GazeOverlayView.swift
+++ b/Gaze/Views/Components/GazeOverlayView.swift
@@ -2,7 +2,7 @@
 //  GazeOverlayView.swift
 //  Gaze
 //
-//  Created by Claude on 1/16/26.
+//  Created by Mike Freno on 1/16/26.
 //
 
 import SwiftUI
diff --git a/Gaze/Views/Components/PupilOverlayView.swift b/Gaze/Views/Components/PupilOverlayView.swift
index 8945eb1..4c0bd29 100644
--- a/Gaze/Views/Components/PupilOverlayView.swift
+++ b/Gaze/Views/Components/PupilOverlayView.swift
@@ -2,7 +2,7 @@
 //  PupilOverlayView.swift
 //  Gaze
 //
-//  Created by Claude on 1/16/26.
+//  Created by Mike Freno on 1/16/26.
 //
 
 import SwiftUI
diff --git a/Gaze/Views/Reminders/UserTimerOverlayReminderView.swift b/Gaze/Views/Reminders/UserTimerOverlayReminderView.swift
index 690774a..2fed099 100644
--- a/Gaze/Views/Reminders/UserTimerOverlayReminderView.swift
+++ b/Gaze/Views/Reminders/UserTimerOverlayReminderView.swift
@@ -2,7 +2,7 @@
 //  UserTimerOverlayReminderView.swift
 //  Gaze
 //
-//  Created by OpenCode on 1/11/26.
+//  Created by Mike Freno on 1/11/26.
 //
 
 import AppKit
diff --git a/Gaze/Views/Reminders/UserTimerReminderView.swift b/Gaze/Views/Reminders/UserTimerReminderView.swift
index 960cd4b..abc8236 100644
--- a/Gaze/Views/Reminders/UserTimerReminderView.swift
+++ b/Gaze/Views/Reminders/UserTimerReminderView.swift
@@ -2,7 +2,7 @@
 //  UserTimerReminderView.swift
 //  Gaze
 //
-//  Created by OpenCode on 1/11/26.
+//  Created by Mike Freno on 1/11/26.
 //
 
 import SwiftUI
diff --git a/GazeTests/Services/PupilDetectorTests.swift b/GazeTests/Services/PupilDetectorTests.swift
index 7c545eb..7cee871 100644
--- a/GazeTests/Services/PupilDetectorTests.swift
+++ b/GazeTests/Services/PupilDetectorTests.swift
@@ -2,41 +2,42 @@
 //  PupilDetectorTests.swift
 //  GazeTests
 //
-//  Created by Claude on 1/16/26.
+//  Created by Mike Freno on 1/16/26.
 //
 
-import XCTest
 import CoreVideo
 import Vision
+import XCTest
+
 @testable import Gaze
 
 final class PupilDetectorTests: XCTestCase {
-    
+
     override func setUp() async throws {
         // Reset the detector state
         PupilDetector.cleanup()
     }
-    
+
     func testCreateCGImageFromData() throws {
         // Test basic image creation
         let width = 50
         let height = 50
         var pixels = [UInt8](repeating: 128, count: width * height)
-        
+
         // Add some dark pixels for a "pupil"
         for y in 20..<30 {
             for x in 20..<30 {
                 pixels[y * width + x] = 10  // Very dark
             }
         }
-        
+
         // Save test image to verify
         let pixelData = Data(pixels)
         guard let provider = CGDataProvider(data: pixelData as CFData) else {
             XCTFail("Failed to create CGDataProvider")
             return
         }
-        
+
         let cgImage = CGImage(
             width: width,
             height: height,
@@ -50,18 +51,18 @@ final class PupilDetectorTests: XCTestCase {
             shouldInterpolate: false,
             intent: .defaultIntent
         )
-        
+
         XCTAssertNotNil(cgImage, "Should create CGImage from pixel data")
     }
-    
+
     func testImageProcessingWithDarkPixels() throws {
         // Test that imageProcessingOptimized produces dark pixels
         let width = 60
         let height = 40
-        
+
         // Create input with a dark circle (simulating pupil)
         var input = [UInt8](repeating: 200, count: width * height)  // Light background (like eye white)
-        
+
         // Add a dark ellipse in center (pupil)
         let centerX = width / 2
         let centerY = height / 2
@@ -74,10 +75,10 @@ final class PupilDetectorTests: XCTestCase {
             }
         }
     }
-        
+
         var output = [UInt8](repeating: 255, count: width * height)
         let threshold = 50  // Same as default
-        
+
         // Call the actual processing function
         input.withUnsafeMutableBufferPointer { inputPtr in
             output.withUnsafeMutableBufferPointer { outputPtr in
@@ -85,7 +86,7 @@ final class PupilDetectorTests: XCTestCase {
                 // But we can verify by saving input for inspection
             }
         }
-        
+
         // Save the input for manual inspection
         let inputData = Data(input)
         let url = URL(fileURLWithPath: "/Users/mike/gaze/images/test_input_synthetic.png")
@@ -103,28 +104,30 @@ final class PupilDetectorTests: XCTestCase {
                 shouldInterpolate: false,
                 intent: .defaultIntent
             ) {
-                if let dest = CGImageDestinationCreateWithURL(url as CFURL, "public.png" as CFString, 1, nil) {
+                if let dest = CGImageDestinationCreateWithURL(
+                    url as CFURL, "public.png" as CFString, 1, nil)
+                {
                     CGImageDestinationAddImage(dest, cgImage, nil)
                     CGImageDestinationFinalize(dest)
                     print("💾 Saved synthetic test input to: \(url.path)")
                 }
             }
         }
-        
+
         // Count dark pixels in input
         let darkCount = input.filter { $0 < 50 }.count
         print("📊 Input has \(darkCount) dark pixels (< 50)")
         XCTAssertGreaterThan(darkCount, 0, "Input should have dark pixels for pupil")
     }
-    
+
     func testFindPupilFromContoursWithSyntheticData() throws {
         // Create synthetic binary image with a dark region
         let width = 60
         let height = 40
-        
+
         // All white except a dark blob
         var binaryData = [UInt8](repeating: 255, count: width * height)
-        
+
         // Add dark region (0 = dark/pupil)
         let centerX = 30
         let centerY = 20
@@ -139,9 +142,9 @@ final class PupilDetectorTests: XCTestCase {
             }
         }
     }
-        
+
         print("📊 Created synthetic image with \(darkPixelCount) dark pixels")
-        
+
         // Save for inspection
         let binaryDataObj = Data(binaryData)
         let url = URL(fileURLWithPath: "/Users/mike/gaze/images/test_binary_synthetic.png")
@@ -159,14 +162,16 @@ final class PupilDetectorTests: XCTestCase {
                 shouldInterpolate: false,
                 intent: .defaultIntent
             ) {
-                if let dest = CGImageDestinationCreateWithURL(url as CFURL, "public.png" as CFString, 1, nil) {
+                if let dest = CGImageDestinationCreateWithURL(
+                    url as CFURL, "public.png" as CFString, 1, nil)
+                {
                     CGImageDestinationAddImage(dest, cgImage, nil)
                     CGImageDestinationFinalize(dest)
                     print("💾 Saved synthetic binary image to: \(url.path)")
                 }
             }
         }
-        
+
         XCTAssertGreaterThan(darkPixelCount, 10, "Should have enough dark pixels")
     }
 }
diff --git a/GazeTests/VideoGazeTests.swift b/GazeTests/VideoGazeTests.swift
index ca37583..6eb50af 100644
--- a/GazeTests/VideoGazeTests.swift
+++ b/GazeTests/VideoGazeTests.swift
@@ -2,77 +2,103 @@
 //  VideoGazeTests.swift
 //  GazeTests
 //
-//  Created by Claude on 1/16/26.
+//  Created by Mike Freno on 1/16/26.
 //
 
-import XCTest
 import AVFoundation
 import Vision
+import XCTest
+
 @testable import Gaze
 
 final class VideoGazeTests: XCTestCase {
-    
+
     var logLines: [String] = []
-    
+
     private func log(_ message: String) {
         logLines.append(message)
     }
-    
+
     private func attachLogs() {
         let attachment = XCTAttachment(string: logLines.joined(separator: "\n"))
         attachment.name = "Test Logs"
         attachment.lifetime = .keepAlways
         add(attachment)
     }
-    
+
     /// Process the outer video (looking away from screen) - should detect "looking away"
     func testOuterVideoGazeDetection() async throws {
         logLines = []
-        
+
         let projectPath = "/Users/mike/Code/Gaze/GazeTests/video-test-outer.mp4"
         guard FileManager.default.fileExists(atPath: projectPath) else {
             XCTFail("Video file not found at: \(projectPath)")
             return
         }
-        let stats = try await processVideo(at: URL(fileURLWithPath: projectPath), expectLookingAway: true)
-        
+        let stats = try await processVideo(
+            at: URL(fileURLWithPath: projectPath), expectLookingAway: true)
+
         // For outer video, most frames should detect gaze outside center
-        let nonCenterRatio = Double(stats.nonCenterFrames) / Double(max(1, stats.pupilDetectedFrames))
-        log("🎯 OUTER video: \(String(format: "%.1f%%", nonCenterRatio * 100)) frames detected as non-center (expected: >50%)")
-        log("   H-range: \(String(format: "%.3f", stats.minH)) to \(String(format: "%.3f", stats.maxH))")
-        log("   V-range: \(String(format: "%.3f", stats.minV)) to \(String(format: "%.3f", stats.maxV))")
-        log("   Face width: \(String(format: "%.3f", stats.avgFaceWidth)) (range: \(String(format: "%.3f", stats.minFaceWidth))-\(String(format: "%.3f", stats.maxFaceWidth)))")
-        
+        let nonCenterRatio =
+            Double(stats.nonCenterFrames) / Double(max(1, stats.pupilDetectedFrames))
+        log(
+            "🎯 OUTER video: \(String(format: "%.1f%%", nonCenterRatio * 100)) frames detected as non-center (expected: >50%)"
+        )
+        log(
+            "   H-range: \(String(format: "%.3f", stats.minH)) to \(String(format: "%.3f", stats.maxH))"
+        )
+        log(
+            "   V-range: \(String(format: "%.3f", stats.minV)) to \(String(format: "%.3f", stats.maxV))"
+        )
+        log(
+            "   Face width: \(String(format: "%.3f", stats.avgFaceWidth)) (range: \(String(format: "%.3f", stats.minFaceWidth))-\(String(format: "%.3f", stats.maxFaceWidth)))"
+        )
+
         attachLogs()
-        
+
         // At least 50% should be detected as non-center when looking away
-        XCTAssertGreaterThan(nonCenterRatio, 0.5, "Looking away video should have >50% non-center detections. Log:\n\(logLines.joined(separator: "\n"))")
+        XCTAssertGreaterThan(
+            nonCenterRatio, 0.5,
+            "Looking away video should have >50% non-center detections. Log:\n\(logLines.joined(separator: "\n"))"
+        )
     }
-    
+
     /// Process the inner video (looking at screen) - should detect "looking at screen"
     func testInnerVideoGazeDetection() async throws {
         logLines = []
-        
+
         let projectPath = "/Users/mike/Code/Gaze/GazeTests/video-test-inner.mp4"
         guard FileManager.default.fileExists(atPath: projectPath) else {
             XCTFail("Video file not found at: \(projectPath)")
             return
         }
-        let stats = try await processVideo(at: URL(fileURLWithPath: projectPath), expectLookingAway: false)
-        
+        let stats = try await processVideo(
+            at: URL(fileURLWithPath: projectPath), expectLookingAway: false)
+
         // For inner video, most frames should detect gaze at center
         let centerRatio = Double(stats.centerFrames) / Double(max(1, stats.pupilDetectedFrames))
-        log("🎯 INNER video: \(String(format: "%.1f%%", centerRatio * 100)) frames detected as center (expected: >50%)")
-        log("   H-range: \(String(format: "%.3f", stats.minH)) to \(String(format: "%.3f", stats.maxH))")
-        log("   V-range: \(String(format: "%.3f", stats.minV)) to \(String(format: "%.3f", stats.maxV))")
-        log("   Face width: \(String(format: "%.3f", stats.avgFaceWidth)) (range: \(String(format: "%.3f", stats.minFaceWidth))-\(String(format: "%.3f", stats.maxFaceWidth)))")
-        
+        log(
+            "🎯 INNER video: \(String(format: "%.1f%%", centerRatio * 100)) frames detected as center (expected: >50%)"
+        )
+        log(
+            "   H-range: \(String(format: "%.3f", stats.minH)) to \(String(format: "%.3f", stats.maxH))"
+        )
+        log(
+            "   V-range: \(String(format: "%.3f", stats.minV)) to \(String(format: "%.3f", stats.maxV))"
+        )
+        log(
+            "   Face width: \(String(format: "%.3f", stats.avgFaceWidth)) (range: \(String(format: "%.3f", stats.minFaceWidth))-\(String(format: "%.3f", stats.maxFaceWidth)))"
+        )
+
         attachLogs()
-        
+
         // At least 50% should be detected as center when looking at screen
-        XCTAssertGreaterThan(centerRatio, 0.5, "Looking at screen video should have >50% center detections. Log:\n\(logLines.joined(separator: "\n"))")
+        XCTAssertGreaterThan(
+            centerRatio, 0.5,
+            "Looking at screen video should have >50% center detections. Log:\n\(logLines.joined(separator: "\n"))"
+        )
     }
-    
+
     struct VideoStats {
         var totalFrames = 0
         var faceDetectedFrames = 0
@@ -87,34 +113,38 @@ final class VideoGazeTests: XCTestCase {
         var maxFaceWidth = -Double.greatestFiniteMagnitude
         var totalFaceWidth = 0.0
         var faceWidthCount = 0
-        
+
         var avgFaceWidth: Double {
             faceWidthCount > 0 ? totalFaceWidth / Double(faceWidthCount) : 0
         }
     }
-    
+
     private func processVideo(at url: URL, expectLookingAway: Bool) async throws -> VideoStats {
         var stats = VideoStats()
-        
+
         log("\n" + String(repeating: "=", count: 60))
         log("Processing video: \(url.lastPathComponent)")
-        log("Expected behavior: \(expectLookingAway ? "LOOKING AWAY (non-center)" : "LOOKING AT SCREEN (center)")")
+        log(
+            "Expected behavior: \(expectLookingAway ? "LOOKING AWAY (non-center)" : "LOOKING AT SCREEN (center)")"
+        )
         log(String(repeating: "=", count: 60))
-        
+
         let asset = AVURLAsset(url: url)
         let duration = try await asset.load(.duration)
         let durationSeconds = CMTimeGetSeconds(duration)
         log("Duration: \(String(format: "%.2f", durationSeconds)) seconds")
-        
+
         guard let track = try await asset.loadTracks(withMediaType: .video).first else {
             XCTFail("No video track found")
             return stats
         }
-        
+
         let size = try await track.load(.naturalSize)
         let frameRate = try await track.load(.nominalFrameRate)
-        log("Size: \(Int(size.width))x\(Int(size.height)), FPS: \(String(format: "%.1f", frameRate))")
-        
+        log(
+            "Size: \(Int(size.width))x\(Int(size.height)), FPS: \(String(format: "%.1f", frameRate))"
+        )
+
         let reader = try AVAssetReader(asset: asset)
         let outputSettings: [String: Any] = [
             kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
@@ -122,82 +152,86 @@ final class VideoGazeTests: XCTestCase {
         ]
         let trackOutput = AVAssetReaderTrackOutput(track: track, outputSettings: outputSettings)
         reader.add(trackOutput)
         reader.startReading()
-        
+
         var frameIndex = 0
-        let sampleInterval = max(1, Int(frameRate / 2)) // Sample ~2 frames per second
-        
+        let sampleInterval = max(1, Int(frameRate / 2))  // Sample ~2 frames per second
+
         log("\nFrame | Time | Face | H-Ratio L/R | V-Ratio L/R | Direction")
         log(String(repeating: "-", count: 75))
-        
+
         // Reset calibration for fresh test
         PupilDetector.calibration.reset()
-        
+
         // Disable frame skipping for video testing
         let originalFrameSkip = PupilDetector.frameSkipCount
         PupilDetector.frameSkipCount = 1
         defer { PupilDetector.frameSkipCount = originalFrameSkip }
-        
+
         while let sampleBuffer = trackOutput.copyNextSampleBuffer() {
-            defer { 
-                frameIndex += 1 
+            defer {
+                frameIndex += 1
                 PupilDetector.advanceFrame()
             }
-            
+
             // Only process every Nth frame
             if frameIndex % sampleInterval != 0 {
                 continue
             }
-            
+
             stats.totalFrames += 1
-            
+
             guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
                 continue
             }
-            
+
             let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
             let timeSeconds = CMTimeGetSeconds(timestamp)
-            
+
             // Run face detection
             let request = VNDetectFaceLandmarksRequest()
             request.revision = VNDetectFaceLandmarksRequestRevision3
-            
+
             let handler = VNImageRequestHandler(
                 cvPixelBuffer: pixelBuffer,
                 orientation: .leftMirrored,
                 options: [:]
             )
-            
+
             try handler.perform([request])
-            
+
             guard let observations = request.results, !observations.isEmpty,
-                  let face = observations.first,
-                  let landmarks = face.landmarks,
-                  let leftEye = landmarks.leftEye,
-                  let rightEye = landmarks.rightEye else {
-                log(String(format: "%5d | %5.1fs | NO | - | - | -", frameIndex, timeSeconds))
+                let face = observations.first,
+                let landmarks = face.landmarks,
+                let leftEye = landmarks.leftEye,
+                let rightEye = landmarks.rightEye
+            else {
+                log(
+                    String(
+                        format: "%5d | %5.1fs | NO | - | - | -",
+                        frameIndex, timeSeconds))
                 continue
             }
-            
+
             stats.faceDetectedFrames += 1
-            
+
             // Track face width (bounding box width as ratio of image width)
             let faceWidth = face.boundingBox.width
             stats.minFaceWidth = min(stats.minFaceWidth, faceWidth)
             stats.maxFaceWidth = max(stats.maxFaceWidth, faceWidth)
             stats.totalFaceWidth += faceWidth
             stats.faceWidthCount += 1
-            
+
             let imageSize = CGSize(
                 width: CVPixelBufferGetWidth(pixelBuffer),
                 height: CVPixelBufferGetHeight(pixelBuffer)
             )
-            
+
             // Detect pupils
             var leftHRatio: Double?
             var rightHRatio: Double?
             var leftVRatio: Double?
             var rightVRatio: Double?
-            
+
             if let leftResult = PupilDetector.detectPupil(
                 in: pixelBuffer,
                 eyeLandmarks: leftEye,
@@ -205,10 +239,12 @@ final class VideoGazeTests: XCTestCase {
                 imageSize: imageSize,
                 side: 0
             ) {
-                leftHRatio = calculateHorizontalRatio(pupilPosition: leftResult.pupilPosition, eyeRegion: leftResult.eyeRegion)
-                leftVRatio = calculateVerticalRatio(pupilPosition: leftResult.pupilPosition, eyeRegion: leftResult.eyeRegion)
+                leftHRatio = calculateHorizontalRatio(
+                    pupilPosition: leftResult.pupilPosition, eyeRegion: leftResult.eyeRegion)
+                leftVRatio = calculateVerticalRatio(
+                    pupilPosition: leftResult.pupilPosition, eyeRegion: leftResult.eyeRegion)
             }
-            
+
             if let rightResult = PupilDetector.detectPupil(
                 in: pixelBuffer,
                 eyeLandmarks: rightEye,
@@ -216,62 +252,79 @@ final class VideoGazeTests: XCTestCase {
                 imageSize: imageSize,
                 side: 1
             ) {
-                rightHRatio = calculateHorizontalRatio(pupilPosition: rightResult.pupilPosition, eyeRegion: rightResult.eyeRegion)
-                rightVRatio = calculateVerticalRatio(pupilPosition: rightResult.pupilPosition, eyeRegion: rightResult.eyeRegion)
+                rightHRatio = calculateHorizontalRatio(
+                    pupilPosition: rightResult.pupilPosition, eyeRegion: rightResult.eyeRegion)
+                rightVRatio = calculateVerticalRatio(
+                    pupilPosition: rightResult.pupilPosition, eyeRegion: rightResult.eyeRegion)
             }
-            
+
             if let lh = leftHRatio, let rh = rightHRatio,
-               let lv = leftVRatio, let rv = rightVRatio {
+                let lv = leftVRatio, let rv = rightVRatio
+            {
                 stats.pupilDetectedFrames += 1
                 let avgH = (lh + rh) / 2.0
                 let avgV = (lv + rv) / 2.0
-                
+
                 // Track min/max ranges
                 stats.minH = min(stats.minH, avgH)
                 stats.maxH = max(stats.maxH, avgH)
                 stats.minV = min(stats.minV, avgV)
                 stats.maxV = max(stats.maxV, avgV)
-                
+
                 let direction = GazeDirection.from(horizontal: avgH, vertical: avgV)
                 if direction == .center {
                     stats.centerFrames += 1
                 } else {
                     stats.nonCenterFrames += 1
                 }
-                log(String(format: "%5d | %5.1fs | YES | %.2f / %.2f | %.2f / %.2f | %@ %@",
-                           frameIndex, timeSeconds, lh, rh, lv, rv, direction.rawValue, String(describing: direction)))
+                log(
+                    String(
+                        format: "%5d | %5.1fs | YES | %.2f / %.2f | %.2f / %.2f | %@ %@",
+                        frameIndex, timeSeconds, lh, rh, lv, rv, direction.rawValue,
+                        String(describing: direction)))
             } else {
-                log(String(format: "%5d | %5.1fs | YES | PUPIL FAIL | PUPIL FAIL | -", frameIndex, timeSeconds))
+                log(
+                    String(
+                        format: "%5d | %5.1fs | YES | PUPIL FAIL | PUPIL FAIL | -",
+                        frameIndex, timeSeconds))
             }
         }
-        
+
         log(String(repeating: "=", count: 75))
-        log("Summary: \(stats.totalFrames) frames sampled, \(stats.faceDetectedFrames) with face, \(stats.pupilDetectedFrames) with pupils")
+        log(
+            "Summary: \(stats.totalFrames) frames sampled, \(stats.faceDetectedFrames) with face, \(stats.pupilDetectedFrames) with pupils"
+        )
         log("Center frames: \(stats.centerFrames), Non-center: \(stats.nonCenterFrames)")
-        log("Face width: avg=\(String(format: "%.3f", stats.avgFaceWidth)), range=\(String(format: "%.3f", stats.minFaceWidth)) to \(String(format: "%.3f", stats.maxFaceWidth))")
+        log(
+            "Face width: avg=\(String(format: "%.3f", stats.avgFaceWidth)), range=\(String(format: "%.3f", stats.minFaceWidth)) to \(String(format: "%.3f", stats.maxFaceWidth))"
+        )
         log("Processing complete\n")
-        
+
         return stats
     }
-    
-    private func calculateHorizontalRatio(pupilPosition: PupilPosition, eyeRegion: EyeRegion) -> Double {
+
+    private func calculateHorizontalRatio(pupilPosition: PupilPosition, eyeRegion: EyeRegion)
+        -> Double
+    {
         // pupilPosition.y controls horizontal gaze due to image orientation
         let pupilY = Double(pupilPosition.y)
         let eyeHeight = Double(eyeRegion.frame.height)
-        
+
         guard eyeHeight > 0 else { return 0.5 }
-        
+
         let ratio = pupilY / eyeHeight
         return max(0.0, min(1.0, ratio))
     }
-    
-    private func calculateVerticalRatio(pupilPosition: PupilPosition, eyeRegion: EyeRegion) -> Double {
+
+    private func calculateVerticalRatio(pupilPosition: PupilPosition, eyeRegion: EyeRegion)
+        -> Double
+    {
         // pupilPosition.x controls vertical gaze due to image orientation
         let pupilX = Double(pupilPosition.x)
         let eyeWidth = Double(eyeRegion.frame.width)
-        
+
         guard eyeWidth > 0 else { return 0.5 }
-        
+
         let ratio = pupilX / eyeWidth
         return max(0.0, min(1.0, ratio))
     }