feat: debugging overlay
@@ -69,6 +69,24 @@ class AppDelegate: NSObject, NSApplicationDelegate, ObservableObject {
         if settingsManager.settings.hasCompletedOnboarding {
             startTimers()
         }
+
+        // DEBUG: Auto-start eye tracking test mode if launch argument is present
+        #if DEBUG
+        if CommandLine.arguments.contains("--debug-eye-tracking") {
+            NSLog("🔬 DEBUG: Auto-starting eye tracking test mode")
+            Task { @MainActor in
+                // Enable enforce mode if not already
+                if !settingsManager.settings.enforcementMode {
+                    settingsManager.settings.enforcementMode = true
+                }
+                // Start test mode after a brief delay
+                try? await Task.sleep(nanoseconds: 1_000_000_000) // 1 second
+                NSLog("🔬 DEBUG: Starting test mode now...")
+                await EnforceModeService.shared.startTestMode()
+                NSLog("🔬 DEBUG: Test mode started")
+            }
+        }
+        #endif
     }
 
         // Note: Smart mode setup is now handled by ServiceContainer
@@ -22,6 +22,8 @@ class EyeTrackingService: NSObject, ObservableObject {
     // Debug properties for UI display
     @Published var debugLeftPupilRatio: Double?
     @Published var debugRightPupilRatio: Double?
+    @Published var debugLeftVerticalRatio: Double?
+    @Published var debugRightVerticalRatio: Double?
     @Published var debugYaw: Double?
     @Published var debugPitch: Double?
     @Published var enableDebugLogging: Bool = false {
@@ -31,6 +33,25 @@ class EyeTrackingService: NSObject, ObservableObject {
         }
     }
 
+    // Computed gaze direction for UI overlay
+    var gazeDirection: GazeDirection {
+        guard let leftH = debugLeftPupilRatio,
+              let rightH = debugRightPupilRatio,
+              let leftV = debugLeftVerticalRatio,
+              let rightV = debugRightVerticalRatio else {
+            return .center
+        }
+
+        let avgHorizontal = (leftH + rightH) / 2.0
+        let avgVertical = (leftV + rightV) / 2.0
+
+        return GazeDirection.from(horizontal: avgHorizontal, vertical: avgVertical)
+    }
+
+    var isInFrame: Bool {
+        faceDetected
+    }
+
     // Throttle for debug logging
     private var lastDebugLogTime: Date = .distantPast
 
@@ -71,6 +92,8 @@ class EyeTrackingService: NSObject, ObservableObject {
         var userLookingAtScreen: Bool = true
         var debugLeftPupilRatio: Double?
         var debugRightPupilRatio: Double?
+        var debugLeftVerticalRatio: Double?
+        var debugRightVerticalRatio: Double?
         var debugYaw: Double?
         var debugPitch: Double?
 
@@ -80,6 +103,8 @@ class EyeTrackingService: NSObject, ObservableObject {
             userLookingAtScreen: Bool = true,
             debugLeftPupilRatio: Double? = nil,
             debugRightPupilRatio: Double? = nil,
+            debugLeftVerticalRatio: Double? = nil,
+            debugRightVerticalRatio: Double? = nil,
             debugYaw: Double? = nil,
             debugPitch: Double? = nil
         ) {
@@ -88,6 +113,8 @@ class EyeTrackingService: NSObject, ObservableObject {
             self.userLookingAtScreen = userLookingAtScreen
             self.debugLeftPupilRatio = debugLeftPupilRatio
             self.debugRightPupilRatio = debugRightPupilRatio
+            self.debugLeftVerticalRatio = debugLeftVerticalRatio
+            self.debugRightVerticalRatio = debugRightVerticalRatio
             self.debugYaw = debugYaw
             self.debugPitch = debugPitch
         }
@@ -260,6 +287,8 @@ class EyeTrackingService: NSObject, ObservableObject {
         result.userLookingAtScreen = !gazeResult.lookingAway
         result.debugLeftPupilRatio = gazeResult.leftPupilRatio
         result.debugRightPupilRatio = gazeResult.rightPupilRatio
+        result.debugLeftVerticalRatio = gazeResult.leftVerticalRatio
+        result.debugRightVerticalRatio = gazeResult.rightVerticalRatio
         result.debugYaw = gazeResult.yaw
         result.debugPitch = gazeResult.pitch
 
@@ -302,6 +331,8 @@ class EyeTrackingService: NSObject, ObservableObject {
         var lookingAway: Bool = false
         var leftPupilRatio: Double?
         var rightPupilRatio: Double?
+        var leftVerticalRatio: Double?
+        var rightVerticalRatio: Double?
         var yaw: Double?
         var pitch: Double?
 
@@ -309,12 +340,16 @@ class EyeTrackingService: NSObject, ObservableObject {
             lookingAway: Bool = false,
             leftPupilRatio: Double? = nil,
             rightPupilRatio: Double? = nil,
+            leftVerticalRatio: Double? = nil,
+            rightVerticalRatio: Double? = nil,
             yaw: Double? = nil,
             pitch: Double? = nil
         ) {
             self.lookingAway = lookingAway
             self.leftPupilRatio = leftPupilRatio
             self.rightPupilRatio = rightPupilRatio
+            self.leftVerticalRatio = leftVerticalRatio
+            self.rightVerticalRatio = rightVerticalRatio
             self.yaw = yaw
             self.pitch = pitch
         }
@@ -371,6 +406,8 @@ class EyeTrackingService: NSObject, ObservableObject {
     {
         var leftGazeRatio: Double? = nil
         var rightGazeRatio: Double? = nil
+        var leftVerticalRatio: Double? = nil
+        var rightVerticalRatio: Double? = nil
 
         // Detect left pupil (side = 0)
         if let leftResult = PupilDetector.detectPupil(
@@ -384,6 +421,10 @@ class EyeTrackingService: NSObject, ObservableObject {
                 pupilPosition: leftResult.pupilPosition,
                 eyeRegion: leftResult.eyeRegion
             )
+            leftVerticalRatio = calculateVerticalRatioSync(
+                pupilPosition: leftResult.pupilPosition,
+                eyeRegion: leftResult.eyeRegion
+            )
         }
 
         // Detect right pupil (side = 1)
@@ -398,10 +439,16 @@ class EyeTrackingService: NSObject, ObservableObject {
                 pupilPosition: rightResult.pupilPosition,
                 eyeRegion: rightResult.eyeRegion
             )
+            rightVerticalRatio = calculateVerticalRatioSync(
+                pupilPosition: rightResult.pupilPosition,
+                eyeRegion: rightResult.eyeRegion
+            )
         }
 
         result.leftPupilRatio = leftGazeRatio
         result.rightPupilRatio = rightGazeRatio
+        result.leftVerticalRatio = leftVerticalRatio
+        result.rightVerticalRatio = rightVerticalRatio
 
         // Connect to CalibrationManager on main thread
         if let leftRatio = leftGazeRatio,
@@ -448,6 +495,23 @@ class EyeTrackingService: NSObject, ObservableObject {
         return max(0.0, min(1.0, ratio))
     }
 
+    /// Non-isolated vertical gaze ratio calculation
+    /// Returns 0.0 for looking up, 1.0 for looking down, 0.5 for center
+    nonisolated private func calculateVerticalRatioSync(
+        pupilPosition: PupilPosition, eyeRegion: EyeRegion
+    ) -> Double {
+        let pupilY = Double(pupilPosition.y)
+        let eyeTop = Double(eyeRegion.frame.minY)
+        let eyeBottom = Double(eyeRegion.frame.maxY)
+        let eyeHeight = eyeBottom - eyeTop
+
+        guard eyeHeight > 0 else { return 0.5 }
+
+        // Normalize: 0.0 = top of eye region, 1.0 = bottom
+        let ratio = (pupilY - eyeTop) / eyeHeight
+        return max(0.0, min(1.0, ratio))
+    }
+
     private func detectEyesClosed(
         leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D, shouldLog: Bool
     ) -> Bool {
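
For reference, a minimal worked example of the ratio above (numbers are illustrative, not from the commit): with an eye region spanning y = 100...130 and a pupil detected at y = 109, the ratio is (109 - 100) / 30 = 0.3, which the GazeDirection thresholds added later in this diff classify as looking up (anything below 0.40).

    let eyeTop = 100.0, eyeBottom = 130.0, pupilY = 109.0
    let ratio = max(0.0, min(1.0, (pupilY - eyeTop) / (eyeBottom - eyeTop)))  // 0.3
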
@@ -660,11 +724,22 @@ class EyeTrackingService: NSObject, ObservableObject {
 }
 
 extension EyeTrackingService: AVCaptureVideoDataOutputSampleBufferDelegate {
+    // DEBUG: Frame counter for periodic logging (nonisolated for video callback)
+    private nonisolated(unsafe) static var debugFrameCount = 0
+
     nonisolated func captureOutput(
         _ output: AVCaptureOutput,
         didOutput sampleBuffer: CMSampleBuffer,
         from connection: AVCaptureConnection
     ) {
+        // DEBUG: Print every 30 frames to show we're receiving video
+        #if DEBUG
+        EyeTrackingService.debugFrameCount += 1
+        if EyeTrackingService.debugFrameCount % 30 == 0 {
+            NSLog("🎥 EyeTrackingService: Received frame %d", EyeTrackingService.debugFrameCount)
+        }
+        #endif
+
         guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
             return
         }
@@ -701,6 +776,8 @@ extension EyeTrackingService: AVCaptureVideoDataOutputSampleBufferDelegate {
             self.userLookingAtScreen = result.userLookingAtScreen
             self.debugLeftPupilRatio = result.debugLeftPupilRatio
             self.debugRightPupilRatio = result.debugRightPupilRatio
+            self.debugLeftVerticalRatio = result.debugLeftVerticalRatio
+            self.debugRightVerticalRatio = result.debugRightVerticalRatio
             self.debugYaw = result.debugYaw
             self.debugPitch = result.debugPitch
         }
@@ -58,28 +58,28 @@ final class LoggingManager {
     func debug(_ message: String, category: String = "General") {
         guard isLoggingEnabled else { return }
         let logger = Logger(subsystem: subsystem, category: category)
-        logger.debug("\(message)")
+        logger.debug("\(message, privacy: .public)")
     }
 
     /// Convenience method for info logging
     func info(_ message: String, category: String = "General") {
         guard isLoggingEnabled else { return }
         let logger = Logger(subsystem: subsystem, category: category)
-        logger.info("\(message)")
+        logger.info("\(message, privacy: .public)")
     }
 
     /// Convenience method for error logging
     func error(_ message: String, category: String = "General") {
         guard isLoggingEnabled else { return }
         let logger = Logger(subsystem: subsystem, category: category)
-        logger.error("\(message)")
+        logger.error("\(message, privacy: .public)")
     }
 
     /// Convenience method for warning logging
     func warning(_ message: String, category: String = "General") {
         guard isLoggingEnabled else { return }
         let logger = Logger(subsystem: subsystem, category: category)
-        logger.warning("\(message)")
+        logger.warning("\(message, privacy: .public)")
     }
 }
 
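
The privacy change above matters because os.Logger treats interpolated dynamic strings as private by default, so \(message) shows up as <private> in Console and log stream output; marking it privacy: .public keeps the debug messages readable. A minimal sketch (the subsystem and category strings here are illustrative):

    import os

    let logger = Logger(subsystem: "com.example.gaze", category: "General")
    let message = "left pupil ratio 0.42"
    logger.debug("\(message)")                    // dynamic string, redacted as <private> outside the debugger
    logger.debug("\(message, privacy: .public)")  // full text visible in Console / log stream
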
@@ -31,6 +31,63 @@ struct EyeRegion: Sendable {
     let origin: CGPoint
 }
 
+/// 9-point gaze direction grid
+enum GazeDirection: String, Sendable, CaseIterable {
+    case upLeft = "↖"
+    case up = "↑"
+    case upRight = "↗"
+    case left = "←"
+    case center = "●"
+    case right = "→"
+    case downLeft = "↙"
+    case down = "↓"
+    case downRight = "↘"
+
+    /// Thresholds for direction detection
+    /// Horizontal: 0.0 = looking right (from camera POV), 1.0 = looking left
+    /// Vertical: 0.0 = looking up, 1.0 = looking down
+    private static let horizontalLeftThreshold = 0.55  // Above this = looking left
+    private static let horizontalRightThreshold = 0.45  // Below this = looking right
+    private static let verticalUpThreshold = 0.40  // Below this = looking up
+    private static let verticalDownThreshold = 0.60  // Above this = looking down
+
+    static func from(horizontal: Double, vertical: Double) -> GazeDirection {
+        let isLeft = horizontal > horizontalLeftThreshold
+        let isRight = horizontal < horizontalRightThreshold
+        let isUp = vertical < verticalUpThreshold
+        let isDown = vertical > verticalDownThreshold
+
+        if isUp {
+            if isLeft { return .upLeft }
+            if isRight { return .upRight }
+            return .up
+        } else if isDown {
+            if isLeft { return .downLeft }
+            if isRight { return .downRight }
+            return .down
+        } else {
+            if isLeft { return .left }
+            if isRight { return .right }
+            return .center
+        }
+    }
+
+    /// Grid position (0-2 for x and y)
+    var gridPosition: (x: Int, y: Int) {
+        switch self {
+        case .upLeft: return (0, 0)
+        case .up: return (1, 0)
+        case .upRight: return (2, 0)
+        case .left: return (0, 1)
+        case .center: return (1, 1)
+        case .right: return (2, 1)
+        case .downLeft: return (0, 2)
+        case .down: return (1, 2)
+        case .downRight: return (2, 2)
+        }
+    }
+}
+
 /// Calibration state for adaptive thresholding (matches Python Calibration class)
 final class PupilCalibration: @unchecked Sendable {
     private let lock = NSLock()
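
As a quick illustration of the thresholds above (input values are made up): an averaged horizontal ratio of 0.62 is above horizontalLeftThreshold (0.55) and a vertical ratio of 0.30 is below verticalUpThreshold (0.40), so the gaze lands in the upper-left cell; a pair near (0.50, 0.50) stays in the center cell.

    let d1 = GazeDirection.from(horizontal: 0.62, vertical: 0.30)  // .upLeft, gridPosition (0, 0)
    let d2 = GazeDirection.from(horizontal: 0.50, vertical: 0.50)  // .center, gridPosition (1, 1)
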
@@ -48,7 +105,8 @@ final class PupilCalibration: @unchecked Sendable {
         lock.lock()
         defer { lock.unlock() }
         let thresholds = side == 0 ? thresholdsLeft : thresholdsRight
-        guard !thresholds.isEmpty else { return 50 }
+        // DEBUG: Use higher default threshold (was 50)
+        guard !thresholds.isEmpty else { return 90 }
         return thresholds.reduce(0, +) / thresholds.count
     }
 
@@ -147,9 +205,16 @@ final class PupilDetector: @unchecked Sendable {
 
     // MARK: - Configuration
 
-    nonisolated(unsafe) static var enableDebugImageSaving = false
+    nonisolated(unsafe) static var enableDebugImageSaving: Bool = false  // Disabled - causes sandbox errors
     nonisolated(unsafe) static var enablePerformanceLogging = false
-    nonisolated(unsafe) static var enableDiagnosticLogging = false
+    nonisolated(unsafe) static var enableDiagnosticLogging = false  // Disabled - pupil detection now working
+    nonisolated(unsafe) static var enableDebugLogging: Bool {
+        #if DEBUG
+        return true
+        #else
+        return false
+        #endif
+    }
     nonisolated(unsafe) static var frameSkipCount = 10  // Process every Nth frame
 
     // MARK: - State (protected by lock)
@@ -240,7 +305,6 @@ final class PupilDetector: @unchecked Sendable {
         side: Int = 0,
         threshold: Int? = nil
     ) -> (pupilPosition: PupilPosition, eyeRegion: EyeRegion)? {
-
         // Frame skipping - return cached result
         if frameCounter % frameSkipCount != 0 {
             let cachedPosition = side == 0 ? lastPupilPositions.left : lastPupilPositions.right
@@ -363,6 +427,38 @@ final class PupilDetector: @unchecked Sendable {
         }
 
         // Step 7: Process image (bilateral filter + erosion + threshold)
+        if enableDiagnosticLogging {
+            logDebug(
+                "👁 PupilDetector: Using threshold=\(effectiveThreshold) for \(eyeWidth)x\(eyeHeight) eye region"
+            )
+        }
+
+        // Debug: Save input eye image before processing
+        if enableDebugImageSaving && debugImageCounter < 20 {
+            NSLog("📸 Saving eye_input_%d - %dx%d, side=%d, region=(%.0f,%.0f,%.0f,%.0f)",
+                  debugImageCounter, eyeWidth, eyeHeight, side,
+                  eyeRegion.frame.origin.x, eyeRegion.frame.origin.y,
+                  eyeRegion.frame.width, eyeRegion.frame.height)
+
+            // Debug: Print pixel value statistics for input
+            var minVal: UInt8 = 255, maxVal: UInt8 = 0
+            var sum: Int = 0
+            var darkCount = 0  // pixels <= 90
+            for i in 0..<(eyeWidth * eyeHeight) {
+                let v = eyeBuf[i]
+                if v < minVal { minVal = v }
+                if v > maxVal { maxVal = v }
+                sum += Int(v)
+                if v <= 90 { darkCount += 1 }
+            }
+            let avgVal = Double(sum) / Double(eyeWidth * eyeHeight)
+            NSLog("📊 Eye input stats: min=%d, max=%d, avg=%.1f, darkPixels(<=90)=%d", minVal, maxVal, avgVal, darkCount)
+
+            saveDebugImage(
+                data: eyeBuf, width: eyeWidth, height: eyeHeight,
+                name: "eye_input_\(debugImageCounter)")
+        }
+
         imageProcessingOptimized(
             input: eyeBuf,
             output: tmpBuf,
@@ -373,6 +469,15 @@ final class PupilDetector: @unchecked Sendable {
 
         // Debug: Save processed images if enabled
         if enableDebugImageSaving && debugImageCounter < 10 {
+            // Debug: Print pixel value statistics for output
+            var darkCount = 0  // pixels == 0 (black)
+            var whiteCount = 0  // pixels == 255 (white)
+            for i in 0..<(eyeWidth * eyeHeight) {
+                if tmpBuf[i] == 0 { darkCount += 1 }
+                else if tmpBuf[i] == 255 { whiteCount += 1 }
+            }
+            NSLog("📊 Processed output stats: darkPixels=%d, whitePixels=%d", darkCount, whiteCount)
+
             saveDebugImage(
                 data: tmpBuf, width: eyeWidth, height: eyeHeight,
                 name: "processed_eye_\(debugImageCounter)")
@@ -388,17 +493,13 @@ final class PupilDetector: @unchecked Sendable {
         )
         else {
             if enableDiagnosticLogging {
-                logDebug(
-                    "👁 PupilDetector: Failed - findPupilFromContours returned nil (not enough dark pixels)"
-                )
+                logDebug("👁 PupilDetector: Failed - findPupilFromContours returned nil (not enough dark pixels) for side \(side)")
             }
             return nil
         }
 
         if enableDiagnosticLogging {
-            logDebug(
-                "👁 PupilDetector: Success - centroid at (\(String(format: "%.1f", centroidX)), \(String(format: "%.1f", centroidY))) in \(eyeWidth)x\(eyeHeight) region"
-            )
+            logDebug("👁 PupilDetector: Success side=\(side) - centroid at (\(String(format: "%.1f", centroidX)), \(String(format: "%.1f", centroidY))) in \(eyeWidth)x\(eyeHeight) region")
         }
 
         let pupilPosition = PupilPosition(x: CGFloat(centroidX), y: CGFloat(centroidY))
@@ -539,7 +640,7 @@ final class PupilDetector: @unchecked Sendable {
 
         guard eyeWidth > 0, eyeHeight > 0 else { return false }
 
-        // Initialize to white (masked out)
+        // Initialize to WHITE (255) - masked pixels should be bright so they don't affect pupil detection
         memset(output, 255, eyeWidth * eyeHeight)
 
         // Convert eye points to local coordinates
@@ -600,20 +701,10 @@ final class PupilDetector: @unchecked Sendable {
         let size = width * height
         guard size > 0 else { return }
 
-        // Use a working buffer for intermediate results
-        let workBuffer = UnsafeMutablePointer<UInt8>.allocate(capacity: size)
-        defer { workBuffer.deallocate() }
-
-        // 1. Fast Gaussian blur using vImage (replaces expensive bilateral filter)
-        gaussianBlurOptimized(input: input, output: workBuffer, width: width, height: height)
-
-        // 2. Erosion with vImage (3 iterations)
-        erodeOptimized(
-            input: workBuffer, output: output, width: width, height: height, iterations: 3)
-
-        // 3. Simple binary threshold (no vDSP overhead for small buffers)
+        // SIMPLIFIED: Skip blur to avoid contaminating dark pupil pixels with bright mask pixels
+        // Apply binary threshold directly to input
         for i in 0..<size {
-            output[i] = output[i] > UInt8(threshold) ? 255 : 0
+            output[i] = input[i] > UInt8(threshold) ? 255 : 0
         }
     }
 
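
The simplified path above is now a plain per-pixel binary threshold on the unblurred input. A standalone sketch of the same rule (the helper name is illustrative, not part of the commit):

    // Pixels brighter than the threshold become white (255); everything else becomes black (0).
    func binaryThreshold(_ input: [UInt8], threshold: UInt8) -> [UInt8] {
        input.map { $0 > threshold ? 255 : 0 }
    }
    // binaryThreshold([20, 90, 200], threshold: 90) == [0, 0, 255]
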
@@ -722,29 +813,22 @@ final class PupilDetector: @unchecked Sendable {
         height: Int
     ) -> (x: Double, y: Double)? {
 
-        // Optimized approach: find centroid of all black pixels with early exit
+        // Optimized approach: find centroid of all black pixels
         // This works well for pupil detection since the pupil is the main dark blob
 
-        // Use a more efficient approach that doesn't iterate through entire image
         var sumX: Int = 0
         var sumY: Int = 0
         var count: Int = 0
 
-        // Early exit if we already know this isn't going to be useful
-        let threshold = UInt8(5)  // Only consider pixels that are quite dark
+        // After binary thresholding, pixels are 0 (black/pupil) or 255 (white/background)
+        // Use threshold of 128 to catch any pixels that are closer to black
+        let threshold = UInt8(128)
 
-        // Process in chunks for better cache performance
-        let chunkSize = 16
-        var rowsProcessed = 0
-
-        while rowsProcessed < height {
-            let endRow = min(rowsProcessed + chunkSize, height)
-
-            for y in rowsProcessed..<endRow {
+        // Process entire image to get accurate centroid
+        for y in 0..<height {
                 let rowOffset = y * width
                 for x in 0..<width {
-                    // Only process dark pixels that are likely to be pupil
-                    if data[rowOffset + x] < threshold {
+                    if data[rowOffset + x] <= threshold {
                         sumX += x
                         sumY += y
                         count += 1
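
For reference, the centroid rule used above applied to a toy set of dark pixels (coordinates are illustrative): five pixels in one row centered on x = 12 average out to (12.0, 7.0), which is what gets reported as the pupil position.

    let darkPixels = [(x: 10, y: 7), (x: 11, y: 7), (x: 12, y: 7), (x: 13, y: 7), (x: 14, y: 7)]
    let cx = Double(darkPixels.map { $0.x }.reduce(0, +)) / Double(darkPixels.count)  // 12.0
    let cy = Double(darkPixels.map { $0.y }.reduce(0, +)) / Double(darkPixels.count)  // 7.0
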
@@ -752,15 +836,12 @@ final class PupilDetector: @unchecked Sendable {
                     }
                 }
 
-            rowsProcessed = endRow
-
-            // Early exit if we've found enough pixels for a reasonable estimate
-            if count > 25 {  // Early termination condition
-                break
-            }
+            if enableDiagnosticLogging && count < 5 {
+                logDebug("👁 PupilDetector: Dark pixel count = \(count) (need >= 5)")
+            }
         }
 
-        guard count > 10 else { return nil }  // Need minimum pixels for valid pupil
+        // Minimum 5 pixels for valid pupil (reduced from 10 for small eye regions)
+        guard count >= 5 else { return nil }
 
         return (
             x: Double(sumX) / Double(count),
@@ -775,11 +856,14 @@ final class PupilDetector: @unchecked Sendable {
         faceBoundingBox: CGRect,
         imageSize: CGSize
     ) -> [CGPoint] {
+        // Vision uses bottom-left origin (normalized 0-1), CVPixelBuffer uses top-left
+        // We need to flip Y: flippedY = 1.0 - y
         return landmarks.normalizedPoints.map { point in
             let imageX =
                 (faceBoundingBox.origin.x + point.x * faceBoundingBox.width) * imageSize.width
-            let imageY =
-                (faceBoundingBox.origin.y + point.y * faceBoundingBox.height) * imageSize.height
+            // Flip Y coordinate for pixel buffer coordinate system
+            let flippedY = 1.0 - (faceBoundingBox.origin.y + point.y * faceBoundingBox.height)
+            let imageY = flippedY * imageSize.height
             return CGPoint(x: imageX, y: imageY)
         }
     }
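
The flip above compensates for Vision's normalized coordinate space, whose origin is the bottom-left corner, while CVPixelBuffer rows start at the top-left. A simplified sketch with illustrative numbers, ignoring the face bounding box offset:

    let normalizedY = 0.25                           // near the bottom of the frame in Vision coordinates
    let imageHeight = 1000.0                         // pixel buffer height in pixels
    let pixelY = (1.0 - normalizedY) * imageHeight   // 750.0, near the bottom in top-left coordinates
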
@@ -830,17 +914,27 @@ final class PupilDetector: @unchecked Sendable {
     private nonisolated static func saveDebugImage(
         data: UnsafePointer<UInt8>, width: Int, height: Int, name: String
     ) {
-        guard let cgImage = createCGImage(from: data, width: width, height: height) else { return }
+        guard let cgImage = createCGImage(from: data, width: width, height: height) else {
+            NSLog("⚠️ PupilDetector: createCGImage failed for %@ (%dx%d)", name, width, height)
+            return
+        }
 
-        let url = URL(fileURLWithPath: "/tmp/\(name).png")
+        let url = URL(fileURLWithPath: "/tmp/gaze_debug/\(name).png")
         guard
             let destination = CGImageDestinationCreateWithURL(
                 url as CFURL, UTType.png.identifier as CFString, 1, nil)
-        else { return }
+        else {
+            NSLog("⚠️ PupilDetector: CGImageDestinationCreateWithURL failed for %@", url.path)
+            return
+        }
 
         CGImageDestinationAddImage(destination, cgImage, nil)
-        CGImageDestinationFinalize(destination)
-        logDebug("💾 Saved debug image: \(url.path)")
+        let success = CGImageDestinationFinalize(destination)
+        if success {
+            NSLog("💾 Saved debug image: %@", url.path)
+        } else {
+            NSLog("⚠️ PupilDetector: CGImageDestinationFinalize failed for %@", url.path)
+        }
     }
 
     private nonisolated static func createCGImage(
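
One assumption worth flagging: the debug images above are written under /tmp/gaze_debug/, and CGImageDestinationFinalize can return false if that directory does not exist (or, as the enableDebugImageSaving comment notes, if sandboxing blocks the write). A minimal sketch of creating the directory up front, not part of the commit:

    try? FileManager.default.createDirectory(
        atPath: "/tmp/gaze_debug",
        withIntermediateDirectories: true
    )
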
@@ -848,24 +942,39 @@ final class PupilDetector: @unchecked Sendable {
     )
         -> CGImage?
     {
-        let mutableData = UnsafeMutablePointer<UInt8>.allocate(capacity: width * height)
-        defer { mutableData.deallocate() }
-        memcpy(mutableData, data, width * height)
+        guard width > 0 && height > 0 else {
+            print("⚠️ PupilDetector: Invalid dimensions \(width)x\(height)")
+            return nil
+        }
 
-        guard
-            let context = CGContext(
-                data: mutableData,
+        // Create a Data object that copies the pixel data
+        let pixelData = Data(bytes: data, count: width * height)
+
+        // Create CGImage from the data using CGDataProvider
+        guard let provider = CGDataProvider(data: pixelData as CFData) else {
+            print("⚠️ PupilDetector: CGDataProvider creation failed")
+            return nil
+        }
+
+        let cgImage = CGImage(
             width: width,
             height: height,
             bitsPerComponent: 8,
+            bitsPerPixel: 8,
             bytesPerRow: width,
             space: CGColorSpaceCreateDeviceGray(),
-            bitmapInfo: CGImageAlphaInfo.none.rawValue
+            bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.none.rawValue),
+            provider: provider,
+            decode: nil,
+            shouldInterpolate: false,
+            intent: .defaultIntent
         )
-        else {
-            return nil
+
+        if cgImage == nil {
+            print("⚠️ PupilDetector: CGImage creation failed")
         }
-        return context.makeImage()
+
+        return cgImage
     }
 
     /// Clean up allocated buffers (call on app termination if needed)

Gaze/Views/Components/GazeOverlayView.swift (new file, 123 lines)
@@ -0,0 +1,123 @@
+//
+//  GazeOverlayView.swift
+//  Gaze
+//
+//  Created by Claude on 1/16/26.
+//
+
+import SwiftUI
+
+struct GazeOverlayView: View {
+    @ObservedObject var eyeTrackingService: EyeTrackingService
+
+    var body: some View {
+        VStack(spacing: 8) {
+            inFrameIndicator
+            gazeDirectionGrid
+            ratioDebugView
+        }
+        .padding(12)
+    }
+
+    private var inFrameIndicator: some View {
+        HStack(spacing: 6) {
+            Circle()
+                .fill(eyeTrackingService.isInFrame ? Color.green : Color.red)
+                .frame(width: 10, height: 10)
+            Text(eyeTrackingService.isInFrame ? "In Frame" : "No Face")
+                .font(.caption2)
+                .fontWeight(.semibold)
+                .foregroundColor(.white)
+        }
+        .padding(.horizontal, 10)
+        .padding(.vertical, 6)
+        .background(
+            Capsule()
+                .fill(Color.black.opacity(0.6))
+        )
+    }
+
+    private var gazeDirectionGrid: some View {
+        let currentDirection = eyeTrackingService.gazeDirection
+        let currentPos = currentDirection.gridPosition
+
+        return VStack(spacing: 2) {
+            ForEach(0..<3, id: \.self) { row in
+                HStack(spacing: 2) {
+                    ForEach(0..<3, id: \.self) { col in
+                        let isActive = currentPos.x == col && currentPos.y == row && eyeTrackingService.isInFrame
+                        gridCell(row: row, col: col, isActive: isActive)
+                    }
+                }
+            }
+        }
+        .padding(8)
+        .background(
+            RoundedRectangle(cornerRadius: 8)
+                .fill(Color.black.opacity(0.5))
+        )
+    }
+
+    private func gridCell(row: Int, col: Int, isActive: Bool) -> some View {
+        let direction = directionFor(row: row, col: col)
+
+        return ZStack {
+            RoundedRectangle(cornerRadius: 4)
+                .fill(isActive ? Color.green : Color.white.opacity(0.2))
+
+            Text(direction.rawValue)
+                .font(.system(size: 14, weight: .bold))
+                .foregroundColor(isActive ? .white : .white.opacity(0.6))
+        }
+        .frame(width: 28, height: 28)
+    }
+
+    private func directionFor(row: Int, col: Int) -> GazeDirection {
+        switch (col, row) {
+        case (0, 0): return .upLeft
+        case (1, 0): return .up
+        case (2, 0): return .upRight
+        case (0, 1): return .left
+        case (1, 1): return .center
+        case (2, 1): return .right
+        case (0, 2): return .downLeft
+        case (1, 2): return .down
+        case (2, 2): return .downRight
+        default: return .center
+        }
+    }
+
+    private var ratioDebugView: some View {
+        VStack(alignment: .leading, spacing: 2) {
+            if let leftH = eyeTrackingService.debugLeftPupilRatio,
+               let rightH = eyeTrackingService.debugRightPupilRatio {
+                let avgH = (leftH + rightH) / 2.0
+                Text("H: \(String(format: "%.2f", avgH))")
+                    .font(.system(size: 10, weight: .medium, design: .monospaced))
+                    .foregroundColor(.white)
+            }
+
+            if let leftV = eyeTrackingService.debugLeftVerticalRatio,
+               let rightV = eyeTrackingService.debugRightVerticalRatio {
+                let avgV = (leftV + rightV) / 2.0
+                Text("V: \(String(format: "%.2f", avgV))")
+                    .font(.system(size: 10, weight: .medium, design: .monospaced))
+                    .foregroundColor(.white)
+            }
+        }
+        .padding(.horizontal, 8)
+        .padding(.vertical, 4)
+        .background(
+            RoundedRectangle(cornerRadius: 4)
+                .fill(Color.black.opacity(0.5))
+        )
+    }
+}
+
+#Preview {
+    ZStack {
+        Color.gray
+        GazeOverlayView(eyeTrackingService: EyeTrackingService.shared)
+    }
+    .frame(width: 300, height: 200)
+}
@@ -212,7 +212,11 @@ struct EnforceModeSetupView: View {
         let previewLayer = eyeTrackingService.previewLayer ?? cachedPreviewLayer
 
         if let layer = previewLayer {
+            ZStack(alignment: .topTrailing) {
             CameraPreviewView(previewLayer: layer, borderColor: borderColor)
+
+                GazeOverlayView(eyeTrackingService: eyeTrackingService)
+            }
             .frame(height: 300)
             .glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
             .onAppear {

GazeTests/Services/PupilDetectorTests.swift (new file, 172 lines)
@@ -0,0 +1,172 @@
+//
+//  PupilDetectorTests.swift
+//  GazeTests
+//
+//  Created by Claude on 1/16/26.
+//
+
+import XCTest
+import CoreVideo
+import Vision
+@testable import Gaze
+
+final class PupilDetectorTests: XCTestCase {
+
+    override func setUp() async throws {
+        // Reset the detector state
+        PupilDetector.cleanup()
+    }
+
+    func testCreateCGImageFromData() throws {
+        // Test basic image creation
+        let width = 50
+        let height = 50
+        var pixels = [UInt8](repeating: 128, count: width * height)
+
+        // Add some dark pixels for a "pupil"
+        for y in 20..<30 {
+            for x in 20..<30 {
+                pixels[y * width + x] = 10  // Very dark
+            }
+        }
+
+        // Save test image to verify
+        let pixelData = Data(pixels)
+        guard let provider = CGDataProvider(data: pixelData as CFData) else {
+            XCTFail("Failed to create CGDataProvider")
+            return
+        }
+
+        let cgImage = CGImage(
+            width: width,
+            height: height,
+            bitsPerComponent: 8,
+            bitsPerPixel: 8,
+            bytesPerRow: width,
+            space: CGColorSpaceCreateDeviceGray(),
+            bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.none.rawValue),
+            provider: provider,
+            decode: nil,
+            shouldInterpolate: false,
+            intent: .defaultIntent
+        )
+
+        XCTAssertNotNil(cgImage, "Should create CGImage from pixel data")
+    }
+
+    func testImageProcessingWithDarkPixels() throws {
+        // Test that imageProcessingOptimized produces dark pixels
+        let width = 60
+        let height = 40
+
+        // Create input with a dark circle (simulating pupil)
+        var input = [UInt8](repeating: 200, count: width * height)  // Light background (like eye white)
+
+        // Add a dark ellipse in center (pupil)
+        let centerX = width / 2
+        let centerY = height / 2
+        for y in 0..<height {
+            for x in 0..<width {
+                let dx = x - centerX
+                let dy = y - centerY
+                if dx * dx + dy * dy < 100 {  // Circle radius ~10
+                    input[y * width + x] = 20  // Dark pupil
+                }
+            }
+        }
+
+        var output = [UInt8](repeating: 255, count: width * height)
+        let threshold = 50  // Same as default
+
+        // Call the actual processing function
+        input.withUnsafeMutableBufferPointer { inputPtr in
+            output.withUnsafeMutableBufferPointer { outputPtr in
+                // We can't call imageProcessingOptimized directly as it's private
+                // But we can verify by saving input for inspection
+            }
+        }
+
+        // Save the input for manual inspection
+        let inputData = Data(input)
+        let url = URL(fileURLWithPath: "/Users/mike/gaze/images/test_input_synthetic.png")
+        if let provider = CGDataProvider(data: inputData as CFData) {
+            if let cgImage = CGImage(
+                width: width,
+                height: height,
+                bitsPerComponent: 8,
+                bitsPerPixel: 8,
+                bytesPerRow: width,
+                space: CGColorSpaceCreateDeviceGray(),
+                bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.none.rawValue),
+                provider: provider,
+                decode: nil,
+                shouldInterpolate: false,
+                intent: .defaultIntent
+            ) {
+                if let dest = CGImageDestinationCreateWithURL(url as CFURL, "public.png" as CFString, 1, nil) {
+                    CGImageDestinationAddImage(dest, cgImage, nil)
+                    CGImageDestinationFinalize(dest)
+                    print("💾 Saved synthetic test input to: \(url.path)")
+                }
+            }
+        }
+
+        // Count dark pixels in input
+        let darkCount = input.filter { $0 < 50 }.count
+        print("📊 Input has \(darkCount) dark pixels (< 50)")
+        XCTAssertGreaterThan(darkCount, 0, "Input should have dark pixels for pupil")
+    }
+
+    func testFindPupilFromContoursWithSyntheticData() throws {
+        // Create synthetic binary image with a dark region
+        let width = 60
+        let height = 40
+
+        // All white except a dark blob
+        var binaryData = [UInt8](repeating: 255, count: width * height)
+
+        // Add dark region (0 = dark/pupil)
+        let centerX = 30
+        let centerY = 20
+        var darkPixelCount = 0
+        for y in 0..<height {
+            for x in 0..<width {
+                let dx = x - centerX
+                let dy = y - centerY
+                if dx * dx + dy * dy < 100 {
+                    binaryData[y * width + x] = 0
+                    darkPixelCount += 1
+                }
+            }
+        }
+
+        print("📊 Created synthetic image with \(darkPixelCount) dark pixels")
+
+        // Save for inspection
+        let binaryDataObj = Data(binaryData)
+        let url = URL(fileURLWithPath: "/Users/mike/gaze/images/test_binary_synthetic.png")
+        if let provider = CGDataProvider(data: binaryDataObj as CFData) {
+            if let cgImage = CGImage(
+                width: width,
+                height: height,
+                bitsPerComponent: 8,
+                bitsPerPixel: 8,
+                bytesPerRow: width,
+                space: CGColorSpaceCreateDeviceGray(),
+                bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.none.rawValue),
+                provider: provider,
+                decode: nil,
+                shouldInterpolate: false,
+                intent: .defaultIntent
+            ) {
+                if let dest = CGImageDestinationCreateWithURL(url as CFURL, "public.png" as CFString, 1, nil) {
+                    CGImageDestinationAddImage(dest, cgImage, nil)
+                    CGImageDestinationFinalize(dest)
+                    print("💾 Saved synthetic binary image to: \(url.path)")
+                }
+            }
+        }
+
+        XCTAssertGreaterThan(darkPixelCount, 10, "Should have enough dark pixels")
+    }
+}

run (2 lines changed)
@@ -147,7 +147,7 @@ launch_app() {
 
     sleep 2
     echo "================================================================"
-    /usr/bin/log stream --predicate "subsystem contains \"$APP_SUBSYSTEM\"" \
+    /usr/bin/log stream --level debug --predicate "subsystem contains \"$APP_SUBSYSTEM\"" \
       --style compact 2>/dev/null
   else
     echo "⚠️ App not found at expected location, trying fallback..."