feat: debugging overlay
@@ -69,6 +69,24 @@ class AppDelegate: NSObject, NSApplicationDelegate, ObservableObject {
         if settingsManager.settings.hasCompletedOnboarding {
             startTimers()
         }
+
+        // DEBUG: Auto-start eye tracking test mode if launch argument is present
+        #if DEBUG
+            if CommandLine.arguments.contains("--debug-eye-tracking") {
+                NSLog("🔬 DEBUG: Auto-starting eye tracking test mode")
+                Task { @MainActor in
+                    // Enable enforce mode if not already
+                    if !settingsManager.settings.enforcementMode {
+                        settingsManager.settings.enforcementMode = true
+                    }
+                    // Start test mode after a brief delay
+                    try? await Task.sleep(nanoseconds: 1_000_000_000) // 1 second
+                    NSLog("🔬 DEBUG: Starting test mode now...")
+                    await EnforceModeService.shared.startTestMode()
+                    NSLog("🔬 DEBUG: Test mode started")
+                }
+            }
+        #endif
     }

     // Note: Smart mode setup is now handled by ServiceContainer
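The hunk above wires the --debug-eye-tracking launch argument into a DEBUG-only auto-start. For reference, a debug build can be exercised from the command line; a minimal sketch, assuming the built product is named Gaze.app:

    open -a Gaze.app --args --debug-eye-tracking

In Xcode, the equivalent is adding the flag under Product > Scheme > Edit Scheme... > Run > Arguments Passed On Launch.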
@@ -22,6 +22,8 @@ class EyeTrackingService: NSObject, ObservableObject {
     // Debug properties for UI display
     @Published var debugLeftPupilRatio: Double?
     @Published var debugRightPupilRatio: Double?
+    @Published var debugLeftVerticalRatio: Double?
+    @Published var debugRightVerticalRatio: Double?
     @Published var debugYaw: Double?
     @Published var debugPitch: Double?
     @Published var enableDebugLogging: Bool = false {
@@ -30,6 +32,25 @@ class EyeTrackingService: NSObject, ObservableObject {
             PupilDetector.enableDiagnosticLogging = enableDebugLogging
         }
     }

+    // Computed gaze direction for UI overlay
+    var gazeDirection: GazeDirection {
+        guard let leftH = debugLeftPupilRatio,
+            let rightH = debugRightPupilRatio,
+            let leftV = debugLeftVerticalRatio,
+            let rightV = debugRightVerticalRatio else {
+            return .center
+        }
+
+        let avgHorizontal = (leftH + rightH) / 2.0
+        let avgVertical = (leftV + rightV) / 2.0
+
+        return GazeDirection.from(horizontal: avgHorizontal, vertical: avgVertical)
+    }
+
+    var isInFrame: Bool {
+        faceDetected
+    }
+
     // Throttle for debug logging
     private var lastDebugLogTime: Date = .distantPast
@@ -71,6 +92,8 @@ class EyeTrackingService: NSObject, ObservableObject {
         var userLookingAtScreen: Bool = true
         var debugLeftPupilRatio: Double?
         var debugRightPupilRatio: Double?
+        var debugLeftVerticalRatio: Double?
+        var debugRightVerticalRatio: Double?
         var debugYaw: Double?
         var debugPitch: Double?

@@ -80,6 +103,8 @@ class EyeTrackingService: NSObject, ObservableObject {
             userLookingAtScreen: Bool = true,
             debugLeftPupilRatio: Double? = nil,
             debugRightPupilRatio: Double? = nil,
+            debugLeftVerticalRatio: Double? = nil,
+            debugRightVerticalRatio: Double? = nil,
             debugYaw: Double? = nil,
             debugPitch: Double? = nil
         ) {
@@ -88,6 +113,8 @@ class EyeTrackingService: NSObject, ObservableObject {
             self.userLookingAtScreen = userLookingAtScreen
             self.debugLeftPupilRatio = debugLeftPupilRatio
             self.debugRightPupilRatio = debugRightPupilRatio
+            self.debugLeftVerticalRatio = debugLeftVerticalRatio
+            self.debugRightVerticalRatio = debugRightVerticalRatio
             self.debugYaw = debugYaw
             self.debugPitch = debugPitch
         }
@@ -260,6 +287,8 @@ class EyeTrackingService: NSObject, ObservableObject {
         result.userLookingAtScreen = !gazeResult.lookingAway
         result.debugLeftPupilRatio = gazeResult.leftPupilRatio
         result.debugRightPupilRatio = gazeResult.rightPupilRatio
+        result.debugLeftVerticalRatio = gazeResult.leftVerticalRatio
+        result.debugRightVerticalRatio = gazeResult.rightVerticalRatio
         result.debugYaw = gazeResult.yaw
         result.debugPitch = gazeResult.pitch

@@ -302,6 +331,8 @@ class EyeTrackingService: NSObject, ObservableObject {
         var lookingAway: Bool = false
         var leftPupilRatio: Double?
         var rightPupilRatio: Double?
+        var leftVerticalRatio: Double?
+        var rightVerticalRatio: Double?
         var yaw: Double?
         var pitch: Double?

@@ -309,12 +340,16 @@ class EyeTrackingService: NSObject, ObservableObject {
             lookingAway: Bool = false,
             leftPupilRatio: Double? = nil,
             rightPupilRatio: Double? = nil,
+            leftVerticalRatio: Double? = nil,
+            rightVerticalRatio: Double? = nil,
             yaw: Double? = nil,
             pitch: Double? = nil
         ) {
             self.lookingAway = lookingAway
             self.leftPupilRatio = leftPupilRatio
             self.rightPupilRatio = rightPupilRatio
+            self.leftVerticalRatio = leftVerticalRatio
+            self.rightVerticalRatio = rightVerticalRatio
             self.yaw = yaw
             self.pitch = pitch
         }
@@ -371,6 +406,8 @@ class EyeTrackingService: NSObject, ObservableObject {
     {
         var leftGazeRatio: Double? = nil
         var rightGazeRatio: Double? = nil
+        var leftVerticalRatio: Double? = nil
+        var rightVerticalRatio: Double? = nil

         // Detect left pupil (side = 0)
         if let leftResult = PupilDetector.detectPupil(
@@ -384,6 +421,10 @@ class EyeTrackingService: NSObject, ObservableObject {
                 pupilPosition: leftResult.pupilPosition,
                 eyeRegion: leftResult.eyeRegion
             )
+            leftVerticalRatio = calculateVerticalRatioSync(
+                pupilPosition: leftResult.pupilPosition,
+                eyeRegion: leftResult.eyeRegion
+            )
         }

         // Detect right pupil (side = 1)
@@ -398,10 +439,16 @@ class EyeTrackingService: NSObject, ObservableObject {
                 pupilPosition: rightResult.pupilPosition,
                 eyeRegion: rightResult.eyeRegion
             )
+            rightVerticalRatio = calculateVerticalRatioSync(
+                pupilPosition: rightResult.pupilPosition,
+                eyeRegion: rightResult.eyeRegion
+            )
         }

         result.leftPupilRatio = leftGazeRatio
         result.rightPupilRatio = rightGazeRatio
+        result.leftVerticalRatio = leftVerticalRatio
+        result.rightVerticalRatio = rightVerticalRatio

         // Connect to CalibrationManager on main thread
         if let leftRatio = leftGazeRatio,
@@ -447,6 +494,23 @@ class EyeTrackingService: NSObject, ObservableObject {
         let ratio = pupilX / denominator
         return max(0.0, min(1.0, ratio))
     }

+    /// Non-isolated vertical gaze ratio calculation
+    /// Returns 0.0 for looking up, 1.0 for looking down, 0.5 for center
+    nonisolated private func calculateVerticalRatioSync(
+        pupilPosition: PupilPosition, eyeRegion: EyeRegion
+    ) -> Double {
+        let pupilY = Double(pupilPosition.y)
+        let eyeTop = Double(eyeRegion.frame.minY)
+        let eyeBottom = Double(eyeRegion.frame.maxY)
+        let eyeHeight = eyeBottom - eyeTop
+
+        guard eyeHeight > 0 else { return 0.5 }
+
+        // Normalize: 0.0 = top of eye region, 1.0 = bottom
+        let ratio = (pupilY - eyeTop) / eyeHeight
+        return max(0.0, min(1.0, ratio))
+    }
+
     private func detectEyesClosed(
         leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D, shouldLog: Bool
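To make the normalization above concrete, a small worked example using the same formula (the eye-region numbers are invented for illustration):

    // Eye region spans y = 100...140; pupil centroid at y = 110.
    let eyeTop = 100.0, eyeBottom = 140.0, pupilY = 110.0
    let ratio = (pupilY - eyeTop) / (eyeBottom - eyeTop)  // (110 - 100) / 40 = 0.25
    // 0.25 is below the verticalUpThreshold of 0.40 defined later in this diff,
    // so this eye would read as looking up.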
@@ -660,11 +724,22 @@ class EyeTrackingService: NSObject, ObservableObject {
 }

 extension EyeTrackingService: AVCaptureVideoDataOutputSampleBufferDelegate {
+    // DEBUG: Frame counter for periodic logging (nonisolated for video callback)
+    private nonisolated(unsafe) static var debugFrameCount = 0
+
     nonisolated func captureOutput(
         _ output: AVCaptureOutput,
         didOutput sampleBuffer: CMSampleBuffer,
         from connection: AVCaptureConnection
     ) {
+        // DEBUG: Print every 30 frames to show we're receiving video
+        #if DEBUG
+            EyeTrackingService.debugFrameCount += 1
+            if EyeTrackingService.debugFrameCount % 30 == 0 {
+                NSLog("🎥 EyeTrackingService: Received frame %d", EyeTrackingService.debugFrameCount)
+            }
+        #endif
+
         guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
             return
         }
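A note on the debugFrameCount pattern above: nonisolated(unsafe) opts the static counter out of Swift's concurrency checking, so the += on the capture queue is an unsynchronized read-modify-write. For a DEBUG-only counter fed by a single serial capture queue that is generally harmless; if stricter safety were wanted, a locked counter is a small change. A hedged sketch, assuming macOS 13+ for OSAllocatedUnfairLock:

    import os

    // Lock-protected frame counter; withLock runs the closure while holding the lock.
    private static let debugFrameCounter = OSAllocatedUnfairLock(initialState: 0)

    let frame = debugFrameCounter.withLock { (count: inout Int) -> Int in
        count += 1
        return count
    }
    if frame % 30 == 0 { NSLog("🎥 Received frame %d", frame) }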
@@ -701,6 +776,8 @@ extension EyeTrackingService: AVCaptureVideoDataOutputSampleBufferDelegate {
             self.userLookingAtScreen = result.userLookingAtScreen
             self.debugLeftPupilRatio = result.debugLeftPupilRatio
             self.debugRightPupilRatio = result.debugRightPupilRatio
+            self.debugLeftVerticalRatio = result.debugLeftVerticalRatio
+            self.debugRightVerticalRatio = result.debugRightVerticalRatio
             self.debugYaw = result.debugYaw
             self.debugPitch = result.debugPitch
         }
@@ -58,28 +58,28 @@ final class LoggingManager {
     func debug(_ message: String, category: String = "General") {
         guard isLoggingEnabled else { return }
         let logger = Logger(subsystem: subsystem, category: category)
-        logger.debug("\(message)")
+        logger.debug("\(message, privacy: .public)")
     }

     /// Convenience method for info logging
     func info(_ message: String, category: String = "General") {
         guard isLoggingEnabled else { return }
         let logger = Logger(subsystem: subsystem, category: category)
-        logger.info("\(message)")
+        logger.info("\(message, privacy: .public)")
     }

     /// Convenience method for error logging
     func error(_ message: String, category: String = "General") {
         guard isLoggingEnabled else { return }
         let logger = Logger(subsystem: subsystem, category: category)
-        logger.error("\(message)")
+        logger.error("\(message, privacy: .public)")
     }

     /// Convenience method for warning logging
     func warning(_ message: String, category: String = "General") {
         guard isLoggingEnabled else { return }
         let logger = Logger(subsystem: subsystem, category: category)
-        logger.warning("\(message)")
+        logger.warning("\(message, privacy: .public)")
     }
 }
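Context for the privacy: .public change in this hunk: os.Logger redacts dynamic interpolations as <private> in the unified log unless a privacy level is attached, so these messages were unreadable in Console outside a debugger. A hedged sketch of the difference (the subsystem string is hypothetical):

    import os

    let logger = Logger(subsystem: "com.example.gaze", category: "Debug")  // hypothetical subsystem
    logger.info("ratio=\(0.42)")                    // may appear as "ratio=<private>"
    logger.info("ratio=\(0.42, privacy: .public)")  // always appears as "ratio=0.42"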
@@ -31,6 +31,63 @@ struct EyeRegion: Sendable {
     let origin: CGPoint
 }

+/// 9-point gaze direction grid
+enum GazeDirection: String, Sendable, CaseIterable {
+    case upLeft = "↖"
+    case up = "↑"
+    case upRight = "↗"
+    case left = "←"
+    case center = "●"
+    case right = "→"
+    case downLeft = "↙"
+    case down = "↓"
+    case downRight = "↘"
+
+    /// Thresholds for direction detection
+    /// Horizontal: 0.0 = looking right (from camera POV), 1.0 = looking left
+    /// Vertical: 0.0 = looking up, 1.0 = looking down
+    private static let horizontalLeftThreshold = 0.55 // Above this = looking left
+    private static let horizontalRightThreshold = 0.45 // Below this = looking right
+    private static let verticalUpThreshold = 0.40 // Below this = looking up
+    private static let verticalDownThreshold = 0.60 // Above this = looking down
+
+    static func from(horizontal: Double, vertical: Double) -> GazeDirection {
+        let isLeft = horizontal > horizontalLeftThreshold
+        let isRight = horizontal < horizontalRightThreshold
+        let isUp = vertical < verticalUpThreshold
+        let isDown = vertical > verticalDownThreshold
+
+        if isUp {
+            if isLeft { return .upLeft }
+            if isRight { return .upRight }
+            return .up
+        } else if isDown {
+            if isLeft { return .downLeft }
+            if isRight { return .downRight }
+            return .down
+        } else {
+            if isLeft { return .left }
+            if isRight { return .right }
+            return .center
+        }
+    }
+
+    /// Grid position (0-2 for x and y)
+    var gridPosition: (x: Int, y: Int) {
+        switch self {
+        case .upLeft: return (0, 0)
+        case .up: return (1, 0)
+        case .upRight: return (2, 0)
+        case .left: return (0, 1)
+        case .center: return (1, 1)
+        case .right: return (2, 1)
+        case .downLeft: return (0, 2)
+        case .down: return (1, 2)
+        case .downRight: return (2, 2)
+        }
+    }
+}
+
 /// Calibration state for adaptive thresholding (matches Python Calibration class)
 final class PupilCalibration: @unchecked Sendable {
     private let lock = NSLock()
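A quick worked example of the threshold mapping above, fed with the averaged ratios the overlay computes (values invented for illustration):

    // horizontal = 0.62 (> 0.55, so left); vertical = 0.31 (< 0.40, so up)
    let direction = GazeDirection.from(horizontal: 0.62, vertical: 0.31)
    print(direction.rawValue, direction.gridPosition)  // ↖ (x: 0, y: 0)

    // Values inside 0.45...0.55 horizontally and 0.40...0.60 vertically read as center.
    print(GazeDirection.from(horizontal: 0.50, vertical: 0.50).rawValue)  // ●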
@@ -48,7 +105,8 @@ final class PupilCalibration: @unchecked Sendable {
         lock.lock()
         defer { lock.unlock() }
         let thresholds = side == 0 ? thresholdsLeft : thresholdsRight
-        guard !thresholds.isEmpty else { return 50 }
+        // DEBUG: Use higher default threshold (was 50)
+        guard !thresholds.isEmpty else { return 90 }
         return thresholds.reduce(0, +) / thresholds.count
     }

@@ -147,9 +205,16 @@ final class PupilDetector: @unchecked Sendable {

     // MARK: - Configuration

-    nonisolated(unsafe) static var enableDebugImageSaving = false
+    nonisolated(unsafe) static var enableDebugImageSaving: Bool = false // Disabled - causes sandbox errors
     nonisolated(unsafe) static var enablePerformanceLogging = false
-    nonisolated(unsafe) static var enableDiagnosticLogging = false
+    nonisolated(unsafe) static var enableDiagnosticLogging = false // Disabled - pupil detection now working
+    nonisolated(unsafe) static var enableDebugLogging: Bool {
+        #if DEBUG
+            return true
+        #else
+            return false
+        #endif
+    }
     nonisolated(unsafe) static var frameSkipCount = 10 // Process every Nth frame

     // MARK: - State (protected by lock)
@@ -240,7 +305,6 @@ final class PupilDetector: @unchecked Sendable {
         side: Int = 0,
         threshold: Int? = nil
     ) -> (pupilPosition: PupilPosition, eyeRegion: EyeRegion)? {

         // Frame skipping - return cached result
         if frameCounter % frameSkipCount != 0 {
             let cachedPosition = side == 0 ? lastPupilPositions.left : lastPupilPositions.right
@@ -363,6 +427,38 @@ final class PupilDetector: @unchecked Sendable {
         }

         // Step 7: Process image (bilateral filter + erosion + threshold)
+        if enableDiagnosticLogging {
+            logDebug(
+                "👁 PupilDetector: Using threshold=\(effectiveThreshold) for \(eyeWidth)x\(eyeHeight) eye region"
+            )
+        }
+
+        // Debug: Save input eye image before processing
+        if enableDebugImageSaving && debugImageCounter < 20 {
+            NSLog("📸 Saving eye_input_%d - %dx%d, side=%d, region=(%.0f,%.0f,%.0f,%.0f)",
+                debugImageCounter, eyeWidth, eyeHeight, side,
+                eyeRegion.frame.origin.x, eyeRegion.frame.origin.y,
+                eyeRegion.frame.width, eyeRegion.frame.height)
+
+            // Debug: Print pixel value statistics for input
+            var minVal: UInt8 = 255, maxVal: UInt8 = 0
+            var sum: Int = 0
+            var darkCount = 0 // pixels <= 90
+            for i in 0..<(eyeWidth * eyeHeight) {
+                let v = eyeBuf[i]
+                if v < minVal { minVal = v }
+                if v > maxVal { maxVal = v }
+                sum += Int(v)
+                if v <= 90 { darkCount += 1 }
+            }
+            let avgVal = Double(sum) / Double(eyeWidth * eyeHeight)
+            NSLog("📊 Eye input stats: min=%d, max=%d, avg=%.1f, darkPixels(<=90)=%d", minVal, maxVal, avgVal, darkCount)
+
+            saveDebugImage(
+                data: eyeBuf, width: eyeWidth, height: eyeHeight,
+                name: "eye_input_\(debugImageCounter)")
+        }
+
         imageProcessingOptimized(
             input: eyeBuf,
             output: tmpBuf,
@@ -373,6 +469,15 @@ final class PupilDetector: @unchecked Sendable {

         // Debug: Save processed images if enabled
         if enableDebugImageSaving && debugImageCounter < 10 {
+            // Debug: Print pixel value statistics for output
+            var darkCount = 0 // pixels == 0 (black)
+            var whiteCount = 0 // pixels == 255 (white)
+            for i in 0..<(eyeWidth * eyeHeight) {
+                if tmpBuf[i] == 0 { darkCount += 1 }
+                else if tmpBuf[i] == 255 { whiteCount += 1 }
+            }
+            NSLog("📊 Processed output stats: darkPixels=%d, whitePixels=%d", darkCount, whiteCount)
+
             saveDebugImage(
                 data: tmpBuf, width: eyeWidth, height: eyeHeight,
                 name: "processed_eye_\(debugImageCounter)")
@@ -388,17 +493,13 @@ final class PupilDetector: @unchecked Sendable {
         )
         else {
             if enableDiagnosticLogging {
-                logDebug(
-                    "👁 PupilDetector: Failed - findPupilFromContours returned nil (not enough dark pixels)"
-                )
+                logDebug("👁 PupilDetector: Failed - findPupilFromContours returned nil (not enough dark pixels) for side \(side)")
             }
             return nil
         }

         if enableDiagnosticLogging {
-            logDebug(
-                "👁 PupilDetector: Success - centroid at (\(String(format: "%.1f", centroidX)), \(String(format: "%.1f", centroidY))) in \(eyeWidth)x\(eyeHeight) region"
-            )
+            logDebug("👁 PupilDetector: Success side=\(side) - centroid at (\(String(format: "%.1f", centroidX)), \(String(format: "%.1f", centroidY))) in \(eyeWidth)x\(eyeHeight) region")
         }

         let pupilPosition = PupilPosition(x: CGFloat(centroidX), y: CGFloat(centroidY))
@@ -539,7 +640,7 @@ final class PupilDetector: @unchecked Sendable {

         guard eyeWidth > 0, eyeHeight > 0 else { return false }

-        // Initialize to white (masked out)
+        // Initialize to WHITE (255) - masked pixels should be bright so they don't affect pupil detection
         memset(output, 255, eyeWidth * eyeHeight)

         // Convert eye points to local coordinates
@@ -600,20 +701,10 @@ final class PupilDetector: @unchecked Sendable {
         let size = width * height
         guard size > 0 else { return }

-        // Use a working buffer for intermediate results
-        let workBuffer = UnsafeMutablePointer<UInt8>.allocate(capacity: size)
-        defer { workBuffer.deallocate() }
-
-        // 1. Fast Gaussian blur using vImage (replaces expensive bilateral filter)
-        gaussianBlurOptimized(input: input, output: workBuffer, width: width, height: height)
-
-        // 2. Erosion with vImage (3 iterations)
-        erodeOptimized(
-            input: workBuffer, output: output, width: width, height: height, iterations: 3)
-
-        // 3. Simple binary threshold (no vDSP overhead for small buffers)
+        // SIMPLIFIED: Skip blur to avoid contaminating dark pupil pixels with bright mask pixels
+        // Apply binary threshold directly to input
         for i in 0..<size {
-            output[i] = output[i] > UInt8(threshold) ? 255 : 0
+            output[i] = input[i] > UInt8(threshold) ? 255 : 0
         }
     }

@@ -722,45 +813,35 @@ final class PupilDetector: @unchecked Sendable {
         height: Int
     ) -> (x: Double, y: Double)? {

-        // Optimized approach: find centroid of all black pixels with early exit
+        // Optimized approach: find centroid of all black pixels
         // This works well for pupil detection since the pupil is the main dark blob

-        // Use a more efficient approach that doesn't iterate through entire image
         var sumX: Int = 0
         var sumY: Int = 0
         var count: Int = 0

-        // Early exit if we already know this isn't going to be useful
-        let threshold = UInt8(5) // Only consider pixels that are quite dark
+        // After binary thresholding, pixels are 0 (black/pupil) or 255 (white/background)
+        // Use threshold of 128 to catch any pixels that are closer to black
+        let threshold = UInt8(128)

-        // Process in chunks for better cache performance
-        let chunkSize = 16
-        var rowsProcessed = 0
-
-        while rowsProcessed < height {
-            let endRow = min(rowsProcessed + chunkSize, height)
-
-            for y in rowsProcessed..<endRow {
-                let rowOffset = y * width
-                for x in 0..<width {
-                    // Only process dark pixels that are likely to be pupil
-                    if data[rowOffset + x] <= threshold {
-                        sumX += x
-                        sumY += y
-                        count += 1
-                    }
-                }
-            }
-
-            rowsProcessed = endRow
-
-            // Early exit if we've found enough pixels for a reasonable estimate
-            if count > 25 { // Early termination condition
-                break
-            }
-        }
+        // Process entire image to get accurate centroid
+        for y in 0..<height {
+            let rowOffset = y * width
+            for x in 0..<width {
+                if data[rowOffset + x] < threshold {
+                    sumX += x
+                    sumY += y
+                    count += 1
+                }
+            }
+        }

-        guard count > 10 else { return nil } // Need minimum pixels for valid pupil
+        if enableDiagnosticLogging && count < 5 {
+            logDebug("👁 PupilDetector: Dark pixel count = \(count) (need >= 5)")
+        }
+
+        // Minimum 5 pixels for valid pupil (reduced from 10 for small eye regions)
+        guard count >= 5 else { return nil }

         return (
             x: Double(sumX) / Double(count),
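The centroid arithmetic after binarization, checked on a toy buffer (a hedged, self-contained sketch; the 4x3 image is invented):

    // 0 = pupil pixel, 255 = background; dark 2x2 block at x = 1...2, y = 1...2
    let width = 4, height = 3
    let data: [UInt8] = [255, 255, 255, 255,
                         255,   0,   0, 255,
                         255,   0,   0, 255]
    var sumX = 0, sumY = 0, count = 0
    for y in 0..<height {
        for x in 0..<width where data[y * width + x] < 128 {
            sumX += x; sumY += y; count += 1
        }
    }
    // Prints 1.5 1.5 - the center of the dark block (count here is 4, below the
    // real guard of 5; this only illustrates the arithmetic).
    print(Double(sumX) / Double(count), Double(sumY) / Double(count))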
@@ -775,11 +856,14 @@ final class PupilDetector: @unchecked Sendable {
         faceBoundingBox: CGRect,
         imageSize: CGSize
     ) -> [CGPoint] {
+        // Vision uses bottom-left origin (normalized 0-1), CVPixelBuffer uses top-left
+        // We need to flip Y: flippedY = 1.0 - y
         return landmarks.normalizedPoints.map { point in
             let imageX =
                 (faceBoundingBox.origin.x + point.x * faceBoundingBox.width) * imageSize.width
-            let imageY =
-                (faceBoundingBox.origin.y + point.y * faceBoundingBox.height) * imageSize.height
+            // Flip Y coordinate for pixel buffer coordinate system
+            let flippedY = 1.0 - (faceBoundingBox.origin.y + point.y * faceBoundingBox.height)
+            let imageY = flippedY * imageSize.height
             return CGPoint(x: imageX, y: imageY)
         }
     }
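A numeric check of the Y flip (values invented): a landmark near the top of the frame sits at a large y in Vision's bottom-left space, and must land near row 0 in the pixel buffer's top-left space:

    let normalizedY = 0.9             // near the top in Vision coordinates
    let imageHeight = 480.0
    let flippedY = 1.0 - normalizedY  // 0.1
    print(flippedY * imageHeight)     // 48.0, close to the top row, as expected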
@@ -830,17 +914,27 @@ final class PupilDetector: @unchecked Sendable {
     private nonisolated static func saveDebugImage(
         data: UnsafePointer<UInt8>, width: Int, height: Int, name: String
     ) {
-        guard let cgImage = createCGImage(from: data, width: width, height: height) else { return }
+        guard let cgImage = createCGImage(from: data, width: width, height: height) else {
+            NSLog("⚠️ PupilDetector: createCGImage failed for %@ (%dx%d)", name, width, height)
+            return
+        }

-        let url = URL(fileURLWithPath: "/tmp/\(name).png")
+        let url = URL(fileURLWithPath: "/tmp/gaze_debug/\(name).png")
         guard
             let destination = CGImageDestinationCreateWithURL(
                 url as CFURL, UTType.png.identifier as CFString, 1, nil)
-        else { return }
+        else {
+            NSLog("⚠️ PupilDetector: CGImageDestinationCreateWithURL failed for %@", url.path)
+            return
+        }

         CGImageDestinationAddImage(destination, cgImage, nil)
-        CGImageDestinationFinalize(destination)
-        logDebug("💾 Saved debug image: \(url.path)")
+        let success = CGImageDestinationFinalize(destination)
+        if success {
+            NSLog("💾 Saved debug image: %@", url.path)
+        } else {
+            NSLog("⚠️ PupilDetector: CGImageDestinationFinalize failed for %@", url.path)
+        }
     }

     private nonisolated static func createCGImage(
@@ -848,24 +942,39 @@ final class PupilDetector: @unchecked Sendable {
     )
         -> CGImage?
     {
-        let mutableData = UnsafeMutablePointer<UInt8>.allocate(capacity: width * height)
-        defer { mutableData.deallocate() }
-        memcpy(mutableData, data, width * height)
-
-        guard
-            let context = CGContext(
-                data: mutableData,
-                width: width,
-                height: height,
-                bitsPerComponent: 8,
-                bytesPerRow: width,
-                space: CGColorSpaceCreateDeviceGray(),
-                bitmapInfo: CGImageAlphaInfo.none.rawValue
-            )
-        else {
+        guard width > 0 && height > 0 else {
+            print("⚠️ PupilDetector: Invalid dimensions \(width)x\(height)")
             return nil
         }
-        return context.makeImage()
+
+        // Create a Data object that copies the pixel data
+        let pixelData = Data(bytes: data, count: width * height)
+
+        // Create CGImage from the data using CGDataProvider
+        guard let provider = CGDataProvider(data: pixelData as CFData) else {
+            print("⚠️ PupilDetector: CGDataProvider creation failed")
+            return nil
+        }
+
+        let cgImage = CGImage(
+            width: width,
+            height: height,
+            bitsPerComponent: 8,
+            bitsPerPixel: 8,
+            bytesPerRow: width,
+            space: CGColorSpaceCreateDeviceGray(),
+            bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.none.rawValue),
+            provider: provider,
+            decode: nil,
+            shouldInterpolate: false,
+            intent: .defaultIntent
+        )
+
+        if cgImage == nil {
+            print("⚠️ PupilDetector: CGImage creation failed")
+        }
+
+        return cgImage
     }

     /// Clean up allocated buffers (call on app termination if needed)
Gaze/Views/Components/GazeOverlayView.swift (new file, 123 lines)
@@ -0,0 +1,123 @@
+//
+//  GazeOverlayView.swift
+//  Gaze
+//
+//  Created by Claude on 1/16/26.
+//
+
+import SwiftUI
+
+struct GazeOverlayView: View {
+    @ObservedObject var eyeTrackingService: EyeTrackingService
+
+    var body: some View {
+        VStack(spacing: 8) {
+            inFrameIndicator
+            gazeDirectionGrid
+            ratioDebugView
+        }
+        .padding(12)
+    }
+
+    private var inFrameIndicator: some View {
+        HStack(spacing: 6) {
+            Circle()
+                .fill(eyeTrackingService.isInFrame ? Color.green : Color.red)
+                .frame(width: 10, height: 10)
+            Text(eyeTrackingService.isInFrame ? "In Frame" : "No Face")
+                .font(.caption2)
+                .fontWeight(.semibold)
+                .foregroundColor(.white)
+        }
+        .padding(.horizontal, 10)
+        .padding(.vertical, 6)
+        .background(
+            Capsule()
+                .fill(Color.black.opacity(0.6))
+        )
+    }
+
+    private var gazeDirectionGrid: some View {
+        let currentDirection = eyeTrackingService.gazeDirection
+        let currentPos = currentDirection.gridPosition
+
+        return VStack(spacing: 2) {
+            ForEach(0..<3, id: \.self) { row in
+                HStack(spacing: 2) {
+                    ForEach(0..<3, id: \.self) { col in
+                        let isActive = currentPos.x == col && currentPos.y == row && eyeTrackingService.isInFrame
+                        gridCell(row: row, col: col, isActive: isActive)
+                    }
+                }
+            }
+        }
+        .padding(8)
+        .background(
+            RoundedRectangle(cornerRadius: 8)
+                .fill(Color.black.opacity(0.5))
+        )
+    }
+
+    private func gridCell(row: Int, col: Int, isActive: Bool) -> some View {
+        let direction = directionFor(row: row, col: col)
+
+        return ZStack {
+            RoundedRectangle(cornerRadius: 4)
+                .fill(isActive ? Color.green : Color.white.opacity(0.2))
+
+            Text(direction.rawValue)
+                .font(.system(size: 14, weight: .bold))
+                .foregroundColor(isActive ? .white : .white.opacity(0.6))
+        }
+        .frame(width: 28, height: 28)
+    }
+
+    private func directionFor(row: Int, col: Int) -> GazeDirection {
+        switch (col, row) {
+        case (0, 0): return .upLeft
+        case (1, 0): return .up
+        case (2, 0): return .upRight
+        case (0, 1): return .left
+        case (1, 1): return .center
+        case (2, 1): return .right
+        case (0, 2): return .downLeft
+        case (1, 2): return .down
+        case (2, 2): return .downRight
+        default: return .center
+        }
+    }
+
+    private var ratioDebugView: some View {
+        VStack(alignment: .leading, spacing: 2) {
+            if let leftH = eyeTrackingService.debugLeftPupilRatio,
+                let rightH = eyeTrackingService.debugRightPupilRatio {
+                let avgH = (leftH + rightH) / 2.0
+                Text("H: \(String(format: "%.2f", avgH))")
+                    .font(.system(size: 10, weight: .medium, design: .monospaced))
+                    .foregroundColor(.white)
+            }
+
+            if let leftV = eyeTrackingService.debugLeftVerticalRatio,
+                let rightV = eyeTrackingService.debugRightVerticalRatio {
+                let avgV = (leftV + rightV) / 2.0
+                Text("V: \(String(format: "%.2f", avgV))")
+                    .font(.system(size: 10, weight: .medium, design: .monospaced))
+                    .foregroundColor(.white)
+            }
+        }
+        .padding(.horizontal, 8)
+        .padding(.vertical, 4)
+        .background(
+            RoundedRectangle(cornerRadius: 4)
+                .fill(Color.black.opacity(0.5))
+        )
+    }
+}
+
+#Preview {
+    ZStack {
+        Color.gray
+        GazeOverlayView(eyeTrackingService: EyeTrackingService.shared)
+    }
+    .frame(width: 300, height: 200)
+}
@@ -212,14 +212,18 @@ struct EnforceModeSetupView: View {
         let previewLayer = eyeTrackingService.previewLayer ?? cachedPreviewLayer

         if let layer = previewLayer {
-            CameraPreviewView(previewLayer: layer, borderColor: borderColor)
-                .frame(height: 300)
-                .glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
-                .onAppear {
-                    if cachedPreviewLayer == nil {
-                        cachedPreviewLayer = eyeTrackingService.previewLayer
-                    }
+            ZStack(alignment: .topTrailing) {
+                CameraPreviewView(previewLayer: layer, borderColor: borderColor)
+
+                GazeOverlayView(eyeTrackingService: eyeTrackingService)
+            }
+            .frame(height: 300)
+            .glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
+            .onAppear {
+                if cachedPreviewLayer == nil {
+                    cachedPreviewLayer = eyeTrackingService.previewLayer
                 }
+            }

             /*VStack(alignment: .leading, spacing: 12) {*/
             /*Text("Live Tracking Status")*/