general: fixes, log updates

This commit is contained in:
Michael Freno
2026-01-16 01:07:35 -05:00
parent e5646a192f
commit 4ae8d77dab
5 changed files with 345 additions and 259 deletions

View File

@@ -55,38 +55,38 @@ class EnforceModeService: ObservableObject {
// If settings say it's enabled AND camera is authorized, mark as enabled
if settingsEnabled && cameraService.isCameraAuthorized {
isEnforceModeEnabled = true
print("✓ Enforce mode initialized as enabled (camera authorized)")
logDebug("✓ Enforce mode initialized as enabled (camera authorized)")
} else {
isEnforceModeEnabled = false
print("🔒 Enforce mode initialized as disabled")
logDebug("🔒 Enforce mode initialized as disabled")
}
}
func enableEnforceMode() async {
print("🔒 enableEnforceMode called")
logDebug("🔒 enableEnforceMode called")
guard !isEnforceModeEnabled else {
print("⚠️ Enforce mode already enabled")
logError("⚠️ Enforce mode already enabled")
return
}
let cameraService = CameraAccessService.shared
if !cameraService.isCameraAuthorized {
do {
print("🔒 Requesting camera permission...")
logDebug("🔒 Requesting camera permission...")
try await cameraService.requestCameraAccess()
} catch {
print("⚠️ Failed to get camera permission: \(error.localizedDescription)")
logError("⚠️ Failed to get camera permission: \(error.localizedDescription)")
return
}
}
guard cameraService.isCameraAuthorized else {
print("❌ Camera permission denied")
logError("❌ Camera permission denied")
return
}
isEnforceModeEnabled = true
print("✓ Enforce mode enabled (camera will activate before lookaway reminders)")
logDebug("✓ Enforce mode enabled (camera will activate before lookaway reminders)")
}
func disableEnforceMode() {
@@ -95,7 +95,7 @@ class EnforceModeService: ObservableObject {
stopCamera()
isEnforceModeEnabled = false
userCompliedWithBreak = false
print("✓ Enforce mode disabled")
logDebug("✓ Enforce mode disabled")
}
func setTimerEngine(_ engine: TimerEngine) {
@@ -118,23 +118,23 @@ class EnforceModeService: ObservableObject {
guard isEnforceModeEnabled else { return }
guard !isCameraActive else { return }
print("👁️ Starting camera for lookaway reminder (T-\(secondsRemaining)s)")
logDebug("👁️ Starting camera for lookaway reminder (T-\(secondsRemaining)s)")
do {
try await eyeTrackingService.startEyeTracking()
isCameraActive = true
lastFaceDetectionTime = Date() // Reset grace period
startFaceDetectionTimer()
print("✓ Camera active")
logDebug("✓ Camera active")
} catch {
print("⚠️ Failed to start camera: \(error.localizedDescription)")
logError("⚠️ Failed to start camera: \(error.localizedDescription)")
}
}
func stopCamera() {
guard isCameraActive else { return }
print("👁️ Stopping camera")
logDebug("👁️ Stopping camera")
eyeTrackingService.stopEyeTracking()
isCameraActive = false
userCompliedWithBreak = false
@@ -191,7 +191,7 @@ class EnforceModeService: ObservableObject {
// If person has not been detected for too long, temporarily disable enforce mode
if timeSinceLastDetection > faceDetectionTimeout {
print(
logDebug(
"⏰ Person not detected for \(faceDetectionTimeout)s. Temporarily disabling enforce mode."
)
disableEnforceMode()
@@ -210,7 +210,7 @@ class EnforceModeService: ObservableObject {
guard isEnforceModeEnabled else { return }
guard !isCameraActive else { return }
print("🧪 Starting test mode")
logDebug("🧪 Starting test mode")
isTestMode = true
do {
@@ -218,9 +218,9 @@ class EnforceModeService: ObservableObject {
isCameraActive = true
lastFaceDetectionTime = Date() // Reset grace period
startFaceDetectionTimer()
print("✓ Test mode camera active")
logDebug("✓ Test mode camera active")
} catch {
print("⚠️ Failed to start test mode camera: \(error.localizedDescription)")
logError("⚠️ Failed to start test mode camera: \(error.localizedDescription)")
isTestMode = false
}
}
@@ -228,7 +228,7 @@ class EnforceModeService: ObservableObject {
func stopTestMode() {
guard isTestMode else { return }
print("🧪 Stopping test mode")
logDebug("🧪 Stopping test mode")
stopCamera()
isTestMode = false
}

View File

@@ -14,11 +14,11 @@
// - Efficient contour detection with union-find
//
import CoreImage
import Vision
import Accelerate
import CoreImage
import ImageIO
import UniformTypeIdentifiers
import Vision
struct PupilPosition: Equatable, Sendable {
let x: CGFloat
@@ -137,7 +137,7 @@ struct PupilDetectorMetrics: Sendable {
final class PupilDetector: @unchecked Sendable {
// MARK: - Thread Safety
// MARK: - Thread Safety
private static let lock = NSLock()
@@ -151,7 +151,9 @@ final class PupilDetector: @unchecked Sendable {
private static var _debugImageCounter = 0
private static var _frameCounter = 0
private static var _lastPupilPositions: (left: PupilPosition?, right: PupilPosition?) = (nil, nil)
private static var _lastPupilPositions: (left: PupilPosition?, right: PupilPosition?) = (
nil, nil
)
private static var _metrics = PupilDetectorMetrics()
static let calibration = PupilCalibration()
@@ -187,7 +189,8 @@ final class PupilDetector: @unchecked Sendable {
var weights = [[Float]](repeating: [Float](repeating: 0, count: d), count: d)
for dy in 0..<d {
for dx in 0..<d {
let dist = sqrt(Float((dy - radius) * (dy - radius) + (dx - radius) * (dx - radius)))
let dist = sqrt(
Float((dy - radius) * (dy - radius) + (dx - radius) * (dx - radius)))
weights[dy][dx] = exp(-dist * dist / (2 * sigmaSpace * sigmaSpace))
}
}
@@ -249,7 +252,9 @@ final class PupilDetector: @unchecked Sendable {
let elapsed = (CFAbsoluteTimeGetCurrent() - startTime) * 1000
metrics.recordProcessingTime(elapsed)
if metrics.processedFrameCount % 30 == 0 {
print("👁 PupilDetector: \(String(format: "%.2f", elapsed))ms (avg: \(String(format: "%.2f", metrics.averageProcessingTimeMs))ms)")
print(
"👁 PupilDetector: \(String(format: "%.2f", elapsed))ms (avg: \(String(format: "%.2f", metrics.averageProcessingTimeMs))ms)"
)
}
}
}
@@ -273,16 +278,21 @@ final class PupilDetector: @unchecked Sendable {
let frameSize = frameWidth * frameHeight
// Step 3: Ensure buffers are allocated
ensureBufferCapacity(frameSize: frameSize, eyeSize: Int(eyeRegion.frame.width * eyeRegion.frame.height))
ensureBufferCapacity(
frameSize: frameSize, eyeSize: Int(eyeRegion.frame.width * eyeRegion.frame.height))
guard let grayBuffer = grayscaleBuffer,
let eyeBuf = eyeBuffer,
let tmpBuf = tempBuffer else {
let eyeBuf = eyeBuffer,
let tmpBuf = tempBuffer
else {
return nil
}
// Step 4: Extract grayscale data using vImage
guard extractGrayscaleDataOptimized(from: pixelBuffer, to: grayBuffer, width: frameWidth, height: frameHeight) else {
guard
extractGrayscaleDataOptimized(
from: pixelBuffer, to: grayBuffer, width: frameWidth, height: frameHeight)
else {
return nil
}
@@ -293,14 +303,16 @@ final class PupilDetector: @unchecked Sendable {
// Early exit for tiny regions (less than 10x10 pixels)
guard eyeWidth >= 10, eyeHeight >= 10 else { return nil }
guard isolateEyeWithMaskOptimized(
frameData: grayBuffer,
frameWidth: frameWidth,
frameHeight: frameHeight,
eyePoints: eyePoints,
region: eyeRegion,
output: eyeBuf
) else {
guard
isolateEyeWithMaskOptimized(
frameData: grayBuffer,
frameWidth: frameWidth,
frameHeight: frameHeight,
eyePoints: eyePoints,
region: eyeRegion,
output: eyeBuf
)
else {
return nil
}
@@ -326,16 +338,20 @@ final class PupilDetector: @unchecked Sendable {
// Debug: Save processed images if enabled
if enableDebugImageSaving && debugImageCounter < 10 {
saveDebugImage(data: tmpBuf, width: eyeWidth, height: eyeHeight, name: "processed_eye_\(debugImageCounter)")
saveDebugImage(
data: tmpBuf, width: eyeWidth, height: eyeHeight,
name: "processed_eye_\(debugImageCounter)")
debugImageCounter += 1
}
// Step 8: Find contours and compute centroid
guard let (centroidX, centroidY) = findPupilFromContoursOptimized(
data: tmpBuf,
width: eyeWidth,
height: eyeHeight
) else {
guard
let (centroidX, centroidY) = findPupilFromContoursOptimized(
data: tmpBuf,
width: eyeWidth,
height: eyeHeight
)
else {
return nil
}
@@ -405,10 +421,10 @@ final class PupilDetector: @unchecked Sendable {
// BGRA to Planar8 grayscale using luminance coefficients
// Y = 0.299*R + 0.587*G + 0.114*B
let matrix: [Int16] = [
28, // B coefficient (0.114 * 256 ≈ 29, adjusted)
28, // B coefficient (0.114 * 256 ≈ 29, adjusted)
150, // G coefficient (0.587 * 256 ≈ 150)
77, // R coefficient (0.299 * 256 ≈ 77)
0 // A coefficient
77, // R coefficient (0.299 * 256 ≈ 77)
0, // A coefficient
]
let divisor: Int32 = 256
@@ -425,14 +441,17 @@ final class PupilDetector: @unchecked Sendable {
return error == kvImageNoError
case kCVPixelFormatType_420YpCbCr8BiPlanarFullRange,
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
guard let yPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0) else { return false }
kCVPixelFormatType_420YpCbCr8BiPlanarVideoRange:
guard let yPlane = CVPixelBufferGetBaseAddressOfPlane(pixelBuffer, 0) else {
return false
}
let yBytesPerRow = CVPixelBufferGetBytesPerRowOfPlane(pixelBuffer, 0)
let yBuffer = yPlane.assumingMemoryBound(to: UInt8.self)
// Direct copy of Y plane (already grayscale)
for y in 0..<height {
memcpy(output.advanced(by: y * width), yBuffer.advanced(by: y * yBytesPerRow), width)
memcpy(
output.advanced(by: y * width), yBuffer.advanced(by: y * yBytesPerRow), width)
}
return true
@@ -506,12 +525,15 @@ final class PupilDetector: @unchecked Sendable {
return true
}
@inline(__always)
private static func pointInPolygonFast(px: Float, py: Float, edges: [(x1: Float, y1: Float, x2: Float, y2: Float)]) -> Bool {
@inline(__always)
private static func pointInPolygonFast(
px: Float, py: Float, edges: [(x1: Float, y1: Float, x2: Float, y2: Float)]
) -> Bool {
var inside = false
for edge in edges {
if ((edge.y1 > py) != (edge.y2 > py)) &&
(px < (edge.x2 - edge.x1) * (py - edge.y1) / (edge.y2 - edge.y1) + edge.x1) {
if ((edge.y1 > py) != (edge.y2 > py))
&& (px < (edge.x2 - edge.x1) * (py - edge.y1) / (edge.y2 - edge.y1) + edge.x1)
{
inside = !inside
}
}
@@ -538,7 +560,8 @@ final class PupilDetector: @unchecked Sendable {
gaussianBlurOptimized(input: input, output: workBuffer, width: width, height: height)
// 2. Erosion with vImage (3 iterations)
erodeOptimized(input: workBuffer, output: output, width: width, height: height, iterations: 3)
erodeOptimized(
input: workBuffer, output: output, width: width, height: height, iterations: 3)
// 3. Simple binary threshold (no vDSP overhead for small buffers)
for i in 0..<size {
@@ -621,14 +644,16 @@ final class PupilDetector: @unchecked Sendable {
let kernel: [UInt8] = [
1, 1, 1,
1, 1, 1,
1, 1, 1
1, 1, 1,
]
for i in 0..<iterations {
if i % 2 == 0 {
vImageErode_Planar8(&srcBuffer, &dstBuffer, 0, 0, kernel, 3, 3, vImage_Flags(kvImageNoFlags))
vImageErode_Planar8(
&srcBuffer, &dstBuffer, 0, 0, kernel, 3, 3, vImage_Flags(kvImageNoFlags))
} else {
vImageErode_Planar8(&dstBuffer, &srcBuffer, 0, 0, kernel, 3, 3, vImage_Flags(kvImageNoFlags))
vImageErode_Planar8(
&dstBuffer, &srcBuffer, 0, 0, kernel, 3, 3, vImage_Flags(kvImageNoFlags))
}
}
@@ -703,8 +728,10 @@ final class PupilDetector: @unchecked Sendable {
imageSize: CGSize
) -> [CGPoint] {
return landmarks.normalizedPoints.map { point in
let imageX = (faceBoundingBox.origin.x + point.x * faceBoundingBox.width) * imageSize.width
let imageY = (faceBoundingBox.origin.y + point.y * faceBoundingBox.height) * imageSize.height
let imageX =
(faceBoundingBox.origin.x + point.x * faceBoundingBox.width) * imageSize.width
let imageY =
(faceBoundingBox.origin.y + point.y * faceBoundingBox.height) * imageSize.height
return CGPoint(x: imageX, y: imageY)
}
}
@@ -750,31 +777,40 @@ final class PupilDetector: @unchecked Sendable {
// MARK: - Debug Helpers
private static func saveDebugImage(data: UnsafePointer<UInt8>, width: Int, height: Int, name: String) {
private static func saveDebugImage(
data: UnsafePointer<UInt8>, width: Int, height: Int, name: String
) {
guard let cgImage = createCGImage(from: data, width: width, height: height) else { return }
let url = URL(fileURLWithPath: "/tmp/\(name).png")
guard let destination = CGImageDestinationCreateWithURL(url as CFURL, UTType.png.identifier as CFString, 1, nil) else { return }
guard
let destination = CGImageDestinationCreateWithURL(
url as CFURL, UTType.png.identifier as CFString, 1, nil)
else { return }
CGImageDestinationAddImage(destination, cgImage, nil)
CGImageDestinationFinalize(destination)
print("💾 Saved debug image: \(url.path)")
}
private static func createCGImage(from data: UnsafePointer<UInt8>, width: Int, height: Int) -> CGImage? {
private static func createCGImage(from data: UnsafePointer<UInt8>, width: Int, height: Int)
-> CGImage?
{
let mutableData = UnsafeMutablePointer<UInt8>.allocate(capacity: width * height)
defer { mutableData.deallocate() }
memcpy(mutableData, data, width * height)
guard let context = CGContext(
data: mutableData,
width: width,
height: height,
bitsPerComponent: 8,
bytesPerRow: width,
space: CGColorSpaceCreateDeviceGray(),
bitmapInfo: CGImageAlphaInfo.none.rawValue
) else {
guard
let context = CGContext(
data: mutableData,
width: width,
height: height,
bitsPerComponent: 8,
bytesPerRow: width,
space: CGColorSpaceCreateDeviceGray(),
bitmapInfo: CGImageAlphaInfo.none.rawValue
)
else {
return nil
}
return context.makeImage()

View File

@@ -7,6 +7,7 @@
import SwiftUI
import XCTest
@testable import Gaze
@MainActor
@@ -39,12 +40,12 @@ final class OnboardingNavigationTests: XCTestCase {
// Simulate moving through pages
let pages = [
"Welcome", // 0
"LookAway", // 1
"Blink", // 2
"Posture", // 3
"General", // 4
"Completion" // 5
"Welcome", // 0
"LookAway", // 1
"Blink", // 2
"Posture", // 3
"General", // 4
"Completion", // 5
]
for (index, pageName) in pages.enumerated() {
@@ -119,7 +120,8 @@ final class OnboardingNavigationTests: XCTestCase {
var lookAwayConfig = testEnv.settingsManager.settings.lookAwayTimer
lookAwayConfig.enabled = true
lookAwayConfig.intervalSeconds = 1200
testEnv.settingsManager.updateTimerConfiguration(for: .lookAway, configuration: lookAwayConfig)
testEnv.settingsManager.updateTimerConfiguration(
for: .lookAway, configuration: lookAwayConfig)
var blinkConfig = testEnv.settingsManager.settings.blinkTimer
blinkConfig.enabled = true
@@ -129,7 +131,8 @@ final class OnboardingNavigationTests: XCTestCase {
var postureConfig = testEnv.settingsManager.settings.postureTimer
postureConfig.enabled = true
postureConfig.intervalSeconds = 1800
testEnv.settingsManager.updateTimerConfiguration(for: .posture, configuration: postureConfig)
testEnv.settingsManager.updateTimerConfiguration(
for: .posture, configuration: postureConfig)
// Verify all configurations
let allConfigs = testEnv.settingsManager.allTimerConfigurations()
@@ -147,7 +150,8 @@ final class OnboardingNavigationTests: XCTestCase {
// Configure only some timers
var lookAwayConfig = testEnv.settingsManager.settings.lookAwayTimer
lookAwayConfig.enabled = true
testEnv.settingsManager.updateTimerConfiguration(for: .lookAway, configuration: lookAwayConfig)
testEnv.settingsManager.updateTimerConfiguration(
for: .lookAway, configuration: lookAwayConfig)
var blinkConfig = testEnv.settingsManager.settings.blinkTimer
blinkConfig.enabled = false
@@ -177,7 +181,8 @@ final class OnboardingNavigationTests: XCTestCase {
var lookAwayConfig = testEnv.settingsManager.settings.lookAwayTimer
lookAwayConfig.enabled = true
lookAwayConfig.intervalSeconds = 1200
testEnv.settingsManager.updateTimerConfiguration(for: .lookAway, configuration: lookAwayConfig)
testEnv.settingsManager.updateTimerConfiguration(
for: .lookAway, configuration: lookAwayConfig)
// Page 2: Blink Setup
var blinkConfig = testEnv.settingsManager.settings.blinkTimer
@@ -188,7 +193,8 @@ final class OnboardingNavigationTests: XCTestCase {
// Page 3: Posture Setup
var postureConfig = testEnv.settingsManager.settings.postureTimer
postureConfig.enabled = false // User chooses to disable this one
testEnv.settingsManager.updateTimerConfiguration(for: .posture, configuration: postureConfig)
testEnv.settingsManager.updateTimerConfiguration(
for: .posture, configuration: postureConfig)
// Page 4: General Settings
testEnv.settingsManager.settings.playSounds = true
@@ -213,7 +219,8 @@ final class OnboardingNavigationTests: XCTestCase {
// Configure on page 1
var lookAwayConfig = testEnv.settingsManager.settings.lookAwayTimer
lookAwayConfig.intervalSeconds = 1500
testEnv.settingsManager.updateTimerConfiguration(for: .lookAway, configuration: lookAwayConfig)
testEnv.settingsManager.updateTimerConfiguration(
for: .lookAway, configuration: lookAwayConfig)
// Move forward to page 2
var blinkConfig = testEnv.settingsManager.settings.blinkTimer

View File

@@ -44,3 +44,4 @@ final class ExampleUITests: XCTestCase {
}
}
}

42
run
View File

@@ -107,6 +107,32 @@ print_errors() {
echo "================================================================================"
}
# Pretty prints diagnostic warnings from output (LSP and compiler warnings)
# Scans captured build/test output for Swift diagnostic warnings and
# pretty-prints them in a banner-delimited section.
# $1: the full captured command output (e.g. xcodebuild stdout+stderr).
print_warnings() {
local output="$1"
echo ""
echo "⚠️ Diagnostic Warnings:"
echo "================================================================================"
# Extract Swift compiler warnings in the format: /path/file.swift:line:col: warning: message
# NOTE(review): only matches warnings prefixed with a .swift file path; warnings
# emitted without a source location will not be captured — confirm acceptable.
local warnings
# sed prepends a space to each matched line for indented display.
warnings=$(echo "$output" | grep -E "\.swift:[0-9]+:[0-9]+: warning:" | sed 's/^/ /')
if [ -n "$warnings" ]; then
# Count total warnings
# (one matched line per warning; tr strips padding from wc output)
local count
count=$(echo "$warnings" | wc -l | tr -d ' ')
echo " Found $count warning(s):"
echo ""
echo "$warnings"
else
echo " No warnings found."
fi
echo "================================================================================"
}
# Launches the built application
launch_app() {
local build_dir
@@ -235,6 +261,11 @@ case "$ACTION" in
if [ $? -eq 0 ]; then
handle_build_success
echo "💡 The app is located at: build/Debug/Gaze.app"
# Show warnings in verbose mode
if [ "$VERBOSE" = true ]; then
print_warnings "$COMMAND_OUTPUT"
fi
else
echo "❌ Build failed!"
print_errors "$COMMAND_OUTPUT" "Build"
@@ -248,6 +279,11 @@ case "$ACTION" in
if [ $? -eq 0 ]; then
echo "✅ Tests passed!"
# Show warnings in verbose mode
if [ "$VERBOSE" = true ]; then
print_warnings "$COMMAND_OUTPUT"
fi
else
echo "❌ Tests failed!"
print_errors "$COMMAND_OUTPUT" "Test"
@@ -264,6 +300,12 @@ case "$ACTION" in
if [ $? -eq 0 ]; then
handle_build_success
# Show warnings in verbose mode
if [ "$VERBOSE" = true ]; then
print_warnings "$COMMAND_OUTPUT"
fi
launch_app
else
echo "❌ Build failed!"