general: cleanup for manual var tuning
Gaze/Constants/EyeTrackingConstants.swift (new file, 44 lines)
@@ -0,0 +1,44 @@
+//
+//  EyeTrackingConstants.swift
+//  Gaze
+//
+//  Created by Mike Freno on 1/14/26.
+//
+
+import Foundation
+
+enum EyeTrackingConstants {
+    // MARK: - Logging
+    /// Interval between log messages in seconds
+    static let logInterval: TimeInterval = 0.5
+
+    // MARK: - Eye Closure Detection
+    /// Threshold for eye closure (smaller value means eye must be more closed to trigger)
+    /// Range: 0.0 to 1.0 (approximate eye opening ratio)
+    static let eyeClosedThreshold: CGFloat = 0.02
+
+    // MARK: - Face Pose Thresholds
+    /// Maximum yaw (left/right head turn) in radians before considering user looking away
+    /// 0.20 radians ≈ 11.5 degrees (tightened from 0.35)
+    static let yawThreshold: Double = 0.20
+
+    /// Pitch threshold for looking UP (above screen).
+    /// Since the camera is at the top, looking at the screen is negative pitch.
+    /// Values > 0.1 imply looking straight ahead or up (away from screen).
+    static let pitchUpThreshold: Double = 0.1
+
+    /// Pitch threshold for looking DOWN (at keyboard/lap).
+    /// Values < -0.45 imply looking too far down.
+    static let pitchDownThreshold: Double = -0.45
+
+    // MARK: - Pupil Tracking Thresholds
+    /// Minimum horizontal pupil ratio (0.0 = right edge, 1.0 = left edge)
+    /// Values below this are considered looking right (camera view)
+    /// Tightened from 0.25 to 0.35
+    static let minPupilRatio: Double = 0.35
+
+    /// Maximum horizontal pupil ratio
+    /// Values above this are considered looking left (camera view)
+    /// Tightened from 0.75 to 0.65
+    static let maxPupilRatio: Double = 0.65
+}
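As a sanity check on the comments above: 0.20 rad × 180/π ≈ 11.5°, and the old 0.35 rad ≈ 20°. Below is a minimal standalone sketch (not part of this commit; the function name is illustrative) of how the pose constants are meant to combine:

    import Foundation

    // Sketch only: sample yaw/pitch values in radians, checked against the
    // constants defined in the new file above.
    func poseLooksAway(yaw: Double, pitch: Double) -> Bool {
        // Head turned more than ~11.5° left/right counts as looking away.
        let yawAway = abs(yaw) > EyeTrackingConstants.yawThreshold
        // On-screen gaze sits in the asymmetric pitch band (-0.45, 0.1),
        // because the camera sits above the screen.
        let pitchAway = pitch > EyeTrackingConstants.pitchUpThreshold
            || pitch < EyeTrackingConstants.pitchDownThreshold
        return yawAway || pitchAway
    }

    // poseLooksAway(yaw: 0.25, pitch: -0.2)  // true: 0.25 rad ≈ 14.3° > 11.5°
    // poseLooksAway(yaw: 0.05, pitch: -0.2)  // false: within both thresholds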
@@ -20,12 +20,13 @@ class EyeTrackingService: NSObject, ObservableObject {
 
     private var captureSession: AVCaptureSession?
     private var videoOutput: AVCaptureVideoDataOutput?
-    private let videoDataOutputQueue = DispatchQueue(label: "com.gaze.videoDataOutput", qos: .userInitiated)
+    private let videoDataOutputQueue = DispatchQueue(
+        label: "com.gaze.videoDataOutput", qos: .userInitiated)
     private var _previewLayer: AVCaptureVideoPreviewLayer?
 
     // Logging throttle
     private var lastLogTime: Date = .distantPast
-    private let logInterval: TimeInterval = 0.5 // Log every 0.5 seconds
+    private let logInterval: TimeInterval = EyeTrackingConstants.logInterval
 
     var previewLayer: AVCaptureVideoPreviewLayer? {
         guard let session = captureSession else {
@@ -104,7 +105,9 @@ class EyeTrackingService: NSObject, ObservableObject {
         session.addInput(videoInput)
 
         let output = AVCaptureVideoDataOutput()
-        output.videoSettings = [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
+        output.videoSettings = [
+            kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
+        ]
         output.setSampleBufferDelegate(self, queue: videoDataOutputQueue)
         output.alwaysDiscardsLateVideoFrames = true
 
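The commit does not show the sample-buffer callback this output feeds. For context, a hypothetical sketch of that plumbing; the Vision request wiring here is an assumption, and only processFaceObservations(_:) actually appears in the diff:

    import AVFoundation
    import Vision

    // Hypothetical sketch (not shown in this commit): the delegate callback
    // that the BGRA output above would feed into.
    extension EyeTrackingService: AVCaptureVideoDataOutputSampleBufferDelegate {
        func captureOutput(
            _ output: AVCaptureOutput,
            didOutput sampleBuffer: CMSampleBuffer,
            from connection: AVCaptureConnection
        ) {
            guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else { return }
            // Run face-landmark detection and hand the results to the
            // processing method shown in the hunks below.
            let request = VNDetectFaceLandmarksRequest { [weak self] request, _ in
                self?.processFaceObservations(request.results as? [VNFaceObservation])
            }
            try? VNImageRequestHandler(cvPixelBuffer: pixelBuffer).perform([request])
        }
    }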
@@ -118,16 +121,7 @@ class EyeTrackingService: NSObject, ObservableObject {
     }
 
     private func processFaceObservations(_ observations: [VNFaceObservation]?) {
-        let shouldLog = Date().timeIntervalSince(lastLogTime) >= logInterval
-
-        if shouldLog {
-            print("🔍 Processing face observations...")
-        }
-
         guard let observations = observations, !observations.isEmpty else {
-            if shouldLog {
-                print("❌ No faces detected")
-            }
             faceDetected = false
             userLookingAtScreen = false
             return
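This hunk strips the per-frame console logging and the shouldLog throttle that gated it. For reference, the removed throttle boils down to a simple time gate; a minimal sketch (the type name is illustrative, not the commit's code):

    import Foundation

    // Sketch of the time-gated logging pattern this hunk removes:
    // log at most once per interval, tracked with a timestamp.
    final class LogThrottle {
        private var lastLogTime: Date = .distantPast
        private let logInterval: TimeInterval = EyeTrackingConstants.logInterval

        /// Returns true at most once per `logInterval`.
        func shouldLog(now: Date = Date()) -> Bool {
            guard now.timeIntervalSince(lastLogTime) >= logInterval else { return false }
            lastLogTime = now
            return true
        }
    }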
@@ -136,53 +130,38 @@ class EyeTrackingService: NSObject, ObservableObject {
         faceDetected = true
         let face = observations.first!
 
-        if shouldLog {
-            print("✅ Face detected. Bounding box: \(face.boundingBox)")
-        }
-
         guard let landmarks = face.landmarks else {
-            if shouldLog {
-                print("❌ No face landmarks detected")
-            }
             return
         }
 
         // Check eye closure
         if let leftEye = landmarks.leftEye,
-            let rightEye = landmarks.rightEye {
-            let eyesClosed = detectEyesClosed(leftEye: leftEye, rightEye: rightEye, shouldLog: shouldLog)
+            let rightEye = landmarks.rightEye
+        {
+            let eyesClosed = detectEyesClosed(
+                leftEye: leftEye, rightEye: rightEye, shouldLog: false)
             self.isEyesClosed = eyesClosed
         }
 
         // Check gaze direction
-        let lookingAway = detectLookingAway(face: face, landmarks: landmarks, shouldLog: shouldLog)
+        let lookingAway = detectLookingAway(face: face, landmarks: landmarks, shouldLog: false)
         userLookingAtScreen = !lookingAway
-
-        if shouldLog {
-            lastLogTime = Date()
-        }
     }
 
-    private func detectEyesClosed(leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D, shouldLog: Bool) -> Bool {
+    private func detectEyesClosed(
+        leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D, shouldLog: Bool
+    ) -> Bool {
         guard leftEye.pointCount >= 2, rightEye.pointCount >= 2 else {
             if shouldLog {
                 print("⚠️ Eye landmarks insufficient for eye closure detection")
             }
             return false
         }
 
         let leftEyeHeight = calculateEyeHeight(leftEye, shouldLog: shouldLog)
         let rightEyeHeight = calculateEyeHeight(rightEye, shouldLog: shouldLog)
 
-        let closedThreshold: CGFloat = 0.02
+        let closedThreshold = EyeTrackingConstants.eyeClosedThreshold
 
         let isClosed = leftEyeHeight < closedThreshold && rightEyeHeight < closedThreshold
 
         if shouldLog {
             print("👁️ Eye closure detection - Left: \(leftEyeHeight) < \(closedThreshold) = \(leftEyeHeight < closedThreshold), Right: \(rightEyeHeight) < \(closedThreshold) = \(rightEyeHeight < closedThreshold)")
             print("👁️ Eyes closed: \(isClosed)")
         }
 
         return isClosed
     }
 
@@ -196,22 +175,24 @@ class EyeTrackingService: NSObject, ObservableObject {
 
         let height = abs(maxY - minY)
 
-        if shouldLog {
-            print("📏 Eye height: \(height)")
-        }
-
         return height
     }
 
-    private func detectLookingAway(face: VNFaceObservation, landmarks: VNFaceLandmarks2D, shouldLog: Bool) -> Bool {
+    private func detectLookingAway(
+        face: VNFaceObservation, landmarks: VNFaceLandmarks2D, shouldLog: Bool
+    ) -> Bool {
         // 1. Face Pose Check (Yaw & Pitch)
         let yaw = face.yaw?.doubleValue ?? 0.0
         let pitch = face.pitch?.doubleValue ?? 0.0
 
-        let yawThreshold = 0.35 // ~20 degrees
-        let pitchThreshold = 0.4 // ~23 degrees
+        let yawThreshold = EyeTrackingConstants.yawThreshold
+        // Pitch check:
+        // - Camera at top = looking at screen is negative pitch
+        // - Looking above screen (straight ahead) is ~0 or positive -> Look Away
+        // - Looking at keyboard/lap is very negative -> Look Away
+        let pitchLookingAway = pitch > EyeTrackingConstants.pitchUpThreshold || pitch < EyeTrackingConstants.pitchDownThreshold
 
-        let poseLookingAway = abs(yaw) > yawThreshold || abs(pitch) > pitchThreshold
+        let poseLookingAway = abs(yaw) > yawThreshold || pitchLookingAway
 
         // 2. Eye Gaze Check (Pupil Position)
         var eyesLookingAway = false
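The behavioral change in this hunk is the move from a symmetric abs(pitch) check to an explicit band: a pose slightly above the screen now counts as looking away. A worked example with a sample value:

    // Worked example (sample value): user looking slightly above the screen.
    let pitch = 0.2

    // Old check: |0.2| < 0.4, so this pose still counted as "on screen".
    let oldAway = abs(pitch) > 0.4                                // false

    // New band (-0.45, 0.1): 0.2 > 0.1, so it now counts as looking away.
    let newAway = pitch > EyeTrackingConstants.pitchUpThreshold
        || pitch < EyeTrackingConstants.pitchDownThreshold        // true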
@@ -219,7 +200,8 @@ class EyeTrackingService: NSObject, ObservableObject {
         if let leftEye = landmarks.leftEye,
             let rightEye = landmarks.rightEye,
             let leftPupil = landmarks.leftPupil,
-            let rightPupil = landmarks.rightPupil {
+            let rightPupil = landmarks.rightPupil
+        {
 
             let leftRatio = calculatePupilHorizontalRatio(eye: leftEye, pupil: leftPupil)
             let rightRatio = calculatePupilHorizontalRatio(eye: rightEye, pupil: rightPupil)
@@ -228,34 +210,24 @@ class EyeTrackingService: NSObject, ObservableObject {
             // (0.0 = extreme right, 1.0 = extreme left relative to face)
             // Note: Camera is mirrored, so logic might be inverted
 
-            let minRatio = 0.25
-            let maxRatio = 0.75
+            let minRatio = EyeTrackingConstants.minPupilRatio
+            let maxRatio = EyeTrackingConstants.maxPupilRatio
 
             let leftLookingAway = leftRatio < minRatio || leftRatio > maxRatio
             let rightLookingAway = rightRatio < minRatio || rightRatio > maxRatio
 
             // Consider looking away if BOTH eyes are off-center
             eyesLookingAway = leftLookingAway && rightLookingAway
 
-            if shouldLog {
-                print("👁️ Pupil Ratios - Left: \(String(format: "%.2f", leftRatio)), Right: \(String(format: "%.2f", rightRatio))")
-                print("👁️ Eyes Looking Away: \(eyesLookingAway)")
-            }
         }
 
         let isLookingAway = poseLookingAway || eyesLookingAway
-
-        if shouldLog {
-            print("📊 Gaze detection - Yaw: \(yaw), Pitch: \(pitch)")
-            print("📉 Thresholds - Yaw: \(yawThreshold), Pitch: \(pitchThreshold)")
-            print("🎯 Looking away: \(isLookingAway) (Pose: \(poseLookingAway), Eyes: \(eyesLookingAway))")
-            print("👀 User looking at screen: \(!isLookingAway)")
-        }
 
         return isLookingAway
     }
 
-    private func calculatePupilHorizontalRatio(eye: VNFaceLandmarkRegion2D, pupil: VNFaceLandmarkRegion2D) -> Double {
+    private func calculatePupilHorizontalRatio(
+        eye: VNFaceLandmarkRegion2D, pupil: VNFaceLandmarkRegion2D
+    ) -> Double {
         let eyePoints = eye.normalizedPoints
         let pupilPoints = pupil.normalizedPoints
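The diff is truncated here, so the body of calculatePupilHorizontalRatio is not shown. A plausible completion (an assumption, not the commit's code) normalizes the pupil's x position against the eye's horizontal extent:

    import Vision

    // Assumed sketch: the commit's helper body is cut off above. This version
    // returns 0.0 at one eye corner and 1.0 at the other, matching the
    // "0.0 = right edge, 1.0 = left edge" convention in the constants file.
    func pupilHorizontalRatio(
        eye: VNFaceLandmarkRegion2D, pupil: VNFaceLandmarkRegion2D
    ) -> Double {
        let eyeXs = eye.normalizedPoints.map(\.x)
        let pupilXs = pupil.normalizedPoints.map(\.x)
        guard let minX = eyeXs.min(), let maxX = eyeXs.max(),
              maxX > minX, !pupilXs.isEmpty
        else { return 0.5 }  // treat degenerate geometry as "centered"

        // Pupil regions typically carry a single point; average to be safe.
        let pupilX = pupilXs.reduce(0, +) / CGFloat(pupilXs.count)
        return Double((pupilX - minX) / (maxX - minX))
    }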