general: progress on eye tracking

@@ -123,6 +123,8 @@ class EnforceModeService: ObservableObject {
         do {
             try await eyeTrackingService.startEyeTracking()
             isCameraActive = true
+            lastFaceDetectionTime = Date() // Reset grace period
+            startFaceDetectionTimer()
             print("✓ Camera active")
         } catch {
             print("⚠️ Failed to start camera: \(error.localizedDescription)")
@@ -137,9 +139,7 @@ class EnforceModeService: ObservableObject {
         isCameraActive = false
         userCompliedWithBreak = false

-        // Invalidate the face detection timer when stopping camera
-        faceDetectionTimer?.invalidate()
-        faceDetectionTimer = nil
+        stopFaceDetectionTimer()
     }

     func checkUserCompliance() {
@@ -159,35 +159,42 @@ class EnforceModeService: ObservableObject {
     }

     private func handleFaceDetectionChange(faceDetected: Bool) {
-        // Update the last face detection time
+        // Update the last face detection time only when a face is actively detected
         if faceDetected {
             lastFaceDetectionTime = Date()
         }
+    }

-        // If we are in enforce mode and camera is active, start checking for person presence
-        if isEnforceModeEnabled && isCameraActive {
-            // Cancel any existing timer and restart it
-            faceDetectionTimer?.invalidate()
-            // Create a new timer to check for extended periods without face detection
-            faceDetectionTimer = Timer.scheduledTimer(withTimeInterval: 0.5, repeats: true) { [weak self] _ in
-                guard let self = self else { return }
-
-                // Dispatch to main actor to safely access main actor-isolated properties and methods
-                Task { @MainActor in
-                    let timeSinceLastDetection = Date().timeIntervalSince(self.lastFaceDetectionTime)
-
-                    // If person has not been detected for too long, temporarily disable enforce mode
-                    if timeSinceLastDetection > self.faceDetectionTimeout {
-                        print("⏰ Person not detected for \(self.faceDetectionTimeout)s. Temporarily disabling enforce mode.")
-                        self.isEnforceModeEnabled = false
-                        self.stopCamera()
-                    }
-                }
+    private func startFaceDetectionTimer() {
+        stopFaceDetectionTimer()
+        // Check every 1 second
+        faceDetectionTimer = Timer.scheduledTimer(withTimeInterval: 1.0, repeats: true) { [weak self] _ in
+            Task { @MainActor [weak self] in
+                self?.checkFaceDetectionTimeout()
             }
         }
     }

+    private func stopFaceDetectionTimer() {
+        faceDetectionTimer?.invalidate()
+        faceDetectionTimer = nil
+    }
+
+    private func checkFaceDetectionTimeout() {
+        guard isEnforceModeEnabled && isCameraActive else {
+            stopFaceDetectionTimer()
+            return
+        }
+
+        let timeSinceLastDetection = Date().timeIntervalSince(lastFaceDetectionTime)
+
+        // If person has not been detected for too long, temporarily disable enforce mode
+        if timeSinceLastDetection > faceDetectionTimeout {
+            print("⏰ Person not detected for \(faceDetectionTimeout)s. Temporarily disabling enforce mode.")
+            disableEnforceMode()
+        }
+    }

     func handleReminderDismissed() {
         // Stop camera when reminder is dismissed, but also check if we should disable enforce mode entirely
         // This helps in case a user closes settings window while a reminder is active
@@ -206,6 +213,8 @@ class EnforceModeService: ObservableObject {
         do {
             try await eyeTrackingService.startEyeTracking()
             isCameraActive = true
+            lastFaceDetectionTime = Date() // Reset grace period
+            startFaceDetectionTimer()
             print("✓ Test mode camera active")
         } catch {
             print("⚠️ Failed to start test mode camera: \(error.localizedDescription)")

@@ -23,6 +23,10 @@ class EyeTrackingService: NSObject, ObservableObject {
     private let videoDataOutputQueue = DispatchQueue(label: "com.gaze.videoDataOutput", qos: .userInitiated)
     private var _previewLayer: AVCaptureVideoPreviewLayer?

+    // Logging throttle
+    private var lastLogTime: Date = .distantPast
+    private let logInterval: TimeInterval = 0.5 // Log every 0.5 seconds
+
     var previewLayer: AVCaptureVideoPreviewLayer? {
         guard let session = captureSession else {
             _previewLayer = nil
@@ -114,9 +118,16 @@ class EyeTrackingService: NSObject, ObservableObject {
     }

     private func processFaceObservations(_ observations: [VNFaceObservation]?) {
-        print("🔍 Processing face observations...")
+        let shouldLog = Date().timeIntervalSince(lastLogTime) >= logInterval
+
+        if shouldLog {
+            print("🔍 Processing face observations...")
+        }
+
         guard let observations = observations, !observations.isEmpty else {
-            print("❌ No faces detected")
+            if shouldLog {
+                print("❌ No faces detected")
+            }
             faceDetected = false
             userLookingAtScreen = false
             return
@@ -125,60 +136,58 @@ class EyeTrackingService: NSObject, ObservableObject {
         faceDetected = true
         let face = observations.first!

-        print("✅ Face detected. Bounding box: \(face.boundingBox)")
+        if shouldLog {
+            print("✅ Face detected. Bounding box: \(face.boundingBox)")
+        }

         guard let landmarks = face.landmarks else {
-            print("❌ No face landmarks detected")
+            if shouldLog {
+                print("❌ No face landmarks detected")
+            }
             return
         }

-        // Log eye landmarks
+        // Check eye closure
         if let leftEye = landmarks.leftEye,
            let rightEye = landmarks.rightEye {
-            print("👁️ Left eye landmarks: \(leftEye.pointCount) points")
-            print("👁️ Right eye landmarks: \(rightEye.pointCount) points")
-
-            let leftEyeHeight = calculateEyeHeight(leftEye)
-            let rightEyeHeight = calculateEyeHeight(rightEye)
-
-            print("👁️ Left eye height: \(leftEyeHeight)")
-            print("👁️ Right eye height: \(rightEyeHeight)")
-
-            let eyesClosed = detectEyesClosed(leftEye: leftEye, rightEye: rightEye)
+            let eyesClosed = detectEyesClosed(leftEye: leftEye, rightEye: rightEye, shouldLog: shouldLog)
             self.isEyesClosed = eyesClosed
-            print("👁️ Eyes closed: \(eyesClosed)")
         }

-        // Log gaze detection
-        let lookingAway = detectLookingAway(face: face, landmarks: landmarks)
+        // Check gaze direction
+        let lookingAway = detectLookingAway(face: face, landmarks: landmarks, shouldLog: shouldLog)
         userLookingAtScreen = !lookingAway

-        print("📊 Gaze angle - Yaw: \(face.yaw?.doubleValue ?? 0.0), Roll: \(face.roll?.doubleValue ?? 0.0)")
-        print("🎯 Looking away: \(lookingAway)")
-        print("👀 User looking at screen: \(userLookingAtScreen)")
+        if shouldLog {
+            lastLogTime = Date()
+        }
     }

-    private func detectEyesClosed(leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D) -> Bool {
+    private func detectEyesClosed(leftEye: VNFaceLandmarkRegion2D, rightEye: VNFaceLandmarkRegion2D, shouldLog: Bool) -> Bool {
         guard leftEye.pointCount >= 2, rightEye.pointCount >= 2 else {
-            print("⚠️ Eye landmarks insufficient for eye closure detection")
+            if shouldLog {
+                print("⚠️ Eye landmarks insufficient for eye closure detection")
+            }
             return false
         }

-        let leftEyeHeight = calculateEyeHeight(leftEye)
-        let rightEyeHeight = calculateEyeHeight(rightEye)
+        let leftEyeHeight = calculateEyeHeight(leftEye, shouldLog: shouldLog)
+        let rightEyeHeight = calculateEyeHeight(rightEye, shouldLog: shouldLog)

         let closedThreshold: CGFloat = 0.02

         let isClosed = leftEyeHeight < closedThreshold && rightEyeHeight < closedThreshold

-        print("👁️ Eye closure detection - Left: \(leftEyeHeight) < \(closedThreshold) = \(leftEyeHeight < closedThreshold), Right: \(rightEyeHeight) < \(closedThreshold) = \(rightEyeHeight < closedThreshold)")
+        if shouldLog {
+            print("👁️ Eye closure detection - Left: \(leftEyeHeight) < \(closedThreshold) = \(leftEyeHeight < closedThreshold), Right: \(rightEyeHeight) < \(closedThreshold) = \(rightEyeHeight < closedThreshold)")
+            print("👁️ Eyes closed: \(isClosed)")
+        }

         return isClosed
     }

-    private func calculateEyeHeight(_ eye: VNFaceLandmarkRegion2D) -> CGFloat {
+    private func calculateEyeHeight(_ eye: VNFaceLandmarkRegion2D, shouldLog: Bool) -> CGFloat {
         let points = eye.normalizedPoints
-        print("📏 Eye points count: \(points.count)")
         guard points.count >= 2 else { return 0 }

         let yValues = points.map { $0.y }
@@ -186,26 +195,89 @@ class EyeTrackingService: NSObject, ObservableObject {
         let minY = yValues.min() ?? 0

         let height = abs(maxY - minY)
-        print("📏 Eye height calculation: max(\(maxY)) - min(\(minY)) = \(height)")
+        if shouldLog {
+            print("📏 Eye height: \(height)")
+        }

         return height
     }

-    private func detectLookingAway(face: VNFaceObservation, landmarks: VNFaceLandmarks2D) -> Bool {
+    private func detectLookingAway(face: VNFaceObservation, landmarks: VNFaceLandmarks2D, shouldLog: Bool) -> Bool {
+        // 1. Face Pose Check (Yaw & Pitch)
         let yaw = face.yaw?.doubleValue ?? 0.0
-        let roll = face.roll?.doubleValue ?? 0.0
+        let pitch = face.pitch?.doubleValue ?? 0.0

-        let yawThreshold = 0.35
-        let rollThreshold = 0.4
+        let yawThreshold = 0.35 // ~20 degrees
+        let pitchThreshold = 0.4 // ~23 degrees

-        let isLookingAway = abs(yaw) > yawThreshold || abs(roll) > rollThreshold
+        let poseLookingAway = abs(yaw) > yawThreshold || abs(pitch) > pitchThreshold

-        print("📊 Gaze detection - Yaw: \(yaw), Roll: \(roll)")
-        print("📉 Thresholds - Yaw: \(yawThreshold), Roll: \(rollThreshold)")
-        print("🎯 Looking away result: \(isLookingAway)")
+        // 2. Eye Gaze Check (Pupil Position)
+        var eyesLookingAway = false
+
+        if let leftEye = landmarks.leftEye,
+           let rightEye = landmarks.rightEye,
+           let leftPupil = landmarks.leftPupil,
+           let rightPupil = landmarks.rightPupil {
+
+            let leftRatio = calculatePupilHorizontalRatio(eye: leftEye, pupil: leftPupil)
+            let rightRatio = calculatePupilHorizontalRatio(eye: rightEye, pupil: rightPupil)
+
+            // Normal range for "looking center" is roughly 0.3 to 0.7
+            // (0.0 = extreme right, 1.0 = extreme left relative to face)
+            // Note: Camera is mirrored, so logic might be inverted
+
+            let minRatio = 0.25
+            let maxRatio = 0.75
+
+            let leftLookingAway = leftRatio < minRatio || leftRatio > maxRatio
+            let rightLookingAway = rightRatio < minRatio || rightRatio > maxRatio
+
+            // Consider looking away if BOTH eyes are off-center
+            eyesLookingAway = leftLookingAway && rightLookingAway
+
+            if shouldLog {
+                print("👁️ Pupil Ratios - Left: \(String(format: "%.2f", leftRatio)), Right: \(String(format: "%.2f", rightRatio))")
+                print("👁️ Eyes Looking Away: \(eyesLookingAway)")
+            }
+        }
+
+        let isLookingAway = poseLookingAway || eyesLookingAway
+
+        if shouldLog {
+            print("📊 Gaze detection - Yaw: \(yaw), Pitch: \(pitch)")
+            print("📉 Thresholds - Yaw: \(yawThreshold), Pitch: \(pitchThreshold)")
+            print("🎯 Looking away: \(isLookingAway) (Pose: \(poseLookingAway), Eyes: \(eyesLookingAway))")
+            print("👀 User looking at screen: \(!isLookingAway)")
+        }

         return isLookingAway
     }
+
+    private func calculatePupilHorizontalRatio(eye: VNFaceLandmarkRegion2D, pupil: VNFaceLandmarkRegion2D) -> Double {
+        let eyePoints = eye.normalizedPoints
+        let pupilPoints = pupil.normalizedPoints
+
+        guard !eyePoints.isEmpty, !pupilPoints.isEmpty else { return 0.5 }
+
+        // Get eye horizontal bounds
+        let eyeMinX = eyePoints.map { $0.x }.min() ?? 0
+        let eyeMaxX = eyePoints.map { $0.x }.max() ?? 0
+        let eyeWidth = eyeMaxX - eyeMinX
+
+        guard eyeWidth > 0 else { return 0.5 }
+
+        // Get pupil center X
+        let pupilCenterX = pupilPoints.map { $0.x }.reduce(0, +) / Double(pupilPoints.count)
+
+        // Calculate ratio (0.0 to 1.0)
+        // 0.0 = Right side of eye (camera view)
+        // 1.0 = Left side of eye (camera view)
+        let ratio = (pupilCenterX - eyeMinX) / eyeWidth
+
+        return ratio
+    }
 }

 // MARK: - AVCaptureVideoDataOutputSampleBufferDelegate