commit 6e41c4059c
parent 7d6e51a183
Author: Michael Freno
Date: 2026-01-30 12:20:56 -05:00

7 changed files with 55 additions and 39 deletions

View File

@@ -10,16 +10,20 @@ import Combine
 import Foundation

 protocol CameraSessionDelegate: AnyObject {
-    nonisolated func cameraSession(
+    @MainActor func cameraSession(
         _ manager: CameraSessionManager,
         didOutput pixelBuffer: CVPixelBuffer,
         imageSize: CGSize
     )
 }

+private struct PixelBufferBox: @unchecked Sendable {
+    let buffer: CVPixelBuffer
+}
+
 final class CameraSessionManager: NSObject, ObservableObject {
     @Published private(set) var isRunning = false

-    weak var delegate: CameraSessionDelegate?
+    nonisolated(unsafe) weak var delegate: CameraSessionDelegate?

     private var captureSession: AVCaptureSession?
     private var videoOutput: AVCaptureVideoDataOutput?
@@ -116,6 +120,11 @@ extension CameraSessionManager: AVCaptureVideoDataOutputSampleBufferDelegate {
             height: CVPixelBufferGetHeight(pixelBuffer)
         )
-        delegate?.cameraSession(self, didOutput: pixelBuffer, imageSize: size)
+        let bufferBox = PixelBufferBox(buffer: pixelBuffer)
+        DispatchQueue.main.async { [weak self, bufferBox] in
+            guard let self else { return }
+            self.delegate?.cameraSession(self, didOutput: bufferBox.buffer, imageSize: size)
+        }
     }
 }
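
The change above is the box-and-hop pattern for moving a non-Sendable CVPixelBuffer from the capture queue onto the main actor: wrap the buffer in an @unchecked Sendable box, then deliver it via the main queue. A minimal sketch of the same idea, with illustrative names (SendableBox, FrameConsumer, and FrameSource are not from this repo):

import CoreGraphics
import CoreVideo
import Dispatch

// A generic PixelBufferBox: @unchecked Sendable is a manual promise that the
// wrapped value is safe to hand across threads (a CVPixelBuffer is safe to
// retain and read elsewhere as long as nothing mutates it afterwards).
struct SendableBox<Value>: @unchecked Sendable {
    let value: Value
}

@MainActor protocol FrameConsumer: AnyObject {
    func consume(_ pixelBuffer: CVPixelBuffer, size: CGSize)
}

final class FrameSource {
    nonisolated(unsafe) weak var consumer: FrameConsumer?

    // Called on the background capture queue. The hop mirrors the diff; note
    // that a DispatchQueue.main.async closure is not statically @MainActor,
    // so this shape depends on the project's concurrency-checking level.
    nonisolated func emit(_ pixelBuffer: CVPixelBuffer, size: CGSize) {
        let box = SendableBox(value: pixelBuffer)
        DispatchQueue.main.async { [weak self, box] in
            guard let self else { return }
            self.consumer?.consume(box.value, size: size)
        }
    }
}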

View File

@@ -161,7 +161,7 @@ class EyeTrackingService: NSObject, ObservableObject {
 }

 extension EyeTrackingService: CameraSessionDelegate {
-    nonisolated func cameraSession(
+    @MainActor func cameraSession(
         _ manager: CameraSessionManager,
         didOutput pixelBuffer: CVPixelBuffer,
         imageSize: CGSize
@@ -174,28 +174,23 @@ extension EyeTrackingService: CameraSessionDelegate {
         if let leftRatio = result.leftPupilRatio,
            let rightRatio = result.rightPupilRatio,
            let faceWidth = result.faceWidthRatio {
-            Task { @MainActor in
-                guard CalibratorService.shared.isCalibrating else { return }
-                CalibratorService.shared.submitSampleToBridge(
-                    leftRatio: leftRatio,
-                    rightRatio: rightRatio,
-                    leftVertical: result.leftVerticalRatio,
-                    rightVertical: result.rightVerticalRatio,
-                    faceWidthRatio: faceWidth
-                )
-            }
+            guard CalibratorService.shared.isCalibrating else { return }
+            CalibratorService.shared.submitSampleToBridge(
+                leftRatio: leftRatio,
+                rightRatio: rightRatio,
+                leftVertical: result.leftVerticalRatio,
+                rightVertical: result.rightVerticalRatio,
+                faceWidthRatio: faceWidth
+            )
         }

-        Task { @MainActor [weak self] in
-            guard let self else { return }
-            self.faceDetected = result.faceDetected
-            self.isEyesClosed = result.isEyesClosed
-            self.userLookingAtScreen = result.userLookingAtScreen
-            self.debugAdapter.update(from: result)
-            self.debugAdapter.updateEyeImages(from: PupilDetector.self)
-            self.syncDebugState()
-            self.updateGazeConfiguration()
-        }
+        self.faceDetected = result.faceDetected
+        self.isEyesClosed = result.isEyesClosed
+        self.userLookingAtScreen = result.userLookingAtScreen
+        self.debugAdapter.update(from: result)
+        self.debugAdapter.updateEyeImages(from: PupilDetector.self)
+        self.syncDebugState()
+        self.updateGazeConfiguration()
     }
 }
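
Making the protocol requirement @MainActor (rather than hopping at each call site) is what lets both Task { @MainActor in ... } wrappers above be deleted: the conforming method is now main-actor-isolated itself and can touch @Published state directly. A minimal sketch of that isolation inference, with illustrative names:

import Combine

@MainActor protocol SampleSink: AnyObject {
    func didProduce(_ value: Double)
}

final class SampleService: ObservableObject {
    @Published private(set) var latest: Double = 0
}

extension SampleService: SampleSink {
    // Witnesses a @MainActor requirement, so it is inferred main-actor-
    // isolated: no Task hop is needed before mutating @Published state.
    func didProduce(_ value: Double) {
        latest = value
    }
}

One behavioral difference worth noting in the rewrite: the guard CalibratorService.shared.isCalibrating else { return } now exits the delegate method itself rather than a throwaway Task, so when pupil ratios are present but calibration is inactive, the state updates below the guard no longer run.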

View File

@@ -6,7 +6,7 @@
 //

 import Foundation
-import Vision
+@preconcurrency import Vision
 import simd

 struct EyeTrackingProcessingResult: Sendable {
@@ -54,19 +54,19 @@ final class GazeDetector: @unchecked Sendable {
     }

     private let lock = NSLock()
-    private var configuration: Configuration
+    private nonisolated(unsafe) var configuration: Configuration

-    init(configuration: Configuration) {
+    nonisolated init(configuration: Configuration) {
         self.configuration = configuration
     }

-    func updateConfiguration(_ configuration: Configuration) {
+    nonisolated func updateConfiguration(_ configuration: Configuration) {
         lock.lock()
         self.configuration = configuration
         lock.unlock()
     }

-    nonisolated func process(
+    func process(
         analysis: VisionPipeline.FaceAnalysis,
         pixelBuffer: CVPixelBuffer
     ) -> EyeTrackingProcessingResult {
@@ -75,7 +75,7 @@ final class GazeDetector: @unchecked Sendable {
         config = configuration
         lock.unlock()

-        guard analysis.faceDetected, let face = analysis.face else {
+        guard analysis.faceDetected, let face = analysis.face?.value else {
             return EyeTrackingProcessingResult(
                 faceDetected: false,
                 isEyesClosed: false,
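
Two devices carry this file's migration: @preconcurrency import Vision (also applied in the following file) downgrades Sendable diagnostics for Vision types at their use sites, while nonisolated(unsafe) opts the configuration property out of isolation checking and the existing NSLock keeps it actually safe, including the copy-under-lock read at the top of process. A minimal standalone sketch of the lock-guarded pattern (ConfigHolder is an illustrative name, not this repo's type):

import Foundation

final class ConfigHolder: @unchecked Sendable {
    struct Configuration {
        var smoothingFactor: Double
    }

    private let lock = NSLock()
    // nonisolated(unsafe) only silences compiler diagnostics; the lock is
    // what actually serializes concurrent reads and writes.
    private nonisolated(unsafe) var configuration: Configuration

    nonisolated init(configuration: Configuration) {
        self.configuration = configuration
    }

    nonisolated func update(_ newValue: Configuration) {
        lock.lock()
        configuration = newValue
        lock.unlock()
    }

    // Mirrors the read in process(analysis:pixelBuffer:): copy under the
    // lock, then work with the local copy outside it.
    nonisolated func withSnapshot<T>(_ body: (Configuration) -> T) -> T {
        lock.lock()
        let copy = configuration
        lock.unlock()
        return body(copy)
    }
}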

View File

@@ -18,7 +18,7 @@ import Accelerate
 import CoreImage
 import ImageIO
 import UniformTypeIdentifiers
-import Vision
+@preconcurrency import Vision

 struct PupilPosition: Equatable, Sendable {
     let x: CGFloat

View File

@@ -6,17 +6,21 @@
 //

 import Foundation
-import Vision
+@preconcurrency import Vision

 final class VisionPipeline: @unchecked Sendable {
     struct FaceAnalysis: Sendable {
         let faceDetected: Bool
-        let face: VNFaceObservation?
+        let face: NonSendableFaceObservation?
         let imageSize: CGSize
         let debugYaw: Double?
         let debugPitch: Double?
     }

+    struct NonSendableFaceObservation: @unchecked Sendable {
+        nonisolated(unsafe) let value: VNFaceObservation
+    }
+
     nonisolated func analyze(
         pixelBuffer: CVPixelBuffer,
         imageSize: CGSize
@@ -46,7 +50,7 @@ final class VisionPipeline: @unchecked Sendable {
             )
         }

-        guard let face = (request.results as? [VNFaceObservation])?.first else {
+        guard let face = request.results?.first else {
            return FaceAnalysis(
                faceDetected: false,
                face: nil,
@@ -58,7 +62,7 @@ final class VisionPipeline: @unchecked Sendable {

         return FaceAnalysis(
             faceDetected: true,
-            face: face,
+            face: NonSendableFaceObservation(value: face),
             imageSize: imageSize,
             debugYaw: face.yaw?.doubleValue,
             debugPitch: face.pitch?.doubleValue
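
The FaceAnalysis change completes the same strategy at the Vision boundary: VNFaceObservation does not conform to Sendable, so it is boxed in a small @unchecked Sendable wrapper and unboxed with ?.value at the consumer (as in GazeDetector above). The dropped cast works because the face-request types in newer SDKs expose results typed as [VNFaceObservation]? directly, making (request.results as? [VNFaceObservation]) redundant. A minimal sketch of the wrapper in isolation, with illustrative names:

@preconcurrency import Vision

// Same shape as NonSendableFaceObservation: the box is Sendable by fiat,
// keeping the unsafe assertion confined to one small, auditable type.
struct ObservationBox: @unchecked Sendable {
    nonisolated(unsafe) let value: VNFaceObservation
}

struct Analysis: Sendable {
    let face: ObservationBox?
}

func makeAnalysis(from results: [VNFaceObservation]?) -> Analysis {
    // Box the first observation, if any, so Analysis itself stays Sendable.
    Analysis(face: results?.first.map { ObservationBox(value: $0) })
}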