Compare commits

..

20 Commits

Author SHA1 Message Date
Michael Freno fc9ab37841 bit smoother 2026-02-01 11:29:49 -05:00
Michael Freno e0a9d16484 mild 2026-02-01 10:22:47 -05:00
Michael Freno d4adb530e0 meh 2026-02-01 02:13:32 -05:00
Michael Freno 5ae678ffe8 remake of calibration 2026-02-01 01:02:19 -05:00
Michael Freno ac3548e77c checkpoint 2026-02-01 00:24:09 -05:00
Michael Freno 11f2313b34 christ 2026-01-31 23:49:06 -05:00
Michael Freno a20b3701a6 tweaking 2026-01-31 23:34:07 -05:00
Michael Freno 2966dd7d5e simpler 2026-01-31 22:20:55 -05:00
Michael Freno eab8a76a55 fix: getting enforce working 2026-01-31 16:21:57 -05:00
Michael Freno 7ca7d27f84 feat:improved animation 2026-01-31 16:21:47 -05:00
Michael Freno 4bd80245cd prevent overflow 2026-01-30 20:32:38 -05:00
Michael Freno 1e20283afc consolidation 2026-01-30 13:52:25 -05:00
Michael Freno b725f9cfd7 post update 2026-01-30 13:18:55 -05:00
Michael Freno a992bc8374 Version bump to v0.5.0 2026-01-30 12:57:08 -05:00
Michael Freno 4b446db817 fix fullscreen 2026-01-30 12:55:41 -05:00
Michael Freno cbd60fdd08 adding macrovisionkit for fullscreen detection 2026-01-30 12:48:37 -05:00
Michael Freno 6e41c4059c building 2026-01-30 12:20:56 -05:00
Michael Freno 7d6e51a183 fix ref 2026-01-30 12:09:44 -05:00
Michael Freno 0b6dd3f903 fix one 2026-01-30 11:50:41 -05:00
Michael Freno 7a23ae9bad checking 2026-01-30 09:01:25 -05:00
49 changed files with 2410 additions and 5933 deletions

View File

@@ -8,6 +8,7 @@
/* Begin PBXBuildFile section */
275915892F132A9200D0E60D /* Lottie in Frameworks */ = {isa = PBXBuildFile; productRef = 27AE10B12F10B1FC00E00DBC /* Lottie */; };
27CF3CCB2F2D266600D67058 /* MacroVisionKit in Frameworks */ = {isa = PBXBuildFile; productRef = 27CF3CCA2F2D266600D67058 /* MacroVisionKit */; };
27SPARKLE00000000003 /* Sparkle in Frameworks */ = {isa = PBXBuildFile; productRef = 27SPARKLE00000000002 /* Sparkle */; };
/* End PBXBuildFile section */
@@ -70,6 +71,7 @@
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
27CF3CCB2F2D266600D67058 /* MacroVisionKit in Frameworks */,
275915892F132A9200D0E60D /* Lottie in Frameworks */,
27SPARKLE00000000003 /* Sparkle in Frameworks */,
);
@@ -134,6 +136,7 @@
packageProductDependencies = (
27AE10B12F10B1FC00E00DBC /* Lottie */,
27SPARKLE00000000002 /* Sparkle */,
27CF3CCA2F2D266600D67058 /* MacroVisionKit */,
);
productName = Gaze;
productReference = 27A21B3C2F0F69DC0018C4F3 /* Gaze.app */;
@@ -220,6 +223,7 @@
packageReferences = (
27AE10B02F10B1FC00E00DBC /* XCRemoteSwiftPackageReference "lottie-spm" */,
27SPARKLE00000000001 /* XCRemoteSwiftPackageReference "Sparkle" */,
27CF3CC92F2D266600D67058 /* XCRemoteSwiftPackageReference "MacroVisionKit" */,
);
preferredProjectObjectVersion = 77;
productRefGroup = 27A21B3D2F0F69DC0018C4F3 /* Products */;
@@ -424,7 +428,7 @@
CODE_SIGN_ENTITLEMENTS = Gaze/Gaze.entitlements;
CODE_SIGN_STYLE = Automatic;
COMBINE_HIDPI_IMAGES = YES;
CURRENT_PROJECT_VERSION = 9;
CURRENT_PROJECT_VERSION = 10;
DEVELOPMENT_TEAM = 6GK4F9L62V;
ENABLE_APP_SANDBOX = YES;
ENABLE_HARDENED_RUNTIME = YES;
@@ -439,7 +443,7 @@
"@executable_path/../Frameworks",
);
MACOSX_DEPLOYMENT_TARGET = 14.6;
MARKETING_VERSION = 0.4.1;
MARKETING_VERSION = 0.5.0;
PRODUCT_BUNDLE_IDENTIFIER = com.mikefreno.Gaze;
PRODUCT_NAME = "$(TARGET_NAME)";
REGISTER_APP_GROUPS = YES;
@@ -462,7 +466,7 @@
CODE_SIGN_ENTITLEMENTS = Gaze/Gaze.entitlements;
CODE_SIGN_STYLE = Automatic;
COMBINE_HIDPI_IMAGES = YES;
CURRENT_PROJECT_VERSION = 9;
CURRENT_PROJECT_VERSION = 10;
DEVELOPMENT_TEAM = 6GK4F9L62V;
ENABLE_APP_SANDBOX = YES;
ENABLE_HARDENED_RUNTIME = YES;
@@ -477,7 +481,7 @@
"@executable_path/../Frameworks",
);
MACOSX_DEPLOYMENT_TARGET = 14.6;
MARKETING_VERSION = 0.4.1;
MARKETING_VERSION = 0.5.0;
PRODUCT_BUNDLE_IDENTIFIER = com.mikefreno.Gaze;
PRODUCT_NAME = "$(TARGET_NAME)";
REGISTER_APP_GROUPS = YES;
@@ -496,11 +500,11 @@
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 9;
CURRENT_PROJECT_VERSION = 10;
DEVELOPMENT_TEAM = 6GK4F9L62V;
GENERATE_INFOPLIST_FILE = YES;
MACOSX_DEPLOYMENT_TARGET = 26.2;
MARKETING_VERSION = 0.4.1;
MARKETING_VERSION = 0.5.0;
PRODUCT_BUNDLE_IDENTIFIER = com.mikefreno.GazeTests;
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = NO;
@@ -517,11 +521,11 @@
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 9;
CURRENT_PROJECT_VERSION = 10;
DEVELOPMENT_TEAM = 6GK4F9L62V;
GENERATE_INFOPLIST_FILE = YES;
MACOSX_DEPLOYMENT_TARGET = 26.2;
MARKETING_VERSION = 0.4.1;
MARKETING_VERSION = 0.5.0;
PRODUCT_BUNDLE_IDENTIFIER = com.mikefreno.GazeTests;
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = NO;
@@ -537,10 +541,10 @@
isa = XCBuildConfiguration;
buildSettings = {
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 9;
CURRENT_PROJECT_VERSION = 10;
DEVELOPMENT_TEAM = 6GK4F9L62V;
GENERATE_INFOPLIST_FILE = YES;
MARKETING_VERSION = 0.4.1;
MARKETING_VERSION = 0.5.0;
PRODUCT_BUNDLE_IDENTIFIER = com.mikefreno.GazeUITests;
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = NO;
@@ -556,10 +560,10 @@
isa = XCBuildConfiguration;
buildSettings = {
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 9;
CURRENT_PROJECT_VERSION = 10;
DEVELOPMENT_TEAM = 6GK4F9L62V;
GENERATE_INFOPLIST_FILE = YES;
MARKETING_VERSION = 0.4.1;
MARKETING_VERSION = 0.5.0;
PRODUCT_BUNDLE_IDENTIFIER = com.mikefreno.GazeUITests;
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = NO;
@@ -621,6 +625,14 @@
minimumVersion = 4.6.0;
};
};
27CF3CC92F2D266600D67058 /* XCRemoteSwiftPackageReference "MacroVisionKit" */ = {
isa = XCRemoteSwiftPackageReference;
repositoryURL = "https://github.com/TheBoredTeam/MacroVisionKit.git";
requirement = {
kind = upToNextMajorVersion;
minimumVersion = 0.1.0;
};
};
27SPARKLE00000000001 /* XCRemoteSwiftPackageReference "Sparkle" */ = {
isa = XCRemoteSwiftPackageReference;
repositoryURL = "https://github.com/sparkle-project/Sparkle";
@@ -637,6 +649,11 @@
package = 27AE10B02F10B1FC00E00DBC /* XCRemoteSwiftPackageReference "lottie-spm" */;
productName = Lottie;
};
27CF3CCA2F2D266600D67058 /* MacroVisionKit */ = {
isa = XCSwiftPackageProductDependency;
package = 27CF3CC92F2D266600D67058 /* XCRemoteSwiftPackageReference "MacroVisionKit" */;
productName = MacroVisionKit;
};
27SPARKLE00000000002 /* Sparkle */ = {
isa = XCSwiftPackageProductDependency;
package = 27SPARKLE00000000001 /* XCRemoteSwiftPackageReference "Sparkle" */;

View File

@@ -1,646 +0,0 @@
// !$*UTF8*$!
{
archiveVersion = 1;
classes = {
};
objectVersion = 77;
objects = {
/* Begin PBXBuildFile section */
275915892F132A9200D0E60D /* Lottie in Frameworks */ = {isa = PBXBuildFile; productRef = 27AE10B12F10B1FC00E00DBC /* Lottie */; };
27SPARKLE00000000003 /* Sparkle in Frameworks */ = {isa = PBXBuildFile; productRef = 27SPARKLE00000000002 /* Sparkle */; };
/* End PBXBuildFile section */
/* Begin PBXContainerItemProxy section */
27A21B4A2F0F69DD0018C4F3 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = 27A21B342F0F69DC0018C4F3 /* Project object */;
proxyType = 1;
remoteGlobalIDString = 27A21B3B2F0F69DC0018C4F3;
remoteInfo = Gaze;
};
27A21B542F0F69DD0018C4F3 /* PBXContainerItemProxy */ = {
isa = PBXContainerItemProxy;
containerPortal = 27A21B342F0F69DC0018C4F3 /* Project object */;
proxyType = 1;
remoteGlobalIDString = 27A21B3B2F0F69DC0018C4F3;
remoteInfo = Gaze;
};
/* End PBXContainerItemProxy section */
/* Begin PBXFileReference section */
27A21B3C2F0F69DC0018C4F3 /* Gaze.app */ = {isa = PBXFileReference; explicitFileType = wrapper.application; includeInIndex = 0; path = Gaze.app; sourceTree = BUILT_PRODUCTS_DIR; };
27A21B492F0F69DD0018C4F3 /* GazeTests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = GazeTests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
27A21B532F0F69DD0018C4F3 /* GazeUITests.xctest */ = {isa = PBXFileReference; explicitFileType = wrapper.cfbundle; includeInIndex = 0; path = GazeUITests.xctest; sourceTree = BUILT_PRODUCTS_DIR; };
/* End PBXFileReference section */
/* Begin PBXFileSystemSynchronizedBuildFileExceptionSet section */
270D22E92F1474F1008BCE42 /* Exceptions for "Gaze" folder in "Gaze" target */ = {
isa = PBXFileSystemSynchronizedBuildFileExceptionSet;
membershipExceptions = (
Info.plist,
);
target = 27A21B3B2F0F69DC0018C4F3 /* Gaze */;
};
/* End PBXFileSystemSynchronizedBuildFileExceptionSet section */
/* Begin PBXFileSystemSynchronizedRootGroup section */
27A21B3E2F0F69DC0018C4F3 /* Gaze */ = {
isa = PBXFileSystemSynchronizedRootGroup;
exceptions = (
270D22E92F1474F1008BCE42 /* Exceptions for "Gaze" folder in "Gaze" target */,
);
path = Gaze;
sourceTree = "<group>";
};
27A21B4C2F0F69DD0018C4F3 /* GazeTests */ = {
isa = PBXFileSystemSynchronizedRootGroup;
path = GazeTests;
sourceTree = "<group>";
};
27A21B562F0F69DD0018C4F3 /* GazeUITests */ = {
isa = PBXFileSystemSynchronizedRootGroup;
path = GazeUITests;
sourceTree = "<group>";
};
/* End PBXFileSystemSynchronizedRootGroup section */
/* Begin PBXFrameworksBuildPhase section */
27A21B392F0F69DC0018C4F3 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
275915892F132A9200D0E60D /* Lottie in Frameworks */,
27SPARKLE00000000003 /* Sparkle in Frameworks */,
);
runOnlyForDeploymentPostprocessing = 0;
};
27A21B462F0F69DD0018C4F3 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
27A21B502F0F69DD0018C4F3 /* Frameworks */ = {
isa = PBXFrameworksBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXFrameworksBuildPhase section */
/* Begin PBXGroup section */
27A21B332F0F69DC0018C4F3 = {
isa = PBXGroup;
children = (
27A21B3E2F0F69DC0018C4F3 /* Gaze */,
27A21B4C2F0F69DD0018C4F3 /* GazeTests */,
27A21B562F0F69DD0018C4F3 /* GazeUITests */,
27A21B3D2F0F69DC0018C4F3 /* Products */,
);
sourceTree = "<group>";
};
27A21B3D2F0F69DC0018C4F3 /* Products */ = {
isa = PBXGroup;
children = (
27A21B3C2F0F69DC0018C4F3 /* Gaze.app */,
27A21B492F0F69DD0018C4F3 /* GazeTests.xctest */,
27A21B532F0F69DD0018C4F3 /* GazeUITests.xctest */,
);
name = Products;
sourceTree = "<group>";
};
/* End PBXGroup section */
/* Begin PBXNativeTarget section */
27A21B3B2F0F69DC0018C4F3 /* Gaze */ = {
isa = PBXNativeTarget;
buildConfigurationList = 27A21B5D2F0F69DD0018C4F3 /* Build configuration list for PBXNativeTarget "Gaze" */;
buildPhases = (
27A21B382F0F69DC0018C4F3 /* Sources */,
27A21B392F0F69DC0018C4F3 /* Frameworks */,
27A21B3A2F0F69DC0018C4F3 /* Resources */,
);
buildRules = (
);
dependencies = (
);
fileSystemSynchronizedGroups = (
27A21B3E2F0F69DC0018C4F3 /* Gaze */,
);
name = Gaze;
packageProductDependencies = (
27AE10B12F10B1FC00E00DBC /* Lottie */,
27SPARKLE00000000002 /* Sparkle */,
);
productName = Gaze;
productReference = 27A21B3C2F0F69DC0018C4F3 /* Gaze.app */;
productType = "com.apple.product-type.application";
};
27A21B482F0F69DD0018C4F3 /* GazeTests */ = {
isa = PBXNativeTarget;
buildConfigurationList = 27A21B602F0F69DD0018C4F3 /* Build configuration list for PBXNativeTarget "GazeTests" */;
buildPhases = (
27A21B452F0F69DD0018C4F3 /* Sources */,
27A21B462F0F69DD0018C4F3 /* Frameworks */,
27A21B472F0F69DD0018C4F3 /* Resources */,
);
buildRules = (
);
dependencies = (
27A21B4B2F0F69DD0018C4F3 /* PBXTargetDependency */,
);
fileSystemSynchronizedGroups = (
27A21B4C2F0F69DD0018C4F3 /* GazeTests */,
);
name = GazeTests;
packageProductDependencies = (
);
productName = GazeTests;
productReference = 27A21B492F0F69DD0018C4F3 /* GazeTests.xctest */;
productType = "com.apple.product-type.bundle.unit-test";
};
27A21B522F0F69DD0018C4F3 /* GazeUITests */ = {
isa = PBXNativeTarget;
buildConfigurationList = 27A21B632F0F69DD0018C4F3 /* Build configuration list for PBXNativeTarget "GazeUITests" */;
buildPhases = (
27A21B4F2F0F69DD0018C4F3 /* Sources */,
27A21B502F0F69DD0018C4F3 /* Frameworks */,
27A21B512F0F69DD0018C4F3 /* Resources */,
);
buildRules = (
);
dependencies = (
27A21B552F0F69DD0018C4F3 /* PBXTargetDependency */,
);
fileSystemSynchronizedGroups = (
27A21B562F0F69DD0018C4F3 /* GazeUITests */,
);
name = GazeUITests;
packageProductDependencies = (
);
productName = GazeUITests;
productReference = 27A21B532F0F69DD0018C4F3 /* GazeUITests.xctest */;
productType = "com.apple.product-type.bundle.ui-testing";
};
/* End PBXNativeTarget section */
/* Begin PBXProject section */
27A21B342F0F69DC0018C4F3 /* Project object */ = {
isa = PBXProject;
attributes = {
BuildIndependentTargetsInParallel = 1;
LastSwiftUpdateCheck = 2620;
LastUpgradeCheck = 2620;
TargetAttributes = {
27A21B3B2F0F69DC0018C4F3 = {
CreatedOnToolsVersion = 26.2;
};
27A21B482F0F69DD0018C4F3 = {
CreatedOnToolsVersion = 26.2;
TestTargetID = 27A21B3B2F0F69DC0018C4F3;
};
27A21B522F0F69DD0018C4F3 = {
CreatedOnToolsVersion = 26.2;
TestTargetID = 27A21B3B2F0F69DC0018C4F3;
};
};
};
buildConfigurationList = 27A21B372F0F69DC0018C4F3 /* Build configuration list for PBXProject "Gaze" */;
developmentRegion = en;
hasScannedForEncodings = 0;
knownRegions = (
en,
Base,
);
mainGroup = 27A21B332F0F69DC0018C4F3;
minimizedProjectReferenceProxies = 1;
packageReferences = (
27AE10B02F10B1FC00E00DBC /* XCRemoteSwiftPackageReference "lottie-spm" */,
27SPARKLE00000000001 /* XCRemoteSwiftPackageReference "Sparkle" */,
);
preferredProjectObjectVersion = 77;
productRefGroup = 27A21B3D2F0F69DC0018C4F3 /* Products */;
projectDirPath = "";
projectRoot = "";
targets = (
27A21B3B2F0F69DC0018C4F3 /* Gaze */,
27A21B482F0F69DD0018C4F3 /* GazeTests */,
27A21B522F0F69DD0018C4F3 /* GazeUITests */,
);
};
/* End PBXProject section */
/* Begin PBXResourcesBuildPhase section */
27A21B3A2F0F69DC0018C4F3 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
27A21B472F0F69DD0018C4F3 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
27A21B512F0F69DD0018C4F3 /* Resources */ = {
isa = PBXResourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXResourcesBuildPhase section */
/* Begin PBXSourcesBuildPhase section */
27A21B382F0F69DC0018C4F3 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
27A21B452F0F69DD0018C4F3 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
27A21B4F2F0F69DD0018C4F3 /* Sources */ = {
isa = PBXSourcesBuildPhase;
buildActionMask = 2147483647;
files = (
);
runOnlyForDeploymentPostprocessing = 0;
};
/* End PBXSourcesBuildPhase section */
/* Begin PBXTargetDependency section */
27A21B4B2F0F69DD0018C4F3 /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
target = 27A21B3B2F0F69DC0018C4F3 /* Gaze */;
targetProxy = 27A21B4A2F0F69DD0018C4F3 /* PBXContainerItemProxy */;
};
27A21B552F0F69DD0018C4F3 /* PBXTargetDependency */ = {
isa = PBXTargetDependency;
target = 27A21B3B2F0F69DC0018C4F3 /* Gaze */;
targetProxy = 27A21B542F0F69DD0018C4F3 /* PBXContainerItemProxy */;
};
/* End PBXTargetDependency section */
/* Begin XCBuildConfiguration section */
27A21B5B2F0F69DD0018C4F3 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = dwarf;
DEVELOPMENT_TEAM = 6GK4F9L62V;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_TESTABILITY = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_DYNAMIC_NO_PIC = NO;
GCC_NO_COMMON_BLOCKS = YES;
GCC_OPTIMIZATION_LEVEL = 0;
GCC_PREPROCESSOR_DEFINITIONS = (
"DEBUG=1",
"$(inherited)",
);
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MACOSX_DEPLOYMENT_TARGET = 26.2;
MTL_ENABLE_DEBUG_INFO = INCLUDE_SOURCE;
MTL_FAST_MATH = YES;
ONLY_ACTIVE_ARCH = YES;
SDKROOT = macosx;
SWIFT_ACTIVE_COMPILATION_CONDITIONS = "DEBUG $(inherited)";
SWIFT_OPTIMIZATION_LEVEL = "-Onone";
};
name = Debug;
};
27A21B5C2F0F69DD0018C4F3 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ALWAYS_SEARCH_USER_PATHS = NO;
ASSETCATALOG_COMPILER_GENERATE_SWIFT_ASSET_SYMBOL_EXTENSIONS = YES;
CLANG_ANALYZER_NONNULL = YES;
CLANG_ANALYZER_NUMBER_OBJECT_CONVERSION = YES_AGGRESSIVE;
CLANG_CXX_LANGUAGE_STANDARD = "gnu++20";
CLANG_ENABLE_MODULES = YES;
CLANG_ENABLE_OBJC_ARC = YES;
CLANG_ENABLE_OBJC_WEAK = YES;
CLANG_WARN_BLOCK_CAPTURE_AUTORELEASING = YES;
CLANG_WARN_BOOL_CONVERSION = YES;
CLANG_WARN_COMMA = YES;
CLANG_WARN_CONSTANT_CONVERSION = YES;
CLANG_WARN_DEPRECATED_OBJC_IMPLEMENTATIONS = YES;
CLANG_WARN_DIRECT_OBJC_ISA_USAGE = YES_ERROR;
CLANG_WARN_DOCUMENTATION_COMMENTS = YES;
CLANG_WARN_EMPTY_BODY = YES;
CLANG_WARN_ENUM_CONVERSION = YES;
CLANG_WARN_INFINITE_RECURSION = YES;
CLANG_WARN_INT_CONVERSION = YES;
CLANG_WARN_NON_LITERAL_NULL_CONVERSION = YES;
CLANG_WARN_OBJC_IMPLICIT_RETAIN_SELF = YES;
CLANG_WARN_OBJC_LITERAL_CONVERSION = YES;
CLANG_WARN_OBJC_ROOT_CLASS = YES_ERROR;
CLANG_WARN_QUOTED_INCLUDE_IN_FRAMEWORK_HEADER = YES;
CLANG_WARN_RANGE_LOOP_ANALYSIS = YES;
CLANG_WARN_STRICT_PROTOTYPES = YES;
CLANG_WARN_SUSPICIOUS_MOVE = YES;
CLANG_WARN_UNGUARDED_AVAILABILITY = YES_AGGRESSIVE;
CLANG_WARN_UNREACHABLE_CODE = YES;
CLANG_WARN__DUPLICATE_METHOD_MATCH = YES;
COPY_PHASE_STRIP = NO;
DEBUG_INFORMATION_FORMAT = "dwarf-with-dsym";
DEVELOPMENT_TEAM = 6GK4F9L62V;
ENABLE_NS_ASSERTIONS = NO;
ENABLE_STRICT_OBJC_MSGSEND = YES;
ENABLE_USER_SCRIPT_SANDBOXING = YES;
GCC_C_LANGUAGE_STANDARD = gnu17;
GCC_NO_COMMON_BLOCKS = YES;
GCC_WARN_64_TO_32_BIT_CONVERSION = YES;
GCC_WARN_ABOUT_RETURN_TYPE = YES_ERROR;
GCC_WARN_UNDECLARED_SELECTOR = YES;
GCC_WARN_UNINITIALIZED_AUTOS = YES_AGGRESSIVE;
GCC_WARN_UNUSED_FUNCTION = YES;
GCC_WARN_UNUSED_VARIABLE = YES;
LOCALIZATION_PREFERS_STRING_CATALOGS = YES;
MACOSX_DEPLOYMENT_TARGET = 26.2;
MTL_ENABLE_DEBUG_INFO = NO;
MTL_FAST_MATH = YES;
SDKROOT = macosx;
SWIFT_COMPILATION_MODE = wholemodule;
};
name = Release;
};
27A21B5E2F0F69DD0018C4F3 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = Gaze;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
ASSETCATALOG_COMPILER_INCLUDE_ALL_APPICON_ASSETS = NO;
CODE_SIGN_ENTITLEMENTS = Gaze/Gaze.entitlements;
CODE_SIGN_STYLE = Automatic;
COMBINE_HIDPI_IMAGES = YES;
CURRENT_PROJECT_VERSION = 8;
DEVELOPMENT_TEAM = 6GK4F9L62V;
ENABLE_APP_SANDBOX = YES;
ENABLE_HARDENED_RUNTIME = YES;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = NO;
INFOPLIST_FILE = Gaze/Info.plist;
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.productivity";
INFOPLIST_KEY_NSHumanReadableCopyright = "";
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/../Frameworks",
);
MACOSX_DEPLOYMENT_TARGET = 13.0;
MARKETING_VERSION = 0.4.0;
OTHER_SWIFT_FLAGS = "-D APPSTORE";
PRODUCT_BUNDLE_IDENTIFIER = com.mikefreno.Gaze;
PRODUCT_NAME = "$(TARGET_NAME)";
REGISTER_APP_GROUPS = YES;
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
};
name = Debug;
};
27A21B5F2F0F69DD0018C4F3 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
ASSETCATALOG_COMPILER_APPICON_NAME = Gaze;
ASSETCATALOG_COMPILER_GLOBAL_ACCENT_COLOR_NAME = AccentColor;
ASSETCATALOG_COMPILER_INCLUDE_ALL_APPICON_ASSETS = NO;
CODE_SIGN_ENTITLEMENTS = Gaze/Gaze.entitlements;
CODE_SIGN_STYLE = Automatic;
COMBINE_HIDPI_IMAGES = YES;
CURRENT_PROJECT_VERSION = 8;
DEVELOPMENT_TEAM = 6GK4F9L62V;
ENABLE_APP_SANDBOX = YES;
ENABLE_HARDENED_RUNTIME = YES;
ENABLE_PREVIEWS = YES;
GENERATE_INFOPLIST_FILE = NO;
INFOPLIST_FILE = Gaze/Info.plist;
INFOPLIST_KEY_LSApplicationCategoryType = "public.app-category.productivity";
INFOPLIST_KEY_NSHumanReadableCopyright = "";
LD_RUNPATH_SEARCH_PATHS = (
"$(inherited)",
"@executable_path/../Frameworks",
);
MACOSX_DEPLOYMENT_TARGET = 13.0;
MARKETING_VERSION = 0.4.0;
OTHER_SWIFT_FLAGS = "-D APPSTORE";
PRODUCT_BUNDLE_IDENTIFIER = com.mikefreno.Gaze;
PRODUCT_NAME = "$(TARGET_NAME)";
REGISTER_APP_GROUPS = YES;
STRING_CATALOG_GENERATE_SYMBOLS = YES;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor;
SWIFT_EMIT_LOC_STRINGS = YES;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
};
name = Release;
};
27A21B612F0F69DD0018C4F3 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 6;
DEVELOPMENT_TEAM = 6GK4F9L62V;
GENERATE_INFOPLIST_FILE = YES;
MACOSX_DEPLOYMENT_TARGET = 26.2;
MARKETING_VERSION = 0.4.0;
PRODUCT_BUNDLE_IDENTIFIER = com.mikefreno.GazeTests;
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Gaze.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Gaze";
};
name = Debug;
};
27A21B622F0F69DD0018C4F3 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
BUNDLE_LOADER = "$(TEST_HOST)";
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 6;
DEVELOPMENT_TEAM = 6GK4F9L62V;
GENERATE_INFOPLIST_FILE = YES;
MACOSX_DEPLOYMENT_TARGET = 26.2;
MARKETING_VERSION = 0.4.0;
PRODUCT_BUNDLE_IDENTIFIER = com.mikefreno.GazeTests;
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TEST_HOST = "$(BUILT_PRODUCTS_DIR)/Gaze.app/$(BUNDLE_EXECUTABLE_FOLDER_PATH)/Gaze";
};
name = Release;
};
27A21B642F0F69DD0018C4F3 /* Debug */ = {
isa = XCBuildConfiguration;
buildSettings = {
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 6;
DEVELOPMENT_TEAM = 6GK4F9L62V;
GENERATE_INFOPLIST_FILE = YES;
MARKETING_VERSION = 0.4.0;
PRODUCT_BUNDLE_IDENTIFIER = com.mikefreno.GazeUITests;
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TEST_TARGET_NAME = Gaze;
};
name = Debug;
};
27A21B652F0F69DD0018C4F3 /* Release */ = {
isa = XCBuildConfiguration;
buildSettings = {
CODE_SIGN_STYLE = Automatic;
CURRENT_PROJECT_VERSION = 6;
DEVELOPMENT_TEAM = 6GK4F9L62V;
GENERATE_INFOPLIST_FILE = YES;
MARKETING_VERSION = 0.4.0;
PRODUCT_BUNDLE_IDENTIFIER = com.mikefreno.GazeUITests;
PRODUCT_NAME = "$(TARGET_NAME)";
STRING_CATALOG_GENERATE_SYMBOLS = NO;
SWIFT_APPROACHABLE_CONCURRENCY = YES;
SWIFT_EMIT_LOC_STRINGS = NO;
SWIFT_UPCOMING_FEATURE_MEMBER_IMPORT_VISIBILITY = YES;
SWIFT_VERSION = 5.0;
TEST_TARGET_NAME = Gaze;
};
name = Release;
};
/* End XCBuildConfiguration section */
/* Begin XCConfigurationList section */
27A21B372F0F69DC0018C4F3 /* Build configuration list for PBXProject "Gaze" */ = {
isa = XCConfigurationList;
buildConfigurations = (
27A21B5B2F0F69DD0018C4F3 /* Debug */,
27A21B5C2F0F69DD0018C4F3 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
27A21B5D2F0F69DD0018C4F3 /* Build configuration list for PBXNativeTarget "Gaze" */ = {
isa = XCConfigurationList;
buildConfigurations = (
27A21B5E2F0F69DD0018C4F3 /* Debug */,
27A21B5F2F0F69DD0018C4F3 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
27A21B602F0F69DD0018C4F3 /* Build configuration list for PBXNativeTarget "GazeTests" */ = {
isa = XCConfigurationList;
buildConfigurations = (
27A21B612F0F69DD0018C4F3 /* Debug */,
27A21B622F0F69DD0018C4F3 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
27A21B632F0F69DD0018C4F3 /* Build configuration list for PBXNativeTarget "GazeUITests" */ = {
isa = XCConfigurationList;
buildConfigurations = (
27A21B642F0F69DD0018C4F3 /* Debug */,
27A21B652F0F69DD0018C4F3 /* Release */,
);
defaultConfigurationIsVisible = 0;
defaultConfigurationName = Release;
};
/* End XCConfigurationList section */
/* Begin XCRemoteSwiftPackageReference section */
27AE10B02F10B1FC00E00DBC /* XCRemoteSwiftPackageReference "lottie-spm" */ = {
isa = XCRemoteSwiftPackageReference;
repositoryURL = "https://github.com/airbnb/lottie-spm.git";
requirement = {
kind = upToNextMajorVersion;
minimumVersion = 4.6.0;
};
};
27SPARKLE00000000001 /* XCRemoteSwiftPackageReference "Sparkle" */ = {
isa = XCRemoteSwiftPackageReference;
repositoryURL = "https://github.com/sparkle-project/Sparkle";
requirement = {
kind = exactVersion;
version = 2.8.1;
};
};
/* End XCRemoteSwiftPackageReference section */
/* Begin XCSwiftPackageProductDependency section */
27AE10B12F10B1FC00E00DBC /* Lottie */ = {
isa = XCSwiftPackageProductDependency;
package = 27AE10B02F10B1FC00E00DBC /* XCRemoteSwiftPackageReference "lottie-spm" */;
productName = Lottie;
};
27SPARKLE00000000002 /* Sparkle */ = {
isa = XCSwiftPackageProductDependency;
package = 27SPARKLE00000000001 /* XCRemoteSwiftPackageReference "Sparkle" */;
productName = Sparkle;
};
/* End XCSwiftPackageProductDependency section */
};
rootObject = 27A21B342F0F69DC0018C4F3 /* Project object */;
}

View File

@@ -1,5 +1,5 @@
{
"originHash" : "513d974fbede884a919977d3446360023f6e3239ac314f4fbd9657e80aca7560",
"originHash" : "83c4b4b69555e54712e60721606a120fe3f01308b1af84957cd0941e93e64f8a",
"pins" : [
{
"identity" : "lottie-spm",
@@ -10,6 +10,15 @@
"version" : "4.6.0"
}
},
{
"identity" : "macrovisionkit",
"kind" : "remoteSourceControl",
"location" : "https://github.com/TheBoredTeam/MacroVisionKit.git",
"state" : {
"revision" : "da481a6be8d8b1bf7fcb218507a72428bbcae7b0",
"version" : "0.2.0"
}
},
{
"identity" : "sparkle",
"kind" : "remoteSourceControl",

View File

@@ -21,12 +21,12 @@
"p": {
"a": 1,
"k": [
{ "t": 0, "s": [150, 180, 0], "i": { "x": 0.3, "y": 1 }, "o": { "x": 0.7, "y": 0 } },
{ "t": 40, "s": [150, 120, 0], "i": { "x": 0.3, "y": 1 }, "o": { "x": 0.7, "y": 0 } },
{ "t": 80, "s": [150, 140, 0], "i": { "x": 0.3, "y": 1 }, "o": { "x": 0.7, "y": 0 } },
{ "t": 120, "s": [150, 120, 0], "i": { "x": 0.3, "y": 1 }, "o": { "x": 0.7, "y": 0 } },
{ "t": 160, "s": [150, 140, 0], "i": { "x": 0.3, "y": 1 }, "o": { "x": 0.7, "y": 0 } },
{ "t": 180, "s": [150, 130, 0] }
{ "t": 0, "s": [150, 160, 0], "i": { "x": 0.3, "y": 1 }, "o": { "x": 0.7, "y": 0 } },
{ "t": 40, "s": [150, 100, 0], "i": { "x": 0.3, "y": 1 }, "o": { "x": 0.7, "y": 0 } },
{ "t": 80, "s": [150, 120, 0], "i": { "x": 0.3, "y": 1 }, "o": { "x": 0.7, "y": 0 } },
{ "t": 120, "s": [150, 100, 0], "i": { "x": 0.3, "y": 1 }, "o": { "x": 0.7, "y": 0 } },
{ "t": 160, "s": [150, 120, 0], "i": { "x": 0.3, "y": 1 }, "o": { "x": 0.7, "y": 0 } },
{ "t": 180, "s": [150, 110, 0] }
]
},
"a": { "a": 0, "k": [0, 0, 0] },

View File

@@ -185,7 +185,10 @@ class AppDelegate: NSObject, NSApplicationDelegate, ObservableObject {
private func showReminder(_ event: ReminderEvent) {
switch event {
case .lookAwayTriggered(let countdownSeconds):
let view = LookAwayReminderView(countdownSeconds: countdownSeconds) { [weak self] in
let view = LookAwayReminderView(
countdownSeconds: countdownSeconds,
enforceModeService: EnforceModeService.shared
) { [weak self] in
self?.timerEngine?.dismissReminder()
}
windowManager.showReminderWindow(view, windowType: .overlay)
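
This hunk threads EnforceModeService.shared into the reminder view rather than letting the view reach for a singleton itself. A hypothetical sketch of the initializer shape the call site implies; the real LookAwayReminderView and EnforceModeService live elsewhere in the repo and may differ:

import SwiftUI

final class EnforceModeService: ObservableObject {  // stand-in for the repo's service
    static let shared = EnforceModeService()
    @Published var isLookingAway = false
}

struct LookAwayReminderView: View {
    let countdownSeconds: Int
    @ObservedObject var enforceModeService: EnforceModeService
    let onDismiss: () -> Void  // bound to the trailing closure at the call site above

    var body: some View {
        VStack {
            Text("Look away for \(countdownSeconds)s")
            if enforceModeService.isLookingAway {
                Text("Good, keep it up")
            }
            Button("Dismiss", action: onDismiss)
        }
    }
}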

View File

@@ -1,87 +0,0 @@
//
// EyeTrackingConstants.swift
// Gaze
//
// Created by Mike Freno on 1/14/26.
//
import Foundation
/// Thread-safe configuration holder for eye tracking thresholds.
/// All properties are Sendable constants, safe for use in any concurrency context.
enum EyeTrackingConstants: Sendable {
// MARK: - Logging
/// Interval between log messages in seconds
static let logInterval: TimeInterval = 0.5
// MARK: - Eye Closure Detection
/// Threshold for eye closure (smaller value means eye must be more closed to trigger)
/// Range: 0.0 to 1.0 (approximate eye opening ratio)
static let eyeClosedThreshold: CGFloat = 0.02
static let eyeClosedEnabled: Bool = true
// MARK: - Face Pose Thresholds
/// Maximum yaw (left/right head turn) in radians before considering user looking away
/// 0.20 radians ≈ 11.5 degrees (Tightened from 0.35)
/// NOTE: Vision Framework often provides unreliable yaw/pitch on macOS - disabled by default
static let yawThreshold: Double = 0.3
static let yawEnabled: Bool = false
/// Pitch threshold for looking UP (above screen).
/// Since camera is at top, looking at screen is negative pitch.
/// Values > 0.1 imply looking straight ahead or up (away from screen).
/// NOTE: Vision Framework often doesn't provide pitch data on macOS - disabled by default
static let pitchUpThreshold: Double = 0.1
static let pitchUpEnabled: Bool = false
/// Pitch threshold for looking DOWN (at keyboard/lap).
/// Values < -0.45 imply looking too far down.
/// NOTE: Vision Framework often doesn't provide pitch data on macOS - disabled by default
static let pitchDownThreshold: Double = -0.45
static let pitchDownEnabled: Bool = false
// MARK: - Pupil Tracking Thresholds
/// Minimum horizontal pupil ratio (0.0 = right edge, 1.0 = left edge)
/// Values below this are considered looking right (camera view)
/// Tightened to 0.35 based on observed values (typically 0.31-0.47)
static let minPupilRatio: Double = 0.35
static let minPupilEnabled: Bool = true
/// Maximum horizontal pupil ratio
/// Values above this are considered looking left (camera view)
/// Tightened to 0.45 based on observed values (typically 0.31-0.47)
static let maxPupilRatio: Double = 0.45
static let maxPupilEnabled: Bool = true
// MARK: - Pixel-Based Gaze Detection Thresholds
/// Thresholds for pupil-based gaze detection
/// Based on video test data:
/// - Looking at screen (center): H 0.20-0.50
/// - Looking left (away): H 0.50+
/// - Looking right (away): H 0.20-
/// Coordinate system: Lower values = right, Higher values = left
static let pixelGazeMinRatio: Double = 0.20 // Below this = looking right (away)
static let pixelGazeMaxRatio: Double = 0.50 // Above this = looking left (away)
static let pixelGazeEnabled: Bool = true
// MARK: - Screen Boundary Detection (New)
/// Forgiveness margin for the "gray area" around the screen edge.
/// 0.05 means the safe zone is extended by 5% of the range on each side.
/// If in the gray area, we assume the user is Looking Away (success).
static let boundaryForgivenessMargin: Double = 0.05
/// Distance sensitivity factor.
/// 1.0 = Linear scaling (face width 50% smaller -> eye movement expected to be 50% smaller)
/// > 1.0 = More aggressive scaling
static let distanceSensitivity: Double = 1.0
/// Default reference face width for distance scaling when uncalibrated.
/// Measured from test videos at typical laptop distance (~60cm).
/// Face bounding box width as ratio of image width.
static let defaultReferenceFaceWidth: Double = 0.4566
/// Minimum confidence required for a valid pupil detection before updating the gaze average.
/// Helps filter out blinks or noisy frames.
static let minimumGazeConfidence: Int = 3 // consecutive valid frames
}
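
These constants drive a plain range test: a horizontal pupil ratio inside [pixelGazeMinRatio, pixelGazeMaxRatio] counts as on-screen, anything outside counts as looking away. A minimal sketch, where classifyGazeAway is a hypothetical helper rather than a function from the repo:

import Foundation

// Coordinate system per the comments above: lower values = right, higher = left.
func classifyGazeAway(horizontalRatio: Double) -> Bool {
    let lookingRight = horizontalRatio < EyeTrackingConstants.pixelGazeMinRatio  // < 0.20
    let lookingLeft  = horizontalRatio > EyeTrackingConstants.pixelGazeMaxRatio  // > 0.50
    return lookingLeft || lookingRight  // true = looking away from the screen
}

// classifyGazeAway(horizontalRatio: 0.35) -> false (on screen)
// classifyGazeAway(horizontalRatio: 0.55) -> true  (looking left, away)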

View File

@@ -42,6 +42,10 @@ struct AppSettings: Codable, Equatable, Hashable, Sendable {
var subtleReminderSize: ReminderSize
var smartMode: SmartModeSettings
var enforceModeStrictness: Double
var enforceModeEyeBoxWidthFactor: Double
var enforceModeEyeBoxHeightFactor: Double
var enforceModeCalibration: EnforceModeCalibration?
var hasCompletedOnboarding: Bool
var launchAtLogin: Bool
@@ -57,6 +61,10 @@ struct AppSettings: Codable, Equatable, Hashable, Sendable {
userTimers: [UserTimer] = [],
subtleReminderSize: ReminderSize = DefaultSettingsBuilder.subtleReminderSize,
smartMode: SmartModeSettings = DefaultSettingsBuilder.smartMode,
enforceModeStrictness: Double = DefaultSettingsBuilder.enforceModeStrictness,
enforceModeEyeBoxWidthFactor: Double = DefaultSettingsBuilder.enforceModeEyeBoxWidthFactor,
enforceModeEyeBoxHeightFactor: Double = DefaultSettingsBuilder.enforceModeEyeBoxHeightFactor,
enforceModeCalibration: EnforceModeCalibration? = DefaultSettingsBuilder.enforceModeCalibration,
hasCompletedOnboarding: Bool = DefaultSettingsBuilder.hasCompletedOnboarding,
launchAtLogin: Bool = DefaultSettingsBuilder.launchAtLogin,
playSounds: Bool = DefaultSettingsBuilder.playSounds
@@ -70,6 +78,10 @@ struct AppSettings: Codable, Equatable, Hashable, Sendable {
self.userTimers = userTimers
self.subtleReminderSize = subtleReminderSize
self.smartMode = smartMode
self.enforceModeStrictness = enforceModeStrictness
self.enforceModeEyeBoxWidthFactor = enforceModeEyeBoxWidthFactor
self.enforceModeEyeBoxHeightFactor = enforceModeEyeBoxHeightFactor
self.enforceModeCalibration = enforceModeCalibration
self.hasCompletedOnboarding = hasCompletedOnboarding
self.launchAtLogin = launchAtLogin
self.playSounds = playSounds
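
One thing the memberwise defaults above do not cover: synthesized Codable still throws .keyNotFound when decoding a pre-0.5.0 settings payload that lacks the four new enforce-mode keys. A common mitigation, sketched on a stand-in struct (illustrative only; the repo may migrate settings differently):

import Foundation

struct MiniSettings: Codable {
    var playSounds: Bool
    var enforceModeStrictness: Double

    init(from decoder: Decoder) throws {
        let c = try decoder.container(keyedBy: CodingKeys.self)
        playSounds = try c.decode(Bool.self, forKey: .playSounds)
        // Key added in v0.5.0, absent from older saved settings:
        enforceModeStrictness =
            try c.decodeIfPresent(Double.self, forKey: .enforceModeStrictness) ?? 0.4
    }
}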

View File

@@ -1,281 +0,0 @@
//
// CalibrationData.swift
// Gaze
//
// Created by Mike Freno on 1/15/26.
//
import Foundation
// MARK: - Calibration Models
enum CalibrationStep: String, Codable, CaseIterable {
case center
case farLeft
case left
case farRight
case right
case up
case down
case topLeft
case topRight
case bottomLeft
case bottomRight
var displayName: String {
switch self {
case .center: return "Center"
case .farLeft: return "Far Left"
case .left: return "Left"
case .farRight: return "Far Right"
case .right: return "Right"
case .up: return "Up"
case .down: return "Down"
case .topLeft: return "Top Left"
case .topRight: return "Top Right"
case .bottomLeft: return "Bottom Left"
case .bottomRight: return "Bottom Right"
}
}
var instructionText: String {
switch self {
case .center:
return "Look at the center of the screen"
case .farLeft:
return "Look as far left as comfortable"
case .left:
return "Look to the left edge of the screen"
case .farRight:
return "Look as far right as comfortable"
case .right:
return "Look to the right edge of the screen"
case .up:
return "Look to the top edge of the screen"
case .down:
return "Look to the bottom edge of the screen"
case .topLeft:
return "Look to the top left corner"
case .topRight:
return "Look to the top right corner"
case .bottomLeft:
return "Look to the bottom left corner"
case .bottomRight:
return "Look to the bottom right corner"
}
}
}
struct GazeSample: Codable {
let leftRatio: Double?
let rightRatio: Double?
let averageRatio: Double
let leftVerticalRatio: Double?
let rightVerticalRatio: Double?
let averageVerticalRatio: Double
let faceWidthRatio: Double? // For distance scaling (face width / image width)
let timestamp: Date
init(
leftRatio: Double?,
rightRatio: Double?,
leftVerticalRatio: Double? = nil,
rightVerticalRatio: Double? = nil,
faceWidthRatio: Double? = nil
) {
self.leftRatio = leftRatio
self.rightRatio = rightRatio
self.leftVerticalRatio = leftVerticalRatio
self.rightVerticalRatio = rightVerticalRatio
self.faceWidthRatio = faceWidthRatio
self.averageRatio = GazeSample.average(left: leftRatio, right: rightRatio, fallback: 0.5)
self.averageVerticalRatio = GazeSample.average(
left: leftVerticalRatio,
right: rightVerticalRatio,
fallback: 0.5
)
self.timestamp = Date()
}
private static func average(left: Double?, right: Double?, fallback: Double) -> Double {
switch (left, right) {
case let (left?, right?):
return (left + right) / 2.0
case let (left?, nil):
return left
case let (nil, right?):
return right
default:
return fallback
}
}
}
struct GazeThresholds: Codable {
// Horizontal Thresholds
let minLeftRatio: Double // Looking left (↑ value)
let maxRightRatio: Double // Looking right (↓ value)
// Vertical Thresholds
let minUpRatio: Double // Looking up (↓ value, typically < 0.5)
let maxDownRatio: Double // Looking down (↑ value, typically > 0.5)
// Screen Bounds (Calibration Zone)
// Defines the rectangle of pupil ratios that correspond to looking AT the screen
let screenLeftBound: Double
let screenRightBound: Double
let screenTopBound: Double
let screenBottomBound: Double
// Reference Data for Distance Scaling
let referenceFaceWidth: Double // Average face width during calibration
var isValid: Bool {
isFiniteValues([
minLeftRatio, maxRightRatio, minUpRatio, maxDownRatio,
screenLeftBound, screenRightBound, screenTopBound, screenBottomBound,
])
}
private func isFiniteValues(_ values: [Double]) -> Bool {
values.allSatisfy { $0.isFinite }
}
/// Default thresholds based on video test data:
/// - Center (looking at screen): H 0.29-0.35
/// - Screen left edge: H 0.45-0.50
/// - Looking away left: H 0.55+
/// - Screen right edge: H 0.20-0.25
/// - Looking away right: H 0.15-
/// Coordinate system: Lower H = right, Higher H = left
static var defaultThresholds: GazeThresholds {
GazeThresholds(
minLeftRatio: 0.55, // Beyond this = looking left (away)
maxRightRatio: 0.15, // Below this = looking right (away)
minUpRatio: 0.30, // Below this = looking up (away)
maxDownRatio: 0.60, // Above this = looking down (away)
screenLeftBound: 0.50, // Left edge of screen
screenRightBound: 0.20, // Right edge of screen
screenTopBound: 0.35, // Top edge of screen
screenBottomBound: 0.55, // Bottom edge of screen
referenceFaceWidth: 0.4566 // Measured from test videos (avg of inner/outer)
)
}
}
struct CalibrationData: Codable {
var samples: [CalibrationStep: [GazeSample]]
var computedThresholds: GazeThresholds?
var calibrationDate: Date
var isComplete: Bool
private let thresholdCalculator = CalibrationThresholdCalculator()
enum CodingKeys: String, CodingKey {
case samples
case computedThresholds
case calibrationDate
case isComplete
}
init() {
self.samples = [:]
self.computedThresholds = nil
self.calibrationDate = Date()
self.isComplete = false
}
mutating func addSample(_ sample: GazeSample, for step: CalibrationStep) {
if samples[step] == nil {
samples[step] = []
}
samples[step]?.append(sample)
}
func getSamples(for step: CalibrationStep) -> [GazeSample] {
return samples[step] ?? []
}
func averageRatio(for step: CalibrationStep) -> Double? {
let stepSamples = getSamples(for: step)
guard !stepSamples.isEmpty else { return nil }
return stepSamples.reduce(0.0) { $0 + $1.averageRatio } / Double(stepSamples.count)
}
func averageVerticalRatio(for step: CalibrationStep) -> Double? {
let stepSamples = getSamples(for: step)
guard !stepSamples.isEmpty else { return nil }
return stepSamples.reduce(0.0) { $0 + $1.averageVerticalRatio } / Double(stepSamples.count)
}
func averageFaceWidth(for step: CalibrationStep) -> Double? {
let stepSamples = getSamples(for: step)
let validSamples = stepSamples.compactMap { $0.faceWidthRatio }
guard !validSamples.isEmpty else { return nil }
return validSamples.reduce(0.0, +) / Double(validSamples.count)
}
mutating func calculateThresholds() {
self.computedThresholds = thresholdCalculator.calculate(using: self)
logStepData()
}
private func logStepData() {
print(" Per-step data:")
for step in CalibrationStep.allCases {
if let h = averageRatio(for: step) {
let v = averageVerticalRatio(for: step) ?? -1
let fw = averageFaceWidth(for: step) ?? -1
let count = getSamples(for: step).count
print(
" \(step.rawValue): H=\(String(format: "%.3f", h)), V=\(String(format: "%.3f", v)), FW=\(String(format: "%.3f", fw)), samples=\(count)"
)
}
}
}
}
/// Thread-safe storage for active calibration thresholds
/// Allows non-isolated code (video processing) to read thresholds without hitting MainActor
class CalibrationState: @unchecked Sendable {
static let shared = CalibrationState()
private let queue = DispatchQueue(label: "com.gaze.calibrationState", attributes: .concurrent)
private var _thresholds: GazeThresholds?
private var _isComplete: Bool = false
var thresholds: GazeThresholds? {
get { queue.sync { _thresholds } }
set { queue.async(flags: .barrier) { self._thresholds = newValue } }
}
var isComplete: Bool {
get { queue.sync { _isComplete } }
set { queue.async(flags: .barrier) { self._isComplete = newValue } }
}
func reset() {
setState(thresholds: nil, isComplete: false)
}
func setThresholds(_ thresholds: GazeThresholds?) {
setState(thresholds: thresholds, isComplete: nil)
}
func setComplete(_ isComplete: Bool) {
setState(thresholds: nil, isComplete: isComplete)
}
private func setState(thresholds: GazeThresholds?, isComplete: Bool?) {
queue.async(flags: .barrier) {
if let thresholds {
self._thresholds = thresholds
} else if isComplete == nil {
self._thresholds = nil
}
if let isComplete {
self._isComplete = isComplete
}
}
}
}
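
The concurrent queue plus .barrier writes gives CalibrationState many-readers/one-writer semantics: reads are parallel synchronous dispatches, writes briefly drain the queue. A short usage sketch from the perspective of a video-frame callback:

// Safe from any thread, no MainActor hop required.
if let thresholds = CalibrationState.shared.thresholds {  // concurrent read
    _ = thresholds.minLeftRatio
}
CalibrationState.shared.setComplete(true)  // barrier write, serialized against readers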

View File

@@ -1,158 +0,0 @@
//
// CalibrationThresholdCalculator.swift
// Gaze
//
// Created by Mike Freno on 1/29/26.
//
import Foundation
struct CalibrationThresholdCalculator {
func calculate(using data: CalibrationData) -> GazeThresholds? {
let centerH = data.averageRatio(for: .center)
let centerV = data.averageVerticalRatio(for: .center)
guard let cH = centerH else {
print("⚠️ No center calibration data, using defaults")
return GazeThresholds.defaultThresholds
}
let cV = centerV ?? 0.45
print("📊 Calibration data collected:")
print(" Center H: \(String(format: "%.3f", cH)), V: \(String(format: "%.3f", cV))")
let screenLeftH = data.averageRatio(for: .left)
?? data.averageRatio(for: .topLeft)
?? data.averageRatio(for: .bottomLeft)
let screenRightH = data.averageRatio(for: .right)
?? data.averageRatio(for: .topRight)
?? data.averageRatio(for: .bottomRight)
let farLeftH = data.averageRatio(for: .farLeft)
let farRightH = data.averageRatio(for: .farRight)
let (leftBound, lookLeftThreshold) = horizontalBounds(
center: cH,
screenEdge: screenLeftH,
farEdge: farLeftH,
direction: .left
)
let (rightBound, lookRightThreshold) = horizontalBounds(
center: cH,
screenEdge: screenRightH,
farEdge: farRightH,
direction: .right
)
let screenTopV = data.averageVerticalRatio(for: .up)
?? data.averageVerticalRatio(for: .topLeft)
?? data.averageVerticalRatio(for: .topRight)
let screenBottomV = data.averageVerticalRatio(for: .down)
?? data.averageVerticalRatio(for: .bottomLeft)
?? data.averageVerticalRatio(for: .bottomRight)
let (topBound, lookUpThreshold) = verticalBounds(center: cV, screenEdge: screenTopV, isUpperEdge: true)
let (bottomBound, lookDownThreshold) = verticalBounds(
center: cV,
screenEdge: screenBottomV,
isUpperEdge: false
)
let allFaceWidths = CalibrationStep.allCases.compactMap { data.averageFaceWidth(for: $0) }
let refFaceWidth = allFaceWidths.isEmpty ? 0.0 : allFaceWidths.average()
let thresholds = GazeThresholds(
minLeftRatio: lookLeftThreshold,
maxRightRatio: lookRightThreshold,
minUpRatio: lookUpThreshold,
maxDownRatio: lookDownThreshold,
screenLeftBound: leftBound,
screenRightBound: rightBound,
screenTopBound: topBound,
screenBottomBound: bottomBound,
referenceFaceWidth: refFaceWidth
)
logThresholds(
thresholds: thresholds,
centerHorizontal: cH,
centerVertical: cV
)
return thresholds
}
private enum HorizontalDirection {
case left
case right
}
private func horizontalBounds(
center: Double,
screenEdge: Double?,
farEdge: Double?,
direction: HorizontalDirection
) -> (bound: Double, threshold: Double) {
let defaultBoundOffset = direction == .left ? 0.15 : -0.15
let defaultThresholdOffset = direction == .left ? 0.20 : -0.20
guard let screenEdge = screenEdge else {
return (center + defaultBoundOffset, center + defaultThresholdOffset)
}
let bound = screenEdge
let threshold: Double
if let farEdge = farEdge {
threshold = (screenEdge + farEdge) / 2.0
} else {
threshold = screenEdge + defaultThresholdOffset
}
return (bound, threshold)
}
private func verticalBounds(center: Double, screenEdge: Double?, isUpperEdge: Bool) -> (bound: Double, threshold: Double) {
let defaultBoundOffset = isUpperEdge ? -0.10 : 0.10
let defaultThresholdOffset = isUpperEdge ? -0.15 : 0.15
guard let screenEdge = screenEdge else {
return (center + defaultBoundOffset, center + defaultThresholdOffset)
}
let bound = screenEdge
let edgeDistance = isUpperEdge ? center - screenEdge : screenEdge - center
let threshold = isUpperEdge ? screenEdge - (edgeDistance * 0.5) : screenEdge + (edgeDistance * 0.5)
return (bound, threshold)
}
private func logThresholds(
thresholds: GazeThresholds,
centerHorizontal: Double,
centerVertical: Double
) {
print("✓ Calibration thresholds calculated:")
print(" Center: H=\(String(format: "%.3f", centerHorizontal)), V=\(String(format: "%.3f", centerVertical))")
print(
" Screen H-Range: \(String(format: "%.3f", thresholds.screenRightBound)) to \(String(format: "%.3f", thresholds.screenLeftBound))"
)
print(
" Screen V-Range: \(String(format: "%.3f", thresholds.screenTopBound)) to \(String(format: "%.3f", thresholds.screenBottomBound))"
)
print(
" Away Thresholds: L≥\(String(format: "%.3f", thresholds.minLeftRatio)), R≤\(String(format: "%.3f", thresholds.maxRightRatio))"
)
print(
" Away Thresholds: U≤\(String(format: "%.3f", thresholds.minUpRatio)), D≥\(String(format: "%.3f", thresholds.maxDownRatio))"
)
print(" Ref Face Width: \(String(format: "%.3f", thresholds.referenceFaceWidth))")
}
}
private extension Array where Element == Double {
func average() -> Double {
guard !isEmpty else { return 0 }
return reduce(0.0, +) / Double(count)
}
}
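
A worked example of horizontalBounds for the left side (sample values hypothetical):

// center = 0.35, screenEdge (.left) = 0.50, farEdge (.farLeft) = 0.60
// bound     = screenEdge        = 0.50
// threshold = (0.50 + 0.60) / 2 = 0.55   (midpoint of screen edge and far edge)
//
// Missing .farLeft sample: threshold = 0.50 + 0.20 = 0.70 (default offset)
// Missing .left sample:    bound = 0.35 + 0.15 = 0.50, threshold = 0.35 + 0.20 = 0.55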

View File

@@ -16,6 +16,10 @@ struct DefaultSettingsBuilder {
static let postureIntervalMinutes = 30
static let subtleReminderSize: ReminderSize = .medium
static let smartMode: SmartModeSettings = .defaults
static let enforceModeStrictness = 0.4
static let enforceModeEyeBoxWidthFactor = 0.20
static let enforceModeEyeBoxHeightFactor = 0.02
static let enforceModeCalibration: EnforceModeCalibration? = nil
static let hasCompletedOnboarding = false
static let launchAtLogin = false
static let playSounds = true
@@ -31,6 +35,10 @@ struct DefaultSettingsBuilder {
userTimers: [],
subtleReminderSize: subtleReminderSize,
smartMode: smartMode,
enforceModeStrictness: enforceModeStrictness,
enforceModeEyeBoxWidthFactor: enforceModeEyeBoxWidthFactor,
enforceModeEyeBoxHeightFactor: enforceModeEyeBoxHeightFactor,
enforceModeCalibration: enforceModeCalibration,
hasCompletedOnboarding: hasCompletedOnboarding,
launchAtLogin: launchAtLogin,
playSounds: playSounds

View File

@@ -0,0 +1,19 @@
//
// EnforceModeCalibration.swift
// Gaze
//
// Created by Mike Freno on 2/1/26.
//
import Foundation
struct EnforceModeCalibration: Codable, Equatable, Hashable, Sendable {
let createdAt: Date
let eyeBoxWidthFactor: Double
let eyeBoxHeightFactor: Double
let faceWidthRatio: Double
let horizontalMin: Double
let horizontalMax: Double
let verticalMin: Double
let verticalMax: Double
}
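
Since AppSettings stores this as an optional Codable value, the struct round-trips through JSONEncoder like any other settings field. A quick sketch (field values hypothetical; ISO-8601 dates match the convention CalibratorService uses elsewhere in this diff):

import Foundation

let calibration = EnforceModeCalibration(
    createdAt: Date(),
    eyeBoxWidthFactor: 0.20,   // matches the DefaultSettingsBuilder default
    eyeBoxHeightFactor: 0.02,  // matches the DefaultSettingsBuilder default
    faceWidthRatio: 0.45,
    horizontalMin: 0.20, horizontalMax: 0.50,
    verticalMin: 0.30, verticalMax: 0.60
)
let encoder = JSONEncoder()
encoder.dateEncodingStrategy = .iso8601
let data = try encoder.encode(calibration)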

View File

@@ -7,11 +7,35 @@
import Foundation
enum PauseReason: Codable, Equatable, Hashable {
enum PauseReason: nonisolated Codable, nonisolated Sendable, nonisolated Equatable, nonisolated Hashable {
case manual
case fullscreen
case idle
case system
nonisolated static func == (lhs: PauseReason, rhs: PauseReason) -> Bool {
switch (lhs, rhs) {
case (.manual, .manual),
(.fullscreen, .fullscreen),
(.idle, .idle),
(.system, .system):
return true
default:
return false
}
}
nonisolated func hash(into hasher: inout Hasher) {
switch self {
case .manual:
hasher.combine(0)
case .fullscreen:
hasher.combine(1)
case .idle:
hasher.combine(2)
case .system:
hasher.combine(3)
}
}
}
extension PauseReason: Sendable {}
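
The hand-written == and hash(into:) match what the compiler would synthesize for a payload-free enum; writing them out is presumably what allows the nonisolated marking under this project's SWIFT_DEFAULT_ACTOR_ISOLATION = MainActor build setting (visible in the pbxproj diff above). A hypothetical caller that depends on it:

// Runs off the main actor, so it relies on the nonisolated == above.
nonisolated func isUserInitiatedPause(_ reason: PauseReason?) -> Bool {
    reason == .manual || reason == .idle
}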

View File

@@ -0,0 +1,16 @@
//
// SetupPresentation.swift
// Gaze
//
// Created by Mike Freno on 1/30/26.
//
import Foundation
enum SetupPresentation {
case window
case card
var isWindow: Bool { self == .window }
var isCard: Bool { self == .card }
}

View File

@@ -8,7 +8,7 @@
import Foundation
/// Unified identifier for both built-in and user-defined timers
enum TimerIdentifier: Hashable, Codable {
enum TimerIdentifier: Hashable, Codable, Sendable {
case builtIn(TimerType)
case user(id: String)

View File

@@ -7,7 +7,7 @@
import Foundation
struct TimerState: Equatable, Hashable {
struct TimerState: Equatable, Hashable, Sendable {
let identifier: TimerIdentifier
var remainingSeconds: Int
var isPaused: Bool
@@ -45,7 +45,7 @@ struct TimerState: Equatable, Hashable {
}
}
enum TimerStateBuilder {
enum TimerStateBuilder: Sendable {
static func make(
identifier: TimerIdentifier,
intervalSeconds: Int,

View File

@@ -1,83 +0,0 @@
//
// CalibrationFlowController.swift
// Gaze
//
// Created by Mike Freno on 1/29/26.
//
import Combine
import Foundation
final class CalibrationFlowController: ObservableObject {
@Published private(set) var currentStep: CalibrationStep?
@Published private(set) var currentStepIndex = 0
@Published private(set) var isCollectingSamples = false
@Published private(set) var samplesCollected = 0
private let samplesPerStep: Int
private let calibrationSteps: [CalibrationStep]
init(samplesPerStep: Int, calibrationSteps: [CalibrationStep]) {
self.samplesPerStep = samplesPerStep
self.calibrationSteps = calibrationSteps
self.currentStep = calibrationSteps.first
}
func start() {
isCollectingSamples = false
currentStepIndex = 0
currentStep = calibrationSteps.first
samplesCollected = 0
}
func stop() {
isCollectingSamples = false
currentStep = nil
currentStepIndex = 0
samplesCollected = 0
}
func startCollectingSamples() {
guard currentStep != nil else { return }
isCollectingSamples = true
}
func resetSamples() {
isCollectingSamples = false
samplesCollected = 0
}
func markSampleCollected() -> Bool {
samplesCollected += 1
return samplesCollected >= samplesPerStep
}
func advanceToNextStep() -> Bool {
isCollectingSamples = false
currentStepIndex += 1
guard currentStepIndex < calibrationSteps.count else {
currentStep = nil
return false
}
currentStep = calibrationSteps[currentStepIndex]
samplesCollected = 0
return true
}
func skipStep() -> Bool {
advanceToNextStep()
}
var progress: Double {
let totalSteps = calibrationSteps.count
guard totalSteps > 0 else { return 0 }
let currentProgress = Double(samplesCollected) / Double(samplesPerStep)
return (Double(currentStepIndex) + currentProgress) / Double(totalSteps)
}
var progressText: String {
"\(min(currentStepIndex + 1, calibrationSteps.count)) of \(calibrationSteps.count)"
}
}
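
A worked example of the progress math, using the nine steps and samplesPerStep = 30 that CalibratorService passes in below (position hypothetical):

// 15 samples into the third step:
//   currentStepIndex = 2, samplesCollected = 15
//   progress = (2 + 15/30) / 9 = 2.5 / 9 ≈ 0.278
//   progressText = "3 of 9"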

View File

@@ -1,292 +0,0 @@
//
// CalibratorService.swift
// Gaze
//
// Created by Mike Freno on 1/29/26.
//
import Combine
import Foundation
import AppKit
import SwiftUI
final class CalibratorService: ObservableObject {
static let shared = CalibratorService()
@Published var isCalibrating = false
@Published var isCollectingSamples = false
@Published var currentStep: CalibrationStep?
@Published var currentStepIndex = 0
@Published var samplesCollected = 0
@Published var calibrationData = CalibrationData()
private let samplesPerStep = 30
private let userDefaultsKey = "eyeTrackingCalibration"
private let flowController: CalibrationFlowController
private var windowController: NSWindowController?
private init() {
self.flowController = CalibrationFlowController(
samplesPerStep: samplesPerStep,
calibrationSteps: [
.center,
.left,
.right,
.farLeft,
.farRight,
.up,
.down,
.topLeft,
.topRight
]
)
loadCalibration()
bindFlowController()
}
private func bindFlowController() {
flowController.$isCollectingSamples
.assign(to: &$isCollectingSamples)
flowController.$currentStep
.assign(to: &$currentStep)
flowController.$currentStepIndex
.assign(to: &$currentStepIndex)
flowController.$samplesCollected
.assign(to: &$samplesCollected)
}
func startCalibration() {
print("🎯 Starting calibration...")
isCalibrating = true
flowController.start()
calibrationData = CalibrationData()
}
func resetForNewCalibration() {
print("🔄 Resetting for new calibration...")
calibrationData = CalibrationData()
flowController.start()
}
func startCollectingSamples() {
guard isCalibrating else { return }
print("📊 Started collecting samples for step: \(currentStep?.displayName ?? "unknown")")
flowController.startCollectingSamples()
}
func collectSample(
leftRatio: Double?,
rightRatio: Double?,
leftVertical: Double? = nil,
rightVertical: Double? = nil,
faceWidthRatio: Double? = nil
) {
guard isCalibrating, isCollectingSamples, let step = currentStep else { return }
let sample = GazeSample(
leftRatio: leftRatio,
rightRatio: rightRatio,
leftVerticalRatio: leftVertical,
rightVerticalRatio: rightVertical,
faceWidthRatio: faceWidthRatio
)
calibrationData.addSample(sample, for: step)
if flowController.markSampleCollected() {
advanceToNextStep()
}
}
private func advanceToNextStep() {
if flowController.advanceToNextStep() {
print("📍 Calibration step: \(currentStep?.displayName ?? "unknown")")
} else {
finishCalibration()
}
}
func skipStep() {
guard isCalibrating, let step = currentStep else { return }
print("⏭️ Skipping calibration step: \(step.displayName)")
advanceToNextStep()
}
func showCalibrationOverlay() {
guard let screen = NSScreen.main else { return }
let window = KeyableWindow(
contentRect: screen.frame,
styleMask: [.borderless, .fullSizeContentView],
backing: .buffered,
defer: false
)
window.level = .screenSaver
window.isOpaque = true
window.backgroundColor = .black
window.collectionBehavior = [.canJoinAllSpaces, .fullScreenAuxiliary]
window.acceptsMouseMovedEvents = true
window.ignoresMouseEvents = false
let overlayView = CalibrationOverlayView {
self.dismissCalibrationOverlay()
}
window.contentView = NSHostingView(rootView: overlayView)
windowController = NSWindowController(window: window)
windowController?.showWindow(nil)
window.makeKeyAndOrderFront(nil)
NSApp.activate(ignoringOtherApps: true)
print("🎯 Calibration overlay window opened")
}
func dismissCalibrationOverlay() {
windowController?.close()
windowController = nil
print("🎯 Calibration overlay window closed")
}
func finishCalibration() {
print("✓ Calibration complete, calculating thresholds...")
calibrationData.calculateThresholds()
calibrationData.isComplete = true
calibrationData.calibrationDate = Date()
saveCalibration()
applyCalibration()
isCalibrating = false
flowController.stop()
print("✓ Calibration saved and applied")
}
func cancelCalibration() {
print("❌ Calibration cancelled")
isCalibrating = false
flowController.stop()
calibrationData = CalibrationData()
CalibrationState.shared.reset()
}
private func saveCalibration() {
do {
let encoder = JSONEncoder()
encoder.dateEncodingStrategy = .iso8601
let data = try encoder.encode(calibrationData)
UserDefaults.standard.set(data, forKey: userDefaultsKey)
print("💾 Calibration data saved to UserDefaults")
} catch {
print("❌ Failed to save calibration: \(error)")
}
}
func loadCalibration() {
guard let data = UserDefaults.standard.data(forKey: userDefaultsKey) else {
print(" No existing calibration found")
return
}
do {
let decoder = JSONDecoder()
decoder.dateDecodingStrategy = .iso8601
calibrationData = try decoder.decode(CalibrationData.self, from: data)
if isCalibrationValid() {
print("✓ Loaded valid calibration from \(calibrationData.calibrationDate)")
applyCalibration()
} else {
print("⚠️ Calibration expired, needs recalibration")
}
} catch {
print("❌ Failed to load calibration: \(error)")
}
}
func clearCalibration() {
UserDefaults.standard.removeObject(forKey: userDefaultsKey)
calibrationData = CalibrationData()
CalibrationState.shared.reset()
print("🗑️ Calibration data cleared")
}
func isCalibrationValid() -> Bool {
guard calibrationData.isComplete,
let thresholds = calibrationData.computedThresholds,
thresholds.isValid else {
return false
}
return true
}
func needsRecalibration() -> Bool {
return !isCalibrationValid()
}
private func applyCalibration() {
guard let thresholds = calibrationData.computedThresholds else {
print("⚠️ No thresholds to apply")
return
}
CalibrationState.shared.setThresholds(thresholds)
CalibrationState.shared.setComplete(true)
print("✓ Applied calibrated thresholds:")
print(" Looking left: ≥\(String(format: "%.3f", thresholds.minLeftRatio))")
print(" Looking right: ≤\(String(format: "%.3f", thresholds.maxRightRatio))")
print(" Looking up: ≤\(String(format: "%.3f", thresholds.minUpRatio))")
print(" Looking down: ≥\(String(format: "%.3f", thresholds.maxDownRatio))")
print(" Screen Bounds: [\(String(format: "%.2f", thresholds.screenRightBound))..\(String(format: "%.2f", thresholds.screenLeftBound))] x [\(String(format: "%.2f", thresholds.screenTopBound))..\(String(format: "%.2f", thresholds.screenBottomBound))]")
}
func getCalibrationSummary() -> String {
guard calibrationData.isComplete else {
return "No calibration data"
}
let dateFormatter = DateFormatter()
dateFormatter.dateStyle = .medium
dateFormatter.timeStyle = .short
var summary = "Calibrated: \(dateFormatter.string(from: calibrationData.calibrationDate))\n"
if let thresholds = calibrationData.computedThresholds {
summary += "H-Range: \(String(format: "%.3f", thresholds.screenRightBound)) to \(String(format: "%.3f", thresholds.screenLeftBound))\n"
summary += "V-Range: \(String(format: "%.3f", thresholds.screenTopBound)) to \(String(format: "%.3f", thresholds.screenBottomBound))\n"
summary += "Ref Face Width: \(String(format: "%.3f", thresholds.referenceFaceWidth))"
}
return summary
}
var progress: Double {
flowController.progress
}
var progressText: String {
flowController.progressText
}
func submitSampleToBridge(
leftRatio: Double,
rightRatio: Double,
leftVertical: Double? = nil,
rightVertical: Double? = nil,
faceWidthRatio: Double = 0
) {
Task { [weak self] in
self?.collectSample(
leftRatio: leftRatio,
rightRatio: rightRatio,
leftVertical: leftVertical,
rightVertical: rightVertical,
faceWidthRatio: faceWidthRatio
)
}
}
}
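
A note on the bridge above: `submitSampleToBridge` exists so code running off the main actor can feed samples into the `@MainActor` service without blocking. A minimal sketch of a hypothetical capture-queue call site — only the service call itself is real API from this diff, the callback name is invented:

// Hypothetical callback; the Task hop mirrors how the old EyeTrackingService
// delegate reached the main actor before submitting.
func onPupilSample(left: Double, right: Double, faceWidth: Double) {
    Task { @MainActor in
        CalibratorService.shared.submitSampleToBridge(
            leftRatio: left,
            rightRatio: right,
            faceWidthRatio: faceWidth // vertical ratios default to nil
        )
    }
}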

View File

@@ -0,0 +1,268 @@
//
// EnforceModeCalibrationService.swift
// Gaze
//
// Created by Mike Freno on 2/1/26.
//
import AppKit
import Combine
import Foundation
import SwiftUI
@MainActor
final class EnforceModeCalibrationService: ObservableObject {
static let shared = EnforceModeCalibrationService()
@Published var isCalibrating = false
@Published var isCollectingSamples = false
@Published var currentStep: CalibrationStep = .eyeBox
@Published var targetIndex = 0
@Published var countdownProgress: Double = 1.0
@Published var samplesCollected = 0
private var samples: [CalibrationSample] = []
private let targets = CalibrationTarget.defaultTargets
let settingsManager = SettingsManager.shared
private let eyeTrackingService = EyeTrackingService.shared
private var countdownTimer: Timer?
private var sampleTimer: Timer?
private let countdownDuration: TimeInterval = 0.8
private let preCountdownPause: TimeInterval = 0.8
private let sampleInterval: TimeInterval = 0.02
private let samplesPerTarget = 20
private var windowController: NSWindowController?
func start() {
samples.removeAll()
targetIndex = 0
currentStep = .eyeBox
isCollectingSamples = false
samplesCollected = 0
countdownProgress = 1.0
isCalibrating = true
}
func presentOverlay() {
guard windowController == nil else { return }
guard let screen = NSScreen.main else { return }
start()
let window = KeyableWindow(
contentRect: screen.frame,
styleMask: [.borderless, .fullSizeContentView],
backing: .buffered,
defer: false
)
window.level = .screenSaver
window.isOpaque = true
window.backgroundColor = .black
window.collectionBehavior = [.canJoinAllSpaces, .fullScreenAuxiliary]
window.acceptsMouseMovedEvents = true
window.ignoresMouseEvents = false
let overlayView = EnforceModeCalibrationOverlayView()
window.contentView = NSHostingView(rootView: overlayView)
windowController = NSWindowController(window: window)
windowController?.showWindow(nil)
window.makeKeyAndOrderFront(nil)
NSApp.activate(ignoringOtherApps: true)
}
func dismissOverlay() {
windowController?.close()
windowController = nil
isCalibrating = false
}
func cancel() {
stopCountdown()
stopSampleCollection()
isCalibrating = false
}
func advance() {
switch currentStep {
case .eyeBox:
currentStep = .targets
// Start the countdown immediately when transitioning to targets so the first point isn't sampled twice
startCountdown()
case .targets:
if targetIndex < targets.count - 1 {
targetIndex += 1
startCountdown()
} else {
finish()
}
case .complete:
isCalibrating = false
}
}
func recordSample() {
let debugState = eyeTrackingService.currentDebugSnapshot()
guard let h = debugState.normalizedHorizontal,
let v = debugState.normalizedVertical,
let faceWidth = debugState.faceWidthRatio else {
return
}
let target = targets[targetIndex]
samples.append(
CalibrationSample(
target: target,
horizontal: h,
vertical: v,
faceWidthRatio: faceWidth
)
)
}
func currentTarget() -> CalibrationTarget {
targets[targetIndex]
}
private func startCountdown() {
stopCountdown()
stopSampleCollection()
countdownProgress = 1.0
let startTime = Date()
countdownTimer = Timer.scheduledTimer(withTimeInterval: 0.02, repeats: true) { [weak self] _ in
guard let self else { return }
Task { @MainActor in
let elapsed = Date().timeIntervalSince(startTime)
// Pause before starting the countdown
if elapsed < self.preCountdownPause {
self.countdownProgress = 1.0
return
}
// Start the actual countdown after pause
let countdownElapsed = elapsed - self.preCountdownPause
let remaining = max(0, self.countdownDuration - countdownElapsed)
self.countdownProgress = remaining / self.countdownDuration
if remaining <= 0 {
self.stopCountdown()
self.startSampleCollection()
}
}
}
}
private func stopCountdown() {
countdownTimer?.invalidate()
countdownTimer = nil
// Intentionally no countdownProgress reset here: stopCountdown() also runs
// during target transitions, where snapping back to 1.0 is unwanted.
// countdownProgress = 1.0
}
private func startSampleCollection() {
stopSampleCollection()
samplesCollected = 0
isCollectingSamples = true
sampleTimer = Timer.scheduledTimer(withTimeInterval: sampleInterval, repeats: true) { [weak self] _ in
guard let self else { return }
Task { @MainActor in
self.recordSample()
self.samplesCollected += 1
if self.samplesCollected >= self.samplesPerTarget {
self.stopSampleCollection()
self.advance()
}
}
}
}
private func stopSampleCollection() {
sampleTimer?.invalidate()
sampleTimer = nil
isCollectingSamples = false
}
private func finish() {
stopCountdown()
stopSampleCollection()
guard let calibration = CalibrationSample.makeCalibration(samples: samples) else {
currentStep = .complete
return
}
settingsManager.settings.enforceModeCalibration = calibration
currentStep = .complete
}
var progress: Double {
guard !targets.isEmpty else { return 0 }
return Double(targetIndex) / Double(targets.count)
}
var progressText: String {
"\(min(targetIndex + 1, targets.count))/\(targets.count)"
}
}
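
Pacing, for reference: each target costs roughly `preCountdownPause` (0.8 s) + `countdownDuration` (0.8 s) + `samplesPerTarget × sampleInterval` (20 × 0.02 s = 0.4 s) ≈ 2 s, so a full nine-target pass lands near 18 s. The arithmetic, as a standalone sketch:

// Standalone estimate built from the constants above.
let perTarget = 0.8 + 0.8 + 20 * 0.02      // pause + countdown + sampling ≈ 2.0 s
let fullPass = perTarget * 9               // nine targets ≈ 18 s
print(String(format: "%.1f s per target, %.0f s total", perTarget, fullPass))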
enum CalibrationStep: String {
case eyeBox
case targets
case complete
}
struct CalibrationTarget: Identifiable, Sendable {
let id = UUID()
let x: CGFloat
let y: CGFloat
let label: String
static let defaultTargets: [CalibrationTarget] = [
CalibrationTarget(x: 0.1, y: 0.1, label: "Top Left"),
CalibrationTarget(x: 0.5, y: 0.1, label: "Top"),
CalibrationTarget(x: 0.9, y: 0.1, label: "Top Right"),
CalibrationTarget(x: 0.9, y: 0.5, label: "Right"),
CalibrationTarget(x: 0.9, y: 0.9, label: "Bottom Right"),
CalibrationTarget(x: 0.5, y: 0.9, label: "Bottom"),
CalibrationTarget(x: 0.1, y: 0.9, label: "Bottom Left"),
CalibrationTarget(x: 0.1, y: 0.5, label: "Left"),
CalibrationTarget(x: 0.5, y: 0.5, label: "Center")
]
}
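
The targets live in unit coordinates with y growing downward (0.1 ≈ top edge). A hedged sketch of mapping one onto an AppKit screen, whose origin is bottom-left — the helper is illustrative, not part of this change:

// Illustrative only: place a normalized target on a screen frame.
func screenPoint(for target: CalibrationTarget, in frame: CGRect) -> CGPoint {
    CGPoint(
        x: frame.minX + target.x * frame.width,
        y: frame.minY + (1 - target.y) * frame.height // flip: AppKit y grows upward
    )
}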
private struct CalibrationSample: Sendable {
let target: CalibrationTarget
let horizontal: Double
let vertical: Double
let faceWidthRatio: Double
static func makeCalibration(samples: [CalibrationSample]) -> EnforceModeCalibration? {
guard !samples.isEmpty else { return nil }
let horizontalValues = samples.map { $0.horizontal }
let verticalValues = samples.map { $0.vertical }
let faceWidths = samples.map { $0.faceWidthRatio }
guard let minH = horizontalValues.min(),
let maxH = horizontalValues.max(),
let minV = verticalValues.min(),
let maxV = verticalValues.max() else {
return nil
}
let faceWidthMean = faceWidths.reduce(0, +) / Double(faceWidths.count)
return EnforceModeCalibration(
createdAt: Date(),
eyeBoxWidthFactor: SettingsManager.shared.settings.enforceModeEyeBoxWidthFactor,
eyeBoxHeightFactor: SettingsManager.shared.settings.enforceModeEyeBoxHeightFactor,
faceWidthRatio: faceWidthMean,
horizontalMin: minH,
horizontalMax: maxH,
verticalMin: minV,
verticalMax: maxV
)
}
}
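
`makeCalibration` reduces the whole session to the bounding box of the gaze samples plus a mean face width as a distance reference. With made-up numbers:

// Arithmetic sketch of the calibration box (values invented for illustration).
let h = [0.35, 0.32, 0.68, 0.51], v = [0.41, 0.55, 0.62, 0.44]
let hMin = h.min()!, hMax = h.max()!       // 0.32 ... 0.68
let vMin = v.min()!, vMax = v.max()!       // 0.41 ... 0.62
let halfWidth = (hMax - hMin) / 2          // 0.18 — later scaled by applyStrictness
let halfHeight = (vMax - vMin) / 2         // 0.105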

View File

@@ -31,6 +31,9 @@ class EnforceModeService: ObservableObject {
private var timerEngine: TimerEngine?
private var cancellables = Set<AnyCancellable>()
private var faceDetectionTimer: Timer?
private var trackingDebugTimer: Timer?
private var trackingLapStats = TrackingLapStats()
private var lastLookAwayTime: Date = .distantPast
// MARK: - Configuration
@@ -60,14 +63,15 @@ class EnforceModeService: ObservableObject {
}
private func setupEyeTrackingObservers() {
eyeTrackingService.$userLookingAtScreen
eyeTrackingService.$trackingResult
.sink { [weak self] _ in
guard let self, self.isCameraActive else { return }
self.checkUserCompliance()
}
.store(in: &cancellables)
eyeTrackingService.$faceDetected
eyeTrackingService.$trackingResult
.map { $0.faceDetected }
.sink { [weak self] faceDetected in
guard let self else { return }
if faceDetected {
@@ -75,6 +79,33 @@ class EnforceModeService: ObservableObject {
}
}
.store(in: &cancellables)
settingsManager._settingsSubject
.receive(on: RunLoop.main)
.sink { [weak self] _ in
self?.refreshEnforceModeState()
}
.store(in: &cancellables)
$isCameraActive
.removeDuplicates()
.sink { [weak self] isActive in
if isActive {
self?.startTrackingDebugTimer()
} else {
self?.stopTrackingDebugTimer()
}
}
.store(in: &cancellables)
}
private func refreshEnforceModeState() {
let cameraService = CameraAccessService.shared
let enabled = isEnforcementEnabled && cameraService.isCameraAuthorized
if isEnforceModeEnabled != enabled {
isEnforceModeEnabled = enabled
logDebug("🔄 Enforce mode state refreshed: \(enabled)")
}
}
// MARK: - Enable/Disable
@@ -150,11 +181,18 @@ class EnforceModeService: ObservableObject {
}
func evaluateCompliance(
isLookingAtScreen: Bool,
gazeState: GazeState,
faceDetected: Bool
) -> ComplianceResult {
guard faceDetected else { return .faceNotDetected }
return isLookingAtScreen ? .notCompliant : .compliant
guard faceDetected else { return .compliant }
switch gazeState {
case .lookingAway:
return .compliant
case .lookingAtScreen:
return .notCompliant
case .unknown:
return .notCompliant
}
}
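// Semantics worth spelling out: during a break, "compliant" means the user is
// honoring the break. No face -> assumed compliant (they walked away); a gaze
// we cannot classify (.unknown) is treated conservatively as not compliant,
// i.e. as if they were still looking at the screen.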
// MARK: - Camera Control
@@ -187,6 +225,7 @@ class EnforceModeService: ObservableObject {
eyeTrackingService.stopEyeTracking()
isCameraActive = false
stopFaceDetectionTimer()
stopTrackingDebugTimer()
userCompliedWithBreak = false
}
@@ -198,17 +237,19 @@ class EnforceModeService: ObservableObject {
return
}
let compliance = evaluateCompliance(
isLookingAtScreen: eyeTrackingService.userLookingAtScreen,
faceDetected: eyeTrackingService.faceDetected
gazeState: eyeTrackingService.trackingResult.gazeState,
faceDetected: eyeTrackingService.trackingResult.faceDetected
)
switch compliance {
case .compliant:
lastLookAwayTime = Date()
userCompliedWithBreak = true
case .notCompliant:
userCompliedWithBreak = false
case .faceNotDetected:
userCompliedWithBreak = false
lastLookAwayTime = Date()
userCompliedWithBreak = true
}
}
@@ -233,6 +274,48 @@ class EnforceModeService: ObservableObject {
faceDetectionTimer = nil
}
private func startTrackingDebugTimer() {
stopTrackingDebugTimer()
trackingDebugTimer = Timer.scheduledTimer(withTimeInterval: 0.2, repeats: true) { [weak self] _ in
self?.logTrackingDebugSnapshot()
}
}
private func stopTrackingDebugTimer() {
trackingDebugTimer?.invalidate()
trackingDebugTimer = nil
}
private func logTrackingDebugSnapshot() {
guard isCameraActive else { return }
let debugState = eyeTrackingService.debugState
let faceWidth = debugState.faceWidthRatio.map { String(format: "%.3f", $0) } ?? "-"
let horizontal = debugState.normalizedHorizontal.map { String(format: "%.3f", $0) } ?? "-"
let vertical = debugState.normalizedVertical.map { String(format: "%.3f", $0) } ?? "-"
trackingLapStats.ingest(
faceWidth: debugState.faceWidthRatio,
horizontal: debugState.normalizedHorizontal,
vertical: debugState.normalizedVertical
)
logDebug(
"📊 Tracking | faceWidth=\(faceWidth) | h=\(horizontal) | v=\(vertical)",
category: "EyeTracking"
)
}
func logTrackingLap() {
logDebug("🏁 Tracking Lap", category: "EyeTracking")
logTrackingDebugSnapshot()
if let summary = trackingLapStats.summaryString() {
logDebug("📈 Lap Stats | \(summary)", category: "EyeTracking")
}
trackingLapStats.reset()
}
private func checkFaceDetectionTimeout() {
guard isCameraActive else {
stopFaceDetectionTimer()
@@ -241,11 +324,30 @@ class EnforceModeService: ObservableObject {
let timeSinceLastDetection = Date().timeIntervalSince(lastFaceDetectionTime)
if timeSinceLastDetection > faceDetectionTimeout {
logDebug("⏰ Person not detected for \(faceDetectionTimeout)s. Temporarily disabling enforce mode.")
disableEnforceMode()
logDebug("⏰ Person not detected for \(faceDetectionTimeout)s. Assuming look away.")
lastLookAwayTime = Date()
userCompliedWithBreak = true
lastFaceDetectionTime = Date()
}
}
func shouldAdvanceLookAwayCountdown() -> Bool {
guard isEnforceModeEnabled else { return true }
guard isCameraActive else { return true }
if !eyeTrackingService.trackingResult.faceDetected {
lastLookAwayTime = Date()
return true
}
if eyeTrackingService.trackingResult.gazeState == .lookingAway {
lastLookAwayTime = Date()
return true
}
return Date().timeIntervalSince(lastLookAwayTime) <= 0.25
}
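// Grace window: after any look-away (or face drop-out) the countdown keeps
// advancing for up to 0.25 s of looking-at-screen readings, so a single-frame
// tracking flicker does not stall the break timer.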
// MARK: - Test Mode
func startTestMode() async {
@@ -271,3 +373,45 @@ class EnforceModeService: ObservableObject {
isTestMode = false
}
}
private struct TrackingLapStats {
private var faceWidthValues: [Double] = []
private var horizontalValues: [Double] = []
private var verticalValues: [Double] = []
mutating func ingest(faceWidth: Double?, horizontal: Double?, vertical: Double?) {
if let faceWidth { faceWidthValues.append(faceWidth) }
if let horizontal { horizontalValues.append(horizontal) }
if let vertical { verticalValues.append(vertical) }
}
mutating func reset() {
faceWidthValues.removeAll(keepingCapacity: true)
horizontalValues.removeAll(keepingCapacity: true)
verticalValues.removeAll(keepingCapacity: true)
}
func summaryString() -> String? {
guard !faceWidthValues.isEmpty || !horizontalValues.isEmpty || !verticalValues.isEmpty else {
return nil
}
let faceWidth = stats(for: faceWidthValues)
let horizontal = stats(for: horizontalValues)
let vertical = stats(for: verticalValues)
return "faceWidth[\(faceWidth)] h[\(horizontal)] v[\(vertical)]"
}
private func stats(for values: [Double]) -> String {
guard let minValue = values.min(), let maxValue = values.max() else {
return "-"
}
let mean = values.reduce(0, +) / Double(values.count)
return "min=\(format(minValue)) max=\(format(maxValue)) mean=\(format(mean))"
}
private func format(_ value: Double) -> String {
String(format: "%.3f", value)
}
}
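
`TrackingLapStats` is a plain min/max/mean accumulator over debug snapshots, so each lap line summarizes the spread since the previous `logTrackingLap()`. Roughly (the struct is file-private, so this is illustrative):

var stats = TrackingLapStats()
stats.ingest(faceWidth: 0.30, horizontal: 0.48, vertical: 0.52)
stats.ingest(faceWidth: 0.31, horizontal: 0.55, vertical: 0.50)
// summaryString() ->
// "faceWidth[min=0.300 max=0.310 mean=0.305] h[min=0.480 max=0.550 mean=0.515] v[...]"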

View File

@@ -10,16 +10,20 @@ import Combine
import Foundation
protocol CameraSessionDelegate: AnyObject {
nonisolated func cameraSession(
@MainActor func cameraSession(
_ manager: CameraSessionManager,
didOutput pixelBuffer: CVPixelBuffer,
imageSize: CGSize
)
}
private struct PixelBufferBox: @unchecked Sendable {
let buffer: CVPixelBuffer
}
final class CameraSessionManager: NSObject, ObservableObject {
@Published private(set) var isRunning = false
weak var delegate: CameraSessionDelegate?
nonisolated(unsafe) weak var delegate: CameraSessionDelegate?
private var captureSession: AVCaptureSession?
private var videoOutput: AVCaptureVideoDataOutput?
@@ -96,6 +100,13 @@ final class CameraSessionManager: NSObject, ObservableObject {
}
session.addOutput(output)
if let connection = output.connection(with: .video) {
if connection.isVideoMirroringSupported {
connection.automaticallyAdjustsVideoMirroring = false
connection.isVideoMirrored = true
}
}
self.captureSession = session
self.videoOutput = output
}
@@ -116,6 +127,11 @@ extension CameraSessionManager: AVCaptureVideoDataOutputSampleBufferDelegate {
height: CVPixelBufferGetHeight(pixelBuffer)
)
delegate?.cameraSession(self, didOutput: pixelBuffer, imageSize: size)
let bufferBox = PixelBufferBox(buffer: pixelBuffer)
DispatchQueue.main.async { [weak self, bufferBox] in
guard let self else { return }
self.delegate?.cameraSession(self, didOutput: bufferBox.buffer, imageSize: size)
}
}
}
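
Two things change in this file: the connection is force-mirrored so left/right match what the user sees, and frames now hop to the main queue because the delegate became `@MainActor`. `PixelBufferBox` is the usual `@unchecked Sendable` shim for carrying a non-Sendable `CVPixelBuffer` across that hop. The pattern in isolation — a sketch, sound only if the producer stops touching the value after handoff:

import CoreVideo
import Foundation

// Generic form of the shim used above.
struct Box<Value>: @unchecked Sendable { let value: Value }

func handOff(_ buffer: CVPixelBuffer, to work: @escaping (CVPixelBuffer) -> Void) {
    let box = Box(value: buffer)
    DispatchQueue.main.async { work(box.value) }
}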

View File

@@ -5,7 +5,6 @@
// Created by Mike Freno on 1/13/26.
//
import AppKit
import AVFoundation
import Combine
import Foundation
@@ -14,81 +13,97 @@ class EyeTrackingService: NSObject, ObservableObject {
static let shared = EyeTrackingService()
@Published var isEyeTrackingActive = false
@Published var isEyesClosed = false
@Published var userLookingAtScreen = true
@Published var faceDetected = false
@Published var debugLeftPupilRatio: Double?
@Published var debugRightPupilRatio: Double?
@Published var debugLeftVerticalRatio: Double?
@Published var debugRightVerticalRatio: Double?
@Published var debugYaw: Double?
@Published var debugPitch: Double?
@Published var enableDebugLogging: Bool = false {
didSet {
debugAdapter.enableDebugLogging = enableDebugLogging
}
}
@Published var debugLeftEyeInput: NSImage?
@Published var debugRightEyeInput: NSImage?
@Published var debugLeftEyeProcessed: NSImage?
@Published var debugRightEyeProcessed: NSImage?
@Published var debugLeftPupilPosition: PupilPosition?
@Published var debugRightPupilPosition: PupilPosition?
@Published var debugLeftEyeSize: CGSize?
@Published var debugRightEyeSize: CGSize?
@Published var debugLeftEyeRegion: EyeRegion?
@Published var debugRightEyeRegion: EyeRegion?
@Published var debugImageSize: CGSize?
@Published var trackingResult = TrackingResult.empty
@Published var debugState = EyeTrackingDebugState.empty
private let cameraManager = CameraSessionManager()
private let visionPipeline = VisionPipeline()
private let debugAdapter = EyeDebugStateAdapter()
private let gazeDetector: GazeDetector
private let processor: VisionGazeProcessor
private var cancellables = Set<AnyCancellable>()
var previewLayer: AVCaptureVideoPreviewLayer? {
cameraManager.previewLayer
}
var gazeDirection: GazeDirection {
guard let leftH = debugLeftPupilRatio,
let rightH = debugRightPupilRatio,
let leftV = debugLeftVerticalRatio,
let rightV = debugRightVerticalRatio else {
return .center
}
let avgHorizontal = (leftH + rightH) / 2.0
let avgVertical = (leftV + rightV) / 2.0
return GazeDirection.from(horizontal: avgHorizontal, vertical: avgVertical)
}
var isInFrame: Bool {
faceDetected
trackingResult.faceDetected
}
private override init() {
let configuration = GazeDetector.Configuration(
thresholds: CalibrationState.shared.thresholds,
isCalibrationComplete: CalibrationState.shared.isComplete,
eyeClosedEnabled: EyeTrackingConstants.eyeClosedEnabled,
eyeClosedThreshold: EyeTrackingConstants.eyeClosedThreshold,
yawEnabled: EyeTrackingConstants.yawEnabled,
yawThreshold: EyeTrackingConstants.yawThreshold,
pitchUpEnabled: EyeTrackingConstants.pitchUpEnabled,
pitchUpThreshold: EyeTrackingConstants.pitchUpThreshold,
pitchDownEnabled: EyeTrackingConstants.pitchDownEnabled,
pitchDownThreshold: EyeTrackingConstants.pitchDownThreshold,
pixelGazeEnabled: EyeTrackingConstants.pixelGazeEnabled,
pixelGazeMinRatio: EyeTrackingConstants.pixelGazeMinRatio,
pixelGazeMaxRatio: EyeTrackingConstants.pixelGazeMaxRatio,
boundaryForgivenessMargin: EyeTrackingConstants.boundaryForgivenessMargin,
distanceSensitivity: EyeTrackingConstants.distanceSensitivity,
defaultReferenceFaceWidth: EyeTrackingConstants.defaultReferenceFaceWidth
)
self.gazeDetector = GazeDetector(configuration: configuration)
let config = TrackingConfig.default
self.processor = VisionGazeProcessor(config: config)
super.init()
cameraManager.delegate = self
setupSettingsObserver()
}
private func setupSettingsObserver() {
SettingsManager.shared._settingsSubject
.receive(on: RunLoop.main)
.sink { [weak self] settings in
self?.applyStrictness(settings.enforceModeStrictness)
}
.store(in: &cancellables)
applyStrictness(SettingsManager.shared.settings.enforceModeStrictness)
}
private func applyStrictness(_ strictness: Double) {
let settings = SettingsManager.shared.settings
let widthFactor = settings.enforceModeEyeBoxWidthFactor
let heightFactor = settings.enforceModeEyeBoxHeightFactor
let calibration = settings.enforceModeCalibration
let clamped = min(1, max(0, strictness))
let scale = 1.6 - (0.8 * clamped)
let horizontalThreshold: Double
let verticalThreshold: Double
let baselineEnabled: Bool
let centerHorizontal: Double
let centerVertical: Double
if let calibration {
let halfWidth = max(0.01, (calibration.horizontalMax - calibration.horizontalMin) / 2)
let halfHeight = max(0.01, (calibration.verticalMax - calibration.verticalMin) / 2)
let marginScale = 0.15
horizontalThreshold = halfWidth * (1.0 + marginScale) * scale
verticalThreshold = halfHeight * (1.0 + marginScale) * scale
baselineEnabled = false
centerHorizontal = (calibration.horizontalMin + calibration.horizontalMax) / 2
centerVertical = (calibration.verticalMin + calibration.verticalMax) / 2
processor.setFaceWidthBaseline(calibration.faceWidthRatio)
} else {
horizontalThreshold = TrackingConfig.default.horizontalAwayThreshold * scale
verticalThreshold = TrackingConfig.default.verticalAwayThreshold * scale
baselineEnabled = TrackingConfig.default.baselineEnabled
centerHorizontal = TrackingConfig.default.defaultCenterHorizontal
centerVertical = TrackingConfig.default.defaultCenterVertical
processor.resetBaseline()
}
let config = TrackingConfig(
horizontalAwayThreshold: horizontalThreshold,
verticalAwayThreshold: verticalThreshold,
minBaselineSamples: TrackingConfig.default.minBaselineSamples,
baselineSmoothing: TrackingConfig.default.baselineSmoothing,
baselineUpdateThreshold: TrackingConfig.default.baselineUpdateThreshold,
minConfidence: TrackingConfig.default.minConfidence,
eyeClosedThreshold: TrackingConfig.default.eyeClosedThreshold,
baselineEnabled: baselineEnabled,
defaultCenterHorizontal: centerHorizontal,
defaultCenterVertical: centerVertical,
faceWidthSmoothing: TrackingConfig.default.faceWidthSmoothing,
faceWidthScaleMin: TrackingConfig.default.faceWidthScaleMin,
faceWidthScaleMax: 1.4,
eyeBoundsHorizontalPadding: TrackingConfig.default.eyeBoundsHorizontalPadding,
eyeBoundsVerticalPaddingUp: TrackingConfig.default.eyeBoundsVerticalPaddingUp,
eyeBoundsVerticalPaddingDown: TrackingConfig.default.eyeBoundsVerticalPaddingDown,
eyeBoxWidthFactor: widthFactor,
eyeBoxHeightFactor: heightFactor
)
processor.updateConfig(config)
}
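// Worked numbers for the mapping above: strictness 0 -> scale 1.6 (most
// forgiving), 0.5 -> 1.2, 1 -> 0.8 (tightest). With a calibrated horizontal
// span of 0.36, halfWidth = 0.18 and the away threshold becomes
// 0.18 * 1.15 * scale ≈ 0.25 at the midpoint strictness of 0.5.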
func startEyeTracking() async throws {
@@ -109,93 +124,33 @@ class EyeTrackingService: NSObject, ObservableObject {
cameraManager.stop()
Task { @MainActor in
isEyeTrackingActive = false
isEyesClosed = false
userLookingAtScreen = true
faceDetected = false
debugAdapter.clear()
syncDebugState()
trackingResult = TrackingResult.empty
debugState = EyeTrackingDebugState.empty
}
}
private func syncDebugState() {
debugLeftPupilRatio = debugAdapter.leftPupilRatio
debugRightPupilRatio = debugAdapter.rightPupilRatio
debugLeftVerticalRatio = debugAdapter.leftVerticalRatio
debugRightVerticalRatio = debugAdapter.rightVerticalRatio
debugYaw = debugAdapter.yaw
debugPitch = debugAdapter.pitch
debugLeftEyeInput = debugAdapter.leftEyeInput
debugRightEyeInput = debugAdapter.rightEyeInput
debugLeftEyeProcessed = debugAdapter.leftEyeProcessed
debugRightEyeProcessed = debugAdapter.rightEyeProcessed
debugLeftPupilPosition = debugAdapter.leftPupilPosition
debugRightPupilPosition = debugAdapter.rightPupilPosition
debugLeftEyeSize = debugAdapter.leftEyeSize
debugRightEyeSize = debugAdapter.rightEyeSize
debugLeftEyeRegion = debugAdapter.leftEyeRegion
debugRightEyeRegion = debugAdapter.rightEyeRegion
debugImageSize = debugAdapter.imageSize
}
private func updateGazeConfiguration() {
let configuration = GazeDetector.Configuration(
thresholds: CalibrationState.shared.thresholds,
isCalibrationComplete: CalibratorService.shared.isCalibrating || CalibrationState.shared.isComplete,
eyeClosedEnabled: EyeTrackingConstants.eyeClosedEnabled,
eyeClosedThreshold: EyeTrackingConstants.eyeClosedThreshold,
yawEnabled: EyeTrackingConstants.yawEnabled,
yawThreshold: EyeTrackingConstants.yawThreshold,
pitchUpEnabled: EyeTrackingConstants.pitchUpEnabled,
pitchUpThreshold: EyeTrackingConstants.pitchUpThreshold,
pitchDownEnabled: EyeTrackingConstants.pitchDownEnabled,
pitchDownThreshold: EyeTrackingConstants.pitchDownThreshold,
pixelGazeEnabled: EyeTrackingConstants.pixelGazeEnabled,
pixelGazeMinRatio: EyeTrackingConstants.pixelGazeMinRatio,
pixelGazeMaxRatio: EyeTrackingConstants.pixelGazeMaxRatio,
boundaryForgivenessMargin: EyeTrackingConstants.boundaryForgivenessMargin,
distanceSensitivity: EyeTrackingConstants.distanceSensitivity,
defaultReferenceFaceWidth: EyeTrackingConstants.defaultReferenceFaceWidth
)
gazeDetector.updateConfiguration(configuration)
func currentDebugSnapshot() -> EyeTrackingDebugState {
debugState
}
}
extension EyeTrackingService: CameraSessionDelegate {
nonisolated func cameraSession(
@MainActor func cameraSession(
_ manager: CameraSessionManager,
didOutput pixelBuffer: CVPixelBuffer,
imageSize: CGSize
) {
PupilDetector.advanceFrame()
let analysis = visionPipeline.analyze(pixelBuffer: pixelBuffer, imageSize: imageSize)
let result = gazeDetector.process(analysis: analysis, pixelBuffer: pixelBuffer)
let observation = processor.process(analysis: analysis)
if let leftRatio = result.leftPupilRatio,
let rightRatio = result.rightPupilRatio,
let faceWidth = result.faceWidthRatio {
Task { @MainActor in
guard CalibratorService.shared.isCalibrating else { return }
CalibratorService.shared.submitSampleToBridge(
leftRatio: leftRatio,
rightRatio: rightRatio,
leftVertical: result.leftVerticalRatio,
rightVertical: result.rightVerticalRatio,
faceWidthRatio: faceWidth
trackingResult = TrackingResult(
faceDetected: observation.faceDetected,
gazeState: observation.gazeState,
eyesClosed: observation.eyesClosed,
confidence: observation.confidence,
timestamp: Date()
)
}
}
Task { @MainActor [weak self] in
guard let self else { return }
self.faceDetected = result.faceDetected
self.isEyesClosed = result.isEyesClosed
self.userLookingAtScreen = result.userLookingAtScreen
self.debugAdapter.update(from: result)
self.debugAdapter.updateEyeImages(from: PupilDetector.self)
self.syncDebugState()
self.updateGazeConfiguration()
}
debugState = observation.debugState
}
}
@@ -220,96 +175,3 @@ enum EyeTrackingError: Error, LocalizedError {
}
}
}
// MARK: - Debug State Adapter
final class EyeDebugStateAdapter {
var leftPupilRatio: Double?
var rightPupilRatio: Double?
var leftVerticalRatio: Double?
var rightVerticalRatio: Double?
var yaw: Double?
var pitch: Double?
var enableDebugLogging: Bool = false {
didSet {
PupilDetector.enableDiagnosticLogging = enableDebugLogging
}
}
var leftEyeInput: NSImage?
var rightEyeInput: NSImage?
var leftEyeProcessed: NSImage?
var rightEyeProcessed: NSImage?
var leftPupilPosition: PupilPosition?
var rightPupilPosition: PupilPosition?
var leftEyeSize: CGSize?
var rightEyeSize: CGSize?
var leftEyeRegion: EyeRegion?
var rightEyeRegion: EyeRegion?
var imageSize: CGSize?
var gazeDirection: GazeDirection {
guard let leftH = leftPupilRatio,
let rightH = rightPupilRatio,
let leftV = leftVerticalRatio,
let rightV = rightVerticalRatio else {
return .center
}
let avgHorizontal = (leftH + rightH) / 2.0
let avgVertical = (leftV + rightV) / 2.0
return GazeDirection.from(horizontal: avgHorizontal, vertical: avgVertical)
}
func update(from result: EyeTrackingProcessingResult) {
leftPupilRatio = result.leftPupilRatio
rightPupilRatio = result.rightPupilRatio
leftVerticalRatio = result.leftVerticalRatio
rightVerticalRatio = result.rightVerticalRatio
yaw = result.yaw
pitch = result.pitch
}
func updateEyeImages(from detector: PupilDetector.Type) {
if let leftInput = detector.debugLeftEyeInput {
leftEyeInput = NSImage(cgImage: leftInput, size: NSSize(width: leftInput.width, height: leftInput.height))
}
if let rightInput = detector.debugRightEyeInput {
rightEyeInput = NSImage(cgImage: rightInput, size: NSSize(width: rightInput.width, height: rightInput.height))
}
if let leftProcessed = detector.debugLeftEyeProcessed {
leftEyeProcessed = NSImage(cgImage: leftProcessed, size: NSSize(width: leftProcessed.width, height: leftProcessed.height))
}
if let rightProcessed = detector.debugRightEyeProcessed {
rightEyeProcessed = NSImage(cgImage: rightProcessed, size: NSSize(width: rightProcessed.width, height: rightProcessed.height))
}
leftPupilPosition = detector.debugLeftPupilPosition
rightPupilPosition = detector.debugRightPupilPosition
leftEyeSize = detector.debugLeftEyeSize
rightEyeSize = detector.debugRightEyeSize
leftEyeRegion = detector.debugLeftEyeRegion
rightEyeRegion = detector.debugRightEyeRegion
imageSize = detector.debugImageSize
}
func clear() {
leftPupilRatio = nil
rightPupilRatio = nil
leftVerticalRatio = nil
rightVerticalRatio = nil
yaw = nil
pitch = nil
leftEyeInput = nil
rightEyeInput = nil
leftEyeProcessed = nil
rightEyeProcessed = nil
leftPupilPosition = nil
rightPupilPosition = nil
leftEyeSize = nil
rightEyeSize = nil
leftEyeRegion = nil
rightEyeRegion = nil
imageSize = nil
}
}

View File

@@ -0,0 +1,54 @@
//
// GazeBaselineModel.swift
// Gaze
//
// Created by Mike Freno on 1/31/26.
//
import Foundation
public final class GazeBaselineModel: @unchecked Sendable {
public struct Baseline: Sendable {
let horizontal: Double
let vertical: Double
let sampleCount: Int
}
private let lock = NSLock()
private var horizontal: Double?
private var vertical: Double?
private var sampleCount: Int = 0
public func reset() {
lock.lock()
horizontal = nil
vertical = nil
sampleCount = 0
lock.unlock()
}
public func update(horizontal: Double, vertical: Double, smoothing: Double) {
lock.lock()
defer { lock.unlock() }
if let existingH = self.horizontal, let existingV = self.vertical {
self.horizontal = existingH + (horizontal - existingH) * smoothing
self.vertical = existingV + (vertical - existingV) * smoothing
} else {
self.horizontal = horizontal
self.vertical = vertical
}
sampleCount += 1
}
public func current(defaultH: Double, defaultV: Double) -> Baseline {
lock.lock()
defer { lock.unlock() }
return Baseline(
horizontal: horizontal ?? defaultH,
vertical: vertical ?? defaultV,
sampleCount: sampleCount
)
}
}
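
`update` is a lock-guarded exponential moving average: each sample pulls the stored baseline `smoothing` of the way toward itself. With the 0.15 smoothing that `TrackingConfig` defaults to:

let model = GazeBaselineModel()
model.update(horizontal: 0.50, vertical: 0.50, smoothing: 0.15) // first sample seeds the baseline
model.update(horizontal: 0.60, vertical: 0.50, smoothing: 0.15) // h -> 0.50 + 0.10 * 0.15 = 0.515
let baseline = model.current(defaultH: 0.5, defaultV: 0.5)
print(baseline.horizontal, baseline.sampleCount)                // 0.515 2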

View File

@@ -1,344 +0,0 @@
//
// GazeDetector.swift
// Gaze
//
// Gaze detection logic and pupil analysis.
//
import Foundation
import Vision
import simd
struct EyeTrackingProcessingResult: Sendable {
let faceDetected: Bool
let isEyesClosed: Bool
let userLookingAtScreen: Bool
let leftPupilRatio: Double?
let rightPupilRatio: Double?
let leftVerticalRatio: Double?
let rightVerticalRatio: Double?
let yaw: Double?
let pitch: Double?
let faceWidthRatio: Double?
}
final class GazeDetector: @unchecked Sendable {
struct GazeResult: Sendable {
let isLookingAway: Bool
let isEyesClosed: Bool
let leftPupilRatio: Double?
let rightPupilRatio: Double?
let leftVerticalRatio: Double?
let rightVerticalRatio: Double?
let yaw: Double?
let pitch: Double?
}
struct Configuration: Sendable {
let thresholds: GazeThresholds?
let isCalibrationComplete: Bool
let eyeClosedEnabled: Bool
let eyeClosedThreshold: CGFloat
let yawEnabled: Bool
let yawThreshold: Double
let pitchUpEnabled: Bool
let pitchUpThreshold: Double
let pitchDownEnabled: Bool
let pitchDownThreshold: Double
let pixelGazeEnabled: Bool
let pixelGazeMinRatio: Double
let pixelGazeMaxRatio: Double
let boundaryForgivenessMargin: Double
let distanceSensitivity: Double
let defaultReferenceFaceWidth: Double
}
private let lock = NSLock()
private var configuration: Configuration
init(configuration: Configuration) {
self.configuration = configuration
}
func updateConfiguration(_ configuration: Configuration) {
lock.lock()
self.configuration = configuration
lock.unlock()
}
nonisolated func process(
analysis: VisionPipeline.FaceAnalysis,
pixelBuffer: CVPixelBuffer
) -> EyeTrackingProcessingResult {
let config: Configuration
lock.lock()
config = configuration
lock.unlock()
guard analysis.faceDetected, let face = analysis.face else {
return EyeTrackingProcessingResult(
faceDetected: false,
isEyesClosed: false,
userLookingAtScreen: false,
leftPupilRatio: nil,
rightPupilRatio: nil,
leftVerticalRatio: nil,
rightVerticalRatio: nil,
yaw: analysis.debugYaw,
pitch: analysis.debugPitch,
faceWidthRatio: nil
)
}
let landmarks = face.landmarks
let yaw = face.yaw?.doubleValue ?? 0.0
let pitch = face.pitch?.doubleValue ?? 0.0
var isEyesClosed = false
if let leftEye = landmarks?.leftEye, let rightEye = landmarks?.rightEye {
isEyesClosed = detectEyesClosed(leftEye: leftEye, rightEye: rightEye, configuration: config)
}
let gazeResult = detectLookingAway(
face: face,
landmarks: landmarks,
imageSize: analysis.imageSize,
pixelBuffer: pixelBuffer,
configuration: config
)
let lookingAway = gazeResult.lookingAway
let userLookingAtScreen = !lookingAway
return EyeTrackingProcessingResult(
faceDetected: true,
isEyesClosed: isEyesClosed,
userLookingAtScreen: userLookingAtScreen,
leftPupilRatio: gazeResult.leftPupilRatio,
rightPupilRatio: gazeResult.rightPupilRatio,
leftVerticalRatio: gazeResult.leftVerticalRatio,
rightVerticalRatio: gazeResult.rightVerticalRatio,
yaw: gazeResult.yaw ?? yaw,
pitch: gazeResult.pitch ?? pitch,
faceWidthRatio: face.boundingBox.width
)
}
private func detectEyesClosed(
leftEye: VNFaceLandmarkRegion2D,
rightEye: VNFaceLandmarkRegion2D,
configuration: Configuration
) -> Bool {
guard configuration.eyeClosedEnabled else { return false }
guard leftEye.pointCount >= 2, rightEye.pointCount >= 2 else { return false }
let leftEyeHeight = calculateEyeHeight(leftEye)
let rightEyeHeight = calculateEyeHeight(rightEye)
let closedThreshold = configuration.eyeClosedThreshold
return leftEyeHeight < closedThreshold && rightEyeHeight < closedThreshold
}
private func calculateEyeHeight(_ eye: VNFaceLandmarkRegion2D) -> CGFloat {
let points = eye.normalizedPoints
guard points.count >= 2 else { return 0 }
let yValues = points.map { $0.y }
let maxY = yValues.max() ?? 0
let minY = yValues.min() ?? 0
return abs(maxY - minY)
}
private struct GazeDetectionResult: Sendable {
var lookingAway: Bool = false
var leftPupilRatio: Double?
var rightPupilRatio: Double?
var leftVerticalRatio: Double?
var rightVerticalRatio: Double?
var yaw: Double?
var pitch: Double?
}
private func detectLookingAway(
face: VNFaceObservation,
landmarks: VNFaceLandmarks2D?,
imageSize: CGSize,
pixelBuffer: CVPixelBuffer,
configuration: Configuration
) -> GazeDetectionResult {
var result = GazeDetectionResult()
let yaw = face.yaw?.doubleValue ?? 0.0
let pitch = face.pitch?.doubleValue ?? 0.0
result.yaw = yaw
result.pitch = pitch
var poseLookingAway = false
if face.pitch != nil {
if configuration.yawEnabled {
let yawThreshold = configuration.yawThreshold
if abs(yaw) > yawThreshold {
poseLookingAway = true
}
}
if !poseLookingAway {
var pitchLookingAway = false
if configuration.pitchUpEnabled && pitch > configuration.pitchUpThreshold {
pitchLookingAway = true
}
if configuration.pitchDownEnabled && pitch < configuration.pitchDownThreshold {
pitchLookingAway = true
}
poseLookingAway = pitchLookingAway
}
}
var eyesLookingAway = false
if let landmarks,
let leftEye = landmarks.leftEye,
let rightEye = landmarks.rightEye,
configuration.pixelGazeEnabled {
var leftGazeRatio: Double? = nil
var rightGazeRatio: Double? = nil
var leftVerticalRatio: Double? = nil
var rightVerticalRatio: Double? = nil
if let leftResult = PupilDetector.detectPupil(
in: pixelBuffer,
eyeLandmarks: leftEye,
faceBoundingBox: face.boundingBox,
imageSize: imageSize,
side: 0
) {
leftGazeRatio = calculateGazeRatio(
pupilPosition: leftResult.pupilPosition,
eyeRegion: leftResult.eyeRegion
)
leftVerticalRatio = calculateVerticalRatio(
pupilPosition: leftResult.pupilPosition,
eyeRegion: leftResult.eyeRegion
)
}
if let rightResult = PupilDetector.detectPupil(
in: pixelBuffer,
eyeLandmarks: rightEye,
faceBoundingBox: face.boundingBox,
imageSize: imageSize,
side: 1
) {
rightGazeRatio = calculateGazeRatio(
pupilPosition: rightResult.pupilPosition,
eyeRegion: rightResult.eyeRegion
)
rightVerticalRatio = calculateVerticalRatio(
pupilPosition: rightResult.pupilPosition,
eyeRegion: rightResult.eyeRegion
)
}
result.leftPupilRatio = leftGazeRatio
result.rightPupilRatio = rightGazeRatio
result.leftVerticalRatio = leftVerticalRatio
result.rightVerticalRatio = rightVerticalRatio
if let leftRatio = leftGazeRatio,
let rightRatio = rightGazeRatio {
let avgH = (leftRatio + rightRatio) / 2.0
let avgV = (leftVerticalRatio != nil && rightVerticalRatio != nil)
? (leftVerticalRatio! + rightVerticalRatio!) / 2.0
: 0.5
if configuration.isCalibrationComplete,
let thresholds = configuration.thresholds {
let currentFaceWidth = face.boundingBox.width
let refFaceWidth = thresholds.referenceFaceWidth
var distanceScale = 1.0
if refFaceWidth > 0 && currentFaceWidth > 0 {
let rawScale = refFaceWidth / currentFaceWidth
distanceScale = 1.0 + (rawScale - 1.0) * configuration.distanceSensitivity
distanceScale = max(0.5, min(2.0, distanceScale))
}
let centerH = (thresholds.screenLeftBound + thresholds.screenRightBound) / 2.0
let centerV = (thresholds.screenTopBound + thresholds.screenBottomBound) / 2.0
let deltaH = (avgH - centerH) * distanceScale
let deltaV = (avgV - centerV) * distanceScale
let normalizedH = centerH + deltaH
let normalizedV = centerV + deltaV
let margin = configuration.boundaryForgivenessMargin
let isLookingLeft = normalizedH > (thresholds.screenLeftBound + margin)
let isLookingRight = normalizedH < (thresholds.screenRightBound - margin)
let isLookingUp = normalizedV < (thresholds.screenTopBound - margin)
let isLookingDown = normalizedV > (thresholds.screenBottomBound + margin)
eyesLookingAway = isLookingLeft || isLookingRight || isLookingUp || isLookingDown
} else {
let currentFaceWidth = face.boundingBox.width
let refFaceWidth = configuration.defaultReferenceFaceWidth
var distanceScale = 1.0
if refFaceWidth > 0 && currentFaceWidth > 0 {
let rawScale = refFaceWidth / currentFaceWidth
distanceScale = 1.0 + (rawScale - 1.0) * configuration.distanceSensitivity
distanceScale = max(0.5, min(2.0, distanceScale))
}
let centerH = (configuration.pixelGazeMinRatio + configuration.pixelGazeMaxRatio) / 2.0
let normalizedH = centerH + (avgH - centerH) * distanceScale
let lookingRight = normalizedH <= configuration.pixelGazeMinRatio
let lookingLeft = normalizedH >= configuration.pixelGazeMaxRatio
eyesLookingAway = lookingRight || lookingLeft
}
}
}
result.lookingAway = poseLookingAway || eyesLookingAway
return result
}
private func calculateGazeRatio(
pupilPosition: PupilPosition,
eyeRegion: EyeRegion
) -> Double {
let pupilX = Double(pupilPosition.x)
let eyeCenterX = Double(eyeRegion.center.x)
let denominator = (eyeCenterX * 2.0 - 10.0)
guard denominator > 0 else {
let eyeLeft = Double(eyeRegion.frame.minX)
let eyeRight = Double(eyeRegion.frame.maxX)
let eyeWidth = eyeRight - eyeLeft
guard eyeWidth > 0 else { return 0.5 }
return (pupilX - eyeLeft) / eyeWidth
}
let ratio = pupilX / denominator
return max(0.0, min(1.0, ratio))
}
private func calculateVerticalRatio(
pupilPosition: PupilPosition,
eyeRegion: EyeRegion
) -> Double {
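// NOTE: this read pupilPosition.x against the eye-region width, so the
// "vertical" ratio actually tracked horizontal position — plausibly one reason
// this detector is deleted in this change.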
let pupilX = Double(pupilPosition.x)
let eyeWidth = Double(eyeRegion.frame.width)
guard eyeWidth > 0 else { return 0.5 }
let ratio = pupilX / eyeWidth
return max(0.0, min(1.0, ratio))
}
}

File diff suppressed because it is too large

View File

@@ -0,0 +1,134 @@
//
// TrackingModels.swift
// Gaze
//
// Created by Mike Freno on 1/31/26.
//
import Foundation
public enum GazeState: String, Sendable {
case lookingAtScreen
case lookingAway
case unknown
}
public struct TrackingResult: Sendable {
public let faceDetected: Bool
public let gazeState: GazeState
public let eyesClosed: Bool
public let confidence: Double
public let timestamp: Date
public static let empty = TrackingResult(
faceDetected: false,
gazeState: .unknown,
eyesClosed: false,
confidence: 0,
timestamp: Date()
)
}
public struct EyeTrackingDebugState: Sendable {
public let leftEyeRect: CGRect?
public let rightEyeRect: CGRect?
public let leftPupil: CGPoint?
public let rightPupil: CGPoint?
public let imageSize: CGSize?
public let faceWidthRatio: Double?
public let normalizedHorizontal: Double?
public let normalizedVertical: Double?
public static let empty = EyeTrackingDebugState(
leftEyeRect: nil,
rightEyeRect: nil,
leftPupil: nil,
rightPupil: nil,
imageSize: nil,
faceWidthRatio: nil,
normalizedHorizontal: nil,
normalizedVertical: nil
)
}
public struct TrackingConfig: Sendable {
public init(
horizontalAwayThreshold: Double,
verticalAwayThreshold: Double,
minBaselineSamples: Int,
baselineSmoothing: Double,
baselineUpdateThreshold: Double,
minConfidence: Double,
eyeClosedThreshold: Double,
baselineEnabled: Bool,
defaultCenterHorizontal: Double,
defaultCenterVertical: Double,
faceWidthSmoothing: Double,
faceWidthScaleMin: Double,
faceWidthScaleMax: Double,
eyeBoundsHorizontalPadding: Double,
eyeBoundsVerticalPaddingUp: Double,
eyeBoundsVerticalPaddingDown: Double,
eyeBoxWidthFactor: Double,
eyeBoxHeightFactor: Double
) {
self.horizontalAwayThreshold = horizontalAwayThreshold
self.verticalAwayThreshold = verticalAwayThreshold
self.minBaselineSamples = minBaselineSamples
self.baselineSmoothing = baselineSmoothing
self.baselineUpdateThreshold = baselineUpdateThreshold
self.minConfidence = minConfidence
self.eyeClosedThreshold = eyeClosedThreshold
self.baselineEnabled = baselineEnabled
self.defaultCenterHorizontal = defaultCenterHorizontal
self.defaultCenterVertical = defaultCenterVertical
self.faceWidthSmoothing = faceWidthSmoothing
self.faceWidthScaleMin = faceWidthScaleMin
self.faceWidthScaleMax = faceWidthScaleMax
self.eyeBoundsHorizontalPadding = eyeBoundsHorizontalPadding
self.eyeBoundsVerticalPaddingUp = eyeBoundsVerticalPaddingUp
self.eyeBoundsVerticalPaddingDown = eyeBoundsVerticalPaddingDown
self.eyeBoxWidthFactor = eyeBoxWidthFactor
self.eyeBoxHeightFactor = eyeBoxHeightFactor
}
public let horizontalAwayThreshold: Double
public let verticalAwayThreshold: Double
public let minBaselineSamples: Int
public let baselineSmoothing: Double
public let baselineUpdateThreshold: Double
public let minConfidence: Double
public let eyeClosedThreshold: Double
public let baselineEnabled: Bool
public let defaultCenterHorizontal: Double
public let defaultCenterVertical: Double
public let faceWidthSmoothing: Double
public let faceWidthScaleMin: Double
public let faceWidthScaleMax: Double
public let eyeBoundsHorizontalPadding: Double
public let eyeBoundsVerticalPaddingUp: Double
public let eyeBoundsVerticalPaddingDown: Double
public let eyeBoxWidthFactor: Double
public let eyeBoxHeightFactor: Double
public static let `default` = TrackingConfig(
horizontalAwayThreshold: 0.08,
verticalAwayThreshold: 0.12,
minBaselineSamples: 8,
baselineSmoothing: 0.15,
baselineUpdateThreshold: 0.08,
minConfidence: 0.5,
eyeClosedThreshold: 0.18,
baselineEnabled: true,
defaultCenterHorizontal: 0.5,
defaultCenterVertical: 0.5,
faceWidthSmoothing: 0.12,
faceWidthScaleMin: 0.85,
faceWidthScaleMax: 1.4,
eyeBoundsHorizontalPadding: 0.1,
eyeBoundsVerticalPaddingUp: 0.9,
eyeBoundsVerticalPaddingDown: 0.4,
eyeBoxWidthFactor: 0.18,
eyeBoxHeightFactor: 0.10
)
}
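
Read together, the defaults say: once at least 8 baseline samples exist, a pupil drifting more than 0.08 of the padded eye box horizontally (or 0.12 vertically) from the baseline counts as looking away. The core comparison, simplified from `decideGazeState`:

let cfg = TrackingConfig.default
let baselineH = 0.50                                         // learned center
let h = 0.61                                                 // current normalized pupil x
let away = abs(h - baselineH) > cfg.horizontalAwayThreshold  // 0.11 > 0.08 -> true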

View File

@@ -0,0 +1,390 @@
//
// VisionGazeProcessor.swift
// Gaze
//
// Created by Mike Freno on 1/31/26.
//
import Foundation
@preconcurrency import Vision
final class VisionGazeProcessor: @unchecked Sendable {
struct EyeObservation: Sendable {
let center: CGPoint
let width: Double
let height: Double
let pupil: CGPoint?
let frame: CGRect
let normalizedPupil: CGPoint?
let hasPupilLandmarks: Bool
}
struct ObservationResult: Sendable {
let faceDetected: Bool
let eyesClosed: Bool
let gazeState: GazeState
let confidence: Double
let horizontal: Double?
let vertical: Double?
let debugState: EyeTrackingDebugState
}
private let baselineModel = GazeBaselineModel()
private var faceWidthBaseline: Double?
private var faceWidthSmoothed: Double?
private var config: TrackingConfig
init(config: TrackingConfig) {
self.config = config
}
func updateConfig(_ config: TrackingConfig) {
self.config = config
}
func resetBaseline() {
baselineModel.reset()
faceWidthBaseline = nil
faceWidthSmoothed = nil
}
func setFaceWidthBaseline(_ value: Double) {
faceWidthBaseline = value
faceWidthSmoothed = value
}
func process(analysis: VisionPipeline.FaceAnalysis) -> ObservationResult {
guard analysis.faceDetected, let face = analysis.face?.value else {
return ObservationResult(
faceDetected: false,
eyesClosed: false,
gazeState: .unknown,
confidence: 0,
horizontal: nil,
vertical: nil,
debugState: .empty
)
}
guard let landmarks = face.landmarks else {
return ObservationResult(
faceDetected: true,
eyesClosed: false,
gazeState: .unknown,
confidence: 0.3,
horizontal: nil,
vertical: nil,
debugState: .empty
)
}
let leftEye = makeEyeObservation(
eye: landmarks.leftEye,
pupil: landmarks.leftPupil,
face: face,
imageSize: analysis.imageSize
)
let rightEye = makeEyeObservation(
eye: landmarks.rightEye,
pupil: landmarks.rightPupil,
face: face,
imageSize: analysis.imageSize
)
let eyesClosed = detectEyesClosed(left: leftEye, right: rightEye)
let (horizontal, vertical) = normalizePupilPosition(left: leftEye, right: rightEye)
let faceWidthRatio = Double(face.boundingBox.size.width)
let distanceScale = updateDistanceScale(faceWidthRatio: faceWidthRatio)
let confidence = calculateConfidence(leftEye: leftEye, rightEye: rightEye)
let gazeState = decideGazeState(
horizontal: horizontal,
vertical: vertical,
confidence: confidence,
eyesClosed: eyesClosed,
distanceScale: distanceScale
)
let debugState = EyeTrackingDebugState(
leftEyeRect: leftEye?.frame,
rightEyeRect: rightEye?.frame,
leftPupil: leftEye?.pupil,
rightPupil: rightEye?.pupil,
imageSize: analysis.imageSize,
faceWidthRatio: faceWidthRatio,
normalizedHorizontal: horizontal,
normalizedVertical: vertical
)
return ObservationResult(
faceDetected: true,
eyesClosed: eyesClosed,
gazeState: gazeState,
confidence: confidence,
horizontal: horizontal,
vertical: vertical,
debugState: debugState
)
}
private func makeEyeObservation(
eye: VNFaceLandmarkRegion2D?,
pupil: VNFaceLandmarkRegion2D?,
face: VNFaceObservation,
imageSize: CGSize
) -> EyeObservation? {
guard let eye else { return nil }
let eyePoints = normalizePoints(eye.normalizedPoints, face: face, imageSize: imageSize)
guard let bounds = boundingRect(points: eyePoints) else { return nil }
let pupilPoint: CGPoint?
let hasPupilLandmarks = (pupil?.pointCount ?? 0) > 0
if let pupil, pupil.pointCount > 0 {
let pupilPoints = normalizePoints(pupil.normalizedPoints, face: face, imageSize: imageSize)
pupilPoint = averagePoint(pupilPoints, fallback: bounds.center)
} else {
pupilPoint = bounds.center
}
let eyeBox = makeFaceRelativeEyeBox(
center: bounds.center,
faceWidth: face.boundingBox.size.width * imageSize.width
)
let paddedFrame = expandRect(
eyeBox,
horizontalPadding: config.eyeBoundsHorizontalPadding,
verticalPaddingUp: config.eyeBoundsVerticalPaddingUp,
verticalPaddingDown: config.eyeBoundsVerticalPaddingDown
)
let normalizedPupil: CGPoint?
if let pupilPoint {
let nx = clamp((pupilPoint.x - paddedFrame.minX) / paddedFrame.size.width)
let ny = clamp((pupilPoint.y - paddedFrame.minY) / paddedFrame.size.height)
normalizedPupil = CGPoint(x: nx, y: ny)
} else {
normalizedPupil = nil
}
return EyeObservation(
center: bounds.center,
width: bounds.size.width,
height: bounds.size.height,
pupil: pupilPoint,
frame: paddedFrame,
normalizedPupil: normalizedPupil,
hasPupilLandmarks: hasPupilLandmarks
)
}
private func normalizePoints(
_ points: [CGPoint],
face: VNFaceObservation,
imageSize: CGSize
) -> [CGPoint] {
points.map { point in
let x = (face.boundingBox.origin.x + point.x * face.boundingBox.size.width)
* imageSize.width
let y = (1.0 - (face.boundingBox.origin.y + point.y * face.boundingBox.size.height))
* imageSize.height
return CGPoint(x: x, y: y)
}
}
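// Vision returns landmark points normalized to the face box with a bottom-left
// origin; the 1.0 - (...) term flips y so these points come out in top-left
// image coordinates, which is what the eye-box math below assumes.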
private func boundingRect(points: [CGPoint]) -> (center: CGPoint, size: CGSize, minX: CGFloat, minY: CGFloat)? {
guard !points.isEmpty else { return nil }
var minX = CGFloat.greatestFiniteMagnitude
var maxX = -CGFloat.greatestFiniteMagnitude
var minY = CGFloat.greatestFiniteMagnitude
var maxY = -CGFloat.greatestFiniteMagnitude
for point in points {
minX = min(minX, point.x)
maxX = max(maxX, point.x)
minY = min(minY, point.y)
maxY = max(maxY, point.y)
}
let width = maxX - minX
let height = maxY - minY
guard width > 0, height > 0 else { return nil }
return (
center: CGPoint(x: minX + width / 2, y: minY + height / 2),
size: CGSize(width: width, height: height),
minX: minX,
minY: minY
)
}
private func averagePoint(_ points: [CGPoint], fallback: CGPoint) -> CGPoint {
guard !points.isEmpty else { return fallback }
let sum = points.reduce(CGPoint.zero) { partial, next in
CGPoint(x: partial.x + next.x, y: partial.y + next.y)
}
return CGPoint(x: sum.x / CGFloat(points.count), y: sum.y / CGFloat(points.count))
}
private func clamp(_ value: CGFloat) -> CGFloat {
min(1, max(0, value))
}
private func expandRect(
_ rect: CGRect,
horizontalPadding: Double,
verticalPaddingUp: Double,
verticalPaddingDown: Double
) -> CGRect {
let dx = rect.width * CGFloat(horizontalPadding)
let up = rect.height * CGFloat(verticalPaddingUp)
let down = rect.height * CGFloat(verticalPaddingDown)
return CGRect(
x: rect.origin.x - dx,
y: rect.origin.y - up,
width: rect.width + (dx * 2),
height: rect.height + up + down
)
}
private func makeFaceRelativeEyeBox(center: CGPoint, faceWidth: CGFloat) -> CGRect {
let width = faceWidth * CGFloat(config.eyeBoxWidthFactor)
let height = faceWidth * CGFloat(config.eyeBoxHeightFactor)
return CGRect(
x: center.x - width / 2,
y: center.y - height / 2,
width: width,
height: height
)
}
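// Example: a 400 px-wide face gives an eye box of 400 * 0.18 = 72 px by
// 400 * 0.10 = 40 px centered on the landmarks; expandRect then pads it 10%
// horizontally, 90% of its height upward and 40% downward — presumably extra
// headroom for the pupil riding high under the brow when looking up.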
private func averageCoordinate(left: CGFloat?, right: CGFloat?, fallback: Double?) -> Double? {
switch (left, right) {
case let (left?, right?):
return Double((left + right) / 2)
case let (left?, nil):
return Double(left)
case let (nil, right?):
return Double(right)
default:
return fallback
}
}
private func normalizePupilPosition(
left: EyeObservation?,
right: EyeObservation?
) -> (horizontal: Double?, vertical: Double?) {
let leftPupil = left?.normalizedPupil
let rightPupil = right?.normalizedPupil
let horizontal = averageCoordinate(
left: leftPupil?.x,
right: rightPupil?.x,
fallback: nil
)
let vertical = averageCoordinate(
left: leftPupil?.y,
right: rightPupil?.y,
fallback: nil
)
return (horizontal, vertical)
}
private func detectEyesClosed(left: EyeObservation?, right: EyeObservation?) -> Bool {
guard let left, let right else { return false }
let leftRatio = left.height / max(left.width, 1)
let rightRatio = right.height / max(right.width, 1)
let avgRatio = (leftRatio + rightRatio) / 2
return avgRatio < config.eyeClosedThreshold
}
private func calculateConfidence(leftEye: EyeObservation?, rightEye: EyeObservation?) -> Double {
var score = 0.0
if leftEye?.hasPupilLandmarks == true { score += 0.35 }
if rightEye?.hasPupilLandmarks == true { score += 0.35 }
if leftEye != nil { score += 0.15 }
if rightEye != nil { score += 0.15 }
return min(1.0, score)
}
private func decideGazeState(
horizontal: Double?,
vertical: Double?,
confidence: Double,
eyesClosed: Bool,
distanceScale: Double
) -> GazeState {
guard confidence >= config.minConfidence else { return .unknown }
guard let horizontal, let vertical else { return .unknown }
if eyesClosed { return .unknown }
let baseline = baselineModel.current(
defaultH: config.defaultCenterHorizontal,
defaultV: config.defaultCenterVertical
)
let deltaH = abs(horizontal - baseline.horizontal)
let deltaV = abs(vertical - baseline.vertical)
let thresholdH = config.horizontalAwayThreshold * distanceScale
let thresholdV = config.verticalAwayThreshold * distanceScale
let lookingDown = vertical > baseline.vertical
let lookingUp = vertical < baseline.vertical
let verticalMultiplier: Double
if lookingDown {
verticalMultiplier = 1.1
} else if lookingUp {
verticalMultiplier = 1.4
} else {
verticalMultiplier = 1.0
}
let verticalAway = deltaV > (thresholdV * verticalMultiplier)
let away = deltaH > thresholdH || verticalAway
if config.baselineEnabled {
if baseline.sampleCount < config.minBaselineSamples {
baselineModel.update(
horizontal: horizontal,
vertical: vertical,
smoothing: config.baselineSmoothing
)
} else if deltaH < config.baselineUpdateThreshold
&& deltaV < config.baselineUpdateThreshold {
baselineModel.update(
horizontal: horizontal,
vertical: vertical,
smoothing: config.baselineSmoothing
)
}
}
let stable = baseline.sampleCount >= config.minBaselineSamples || !config.baselineEnabled
if !stable { return .unknown }
return away ? .lookingAway : .lookingAtScreen
}
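// At the defaults (distanceScale 1.0): horizontal trips at |Δh| > 0.08, while
// vertical trips at |Δv| > 0.12 * 1.1 = 0.132 looking down but
// 0.12 * 1.4 = 0.168 looking up, giving upward glances extra slack. The
// baseline only absorbs samples near its own center, so away-poses cannot
// drag it along.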
private func updateDistanceScale(faceWidthRatio: Double) -> Double {
let smoothed: Double
if let existing = faceWidthSmoothed {
smoothed = existing + (faceWidthRatio - existing) * config.faceWidthSmoothing
} else {
smoothed = faceWidthRatio
}
faceWidthSmoothed = smoothed
if faceWidthBaseline == nil {
faceWidthBaseline = smoothed
return 1.0
}
let baseline = faceWidthBaseline ?? smoothed
guard baseline > 0 else { return 1.0 }
let ratio = baseline / max(0.0001, smoothed)
return clampDouble(ratio, min: config.faceWidthScaleMin, max: config.faceWidthScaleMax)
}
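// Example: calibration baseline face width 0.30, smoothed current width 0.24
// (user leaned back): ratio = 0.30 / 0.24 = 1.25, inside [0.85, 1.4], so every
// gaze threshold is relaxed by 25% to offset the smaller on-sensor eye image.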
private func clampDouble(_ value: Double, min: Double, max: Double) -> Double {
Swift.min(max, Swift.max(min, value))
}
}

View File

@@ -6,15 +6,17 @@
//
import Foundation
import Vision
@preconcurrency import Vision
final class VisionPipeline: @unchecked Sendable {
struct FaceAnalysis: Sendable {
let faceDetected: Bool
let face: VNFaceObservation?
let face: NonSendableFaceObservation?
let imageSize: CGSize
let debugYaw: Double?
let debugPitch: Double?
}
struct NonSendableFaceObservation: @unchecked Sendable {
nonisolated(unsafe) let value: VNFaceObservation
}
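// VNFaceObservation is not Sendable, so the analysis wraps it in this
// @unchecked Sendable box (nonisolated(unsafe) storage) instead of copying
// fields out. The assumption: Vision never mutates the observation after
// returning it, and exactly one consumer reads it per frame.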
nonisolated func analyze(
@@ -40,28 +42,22 @@ final class VisionPipeline: @unchecked Sendable {
return FaceAnalysis(
faceDetected: false,
face: nil,
imageSize: imageSize,
debugYaw: nil,
debugPitch: nil
imageSize: imageSize
)
}
guard let face = (request.results as? [VNFaceObservation])?.first else {
guard let face = request.results?.first else {
return FaceAnalysis(
faceDetected: false,
face: nil,
imageSize: imageSize,
debugYaw: nil,
debugPitch: nil
imageSize: imageSize
)
}
return FaceAnalysis(
faceDetected: true,
face: face,
imageSize: imageSize,
debugYaw: face.yaw?.doubleValue,
debugPitch: face.pitch?.doubleValue
face: NonSendableFaceObservation(value: face),
imageSize: imageSize
)
}
}

View File

@@ -5,6 +5,7 @@
// Created by Mike Freno on 1/13/26.
//
import AppKit
import AVFoundation
import Combine
@@ -33,11 +34,20 @@ class CameraAccessService: ObservableObject {
return
}
let currentStatus = AVCaptureDevice.authorizationStatus(for: .video)
if currentStatus == .denied || currentStatus == .restricted {
checkCameraAuthorizationStatus()
openSystemSettings()
throw CameraAccessError.accessDenied
}
print("🎥 Calling AVCaptureDevice.requestAccess...")
let status = await AVCaptureDevice.requestAccess(for: .video)
print("🎥 Permission result: \(status)")
if !status {
checkCameraAuthorizationStatus()
openSystemSettings()
throw CameraAccessError.accessDenied
}
@@ -69,6 +79,27 @@ class CameraAccessService: ObservableObject {
}
}
func openSystemSettings() {
let possibleUrls = [
"x-apple.systempreferences:com.apple.preference.security?Privacy_Camera",
"x-apple.systempreferences:Privacy?Camera",
"x-apple.systempreferences:com.apple.preference.security",
"x-apple.systempreferences:Privacy",
"x-apple.systempreferences:com.apple.preferences.security",
]
for urlString in possibleUrls {
if let url = URL(string: urlString),
NSWorkspace.shared.open(url)
{
print("Successfully opened: \(urlString)")
return
}
}
print("⚠️ Failed to open System Settings")
}
func checkCameraHardware() {
let devices = AVCaptureDevice.DiscoverySession(
deviceTypes: [.builtInWideAngleCamera],
@@ -97,7 +128,7 @@ enum CameraAccessError: Error, LocalizedError {
switch self {
case .accessDenied:
return
"Camera access was denied. Please enable camera permissions in System Preferences."
"Camera access was denied. Please enable camera permissions in System Settings."
case .unsupportedOS:
return "This feature requires macOS 12 or later."
case .unknown:

View File

@@ -7,139 +7,45 @@
import AppKit
import Combine
import CoreGraphics
import Foundation
public struct FullscreenWindowDescriptor: Equatable {
public let ownerPID: pid_t
public let layer: Int
public let bounds: CGRect
public init(ownerPID: pid_t, layer: Int, bounds: CGRect) {
self.ownerPID = ownerPID
self.layer = layer
self.bounds = bounds
}
}
protocol FullscreenEnvironmentProviding {
func frontmostProcessIdentifier() -> pid_t?
func windowDescriptors() -> [FullscreenWindowDescriptor]
func screenFrames() -> [CGRect]
}
struct SystemFullscreenEnvironmentProvider: FullscreenEnvironmentProviding {
func frontmostProcessIdentifier() -> pid_t? {
NSWorkspace.shared.frontmostApplication?.processIdentifier
}
func windowDescriptors() -> [FullscreenWindowDescriptor] {
let options: CGWindowListOption = [.optionOnScreenOnly, .excludeDesktopElements]
guard let windowList = CGWindowListCopyWindowInfo(options, kCGNullWindowID) as? [[String: Any]] else {
return []
}
return windowList.compactMap { window in
guard let ownerPID = window[kCGWindowOwnerPID as String] as? pid_t,
let layer = window[kCGWindowLayer as String] as? Int,
let boundsDict = window[kCGWindowBounds as String] as? [String: CGFloat] else {
return nil
}
let bounds = CGRect(
x: boundsDict["X"] ?? 0,
y: boundsDict["Y"] ?? 0,
width: boundsDict["Width"] ?? 0,
height: boundsDict["Height"] ?? 0
)
return FullscreenWindowDescriptor(ownerPID: ownerPID, layer: layer, bounds: bounds)
}
}
public func screenFrames() -> [CGRect] {
NSScreen.screens.map(\.frame)
}
}
import MacroVisionKit
final class FullscreenDetectionService: ObservableObject {
@Published private(set) var isFullscreenActive = false
private var observers: [NSObjectProtocol] = []
private var frontmostAppObserver: AnyCancellable?
private var fullscreenTask: Task<Void, Never>?
private let permissionManager: ScreenCapturePermissionManaging
private let environmentProvider: FullscreenEnvironmentProviding
private let windowMatcher = FullscreenWindowMatcher()
#if canImport(MacroVisionKit)
private let monitor = FullScreenMonitor.shared
#endif
init(
permissionManager: ScreenCapturePermissionManaging,
environmentProvider: FullscreenEnvironmentProviding
permissionManager: ScreenCapturePermissionManaging
) {
self.permissionManager = permissionManager
self.environmentProvider = environmentProvider
setupObservers()
startMonitoring()
}
/// Convenience initializer using default services
convenience init() {
self.init(
permissionManager: ScreenCapturePermissionManager.shared,
environmentProvider: SystemFullscreenEnvironmentProvider()
permissionManager: ScreenCapturePermissionManager.shared
)
}
// Factory method to safely create instances from non-main actor contexts
static func create(
permissionManager: ScreenCapturePermissionManaging? = nil,
environmentProvider: FullscreenEnvironmentProviding? = nil
permissionManager: ScreenCapturePermissionManaging? = nil
) async -> FullscreenDetectionService {
await MainActor.run {
return FullscreenDetectionService(
permissionManager: permissionManager ?? ScreenCapturePermissionManager.shared,
environmentProvider: environmentProvider ?? SystemFullscreenEnvironmentProvider()
permissionManager: permissionManager ?? ScreenCapturePermissionManager.shared
)
}
}
deinit {
let notificationCenter = NSWorkspace.shared.notificationCenter
observers.forEach { notificationCenter.removeObserver($0) }
frontmostAppObserver?.cancel()
}
private func setupObservers() {
let workspace = NSWorkspace.shared
let notificationCenter = workspace.notificationCenter
let stateChangeHandler: (Notification) -> Void = { [weak self] _ in
self?.checkFullscreenState()
}
let notifications: [(NSNotification.Name, Any?)] = [
(NSWorkspace.activeSpaceDidChangeNotification, workspace),
(NSApplication.didChangeScreenParametersNotification, nil),
(NSWindow.willEnterFullScreenNotification, nil),
(NSWindow.willExitFullScreenNotification, nil),
]
observers = notifications.map { notification, object in
notificationCenter.addObserver(
forName: notification,
object: object,
queue: .main,
using: stateChangeHandler
)
}
frontmostAppObserver = NotificationCenter.default.publisher(
for: NSWorkspace.didActivateApplicationNotification,
object: workspace
)
.sink { [weak self] _ in
self?.checkFullscreenState()
}
checkFullscreenState()
fullscreenTask?.cancel()
}
private func canReadWindowInfo() -> Bool {
@@ -151,25 +57,17 @@ final class FullscreenDetectionService: ObservableObject {
return true
}
private func checkFullscreenState() {
guard canReadWindowInfo() else { return }
guard let frontmostPID = environmentProvider.frontmostProcessIdentifier() else {
setFullscreenState(false)
return
}
let windows = environmentProvider.windowDescriptors()
let screens = environmentProvider.screenFrames()
for window in windows where window.ownerPID == frontmostPID && window.layer == 0 {
if windowMatcher.isFullscreen(windowBounds: window.bounds, screenFrames: screens) {
setFullscreenState(true)
return
private func startMonitoring() {
fullscreenTask = Task { [weak self] in
guard let self else { return }
let stream = await monitor.spaceChanges()
for await spaces in stream {
guard self.canReadWindowInfo() else { continue }
self.setFullscreenState(!spaces.isEmpty)
}
}
setFullscreenState(false)
forceUpdate()
}
fileprivate func setFullscreenState(_ isActive: Bool) {
@@ -179,7 +77,12 @@ final class FullscreenDetectionService: ObservableObject {
}
func forceUpdate() {
checkFullscreenState()
Task { [weak self] in
guard let self else { return }
guard self.canReadWindowInfo() else { return }
let spaces = await monitor.detectFullscreenApps()
self.setFullscreenState(!spaces.isEmpty)
}
}
#if DEBUG
@@ -188,16 +91,3 @@ final class FullscreenDetectionService: ObservableObject {
}
#endif
}
struct FullscreenWindowMatcher {
func isFullscreen(windowBounds: CGRect, screenFrames: [CGRect], tolerance: CGFloat = 1) -> Bool {
screenFrames.contains { matches(windowBounds, screenFrame: $0, tolerance: tolerance) }
}
private func matches(_ windowBounds: CGRect, screenFrame: CGRect, tolerance: CGFloat) -> Bool {
abs(windowBounds.width - screenFrame.width) < tolerance
&& abs(windowBounds.height - screenFrame.height) < tolerance
&& abs(windowBounds.origin.x - screenFrame.origin.x) < tolerance
&& abs(windowBounds.origin.y - screenFrame.origin.y) < tolerance
}
}
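// For reference, the removed matcher above declared a window fullscreen only
// when all four edges sat within the 1pt tolerance of some screen frame.
// A hypothetical check (screen size invented for illustration):
//
//     let matcher = FullscreenWindowMatcher()
//     let screen = CGRect(x: 0, y: 0, width: 1728, height: 1117)
//     matcher.isFullscreen(windowBounds: screen, screenFrames: [screen])
//     // -> true
//     matcher.isFullscreen(windowBounds: screen.insetBy(dx: 0, dy: 12),
//                          screenFrames: [screen])
//     // -> false: origin.y and height each differ by more than 1pt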

View File

@@ -1,456 +0,0 @@
//
// CalibrationOverlayView.swift
// Gaze
//
// Fullscreen overlay view for eye tracking calibration targets.
//
import AVFoundation
import Combine
import SwiftUI
struct CalibrationOverlayView: View {
@StateObject private var calibratorService = CalibratorService.shared
@StateObject private var eyeTrackingService = EyeTrackingService.shared
@StateObject private var viewModel = CalibrationOverlayViewModel()
let onDismiss: () -> Void
var body: some View {
GeometryReader { geometry in
ZStack {
Color.black.ignoresSafeArea()
// Camera preview at 50% opacity (mirrored for natural feel)
if let previewLayer = eyeTrackingService.previewLayer {
CameraPreviewView(previewLayer: previewLayer, borderColor: .clear)
.scaleEffect(x: -1, y: 1)
.opacity(0.5)
.ignoresSafeArea()
}
if let error = viewModel.showError {
errorView(error)
} else if !viewModel.cameraStarted {
startingCameraView
} else if calibratorService.isCalibrating {
calibrationContentView(screenSize: geometry.size)
} else if viewModel.calibrationStarted
&& calibratorService.calibrationData.isComplete
{
// Only show completion if we started calibration this session AND it completed
completionView
} else if viewModel.calibrationStarted {
// Calibration was started but not yet complete - show content
calibrationContentView(screenSize: geometry.size)
}
}
}
.task {
await viewModel.startCamera(
eyeTrackingService: eyeTrackingService, calibratorService: calibratorService)
}
.onDisappear {
viewModel.cleanup(
eyeTrackingService: eyeTrackingService, calibratorService: calibratorService)
}
.onChange(of: calibratorService.currentStep) { oldStep, newStep in
if newStep != nil && oldStep != newStep {
viewModel.startStepCountdown(calibratorService: calibratorService)
}
}
}
// MARK: - Starting Camera View
private var startingCameraView: some View {
VStack(spacing: 20) {
ProgressView()
.scaleEffect(2)
.tint(.white)
Text("Starting camera...")
.font(.title2)
.foregroundStyle(.white)
}
}
// MARK: - Error View
private func errorView(_ message: String) -> some View {
VStack(spacing: 20) {
Image(systemName: "exclamationmark.triangle.fill")
.font(.system(size: 60))
.foregroundStyle(.orange)
Text("Camera Error")
.font(.title)
.foregroundStyle(.white)
Text(message)
.font(.body)
.foregroundStyle(.gray)
.multilineTextAlignment(.center)
Button("Close") {
onDismiss()
}
.buttonStyle(.borderedProminent)
.padding(.top, 20)
}
.padding(40)
}
// MARK: - Calibration Content
private func calibrationContentView(screenSize: CGSize) -> some View {
ZStack {
VStack {
progressBar
Spacer()
}
if let step = calibratorService.currentStep {
calibrationTarget(for: step, screenSize: screenSize)
}
VStack {
Spacer()
HStack {
cancelButton
Spacer()
if !calibratorService.isCollectingSamples {
skipButton
}
}
.padding(.horizontal, 40)
.padding(.bottom, 40)
}
// Face detection indicator
VStack {
HStack {
Spacer()
faceDetectionIndicator
}
Spacer()
}
}
}
// MARK: - Progress Bar
private var progressBar: some View {
VStack(spacing: 10) {
HStack {
Text("Calibrating...")
.foregroundStyle(.white)
Spacer()
Text(calibratorService.progressText)
.foregroundStyle(.white.opacity(0.7))
}
ProgressView(value: calibratorService.progress)
.progressViewStyle(.linear)
.tint(.blue)
}
.padding()
.background(Color.black.opacity(0.7))
}
// MARK: - Face Detection Indicator
private var faceDetectionIndicator: some View {
HStack(spacing: 8) {
Circle()
.fill(viewModel.stableFaceDetected ? Color.green : Color.red)
.frame(width: 12, height: 12)
Text(viewModel.stableFaceDetected ? "Face detected" : "No face detected")
.font(.caption)
.foregroundStyle(.white.opacity(0.8))
}
.padding(.horizontal, 16)
.padding(.vertical, 10)
.background(Color.black.opacity(0.7))
.cornerRadius(20)
.padding()
.animation(.easeInOut(duration: 0.3), value: viewModel.stableFaceDetected)
}
// MARK: - Calibration Target
@ViewBuilder
private func calibrationTarget(for step: CalibrationStep, screenSize: CGSize) -> some View {
let position = targetPosition(for: step, screenSize: screenSize)
VStack(spacing: 20) {
ZStack {
// Outer ring (pulsing when counting down)
Circle()
.stroke(Color.blue.opacity(0.3), lineWidth: 3)
.frame(width: 100, height: 100)
.scaleEffect(viewModel.isCountingDown ? 1.2 : 1.0)
.animation(
viewModel.isCountingDown
? .easeInOut(duration: 0.6).repeatForever(autoreverses: true)
: .default,
value: viewModel.isCountingDown)
// Progress ring when collecting
if calibratorService.isCollectingSamples {
Circle()
.trim(from: 0, to: CGFloat(calibratorService.samplesCollected) / 30.0)
.stroke(Color.green, lineWidth: 4)
.frame(width: 90, height: 90)
.rotationEffect(.degrees(-90))
.animation(
.linear(duration: 0.1), value: calibratorService.samplesCollected)
}
// Inner circle
Circle()
.fill(calibratorService.isCollectingSamples ? Color.green : Color.blue)
.frame(width: 60, height: 60)
.animation(
.easeInOut(duration: 0.3), value: calibratorService.isCollectingSamples)
// Countdown number or collecting indicator
if viewModel.isCountingDown && viewModel.countdownValue > 0 {
Text("\(viewModel.countdownValue)")
.font(.system(size: 36, weight: .bold))
.foregroundStyle(.white)
} else if calibratorService.isCollectingSamples {
Image(systemName: "eye.fill")
.font(.system(size: 24, weight: .bold))
.foregroundStyle(.white)
}
}
Text(instructionText(for: step))
.font(.title2)
.foregroundStyle(.white)
.padding(.horizontal, 40)
.padding(.vertical, 15)
.background(Color.black.opacity(0.7))
.cornerRadius(10)
}
.position(position)
}
private func instructionText(for step: CalibrationStep) -> String {
if viewModel.isCountingDown && viewModel.countdownValue > 0 {
return "Get ready..."
} else if calibratorService.isCollectingSamples {
return "Look at the target"
} else {
return step.instructionText
}
}
// MARK: - Buttons
private var skipButton: some View {
Button {
viewModel.skipCurrentStep(calibratorService: calibratorService)
} label: {
Text("Skip")
.foregroundStyle(.white)
.padding(.horizontal, 20)
.padding(.vertical, 10)
.background(Color.white.opacity(0.2))
.cornerRadius(8)
}
.buttonStyle(.plain)
}
private var cancelButton: some View {
Button {
viewModel.cleanup(
eyeTrackingService: eyeTrackingService, calibratorService: calibratorService)
onDismiss()
} label: {
HStack(spacing: 6) {
Image(systemName: "xmark")
Text("Cancel")
}
.foregroundStyle(.white.opacity(0.7))
.padding(.horizontal, 20)
.padding(.vertical, 10)
.background(Color.white.opacity(0.1))
.cornerRadius(8)
}
.buttonStyle(.plain)
.keyboardShortcut(.escape, modifiers: [])
}
// MARK: - Completion View
private var completionView: some View {
VStack(spacing: 30) {
Image(systemName: "checkmark.circle.fill")
.font(.system(size: 80))
.foregroundStyle(.green)
Text("Calibration Complete!")
.font(.largeTitle)
.foregroundStyle(.white)
.fontWeight(.bold)
Text("Your eye tracking has been calibrated successfully.")
.font(.title3)
.foregroundStyle(.gray)
Button("Done") {
onDismiss()
}
.buttonStyle(.borderedProminent)
.keyboardShortcut(.return, modifiers: [])
.padding(.top, 20)
}
.onAppear {
DispatchQueue.main.asyncAfter(deadline: .now() + 2.5) {
onDismiss()
}
}
}
// MARK: - Helper Methods
private func targetPosition(for step: CalibrationStep, screenSize: CGSize) -> CGPoint {
let width = screenSize.width
let height = screenSize.height
let centerX = width / 2
let centerY = height / 2
let marginX: CGFloat = 150
let marginY: CGFloat = 120
switch step {
case .center:
return CGPoint(x: centerX, y: centerY)
case .left:
return CGPoint(x: centerX - width / 4, y: centerY)
case .right:
return CGPoint(x: centerX + width / 4, y: centerY)
case .farLeft:
return CGPoint(x: marginX, y: centerY)
case .farRight:
return CGPoint(x: width - marginX, y: centerY)
case .up:
return CGPoint(x: centerX, y: marginY)
case .down:
return CGPoint(x: centerX, y: height - marginY)
case .topLeft:
return CGPoint(x: marginX, y: marginY)
case .topRight:
return CGPoint(x: width - marginX, y: marginY)
case .bottomLeft:
return CGPoint(x: marginX, y: height - marginY)
case .bottomRight:
return CGPoint(x: width - marginX, y: height - marginY)
}
}
}
// MARK: - ViewModel
@MainActor
class CalibrationOverlayViewModel: ObservableObject {
@Published var countdownValue = 1
@Published var isCountingDown = false
@Published var cameraStarted = false
@Published var showError: String?
@Published var calibrationStarted = false
@Published var stableFaceDetected = false // Debounced face detection
private var countdownTask: Task<Void, Never>?
private var faceDetectionCancellable: AnyCancellable?
private var lastFaceDetectedTime: Date = .distantPast
private let faceDetectionDebounce: TimeInterval = 0.5 // 500ms debounce
func startCamera(eyeTrackingService: EyeTrackingService, calibratorService: CalibratorService)
async
{
do {
try await eyeTrackingService.startEyeTracking()
cameraStarted = true
// Set up debounced face detection
setupFaceDetectionObserver(eyeTrackingService: eyeTrackingService)
try? await Task.sleep(for: .seconds(0.5))
// Reset any previous calibration data before starting fresh
calibratorService.resetForNewCalibration()
calibratorService.startCalibration()
calibrationStarted = true
startStepCountdown(calibratorService: calibratorService)
} catch {
showError = "Failed to start camera: \(error.localizedDescription)"
}
}
private func setupFaceDetectionObserver(eyeTrackingService: EyeTrackingService) {
faceDetectionCancellable = eyeTrackingService.$faceDetected
.receive(on: DispatchQueue.main)
.sink { [weak self] detected in
guard let self = self else { return }
if detected {
// Face detected - update immediately
self.lastFaceDetectedTime = Date()
self.stableFaceDetected = true
} else {
// Face lost - only update after debounce period
let timeSinceLastDetection = Date().timeIntervalSince(self.lastFaceDetectedTime)
if timeSinceLastDetection > self.faceDetectionDebounce {
self.stableFaceDetected = false
}
}
}
}
func cleanup(eyeTrackingService: EyeTrackingService, calibratorService: CalibratorService) {
countdownTask?.cancel()
countdownTask = nil
faceDetectionCancellable?.cancel()
faceDetectionCancellable = nil
isCountingDown = false
if calibratorService.isCalibrating {
calibratorService.cancelCalibration()
}
eyeTrackingService.stopEyeTracking()
}
func skipCurrentStep(calibratorService: CalibratorService) {
countdownTask?.cancel()
countdownTask = nil
isCountingDown = false
calibratorService.skipStep()
}
func startStepCountdown(calibratorService: CalibratorService) {
countdownTask?.cancel()
countdownTask = nil
countdownValue = 1
isCountingDown = true
countdownTask = Task { @MainActor in
// Just 1 second countdown
try? await Task.sleep(for: .seconds(1))
if Task.isCancelled { return }
// Done counting, start collecting
isCountingDown = false
countdownValue = 0
calibratorService.startCollectingSamples()
}
}
}
#Preview {
CalibrationOverlayView(onDismiss: {})
}
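// Note on the debounce above: it is asymmetric and event-driven. A detection
// event sets stableFaceDetected immediately; a loss event only registers if
// more than 500 ms have passed since the last detection, and nothing fires
// in between. Combine's built-in operator is not a drop-in replacement,
// since it delays both edges; a sketch for comparison (not the shipped code):
//
//     faceDetectionCancellable = eyeTrackingService.$faceDetected
//         .removeDuplicates()
//         .debounce(for: .seconds(0.5), scheduler: DispatchQueue.main)
//         .sink { [weak self] detected in self?.stableFaceDetected = detected }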

View File

@@ -11,6 +11,20 @@ import AVFoundation
struct CameraPreviewView: NSViewRepresentable {
let previewLayer: AVCaptureVideoPreviewLayer
let borderColor: NSColor
let showsBorder: Bool
let cornerRadius: CGFloat
init(
previewLayer: AVCaptureVideoPreviewLayer,
borderColor: NSColor,
showsBorder: Bool = true,
cornerRadius: CGFloat = 12
) {
self.previewLayer = previewLayer
self.borderColor = borderColor
self.showsBorder = showsBorder
self.cornerRadius = cornerRadius
}
func makeNSView(context: Context) -> PreviewContainerView {
let view = PreviewContainerView()
@@ -23,6 +37,11 @@ struct CameraPreviewView: NSViewRepresentable {
view.layer?.addSublayer(previewLayer)
}
if let connection = previewLayer.connection, connection.isVideoMirroringSupported {
connection.automaticallyAdjustsVideoMirroring = false
connection.isVideoMirrored = true
}
updateBorder(view: view, color: borderColor)
return view
@@ -42,13 +61,22 @@ struct CameraPreviewView: NSViewRepresentable {
previewLayer.frame = nsView.bounds
}
if let connection = previewLayer.connection, connection.isVideoMirroringSupported {
connection.automaticallyAdjustsVideoMirroring = false
connection.isVideoMirrored = true
}
updateBorder(view: nsView, color: borderColor)
}
private func updateBorder(view: NSView, color: NSColor) {
if showsBorder {
view.layer?.borderColor = color.cgColor
view.layer?.borderWidth = 4
view.layer?.cornerRadius = 12
} else {
view.layer?.borderWidth = 0
}
view.layer?.cornerRadius = cornerRadius
view.layer?.masksToBounds = true
}

View File

@@ -0,0 +1,197 @@
//
// EnforceModeCalibrationOverlayView.swift
// Gaze
//
// Created by Mike Freno on 2/1/26.
//
import SwiftUI
struct EnforceModeCalibrationOverlayView: View {
@ObservedObject private var calibrationService = EnforceModeCalibrationService.shared
@ObservedObject private var eyeTrackingService = EyeTrackingService.shared
@Bindable private var settingsManager = SettingsManager.shared
@ObservedObject private var enforceModeService = EnforceModeService.shared
var body: some View {
ZStack {
cameraBackground
switch calibrationService.currentStep {
case .eyeBox:
eyeBoxStep
case .targets:
targetStep
case .complete:
completionStep
}
}
}
private var eyeBoxStep: some View {
ZStack {
VStack(spacing: 20) {
VStack(spacing: 16) {
Text("Adjust Eye Box")
.font(.title2)
.foregroundStyle(.white)
Text(
"Use the sliders to fit the boxes around your eyes. A rough fit is fine."
)
.font(.callout)
.multilineTextAlignment(.center)
.foregroundStyle(.white.opacity(0.8))
}
.padding(.horizontal, 40)
.frame(maxWidth: 520)
.frame(maxWidth: .infinity, alignment: .top)
VStack(alignment: .leading, spacing: 12) {
Text("Width")
.font(.caption)
.foregroundStyle(.white.opacity(0.8))
Slider(
value: $settingsManager.settings.enforceModeEyeBoxWidthFactor,
in: 0.12...0.25
)
Text("Height")
.font(.caption)
.foregroundStyle(.white.opacity(0.8))
Slider(
value: $settingsManager.settings.enforceModeEyeBoxHeightFactor,
in: 0.01...0.10
)
}
.padding(16)
.background(.black.opacity(0.3))
.clipShape(RoundedRectangle(cornerRadius: 12))
.frame(maxWidth: 420)
Spacer()
HStack(spacing: 12) {
Button("Cancel") {
calibrationService.dismissOverlay()
enforceModeService.stopTestMode()
}
.buttonStyle(.bordered)
Button("Continue") {
calibrationService.advance()
}
.buttonStyle(.borderedProminent)
}
.padding(.bottom, 40)
}
}
}
private var targetStep: some View {
ZStack {
VStack(spacing: 10) {
HStack {
Text("Calibrating...")
.foregroundStyle(.white)
Spacer()
Text(calibrationService.progressText)
.foregroundStyle(.white.opacity(0.7))
}
ProgressView(value: calibrationService.progress)
.progressViewStyle(.linear)
.tint(.blue)
}
.padding()
.background(Color.black.opacity(0.7))
.frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .top)
targetDot
VStack {
Spacer()
HStack(spacing: 12) {
Button("Cancel") {
calibrationService.dismissOverlay()
enforceModeService.stopTestMode()
}
.buttonStyle(.bordered)
}
}
.padding(.bottom, 40)
}
}
private var completionStep: some View {
VStack(spacing: 20) {
Text("Calibration Complete")
.font(.title2)
.foregroundStyle(.white)
Text("Enforce Mode is ready to use.")
.font(.callout)
.foregroundStyle(.white.opacity(0.8))
Button("Done") {
calibrationService.dismissOverlay()
enforceModeService.stopTestMode()
}
.buttonStyle(.borderedProminent)
}
}
private var targetDot: some View {
GeometryReader { geometry in
let target = calibrationService.currentTarget()
let center = CGPoint(
x: geometry.size.width * target.x,
y: geometry.size.height * target.y
)
ZStack {
Circle()
.fill(Color.blue)
.frame(width: 120, height: 120)
Circle()
.trim(from: 0, to: CGFloat(calibrationService.countdownProgress))
.stroke(Color.blue.opacity(0.8), lineWidth: 8)
.frame(width: 160, height: 160)
.rotationEffect(.degrees(-90))
.animation(.linear(duration: 0.02), value: calibrationService.countdownProgress)
}
.position(center)
.animation(.easeInOut(duration: 0.3), value: center)
}
.ignoresSafeArea()
}
private var cameraBackground: some View {
ZStack {
if let layer = eyeTrackingService.previewLayer {
CameraPreviewView(
previewLayer: layer,
borderColor: .clear,
showsBorder: false,
cornerRadius: 0
)
.opacity(0.5)
}
if calibrationService.currentStep == .eyeBox {
GeometryReader { geometry in
EyeTrackingDebugOverlayView(
debugState: eyeTrackingService.debugState,
viewSize: geometry.size
)
.opacity(0.8)
}
}
Color.black.opacity(0.35)
.ignoresSafeArea()
}
.ignoresSafeArea()
}
}

View File

@@ -0,0 +1,418 @@
//
// EnforceModeSetupContent.swift
// Gaze
//
// Created by Mike Freno on 1/30/26.
//
import AVFoundation
import AppKit
import SwiftUI
struct EnforceModeSetupContent: View {
@Bindable var settingsManager: SettingsManager
@ObservedObject var cameraService = CameraAccessService.shared
@ObservedObject var eyeTrackingService = EyeTrackingService.shared
@ObservedObject var enforceModeService = EnforceModeService.shared
@ObservedObject var calibrationService = EnforceModeCalibrationService.shared
@Environment(\.isCompactLayout) private var isCompact
let presentation: SetupPresentation
@Binding var isTestModeActive: Bool
@Binding var cachedPreviewLayer: AVCaptureVideoPreviewLayer?
let isProcessingToggle: Bool
let handleEnforceModeToggle: (Bool) -> Void
private var cameraHardwareAvailable: Bool {
cameraService.hasCameraHardware
}
private var sectionCornerRadius: CGFloat {
presentation.isCard ? 10 : 12
}
private var sectionPadding: CGFloat {
presentation.isCard ? 10 : 16
}
private var headerFont: Font {
presentation.isCard ? .subheadline : .headline
}
private var iconSize: CGFloat {
presentation.isCard ? AdaptiveLayout.Font.cardIconSmall : AdaptiveLayout.Font.cardIcon
}
var body: some View {
VStack(spacing: presentation.isCard ? 10 : 24) {
if presentation.isCard {
Image(systemName: "video.fill")
.font(.system(size: iconSize))
.foregroundStyle(Color.accentColor)
Text("Enforce Mode")
.font(.title2)
.fontWeight(.bold)
}
Text("Use your camera to ensure you take breaks")
.font(presentation.isCard ? .subheadline : (isCompact ? .subheadline : .title3))
.foregroundStyle(.secondary)
.multilineTextAlignment(.center)
if presentation.isCard {
Spacer(minLength: 0)
}
VStack(spacing: presentation.isCard ? 10 : 20) {
enforceModeToggleView
cameraStatusView
if enforceModeService.isEnforceModeEnabled {
testModeButton
}
if isTestModeActive && enforceModeService.isCameraActive {
testModePreviewView
trackingConstantsView
} else if enforceModeService.isCameraActive && !isTestModeActive {
eyeTrackingStatusView
trackingConstantsView
}
if enforceModeService.isEnforceModeEnabled {
strictnessControlView
}
calibrationActionView
privacyInfoView
}
if presentation.isCard {
Spacer(minLength: 0)
}
}
}
private var testModeButton: some View {
Button(action: {
Task { @MainActor in
if isTestModeActive {
enforceModeService.stopTestMode()
isTestModeActive = false
cachedPreviewLayer = nil
} else {
await enforceModeService.startTestMode()
isTestModeActive = enforceModeService.isCameraActive
if isTestModeActive {
cachedPreviewLayer = eyeTrackingService.previewLayer
}
}
}
}) {
HStack {
Image(systemName: isTestModeActive ? "stop.circle.fill" : "play.circle.fill")
.font(.title3)
Text(isTestModeActive ? "Stop Test" : "Test Tracking")
.font(.headline)
}
.frame(maxWidth: .infinity)
.padding()
}
.buttonStyle(.borderedProminent)
.controlSize(presentation.isCard ? .regular : .large)
}
private var testModePreviewView: some View {
VStack(spacing: 16) {
let lookingAway = eyeTrackingService.trackingResult.gazeState == .lookingAway
let borderColor: NSColor = lookingAway ? .systemGreen : .systemRed
let previewLayer = eyeTrackingService.previewLayer ?? cachedPreviewLayer
if let layer = previewLayer {
ZStack {
CameraPreviewView(previewLayer: layer, borderColor: borderColor)
GeometryReader { geometry in
EyeTrackingDebugOverlayView(
debugState: eyeTrackingService.debugState,
viewSize: geometry.size
)
}
}
.frame(height: presentation.isCard ? 180 : (isCompact ? 200 : 300))
.glassEffectIfAvailable(
GlassStyle.regular, in: .rect(cornerRadius: sectionCornerRadius)
)
.onAppear {
if cachedPreviewLayer == nil {
cachedPreviewLayer = eyeTrackingService.previewLayer
}
}
}
}
}
private var cameraStatusView: some View {
HStack {
VStack(alignment: .leading, spacing: 4) {
Text("Camera Access")
.font(headerFont)
if cameraService.isCameraAuthorized {
Label("Authorized", systemImage: "checkmark.circle.fill")
.font(.caption)
.foregroundStyle(.green)
} else if let error = cameraService.cameraError {
Label(error.localizedDescription, systemImage: "exclamationmark.triangle.fill")
.font(.caption)
.foregroundStyle(.orange)
} else {
Label("Not authorized", systemImage: "xmark.circle.fill")
.font(.caption)
.foregroundStyle(.secondary)
}
}
Spacer()
if !cameraService.isCameraAuthorized {
Button("Request Access") {
Task { @MainActor in
do {
try await cameraService.requestCameraAccess()
} catch {
print("⚠️ Camera access failed: \(error.localizedDescription)")
}
}
}
.buttonStyle(.bordered)
.controlSize(presentation.isCard ? .small : .regular)
}
}
.padding(sectionPadding)
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: sectionCornerRadius))
}
private var eyeTrackingStatusView: some View {
VStack(alignment: .leading, spacing: 12) {
Text("Eye Tracking Status")
.font(headerFont)
HStack(spacing: 20) {
statusIndicator(
title: "Face Detected",
isActive: eyeTrackingService.trackingResult.faceDetected,
icon: "person.fill"
)
statusIndicator(
title: "Looking Away",
isActive: eyeTrackingService.trackingResult.gazeState == .lookingAway,
icon: "arrow.turn.up.right"
)
}
}
.padding(sectionPadding)
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: sectionCornerRadius))
}
private func statusIndicator(title: String, isActive: Bool, icon: String) -> some View {
VStack(spacing: 8) {
Image(systemName: icon)
.font(.title2)
.foregroundStyle(isActive ? .green : .secondary)
Text(title)
.font(.caption)
.foregroundStyle(.secondary)
.multilineTextAlignment(.center)
}
.frame(maxWidth: .infinity)
}
private var privacyInfoView: some View {
VStack(alignment: .leading, spacing: 12) {
HStack {
Image(systemName: "lock.shield.fill")
.font(.title3)
.foregroundStyle(.blue)
Text("Privacy Information")
.font(headerFont)
}
VStack(alignment: .leading, spacing: 8) {
privacyBullet("All processing happens on-device")
privacyBullet("No images are stored or transmitted")
privacyBullet("Camera is only active during lookaway reminders (3-second window)")
privacyBullet("You can always force-quit with Cmd+Q")
}
.font(.caption)
.foregroundStyle(.secondary)
}
.padding(sectionPadding)
.glassEffectIfAvailable(
GlassStyle.regular.tint(.blue.opacity(0.1)),
in: .rect(cornerRadius: sectionCornerRadius)
)
}
private func privacyBullet(_ text: String) -> some View {
HStack(alignment: .top, spacing: 8) {
Image(systemName: "checkmark")
.font(.caption2)
.foregroundStyle(.blue)
Text(text)
}
}
private var enforceModeToggleView: some View {
HStack {
VStack(alignment: .leading, spacing: 2) {
Text("Enable Enforce Mode")
.font(headerFont)
if !cameraHardwareAvailable {
Text("No camera hardware detected")
.font(.caption2)
.foregroundStyle(.orange)
} else {
Text("Camera activates 3 seconds before lookaway reminders")
.font(.caption2)
.foregroundStyle(.secondary)
}
}
Spacer()
Toggle(
"",
isOn: Binding(
get: {
enforceModeService.isEnforceModeEnabled
},
set: { newValue in
guard !isProcessingToggle else { return }
handleEnforceModeToggle(newValue)
}
)
)
.labelsHidden()
.disabled(isProcessingToggle || !cameraHardwareAvailable)
.controlSize(presentation.isCard ? .small : (isCompact ? .small : .regular))
}
.padding(sectionPadding)
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: sectionCornerRadius))
}
private var trackingConstantsView: some View {
VStack(alignment: .leading, spacing: 16) {
HStack {
Text("Tracking Status")
.font(headerFont)
}
let gazeState = eyeTrackingService.trackingResult.gazeState
let stateLabel: String = {
switch gazeState {
case .lookingAway:
return "Looking Away"
case .lookingAtScreen:
return "Looking At Screen"
case .unknown:
return "Unknown"
}
}()
VStack(alignment: .leading, spacing: 8) {
HStack(spacing: 12) {
Text("Gaze:")
.font(.caption2)
.foregroundStyle(.secondary)
Text(stateLabel)
.font(.caption2)
.foregroundStyle(gazeState == .lookingAway ? .green : .secondary)
}
HStack(spacing: 12) {
Text("Confidence:")
.font(.caption2)
.foregroundStyle(.secondary)
Text(String(format: "%.2f", eyeTrackingService.trackingResult.confidence))
.font(.caption2)
.foregroundStyle(.secondary)
}
if let faceWidth = eyeTrackingService.debugState.faceWidthRatio {
HStack(spacing: 12) {
Text("Face Width:")
.font(.caption2)
.foregroundStyle(.secondary)
Text(String(format: "%.3f", faceWidth))
.font(.caption2)
.foregroundStyle(.secondary)
}
}
if let horizontal = eyeTrackingService.debugState.normalizedHorizontal,
let vertical = eyeTrackingService.debugState.normalizedVertical
{
HStack(spacing: 12) {
Text("Ratios:")
.font(.caption2)
.foregroundStyle(.secondary)
Text("H \(String(format: "%.3f", horizontal))")
.font(.caption2)
.foregroundStyle(.secondary)
Text("V \(String(format: "%.3f", vertical))")
.font(.caption2)
.foregroundStyle(.secondary)
}
}
}
}
.padding(sectionPadding)
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: sectionCornerRadius))
}
private var strictnessControlView: some View {
VStack(alignment: .leading, spacing: 12) {
Text("Tracking Strictness")
.font(headerFont)
Slider(
value: $settingsManager.settings.enforceModeStrictness,
in: 0...1
)
.controlSize(.small)
HStack {
Text("Lenient")
.font(.caption2)
.foregroundStyle(.secondary)
Spacer()
Text("Strict")
.font(.caption2)
.foregroundStyle(.secondary)
}
}
.padding(sectionPadding)
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: sectionCornerRadius))
}
private var calibrationActionView: some View {
Button(action: {
calibrationService.presentOverlay()
Task { @MainActor in
await enforceModeService.startTestMode()
}
}) {
HStack {
Image(systemName: "target")
Text("Calibrate Eye Tracking")
.font(.headline)
}
.frame(maxWidth: .infinity)
.padding(.vertical, 8)
}
.buttonStyle(.bordered)
.controlSize(.regular)
}
}

View File

@@ -1,116 +0,0 @@
//
// EyeTrackingCalibrationView.swift
// Gaze
//
// Created by Mike Freno on 1/15/26.
//
import SwiftUI
struct EyeTrackingCalibrationView: View {
@StateObject private var calibratorService = CalibratorService.shared
@Environment(\.dismiss) private var dismiss
var body: some View {
ZStack {
Color.black.ignoresSafeArea()
introductionScreenView
}
.frame(minWidth: 600, minHeight: 500)
}
// MARK: - Introduction Screen
private var introductionScreenView: some View {
VStack(spacing: 30) {
Image(systemName: "eye.circle.fill")
.font(.system(size: 80))
.foregroundStyle(.blue)
Text("Eye Tracking Calibration")
.font(.largeTitle)
.foregroundStyle(.white)
.fontWeight(.bold)
Text("This calibration will help improve eye tracking accuracy.")
.font(.title3)
.multilineTextAlignment(.center)
.foregroundStyle(.gray)
VStack(alignment: .leading, spacing: 15) {
InstructionRow(icon: "1.circle.fill", text: "Look at each target on the screen")
InstructionRow(
icon: "2.circle.fill", text: "Keep your head still; move only your eyes")
InstructionRow(icon: "3.circle.fill", text: "Follow the countdown at each position")
InstructionRow(icon: "4.circle.fill", text: "Takes about 30-45 seconds")
}
.padding(.vertical, 20)
if calibratorService.calibrationData.isComplete {
VStack(spacing: 10) {
Text("Last calibration:")
.font(.caption)
.foregroundStyle(.gray)
Text(calibratorService.getCalibrationSummary())
.font(.caption)
.multilineTextAlignment(.center)
.foregroundStyle(.gray)
}
.padding(.vertical)
}
HStack(spacing: 20) {
Button("Cancel") {
dismiss()
}
.foregroundStyle(.white)
.buttonStyle(.plain)
.keyboardShortcut(.escape, modifiers: [])
Button("Start Calibration") {
startFullscreenCalibration()
}
.keyboardShortcut(.return, modifiers: [])
.buttonStyle(.borderedProminent)
}
.padding(.top, 20)
}
.padding(60)
.frame(maxWidth: 600)
}
// MARK: - Actions
private func startFullscreenCalibration() {
dismiss()
// Small delay to allow sheet dismissal animation
DispatchQueue.main.asyncAfter(deadline: .now() + 0.3) {
CalibratorService.shared.showCalibrationOverlay()
}
}
}
// MARK: - Instruction Row
struct InstructionRow: View {
let icon: String
let text: String
var body: some View {
HStack(spacing: 15) {
Image(systemName: icon)
.font(.title2)
.foregroundStyle(.blue)
.frame(width: 30)
Text(text)
.foregroundStyle(.white)
.font(.body)
}
}
}
#Preview {
EyeTrackingCalibrationView()
}

View File

@@ -0,0 +1,96 @@
//
// EyeTrackingDebugOverlayView.swift
// Gaze
//
// Created by Mike Freno on 1/31/26.
//
import SwiftUI
struct EyeTrackingDebugOverlayView: View {
let debugState: EyeTrackingDebugState
let viewSize: CGSize
var body: some View {
ZStack {
if let leftRect = debugState.leftEyeRect,
let imageSize = debugState.imageSize {
drawEyeRect(leftRect, imageSize: imageSize, color: .cyan)
}
if let rightRect = debugState.rightEyeRect,
let imageSize = debugState.imageSize {
drawEyeRect(rightRect, imageSize: imageSize, color: .yellow)
}
if let leftPupil = debugState.leftPupil,
let imageSize = debugState.imageSize {
drawPupil(leftPupil, imageSize: imageSize, color: .red)
}
if let rightPupil = debugState.rightPupil,
let imageSize = debugState.imageSize {
drawPupil(rightPupil, imageSize: imageSize, color: .red)
}
}
}
private func drawEyeRect(_ rect: CGRect, imageSize: CGSize, color: Color) -> some View {
let mapped = mapRect(rect, imageSize: imageSize)
return Rectangle()
.stroke(color, lineWidth: 2)
.frame(width: mapped.size.width, height: mapped.size.height)
.position(x: mapped.midX, y: mapped.midY)
}
private func drawPupil(_ point: CGPoint, imageSize: CGSize, color: Color) -> some View {
let mapped = mapPoint(point, imageSize: imageSize)
return Circle()
.fill(color)
.frame(width: 6, height: 6)
.position(x: mapped.x, y: mapped.y)
}
private func mapRect(_ rect: CGRect, imageSize: CGSize) -> CGRect {
let mappedOrigin = mapPoint(rect.origin, imageSize: imageSize)
let mappedMax = mapPoint(CGPoint(x: rect.maxX, y: rect.maxY), imageSize: imageSize)
let width = abs(mappedMax.x - mappedOrigin.x)
let height = abs(mappedMax.y - mappedOrigin.y)
return CGRect(
x: min(mappedOrigin.x, mappedMax.x),
y: min(mappedOrigin.y, mappedMax.y),
width: width,
height: height
)
}
private func mapPoint(_ point: CGPoint, imageSize: CGSize) -> CGPoint {
let rawImageWidth = imageSize.width
let rawImageHeight = imageSize.height
let imageAspect = rawImageWidth / rawImageHeight
let viewAspect = viewSize.width / viewSize.height
let scale: CGFloat
let offsetX: CGFloat
let offsetY: CGFloat
if imageAspect > viewAspect {
scale = viewSize.height / rawImageHeight
offsetX = (viewSize.width - rawImageWidth * scale) / 2
offsetY = 0
} else {
scale = viewSize.width / rawImageWidth
offsetX = 0
offsetY = (viewSize.height - rawImageHeight * scale) / 2
}
let mirroredX = rawImageWidth - point.x
let screenX = mirroredX * scale + offsetX
let screenY = point.y * scale + offsetY
return CGPoint(x: screenX, y: screenY)
}
}
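// mapPoint above composes three steps: aspect-fill scaling, a centering
// offset, and a horizontal mirror to match the flipped camera preview.
// Worked example with invented sizes: a 640x480 image into a 400x400 view.
// imageAspect 1.33 > viewAspect 1.0, so scale = 400/480 ≈ 0.833,
// offsetX = (400 - 640 * 0.833) / 2 ≈ -66.7 (sides cropped), offsetY = 0.
// A point at (160, 240) mirrors to x = 640 - 160 = 480, then maps to
// (480 * 0.833 - 66.7, 240 * 0.833) ≈ (333.3, 200).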

View File

@@ -1,253 +0,0 @@
//
// GazeOverlayView.swift
// Gaze
//
// Created by Mike Freno on 1/16/26.
//
import SwiftUI
struct GazeOverlayView: View {
@ObservedObject var eyeTrackingService: EyeTrackingService
var body: some View {
VStack(spacing: 8) {
inFrameIndicator
gazeDirectionGrid
ratioDebugView
eyeImagesDebugView
}
.padding(12)
}
private var inFrameIndicator: some View {
HStack(spacing: 6) {
Circle()
.fill(eyeTrackingService.isInFrame ? Color.green : Color.red)
.frame(width: 10, height: 10)
Text(eyeTrackingService.isInFrame ? "In Frame" : "No Face")
.font(.caption2)
.fontWeight(.semibold)
.foregroundStyle(.white)
}
.padding(.horizontal, 10)
.padding(.vertical, 6)
.background(
Capsule()
.fill(Color.black.opacity(0.6))
)
}
private var gazeDirectionGrid: some View {
let currentDirection = eyeTrackingService.gazeDirection
let currentPos = currentDirection.gridPosition
return VStack(spacing: 2) {
ForEach(0..<3, id: \.self) { row in
HStack(spacing: 2) {
ForEach(0..<3, id: \.self) { col in
let isActive =
currentPos.x == col && currentPos.y == row
&& eyeTrackingService.isInFrame
gridCell(row: row, col: col, isActive: isActive)
}
}
}
}
.padding(8)
.background(
RoundedRectangle(cornerRadius: 8)
.fill(Color.black.opacity(0.5))
)
}
private func gridCell(row: Int, col: Int, isActive: Bool) -> some View {
let direction = directionFor(row: row, col: col)
return ZStack {
RoundedRectangle(cornerRadius: 4)
.fill(isActive ? Color.green : Color.white.opacity(0.2))
Text(direction.rawValue)
.font(.system(size: 14, weight: .bold))
.foregroundStyle(isActive ? .white : .white.opacity(0.6))
}
.frame(width: 28, height: 28)
}
private func directionFor(row: Int, col: Int) -> GazeDirection {
switch (col, row) {
case (0, 0): return .upLeft
case (1, 0): return .up
case (2, 0): return .upRight
case (0, 1): return .left
case (1, 1): return .center
case (2, 1): return .right
case (0, 2): return .downLeft
case (1, 2): return .down
case (2, 2): return .downRight
default: return .center
}
}
private var ratioDebugView: some View {
VStack(alignment: .leading, spacing: 2) {
// Show individual L/R ratios
HStack(spacing: 8) {
if let leftH = eyeTrackingService.debugLeftPupilRatio {
Text("L.H: \(String(format: "%.2f", leftH))")
.font(.system(size: 9, weight: .medium, design: .monospaced))
.foregroundStyle(.white)
}
if let rightH = eyeTrackingService.debugRightPupilRatio {
Text("R.H: \(String(format: "%.2f", rightH))")
.font(.system(size: 9, weight: .medium, design: .monospaced))
.foregroundStyle(.white)
}
}
HStack(spacing: 8) {
if let leftV = eyeTrackingService.debugLeftVerticalRatio {
Text("L.V: \(String(format: "%.2f", leftV))")
.font(.system(size: 9, weight: .medium, design: .monospaced))
.foregroundStyle(.white)
}
if let rightV = eyeTrackingService.debugRightVerticalRatio {
Text("R.V: \(String(format: "%.2f", rightV))")
.font(.system(size: 9, weight: .medium, design: .monospaced))
.foregroundStyle(.white)
}
}
// Show averaged ratios
if let leftH = eyeTrackingService.debugLeftPupilRatio,
let rightH = eyeTrackingService.debugRightPupilRatio,
let leftV = eyeTrackingService.debugLeftVerticalRatio,
let rightV = eyeTrackingService.debugRightVerticalRatio
{
let avgH = (leftH + rightH) / 2.0
let avgV = (leftV + rightV) / 2.0
Text("Avg H:\(String(format: "%.2f", avgH)) V:\(String(format: "%.2f", avgV))")
.font(.system(size: 9, weight: .bold, design: .monospaced))
.foregroundStyle(.yellow)
}
}
.padding(.horizontal, 8)
.padding(.vertical, 4)
.background(
RoundedRectangle(cornerRadius: 4)
.fill(Color.black.opacity(0.5))
)
}
private var eyeImagesDebugView: some View {
HStack(spacing: 12) {
// Left eye
VStack(spacing: 4) {
Text("Left")
.font(.system(size: 8, weight: .bold))
.foregroundStyle(.white)
HStack(spacing: 4) {
eyeImageView(
image: eyeTrackingService.debugLeftEyeInput,
pupilPosition: eyeTrackingService.debugLeftPupilPosition,
eyeSize: eyeTrackingService.debugLeftEyeSize,
label: "Input"
)
eyeImageView(
image: eyeTrackingService.debugLeftEyeProcessed,
pupilPosition: eyeTrackingService.debugLeftPupilPosition,
eyeSize: eyeTrackingService.debugLeftEyeSize,
label: "Proc"
)
}
}
// Right eye
VStack(spacing: 4) {
Text("Right")
.font(.system(size: 8, weight: .bold))
.foregroundStyle(.white)
HStack(spacing: 4) {
eyeImageView(
image: eyeTrackingService.debugRightEyeInput,
pupilPosition: eyeTrackingService.debugRightPupilPosition,
eyeSize: eyeTrackingService.debugRightEyeSize,
label: "Input"
)
eyeImageView(
image: eyeTrackingService.debugRightEyeProcessed,
pupilPosition: eyeTrackingService.debugRightPupilPosition,
eyeSize: eyeTrackingService.debugRightEyeSize,
label: "Proc"
)
}
}
}
.padding(8)
.background(
RoundedRectangle(cornerRadius: 8)
.fill(Color.black.opacity(0.5))
)
}
private func eyeImageView(
image: NSImage?, pupilPosition: PupilPosition?, eyeSize: CGSize?, label: String
) -> some View {
let displaySize: CGFloat = 50
return VStack(spacing: 2) {
ZStack {
if let nsImage = image {
Image(nsImage: nsImage)
.resizable()
.interpolation(.none)
.aspectRatio(contentMode: .fit)
.frame(width: displaySize, height: displaySize)
// Draw pupil position marker
if let pupil = pupilPosition, let size = eyeSize, size.width > 0,
size.height > 0
{
let scaleX = displaySize / size.width
let scaleY = displaySize / size.height
let scale = min(scaleX, scaleY)
let scaledWidth = size.width * scale
let scaledHeight = size.height * scale
Circle()
.fill(Color.red)
.frame(width: 4, height: 4)
.offset(
x: (pupil.x * scale) - (scaledWidth / 2),
y: (pupil.y * scale) - (scaledHeight / 2)
)
}
} else {
RoundedRectangle(cornerRadius: 4)
.fill(Color.gray.opacity(0.3))
.frame(width: displaySize, height: displaySize)
Text("--")
.font(.system(size: 10))
.foregroundStyle(.white.opacity(0.5))
}
}
.frame(width: displaySize, height: displaySize)
.clipShape(RoundedRectangle(cornerRadius: 4))
Text(label)
.font(.system(size: 7))
.foregroundStyle(.white.opacity(0.7))
}
}
}
#Preview {
ZStack {
Color.gray
GazeOverlayView(eyeTrackingService: EyeTrackingService.shared)
}
.frame(width: 400, height: 400)
}

View File

@@ -1,184 +0,0 @@
//
// PupilOverlayView.swift
// Gaze
//
// Created by Mike Freno on 1/16/26.
//
import SwiftUI
/// Draws pupil detection markers directly on top of the camera preview
struct PupilOverlayView: View {
@ObservedObject var eyeTrackingService: EyeTrackingService
var body: some View {
GeometryReader { geometry in
let viewSize = geometry.size
// Draw eye regions and pupil markers
ZStack {
// Left eye
if let leftRegion = eyeTrackingService.debugLeftEyeRegion,
let leftPupil = eyeTrackingService.debugLeftPupilPosition,
let imageSize = eyeTrackingService.debugImageSize
{
EyeOverlayShape(
eyeRegion: leftRegion,
pupilPosition: leftPupil,
imageSize: imageSize,
viewSize: viewSize,
color: .cyan,
label: "L"
)
}
// Right eye
if let rightRegion = eyeTrackingService.debugRightEyeRegion,
let rightPupil = eyeTrackingService.debugRightPupilPosition,
let imageSize = eyeTrackingService.debugImageSize
{
EyeOverlayShape(
eyeRegion: rightRegion,
pupilPosition: rightPupil,
imageSize: imageSize,
viewSize: viewSize,
color: .yellow,
label: "R"
)
}
}
}
}
}
/// Helper view for drawing eye overlay
private struct EyeOverlayShape: View {
let eyeRegion: EyeRegion
let pupilPosition: PupilPosition
let imageSize: CGSize
let viewSize: CGSize
let color: Color
let label: String
private var transformedCoordinates: (eyeRect: CGRect, pupilPoint: CGPoint) {
// Standard macOS Camera Coordinate System (Landscape):
// Raw Buffer:
// - Origin (0,0) is Top-Left
// - X increases Right
// - Y increases Down
//
// Preview Layer (Mirrored):
// - Appears like a mirror
// - Screen X increases Right
// - Screen Y increases Down
// - BUT the image content is flipped horizontally
// (Raw Left is Screen Right, Raw Right is Screen Left)
// Use dimensions directly (no rotation swap)
let rawImageWidth = imageSize.width
let rawImageHeight = imageSize.height
// Calculate aspect-fill scaling
// We compare the raw aspect ratio to the view aspect ratio
let imageAspect = rawImageWidth / rawImageHeight
let viewAspect = viewSize.width / viewSize.height
let scale: CGFloat
let offsetX: CGFloat
let offsetY: CGFloat
if imageAspect > viewAspect {
// Image is wider than the view: aspect fill scales by height to fill
// vertically, then crops the horizontal excess equally on both sides.
scale = viewSize.height / rawImageHeight
offsetX = (viewSize.width - rawImageWidth * scale) / 2
offsetY = 0
} else {
// Image is taller than view (or view is wider) - scale by width, crop height
scale = viewSize.width / rawImageWidth
offsetX = 0
offsetY = (viewSize.height - rawImageHeight * scale) / 2
}
// Transform Eye Region
// Mirroring X: The 'left' of the raw image becomes the 'right' of the screen
// Raw Rect: x, y, w, h
// Mirrored X = ImageWidth - (x + w)
let eyeRawX = eyeRegion.frame.origin.x
let eyeRawY = eyeRegion.frame.origin.y
let eyeRawW = eyeRegion.frame.width
let eyeRawH = eyeRegion.frame.height
// Calculate Screen Coordinates
let eyeScreenX = (rawImageWidth - (eyeRawX + eyeRawW)) * scale + offsetX
let eyeScreenY = eyeRawY * scale + offsetY
let eyeScreenW = eyeRawW * scale
let eyeScreenH = eyeRawH * scale
// Transform Pupil Position
// Global Raw Pupil X = eyeRawX + pupilPosition.x
// Global Raw Pupil Y = eyeRawY + pupilPosition.y
let pupilGlobalRawX = eyeRawX + pupilPosition.x
let pupilGlobalRawY = eyeRawY + pupilPosition.y
// Mirror X for Pupil
let pupilScreenX = (rawImageWidth - pupilGlobalRawX) * scale + offsetX
let pupilScreenY = pupilGlobalRawY * scale + offsetY
return (
eyeRect: CGRect(x: eyeScreenX, y: eyeScreenY, width: eyeScreenW, height: eyeScreenH),
pupilPoint: CGPoint(x: pupilScreenX, y: pupilScreenY)
)
}
var body: some View {
let coords = transformedCoordinates
let eyeRect = coords.eyeRect
let pupilPoint = coords.pupilPoint
ZStack {
// Eye region rectangle
Rectangle()
.stroke(color, lineWidth: 2)
.frame(width: eyeRect.width, height: eyeRect.height)
.position(x: eyeRect.midX, y: eyeRect.midY)
// Pupil marker (red dot)
Circle()
.fill(Color.red)
.frame(width: 8, height: 8)
.position(x: pupilPoint.x, y: pupilPoint.y)
// Crosshair at pupil position
Path { path in
path.move(to: CGPoint(x: pupilPoint.x - 6, y: pupilPoint.y))
path.addLine(to: CGPoint(x: pupilPoint.x + 6, y: pupilPoint.y))
path.move(to: CGPoint(x: pupilPoint.x, y: pupilPoint.y - 6))
path.addLine(to: CGPoint(x: pupilPoint.x, y: pupilPoint.y + 6))
}
.stroke(Color.red, lineWidth: 1)
// Label
Text(label)
.font(.system(size: 10, weight: .bold))
.foregroundStyle(color)
.position(x: eyeRect.minX + 8, y: eyeRect.minY - 8)
// Debug: Show raw coordinates
Text("\(label): (\(Int(pupilPosition.x)), \(Int(pupilPosition.y)))")
.font(.system(size: 8, design: .monospaced))
.foregroundStyle(.white)
.background(.black.opacity(0.7))
.position(x: eyeRect.midX, y: eyeRect.maxY + 10)
}
}
}
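// Worked example of the transform above (numbers invented): in a 640pt-wide
// raw image, an eye rect at (x: 100, y: 50, w: 40, h: 20) mirrors around its
// trailing edge, so its screen x uses rawImageWidth - (x + w) = 640 - 140 = 500.
// Points mirror directly: a pupil at local (10, 8) has global raw x = 110,
// giving mirrored x = 640 - 110 = 530 before scaling and offset are applied.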
#Preview {
ZStack {
Color.black
PupilOverlayView(eyeTrackingService: EyeTrackingService.shared)
}
.frame(width: 400, height: 300)
}

View File

@@ -0,0 +1,201 @@
//
// SmartModeSetupContent.swift
// Gaze
//
// Created by Mike Freno on 1/30/26.
//
import SwiftUI
struct SmartModeSetupContent: View {
@Bindable var settingsManager: SettingsManager
@State private var permissionManager = ScreenCapturePermissionManager.shared
let presentation: SetupPresentation
private var iconSize: CGFloat {
presentation.isCard ? AdaptiveLayout.Font.cardIconSmall : AdaptiveLayout.Font.cardIcon
}
private var sectionCornerRadius: CGFloat {
presentation.isCard ? 10 : 12
}
private var sectionPadding: CGFloat {
presentation.isCard ? 10 : 16
}
private var sectionSpacing: CGFloat {
presentation.isCard ? 8 : 12
}
var body: some View {
VStack(spacing: presentation.isCard ? 10 : 24) {
if presentation.isCard {
Image(systemName: "brain.fill")
.font(.system(size: iconSize))
.foregroundStyle(.purple)
Text("Smart Mode")
.font(.title2)
.fontWeight(.bold)
}
Text("Automatically manage timers based on your activity")
.font(.subheadline)
.foregroundStyle(.secondary)
.multilineTextAlignment(.center)
if presentation.isCard {
Spacer(minLength: 0)
}
VStack(spacing: sectionSpacing) {
fullscreenSection
idleSection
#if DEBUG
usageTrackingSection
#endif
}
.frame(maxWidth: presentation.isCard ? .infinity : 600)
if presentation.isCard {
Spacer(minLength: 0)
}
}
}
private var fullscreenSection: some View {
VStack(alignment: .leading, spacing: 12) {
HStack {
VStack(alignment: .leading, spacing: 4) {
HStack {
Image(systemName: "arrow.up.left.and.arrow.down.right")
.foregroundStyle(.blue)
Text("Auto-pause on Fullscreen")
.font(presentation.isCard ? .subheadline : .headline)
}
Text("Timers will automatically pause when you enter fullscreen mode (videos, games, presentations)")
.font(.caption)
.foregroundStyle(.secondary)
}
Spacer()
Toggle("", isOn: $settingsManager.settings.smartMode.autoPauseOnFullscreen)
.labelsHidden()
.controlSize(presentation.isCard ? .small : .regular)
.onChange(of: settingsManager.settings.smartMode.autoPauseOnFullscreen) { _, newValue in
if newValue {
permissionManager.requestAuthorizationIfNeeded()
}
}
}
if settingsManager.settings.smartMode.autoPauseOnFullscreen,
permissionManager.authorizationStatus != .authorized
{
permissionWarningView
}
}
.padding(sectionPadding)
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: sectionCornerRadius))
}
private var permissionWarningView: some View {
VStack(alignment: .leading, spacing: 8) {
Label(
permissionManager.authorizationStatus == .denied
? "Screen Recording permission required"
: "Grant Screen Recording access",
systemImage: "exclamationmark.shield"
)
.foregroundStyle(.orange)
Text("macOS requires Screen Recording permission to detect other apps in fullscreen.")
.font(.caption)
.foregroundStyle(.secondary)
HStack {
Button("Grant Access") {
permissionManager.requestAuthorizationIfNeeded()
permissionManager.openSystemSettings()
}
.buttonStyle(.bordered)
.controlSize(presentation.isCard ? .small : .regular)
Button("Open Settings") {
permissionManager.openSystemSettings()
}
.buttonStyle(.borderless)
}
.font(.caption)
.padding(.top, 4)
}
.padding(.top, 8)
}
private var idleSection: some View {
VStack(alignment: .leading, spacing: 12) {
HStack {
VStack(alignment: .leading, spacing: 4) {
HStack {
Image(systemName: "moon.zzz.fill")
.foregroundStyle(.indigo)
Text("Auto-pause on Idle")
.font(presentation.isCard ? .subheadline : .headline)
}
Text("Timers will pause when you're inactive for more than the threshold below")
.font(.caption)
.foregroundStyle(.secondary)
}
Spacer()
Toggle("", isOn: $settingsManager.settings.smartMode.autoPauseOnIdle)
.labelsHidden()
.controlSize(presentation.isCard ? .small : .regular)
}
if settingsManager.settings.smartMode.autoPauseOnIdle {
ThresholdSlider(
label: "Idle Threshold:",
value: $settingsManager.settings.smartMode.idleThresholdMinutes,
range: 1...30,
unit: "min"
)
}
}
.padding(sectionPadding)
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: sectionCornerRadius))
}
private var usageTrackingSection: some View {
VStack(alignment: .leading, spacing: 12) {
HStack {
VStack(alignment: .leading, spacing: 4) {
HStack {
Image(systemName: "chart.line.uptrend.xyaxis")
.foregroundStyle(.green)
Text("Track Usage Statistics")
.font(presentation.isCard ? .subheadline : .headline)
}
Text("Monitor active and idle time, with automatic reset after the specified duration")
.font(.caption)
.foregroundStyle(.secondary)
}
Spacer()
Toggle("", isOn: $settingsManager.settings.smartMode.trackUsage)
.labelsHidden()
.controlSize(presentation.isCard ? .small : .regular)
}
if settingsManager.settings.smartMode.trackUsage {
ThresholdSlider(
label: "Reset After:",
value: $settingsManager.settings.smartMode.usageResetAfterMinutes,
range: 15...240,
step: 15,
unit: "min"
)
}
}
.padding(sectionPadding)
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: sectionCornerRadius))
}
}
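// ScreenCapturePermissionManager is internal to the app; the standard macOS
// check such a manager typically wraps is the CoreGraphics preflight/request
// pair (an assumption here, not confirmed by this diff):
//
//     import CoreGraphics
//     CGPreflightScreenCaptureAccess()   // true if Screen Recording granted
//     CGRequestScreenCaptureAccess()     // shows the system prompt if needed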

View File

@@ -0,0 +1,39 @@
//
// ThresholdSlider.swift
// Gaze
//
// Created by Mike Freno on 1/30/26.
//
import SwiftUI
struct ThresholdSlider: View {
let label: String
@Binding var value: Int
let range: ClosedRange<Int>
var step: Int = 1
let unit: String
var body: some View {
VStack(alignment: .leading, spacing: 8) {
HStack {
Text(label)
.font(.subheadline)
Spacer()
Text("\(value) \(unit)")
.font(.subheadline)
.foregroundStyle(.secondary)
}
Slider(
value: Binding(
get: { Double(value) },
set: { value = Int($0) }
),
in: Double(range.lowerBound)...Double(range.upperBound),
step: Double(step)
)
}
.padding(.top, 8)
}
}
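// SwiftUI's Slider works in Double, so the view above bridges its Int binding
// through a get/set Binding, truncating back to Int on write. A minimal call
// site (state name hypothetical), matching SmartModeSetupContent above:
//
//     @State private var idleMinutes = 5
//     ...
//     ThresholdSlider(label: "Idle Threshold:", value: $idleMinutes,
//                     range: 1...30, unit: "min")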

View File

@@ -5,6 +5,7 @@
// Created by Mike Freno on 1/18/26.
//
import AVFoundation
import SwiftUI
struct AdditionalModifiersView: View {
@@ -12,6 +13,10 @@ struct AdditionalModifiersView: View {
@State private var frontCardIndex: Int = 0
@State private var dragOffset: CGFloat = 0
@State private var isDragging: Bool = false
@State private var isTestModeActive = false
@State private var cachedPreviewLayer: AVCaptureVideoPreviewLayer?
@State private var isProcessingToggle = false
@ObservedObject var cameraService = CameraAccessService.shared
@Environment(\.isCompactLayout) private var isCompact
private var backCardOffset: CGFloat { isCompact ? 20 : AdaptiveLayout.Card.backOffset }
@@ -50,15 +55,38 @@ struct AdditionalModifiersView: View {
ZStack {
#if DEBUG
cardView(for: 0, width: cardWidth, height: cardHeight)
.zIndex(zIndex(for: 0))
.scaleEffect(scale(for: 0))
.offset(x: xOffset(for: 0), y: yOffset(for: 0))
setupCard(
presentation: .card,
content:
EnforceModeSetupContent(
settingsManager: settingsManager,
presentation: .card,
isTestModeActive: $isTestModeActive,
cachedPreviewLayer: $cachedPreviewLayer,
isProcessingToggle: isProcessingToggle,
handleEnforceModeToggle: { enabled in
if enabled {
Task { @MainActor in
try await cameraService.requestCameraAccess()
}
}
}
),
width: cardWidth,
height: cardHeight,
index: 0
)
#endif
cardView(for: 1, width: cardWidth, height: cardHeight)
.zIndex(zIndex(for: 1))
.scaleEffect(scale(for: 1))
.offset(x: xOffset(for: 1), y: yOffset(for: 1))
setupCard(
presentation: .card,
content: SmartModeSetupContent(
settingsManager: settingsManager,
presentation: .card
),
width: cardWidth,
height: cardHeight,
index: 1
)
}
.padding(isCompact ? 12 : 20)
.gesture(dragGesture)
@@ -198,226 +226,28 @@ struct AdditionalModifiersView: View {
// MARK: - Card Views
@ViewBuilder
private func cardView(for index: Int, width: CGFloat, height: CGFloat) -> some View {
private func setupCard(
presentation: SetupPresentation,
content: some View,
width: CGFloat,
height: CGFloat,
index: Int
) -> some View {
ZStack {
RoundedRectangle(cornerRadius: 16)
.fill(Color(NSColor.windowBackgroundColor))
.shadow(color: Color.black.opacity(0.2), radius: 10, x: 0, y: 4)
Group {
if index == 0 {
enforceModeContent
} else {
smartModeContent
}
}
ScrollView {
content
.padding(isCompact ? 12 : 20)
}
.frame(maxWidth: .infinity, maxHeight: .infinity, alignment: .center)
}
.frame(width: width, height: height)
}
@ObservedObject var cameraService = CameraAccessService.shared
private var enforceModeContent: some View {
VStack(spacing: isCompact ? 10 : 16) {
Image(systemName: "video.fill")
.font(
.system(
size: isCompact
? AdaptiveLayout.Font.cardIconSmall : AdaptiveLayout.Font.cardIcon)
)
.foregroundStyle(Color.accentColor)
Text("Enforce Mode")
.font(isCompact ? .headline : .title2)
.fontWeight(.bold)
if !cameraService.hasCameraHardware {
Text("Camera hardware not detected")
.font(isCompact ? .caption : .subheadline)
.foregroundStyle(.orange)
.multilineTextAlignment(.center)
} else {
Text("Use your camera to ensure you take breaks")
.font(isCompact ? .caption : .subheadline)
.foregroundStyle(.secondary)
.multilineTextAlignment(.center)
}
Spacer()
VStack(spacing: isCompact ? 10 : 16) {
HStack {
VStack(alignment: .leading, spacing: 2) {
Text("Enable Enforce Mode")
.font(isCompact ? .subheadline : .headline)
if !cameraService.hasCameraHardware {
Text("No camera hardware detected")
.font(.caption2)
.foregroundStyle(.orange)
} else {
Text("Camera activates before lookaway reminders")
.font(.caption2)
.foregroundStyle(.secondary)
}
}
Spacer()
Toggle(
"",
isOn: Binding(
get: {
settingsManager.isTimerEnabled(for: .lookAway)
|| settingsManager.isTimerEnabled(for: .blink)
|| settingsManager.isTimerEnabled(for: .posture)
},
set: { newValue in
if newValue {
Task { @MainActor in
try await cameraService.requestCameraAccess()
}
}
}
)
)
.labelsHidden()
.disabled(!cameraService.hasCameraHardware)
.controlSize(isCompact ? .small : .regular)
}
.padding(isCompact ? 10 : 16)
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
HStack {
VStack(alignment: .leading, spacing: 2) {
Text("Camera Access")
.font(isCompact ? .subheadline : .headline)
if !cameraService.hasCameraHardware {
Label("No camera", systemImage: "xmark.circle.fill")
.font(.caption2)
.foregroundStyle(.orange)
} else if cameraService.isCameraAuthorized {
Label("Authorized", systemImage: "checkmark.circle.fill")
.font(.caption2)
.foregroundStyle(.green)
} else if let error = cameraService.cameraError {
Label(
error.localizedDescription,
systemImage: "exclamationmark.triangle.fill"
)
.font(.caption2)
.foregroundStyle(.orange)
} else {
Label("Not authorized", systemImage: "xmark.circle.fill")
.font(.caption2)
.foregroundStyle(.secondary)
}
}
Spacer()
if !cameraService.isCameraAuthorized {
Button("Request Access") {
Task { @MainActor in
do {
try await cameraService.requestCameraAccess()
} catch {
print("Camera access failed: \(error.localizedDescription)")
}
}
}
.buttonStyle(.bordered)
.controlSize(isCompact ? .small : .regular)
}
}
.padding(isCompact ? 10 : 16)
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
}
Spacer()
}
}
private var smartModeContent: some View {
VStack(spacing: isCompact ? 10 : 16) {
Image(systemName: "brain.fill")
.font(
.system(
size: isCompact
? AdaptiveLayout.Font.cardIconSmall : AdaptiveLayout.Font.cardIcon)
)
.foregroundStyle(.purple)
Text("Smart Mode")
.font(isCompact ? .headline : .title2)
.fontWeight(.bold)
Text("Automatically manage timers based on activity")
.font(isCompact ? .caption : .subheadline)
.foregroundStyle(.secondary)
.multilineTextAlignment(.center)
Spacer()
VStack(spacing: isCompact ? 8 : 12) {
smartModeToggle(
icon: "arrow.up.left.and.arrow.down.right",
iconColor: .blue,
title: "Auto-pause on Fullscreen",
subtitle: "Pause during videos, games, presentations",
isOn: $settingsManager.settings.smartMode.autoPauseOnFullscreen
)
smartModeToggle(
icon: "moon.zzz.fill",
iconColor: .indigo,
title: "Auto-pause on Idle",
subtitle: "Pause when you're inactive",
isOn: $settingsManager.settings.smartMode.autoPauseOnIdle
)
#if DEBUG
smartModeToggle(
icon: "chart.line.uptrend.xyaxis",
iconColor: .green,
title: "Track Usage Statistics",
subtitle: "Monitor active and idle time",
isOn: $settingsManager.settings.smartMode.trackUsage
)
#endif
}
Spacer()
}
}
@ViewBuilder
private func smartModeToggle(
icon: String, iconColor: Color, title: String, subtitle: String, isOn: Binding<Bool>
) -> some View {
HStack {
Image(systemName: icon)
.foregroundStyle(iconColor)
.frame(width: isCompact ? 20 : 24)
VStack(alignment: .leading, spacing: 1) {
Text(title)
.font(isCompact ? .caption : .subheadline)
.fontWeight(.medium)
Text(subtitle)
.font(.caption2)
.foregroundStyle(.secondary)
.lineLimit(1)
}
Spacer()
Toggle("", isOn: isOn)
.labelsHidden()
.controlSize(.small)
}
.padding(.horizontal, isCompact ? 8 : 12)
.padding(.vertical, isCompact ? 6 : 10)
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 10))
.zIndex(zIndex(for: index))
.scaleEffect(scale(for: index))
.offset(x: xOffset(for: index), y: yOffset(for: index))
}
// MARK: - Gestures & Navigation
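The hunk above collapses two hand-rolled card bodies into one generic `setupCard(presentation:content:width:height:index:)` container that applies the shared chrome, scrolling, and deck transforms. A minimal sketch of that shape, with illustrative constants standing in for the real `zIndex(for:)`, `scale(for:)`, `xOffset(for:)`, and `yOffset(for:)` helpers:

```swift
import AppKit
import SwiftUI

/// Minimal sketch of the generic card container introduced above.
/// The deck transforms below are illustrative constants; the real view
/// derives them from the current index and drag state.
struct CardDeckSketch: View {
    @State private var currentIndex = 0

    var body: some View {
        ZStack {
            setupCard(content: Text("Enforce Mode"), index: 0)
            setupCard(content: Text("Smart Mode"), index: 1)
        }
        .padding(20)
    }

    private func setupCard(content: some View, index: Int) -> some View {
        ZStack {
            RoundedRectangle(cornerRadius: 16)
                .fill(Color(NSColor.windowBackgroundColor))
                .shadow(color: .black.opacity(0.2), radius: 10, x: 0, y: 4)
            // Scrolling keeps long setup content from overflowing the card.
            ScrollView { content.padding(20) }
        }
        .frame(width: 320, height: 420)
        // Front card sits on top at full size; the back card recedes.
        .zIndex(index == currentIndex ? 1 : 0)
        .scaleEffect(index == currentIndex ? 1.0 : 0.9)
        .offset(x: index == currentIndex ? 0 : 20)
    }
}
```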

View File

@@ -76,7 +76,10 @@ final class MenuBarGuideOverlayPresenter {
private func startCheckTimer() {
checkTimer?.invalidate()
checkTimer = Timer.scheduledTimer(withTimeInterval: 0.01, repeats: true) { [weak self] _ in
self?.checkWindowFrame()
guard let self else { return }
Task { @MainActor in
self.checkWindowFrame()
}
}
}
@@ -129,7 +132,10 @@ final class MenuBarGuideOverlayPresenter {
// Set up KVO for window frame changes
onboardingWindowObserver = onboardingWindow.observe(\.frame, options: [.new, .old]) {
[weak self] _, _ in
self?.checkWindowFrame()
guard let self else { return }
Task { @MainActor in
self.checkWindowFrame()
}
}
// Add observer for when the onboarding window is closed
@@ -145,7 +151,10 @@ final class MenuBarGuideOverlayPresenter {
}
// Hide the overlay when onboarding window closes
self?.hide()
guard let self else { return }
Task { @MainActor in
self.hide()
}
}
}
}
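All three callbacks in this file get the same treatment: Timer and KVO blocks run outside the main actor, so the isolated method is reached through an explicit `Task { @MainActor in ... }` after unwrapping `self`. The pattern in isolation, as a sketch:

```swift
import Foundation

/// The hop applied to the callbacks above: Timer and KVO blocks are not
/// actor-isolated, so main-actor state is reached via an explicit Task.
@MainActor
final class OverlayPresenterSketch {
    private var checkTimer: Timer?

    func startCheckTimer() {
        checkTimer?.invalidate()
        checkTimer = Timer.scheduledTimer(withTimeInterval: 0.01, repeats: true) { [weak self] _ in
            // Nonisolated context here: hop back to the main actor
            // before calling the isolated method.
            guard let self else { return }
            Task { @MainActor in
                self.checkWindowFrame()
            }
        }
    }

    private func checkWindowFrame() {
        // ...compare the tracked window's frame against the overlay...
    }
}
```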

View File

@@ -79,7 +79,7 @@ struct SettingsWindowView: View {
BlinkSetupView(settingsManager: settingsManager)
case .posture:
PostureSetupView(settingsManager: settingsManager)
#if ENFORCE_READY
#if DEBUG
case .enforceMode:
EnforceModeSetupView(settingsManager: settingsManager)
#endif

View File

@@ -12,15 +12,23 @@ import SwiftUI
struct LookAwayReminderView: View {
let countdownSeconds: Int
var onDismiss: () -> Void
var enforceModeService: EnforceModeService?
@State private var remainingSeconds: Int
@State private var remainingTime: TimeInterval
@State private var timer: Timer?
@State private var keyMonitor: Any?
init(countdownSeconds: Int, onDismiss: @escaping () -> Void) {
init(
countdownSeconds: Int,
enforceModeService: EnforceModeService? = nil,
onDismiss: @escaping () -> Void
) {
self.countdownSeconds = countdownSeconds
self.enforceModeService = enforceModeService
self.onDismiss = onDismiss
self._remainingSeconds = State(initialValue: countdownSeconds)
self._remainingTime = State(initialValue: TimeInterval(countdownSeconds))
}
var body: some View {
@@ -100,15 +108,21 @@ struct LookAwayReminderView: View {
}
private var progress: CGFloat {
CGFloat(remainingSeconds) / CGFloat(countdownSeconds)
CGFloat(remainingTime) / CGFloat(countdownSeconds)
}
private func startCountdown() {
let timer = Timer(timeInterval: 1.0, repeats: true) { [self] _ in
if remainingSeconds > 0 {
remainingSeconds -= 1
} else {
let tickInterval: TimeInterval = 0.25
let timer = Timer(timeInterval: tickInterval, repeats: true) { [self] _ in
guard remainingTime > 0 else {
dismiss()
return
}
let shouldAdvance = enforceModeService?.shouldAdvanceLookAwayCountdown() ?? true
if shouldAdvance {
remainingTime = max(0, remainingTime - tickInterval)
remainingSeconds = max(0, Int(ceil(remainingTime)))
}
}
RunLoop.current.add(timer, forMode: .common)
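The countdown now ticks at 0.25 s granularity and only drains while `shouldAdvanceLookAwayCountdown()` permits it, with `remainingSeconds` kept as the rounded-up value for display. A reduced model of that logic, with the enforce service stubbed by a closure:

```swift
import Foundation

/// Reduced model of the reworked countdown: it ticks every 0.25 s but
/// only drains while shouldAdvance() says so (in the app, while the
/// enforce service reports the user is actually looking away).
final class GatedCountdownSketch {
    private(set) var remainingTime: TimeInterval
    private(set) var remainingSeconds: Int
    private let shouldAdvance: () -> Bool
    private var timer: Timer?

    init(countdownSeconds: Int, shouldAdvance: @escaping () -> Bool) {
        self.remainingTime = TimeInterval(countdownSeconds)
        self.remainingSeconds = countdownSeconds
        self.shouldAdvance = shouldAdvance
    }

    func start(onFinish: @escaping () -> Void) {
        let tickInterval: TimeInterval = 0.25
        let timer = Timer(timeInterval: tickInterval, repeats: true) { [weak self] t in
            guard let self else { t.invalidate(); return }
            guard self.remainingTime > 0 else {
                t.invalidate()
                onFinish()
                return
            }
            if self.shouldAdvance() {
                self.remainingTime = max(0, self.remainingTime - tickInterval)
                // Rounded-up whole seconds drive the visible label.
                self.remainingSeconds = max(0, Int(ceil(self.remainingTime)))
            }
        }
        RunLoop.current.add(timer, forMode: .common)
        self.timer = timer
    }
}
```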

View File

@@ -6,24 +6,16 @@
//
import AVFoundation
import Foundation
import SwiftUI
struct EnforceModeSetupView: View {
@Bindable var settingsManager: SettingsManager
@ObservedObject var cameraService = CameraAccessService.shared
@ObservedObject var eyeTrackingService = EyeTrackingService.shared
@ObservedObject var enforceModeService = EnforceModeService.shared
@Environment(\.isCompactLayout) private var isCompact
@State private var isProcessingToggle = false
@State private var isTestModeActive = false
@State private var cachedPreviewLayer: AVCaptureVideoPreviewLayer?
@State private var showDebugView = false
@State private var isViewActive = false
@State private var showAdvancedSettings = false
@State private var showCalibrationWindow = false
@ObservedObject var calibratorService = CalibratorService.shared
private var cameraHardwareAvailable: Bool {
cameraService.hasCameraHardware
@@ -33,354 +25,35 @@ struct EnforceModeSetupView: View {
VStack(spacing: 0) {
SetupHeader(icon: "video.fill", title: "Enforce Mode", color: .accentColor)
Spacer()
VStack(spacing: isCompact ? 16 : 30) {
Text("Use your camera to ensure you take breaks")
.font(isCompact ? .subheadline : .title3)
.foregroundStyle(.secondary)
.multilineTextAlignment(.center)
VStack(spacing: isCompact ? 12 : 20) {
HStack {
VStack(alignment: .leading, spacing: 2) {
Text("Enable Enforce Mode")
.font(isCompact ? .subheadline : .headline)
if !cameraHardwareAvailable {
Text("No camera hardware detected")
.font(.caption2)
.foregroundStyle(.orange)
} else {
Text("Camera activates 3 seconds before lookaway reminders")
.font(.caption2)
.foregroundStyle(.secondary)
}
}
Spacer()
Toggle(
"",
isOn: Binding(
get: {
settingsManager.isTimerEnabled(for: .lookAway) ||
settingsManager.isTimerEnabled(for: .blink) ||
settingsManager.isTimerEnabled(for: .posture)
},
set: { newValue in
print("🎛️ Toggle changed to: \(newValue)")
EnforceModeSetupContent(
settingsManager: settingsManager,
presentation: .window,
isTestModeActive: $isTestModeActive,
cachedPreviewLayer: $cachedPreviewLayer,
isProcessingToggle: isProcessingToggle,
handleEnforceModeToggle: { enabled in
print("🎛️ Toggle changed to: \(enabled)")
guard !isProcessingToggle else {
print("⚠️ Already processing toggle")
return
}
handleEnforceModeToggle(enabled: newValue)
handleEnforceModeToggle(enabled: enabled)
}
)
)
.labelsHidden()
.disabled(isProcessingToggle || !cameraHardwareAvailable)
.controlSize(isCompact ? .small : .regular)
}
.padding(isCompact ? 10 : 16)
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
.padding(.top, 20)
cameraStatusView
if enforceModeService.isEnforceModeEnabled {
testModeButton
}
if isTestModeActive && enforceModeService.isCameraActive {
testModePreviewView
trackingConstantsView
} else if enforceModeService.isCameraActive && !isTestModeActive {
eyeTrackingStatusView
trackingConstantsView
}
privacyInfoView
}
}
Spacer()
Spacer(minLength: 0)
}
.frame(maxWidth: .infinity, maxHeight: .infinity)
.padding()
.background(.clear)
.onAppear {
isViewActive = true
}
.onDisappear {
isViewActive = false
// If the view disappeared and camera is still active, stop it
if enforceModeService.isCameraActive {
print("👁️ EnforceModeSetupView disappeared, stopping camera preview")
enforceModeService.stopCamera()
}
}
}
private var testModeButton: some View {
Button(action: {
Task { @MainActor in
if isTestModeActive {
enforceModeService.stopTestMode()
isTestModeActive = false
cachedPreviewLayer = nil
} else {
await enforceModeService.startTestMode()
isTestModeActive = enforceModeService.isCameraActive
if isTestModeActive {
cachedPreviewLayer = eyeTrackingService.previewLayer
}
}
}
}) {
HStack {
Image(systemName: isTestModeActive ? "stop.circle.fill" : "play.circle.fill")
.font(.title3)
Text(isTestModeActive ? "Stop Test" : "Test Tracking")
.font(.headline)
}
.frame(maxWidth: .infinity)
.padding()
}
.buttonStyle(.borderedProminent)
.controlSize(.large)
}
private var calibrationSection: some View {
VStack(alignment: .leading, spacing: 12) {
HStack {
Image(systemName: "target")
.font(.title3)
.foregroundStyle(.blue)
Text("Eye Tracking Calibration")
.font(.headline)
}
if calibratorService.calibrationData.isComplete {
VStack(alignment: .leading, spacing: 8) {
Text(calibratorService.getCalibrationSummary())
.font(.caption)
.foregroundStyle(.secondary)
if calibratorService.needsRecalibration() {
Label(
"Calibration expired - recalibration recommended",
systemImage: "exclamationmark.triangle.fill"
)
.font(.caption)
.foregroundStyle(.orange)
} else {
Label("Calibration active and valid", systemImage: "checkmark.circle.fill")
.font(.caption)
.foregroundStyle(.green)
}
}
} else {
Text("Not calibrated - using default thresholds")
.font(.caption)
.foregroundStyle(.secondary)
}
Button(action: {
showCalibrationWindow = true
}) {
HStack {
Image(systemName: "target")
Text(
calibratorService.calibrationData.isComplete
? "Recalibrate" : "Run Calibration")
}
.frame(maxWidth: .infinity)
.padding(.vertical, 8)
}
.buttonStyle(.bordered)
.controlSize(.regular)
}
.padding()
.glassEffectIfAvailable(
GlassStyle.regular.tint(.blue.opacity(0.1)), in: .rect(cornerRadius: 12)
)
.sheet(isPresented: $showCalibrationWindow) {
EyeTrackingCalibrationView()
}
}
private var testModePreviewView: some View {
VStack(spacing: 16) {
let lookingAway = !eyeTrackingService.userLookingAtScreen
let borderColor: NSColor = lookingAway ? .systemGreen : .systemRed
// Cache the preview layer to avoid recreating it
let previewLayer = eyeTrackingService.previewLayer ?? cachedPreviewLayer
if let layer = previewLayer {
ZStack {
CameraPreviewView(previewLayer: layer, borderColor: borderColor)
// Pupil detection overlay (drawn on video)
PupilOverlayView(eyeTrackingService: eyeTrackingService)
// Debug info overlay (top-right corner)
VStack {
HStack {
Spacer()
GazeOverlayView(eyeTrackingService: eyeTrackingService)
}
Spacer()
}
}
.frame(height: isCompact ? 200 : 300)
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
.onAppear {
if cachedPreviewLayer == nil {
cachedPreviewLayer = eyeTrackingService.previewLayer
}
}
/*VStack(alignment: .leading, spacing: 12) {*/
/*Text("Live Tracking Status")*/
/*.font(.headline)*/
/*HStack(spacing: 20) {*/
/*statusIndicator(*/
/*title: "Face Detected",*/
/*isActive: eyeTrackingService.faceDetected,*/
/*icon: "person.fill"*/
/*)*/
/*statusIndicator(*/
/*title: "Looking Away",*/
/*isActive: !eyeTrackingService.userLookingAtScreen,*/
/*icon: "arrow.turn.up.right"*/
/*)*/
/*}*/
/*Text(*/
/*lookingAway*/
/*? " Break compliance detected" : " Please look away from screen"*/
/*)*/
/*.font(.caption)*/
/*.foregroundStyle(lookingAway ? .green : .orange)*/
/*.frame(maxWidth: .infinity, alignment: .center)*/
/*.padding(.top, 4)*/
/*}*/
/*.padding()*/
/*.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))*/
}
}
}
private var cameraStatusView: some View {
HStack {
VStack(alignment: .leading, spacing: 4) {
Text("Camera Access")
.font(.headline)
if cameraService.isCameraAuthorized {
Label("Authorized", systemImage: "checkmark.circle.fill")
.font(.caption)
.foregroundStyle(.green)
} else if let error = cameraService.cameraError {
Label(error.localizedDescription, systemImage: "exclamationmark.triangle.fill")
.font(.caption)
.foregroundStyle(.orange)
} else {
Label("Not authorized", systemImage: "xmark.circle.fill")
.font(.caption)
.foregroundStyle(.secondary)
}
}
Spacer()
if !cameraService.isCameraAuthorized {
Button("Request Access") {
print("📷 Request Access button clicked")
Task { @MainActor in
do {
try await cameraService.requestCameraAccess()
print("✓ Camera access granted via button")
} catch {
print("⚠️ Camera access failed: \(error.localizedDescription)")
}
}
}
.buttonStyle(.bordered)
}
}
.padding()
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
}
private var eyeTrackingStatusView: some View {
VStack(alignment: .leading, spacing: 12) {
Text("Eye Tracking Status")
.font(.headline)
HStack(spacing: 20) {
statusIndicator(
title: "Face Detected",
isActive: eyeTrackingService.faceDetected,
icon: "person.fill"
)
statusIndicator(
title: "Looking Away",
isActive: !eyeTrackingService.userLookingAtScreen,
icon: "arrow.turn.up.right"
)
}
}
.padding()
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
}
private func statusIndicator(title: String, isActive: Bool, icon: String) -> some View {
VStack(spacing: 8) {
Image(systemName: icon)
.font(.title2)
.foregroundStyle(isActive ? .green : .secondary)
Text(title)
.font(.caption)
.foregroundStyle(.secondary)
.multilineTextAlignment(.center)
}
.frame(maxWidth: .infinity)
}
private var privacyInfoView: some View {
VStack(alignment: .leading, spacing: 12) {
HStack {
Image(systemName: "lock.shield.fill")
.font(.title3)
.foregroundStyle(.blue)
Text("Privacy Information")
.font(.headline)
}
VStack(alignment: .leading, spacing: 8) {
privacyBullet("All processing happens on-device")
privacyBullet("No images are stored or transmitted")
privacyBullet("Camera only active during lookaway reminders (3 second window)")
privacyBullet("You can always force quit with cmd+q")
}
.font(.caption)
.foregroundStyle(.secondary)
}
.padding()
.glassEffectIfAvailable(
GlassStyle.regular.tint(.blue.opacity(0.1)), in: .rect(cornerRadius: 12))
}
private func privacyBullet(_ text: String) -> some View {
HStack(alignment: .top, spacing: 8) {
Image(systemName: "checkmark")
.font(.caption2)
.foregroundStyle(.blue)
Text(text)
}
}
private func handleEnforceModeToggle(enabled: Bool) {
print("🎛️ handleEnforceModeToggle called with enabled: \(enabled)")
isProcessingToggle = true
@@ -411,232 +84,6 @@ struct EnforceModeSetupView: View {
}
}
}
private var trackingConstantsView: some View {
VStack(alignment: .leading, spacing: 16) {
HStack {
Text("Tracking Sensitivity")
.font(.headline)
Spacer()
Button(action: {
eyeTrackingService.enableDebugLogging.toggle()
}) {
Image(
systemName: eyeTrackingService.enableDebugLogging
? "ant.circle.fill" : "ant.circle"
)
.foregroundStyle(eyeTrackingService.enableDebugLogging ? .orange : .secondary)
}
.buttonStyle(.plain)
.help("Toggle console debug logging")
Button(showAdvancedSettings ? "Hide Settings" : "Show Settings") {
withAnimation {
showAdvancedSettings.toggle()
}
}
.buttonStyle(.bordered)
.controlSize(.small)
}
// Debug info always visible when tracking
VStack(alignment: .leading, spacing: 8) {
Text("Live Values:")
.font(.caption)
.fontWeight(.semibold)
.foregroundStyle(.secondary)
if let leftRatio = eyeTrackingService.debugLeftPupilRatio,
let rightRatio = eyeTrackingService.debugRightPupilRatio
{
HStack(spacing: 16) {
VStack(alignment: .leading, spacing: 2) {
Text("Left Pupil: \(String(format: "%.3f", leftRatio))")
.font(.caption2)
.foregroundStyle(
!EyeTrackingConstants.minPupilEnabled
&& !EyeTrackingConstants.maxPupilEnabled
? .secondary
: (leftRatio < EyeTrackingConstants.minPupilRatio
|| leftRatio > EyeTrackingConstants.maxPupilRatio)
? Color.orange : Color.green
)
Text("Right Pupil: \(String(format: "%.3f", rightRatio))")
.font(.caption2)
.foregroundStyle(
!EyeTrackingConstants.minPupilEnabled
&& !EyeTrackingConstants.maxPupilEnabled
? .secondary
: (rightRatio < EyeTrackingConstants.minPupilRatio
|| rightRatio > EyeTrackingConstants.maxPupilRatio)
? Color.orange : Color.green
)
}
Spacer()
VStack(alignment: .trailing, spacing: 2) {
Text(
"Range: \(String(format: "%.2f", EyeTrackingConstants.minPupilRatio)) - \(String(format: "%.2f", EyeTrackingConstants.maxPupilRatio))"
)
.font(.caption2)
.foregroundStyle(.secondary)
let bothEyesOut =
(leftRatio < EyeTrackingConstants.minPupilRatio
|| leftRatio > EyeTrackingConstants.maxPupilRatio)
&& (rightRatio < EyeTrackingConstants.minPupilRatio
|| rightRatio > EyeTrackingConstants.maxPupilRatio)
Text(bothEyesOut ? "Both Out ⚠️" : "In Range ✓")
.font(.caption2)
.foregroundStyle(bothEyesOut ? .orange : .green)
}
}
} else {
Text("Pupil data unavailable")
.font(.caption2)
.foregroundStyle(.secondary)
}
if let yaw = eyeTrackingService.debugYaw,
let pitch = eyeTrackingService.debugPitch
{
HStack(spacing: 16) {
VStack(alignment: .leading, spacing: 2) {
Text("Yaw: \(String(format: "%.3f", yaw))")
.font(.caption2)
.foregroundStyle(
!EyeTrackingConstants.yawEnabled
? .secondary
: abs(yaw) > EyeTrackingConstants.yawThreshold
? Color.orange : Color.green
)
Text("Pitch: \(String(format: "%.3f", pitch))")
.font(.caption2)
.foregroundStyle(
!EyeTrackingConstants.pitchUpEnabled
&& !EyeTrackingConstants.pitchDownEnabled
? .secondary
: (pitch > EyeTrackingConstants.pitchUpThreshold
|| pitch < EyeTrackingConstants.pitchDownThreshold)
? Color.orange : Color.green
)
}
Spacer()
VStack(alignment: .trailing, spacing: 2) {
Text(
"Yaw Max: \(String(format: "%.2f", EyeTrackingConstants.yawThreshold))"
)
.font(.caption2)
.foregroundStyle(.secondary)
Text(
"Pitch: \(String(format: "%.2f", EyeTrackingConstants.pitchDownThreshold)) to \(String(format: "%.2f", EyeTrackingConstants.pitchUpThreshold))"
)
.font(.caption2)
.foregroundStyle(.secondary)
}
}
}
}
.padding(.top, 4)
if showAdvancedSettings {
VStack(spacing: 16) {
// Display the current constant values
VStack(alignment: .leading, spacing: 8) {
Text("Current Threshold Values:")
.font(.caption)
.fontWeight(.semibold)
.foregroundStyle(.secondary)
HStack {
Text("Yaw Threshold:")
Spacer()
Text("\(String(format: "%.2f", EyeTrackingConstants.yawThreshold)) rad")
.foregroundStyle(.secondary)
}
HStack {
Text("Pitch Up Threshold:")
Spacer()
Text(
"\(String(format: "%.2f", EyeTrackingConstants.pitchUpThreshold)) rad"
)
.foregroundStyle(.secondary)
}
HStack {
Text("Pitch Down Threshold:")
Spacer()
Text(
"\(String(format: "%.2f", EyeTrackingConstants.pitchDownThreshold)) rad"
)
.foregroundStyle(.secondary)
}
HStack {
Text("Min Pupil Ratio:")
Spacer()
Text("\(String(format: "%.2f", EyeTrackingConstants.minPupilRatio))")
.foregroundStyle(.secondary)
}
HStack {
Text("Max Pupil Ratio:")
Spacer()
Text("\(String(format: "%.2f", EyeTrackingConstants.maxPupilRatio))")
.foregroundStyle(.secondary)
}
HStack {
Text("Eye Closed Threshold:")
Spacer()
Text(
"\(String(format: "%.3f", EyeTrackingConstants.eyeClosedThreshold))"
)
.foregroundStyle(.secondary)
}
}
.padding(.top, 8)
}
.padding(.top, 8)
}
}
.padding()
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
}
private var debugEyeTrackingView: some View {
VStack(alignment: .leading, spacing: 12) {
Text("Debug Eye Tracking Data")
.font(.headline)
.foregroundStyle(.blue)
VStack(alignment: .leading, spacing: 8) {
Text("Face Detected: \(eyeTrackingService.faceDetected ? "Yes" : "No")")
.font(.caption)
Text("Looking at Screen: \(eyeTrackingService.userLookingAtScreen ? "Yes" : "No")")
.font(.caption)
Text("Eyes Closed: \(eyeTrackingService.isEyesClosed ? "Yes" : "No")")
.font(.caption)
if eyeTrackingService.faceDetected {
Text("Yaw: 0.0")
.font(.caption)
Text("Roll: 0.0")
.font(.caption)
}
}
.font(.caption)
.foregroundStyle(.secondary)
}
.padding()
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 12))
}
}
#Preview {
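The net effect of this file's rewrite: the view shrinks to a header plus a shared `EnforceModeSetupContent`, rendered either in the onboarding card or the settings window depending on a `SetupPresentation` value. A sketch of that pattern; the `.card` and `.window` cases appear in the diff, everything else here is illustrative:

```swift
import SwiftUI

/// Sketch of the shared-content pattern this file now uses: one content
/// view, two hosts. The enum body and layout choices are assumptions.
enum SetupPresentationSketch {
    case card    // compact onboarding card
    case window  // full settings window
}

struct SharedSetupContentSketch: View {
    let presentation: SetupPresentationSketch

    var body: some View {
        VStack(spacing: presentation == .card ? 10 : 16) {
            Text("Enforce Mode")
                .font(presentation == .card ? .headline : .title2)
            // ...toggle, camera status, privacy info, sized per host...
        }
    }
}
```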

View File

@@ -54,7 +54,6 @@ struct LookAwaySetupView: View {
private func previewLookAway() {
guard let screen = NSScreen.main else { return }
let sizePercentage = settingsManager.settings.subtleReminderSize.percentage
let lookAwayIntervalMinutes = settingsManager.settings.lookAwayIntervalMinutes
PreviewWindowHelper.showPreview(on: screen) { dismiss in
LookAwayReminderView(countdownSeconds: lookAwayIntervalMinutes * 60, onDismiss: dismiss)
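`PreviewWindowHelper.showPreview(on:)` hands the content builder a dismiss closure, as the call above shows. Its implementation is not part of this diff; a hypothetical sketch of what such a helper could look like (the name matches the call site, the window configuration is assumed):

```swift
import AppKit
import SwiftUI

/// Hypothetical shape of PreviewWindowHelper.showPreview(on:content:).
/// Illustration only; the real helper is not shown in this diff.
enum PreviewWindowHelperSketch {
    static func showPreview<Content: View>(
        on screen: NSScreen,
        content: (() -> Void) -> Content
    ) {
        let window = NSWindow(
            contentRect: screen.frame,
            styleMask: [.borderless],
            backing: .buffered,
            defer: false
        )
        window.level = .screenSaver
        window.isOpaque = false
        window.backgroundColor = .clear
        // The dismiss closure retains the window, keeping it alive
        // until the hosted view asks to tear it down.
        let dismiss = { window.orderOut(nil) }
        window.contentView = NSHostingView(rootView: content(dismiss))
        window.makeKeyAndOrderFront(nil)
    }
}
```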

View File

@@ -9,201 +9,23 @@ import SwiftUI
struct SmartModeSetupView: View {
@Bindable var settingsManager: SettingsManager
@State private var permissionManager = ScreenCapturePermissionManager.shared
var body: some View {
VStack(spacing: 0) {
SetupHeader(icon: "brain.fill", title: "Smart Mode", color: .purple)
Text("Automatically manage timers based on your activity")
.font(.subheadline)
.foregroundStyle(.secondary)
.padding(.bottom, 30)
SmartModeSetupContent(
settingsManager: settingsManager,
presentation: .window
)
.padding(.top, 24)
Spacer()
VStack(spacing: 24) {
fullscreenSection
idleSection
#if DEBUG
usageTrackingSection
#endif
}
.frame(maxWidth: 600)
Spacer()
Spacer(minLength: 0)
}
.frame(maxWidth: .infinity, maxHeight: .infinity)
.padding()
.background(.clear)
}
private var fullscreenSection: some View {
VStack(alignment: .leading, spacing: 12) {
HStack {
VStack(alignment: .leading, spacing: 4) {
HStack {
Image(systemName: "arrow.up.left.and.arrow.down.right")
.foregroundStyle(.blue)
Text("Auto-pause on Fullscreen")
.font(.headline)
}
Text(
"Timers will automatically pause when you enter fullscreen mode (videos, games, presentations)"
)
.font(.caption)
.foregroundStyle(.secondary)
}
Spacer()
Toggle("", isOn: $settingsManager.settings.smartMode.autoPauseOnFullscreen)
.labelsHidden()
.onChange(of: settingsManager.settings.smartMode.autoPauseOnFullscreen) {
_, newValue in
if newValue {
permissionManager.requestAuthorizationIfNeeded()
}
}
}
if settingsManager.settings.smartMode.autoPauseOnFullscreen,
permissionManager.authorizationStatus != .authorized
{
permissionWarningView
}
}
.padding()
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 8))
}
private var permissionWarningView: some View {
VStack(alignment: .leading, spacing: 8) {
Label(
permissionManager.authorizationStatus == .denied
? "Screen Recording permission required"
: "Grant Screen Recording access",
systemImage: "exclamationmark.shield"
)
.foregroundStyle(.orange)
Text("macOS requires Screen Recording permission to detect other apps in fullscreen.")
.font(.caption)
.foregroundStyle(.secondary)
HStack {
Button("Grant Access") {
permissionManager.requestAuthorizationIfNeeded()
permissionManager.openSystemSettings()
}
.buttonStyle(.bordered)
Button("Open Settings") {
permissionManager.openSystemSettings()
}
.buttonStyle(.borderless)
}
.font(.caption)
.padding(.top, 4)
}
.padding(.top, 8)
}
private var idleSection: some View {
VStack(alignment: .leading, spacing: 12) {
HStack {
VStack(alignment: .leading, spacing: 4) {
HStack {
Image(systemName: "moon.zzz.fill")
.foregroundStyle(.indigo)
Text("Auto-pause on Idle")
.font(.headline)
}
Text("Timers will pause when you're inactive for more than the threshold below")
.font(.caption)
.foregroundStyle(.secondary)
}
Spacer()
Toggle("", isOn: $settingsManager.settings.smartMode.autoPauseOnIdle)
.labelsHidden()
}
if settingsManager.settings.smartMode.autoPauseOnIdle {
ThresholdSlider(
label: "Idle Threshold:",
value: $settingsManager.settings.smartMode.idleThresholdMinutes,
range: 1...30,
unit: "min"
)
}
}
.padding()
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 8))
}
private var usageTrackingSection: some View {
VStack(alignment: .leading, spacing: 12) {
HStack {
VStack(alignment: .leading, spacing: 4) {
HStack {
Image(systemName: "chart.line.uptrend.xyaxis")
.foregroundStyle(.green)
Text("Track Usage Statistics")
.font(.headline)
}
Text(
"Monitor active and idle time, with automatic reset after the specified duration"
)
.font(.caption)
.foregroundStyle(.secondary)
}
Spacer()
Toggle("", isOn: $settingsManager.settings.smartMode.trackUsage)
.labelsHidden()
}
if settingsManager.settings.smartMode.trackUsage {
ThresholdSlider(
label: "Reset After:",
value: $settingsManager.settings.smartMode.usageResetAfterMinutes,
range: 15...240,
step: 15,
unit: "min"
)
}
}
.padding()
.glassEffectIfAvailable(GlassStyle.regular, in: .rect(cornerRadius: 8))
}
}
struct ThresholdSlider: View {
let label: String
@Binding var value: Int
let range: ClosedRange<Int>
var step: Int = 1
let unit: String
var body: some View {
VStack(alignment: .leading, spacing: 8) {
HStack {
Text(label)
.font(.subheadline)
Spacer()
Text("\(value) \(unit)")
.font(.subheadline)
.foregroundStyle(.secondary)
}
Slider(
value: Binding(
get: { Double(value) },
set: { value = Int($0) }
),
in: Double(range.lowerBound)...Double(range.upperBound),
step: Double(step)
)
}
.padding(.top, 8)
}
}
#Preview {
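`ThresholdSlider` above bridges its `Int` binding to the `Double` value `Slider` requires through a derived `Binding`. That bridge in isolation:

```swift
import SwiftUI

/// The Int-to-Double bridge ThresholdSlider uses: Slider only binds to
/// a floating-point value, so the Int setting is adapted on the fly.
struct IntSliderSketch: View {
    @Binding var minutes: Int

    var body: some View {
        Slider(
            value: Binding(
                get: { Double(minutes) },
                set: { minutes = Int($0) }
            ),
            in: 1...30,
            step: 1
        )
    }
}
```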

View File

@@ -1,177 +0,0 @@
//
// PupilDetectorTests.swift
// GazeTests
//
// Created by Mike Freno on 1/16/26.
//
import CoreVideo
import Vision
import XCTest
@testable import Gaze
final class PupilDetectorTests: XCTestCase {
override func setUp() async throws {
// Reset the detector state
PupilDetector.cleanup()
}
func testCreateCGImageFromData() throws {
// Test basic image creation
let width = 50
let height = 50
var pixels = [UInt8](repeating: 128, count: width * height)
// Add some dark pixels for a "pupil"
for y in 20..<30 {
for x in 20..<30 {
pixels[y * width + x] = 10 // Very dark
}
}
// Save test image to verify
let pixelData = Data(pixels)
guard let provider = CGDataProvider(data: pixelData as CFData) else {
XCTFail("Failed to create CGDataProvider")
return
}
let cgImage = CGImage(
width: width,
height: height,
bitsPerComponent: 8,
bitsPerPixel: 8,
bytesPerRow: width,
space: CGColorSpaceCreateDeviceGray(),
bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.none.rawValue),
provider: provider,
decode: nil,
shouldInterpolate: false,
intent: .defaultIntent
)
XCTAssertNotNil(cgImage, "Should create CGImage from pixel data")
}
func testImageProcessingWithDarkPixels() throws {
// Test that imageProcessingOptimized produces dark pixels
let width = 60
let height = 40
// Create input with a dark circle (simulating pupil)
var input = [UInt8](repeating: 200, count: width * height) // Light background (like eye white)
// Add a dark ellipse in center (pupil)
let centerX = width / 2
let centerY = height / 2
for y in 0..<height {
for x in 0..<width {
let dx = x - centerX
let dy = y - centerY
if dx * dx + dy * dy < 100 { // Circle radius ~10
input[y * width + x] = 20 // Dark pupil
}
}
}
var output = [UInt8](repeating: 255, count: width * height)
let threshold = 50 // Same as default
// Call the actual processing function
input.withUnsafeMutableBufferPointer { inputPtr in
output.withUnsafeMutableBufferPointer { outputPtr in
// We can't call imageProcessingOptimized directly as it's private
// But we can verify by saving input for inspection
}
}
// Save the input for manual inspection
let inputData = Data(input)
let url = URL(fileURLWithPath: "/Users/mike/gaze/images/test_input_synthetic.png")
if let provider = CGDataProvider(data: inputData as CFData) {
if let cgImage = CGImage(
width: width,
height: height,
bitsPerComponent: 8,
bitsPerPixel: 8,
bytesPerRow: width,
space: CGColorSpaceCreateDeviceGray(),
bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.none.rawValue),
provider: provider,
decode: nil,
shouldInterpolate: false,
intent: .defaultIntent
) {
if let dest = CGImageDestinationCreateWithURL(
url as CFURL, "public.png" as CFString, 1, nil)
{
CGImageDestinationAddImage(dest, cgImage, nil)
CGImageDestinationFinalize(dest)
print("💾 Saved synthetic test input to: \(url.path)")
}
}
}
// Count dark pixels in input
let darkCount = input.filter { $0 < 50 }.count
print("📊 Input has \(darkCount) dark pixels (< 50)")
XCTAssertGreaterThan(darkCount, 0, "Input should have dark pixels for pupil")
}
func testFindPupilFromContoursWithSyntheticData() throws {
// Create synthetic binary image with a dark region
let width = 60
let height = 40
// All white except a dark blob
var binaryData = [UInt8](repeating: 255, count: width * height)
// Add dark region (0 = dark/pupil)
let centerX = 30
let centerY = 20
var darkPixelCount = 0
for y in 0..<height {
for x in 0..<width {
let dx = x - centerX
let dy = y - centerY
if dx * dx + dy * dy < 100 {
binaryData[y * width + x] = 0
darkPixelCount += 1
}
}
}
print("📊 Created synthetic image with \(darkPixelCount) dark pixels")
// Save for inspection
let binaryDataObj = Data(binaryData)
let url = URL(fileURLWithPath: "/Users/mike/gaze/images/test_binary_synthetic.png")
if let provider = CGDataProvider(data: binaryDataObj as CFData) {
if let cgImage = CGImage(
width: width,
height: height,
bitsPerComponent: 8,
bitsPerPixel: 8,
bytesPerRow: width,
space: CGColorSpaceCreateDeviceGray(),
bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.none.rawValue),
provider: provider,
decode: nil,
shouldInterpolate: false,
intent: .defaultIntent
) {
if let dest = CGImageDestinationCreateWithURL(
url as CFURL, "public.png" as CFString, 1, nil)
{
CGImageDestinationAddImage(dest, cgImage, nil)
CGImageDestinationFinalize(dest)
print("💾 Saved synthetic binary image to: \(url.path)")
}
}
}
XCTAssertGreaterThan(darkPixelCount, 10, "Should have enough dark pixels")
}
}
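The deleted tests repeat one inspection trick: wrapping a raw 8-bit grayscale buffer in a `CGImage` and writing it out through ImageIO for manual review. Distilled into a helper (a sketch, not code from the repository):

```swift
import CoreGraphics
import Foundation
import ImageIO

/// Wrap an 8-bit grayscale buffer in a CGImage and save it as a PNG,
/// mirroring the save-for-inspection blocks in the deleted tests.
func writeGrayscalePNG(pixels: [UInt8], width: Int, height: Int, to url: URL) -> Bool {
    precondition(pixels.count == width * height)
    guard let provider = CGDataProvider(data: Data(pixels) as CFData),
          let image = CGImage(
              width: width,
              height: height,
              bitsPerComponent: 8,
              bitsPerPixel: 8,
              bytesPerRow: width,
              space: CGColorSpaceCreateDeviceGray(),
              bitmapInfo: CGBitmapInfo(rawValue: CGImageAlphaInfo.none.rawValue),
              provider: provider,
              decode: nil,
              shouldInterpolate: false,
              intent: .defaultIntent
          ),
          let dest = CGImageDestinationCreateWithURL(
              url as CFURL, "public.png" as CFString, 1, nil)
    else { return false }
    CGImageDestinationAddImage(dest, image, nil)
    return CGImageDestinationFinalize(dest)
}
```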

View File

@@ -1,331 +0,0 @@
//
// VideoGazeTests.swift
// GazeTests
//
// Created by Mike Freno on 1/16/26.
//
import AVFoundation
import Vision
import XCTest
@testable import Gaze
final class VideoGazeTests: XCTestCase {
var logLines: [String] = []
private func log(_ message: String) {
logLines.append(message)
}
private func attachLogs() {
let attachment = XCTAttachment(string: logLines.joined(separator: "\n"))
attachment.name = "Test Logs"
attachment.lifetime = .keepAlways
add(attachment)
}
/// Process the outer video (looking away from screen) - should detect "looking away"
func testOuterVideoGazeDetection() async throws {
logLines = []
let projectPath = "/Users/mike/Code/Gaze/GazeTests/video-test-outer.mp4"
guard FileManager.default.fileExists(atPath: projectPath) else {
XCTFail("Video file not found at: \(projectPath)")
return
}
let stats = try await processVideo(
at: URL(fileURLWithPath: projectPath), expectLookingAway: true)
// For outer video, most frames should detect gaze outside center
let nonCenterRatio =
Double(stats.nonCenterFrames) / Double(max(1, stats.pupilDetectedFrames))
log(
"🎯 OUTER video: \(String(format: "%.1f%%", nonCenterRatio * 100)) frames detected as non-center (expected: >50%)"
)
log(
" H-range: \(String(format: "%.3f", stats.minH)) to \(String(format: "%.3f", stats.maxH))"
)
log(
" V-range: \(String(format: "%.3f", stats.minV)) to \(String(format: "%.3f", stats.maxV))"
)
log(
" Face width: \(String(format: "%.3f", stats.avgFaceWidth)) (range: \(String(format: "%.3f", stats.minFaceWidth))-\(String(format: "%.3f", stats.maxFaceWidth)))"
)
attachLogs()
// At least 50% should be detected as non-center when looking away
XCTAssertGreaterThan(
nonCenterRatio, 0.5,
"Looking away video should have >50% non-center detections. Log:\n\(logLines.joined(separator: "\n"))"
)
}
/// Process the inner video (looking at screen) - should detect "looking at screen"
func testInnerVideoGazeDetection() async throws {
logLines = []
let projectPath = "/Users/mike/Code/Gaze/GazeTests/video-test-inner.mp4"
guard FileManager.default.fileExists(atPath: projectPath) else {
XCTFail("Video file not found at: \(projectPath)")
return
}
let stats = try await processVideo(
at: URL(fileURLWithPath: projectPath), expectLookingAway: false)
// For inner video, most frames should detect gaze at center
let centerRatio = Double(stats.centerFrames) / Double(max(1, stats.pupilDetectedFrames))
log(
"🎯 INNER video: \(String(format: "%.1f%%", centerRatio * 100)) frames detected as center (expected: >50%)"
)
log(
" H-range: \(String(format: "%.3f", stats.minH)) to \(String(format: "%.3f", stats.maxH))"
)
log(
" V-range: \(String(format: "%.3f", stats.minV)) to \(String(format: "%.3f", stats.maxV))"
)
log(
" Face width: \(String(format: "%.3f", stats.avgFaceWidth)) (range: \(String(format: "%.3f", stats.minFaceWidth))-\(String(format: "%.3f", stats.maxFaceWidth)))"
)
attachLogs()
// At least 50% should be detected as center when looking at screen
XCTAssertGreaterThan(
centerRatio, 0.5,
"Looking at screen video should have >50% center detections. Log:\n\(logLines.joined(separator: "\n"))"
)
}
struct VideoStats {
var totalFrames = 0
var faceDetectedFrames = 0
var pupilDetectedFrames = 0
var centerFrames = 0
var nonCenterFrames = 0
var minH = Double.greatestFiniteMagnitude
var maxH = -Double.greatestFiniteMagnitude
var minV = Double.greatestFiniteMagnitude
var maxV = -Double.greatestFiniteMagnitude
var minFaceWidth = Double.greatestFiniteMagnitude
var maxFaceWidth = -Double.greatestFiniteMagnitude
var totalFaceWidth = 0.0
var faceWidthCount = 0
var avgFaceWidth: Double {
faceWidthCount > 0 ? totalFaceWidth / Double(faceWidthCount) : 0
}
}
private func processVideo(at url: URL, expectLookingAway: Bool) async throws -> VideoStats {
var stats = VideoStats()
log("\n" + String(repeating: "=", count: 60))
log("Processing video: \(url.lastPathComponent)")
log(
"Expected behavior: \(expectLookingAway ? "LOOKING AWAY (non-center)" : "LOOKING AT SCREEN (center)")"
)
log(String(repeating: "=", count: 60))
let asset = AVURLAsset(url: url)
let duration = try await asset.load(.duration)
let durationSeconds = CMTimeGetSeconds(duration)
log("Duration: \(String(format: "%.2f", durationSeconds)) seconds")
guard let track = try await asset.loadTracks(withMediaType: .video).first else {
XCTFail("No video track found")
return stats
}
let size = try await track.load(.naturalSize)
let frameRate = try await track.load(.nominalFrameRate)
log(
"Size: \(Int(size.width))x\(Int(size.height)), FPS: \(String(format: "%.1f", frameRate))"
)
let reader = try AVAssetReader(asset: asset)
let outputSettings: [String: Any] = [
kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA
]
let trackOutput = AVAssetReaderTrackOutput(track: track, outputSettings: outputSettings)
reader.add(trackOutput)
reader.startReading()
var frameIndex = 0
let sampleInterval = max(1, Int(frameRate / 2)) // Sample ~2 frames per second
log("\nFrame | Time | Face | H-Ratio L/R | V-Ratio L/R | Direction")
log(String(repeating: "-", count: 75))
// Reset calibration for fresh test
PupilDetector.calibration.reset()
// Disable frame skipping for video testing
let originalFrameSkip = PupilDetector.frameSkipCount
PupilDetector.frameSkipCount = 1
defer { PupilDetector.frameSkipCount = originalFrameSkip }
while let sampleBuffer = trackOutput.copyNextSampleBuffer() {
defer {
frameIndex += 1
PupilDetector.advanceFrame()
}
// Only process every Nth frame
if frameIndex % sampleInterval != 0 {
continue
}
stats.totalFrames += 1
guard let pixelBuffer = CMSampleBufferGetImageBuffer(sampleBuffer) else {
continue
}
let timestamp = CMSampleBufferGetPresentationTimeStamp(sampleBuffer)
let timeSeconds = CMTimeGetSeconds(timestamp)
// Run face detection
let request = VNDetectFaceLandmarksRequest()
request.revision = VNDetectFaceLandmarksRequestRevision3
let handler = VNImageRequestHandler(
cvPixelBuffer: pixelBuffer,
orientation: .leftMirrored,
options: [:]
)
try handler.perform([request])
guard let observations = request.results, !observations.isEmpty,
let face = observations.first,
let landmarks = face.landmarks,
let leftEye = landmarks.leftEye,
let rightEye = landmarks.rightEye
else {
log(
String(
format: "%5d | %5.1fs | NO | - | - | -",
frameIndex, timeSeconds))
continue
}
stats.faceDetectedFrames += 1
// Track face width (bounding box width as ratio of image width)
let faceWidth = face.boundingBox.width
stats.minFaceWidth = min(stats.minFaceWidth, faceWidth)
stats.maxFaceWidth = max(stats.maxFaceWidth, faceWidth)
stats.totalFaceWidth += faceWidth
stats.faceWidthCount += 1
let imageSize = CGSize(
width: CVPixelBufferGetWidth(pixelBuffer),
height: CVPixelBufferGetHeight(pixelBuffer)
)
// Detect pupils
var leftHRatio: Double?
var rightHRatio: Double?
var leftVRatio: Double?
var rightVRatio: Double?
if let leftResult = PupilDetector.detectPupil(
in: pixelBuffer,
eyeLandmarks: leftEye,
faceBoundingBox: face.boundingBox,
imageSize: imageSize,
side: 0
) {
leftHRatio = calculateHorizontalRatio(
pupilPosition: leftResult.pupilPosition, eyeRegion: leftResult.eyeRegion)
leftVRatio = calculateVerticalRatio(
pupilPosition: leftResult.pupilPosition, eyeRegion: leftResult.eyeRegion)
}
if let rightResult = PupilDetector.detectPupil(
in: pixelBuffer,
eyeLandmarks: rightEye,
faceBoundingBox: face.boundingBox,
imageSize: imageSize,
side: 1
) {
rightHRatio = calculateHorizontalRatio(
pupilPosition: rightResult.pupilPosition, eyeRegion: rightResult.eyeRegion)
rightVRatio = calculateVerticalRatio(
pupilPosition: rightResult.pupilPosition, eyeRegion: rightResult.eyeRegion)
}
if let lh = leftHRatio, let rh = rightHRatio,
let lv = leftVRatio, let rv = rightVRatio
{
stats.pupilDetectedFrames += 1
let avgH = (lh + rh) / 2.0
let avgV = (lv + rv) / 2.0
// Track min/max ranges
stats.minH = min(stats.minH, avgH)
stats.maxH = max(stats.maxH, avgH)
stats.minV = min(stats.minV, avgV)
stats.maxV = max(stats.maxV, avgV)
let direction = GazeDirection.from(horizontal: avgH, vertical: avgV)
if direction == .center {
stats.centerFrames += 1
} else {
stats.nonCenterFrames += 1
}
log(
String(
format: "%5d | %5.1fs | YES | %.2f / %.2f | %.2f / %.2f | %@ %@",
frameIndex, timeSeconds, lh, rh, lv, rv, direction.rawValue,
String(describing: direction)))
} else {
log(
String(
format: "%5d | %5.1fs | YES | PUPIL FAIL | PUPIL FAIL | -",
frameIndex, timeSeconds))
}
}
log(String(repeating: "=", count: 75))
log(
"Summary: \(stats.totalFrames) frames sampled, \(stats.faceDetectedFrames) with face, \(stats.pupilDetectedFrames) with pupils"
)
log("Center frames: \(stats.centerFrames), Non-center: \(stats.nonCenterFrames)")
log(
"Face width: avg=\(String(format: "%.3f", stats.avgFaceWidth)), range=\(String(format: "%.3f", stats.minFaceWidth)) to \(String(format: "%.3f", stats.maxFaceWidth))"
)
log("Processing complete\n")
return stats
}
private func calculateHorizontalRatio(pupilPosition: PupilPosition, eyeRegion: EyeRegion)
-> Double
{
// pupilPosition.y controls horizontal gaze due to image orientation
let pupilY = Double(pupilPosition.y)
let eyeHeight = Double(eyeRegion.frame.height)
guard eyeHeight > 0 else { return 0.5 }
let ratio = pupilY / eyeHeight
return max(0.0, min(1.0, ratio))
}
private func calculateVerticalRatio(pupilPosition: PupilPosition, eyeRegion: EyeRegion)
-> Double
{
// pupilPosition.x controls vertical gaze due to image orientation
let pupilX = Double(pupilPosition.x)
let eyeWidth = Double(eyeRegion.frame.width)
guard eyeWidth > 0 else { return 0.5 }
let ratio = pupilX / eyeWidth
return max(0.0, min(1.0, ratio))
}
}
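The deleted video test's decode loop is a stock AVAssetReader pattern: decode BGRA frames and hand roughly two per second to the gaze pipeline. Trimmed to its skeleton (the processing callback is a placeholder for the face/pupil work above):

```swift
import AVFoundation

/// Trimmed version of the deleted test's decode loop: read BGRA frames
/// with AVAssetReader and sample roughly two frames per second.
func sampleFrames(from url: URL, process: (CVPixelBuffer, Double) -> Void) async throws {
    let asset = AVURLAsset(url: url)
    guard let track = try await asset.loadTracks(withMediaType: .video).first else { return }
    let frameRate = try await track.load(.nominalFrameRate)

    let reader = try AVAssetReader(asset: asset)
    let output = AVAssetReaderTrackOutput(
        track: track,
        outputSettings: [kCVPixelBufferPixelFormatTypeKey as String: kCVPixelFormatType_32BGRA]
    )
    reader.add(output)
    reader.startReading()

    var frameIndex = 0
    let sampleInterval = max(1, Int(frameRate / 2))  // ~2 frames per second
    while let sample = output.copyNextSampleBuffer() {
        defer { frameIndex += 1 }
        guard frameIndex % sampleInterval == 0,
              let pixelBuffer = CMSampleBufferGetImageBuffer(sample)
        else { continue }
        let time = CMTimeGetSeconds(CMSampleBufferGetPresentationTimeStamp(sample))
        process(pixelBuffer, time)
    }
}
```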

View File

@@ -2,6 +2,17 @@
<rss xmlns:sparkle="http://www.andymatuschak.org/xml-namespaces/sparkle" version="2.0">
<channel>
<title>Gaze</title>
<item>
<title>0.5.0</title>
<pubDate>Fri, 30 Jan 2026 12:58:57 -0500</pubDate>
<sparkle:version>10</sparkle:version>
<sparkle:shortVersionString>0.5.0</sparkle:shortVersionString>
<sparkle:minimumSystemVersion>14.6</sparkle:minimumSystemVersion>
<enclosure url="https://freno.me/api/downloads/Gaze-0.5.0.dmg" length="5253164" type="application/octet-stream" sparkle:edSignature="eXTeHXkMiAO4O1drqvdeYYn6oY9bpilm4toHNZ5BGvWVeNOtwzFC9YOWb+abPEYDRMmu5oodbPBFyPE65w6BDg=="/>
<sparkle:deltas>
<enclosure url="https://freno.me/api/downloads/Gaze10-9.delta" sparkle:deltaFrom="9" length="721382" type="application/octet-stream" sparkle:deltaFromSparkleExecutableSize="860560" sparkle:deltaFromSparkleLocales="de,he,ar,el,ja,fa,uk" sparkle:edSignature="kO4XClM9nikVhnhX4QypN5zFIikIhVdnVcOQDsvL8ciBD6opGM/IH7pqnCRKRwP6N8SRobgAQTHsee6oOZyWCA=="/>
</sparkle:deltas>
</item>
<item>
<title>0.4.1</title>
<pubDate>Tue, 13 Jan 2026 17:27:46 -0500</pubDate>
@@ -62,13 +73,5 @@
<enclosure url="https://freno.me/api/downloads/Gaze2-1.delta" sparkle:deltaFrom="1" length="94254" type="application/octet-stream" sparkle:deltaFromSparkleExecutableSize="858560" sparkle:deltaFromSparkleLocales="de,he,ar,el,ja,fa,uk" sparkle:edSignature="qfxSfqD9iVJ7GVL19V8T4OuOTz0ZgqJNceBH6W+dwoKel1R+BTPkU9Ia8xR12v07GoXkyyqc+ba79OOL7jIpBw=="/>
</sparkle:deltas>
</item>
<item>
<title>0.1.1</title>
<pubDate>Sun, 11 Jan 2026 18:07:02 -0500</pubDate>
<sparkle:version>1</sparkle:version>
<sparkle:shortVersionString>0.2.0</sparkle:shortVersionString>
<sparkle:minimumSystemVersion>14.6</sparkle:minimumSystemVersion>
<enclosure url="https://freno.me/api/downloads/Gaze-0.2.0.dmg" length="4831161" type="application/octet-stream" sparkle:edSignature="zCEmiiO4Q7HV7uGbI/CQcfJElm1uqrYorznE6uCWaKm/Zg1bUrWaeTRf9+Uv9f9+0iptyiS2FNdglLQB8RKkCA=="/>
</item>
</channel>
</rss>