A powerful, lightweight JavaScript library for real-time gaze tracking and attention monitoring using MediaPipe.
Gazer.js enables developers to easily integrate face detection and gaze tracking capabilities into web applications. Perfect for educational platforms, video conferencing, productivity tools, and attention monitoring systems.
- 🎯 Real-time Gaze Tracking - Detect when users are looking at or away from the screen
- 👁️ Retina Location Tracking - Record precise retina positions with configurable change thresholds and intervals
- 👤 Face Detection - Robust face detection with confidence scoring
- 📊 Attention Analytics - Track attention time, distraction periods, and engagement metrics
- 📡 Tracking Data API - Automatic data posting to external APIs with configurable intervals
- 🚀 Performance Modes - Pre-configured Low/Medium/High performance settings for optimal CPU usage
- 🎚️ Sensitivity Modes - Strict/Medium/Relaxed gaze detection presets for different use cases
- ⚙️ Highly Configurable - 30+ configuration options with smart preset management
- ⚡ Performance Optimized - Frame skipping, idle detection, and efficient canvas updates
- 🔔 Event-Driven - Real-time callbacks for all major events
- 🎨 Visual Overlays - Optional face rectangles, gaze vectors, and eye tracking indicators
- 📱 Browser Compatible - Works in all modern browsers with webcam access
- 🛠️ Easy Integration - Drop-in solution with minimal setup required
- 🔧 Smart Canvas Alignment - Automatic canvas sizing and positioning for accurate overlays
<!-- MediaPipe dependencies -->
<script src="https://cdn.jsdelivr.net/npm/@mediapipe/camera_utils/camera_utils.js"></script>
<script src="https://cdn.jsdelivr.net/npm/@mediapipe/face_detection/face_detection.js"></script>
<script src="https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh/face_mesh.js"></script>
<!-- Gazer.js library -->
<script src="gazer.js"></script><video id="webcam" autoplay muted playsinline style="width: 640px; height: 480px;"></video>// Simple initialization with defaults (Medium performance & sensitivity)
const gazer = new Gazer('webcam', {
onGazeChange: (gazeState, gazeData) => {
console.log('Gaze direction:', gazeState); // 'screen', 'away', 'unknown'
},
onStatsUpdate: (stats) => {
console.log('Attention stats:', stats);
}
});
// Advanced initialization with performance and sensitivity modes
const gazer = new Gazer('webcam', {
performanceMode: 'high', // 'low', 'medium', 'high'
sensitivityMode: 'strict', // 'strict', 'medium', 'relaxed'
onGazeChange: (gazeState, gazeData) => {
console.log('Gaze direction:', gazeState);
}
});
// With automatic tracking data posting to API
const gazer = new Gazer('webcam', {
postTrackingDataInterval: 30, // Post data every 30 seconds
onPostTrackingData: async (data) => {
await fetch('/api/tracking', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(data)
});
},
onGazeChange: (gazeState) => {
console.log('Gaze:', gazeState);
}
});
// Start tracking
await gazer.start();Gazer.js includes three pre-configured performance modes for optimal CPU usage:
// Low Performance - Basic tracking (10 FPS)
const gazer = new Gazer('webcam', {
performanceMode: 'low' // Best for low-end devices or battery saving
});
// Medium Performance - Balanced (15 FPS) - DEFAULT
const gazer = new Gazer('webcam', {
performanceMode: 'medium' // Recommended for most applications
});
// High Performance - Smooth tracking (25 FPS)
const gazer = new Gazer('webcam', {
performanceMode: 'high' // Best for high-end devices requiring smooth tracking
});Choose gaze detection sensitivity based on your use case:
// Strict Sensitivity - Precise alignment required
const gazer = new Gazer('webcam', {
sensitivityMode: 'strict' // Best for focused work scenarios
});
// Medium Sensitivity - Balanced detection - DEFAULT
const gazer = new Gazer('webcam', {
sensitivityMode: 'medium' // Recommended for most users
});
// Relaxed Sensitivity - Forgiving detection
const gazer = new Gazer('webcam', {
sensitivityMode: 'relaxed' // Good for casual use or accessibility
});You can still use manual settings, but modes take precedence unless overridden:
// This will use high performance mode settings
const gazer = new Gazer('webcam', {
performanceMode: 'high',
targetFps: 10 // This will override the high mode's 25 FPS (with console warning)
});const gazer = new Gazer(videoElementId, options)Parameters:
`videoElementId` (string): ID of the video element
`options` (object): Configuration options (see Configuration section)
Starts the camera and begins gaze tracking.
await gazer.start();Stops the camera and pauses tracking.
await gazer.stop();Changes performance mode at runtime.
gazer.setPerformanceMode('high'); // 'low', 'medium', 'high', 'manual'Changes sensitivity mode at runtime.
gazer.setSensitivityMode('strict'); // 'strict', 'medium', 'relaxed', 'manual'Returns available performance modes.
const modes = gazer.getPerformanceModes();
// [{ value: 'low', label: 'Low', description: '...' }, ...]Returns available sensitivity modes.
const modes = gazer.getSensitivityModes();
// [{ value: 'strict', label: 'Strict', description: '...' }, ...]Updates multiple settings at once, including modes.
gazer.updateSettings({
performanceMode: 'high',
sensitivityMode: 'relaxed',
targetFps: 20,
showFaceRectangle: false
});Updates configuration in real-time (legacy method).
gazer.updateConfig({
targetFps: 20,
showFaceRectangle: false
});Returns current tracking statistics.
const stats = gazer.getStats();
console.log(stats.faceCount, stats.awayTime, stats.distractedTime);Checks if MediaPipe models are loaded.
if (gazer.isReady()) {
await gazer.start();
}Checks if tracking is currently active.
console.log('Tracking active:', gazer.isActive());Changes tracking data posting interval at runtime.
gazer.setTrackingDataInterval(60); // Change to 1 minute
gazer.setTrackingDataInterval(0); // Disable postingChanges tracking data callback function at runtime.
gazer.setTrackingDataCallback(async (data) => {
await fetch('/api/tracking', {
method: 'POST',
body: JSON.stringify(data)
});
});Triggers immediate tracking data post regardless of interval.
gazer.forcePostTrackingData();Enable or disable retina location tracking.
gazer.setTrackRetinaLocations(true); // Enable tracking
gazer.setTrackRetinaLocations(false); // Disable trackingSet the movement threshold for recording new retina positions.
gazer.setRetinaLocationChangeThreshold(0.05); // 5% change required
gazer.setRetinaLocationChangeThreshold(0.15); // 15% change requiredSet the time interval for recording retina positions regardless of movement.
gazer.setRetinaLocationInterval(3); // Record every 3 seconds
gazer.setRetinaLocationInterval(10); // Record every 10 secondsCleans up resources and removes canvas overlay.
gazer.destroy();{
performanceMode: 'medium', // 'low', 'medium', 'high', null/manual
sensitivityMode: 'medium' // 'strict', 'medium', 'relaxed', null/manual
}Performance Mode Presets:
`low`: 10 FPS, skip 3 frames, pause on idle, reduced canvas; `medium`: 15 FPS, skip 2 frames, pause on idle, normal canvas; `high`: 25 FPS, skip 1 frame, no pause, normal canvas
Sensitivity Mode Presets:
`strict`: Horizontal 0.2, Vertical 0.1, Smoothing 3 frames; `medium`: Horizontal 0.3, Vertical 0.15, Smoothing 5 frames; `relaxed`: Horizontal 0.5, Vertical 0.25, Smoothing 7 frames
{
targetFps: 15, // Target processing frame rate
frameSkip: 1, // Process every N frames
pauseOnIdle: true, // Pause processing when no faces detected
reducedCanvas: false, // Reduce canvas update frequency
idleTimeout: 3000 // Idle timeout in milliseconds
}{
horizontalThreshold: 0.3, // Left/right gaze sensitivity (0.1-1.0)
verticalThreshold: 0.15, // Up/down gaze sensitivity (0.05-0.5)
gazeHistorySize: 5 // Frames to smooth gaze detection (2-10)
}{
trackRetinaLocations: true, // Enable retina location tracking
retinaLocationChangeThreshold: 0.1, // Record when movement > 10% (0.01-1.0)
retinaLocationInterval: 5 // Record every N seconds regardless of change
}Retina tracking records normalized coordinates (0-1) where:
`x: 0` = far left, `x: 1` = far right; `y: 0` = top, `y: 1` = bottom
- Locations are recorded when movement exceeds threshold OR when interval time passes
- Each location includes `x`, `y`, `confidence`, `timestamp`, and `gazeDirection`
{
showGazeVector: true, // Show gaze direction arrow
showEyePoints: true, // Show eye center indicators
showFaceRectangle: true, // Show face detection rectangle
enableLogs: true // Enable console logging
}{
postTrackingDataInterval: 30, // Seconds between data posts (0 to disable)
onPostTrackingData: null // Callback function for posting data
}{
faceDetectionModel: "short", // "short" or "full"
faceDetectionConfidence: 0.5, // Detection confidence (0-1)
faceMeshConfidence: 0.5, // Face mesh confidence (0-1)
faceMeshTracking: 0.5, // Face mesh tracking confidence (0-1)
maxNumFaces: 1, // Maximum faces to detect
refineLandmarks: true // Use refined landmarks
}{
cameraWidth: 640, // Camera resolution width
cameraHeight: 480 // Camera resolution height
}The Tracking Data API allows you to automatically collect and send tracking analytics to external APIs at regular intervals. This is perfect for learning analytics, attention monitoring systems, and user behavior analysis.
const gazer = new Gazer('webcam', {
postTrackingDataInterval: 30, // Post data every 30 seconds
onPostTrackingData: async (data) => {
// Send to your analytics endpoint
await fetch('https://your-api.com/analytics/tracking', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Authorization': 'Bearer your-token'
},
body: JSON.stringify(data)
});
}
});const gazer = new Gazer('webcam', {
postTrackingDataInterval: 60, // Post every minute
onPostTrackingData: async (data) => {
try {
const response = await fetch('/api/tracking-data', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify({
...data,
userId: getCurrentUserId(),
sessionId: getSessionId(),
metadata: {
userAgent: navigator.userAgent,
timestamp: new Date().toISOString()
}
})
});
if (!response.ok) {
console.error('Failed to post tracking data:', response.status);
}
} catch (error) {
console.error('Tracking data post error:', error);
}
}
});// Change interval during runtime
gazer.setTrackingDataInterval(120); // 2 minutes
// Force immediate data post
gazer.forcePostTrackingData();
// Update callback
gazer.setTrackingDataCallback(async (data) => {
// New callback logic
console.log('Face count changes:', data.faceCountChanges);
});
// Disable tracking data
gazer.setTrackingDataInterval(0);Each callback receives comprehensive tracking data:
{
timestamp: 1691420400000, // Current timestamp
sessionDuration: 30, // Seconds since last post
faceCountChanges: 5, // Face count transitions (engagement indicator)
totalAwayTime: 15, // Total away time (seconds)
totalDistractedTime: 8, // Total distracted time (seconds)
currentFaceCount: 1, // Current faces detected
currentGazeState: "screen", // Current gaze direction
processingFps: 15, // Performance metrics
framesSkipped: 120,
canvasUpdates: 450,
isRunning: true,
isIdle: false,
retinaLocations: [ // Array of retina positions recorded this session
{
x: 0.523, // Normalized x position (0-1)
y: 0.456, // Normalized y position (0-1)
confidence: 0.89, // Detection confidence
timestamp: 1691420385000, // When position was recorded
gazeDirection: "screen" // Gaze state when recorded
},
// ... more locations
]
}Key Metrics for Analytics:
`faceCountChanges`: High values indicate movement/distraction, low values suggest focus; `totalAwayTime`: Time user was completely away from screen; `totalDistractedTime`: Time user was present but looking away; `sessionDuration`: Time interval for the posted data; `retinaLocations`: Precise gaze positions for heatmap analysis and attention mapping
Triggered when gaze direction changes.
onGazeChange: (gazeState, gazeData) => {
// gazeState: 'screen', 'away', 'unknown'
// gazeData: { direction, horizontal, vertical, confidence, ... }
console.log(`User is looking ${gazeState}`);
}Triggered periodically with tracking statistics.
onStatsUpdate: (stats) => {
console.log(`Faces: ${stats.faceCount}`);
console.log(`Away time: ${stats.awayTime}s`);
console.log(`Distracted time: ${stats.distractedTime}s`);
console.log(`Confidence: ${stats.confidence}%`);
}Triggered when faces are detected or lost.
onFaceDetected: (faces) => {
console.log(`${faces.length} face(s) detected`);
}Triggered when MediaPipe models finish loading.
onModelLoaded: () => {
console.log('Ready to start tracking!');
}Triggered when errors occur.
onError: (error) => {
console.error('Gazer error:', error);
}Triggered at regular intervals to post tracking data to external APIs.
onPostTrackingData: async (trackingData) => {
// Send data to your analytics API
await fetch('https://your-api.com/tracking-data', {
method: 'POST',
headers: { 'Content-Type': 'application/json' },
body: JSON.stringify(trackingData)
});
}Tracking Data Object:
{
timestamp: 1691420400000, // Current timestamp
sessionDuration: 30, // Seconds since last data post
faceCountChanges: 5, // Number of times face count changed
totalAwayTime: 15, // Total seconds user was away
totalDistractedTime: 8, // Total seconds user was distracted
currentFaceCount: 1, // Current number of faces detected
currentGazeState: "screen", // Current gaze state
processingFps: 15, // Current processing FPS
framesSkipped: 120, // Total frames skipped
canvasUpdates: 450, // Total canvas updates
isRunning: true, // Whether tracking is active
isIdle: false // Whether in idle mode
}The stats object contains:
{
faceCount: 1, // Number of faces currently detected
faceCountChanges: 3, // Number of times face count changed (session total)
awayTime: 15, // Total time away from screen (seconds)
distractedTime: 8, // Total time looking away (seconds)
confidence: 87, // Average detection confidence (%)
processingFps: 14.2, // Actual processing frame rate
framesSkipped: 42, // Number of frames skipped
canvasUpdates: 156, // Number of canvas updates
gazeState: 'screen', // Current gaze state
isRunning: true, // Whether tracking is active
isIdle: false // Whether in idle mode
}<!DOCTYPE html>
<html>
<head>
<title>Gazer.js Demo</title>
</head>
<body>
<div style="display: flex; gap: 20px;">
<div>
<video id="webcam" autoplay muted playsinline style="width: 640px; height: 480px;"></video>
<div>
<button onclick="startTracking()">Start</button>
<button onclick="stopTracking()">Stop</button>
<!-- Performance Mode Controls -->
<select id="performanceMode" onchange="changePerformanceMode()">
<option value="low">Low Performance</option>
<option value="medium" selected>Medium Performance</option>
<option value="high">High Performance</option>
<option value="manual">Manual</option>
</select>
<!-- Sensitivity Mode Controls -->
<select id="sensitivityMode" onchange="changeSensitivityMode()">
<option value="strict">Strict Sensitivity</option>
<option value="medium" selected>Medium Sensitivity</option>
<option value="relaxed">Relaxed Sensitivity</option>
<option value="manual">Manual</option>
</select>
</div>
</div>
<div>
<div id="stats"></div>
<div id="gaze-status"></div>
</div>
</div>
<!-- Include dependencies -->
<script src="https://cdn.jsdelivr.net/npm/@mediapipe/camera_utils/camera_utils.js"></script>
<script src="https://cdn.jsdelivr.net/npm/@mediapipe/face_detection/face_detection.js"></script>
<script src="https://cdn.jsdelivr.net/npm/@mediapipe/face_mesh/face_mesh.js"></script>
<script src="gazer.js"></script>
<script>
let gazer;
// Initialize with performance and sensitivity modes
function init() {
gazer = new Gazer('webcam', {
performanceMode: 'medium', // Default performance mode
sensitivityMode: 'medium', // Default sensitivity mode
showFaceRectangle: true,
showGazeVector: true,
onGazeChange: (gazeState, gazeData) => {
document.getElementById('gaze-status').innerHTML = `
<h3>Gaze Status: ${gazeState.toUpperCase()}</h3>
<p>Confidence: ${Math.round((gazeData?.confidence || 0) * 100)}%</p>
<p>Horizontal: ${(gazeData?.horizontal || 0).toFixed(3)}</p>
<p>Vertical: ${(gazeData?.vertical || 0).toFixed(3)}</p>
`;
},
onStatsUpdate: (stats) => {
document.getElementById('stats').innerHTML = `
<h3>Statistics</h3>
<p><strong>Faces Detected:</strong> ${stats.faceCount}</p>
<p><strong>Away Time:</strong> ${stats.awayTime}s</p>
<p><strong>Distracted Time:</strong> ${stats.distractedTime}s</p>
<p><strong>Processing FPS:</strong> ${stats.processingFps.toFixed(1)}</p>
<p><strong>Frames Skipped:</strong> ${stats.framesSkipped}</p>
<p><strong>Canvas Updates:</strong> ${stats.canvasUpdates}</p>
<p><strong>Average Confidence:</strong> ${stats.confidence}%</p>
`;
},
onModelLoaded: () => {
console.log('✅ Models loaded successfully!');
document.querySelector('button').disabled = false;
},
onError: (error) => {
alert('Error: ' + error.message);
}
});
}
async function startTracking() {
if (gazer.isReady()) {
await gazer.start();
console.log('🎯 Tracking started');
} else {
alert('Please wait for models to load');
}
}
async function stopTracking() {
await gazer.stop();
console.log('⏹️ Tracking stopped');
}
function changePerformanceMode() {
const mode = document.getElementById('performanceMode').value;
gazer.setPerformanceMode(mode);
}
function changeSensitivityMode() {
const mode = document.getElementById('sensitivityMode').value;
gazer.setSensitivityMode(mode);
}
// Initialize when page loads
window.onload = init;
</script>
</body>
</html>
- 🎓 Educational Platforms - Monitor student attention during online classes with automatic analytics posting
- 💼 Video Conferencing - Track engagement in meetings and presentations with real-time data
- 🎮 Gaming Applications - Eye-tracking controls and attention-based gameplay
- 🔬 Research Studies - Attention and behavior analysis with automated data collection
- 💻 Productivity Tools - Focus tracking and distraction alerts with performance insights
- ♿ Accessibility - Gaze-based navigation for disabled users
- 📱 Mobile Apps - Attention monitoring in mobile web applications
- 📈 Learning Analytics - Automated attention metrics for educational content effectiveness
- 🏢 Corporate Training - Employee engagement tracking during training sessions
- 🎥 Content Analysis - Understanding viewer attention patterns for video content
- Optimize Frame Rate: Lower `targetFps` for better performance
- Use Frame Skipping: Set `frameSkip > 1` for resource-constrained devices
- Enable Idle Detection: Use `pauseOnIdle: true` to save resources
- Reduce Visual Updates: Enable `reducedCanvas: true` for minimal UI updates
- Disable Unnecessary Features: Turn off visual overlays you don't need
- ✅ Chrome 88+
- ✅ Firefox 85+
- ✅ Safari 14+
- ✅ Edge 88+
Requirements:
- WebRTC/getUserMedia support
- WebAssembly support
- Modern JavaScript (ES6+)
MIT License - see LICENSE file for details.