Update all files to latest version

This commit is contained in:
2025-10-08 22:29:13 -06:00
parent b532c8792b
commit 3eb058e37b
18 changed files with 152 additions and 2531 deletions

8
.env.example Normal file
View File

@@ -0,0 +1,8 @@
# Python Server Configuration
# URL del servidor Python para identificación de SKU
NEXT_PUBLIC_PYTHON_SERVER_URL=http://localhost:8000
# Ejemplos de configuración:
# - Desarrollo local: http://localhost:8000
# - Producción con ngrok: https://tu-url.ngrok-free.app
# - Producción: https://api.tu-dominio.com

4
.gitignore vendored
View File

@@ -32,6 +32,7 @@ yarn-error.log*
# env files (can opt-in for committing if needed)
.env*
!.env.example
# vercel
.vercel
@@ -42,3 +43,6 @@ next-env.d.ts
# Model files
python_server/models/
__pycache__/

View File

@@ -5,14 +5,11 @@ import { useSearchParams } from 'next/navigation';
import { Camera, History, VideoOff, Settings, Video } from 'lucide-react';
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';
import { Slider } from '@/components/ui/slider';
import { Switch } from '@/components/ui/switch';
import { type Shoe } from '@/lib/shoe-database';
import { addToHistory, getHistory } from '@/lib/history-storage';
import ShoeResultsPopup from '@/components/shoe-results-popup';
import HistorySidebar from '@/components/history-sidebar';
import { useDetection } from '@/lib/ml/use-detection';
import type { DetectionResult } from '@/lib/ml/types';
import { skuIdentificationService } from '@/lib/sku-identification';
type CameraStatus = 'loading' | 'active' | 'denied' | 'no_devices';
@@ -32,128 +29,7 @@ function HomePageContent() {
const [history, setHistory] = useState<Shoe[]>([]);
const [detectedSKU, setDetectedSKU] = useState<string | null>(null);
const [isSettingsPanelOpen, setSettingsPanelOpen] = useState(false);
// ML Detection state
const [detectionEnabled, setDetectionEnabled] = useState(true); // Auto-enable on page load
const [currentDetection, setCurrentDetection] = useState<DetectionResult | null>(null);
const [shoeDetectionCount, setShoeDetectionCount] = useState(0);
const lastSoundTimeRef = useRef(0);
// Initialize ML detection system first
const {
isLoading: isMLLoading,
metrics,
error: mlError,
initialize: initializeML,
startContinuous,
stopContinuous,
triggerDetection,
updateConfig,
config,
detectionEngine,
setDetectionCallback
} = useDetection({
modelVariant: 'standard', // Start with standard model
enableContinuous: true,
enableTrigger: true,
onDetection: undefined, // Will be set after handleDetection is defined
onError: (error) => {
console.error('ML Detection Error:', error);
}
});
// Clean detection callback - no canvas drawing needed
const handleDetection = useCallback(async (detection: DetectionResult | null) => {
const callbackId = Math.random().toString(36).substr(2, 9);
console.log(`🔍 Detection callback received [${callbackId}]:`, detection);
setCurrentDetection(detection);
// Count actual shoe detections (not just inference attempts)
if (detection) {
setShoeDetectionCount(prev => prev + 1);
}
// Auto-trigger popup when shoe is detected with high confidence
if (detection && detection.confidence > 0.7) {
console.log(`🎯 HIGH CONFIDENCE SHOE DETECTED! [${callbackId}] Opening popup...`, detection);
// Call SKU identification API
if (videoRef.current && detectionEngine) {
try {
console.log(`🔍 [${callbackId}] Calling SKU identification...`);
const sku = await detectionEngine.identifyProductSKU(videoRef.current);
console.log(`📦 [${callbackId}] SKU result:`, sku);
setDetectedSKU(sku);
if (sku) {
// Create shoe object with SKU for history
const shoeWithSKU: Shoe = {
id: Date.now().toString(),
name: `Producto ${sku}`,
brand: 'Identificado por IA',
price: 'Precio por consultar',
image: '/placeholder.jpg',
confidence: detection.confidence,
sku: sku,
timestamp: new Date().toISOString()
};
const updatedHistory = addToHistory(shoeWithSKU);
setHistory(updatedHistory);
}
} catch (error) {
console.error(`❌ [${callbackId}] SKU identification failed:`, error);
setDetectedSKU(null);
}
}
setPopupOpen(true);
// Play detection sound with debouncing (max once per 2 seconds)
const now = Date.now();
const lastTime = lastSoundTimeRef.current;
console.log(`🔊 Sound check [${callbackId}]: now=${now}, lastTime=${lastTime}, diff=${now - lastTime}ms`);
if (now - lastTime > 2000) {
try {
const audioId = Math.random().toString(36).substr(2, 9);
// Use AudioContext for more reliable single-play behavior
const AudioContextClass = window.AudioContext || (window as typeof window & { webkitAudioContext: typeof AudioContext }).webkitAudioContext;
const audioContext = new AudioContextClass();
console.log(`🔊 Playing detection sound [callback:${callbackId}] [audio:${audioId}]`);
// Simple beep using Web Audio API
const oscillator = audioContext.createOscillator();
const gainNode = audioContext.createGain();
oscillator.connect(gainNode);
gainNode.connect(audioContext.destination);
oscillator.frequency.setValueAtTime(800, audioContext.currentTime);
gainNode.gain.setValueAtTime(0.3, audioContext.currentTime);
gainNode.gain.exponentialRampToValueAtTime(0.001, audioContext.currentTime + 0.3);
oscillator.start(audioContext.currentTime);
oscillator.stop(audioContext.currentTime + 0.3);
console.log(`▶️ Audio beep started [${audioId}]`);
lastSoundTimeRef.current = now;
} catch (e) {
console.warn(`Sound playback failed [${callbackId}]:`, e);
}
} else {
console.log(`🔇 Sound skipped [${callbackId}] - too soon after last sound (${now - lastTime}ms ago)`);
}
}
}, [detectionEngine]);
// Set the detection callback after handleDetection is defined
useEffect(() => {
if (setDetectionCallback && handleDetection) {
setDetectionCallback(handleDetection);
}
}, [handleDetection, setDetectionCallback]);
const [isScanning, setIsScanning] = useState(false);
// Effect to clean up the stream when component unmounts or stream changes
useEffect(() => {
@@ -170,33 +46,6 @@ function HomePageContent() {
}
}, [stream, cameraStatus]); // Runs when stream or camera status changes
// Track initialization state to prevent multiple attempts
const [mlInitialized, setMLInitialized] = useState(false);
// Initialize ML detection when camera is ready (only once)
useEffect(() => {
// Only log in development and when conditions change meaningfully
if (process.env.NODE_ENV === 'development') {
console.log('🔍 ML init check:', {
ready: videoRef.current && cameraStatus === 'active' && !isMLLoading && detectionEnabled && !mlInitialized
});
}
if (videoRef.current && cameraStatus === 'active' && !isMLLoading && detectionEnabled && !mlInitialized) {
console.log('✅ Starting ML detection...');
setMLInitialized(true);
initializeML(videoRef.current).then(() => {
console.log('✅ ML ready, starting continuous detection');
startContinuous();
}).catch((error) => {
console.error('❌ ML initialization failed:', error);
setMLInitialized(false); // Reset on error to allow retry
});
}
}, [cameraStatus, detectionEnabled, isMLLoading, mlInitialized, initializeML, startContinuous]);
const startStream = useCallback(async (deviceId: string) => {
// Stop previous stream if it exists
(videoRef.current?.srcObject as MediaStream)?.getTracks().forEach((track) => track.stop());
@@ -274,57 +123,54 @@ function HomePageContent() {
};
const handleScan = async () => {
if (detectionEnabled && triggerDetection) {
try {
console.log('🎯 Triggering ML detection...');
const mlResult = await triggerDetection();
if (mlResult) {
console.log('✅ Shoe detected by ML, calling SKU identification...');
// Call SKU identification with the detected shoe
if (videoRef.current && detectionEngine) {
try {
const sku = await detectionEngine.identifyProductSKU(videoRef.current);
console.log('📦 Manual scan SKU result:', sku);
setDetectedSKU(sku);
if (sku) {
// Create shoe object with SKU for history
const shoeWithSKU: Shoe = {
id: Date.now().toString(),
name: `Producto ${sku}`,
brand: 'Identificado por IA',
price: 'Precio por consultar',
image: '/placeholder.jpg',
confidence: mlResult.confidence,
sku: sku,
timestamp: new Date().toISOString()
};
const updatedHistory = addToHistory(shoeWithSKU);
setHistory(updatedHistory);
}
setPopupOpen(true);
} catch (skuError) {
console.error('❌ SKU identification failed:', skuError);
// Still show popup even if SKU fails
setDetectedSKU(null);
setPopupOpen(true);
}
} else {
console.warn('⚠️ Video or detection engine not available for SKU call');
setPopupOpen(true);
}
} else {
console.log('❌ No shoe detected by ML');
}
} catch (error) {
console.error('❌ ML detection failed:', error);
if (!videoRef.current || isScanning) return;
try {
setIsScanning(true);
console.log('📸 Capturando imagen del video...');
// Capture frame from video
const canvas = document.createElement('canvas');
const ctx = canvas.getContext('2d')!;
canvas.width = videoRef.current.videoWidth;
canvas.height = videoRef.current.videoHeight;
ctx.drawImage(videoRef.current, 0, 0);
// Get ImageData
const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
console.log('🔍 Llamando al servidor Python para identificar SKU...');
// Call SKU identification service
const sku = await skuIdentificationService.identifySKU(imageData);
console.log('📦 SKU result:', sku);
setDetectedSKU(sku);
if (sku) {
// Create shoe object with SKU for history
const shoeWithSKU: Shoe = {
id: Date.now().toString(),
name: `Producto ${sku}`,
brand: 'Identificado por IA',
price: 'Precio por consultar',
image: '/placeholder.jpg',
sku: sku,
timestamp: new Date().toISOString()
};
const updatedHistory = addToHistory(shoeWithSKU);
setHistory(updatedHistory);
}
} else {
console.log('⚠️ ML detection is disabled');
setPopupOpen(true);
} catch (error) {
console.error('❌ Error en identificación:', error);
setDetectedSKU(null);
setPopupOpen(true);
} finally {
setIsScanning(false);
}
};
@@ -499,11 +345,10 @@ function HomePageContent() {
`}</style>
</div>
{/* Detection Counters - Only show in dev mode */}
{isDev && (
<div className="absolute top-4 right-4 bg-black/60 backdrop-blur-sm rounded-lg px-3 py-2 text-white text-xs space-y-1">
<div className="text-green-400">👟 Shoes Found: {shoeDetectionCount}</div>
<div className="text-blue-400"> Avg Speed: {metrics?.inferenceTime ? `${metrics.inferenceTime.toFixed(0)}ms` : 'N/A'}</div>
{/* Scanning Indicator */}
{isScanning && (
<div className="absolute top-4 right-4 bg-blue-600/80 backdrop-blur-sm rounded-lg px-4 py-2 text-white text-sm font-medium animate-pulse">
📸 Identificando...
</div>
)}
@@ -548,144 +393,6 @@ function HomePageContent() {
{/* Additional Options */}
<div className="space-y-4 flex-1">
{/* ML Detection Settings - Only show in dev mode */}
{isDev && (
<div className="bg-white/5 rounded-lg p-4 border border-white/10">
<div className="flex items-center gap-2 mb-3">
<Camera size={20} className="text-blue-400" />
<span className="text-white font-medium">Detección IA</span>
{isMLLoading && <span className="text-xs text-yellow-400">Cargando...</span>}
</div>
<div className="space-y-3">
<div className="flex items-center gap-3">
<button
onClick={() => {
setDetectionEnabled(!detectionEnabled);
if (!detectionEnabled) {
console.log('Enabling ML detection');
} else {
console.log('Disabling ML detection');
stopContinuous();
}
}}
className={`text-sm py-2 px-4 rounded-md transition-colors border ${
detectionEnabled
? 'bg-green-500/20 text-green-300 border-green-500/30 hover:bg-green-500/30'
: 'bg-white/10 text-white border-white/20 hover:bg-blue-500/30'
}`}
>
{detectionEnabled ? 'Activado' : 'Activar'}
</button>
<div className="flex items-center gap-2">
{detectionEnabled && (
<div className="w-2 h-2 bg-green-500 rounded-full animate-pulse"></div>
)}
<span className="text-white/60 text-sm">
{detectionEnabled ? 'Detectando zapatos automáticamente' : 'Click para activar detección IA'}
</span>
</div>
</div>
{/* ML Metrics */}
{detectionEnabled && metrics && (
<div className="text-xs space-y-1 text-white/50 bg-black/20 p-2 rounded">
<div>FPS: {metrics.fps.toFixed(1)}</div>
<div>Inferencia: {metrics.inferenceTime.toFixed(0)}ms</div>
{metrics.memoryUsage > 0 && <div>Memoria: {metrics.memoryUsage.toFixed(0)}MB</div>}
</div>
)}
{/* Detection Confidence Indicator */}
{detectionEnabled && currentDetection && (
<div className="space-y-2 pt-2">
<label className="text-sm font-medium text-white/80">Confianza de Detección</label>
<div className="bg-white/10 rounded-lg p-3 border border-white/20">
<div className="flex justify-between items-center mb-1">
<span className="text-xs text-white/60">Confianza</span>
<span className="text-xs text-white font-bold">{(currentDetection.confidence * 100).toFixed(1)}%</span>
</div>
<div className="w-full bg-black/30 rounded-full h-2">
<div
className={`h-2 rounded-full transition-all duration-300 ${
currentDetection.confidence > 0.8 ? 'bg-green-500' :
currentDetection.confidence > 0.6 ? 'bg-yellow-500' : 'bg-red-500'
}`}
style={{ width: `${currentDetection.confidence * 100}%` }}
/>
</div>
<div className="flex justify-between text-xs text-white/40 mt-1">
<span>Bajo</span>
<span>Alto</span>
</div>
</div>
</div>
)}
{/* Other settings */}
{detectionEnabled && config && (
<div className="space-y-4 pt-4">
<div>
<label className="text-sm font-medium text-white/80">Sensibilidad ({(config.confidenceThreshold * 100).toFixed(0)}%)</label>
<Slider
min={0.3}
max={0.9}
step={0.05}
value={[config.confidenceThreshold]}
onValueChange={([value]) => updateConfig({ confidenceThreshold: value })}
disabled={!detectionEnabled}
className="mt-2"
/>
</div>
<div>
<label className="text-sm font-medium text-white/80">Frames a saltar ({config.frameSkip})</label>
<Slider
min={1}
max={10}
step={1}
value={[config.frameSkip]}
onValueChange={([value]) => updateConfig({ frameSkip: value })}
disabled={!detectionEnabled}
className="mt-2"
/>
</div>
<div className="flex items-center justify-between">
<label className="text-sm font-medium text-white/80">Detección continua</label>
<Switch
checked={config.enableContinuous}
onCheckedChange={(checked) => updateConfig({ enableContinuous: checked })}
disabled={!detectionEnabled}
/>
</div>
<div className="flex items-center justify-between">
<label className="text-sm font-medium text-white/80">Detección por trigger</label>
<Switch
checked={config.enableTrigger}
onCheckedChange={(checked) => updateConfig({ enableTrigger: checked })}
disabled={!detectionEnabled}
/>
</div>
</div>
)}
{/* Detection Status */}
{currentDetection && (
<div className="text-xs bg-green-500/10 text-green-300 p-2 rounded border border-green-500/20">
🎯 Zapato detectado (confianza: {(currentDetection.confidence * 100).toFixed(1)}%)
</div>
)}
{mlError && (
<div className="text-xs bg-red-500/10 text-red-300 p-2 rounded border border-red-500/20">
{mlError}
</div>
)}
</div>
</div>
)}
{/* App Info */}
<div className="bg-gradient-to-r from-blue-500/10 to-purple-500/10 rounded-lg p-4 border border-blue-500/20">
<h3 className="text-white font-medium mb-2">Smart Store Assistant</h3>

View File

@@ -1,17 +0,0 @@
import type { Shoe } from "./shoe-database";
/**
 * Simulates detecting a shoe from a list of possible shoes.
 * In a real application, this would involve a machine learning model.
 * @param allShoes The list of all shoes in the database.
 * @returns A randomly selected shoe, or null when the database is empty.
 */
export function detectShoe(allShoes: Shoe[]): Shoe | null {
  // Guard: nothing to pick from.
  if (allShoes.length === 0) {
    return null;
  }
  // Fake "detection": choose a uniformly random entry from the database.
  const pick = Math.floor(Math.random() * allShoes.length);
  return allShoes[pick];
}

View File

@@ -1,388 +0,0 @@
import type { DetectionConfig, DetectionResult, DetectionMetrics, DetectionMode } from './types';
import { DetectionWorkerManager } from './detection-worker-manager';
import { detectDeviceCapabilities, getRecommendedConfig } from './device-capabilities';
import { skuIdentificationService } from '../sku-identification';
// Extend window interface for TensorFlow.js
declare global {
interface Window {
tf: unknown;
}
}
/**
 * Main detection engine that coordinates continuous and trigger detection.
 *
 * Two modes are supported:
 * - continuous: a requestAnimationFrame loop that samples the video at reduced
 *   resolution, skips frames per config, and smooths results temporally;
 * - trigger: a one-shot, higher-resolution detection on demand.
 * All inference is delegated to a DetectionWorkerManager (web worker).
 */
export class DetectionEngine {
  private workerManager: DetectionWorkerManager;
  private config: DetectionConfig;
  // NOTE(review): never written or read anywhere in this class — inference
  // happens inside the worker; presumably vestigial. TODO confirm and remove.
  private model: unknown = null; // TensorFlow.js model instance
  // Detection state
  private isRunning = false;
  private detectionMode: DetectionMode = 'hybrid';
  private frameSkipCounter = 0;
  private detectionCount = 0;
  // Temporal filtering
  private detectionHistory: DetectionResult[] = [];
  private lastValidDetection: DetectionResult | null = null;
  // Performance tracking
  private metrics: DetectionMetrics = {
    fps: 0,
    inferenceTime: 0,
    memoryUsage: 0
  };
  // Event callbacks
  private onDetectionCallback?: (detection: DetectionResult | null) => void;
  private onMetricsCallback?: (metrics: DetectionMetrics) => void;
  // Timestamp of the last continuous-mode callback, used for 100ms debouncing.
  private lastDetectionCallbackTime?: number;
  constructor() {
    console.log('🏗️ DetectionEngine constructor called');
    this.workerManager = new DetectionWorkerManager();
    // Get device-optimized configuration
    const capabilities = detectDeviceCapabilities();
    this.config = getRecommendedConfig(capabilities);
    console.log('✅ Detection engine initialized', { capabilities, config: this.config });
  }
  /**
   * Initialize the detection engine with a specific model.
   * @param modelVariant Model size to load; falls back to the device-recommended variant.
   * @param onProgress Optional download-progress callback forwarded to the model loader.
   * @throws Re-throws any worker load/configure failure after logging it.
   */
  async initialize(modelVariant?: 'quantized' | 'standard' | 'full', onProgress?: (progress: number) => void): Promise<void> {
    const variant = modelVariant || this.config.modelVariant;
    console.log(`🔧 Initializing detection engine with ${variant} model...`);
    try {
      // Load the model into the worker
      console.log('📥 Loading model into worker...');
      await this.workerManager.loadModel(variant, onProgress);
      // Configure the worker with current settings
      console.log('⚙️ Configuring worker...');
      await this.workerManager.configure(this.config);
      console.log(`✅ Detection engine initialized with ${variant} model`);
    } catch (error) {
      console.error('❌ Failed to initialize detection engine:', error);
      throw error;
    }
  }
  /**
   * Start continuous detection. No-op if a run is already in progress.
   * Only starts the rAF loop when config.enableContinuous is set; otherwise
   * the engine just flips to 'trigger' mode and waits for manual triggers.
   */
  startContinuousDetection(videoElement: HTMLVideoElement): void {
    console.log('🚀 startContinuousDetection called:', {
      isRunning: this.isRunning,
      enableContinuous: this.config.enableContinuous,
      videoElement: !!videoElement
    });
    if (this.isRunning) {
      console.warn('Detection already running');
      return;
    }
    this.isRunning = true;
    this.detectionMode = this.config.enableContinuous ? 'continuous' : 'trigger';
    if (this.config.enableContinuous) {
      console.log('🔄 Starting continuous detection loop...');
      this.runContinuousLoop(videoElement);
    }
    console.log(`✅ Started detection in ${this.detectionMode} mode`);
  }
  /**
   * Stop continuous detection and reset frame-skip / temporal-filter state.
   * The rAF loop observes isRunning and stops scheduling itself.
   */
  stopContinuousDetection(): void {
    this.isRunning = false;
    this.frameSkipCounter = 0;
    this.detectionHistory = [];
    console.log('Stopped continuous detection');
  }
  /**
   * Perform single trigger detection - higher quality/confidence than continuous.
   * Captures a high-resolution frame and runs one inference.
   * Note: unlike the continuous loop, the detection callback is only invoked
   * here when a detection was actually found (null results are not reported).
   * @returns The top detection, or null when nothing was found.
   */
  async triggerDetection(videoElement: HTMLVideoElement): Promise<DetectionResult | null> {
    const startTime = performance.now();
    try {
      console.log('🎯 Starting trigger detection (high quality)');
      // Capture image data for trigger detection (high quality)
      const imageData = this.captureVideoFrame(videoElement, true);
      // Use worker manager for detection
      const detections = await this.workerManager.detect(imageData);
      const detection = detections.length > 0 ? detections[0] : null;
      // Update metrics
      this.metrics.inferenceTime = performance.now() - startTime;
      console.log('✅ Trigger detection completed:', detection);
      // Trigger callbacks for immediate display
      if (this.onDetectionCallback && detection) {
        this.onDetectionCallback(detection);
      }
      return detection;
    } catch (error) {
      console.error('❌ Trigger detection failed:', error);
      throw error;
    }
  }
  /**
   * Identify product SKU from detected shoe
   * @param videoElement - Video element to capture image from
   * @returns Promise<string | null> - Product SKU if identified successfully
   *   (errors are swallowed and reported as null, after logging).
   */
  async identifyProductSKU(videoElement: HTMLVideoElement): Promise<string | null> {
    try {
      console.log('🔍 Starting product SKU identification...');
      // Capture high-quality image for SKU identification
      const imageData = this.captureVideoFrame(videoElement, true);
      // Call SKU identification service
      const sku = await skuIdentificationService.identifySKU(imageData);
      if (sku) {
        console.log('✅ Product SKU identified:', sku);
      } else {
        console.log('❌ No valid SKU found');
      }
      return sku;
    } catch (error) {
      console.error('❌ SKU identification failed:', error);
      return null;
    }
  }
  /**
   * Continuous detection loop. Self-schedules via requestAnimationFrame while
   * isRunning is true; processes only every config.frameSkip-th frame.
   */
  private async runContinuousLoop(videoElement: HTMLVideoElement): Promise<void> {
    if (!this.isRunning) return;
    // Frame skipping logic
    this.frameSkipCounter++;
    if (this.frameSkipCounter < this.config.frameSkip) {
      // Skip this frame, schedule next iteration
      requestAnimationFrame(() => this.runContinuousLoop(videoElement));
      return;
    }
    this.frameSkipCounter = 0;
    // Only log every 10th detection to reduce noise
    if (this.detectionCount % 10 === 0) {
      console.log(`🔄 Continuous detection running... (${this.detectionCount} inferences)`);
    }
    try {
      const startTime = performance.now();
      // Capture image data for continuous detection (lower quality)
      const imageData = this.captureVideoFrame(videoElement, false);
      // Use worker manager for detection
      const detections = await this.workerManager.detect(imageData);
      const detection = detections.length > 0 ? detections[0] : null;
      const inferenceTime = performance.now() - startTime;
      console.log('⚡ Continuous detection completed:', { time: inferenceTime, detection });
      // Apply temporal filtering
      const validDetection = this.applyTemporalFiltering(detection);
      // Update metrics
      this.updateMetrics(inferenceTime);
      // Trigger callbacks (only if we have a valid detection)
      // Use a debounced approach to avoid too frequent updates
      if (this.onDetectionCallback) {
        // Only update if it's been at least 100ms since last detection callback for continuous
        const now = Date.now();
        if (!this.lastDetectionCallbackTime || now - this.lastDetectionCallbackTime > 100) {
          this.onDetectionCallback(validDetection);
          this.lastDetectionCallbackTime = now;
        }
      }
    } catch (error) {
      console.error('Continuous detection error:', error);
    }
    // Schedule next iteration
    if (this.isRunning) {
      requestAnimationFrame(() => this.runContinuousLoop(videoElement));
    }
  }
  /**
   * Capture frame from video element into an ImageData buffer.
   * @param highQuality true = 640x480 (trigger mode), false = 320x240 (continuous).
   */
  private captureVideoFrame(videoElement: HTMLVideoElement, highQuality: boolean): ImageData {
    const canvas = document.createElement('canvas');
    const ctx = canvas.getContext('2d')!;
    // Use different resolutions based on detection mode
    const [targetWidth, targetHeight] = highQuality
      ? [640, 480] // High quality for trigger detection
      : [320, 240]; // Lower quality for continuous detection
    canvas.width = targetWidth;
    canvas.height = targetHeight;
    // Draw video frame to canvas
    ctx.drawImage(videoElement, 0, 0, targetWidth, targetHeight);
    // Extract image data
    const imageData = ctx.getImageData(0, 0, targetWidth, targetHeight);
    // Cleanup
    canvas.remove();
    return imageData;
  }
  /**
   * Apply temporal consistency filtering to reduce false positives.
   * Windows used: misses keep showing the last valid detection while >= 2
   * detections from the last 1s remain; history is capped at 3s; a hit is
   * promoted to "valid" once >= 2 detections land within the last 500ms.
   */
  private applyTemporalFiltering(detection: DetectionResult | null): DetectionResult | null {
    if (!detection) {
      // No detection - decay previous detections
      this.detectionHistory = this.detectionHistory.filter(d =>
        Date.now() - d.timestamp < 1000 // Keep detections from last second
      );
      // If we have recent consistent detections, continue showing them
      if (this.detectionHistory.length >= 2) {
        return this.lastValidDetection;
      }
      return null;
    }
    // Add current detection to history
    this.detectionHistory.push(detection);
    // Keep only recent detections (last 3 seconds)
    this.detectionHistory = this.detectionHistory.filter(d =>
      Date.now() - d.timestamp < 3000
    );
    // Check temporal consistency
    const recentDetections = this.detectionHistory.filter(d =>
      Date.now() - d.timestamp < 500 // Last 500ms
    );
    if (recentDetections.length >= 2) {
      // We have consistent detections - this is likely valid
      this.lastValidDetection = detection;
      return detection;
    }
    // Not enough temporal consistency yet
    return this.lastValidDetection;
  }
  /**
   * Update performance metrics and notify the metrics callback, if any.
   */
  private updateMetrics(inferenceTime: number): void {
    this.detectionCount++;
    this.metrics = {
      fps: 0, // Placeholder, as PerformanceMonitor is removed
      inferenceTime: inferenceTime,
      memoryUsage: this.getMemoryUsage()
    };
    if (this.onMetricsCallback) {
      this.onMetricsCallback(this.metrics);
    }
  }
  /**
   * Get current memory usage (rough estimate) in MB.
   * Relies on the non-standard performance.memory API (Chromium-only);
   * returns 0 where it is unavailable.
   */
  private getMemoryUsage(): number {
    const memInfo = (performance as Performance & { memory?: { usedJSHeapSize: number } }).memory;
    if (memInfo && memInfo.usedJSHeapSize) {
      return memInfo.usedJSHeapSize / (1024 * 1024); // MB
    }
    return 0;
  }
  /**
   * Set detection callback (replaces any previously registered callback).
   */
  onDetection(callback: (detection: DetectionResult | null) => void): void {
    this.onDetectionCallback = callback;
  }
  /**
   * Set metrics callback (replaces any previously registered callback).
   */
  onMetrics(callback: (metrics: DetectionMetrics) => void): void {
    this.onMetricsCallback = callback;
  }
  /**
   * Update configuration. Merges the partial config over the current one and
   * pushes the result to the worker.
   */
  async updateConfig(newConfig: Partial<DetectionConfig>): Promise<void> {
    this.config = { ...this.config, ...newConfig };
    await this.workerManager.configure(this.config);
    console.log('Configuration updated:', this.config);
  }
  /**
   * Get current configuration (defensive shallow copy).
   */
  getConfig(): DetectionConfig {
    return { ...this.config };
  }
  /**
   * Get current metrics (defensive shallow copy).
   */
  getMetrics(): DetectionMetrics {
    return { ...this.metrics };
  }
  /**
   * Check if detection is running
   */
  isDetectionRunning(): boolean {
    return this.isRunning;
  }
  /**
   * Destroy the detection engine: stop the loop and tear down the worker.
   */
  destroy(): void {
    this.stopContinuousDetection();
    this.workerManager.destroy();
  }
}

View File

@@ -1,216 +0,0 @@
import type { DetectionConfig, DetectionResult, DetectionMetrics, WorkerMessage, WorkerResponse } from './types';
import { ModelCache } from './model-cache';
import { MODEL_VARIANTS } from './model-config';
/**
 * Manages the detection worker and handles communication.
 *
 * Wraps a module web worker behind a promise-based request/response protocol:
 * each outgoing message carries a unique id, and the matching response settles
 * the promise registered for that id.
 */
export class DetectionWorkerManager {
  // How long to wait for a worker reply before rejecting the pending message.
  // (Was hard-coded as 90000 with a stale "30 seconds" comment.)
  private static readonly MESSAGE_TIMEOUT_MS = 90000;
  private worker: Worker | null = null;
  private messageId = 0;
  // `any` is deliberate: resolvers for heterogeneous message types share one
  // map, and Promise resolve callbacks are contravariant in their parameter,
  // so `unknown` would not typecheck here.
  private pendingMessages = new Map<string, { resolve: (value: any) => void; reject: (reason?: any) => void }>();
  private modelCache = new ModelCache();
  private isWorkerReady = false;
  constructor() {
    this.initializeWorker();
  }
  /** Spins up the module worker, wires handlers, and sends INITIALIZE. */
  private async initializeWorker() {
    try {
      // Create worker from the detection worker file
      this.worker = new Worker(
        new URL('../../workers/detection-worker.ts', import.meta.url),
        { type: 'module' }
      );
      this.worker.onmessage = (event: MessageEvent<WorkerResponse>) => {
        this.handleWorkerMessage(event.data);
      };
      this.worker.onerror = (error) => {
        console.error('Worker error:', error);
        this.isWorkerReady = false;
      };
      // Flag ready before the INITIALIZE round-trip so sendMessage accepts it.
      this.isWorkerReady = true;
      console.log('Detection worker initialized');
      await this.sendMessage('INITIALIZE', undefined);
    } catch (error) {
      console.error('Failed to initialize worker:', error);
      this.isWorkerReady = false;
    }
  }
  /** Routes a worker response to the promise waiting on its message id. */
  private handleWorkerMessage(message: WorkerResponse) {
    const { type, id } = message;
    const pending = this.pendingMessages.get(id);
    if (!pending) {
      console.warn('Received response for unknown message ID:', id);
      return;
    }
    this.pendingMessages.delete(id);
    if (type === 'ERROR') {
      pending.reject(new Error((message as { error: string }).error));
    } else if (type === 'DETECTION_RESULT') {
      const detectionMessage = message as { result: DetectionResult | null };
      pending.resolve({ result: detectionMessage.result });
    } else if (type === 'INITIALIZED') {
      pending.resolve(undefined);
    } else if (type === 'METRICS_UPDATE') {
      pending.resolve({ metrics: (message as { metrics: Partial<DetectionMetrics> }).metrics });
    } else if (type === 'LOADED_MODEL') {
      pending.resolve(undefined);
    } else if (type === 'CONFIGURED') {
      pending.resolve(undefined);
    } else {
      // Unknown-but-addressed response: surface it as-is.
      pending.resolve(message);
    }
  }
  /**
   * Sends one typed message to the worker and resolves with its response.
   * The pending entry is registered only after the message is successfully
   * built (so an unknown type cannot leak a map entry), and the timeout timer
   * is cleared as soon as the reply settles the promise.
   * @throws When the worker is unavailable or the reply times out.
   */
  private async sendMessage<T>(type: WorkerMessage['type'], payload: unknown): Promise<T> {
    if (!this.worker || !this.isWorkerReady) {
      throw new Error('Worker not available');
    }
    const id = (this.messageId++).toString();
    return new Promise((resolve, reject) => {
      // Build a correctly-shaped message for each request type first.
      let message: WorkerMessage & { id: string };
      if (type === 'INITIALIZE') {
        message = { type, id };
      } else if (type === 'DETECT') {
        message = { type, imageData: (payload as { imageData: ImageData }).imageData, id };
      } else if (type === 'UPDATE_CONFIG' || type === 'CONFIGURE') {
        message = { type, config: payload as DetectionConfig, id };
      } else if (type === 'LOAD_MODEL') {
        const modelPayload = payload as { variant: 'quantized' | 'standard' | 'full'; modelData: ArrayBuffer };
        message = { type, variant: modelPayload.variant, modelData: modelPayload.modelData, id };
      } else {
        throw new Error(`Unknown message type for sendMessage: ${type}`);
      }
      // Reject if the worker never answers.
      const timer = setTimeout(() => {
        if (this.pendingMessages.has(id)) {
          this.pendingMessages.delete(id);
          reject(new Error('Worker message timeout'));
        }
      }, DetectionWorkerManager.MESSAGE_TIMEOUT_MS);
      // Wrap the resolvers so a settled message always cancels its timer.
      this.pendingMessages.set(id, {
        resolve: (value: any) => {
          clearTimeout(timer);
          resolve(value);
        },
        reject: (reason?: any) => {
          clearTimeout(timer);
          reject(reason);
        }
      });
      this.worker!.postMessage(message);
    });
  }
  /**
   * Load a model into the worker
   * @param variant Which model size to load.
   * @param onProgress Optional download-progress callback (0..1).
   * @throws Re-throws cache/download or worker failures after logging.
   */
  async loadModel(variant: 'quantized' | 'standard' | 'full', onProgress?: (progress: number) => void): Promise<void> {
    const modelInfo = MODEL_VARIANTS[variant];
    try {
      // Get model data from cache or download
      const modelData = await this.modelCache.getModel(variant, modelInfo, onProgress);
      // Send model data to worker
      await this.sendMessage('LOAD_MODEL', {
        variant,
        modelData
      });
      console.log(`Model ${variant} loaded successfully`);
    } catch (error) {
      console.error(`Failed to load model ${variant}:`, error);
      throw error;
    }
  }
  /**
   * Configure the detection settings
   */
  async configure(config: DetectionConfig): Promise<void> {
    await this.sendMessage('CONFIGURE', config);
  }
  /**
   * Perform detection on an image.
   * @returns Zero or one results wrapped in an array (the worker reports at
   *   most a single detection per frame).
   */
  async detect(imageData: ImageData): Promise<DetectionResult[]> {
    if (!this.isWorkerReady) {
      throw new Error('Worker not ready');
    }
    try {
      const results = await this.sendMessage<{ result: DetectionResult | null }>('DETECT', { imageData });
      // Handle the case where results or results.result is undefined
      if (!results || results.result === undefined || results.result === null) {
        return [];
      }
      return [results.result];
    } catch (error) {
      console.error('Detection failed:', error);
      throw error;
    }
  }
  /**
   * Get worker metrics
   */
  async getMetrics(): Promise<object> {
    // Metrics are tracked locally, no need to query worker
    return {};
  }
  /**
   * Check if worker is ready
   */
  isReady(): boolean {
    return this.isWorkerReady;
  }
  /**
   * Terminate the worker and fail all in-flight requests.
   */
  destroy() {
    if (this.worker) {
      this.worker.terminate();
      this.worker = null;
      this.isWorkerReady = false;
    }
    // Reject all pending messages (also clears their timeout timers).
    this.pendingMessages.forEach(({ reject }) => {
      reject(new Error('Worker terminated'));
    });
    this.pendingMessages.clear();
  }
}
/**
 * Singleton instance manager for the shared DetectionWorkerManager.
 */
let workerManager: DetectionWorkerManager | null = null;
/** Returns the shared manager, lazily creating it on first use. */
export function getDetectionWorkerManager(): DetectionWorkerManager {
  workerManager ??= new DetectionWorkerManager();
  return workerManager;
}
/** Tears down the shared manager (if any) and clears the singleton slot. */
export function destroyDetectionWorkerManager() {
  if (!workerManager) {
    return;
  }
  workerManager.destroy();
  workerManager = null;
}

View File

@@ -1,65 +0,0 @@
import type { DeviceCapabilities, DeviceTier } from './types';
import { DEFAULT_CONFIG } from './model-config';
/**
 * Detects device capabilities to suggest an optimal performance configuration.
 * Probes WebGL support, logical CPU core count, and (where exposed) device
 * memory, then buckets the device into a high/mid/low tier.
 */
export function detectDeviceCapabilities(): DeviceCapabilities {
  // WebGL probe: any failure (no DOM, blocked context) counts as "no WebGL".
  let hasWebGL = false;
  try {
    const probe = document.createElement('canvas');
    hasWebGL = !!(window.WebGLRenderingContext && (probe.getContext('webgl') || probe.getContext('experimental-webgl')));
  } catch {
    hasWebGL = false;
  }
  const cpuCores = navigator.hardwareConcurrency || 2; // Default to 2 if undefined
  // @ts-expect-error - deviceMemory is not in all browsers
  const memory = navigator.deviceMemory || 1; // Default to 1GB if undefined
  // Tier thresholds: >=8 cores & >=4GB → high; >=4 cores & >=2GB → mid.
  const tier: DeviceTier =
    cpuCores >= 8 && memory >= 4 ? 'high'
    : cpuCores >= 4 && memory >= 2 ? 'mid'
    : 'low';
  return { tier, hasWebGL, cpuCores, memory };
}
/**
 * Gets a recommended configuration based on the detected device tier.
 *
 * All tiers currently share the same frame-skip and confidence tuning; only
 * the model variant differs (the lighter quantized model on low-end devices).
 * @param capabilities The detected device capabilities.
 * @returns A full DetectionConfig with recommended settings.
 */
export function getRecommendedConfig(capabilities: DeviceCapabilities): typeof DEFAULT_CONFIG {
  const isCapable = capabilities.tier === 'high' || capabilities.tier === 'mid';
  return {
    ...DEFAULT_CONFIG,
    modelVariant: isCapable ? 'standard' : 'quantized',
    frameSkip: 6,
    confidenceThreshold: 0.8,
  };
}

View File

@@ -1,114 +0,0 @@
import type { DetectionResult } from './types';
import { VALIDATION_RULES } from './model-config';
/**
 * A temporal filter to smooth detections and reduce flickering.
 *
 * Keeps a sliding window of the last `consistencyFrames` results and only
 * reports a detection once every slot in the window holds one, suppressing
 * one-off false positives.
 */
class TemporalFilter {
  private history: (DetectionResult | null)[] = [];
  constructor(private consistencyFrames: number) {
    this.history = new Array(consistencyFrames).fill(null);
  }
  /**
   * Push the latest frame's result into the window.
   * @param detection The detection for the newest frame, or null if none.
   * @returns The most confident detection in the window when all
   *          `consistencyFrames` slots contain a detection, otherwise null.
   */
  add(detection: DetectionResult | null): DetectionResult | null {
    this.history.shift();
    this.history.push(detection);
    // Type predicate lets TypeScript narrow away nulls without `!` assertions.
    const recentDetections = this.history.filter(
      (d): d is DetectionResult => d !== null
    );
    if (recentDetections.length >= this.consistencyFrames) {
      // Return the most confident detection from the recent history
      return recentDetections.reduce((prev, current) =>
        prev.confidence > current.confidence ? prev : current
      );
    }
    return null;
  }
}
/**
 * The InferencePipeline class handles post-processing of model outputs,
 * including filtering, validation, and temporal smoothing to prevent false positives.
 */
export class InferencePipeline {
  private temporalFilter: TemporalFilter;
  constructor() {
    this.temporalFilter = new TemporalFilter(VALIDATION_RULES.temporalConsistencyFrames);
  }
  /**
   * Processes the raw output from the TensorFlow.js model.
   * @param boxes Raw bounding boxes, flattened as [y_min, x_min, y_max, x_max] per detection.
   * @param scores Raw confidence scores in [0, 1].
   * @param classes Raw class indices (currently unused; every detection is labeled 'shoe').
   * @param confidenceThreshold The current confidence threshold in [0, 1].
   * @returns A single, validated DetectionResult or null.
   */
  process(boxes: number[], scores: number[], classes: number[], confidenceThreshold: number): DetectionResult | null {
    const detections: DetectionResult[] = [];
    // Process up to 5 detections like the working implementation
    for (let i = 0; i < Math.min(5, scores.length); i++) {
      const score = scores[i];
      // Compare directly in [0, 1]; the old score*100 vs threshold*100 round-trip
      // was redundant and only added floating-point noise.
      if (score < confidenceThreshold) continue;
      // Extract bounding box [y_min, x_min, y_max, x_max] like working implementation
      const yMin = boxes[i * 4];
      const xMin = boxes[i * 4 + 1];
      const yMax = boxes[i * 4 + 2];
      const xMax = boxes[i * 4 + 3];
      // Convert to [x, y, width, height] format
      const bbox: [number, number, number, number] = [xMin, yMin, xMax - xMin, yMax - yMin];
      const detection: DetectionResult = {
        bbox,
        confidence: score,
        class: 'shoe', // Assume all detections are shoes
        timestamp: Date.now()
      };
      if (this.isValid(detection)) {
        detections.push(detection);
      }
    }
    if (detections.length === 0) {
      // Still feed the filter so a gap frame resets temporal consistency.
      return this.temporalFilter.add(null);
    }
    // Get the single best detection
    const bestDetection = detections.reduce((prev, current) => (prev.confidence > current.confidence ? prev : current));
    return this.temporalFilter.add(bestDetection);
  }
  /**
   * Validates a detection against a set of rules.
   * @param detection The detection to validate.
   * @returns True if the detection is valid, false otherwise.
   */
  private isValid(detection: DetectionResult): boolean {
    const { bbox } = detection;
    const [, , width, height] = bbox;
    // Bounding box size validation (relative to a 320x320 input).
    // NOTE(review): DEFAULT_CONFIG.inputSize elsewhere in this module is
    // [300, 300] — confirm which resolution these pixel thresholds should
    // actually be scaled against.
    const boxPixelWidth = width * 320;
    const boxPixelHeight = height * 320;
    if (boxPixelWidth < VALIDATION_RULES.minBoundingBoxSize || boxPixelHeight < VALIDATION_RULES.minBoundingBoxSize) {
      return false;
    }
    // Aspect ratio validation
    const aspectRatio = boxPixelWidth / boxPixelHeight;
    if (aspectRatio < VALIDATION_RULES.aspectRatioRange[0] || aspectRatio > VALIDATION_RULES.aspectRatioRange[1]) {
      return false;
    }
    return true;
  }
}

View File

@@ -1,269 +0,0 @@
import type { ModelInfo } from './types';
// IndexedDB database / object-store identifiers for the model cache.
const DB_NAME = 'ShoeDetectionModels';
const DB_VERSION = 1;
const STORE_NAME = 'models';
/**
 * Shape of a model record as persisted in IndexedDB.
 */
export interface CachedModel {
  id: string; // Store key, e.g. 'shoe-detection-quantized'
  variant: 'quantized' | 'standard' | 'full';
  data: ArrayBuffer; // Raw model bytes as downloaded
  metadata: ModelInfo;
  timestamp: number; // Epoch ms when the record was written (used for expiry)
  version: string; // Cache version, compared against the required version on lookup
}
/**
 * IndexedDB-based model cache for TensorFlow.js models.
 *
 * Persists downloaded model binaries so subsequent page loads can skip the
 * network. All operations wait for the async database open to complete first.
 */
export class ModelCache {
  // Open database handle; null until init() resolves.
  private db: IDBDatabase | null = null;
  // Promise for the in-flight open, awaited by ensureReady().
  private initPromise: Promise<void> | null = null;
  constructor() {
    this.initPromise = this.init();
  }
  /**
   * Initialize IndexedDB: open the database and create the object store plus
   * its indexes on first run (or version upgrade).
   */
  private async init(): Promise<void> {
    return new Promise((resolve, reject) => {
      const request = indexedDB.open(DB_NAME, DB_VERSION);
      request.onerror = () => {
        console.error('Failed to open IndexedDB:', request.error);
        reject(request.error);
      };
      request.onsuccess = () => {
        this.db = request.result;
        resolve();
      };
      request.onupgradeneeded = (event) => {
        const db = (event.target as IDBOpenDBRequest).result;
        // Create models store
        if (!db.objectStoreNames.contains(STORE_NAME)) {
          const store = db.createObjectStore(STORE_NAME, { keyPath: 'id' });
          store.createIndex('variant', 'variant', { unique: false });
          store.createIndex('timestamp', 'timestamp', { unique: false });
        }
      };
    });
  }
  /**
   * Ensure the database is ready before any read/write.
   * @throws If the open failed and `db` was never set.
   */
  private async ensureReady(): Promise<void> {
    if (this.initPromise) {
      await this.initPromise;
    }
    if (!this.db) {
      throw new Error('Database not initialized');
    }
  }
  /**
   * Cache a model in IndexedDB (upsert keyed by variant).
   * @param variant Which model build this is.
   * @param modelData Raw model bytes.
   * @param metadata Descriptive info stored alongside the bytes.
   */
  async cacheModel(variant: 'quantized' | 'standard' | 'full', modelData: ArrayBuffer, metadata: ModelInfo): Promise<void> {
    await this.ensureReady();
    return new Promise((resolve, reject) => {
      const transaction = this.db!.transaction([STORE_NAME], 'readwrite');
      const store = transaction.objectStore(STORE_NAME);
      const cachedModel: CachedModel = {
        id: `shoe-detection-${variant}`,
        variant,
        data: modelData,
        metadata,
        timestamp: Date.now(),
        // NOTE(review): version is hard-coded here and in getModel(); keep the
        // two in sync (or hoist to a shared constant) when bumping.
        version: '1.0.0'
      };
      const request = store.put(cachedModel);
      request.onsuccess = () => {
        console.log(`Model ${variant} cached successfully`);
        resolve();
      };
      request.onerror = () => {
        console.error(`Failed to cache model ${variant}:`, request.error);
        reject(request.error);
      };
    });
  }
  /**
   * Retrieve a cached model record, or null when the variant is absent.
   */
  async getCachedModel(variant: 'quantized' | 'standard' | 'full'): Promise<CachedModel | null> {
    await this.ensureReady();
    return new Promise((resolve, reject) => {
      const transaction = this.db!.transaction([STORE_NAME], 'readonly');
      const store = transaction.objectStore(STORE_NAME);
      const request = store.get(`shoe-detection-${variant}`);
      request.onsuccess = () => {
        resolve(request.result || null);
      };
      request.onerror = () => {
        reject(request.error);
      };
    });
  }
  /**
   * Check if a model is cached and matches the required cache version.
   * Lookup errors are swallowed and reported as "not cached".
   */
  async isModelCached(variant: 'quantized' | 'standard' | 'full', requiredVersion: string): Promise<boolean> {
    try {
      const cached = await this.getCachedModel(variant);
      return cached !== null && cached.version === requiredVersion;
    } catch (error) {
      console.error('Error checking cached model:', error);
      return false;
    }
  }
  /**
   * Download a model, streaming progress via `onProgress` (0-100, only when
   * the server sends Content-Length), then persist it in the cache.
   * @returns The downloaded model bytes.
   * @throws On HTTP failure or a missing response body stream.
   */
  async downloadAndCacheModel(variant: 'quantized' | 'standard' | 'full', modelInfo: ModelInfo, onProgress?: (progress: number) => void): Promise<ArrayBuffer> {
    console.log(`Downloading model ${variant} from ${modelInfo.url}`);
    const response = await fetch(modelInfo.url);
    if (!response.ok) {
      throw new Error(`Failed to download model: ${response.statusText}`);
    }
    const contentLength = response.headers.get('content-length');
    const total = contentLength ? parseInt(contentLength, 10) : 0;
    let loaded = 0;
    const reader = response.body?.getReader();
    const chunks: Uint8Array[] = [];
    if (!reader) {
      throw new Error('Failed to get response reader');
    }
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      chunks.push(value);
      loaded += value.length;
      if (onProgress && total > 0) {
        onProgress((loaded / total) * 100);
      }
    }
    // Combine chunks into single ArrayBuffer
    const totalLength = chunks.reduce((acc, chunk) => acc + chunk.length, 0);
    const result = new Uint8Array(totalLength);
    let offset = 0;
    for (const chunk of chunks) {
      result.set(chunk, offset);
      offset += chunk.length;
    }
    const modelData = result.buffer;
    // Cache the model
    await this.cacheModel(variant, modelData, modelInfo);
    return modelData;
  }
  /**
   * Get a model's bytes: serve from cache when present at version '1.0.0',
   * otherwise download and cache it.
   */
  async getModel(variant: 'quantized' | 'standard' | 'full', modelInfo: ModelInfo, onProgress?: (progress: number) => void): Promise<ArrayBuffer> {
    // Check if model is already cached
    const isCache = await this.isModelCached(variant, '1.0.0');
    if (isCache) {
      console.log(`Using cached model ${variant}`);
      const cached = await this.getCachedModel(variant);
      return cached!.data;
    }
    // Download and cache the model
    return await this.downloadAndCacheModel(variant, modelInfo, onProgress);
  }
  /**
   * Clear cached models older than `maxAge` milliseconds (default: 7 days),
   * walking the timestamp index with a cursor.
   */
  async clearOldModels(maxAge: number = 7 * 24 * 60 * 60 * 1000): Promise<void> {
    await this.ensureReady();
    const cutoffTime = Date.now() - maxAge;
    return new Promise((resolve, reject) => {
      const transaction = this.db!.transaction([STORE_NAME], 'readwrite');
      const store = transaction.objectStore(STORE_NAME);
      const index = store.index('timestamp');
      const range = IDBKeyRange.upperBound(cutoffTime);
      const request = index.openCursor(range);
      request.onsuccess = (event) => {
        const cursor = (event.target as IDBRequest).result;
        if (cursor) {
          cursor.delete();
          cursor.continue();
        } else {
          // Cursor exhausted: all expired records deleted.
          console.log('Old models cleared');
          resolve();
        }
      };
      request.onerror = () => {
        reject(request.error);
      };
    });
  }
  /**
   * Get cache storage usage: total byte size, record count, and the variant
   * names currently cached.
   */
  async getCacheStats(): Promise<{ totalSize: number; modelCount: number; models: string[] }> {
    await this.ensureReady();
    return new Promise((resolve, reject) => {
      const transaction = this.db!.transaction([STORE_NAME], 'readonly');
      const store = transaction.objectStore(STORE_NAME);
      const request = store.getAll();
      request.onsuccess = () => {
        const models = request.result as CachedModel[];
        const totalSize = models.reduce((sum, model) => sum + model.data.byteLength, 0);
        const modelNames = models.map(m => m.variant);
        resolve({
          totalSize,
          modelCount: models.length,
          models: modelNames
        });
      };
      request.onerror = () => {
        reject(request.error);
      };
    });
  }
}

View File

@@ -1,57 +0,0 @@
import type { DetectionConfig, ModelInfo } from './types';
/**
 * Configuration for different model variants.
 * All three variants currently point at the same model file; they exist so
 * call sites can request distinct speed/accuracy profiles once separate
 * model builds are published.
 */
export const MODEL_VARIANTS: Record<'quantized' | 'standard' | 'full', ModelInfo> = {
  quantized: {
    variant: 'quantized',
    url: '/models/model.json',
    size: 2 * 1024 * 1024, // ~2MB
    name: 'SSD-MobileNetV2 Quantized',
    description: 'Fastest, for continuous detection.'
  },
  standard: {
    variant: 'standard',
    url: '/models/model.json',
    size: 2 * 1024 * 1024, // Same model, different configs
    name: 'SSD-MobileNetV2 Standard',
    description: 'Balanced speed and accuracy.'
  },
  full: {
    variant: 'full',
    url: '/models/model.json',
    size: 2 * 1024 * 1024, // Same model, different configs
    name: 'SSD-MobileNetV2 Full',
    description: 'Most accurate, for on-demand scan.'
  }
};
/**
 * Default detection configuration.
 */
export const DEFAULT_CONFIG: DetectionConfig = {
  frameSkip: 6, // Run inference on every 6th frame
  confidenceThreshold: 0.8, // Default to 80% confidence
  modelVariant: 'standard',
  maxDetections: 5, // Match the working implementation (process up to 5 detections)
  inputSize: [300, 300], // Match the working implementation
  enableContinuous: true,
  enableTrigger: true,
};
/**
 * Class labels for the models.
 * IMPORTANT: This must match the order of the model's output classes.
 */
export const CLASS_LABELS = ['shoe'];
/**
 * Rules to validate detections and reduce false positives.
 */
export const VALIDATION_RULES = {
  minBoundingBoxSize: 30, // Minimum pixel width/height of a bounding box
  aspectRatioRange: [0.5, 2.0], // Plausible aspect ratio (width / height) for a shoe
  temporalConsistencyFrames: 3, // Must be detected in N consecutive frames
};

View File

@@ -1,84 +0,0 @@
/**
 * This file contains all the TypeScript interfaces for the ML detection system.
 */
/**
 * Configuration for the detection engine.
 */
export interface DetectionConfig {
  frameSkip: number; // Run inference on every Nth frame
  confidenceThreshold: number; // Minimum score in [0, 1] to accept a detection
  modelVariant: 'quantized' | 'standard' | 'full';
  maxDetections: number; // Upper bound on detections processed per frame
  inputSize: [number, number]; // Model input resolution [width, height]
  enableContinuous: boolean; // Allow continuous (per-frame) detection
  enableTrigger: boolean; // Allow on-demand single-shot detection
}
/**
 * Information about a specific model variant.
 */
export interface ModelInfo {
  variant: 'quantized' | 'standard' | 'full';
  url: string;
  size: number; // in bytes
  name: string;
  description: string;
}
/**
 * Represents a single detected object.
 */
export interface DetectionResult {
  bbox: [number, number, number, number]; // [x, y, width, height]
  confidence: number; // Score in [0, 1]
  class: string; // Label, e.g. 'shoe'
  timestamp: number; // Epoch ms when the detection was produced
}
/**
 * Defines the performance tier of the user's device.
 */
export type DeviceTier = 'low' | 'mid' | 'high';
/**
 * Holds information about the device's capabilities.
 */
export interface DeviceCapabilities {
  tier: DeviceTier;
  hasWebGL: boolean;
  cpuCores: number;
  memory: number; // in GB
}
/**
 * Performance metrics for the detection engine.
 */
export interface DetectionMetrics {
  fps: number;
  inferenceTime: number; // per-inference latency; presumably ms — TODO confirm
  memoryUsage: number; // in MB
}
/**
 * Detection mode type.
 */
export type DetectionMode = 'continuous' | 'trigger' | 'hybrid';
/**
 * Types for messages sent to and from the detection worker.
 * NOTE(review): responses carry a correlation `id` but requests do not —
 * confirm how the worker pairs a DETECTION_RESULT with its DETECT request.
 */
export type WorkerMessage =
  | { type: 'INITIALIZE' }
  | { type: 'DETECT'; imageData: ImageData }
  | { type: 'UPDATE_CONFIG'; config: DetectionConfig }
  | { type: 'LOAD_MODEL'; variant: 'quantized' | 'standard' | 'full'; modelData: ArrayBuffer }
  | { type: 'CONFIGURE'; config: DetectionConfig };
export type WorkerResponse =
  | { type: 'INITIALIZED'; id: string }
  | { type: 'DETECTION_RESULT'; result: DetectionResult | null; id: string }
  | { type: 'METRICS_UPDATE'; metrics: Partial<DetectionMetrics>; id: string }
  | { type: 'ERROR'; error: string; id: string }
  | { type: 'LOADED_MODEL'; id: string }
  | { type: 'CONFIGURED'; id: string };

View File

@@ -1,307 +0,0 @@
import { useEffect, useRef, useState, useCallback } from 'react';
import type { DetectionConfig, DetectionResult, DetectionMetrics } from './types';
import { DetectionEngine } from './detection-engine';
/**
 * Options accepted by the useDetection hook.
 */
interface UseDetectionOptions {
  modelVariant?: 'quantized' | 'standard' | 'full'; // Model to load (default: 'standard')
  enableContinuous?: boolean; // Permit startContinuous() (default: true)
  enableTrigger?: boolean; // Permit triggerDetection() (default: true)
  onDetection?: (detection: DetectionResult | null) => void; // Called on every detection update
  onError?: (error: Error) => void; // Called when initialization or detection fails
}
/**
 * Value returned by useDetection: current state plus imperative actions.
 */
interface UseDetectionReturn {
  // State
  isLoading: boolean;
  isDetecting: boolean;
  currentDetection: DetectionResult | null;
  metrics: DetectionMetrics | null;
  error: string | null;
  // Actions
  initialize: (videoElement: HTMLVideoElement) => Promise<DetectionEngine>;
  startContinuous: () => void;
  stopContinuous: () => void;
  triggerDetection: () => Promise<DetectionResult | null>;
  updateConfig: (config: Partial<DetectionConfig>) => Promise<void>;
  setDetectionCallback: (callback: (detection: DetectionResult | null) => void) => void;
  // Config
  config: DetectionConfig | null;
  // Engine reference
  detectionEngine: DetectionEngine | null;
}
/**
 * React hook for shoe detection functionality.
 *
 * Owns a DetectionEngine instance: creates it in initialize(), wires its
 * detection/metrics events into React state, and exposes start/stop/trigger
 * actions plus configuration updates. The engine is destroyed on unmount.
 */
export function useDetection(options: UseDetectionOptions = {}): UseDetectionReturn {
  const {
    modelVariant = 'standard',
    enableContinuous = true,
    enableTrigger = true,
    onDetection,
    onError
  } = options;
  // Store the callback in a ref so it can be updated without re-initializing
  const detectionCallbackRef = useRef<((detection: DetectionResult | null) => void) | undefined>(onDetection);
  // State
  const [isLoading, setIsLoading] = useState(false);
  const [isDetecting, setIsDetecting] = useState(false);
  const [currentDetection, setCurrentDetection] = useState<DetectionResult | null>(null);
  const [metrics, setMetrics] = useState<DetectionMetrics | null>(null);
  const [error, setError] = useState<string | null>(null);
  const [config, setConfig] = useState<DetectionConfig | null>(null);
  // Refs
  const detectionEngineRef = useRef<DetectionEngine | null>(null);
  const videoElementRef = useRef<HTMLVideoElement | null>(null);
  // NOTE(review): the ref is typed Promise<void> but actually stores (and the
  // early-return below hands back) the promise that resolves a DetectionEngine;
  // align the type parameter with the initialize() return type.
  const initializationPromiseRef = useRef<Promise<void> | null>(null);
  // Initialize detection engine
  const initialize = useCallback(async (videoElement: HTMLVideoElement): Promise<DetectionEngine> => {
    console.log('🚀 useDetection.initialize called:', { videoElement: !!videoElement });
    // Prevent multiple initializations
    if (initializationPromiseRef.current) {
      console.log('⚠️ Initialization already in progress, returning existing promise');
      return initializationPromiseRef.current;
    }
    setIsLoading(true);
    setError(null);
    const initPromise = (async () => {
      try {
        console.log('🏗️ Creating detection engine...');
        // Create detection engine
        const engine = new DetectionEngine();
        detectionEngineRef.current = engine;
        videoElementRef.current = videoElement;
        // Set up event listeners
        engine.onDetection((detection) => {
          setCurrentDetection(detection);
          detectionCallbackRef.current?.(detection);
        });
        engine.onMetrics((newMetrics) => {
          setMetrics(newMetrics);
        });
        // Initialize with progress tracking
        await engine.initialize(modelVariant, (progress) => {
          // You could add progress state here if needed
          console.log(`Model loading: ${progress.toFixed(1)}%`);
        });
        // Get initial configuration
        const initialConfig = engine.getConfig();
        setConfig(initialConfig);
        console.log('Detection hook initialized successfully');
        return engine; // Return engine instance
      } catch (err) {
        const error = err instanceof Error ? err : new Error('Unknown initialization error');
        console.error('Detection initialization failed:', error);
        setError(error.message);
        onError?.(error);
        // NOTE(review): the promise ref is never cleared on failure, so every
        // later initialize() call returns this rejected promise until unmount
        // — confirm whether retry-after-failure should be supported.
        throw error;
      } finally {
        setIsLoading(false);
      }
    })();
    initializationPromiseRef.current = initPromise;
    return initPromise;
  // NOTE(review): onDetection is only read via detectionCallbackRef, yet it
  // sits in the dependency list — a new inline callback from the caller
  // recreates initialize() on every render; verify this is intended.
  }, [modelVariant, onDetection, onError]);
  // Start continuous detection
  const startContinuous = useCallback(() => {
    console.log('🔄 useDetection.startContinuous called:', {
      hasEngine: !!detectionEngineRef.current,
      hasVideo: !!videoElementRef.current,
      enableContinuous
    });
    if (!detectionEngineRef.current || !videoElementRef.current) {
      console.warn('Detection engine or video element not available');
      return;
    }
    if (!enableContinuous) {
      console.warn('Continuous detection is disabled');
      return;
    }
    try {
      console.log('🚀 Starting continuous detection...');
      detectionEngineRef.current.startContinuousDetection(videoElementRef.current);
      setIsDetecting(true);
      setError(null);
      console.log('✅ Continuous detection started successfully');
    } catch (err) {
      const error = err instanceof Error ? err : new Error('Failed to start continuous detection');
      console.error('❌ Start continuous detection failed:', error);
      setError(error.message);
      onError?.(error);
    }
  }, [enableContinuous, onError]);
  // Stop continuous detection (best-effort: failures are logged, not surfaced)
  const stopContinuous = useCallback(() => {
    if (!detectionEngineRef.current) {
      return;
    }
    try {
      detectionEngineRef.current.stopContinuousDetection();
      setIsDetecting(false);
      setCurrentDetection(null);
    } catch (err) {
      console.error('Stop continuous detection failed:', err);
    }
  }, []);
  // Trigger single detection (throws when unavailable, unlike startContinuous)
  const triggerDetection = useCallback(async (): Promise<DetectionResult | null> => {
    if (!detectionEngineRef.current || !videoElementRef.current) {
      throw new Error('Detection engine or video element not available');
    }
    if (!enableTrigger) {
      throw new Error('Trigger detection is disabled');
    }
    try {
      setError(null);
      const detection = await detectionEngineRef.current.triggerDetection(videoElementRef.current);
      // Update current detection state
      setCurrentDetection(detection);
      detectionCallbackRef.current?.(detection);
      return detection;
    } catch (err) {
      const error = err instanceof Error ? err : new Error('Trigger detection failed');
      console.error('Trigger detection failed:', error);
      setError(error.message);
      onError?.(error);
      throw error;
    }
  }, [enableTrigger, onError]);
  // Update configuration and mirror the engine's resulting config into state
  const updateConfig = useCallback(async (newConfig: Partial<DetectionConfig>): Promise<void> => {
    if (!detectionEngineRef.current) {
      throw new Error('Detection engine not available');
    }
    try {
      await detectionEngineRef.current.updateConfig(newConfig);
      const updatedConfig = detectionEngineRef.current.getConfig();
      setConfig(updatedConfig);
      setError(null);
    } catch (err) {
      const error = err instanceof Error ? err : new Error('Failed to update configuration');
      console.error('Update config failed:', error);
      setError(error.message);
      onError?.(error);
      throw error;
    }
  }, [onError]);
  // Cleanup on unmount: destroy the engine and drop all refs
  useEffect(() => {
    return () => {
      if (detectionEngineRef.current) {
        detectionEngineRef.current.destroy();
        detectionEngineRef.current = null;
      }
      initializationPromiseRef.current = null;
      videoElementRef.current = null;
    };
  }, []);
  return {
    // State
    isLoading,
    isDetecting,
    currentDetection,
    metrics,
    error,
    // Actions
    initialize,
    startContinuous,
    stopContinuous,
    triggerDetection,
    updateConfig,
    setDetectionCallback: (callback: (detection: DetectionResult | null) => void) => {
      detectionCallbackRef.current = callback;
    },
    // Config
    config,
    // Engine reference
    detectionEngine: detectionEngineRef.current
  };
}
/**
 * React hook that mirrors the engine's performance metrics into state.
 * Subscribes to metric updates and seeds state with the engine's current
 * snapshot whenever the engine instance changes.
 */
export function useDetectionMetrics(detectionEngine: DetectionEngine | null) {
  const [metrics, setMetrics] = useState<DetectionMetrics | null>(null);
  useEffect(() => {
    if (detectionEngine === null) {
      return;
    }
    // Route future updates straight into state, then seed with the current snapshot.
    detectionEngine.onMetrics(setMetrics);
    setMetrics(detectionEngine.getMetrics());
  }, [detectionEngine]);
  return metrics;
}
/**
 * React hook that polls engine metrics every 5 seconds and surfaces
 * human-readable performance tuning recommendations.
 */
export function usePerformanceOptimization(detectionEngine: DetectionEngine | null) {
  const [recommendations, setRecommendations] = useState<string[]>([]);
  useEffect(() => {
    if (!detectionEngine) return;
    const intervalId = setInterval(() => {
      const { fps, inferenceTime, memoryUsage } = detectionEngine.getMetrics();
      const advice: string[] = [];
      if (fps < 15) {
        advice.push('Consider increasing frame skip or switching to a lighter model');
      }
      if (inferenceTime > 100) {
        advice.push('Inference time is high, consider switching to quantized model');
      }
      if (memoryUsage > 100) {
        advice.push('High memory usage detected');
      }
      setRecommendations(advice);
    }, 5000); // Check every 5 seconds
    return () => clearInterval(intervalId);
  }, [detectionEngine]);
  return recommendations;
}

View File

@@ -13,7 +13,7 @@ interface CacheEntry {
*/
export class SKUIdentificationService {
private cache = new Map<string, CacheEntry>();
private readonly API_ENDPOINT = 'https://pegasus-working-bison.ngrok-free.app/predictfile';
private readonly API_ENDPOINT = `${process.env.NEXT_PUBLIC_PYTHON_SERVER_URL}/predictfile`;
private readonly CACHE_TTL = 60 * 60 * 1000; // 1 hour cache
private readonly MAX_CACHE_SIZE = 100; // Prevent memory leaks

555
package-lock.json generated
View File

@@ -12,9 +12,9 @@
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-separator": "^1.1.7",
"@radix-ui/react-slider": "^1.3.6",
"@radix-ui/react-slot": "^1.2.3",
"@tensorflow/tfjs": "^4.22.0",
"@tensorflow/tfjs-backend-webgl": "^4.22.0",
"@radix-ui/react-switch": "^1.2.6",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"embla-carousel-react": "^8.6.0",
@@ -1415,6 +1415,39 @@
}
}
},
"node_modules/@radix-ui/react-slider": {
"version": "1.3.6",
"resolved": "https://registry.npmjs.org/@radix-ui/react-slider/-/react-slider-1.3.6.tgz",
"integrity": "sha512-JPYb1GuM1bxfjMRlNLE+BcmBC8onfCi60Blk7OBqi2MLTFdS+8401U4uFjnwkOr49BLmXxLC6JHkvAsx5OJvHw==",
"license": "MIT",
"dependencies": {
"@radix-ui/number": "1.1.1",
"@radix-ui/primitive": "1.1.3",
"@radix-ui/react-collection": "1.1.7",
"@radix-ui/react-compose-refs": "1.1.2",
"@radix-ui/react-context": "1.1.2",
"@radix-ui/react-direction": "1.1.1",
"@radix-ui/react-primitive": "2.1.3",
"@radix-ui/react-use-controllable-state": "1.2.2",
"@radix-ui/react-use-layout-effect": "1.1.1",
"@radix-ui/react-use-previous": "1.1.1",
"@radix-ui/react-use-size": "1.1.1"
},
"peerDependencies": {
"@types/react": "*",
"@types/react-dom": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
},
"@types/react-dom": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-slot": {
"version": "1.2.3",
"resolved": "https://registry.npmjs.org/@radix-ui/react-slot/-/react-slot-1.2.3.tgz",
@@ -1433,6 +1466,35 @@
}
}
},
"node_modules/@radix-ui/react-switch": {
"version": "1.2.6",
"resolved": "https://registry.npmjs.org/@radix-ui/react-switch/-/react-switch-1.2.6.tgz",
"integrity": "sha512-bByzr1+ep1zk4VubeEVViV592vu2lHE2BZY5OnzehZqOOgogN80+mNtCqPkhn2gklJqOpxWgPoYTSnhBCqpOXQ==",
"license": "MIT",
"dependencies": {
"@radix-ui/primitive": "1.1.3",
"@radix-ui/react-compose-refs": "1.1.2",
"@radix-ui/react-context": "1.1.2",
"@radix-ui/react-primitive": "2.1.3",
"@radix-ui/react-use-controllable-state": "1.2.2",
"@radix-ui/react-use-previous": "1.1.1",
"@radix-ui/react-use-size": "1.1.1"
},
"peerDependencies": {
"@types/react": "*",
"@types/react-dom": "*",
"react": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc",
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0 || ^19.0.0-rc"
},
"peerDependenciesMeta": {
"@types/react": {
"optional": true
},
"@types/react-dom": {
"optional": true
}
}
},
"node_modules/@radix-ui/react-use-callback-ref": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/@radix-ui/react-use-callback-ref/-/react-use-callback-ref-1.1.1.tgz",
@@ -1903,128 +1965,6 @@
"tailwindcss": "4.1.12"
}
},
"node_modules/@tensorflow/tfjs": {
"version": "4.22.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs/-/tfjs-4.22.0.tgz",
"integrity": "sha512-0TrIrXs6/b7FLhLVNmfh8Sah6JgjBPH4mZ8JGb7NU6WW+cx00qK5BcAZxw7NCzxj6N8MRAIfHq+oNbPUNG5VAg==",
"license": "Apache-2.0",
"dependencies": {
"@tensorflow/tfjs-backend-cpu": "4.22.0",
"@tensorflow/tfjs-backend-webgl": "4.22.0",
"@tensorflow/tfjs-converter": "4.22.0",
"@tensorflow/tfjs-core": "4.22.0",
"@tensorflow/tfjs-data": "4.22.0",
"@tensorflow/tfjs-layers": "4.22.0",
"argparse": "^1.0.10",
"chalk": "^4.1.0",
"core-js": "3.29.1",
"regenerator-runtime": "^0.13.5",
"yargs": "^16.0.3"
},
"bin": {
"tfjs-custom-module": "dist/tools/custom_module/cli.js"
}
},
"node_modules/@tensorflow/tfjs-backend-cpu": {
"version": "4.22.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-cpu/-/tfjs-backend-cpu-4.22.0.tgz",
"integrity": "sha512-1u0FmuLGuRAi8D2c3cocHTASGXOmHc/4OvoVDENJayjYkS119fcTcQf4iHrtLthWyDIPy3JiPhRrZQC9EwnhLw==",
"license": "Apache-2.0",
"dependencies": {
"@types/seedrandom": "^2.4.28",
"seedrandom": "^3.0.5"
},
"engines": {
"yarn": ">= 1.3.2"
},
"peerDependencies": {
"@tensorflow/tfjs-core": "4.22.0"
}
},
"node_modules/@tensorflow/tfjs-backend-webgl": {
"version": "4.22.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-webgl/-/tfjs-backend-webgl-4.22.0.tgz",
"integrity": "sha512-H535XtZWnWgNwSzv538czjVlbJebDl5QTMOth4RXr2p/kJ1qSIXE0vZvEtO+5EC9b00SvhplECny2yDewQb/Yg==",
"license": "Apache-2.0",
"dependencies": {
"@tensorflow/tfjs-backend-cpu": "4.22.0",
"@types/offscreencanvas": "~2019.3.0",
"@types/seedrandom": "^2.4.28",
"seedrandom": "^3.0.5"
},
"engines": {
"yarn": ">= 1.3.2"
},
"peerDependencies": {
"@tensorflow/tfjs-core": "4.22.0"
}
},
"node_modules/@tensorflow/tfjs-converter": {
"version": "4.22.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-converter/-/tfjs-converter-4.22.0.tgz",
"integrity": "sha512-PT43MGlnzIo+YfbsjM79Lxk9lOq6uUwZuCc8rrp0hfpLjF6Jv8jS84u2jFb+WpUeuF4K33ZDNx8CjiYrGQ2trQ==",
"license": "Apache-2.0",
"peerDependencies": {
"@tensorflow/tfjs-core": "4.22.0"
}
},
"node_modules/@tensorflow/tfjs-core": {
"version": "4.22.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-4.22.0.tgz",
"integrity": "sha512-LEkOyzbknKFoWUwfkr59vSB68DMJ4cjwwHgicXN0DUi3a0Vh1Er3JQqCI1Hl86GGZQvY8ezVrtDIvqR1ZFW55A==",
"license": "Apache-2.0",
"dependencies": {
"@types/long": "^4.0.1",
"@types/offscreencanvas": "~2019.7.0",
"@types/seedrandom": "^2.4.28",
"@webgpu/types": "0.1.38",
"long": "4.0.0",
"node-fetch": "~2.6.1",
"seedrandom": "^3.0.5"
},
"engines": {
"yarn": ">= 1.3.2"
}
},
"node_modules/@tensorflow/tfjs-core/node_modules/@types/offscreencanvas": {
"version": "2019.7.3",
"resolved": "https://registry.npmjs.org/@types/offscreencanvas/-/offscreencanvas-2019.7.3.tgz",
"integrity": "sha512-ieXiYmgSRXUDeOntE1InxjWyvEelZGP63M+cGuquuRLuIKKT1osnkXjxev9B7d1nXSug5vpunx+gNlbVxMlC9A==",
"license": "MIT"
},
"node_modules/@tensorflow/tfjs-data": {
"version": "4.22.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-data/-/tfjs-data-4.22.0.tgz",
"integrity": "sha512-dYmF3LihQIGvtgJrt382hSRH4S0QuAp2w1hXJI2+kOaEqo5HnUPG0k5KA6va+S1yUhx7UBToUKCBHeLHFQRV4w==",
"license": "Apache-2.0",
"dependencies": {
"@types/node-fetch": "^2.1.2",
"node-fetch": "~2.6.1",
"string_decoder": "^1.3.0"
},
"peerDependencies": {
"@tensorflow/tfjs-core": "4.22.0",
"seedrandom": "^3.0.5"
}
},
"node_modules/@tensorflow/tfjs-layers": {
"version": "4.22.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-layers/-/tfjs-layers-4.22.0.tgz",
"integrity": "sha512-lybPj4ZNj9iIAPUj7a8ZW1hg8KQGfqWLlCZDi9eM/oNKCCAgchiyzx8OrYoWmRrB+AM6VNEeIT+2gZKg5ReihA==",
"license": "Apache-2.0 AND MIT",
"peerDependencies": {
"@tensorflow/tfjs-core": "4.22.0"
}
},
"node_modules/@tensorflow/tfjs/node_modules/argparse": {
"version": "1.0.10",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
"integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
"license": "MIT",
"dependencies": {
"sprintf-js": "~1.0.2"
}
},
"node_modules/@tybys/wasm-util": {
"version": "0.10.0",
"resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.0.tgz",
@@ -2057,37 +1997,16 @@
"dev": true,
"license": "MIT"
},
"node_modules/@types/long": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz",
"integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==",
"license": "MIT"
},
"node_modules/@types/node": {
"version": "20.19.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.11.tgz",
"integrity": "sha512-uug3FEEGv0r+jrecvUUpbY8lLisvIjg6AAic6a2bSP5OEOLeJsDSnvhCDov7ipFFMXS3orMpzlmi0ZcuGkBbow==",
"dev": true,
"license": "MIT",
"dependencies": {
"undici-types": "~6.21.0"
}
},
"node_modules/@types/node-fetch": {
"version": "2.6.13",
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.13.tgz",
"integrity": "sha512-QGpRVpzSaUs30JBSGPjOg4Uveu384erbHBoT1zeONvyCfwQxIkUshLAOqN/k9EjGviPRmWTTe6aH2qySWKTVSw==",
"license": "MIT",
"dependencies": {
"@types/node": "*",
"form-data": "^4.0.4"
}
},
"node_modules/@types/offscreencanvas": {
"version": "2019.3.0",
"resolved": "https://registry.npmjs.org/@types/offscreencanvas/-/offscreencanvas-2019.3.0.tgz",
"integrity": "sha512-esIJx9bQg+QYF0ra8GnvfianIY8qWB0GBx54PK5Eps6m+xTj86KLavHv6qDhzKcu5UUOgNfJ2pWaIIV7TRUd9Q==",
"license": "MIT"
},
"node_modules/@types/react": {
"version": "19.1.12",
"resolved": "https://registry.npmjs.org/@types/react/-/react-19.1.12.tgz",
@@ -2108,12 +2027,6 @@
"@types/react": "^19.0.0"
}
},
"node_modules/@types/seedrandom": {
"version": "2.4.34",
"resolved": "https://registry.npmjs.org/@types/seedrandom/-/seedrandom-2.4.34.tgz",
"integrity": "sha512-ytDiArvrn/3Xk6/vtylys5tlY6eo7Ane0hvcx++TKo6RxQXuVfW0AF/oeWqAj9dN29SyhtawuXstgmPlwNcv/A==",
"license": "MIT"
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "8.41.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.41.0.tgz",
@@ -2671,12 +2584,6 @@
"win32"
]
},
"node_modules/@webgpu/types": {
"version": "0.1.38",
"resolved": "https://registry.npmjs.org/@webgpu/types/-/types-0.1.38.tgz",
"integrity": "sha512-7LrhVKz2PRh+DD7+S+PVaFd5HxaWQvoMqBbsV9fNJO1pjUs1P8bM2vQVNfk+3URTqbuTI7gkXi0rfsN0IadoBA==",
"license": "BSD-3-Clause"
},
"node_modules/acorn": {
"version": "8.15.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
@@ -2717,19 +2624,11 @@
"url": "https://github.com/sponsors/epoberezkin"
}
},
"node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/ansi-styles": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"dev": true,
"license": "MIT",
"dependencies": {
"color-convert": "^2.0.1"
@@ -2947,12 +2846,6 @@
"node": ">= 0.4"
}
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
"license": "MIT"
},
"node_modules/available-typed-arrays": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz",
@@ -3054,6 +2947,7 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
"integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
@@ -3114,6 +3008,7 @@
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^4.1.0",
@@ -3154,17 +3049,6 @@
"integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==",
"license": "MIT"
},
"node_modules/cliui": {
"version": "7.0.4",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
"integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
"license": "ISC",
"dependencies": {
"string-width": "^4.2.0",
"strip-ansi": "^6.0.0",
"wrap-ansi": "^7.0.0"
}
},
"node_modules/clsx": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz",
@@ -3192,6 +3076,7 @@
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"devOptional": true,
"license": "MIT",
"dependencies": {
"color-name": "~1.1.4"
@@ -3204,6 +3089,7 @@
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
"devOptional": true,
"license": "MIT"
},
"node_modules/color-string": {
@@ -3217,18 +3103,6 @@
"simple-swizzle": "^0.2.2"
}
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"license": "MIT",
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
@@ -3236,17 +3110,6 @@
"dev": true,
"license": "MIT"
},
"node_modules/core-js": {
"version": "3.29.1",
"resolved": "https://registry.npmjs.org/core-js/-/core-js-3.29.1.tgz",
"integrity": "sha512-+jwgnhg6cQxKYIIjGtAHq2nwUOolo9eoFZ4sHfUH09BLXBgxnH4gA0zEd+t+BO2cNB8idaBtZFcFTRjQJRJmAw==",
"hasInstallScript": true,
"license": "MIT",
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/core-js"
}
},
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@@ -3391,15 +3254,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"license": "MIT",
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/detect-libc": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz",
@@ -3433,6 +3287,7 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
"integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.1",
@@ -3565,6 +3420,7 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
"integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -3574,6 +3430,7 @@
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -3611,6 +3468,7 @@
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
"integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0"
@@ -3623,6 +3481,7 @@
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
@@ -3665,15 +3524,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/escalade": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
"integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==",
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/escape-string-regexp": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
@@ -4254,26 +4104,11 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/form-data": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/function-bind": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
"integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -4310,19 +4145,11 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/get-caller-file": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
"integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
"license": "ISC",
"engines": {
"node": "6.* || 8.* || >= 10.*"
}
},
"node_modules/get-intrinsic": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
"integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.2",
@@ -4356,6 +4183,7 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
"integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
"dev": true,
"license": "MIT",
"dependencies": {
"dunder-proto": "^1.0.1",
@@ -4443,6 +4271,7 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
"integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -4482,6 +4311,7 @@
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
@@ -4520,6 +4350,7 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
"integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -4532,6 +4363,7 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
"dev": true,
"license": "MIT",
"dependencies": {
"has-symbols": "^1.0.3"
@@ -4547,6 +4379,7 @@
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"function-bind": "^1.1.2"
@@ -4785,15 +4618,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/is-fullwidth-code-point": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/is-generator-function": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz",
@@ -5442,12 +5266,6 @@
"dev": true,
"license": "MIT"
},
"node_modules/long": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
"integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==",
"license": "Apache-2.0"
},
"node_modules/loose-envify": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
@@ -5484,6 +5302,7 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
"integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -5513,27 +5332,6 @@
"node": ">=8.6"
}
},
"node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"license": "MIT",
"dependencies": {
"mime-db": "1.52.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
@@ -5726,26 +5524,6 @@
"node": "^10 || ^12 || >=14"
}
},
"node_modules/node-fetch": {
"version": "2.6.13",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.13.tgz",
"integrity": "sha512-StxNAxh15zr77QvvkmveSQ8uCQ4+v5FkvNTj0OESmiHu+VRi/gXArXtkWMElOsOUNLtUEvI4yS+rdtOHZTwlQA==",
"license": "MIT",
"dependencies": {
"whatwg-url": "^5.0.0"
},
"engines": {
"node": "4.x || >=6.0.0"
},
"peerDependencies": {
"encoding": "^0.1.0"
},
"peerDependenciesMeta": {
"encoding": {
"optional": true
}
}
},
"node_modules/object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
@@ -6208,12 +5986,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/regenerator-runtime": {
"version": "0.13.11",
"resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz",
"integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==",
"license": "MIT"
},
"node_modules/regexp.prototype.flags": {
"version": "1.5.4",
"resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz",
@@ -6235,15 +6007,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/require-directory": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
"integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/resolve": {
"version": "1.22.10",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz",
@@ -6340,26 +6103,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"license": "MIT"
},
"node_modules/safe-push-apply": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz",
@@ -6401,12 +6144,6 @@
"integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==",
"license": "MIT"
},
"node_modules/seedrandom": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-3.0.5.tgz",
"integrity": "sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==",
"license": "MIT"
},
"node_modules/semver": {
"version": "7.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
@@ -6627,12 +6364,6 @@
"node": ">=0.10.0"
}
},
"node_modules/sprintf-js": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
"integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
"license": "BSD-3-Clause"
},
"node_modules/stable-hash": {
"version": "0.0.5",
"resolved": "https://registry.npmjs.org/stable-hash/-/stable-hash-0.0.5.tgz",
@@ -6662,35 +6393,6 @@
"node": ">=10.0.0"
}
},
"node_modules/string_decoder": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
"license": "MIT",
"dependencies": {
"safe-buffer": "~5.2.0"
}
},
"node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"license": "MIT",
"dependencies": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/string-width/node_modules/emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
"license": "MIT"
},
"node_modules/string.prototype.includes": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/string.prototype.includes/-/string.prototype.includes-2.0.1.tgz",
@@ -6804,18 +6506,6 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/strip-bom": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz",
@@ -6866,6 +6556,7 @@
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
"integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
"dev": true,
"license": "MIT",
"dependencies": {
"has-flag": "^4.0.0"
@@ -6997,12 +6688,6 @@
"node": ">=8.0"
}
},
"node_modules/tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
"license": "MIT"
},
"node_modules/ts-api-utils": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz",
@@ -7173,6 +6858,7 @@
"version": "6.21.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
"dev": true,
"license": "MIT"
},
"node_modules/unrs-resolver": {
@@ -7276,22 +6962,6 @@
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0.0 || ^19.0.0-rc"
}
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
"license": "BSD-2-Clause"
},
"node_modules/whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
"license": "MIT",
"dependencies": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
}
},
"node_modules/which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
@@ -7407,32 +7077,6 @@
"node": ">=0.10.0"
}
},
"node_modules/wrap-ansi": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
"license": "MIT",
"dependencies": {
"ansi-styles": "^4.0.0",
"string-width": "^4.1.0",
"strip-ansi": "^6.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/y18n": {
"version": "5.0.8",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
"integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
"license": "ISC",
"engines": {
"node": ">=10"
}
},
"node_modules/yallist": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
@@ -7443,33 +7087,6 @@
"node": ">=18"
}
},
"node_modules/yargs": {
"version": "16.2.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
"integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==",
"license": "MIT",
"dependencies": {
"cliui": "^7.0.2",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"require-directory": "^2.1.1",
"string-width": "^4.2.0",
"y18n": "^5.0.5",
"yargs-parser": "^20.2.2"
},
"engines": {
"node": ">=10"
}
},
"node_modules/yargs-parser": {
"version": "20.2.9",
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz",
"integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==",
"license": "ISC",
"engines": {
"node": ">=10"
}
},
"node_modules/yocto-queue": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",

View File

@@ -16,8 +16,6 @@
"@radix-ui/react-slider": "^1.3.6",
"@radix-ui/react-slot": "^1.2.3",
"@radix-ui/react-switch": "^1.2.6",
"@tensorflow/tfjs": "^4.22.0",
"@tensorflow/tfjs-backend-webgl": "^4.22.0",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"embla-carousel-react": "^8.6.0",

Binary file not shown.

File diff suppressed because one or more lines are too long

View File

@@ -1,195 +0,0 @@
import type { DetectionConfig, DetectionResult, WorkerMessage, WorkerResponse } from '../lib/ml/types';
import { InferencePipeline } from '../lib/ml/inference-pipeline';
declare const self: DedicatedWorkerGlobalScope;
// --- Module-level worker state, populated lazily by the message handlers ---
let tfGlobal: any = null;                      // TensorFlow.js namespace; set by initialize()
let model: any = null;                         // loaded graph model; set by loadModelWorker()
let config: DetectionConfig | null = null;     // active detection settings; set by configureWorker()
let pipeline: InferencePipeline | null = null; // post-processing pipeline; created in initialize()
/**
 * Bootstraps the worker: dynamically imports TensorFlow.js plus its WebGL
 * backend, activates that backend, and constructs the post-processing
 * pipeline. Acknowledges completion with an INITIALIZED message tagged
 * with the request `id`.
 */
async function initialize(id: string) {
  console.log('Initializing worker...');
  const tf = await import('@tensorflow/tfjs');
  await import('@tensorflow/tfjs-backend-webgl');
  tfGlobal = tf;
  await tfGlobal.setBackend('webgl');
  await tfGlobal.ready();
  console.log('TensorFlow.js backend set to:', tfGlobal.getBackend());
  pipeline = new InferencePipeline();
  self.postMessage({ type: 'INITIALIZED', id });
}
/**
 * Loads the detection graph model for the requested variant into module
 * state, then runs one dummy inference to warm the model up (avoids
 * first-frame latency from shader/kernel compilation).
 *
 * Posts LOADED_MODEL on success or ERROR on failure, tagged with `id`
 * so the main thread can settle the matching request.
 *
 * NOTE(review): `modelData` is received but never used — the model is
 * always fetched over HTTP from the public /models folder; confirm the
 * transferred buffer is intentionally ignored.
 * NOTE(review): all three variants currently map to the same model.json
 * URL — presumably a placeholder until separate builds exist; verify.
 */
async function loadModelWorker(variant: 'quantized' | 'standard' | 'full', modelData: ArrayBuffer, id: string) {
  console.log(`Worker: Loading model ${variant}...`);
  try {
    if (!tfGlobal) {
      throw new Error('TensorFlow.js not initialized');
    }
    // Use local model files from public folder with full URL for worker context
    // (workers cannot rely on document base URLs, so build from self.location).
    const baseUrl = self.location.origin;
    const modelUrls = {
      'quantized': `${baseUrl}/models/model.json`,
      'standard': `${baseUrl}/models/model.json`,
      'full': `${baseUrl}/models/model.json`
    };
    console.log(`Worker: Loading REAL model from ${modelUrls[variant]}`);
    // Load the real model like in the working GitHub implementation
    model = await tfGlobal.loadGraphModel(modelUrls[variant]);
    console.log('Worker: Real model loaded successfully', model);
    // Warm up the model like the working implementation.
    // NOTE(review): warmup only runs when CONFIGURE arrived before
    // LOAD_MODEL (config non-null here); otherwise it is silently skipped.
    if (model && config) {
      console.log('Worker: Warming up model with input size:', config.inputSize);
      // The model expects int32 input: build float zeros, cast, then free
      // the intermediate float tensor immediately.
      const dummyFloat = tfGlobal.zeros([1, ...config.inputSize, 3]);
      const dummyInput = tfGlobal.cast(dummyFloat, 'int32');
      dummyFloat.dispose();
      const result = await model.executeAsync(
        { image_tensor: dummyInput },
        ['detection_boxes', 'num_detections', 'detection_classes', 'detection_scores']
      );
      console.log('Worker: Warmup result:', result);
      dummyInput.dispose();
      // Free all warmup output tensors to avoid leaking GPU memory.
      if (Array.isArray(result)) {
        result.forEach(t => t.dispose());
      } else if (result) {
        result.dispose();
      }
      console.log('Worker: Model warmed up successfully.');
    }
    self.postMessage({ type: 'LOADED_MODEL', id });
  } catch (error) {
    console.error(`Worker: Failed to load model ${variant}:`, error);
    self.postMessage({ type: 'ERROR', error: error instanceof Error ? error.message : 'Unknown error during model loading', id });
  }
}
/**
 * Installs a new detection configuration as the worker's active config and
 * acknowledges the request. Also serves UPDATE_CONFIG messages, which
 * replace the configuration wholesale.
 */
async function configureWorker(newConfig: DetectionConfig, id: string) {
  console.log('Worker: Configuring...');
  config = newConfig;
  const ack = { type: 'CONFIGURED', id };
  self.postMessage(ack);
}
/**
 * Runs a single detection pass over one camera frame.
 *
 * Converts the RGBA ImageData into an int32 RGB tensor at the configured
 * input size, executes the SSD-style graph model, and post-processes the
 * raw boxes/scores/classes through the InferencePipeline. Posts a
 * DETECTION_RESULT message (result may be null for invalid model output)
 * or an ERROR message, tagged with the request `id`.
 */
async function detect(imageData: ImageData, id: string) {
  console.log('Worker: detect function called.');
  if (!model || !config || !pipeline) {
    self.postMessage({ type: 'ERROR', error: 'Worker not initialized or configured.', id });
    return;
  }
  // Build the model input inside tidy() so every intermediate tensor is freed.
  const tensor = tfGlobal.tidy(() => {
    // Convert ImageData to tensor in Web Worker context
    const { data, width, height } = imageData;
    // In Web Worker, we need to create the tensor manually from the pixel data.
    // Convert RGBA to RGB by dropping every 4th value (alpha channel).
    const rgbData = new Uint8Array(width * height * 3);
    for (let i = 0; i < width * height; i++) {
      rgbData[i * 3] = data[i * 4]; // R
      rgbData[i * 3 + 1] = data[i * 4 + 1]; // G
      rgbData[i * 3 + 2] = data[i * 4 + 2]; // B
      // Skip alpha channel (data[i * 4 + 3])
    }
    // Create tensor from RGB data
    const img = tfGlobal.tensor3d(rgbData, [height, width, 3]);
    // Resize to the configured model input size - this returns float32
    const resized = tfGlobal.image.resizeBilinear(img, config!.inputSize);
    // Cast to int32 as required by the model
    const int32Tensor = tfGlobal.cast(resized, 'int32');
    return int32Tensor.expandDims(0); // Now properly int32, batched
  });
  try {
    console.log('Worker: About to execute model with tensor shape:', tensor.shape, 'dtype:', tensor.dtype);
    // Use the same input format as the working implementation
    const result = await model.executeAsync(
      { image_tensor: tensor },
      ['detection_boxes', 'num_detections', 'detection_classes', 'detection_scores']
    );
    tensor.dispose();
    // Reduced logging for performance
    if (process.env.NODE_ENV === 'development') {
      console.log('Worker: Model execution completed, processing results...');
    }
    if (!result || !Array.isArray(result) || result.length < 4) {
      console.error('Worker: Invalid model output:', result);
      self.postMessage({ type: 'DETECTION_RESULT', result: null, id });
      return;
    }
    // Match the working implementation: [boxes, num_detections, classes, scores]
    const [boxes, numDetections, classes, scores] = result;
    console.log('Worker: Extracting data from tensors...');
    const boxesData = await boxes.data();
    const scoresData = await scores.data();
    const classesData = await classes.data();
    // Only log detailed outputs when debugging specific issues
    const maxScore = Math.max(...Array.from(scoresData.slice(0, 10)));
    const scoresAbove30 = Array.from(scoresData.slice(0, 10)).filter(s => s > 0.3).length;
    if (process.env.NODE_ENV === 'development' && (maxScore > 0.3 || scoresAbove30 > 0)) {
      console.log('Worker: Potential detection found:', { maxScore, scoresAbove30 });
    }
    result.forEach(t => t.dispose());
    const detectionResult = pipeline.process(
      boxesData as number[],
      scoresData as number[],
      classesData as number[],
      config.confidenceThreshold
    );
    console.log('Worker detectionResult:', detectionResult);
    self.postMessage({ type: 'DETECTION_RESULT', result: detectionResult, id });
  } catch (error) {
    // Fix: guard against double-dispose — on the happy path `tensor` is
    // already freed before a later failure (e.g. inside pipeline.process).
    if (!tensor.isDisposed) {
      tensor.dispose();
    }
    console.error('Worker: Detection execution failed:', error);
    // Fix: `error` is untyped/unknown in a catch clause; accessing `.stack`
    // unguarded fails under strict TS and for non-Error throwables.
    if (error instanceof Error) {
      console.error('Worker: Error stack:', error.stack);
    }
    self.postMessage({ type: 'ERROR', error: error instanceof Error ? error.message : 'Detection execution failed', id });
  }
}
// Top-level message dispatcher: routes each main-thread request to its
// handler and converts any thrown error into an ERROR response carrying the
// request id, so the caller's pending promise always settles.
// Fix: the original wrapped this single synchronous assignment in a
// pointless async IIFE; assigning the handler directly is equivalent.
self.onmessage = async (event: MessageEvent<WorkerMessage>) => {
  try {
    switch (event.data.type) {
      case 'INITIALIZE':
        await initialize(event.data.id);
        break;
      case 'LOAD_MODEL':
        await loadModelWorker(event.data.variant, event.data.modelData, event.data.id);
        break;
      case 'CONFIGURE':
        await configureWorker(event.data.config, event.data.id);
        break;
      case 'DETECT':
        await detect(event.data.imageData, event.data.id);
        break;
      case 'UPDATE_CONFIG':
        // Same handling as CONFIGURE: the new config replaces the old wholesale.
        await configureWorker(event.data.config, event.data.id);
        break;
      default:
        throw new Error(`Unknown message type: ${(event.data as any).type}`);
    }
  } catch (error) {
    self.postMessage({ type: 'ERROR', error: error instanceof Error ? error.message : 'Unknown error in worker', id: event.data.id });
  }
};