Model downloaded & manual trigger inferencing enabled

This commit is contained in:
2025-08-28 20:58:37 -06:00
parent 75e0dca899
commit 5969947b68
20 changed files with 7742 additions and 105 deletions

View File

@@ -1,21 +1,28 @@
'use client';
import { useEffect, useRef, useState } from 'react';
import { Camera, History, VideoOff, Settings, Video, Volume2, Palette, ChevronRight } from 'lucide-react';
import { Button } from '@/components/ui/button';
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select'; // Assuming shadcn/ui has Select
import { useEffect, useRef, useState, useCallback } from 'react';
import { Camera, History, VideoOff, Settings, Video, Volume2, Palette } from 'lucide-react';
import { Select, SelectContent, SelectItem, SelectTrigger, SelectValue } from '@/components/ui/select';
import { Slider } from '@/components/ui/slider';
import { Switch } from '@/components/ui/switch';
import { SHOE_DATABASE, type Shoe } from '@/lib/shoe-database';
import { detectShoe } from '@/lib/ml-classification';
import { addToHistory, getHistory } from '@/lib/history-storage';
import ShoeResultsPopup from '@/components/shoe-results-popup';
import HistorySidebar from '@/components/history-sidebar';
import { MODEL_VARIANTS } from '@/lib/ml/model-config';
import { useDetection } from '@/lib/ml/use-detection';
import type { DetectionResult, DetectionConfig } from '@/lib/ml/types';
type CameraStatus = 'loading' | 'active' | 'denied' | 'no_devices';
export default function HomePage() {
const videoRef = useRef<HTMLVideoElement>(null);
const canvasRef = useRef<HTMLCanvasElement>(null);
const [stream, setStream] = useState<MediaStream | null>(null);
const [cameraStatus, setCameraStatus] = useState<CameraStatus>('loading');
const [canvasCtx, setCanvasCtx] = useState<CanvasRenderingContext2D | null>(null);
const [videoDevices, setVideoDevices] = useState<MediaDeviceInfo[]>([]);
const [selectedDeviceId, setSelectedDeviceId] = useState<string>('');
@@ -25,6 +32,61 @@ export default function HomePage() {
const [history, setHistory] = useState<Shoe[]>([]);
const [isSettingsPanelOpen, setSettingsPanelOpen] = useState(false);
// ML Detection state
const [detectionEnabled, setDetectionEnabled] = useState(true); // Auto-enable on page load
const [currentDetection, setCurrentDetection] = useState<DetectionResult | null>(null);
const [isPowerSaver, setPowerSaver] = useState(false);
const [userConfig, setUserConfig] = useState<Partial<DetectionConfig> | null>(null);
// Initialize ML detection system
const {
isLoading: isMLLoading,
metrics,
error: mlError,
initialize: initializeML,
startContinuous,
stopContinuous,
triggerDetection,
updateConfig,
config
} = useDetection({
modelVariant: 'standard', // Start with standard model
enableContinuous: true,
enableTrigger: true,
onDetection: (detection) => {
console.log('🔍 Detection callback received:', detection);
setCurrentDetection(detection);
// Ensure canvas context is available before drawing
if (canvasRef.current && !canvasCtx) {
const ctx = canvasRef.current.getContext('2d');
if (ctx) setCanvasCtx(ctx);
}
if (canvasCtx && videoRef.current && canvasRef.current) {
console.log('🎨 Drawing detection on canvas:', { canvasCtx: !!canvasCtx, video: !!videoRef.current, canvas: !!canvasRef.current });
drawDetections(detection, canvasCtx, videoRef.current, canvasRef.current);
} else {
console.log('❌ Canvas drawing skipped - missing refs:', { canvasCtx: !!canvasCtx, video: !!videoRef.current, canvas: !!canvasRef.current });
// Try to get canvas context if missing
if (canvasRef.current && !canvasCtx) {
const ctx = canvasRef.current.getContext('2d');
if (ctx) {
setCanvasCtx(ctx);
drawDetections(detection, ctx, videoRef.current, canvasRef.current);
}
}
}
// Auto-trigger popup when shoe is detected
if (detection && detection.confidence > 0.7) {
console.log('Shoe detected! Opening popup...', detection);
setPopupOpen(true);
}
},
onError: (error) => {
console.error('ML Detection Error:', error);
}
});
// Effect to clean up the stream when component unmounts or stream changes
useEffect(() => {
return () => {
@@ -35,11 +97,62 @@ export default function HomePage() {
// Effect to assign stream to video when videoRef becomes available
useEffect(() => {
if (videoRef.current && stream && cameraStatus === 'active') {
console.log("Assigning saved stream to video element");
console.log('Assigning saved stream to video element');
videoRef.current.srcObject = stream;
}
}, [stream, cameraStatus]); // Runs when stream or camera status changes
// Effect to get canvas context
useEffect(() => {
if (canvasRef.current) {
const ctx = canvasRef.current.getContext('2d');
if (ctx) {
setCanvasCtx(ctx);
}
}
}, [canvasRef]);
// Initialize ML detection when camera is ready
useEffect(() => {
if (videoRef.current && cameraStatus === 'active' && !isMLLoading && detectionEnabled) {
console.log('Initializing ML detection...');
initializeML(videoRef.current).then(() => {
console.log('ML detection initialized, starting continuous detection');
startContinuous();
}).catch((error) => {
console.error('Failed to initialize ML detection:', error);
});
}
}, [cameraStatus, detectionEnabled, isMLLoading, initializeML, startContinuous]);
// Starts (or restarts) the camera stream for the given device. Any tracks
// already attached to the video element are stopped first so we never hold
// two live captures at once.
const startStream = useCallback(async (deviceId: string) => {
// Stop previous stream if it exists
(videoRef.current?.srcObject as MediaStream)?.getTracks().forEach((track) => track.stop());
console.log('starting stream....')
try {
// Request exactly this device; throws if it is unavailable or permission is denied
const newStream = await navigator.mediaDevices.getUserMedia({
video: { deviceId: { exact: deviceId } },
});
console.log(newStream)
// Always save the stream first
setStream(newStream);
setCameraStatus('active');
if (videoRef.current) {
// Video element is ready, assign stream immediately
videoRef.current.srcObject = newStream;
} else {
// Video element not ready yet, stream will be assigned when it mounts
// (the effect watching `stream` and `cameraStatus` handles that case)
console.log('Video element not ready, stream will be assigned when video mounts');
}
} catch (err) {
// Any getUserMedia failure is surfaced to the UI as a denied state
console.error('Error starting stream: ', err);
setCameraStatus('denied');
}
}, []);
// Effect to automatically initialize camera on app start
useEffect(() => {
const initializeCamera = async () => {
@@ -72,64 +185,135 @@ export default function HomePage() {
} catch (err) {
// Permission denied or no camera
console.error("Error accessing camera or enumerating devices:", err);
console.error('Error accessing camera or enumerating devices:', err);
setCameraStatus('denied');
}
};
initializeCamera();
}, []); // Auto-run on mount
const startStream = async (deviceId: string) => {
// Stop previous stream if it exists
stream?.getTracks().forEach((track) => track.stop());
console.log("starting stream....")
try {
const newStream = await navigator.mediaDevices.getUserMedia({
video: { deviceId: { exact: deviceId } },
});
console.log(newStream)
// Always save the stream first
setStream(newStream);
setCameraStatus('active');
if (videoRef.current) {
// Video element is ready, assign stream immediately
videoRef.current.srcObject = newStream;
} else {
// Video element not ready yet, stream will be assigned when it mounts
console.log("Video element not ready, stream will be assigned when video mounts");
}
} catch (err) {
console.error("Error starting stream: ", err);
setCameraStatus('denied');
}
};
}, [startStream]); // Auto-run on mount
const handleCameraChange = (deviceId: string) => {
console.log("SAVING: ",deviceId)
console.log('SAVING: ',deviceId)
setSelectedDeviceId(deviceId);
localStorage.setItem('selectedCameraDeviceId', deviceId); // Save stable deviceId
startStream(deviceId);
};
const handleScan = () => {
// Manual "scan" button handler. Prefers a real ML trigger detection and
// falls back to the simulated detector when ML is disabled, returns
// nothing, or fails.
const handleScan = async () => {
  // Shared fallback path: pick a simulated shoe from the static database,
  // record it in history, and open the results popup. This was previously
  // duplicated verbatim in three branches.
  const runFallbackDetection = () => {
    const detected = detectShoe(SHOE_DATABASE);
    if (detected) {
      const updatedHistory = addToHistory(detected);
      setHistory(updatedHistory);
      setPopupOpen(true);
    }
  };
  if (detectionEnabled && triggerDetection) {
    try {
      console.log('Triggering ML detection...');
      const mlResult = await triggerDetection();
      if (mlResult) {
        // Use the existing detected shoe but with real ML confidence
        runFallbackDetection();
      } else {
        console.log('No shoe detected by ML');
      }
    } catch (error) {
      console.error('ML detection failed, using fallback:', error);
      // Fallback to original random detection
      runFallbackDetection();
    }
  } else {
    // Fallback to original random detection when ML is disabled
    runFallbackDetection();
  }
};
const handleHistoryItemClick = (shoe: Shoe) => {
const handleHistoryItemClick = () => {
setHistoryOpen(false);
setPopupOpen(true);
};
// Draws the current detection overlay (bounding box + confidence label)
// onto the canvas layered over the video. The canvas is cleared first, so
// calling with `detection = null` erases any previous overlay.
// Bbox values are treated as normalized [0, 1] coordinates — TODO confirm
// against the detection engine's actual output format.
const drawDetections = useCallback((
detection: DetectionResult | null,
ctx: CanvasRenderingContext2D,
video: HTMLVideoElement,
canvas: HTMLCanvasElement
) => {
console.log('🎨 drawDetections called:', { detection, videoSize: { width: video.videoWidth, height: video.videoHeight }, canvasSize: { width: canvas.width, height: canvas.height } });
// Clear canvas (erases the previous frame's overlay)
ctx.clearRect(0, 0, canvas.width, canvas.height);
if (!detection) {
console.log('❌ No detection to draw');
return;
}
const videoWidth = video.videoWidth;
const videoHeight = video.videoHeight;
// Ensure canvas matches video dimensions so pixel coordinates line up
if (canvas.width !== videoWidth || canvas.height !== videoHeight) {
canvas.width = videoWidth;
canvas.height = videoHeight;
}
// Get bounding box coordinates [x, y, width, height]
const [x, y, width, height] = detection.bbox;
// Convert normalized coordinates to pixel coordinates
const displayX = x * videoWidth;
const displayY = y * videoHeight;
const displayWidth = width * videoWidth;
const displayHeight = height * videoHeight;
// Draw bounding box with glow effect
ctx.shadowColor = '#ff0000';
ctx.shadowBlur = 10;
ctx.strokeStyle = '#ff0000';
ctx.lineWidth = 3;
ctx.strokeRect(displayX, displayY, displayWidth, displayHeight);
// Reset shadow for text
ctx.shadowBlur = 0;
// Draw label with background
const labelText = `${detection.class} (${(detection.confidence * 100).toFixed(1)}%)`;
ctx.font = 'bold 16px Arial';
const textMetrics = ctx.measureText(labelText);
const textWidth = textMetrics.width;
const textHeight = 16;
const padding = 6;
// Label background: above the box when there is room, otherwise below it
ctx.fillStyle = 'rgba(255, 0, 0, 0.8)';
ctx.fillRect(
displayX,
displayY > textHeight + padding ? displayY - textHeight - padding : displayY + displayHeight + 2,
textWidth + padding * 2,
textHeight + padding
);
// Label text (same above/below placement as the background)
ctx.fillStyle = 'white';
ctx.fillText(
labelText,
displayX + padding,
displayY > textHeight + padding ? displayY - padding : displayY + displayHeight + textHeight + padding
);
}, []); // No dependencies: the callback reads only its arguments, never component state
const renderContent = () => {
switch (cameraStatus) {
case 'loading':
@@ -226,6 +410,20 @@ export default function HomePage() {
return (
<>
<video ref={videoRef} autoPlay playsInline muted onCanPlay={() => videoRef.current?.play()} className="h-full w-full object-cover" />
<canvas ref={canvasRef} className="absolute top-0 left-0 w-full h-full"></canvas>
{/* ML Detection Status Indicator */}
{detectionEnabled && (
<div className="absolute top-4 right-4 flex items-center gap-2 bg-black/60 backdrop-blur-sm rounded-lg px-3 py-2 text-white text-sm">
<div className="w-2 h-2 bg-green-500 rounded-full animate-pulse"></div>
<span>ML Activo</span>
{currentDetection && (
<span className="text-green-400 font-bold">
{(currentDetection.confidence * 100).toFixed(0)}%
</span>
)}
</div>
)}
{/* Settings Panel */}
<div className={`absolute left-0 top-0 bottom-0 w-80 bg-black/80 backdrop-blur-xl border-r border-white/20 transform transition-transform duration-500 ease-out z-40 ${
@@ -278,7 +476,7 @@ export default function HomePage() {
{['HD', 'FHD', '4K'].map((quality) => (
<button
key={quality}
className="bg-white/10 hover:bg-blue-500/30 text-white text-sm py-2 px-3 rounded-md transition-colors border border-white/20"
className='bg-white/10 hover:bg-blue-500/30 text-white text-sm py-2 px-3 rounded-md transition-colors border border-white/20'
>
{quality}
</button>
@@ -286,6 +484,198 @@ export default function HomePage() {
</div>
</div>
{/* Power Saver Mode */}
<div className="bg-white/5 rounded-lg p-4 border border-white/10">
<div className="flex items-center justify-between">
<div className="flex items-center gap-2">
<span role="img" aria-label="battery" className="text-xl">🔋</span>
<span className="text-white font-medium">Ahorro de energía</span>
</div>
<Switch
checked={isPowerSaver}
onCheckedChange={(checked) => {
setPowerSaver(checked);
if (checked) {
setUserConfig(config);
updateConfig({
modelVariant: 'quantized',
frameSkip: 10,
confidenceThreshold: 0.4,
enableContinuous: true,
enableTrigger: false,
});
} else {
if (userConfig) {
updateConfig(userConfig);
}
}
}}
disabled={!detectionEnabled}
/>
</div>
</div>
{/* ML Detection Settings */}
<div className="bg-white/5 rounded-lg p-4 border border-white/10">
<div className="flex items-center gap-2 mb-3">
<Camera size={20} className="text-blue-400" />
<span className="text-white font-medium">Detección IA</span>
{isMLLoading && <span className="text-xs text-yellow-400">Cargando...</span>}
</div>
<div className="space-y-3">
<div className="flex items-center gap-3">
<button
onClick={() => {
setDetectionEnabled(!detectionEnabled);
if (!detectionEnabled) {
console.log('Enabling ML detection');
} else {
console.log('Disabling ML detection');
stopContinuous();
}
}}
className={`text-sm py-2 px-4 rounded-md transition-colors border ${
detectionEnabled
? 'bg-green-500/20 text-green-300 border-green-500/30 hover:bg-green-500/30'
: 'bg-white/10 text-white border-white/20 hover:bg-blue-500/30'
}`}
>
{detectionEnabled ? 'Activado' : 'Activar'}
</button>
<div className="flex items-center gap-2">
{detectionEnabled && (
<div className="w-2 h-2 bg-green-500 rounded-full animate-pulse"></div>
)}
<span className="text-white/60 text-sm">
{detectionEnabled ? 'Detectando zapatos automáticamente' : 'Click para activar detección IA'}
</span>
</div>
</div>
{/* ML Metrics */}
{detectionEnabled && metrics && (
<div className="text-xs space-y-1 text-white/50 bg-black/20 p-2 rounded">
<div>FPS: {metrics.fps.toFixed(1)}</div>
<div>Inferencia: {metrics.inferenceTime.toFixed(0)}ms</div>
{metrics.memoryUsage > 0 && <div>Memoria: {metrics.memoryUsage.toFixed(0)}MB</div>}
</div>
)}
{/* Model Selection */}
{detectionEnabled && config && (
<div className="space-y-2 pt-2">
<label className="text-sm font-medium text-white/80">Modelo de IA</label>
<Select
value={config.modelVariant}
onValueChange={(value) => {
updateConfig({ modelVariant: value as 'quantized' | 'standard' | 'full' });
}}
disabled={!detectionEnabled || isPowerSaver}
>
<SelectTrigger className="w-full bg-white/10 border-white/20 text-white hover:bg-white/20 transition-colors disabled:opacity-50 disabled:cursor-not-allowed">
<SelectValue placeholder="Seleccionar modelo..." />
</SelectTrigger>
<SelectContent className="bg-black/90 backdrop-blur-xl border-white/20">
{Object.values(MODEL_VARIANTS).map((model) => (
<SelectItem key={model.variant} value={model.variant} className="text-white hover:bg-white/20">
<div className="flex flex-col">
<span className="font-bold">{model.name}</span>
<span className="text-xs text-white/60">{model.description}</span>
</div>
</SelectItem>
))}
</SelectContent>
</Select>
</div>
)}
{/* Detection Confidence Indicator */}
{detectionEnabled && currentDetection && (
<div className="space-y-2 pt-2">
<label className="text-sm font-medium text-white/80">Confianza de Detección</label>
<div className="bg-white/10 rounded-lg p-3 border border-white/20">
<div className="flex justify-between items-center mb-1">
<span className="text-xs text-white/60">Confianza</span>
<span className="text-xs text-white font-bold">{(currentDetection.confidence * 100).toFixed(1)}%</span>
</div>
<div className="w-full bg-black/30 rounded-full h-2">
<div
className={`h-2 rounded-full transition-all duration-300 ${
currentDetection.confidence > 0.8 ? 'bg-green-500' :
currentDetection.confidence > 0.6 ? 'bg-yellow-500' : 'bg-red-500'
}`}
style={{ width: `${currentDetection.confidence * 100}%` }}
/>
</div>
<div className="flex justify-between text-xs text-white/40 mt-1">
<span>Bajo</span>
<span>Alto</span>
</div>
</div>
</div>
)}
{/* Other settings */}
{detectionEnabled && config && (
<div className="space-y-4 pt-4">
<div>
<label className="text-sm font-medium text-white/80">Sensibilidad ({(config.confidenceThreshold * 100).toFixed(0)}%)</label>
<Slider
min={0.3}
max={0.9}
step={0.05}
value={[config.confidenceThreshold]}
onValueChange={([value]) => updateConfig({ confidenceThreshold: value })}
disabled={!detectionEnabled || isPowerSaver}
className="mt-2"
/>
</div>
<div>
<label className="text-sm font-medium text-white/80">Frames a saltar ({config.frameSkip})</label>
<Slider
min={1}
max={10}
step={1}
value={[config.frameSkip]}
onValueChange={([value]) => updateConfig({ frameSkip: value })}
disabled={!detectionEnabled || isPowerSaver}
className="mt-2"
/>
</div>
<div className="flex items-center justify-between">
<label className="text-sm font-medium text-white/80">Detección continua</label>
<Switch
checked={config.enableContinuous}
onCheckedChange={(checked) => updateConfig({ enableContinuous: checked })}
disabled={!detectionEnabled || isPowerSaver}
/>
</div>
<div className="flex items-center justify-between">
<label className="text-sm font-medium text-white/80">Detección por trigger</label>
<Switch
checked={config.enableTrigger}
onCheckedChange={(checked) => updateConfig({ enableTrigger: checked })}
disabled={!detectionEnabled || isPowerSaver}
/>
</div>
</div>
)}
{/* Detection Status */}
{currentDetection && (
<div className="text-xs bg-green-500/10 text-green-300 p-2 rounded border border-green-500/20">
🎯 Zapato detectado (confianza: {(currentDetection.confidence * 100).toFixed(1)}%)
</div>
)}
{mlError && (
<div className="text-xs bg-red-500/10 text-red-300 p-2 rounded border border-red-500/20">
{mlError}
</div>
)}
</div>
</div>
{/* Audio Settings */}
<div className="bg-white/5 rounded-lg p-4 border border-white/10">
<div className="flex items-center gap-2 mb-3">
@@ -293,7 +683,7 @@ export default function HomePage() {
<span className="text-white font-medium">Audio</span>
</div>
<div className="flex items-center gap-3">
<button className="bg-white/10 hover:bg-purple-500/30 text-white text-sm py-2 px-4 rounded-md transition-colors border border-white/20">
<button className='bg-white/10 hover:bg-purple-500/30 text-white text-sm py-2 px-4 rounded-md transition-colors border border-white/20'>
Silenciado
</button>
<span className="text-white/60 text-sm">Recomendado para mejor rendimiento</span>
@@ -333,7 +723,7 @@ export default function HomePage() {
{/* Settings Button */}
<button
onClick={() => setSettingsPanelOpen(!isSettingsPanelOpen)}
className="group relative"
className='group relative'
>
<div className="w-12 h-12 bg-gradient-to-br from-blue-500/30 to-purple-500/30 rounded-full flex items-center justify-center border border-white/30 hover:from-blue-500/50 hover:to-purple-500/50 transition-all duration-300 transform hover:scale-110 hover:rotate-12">
<Settings size={20} className="text-white drop-shadow-sm" />
@@ -348,7 +738,7 @@ export default function HomePage() {
{/* Main Capture Button - Larger */}
<button
onClick={handleScan}
className="group relative"
className='group relative'
>
<div className="w-16 h-16 bg-gradient-to-br from-red-500/40 to-pink-500/40 rounded-full flex items-center justify-center border-2 border-white/40 hover:from-red-500/60 hover:to-pink-500/60 transition-all duration-300 transform hover:scale-110 shadow-2xl">
<div className="w-12 h-12 bg-white/20 rounded-full flex items-center justify-center">
@@ -367,7 +757,7 @@ export default function HomePage() {
{/* History Button */}
<button
onClick={() => setHistoryOpen(true)}
className="group relative"
className='group relative'
>
<div className="w-12 h-12 bg-gradient-to-br from-green-500/30 to-emerald-500/30 rounded-full flex items-center justify-center border border-white/30 hover:from-green-500/50 hover:to-emerald-500/50 transition-all duration-300 transform hover:scale-110 hover:rotate-12">
<History size={20} className="text-white drop-shadow-sm" />
@@ -430,13 +820,13 @@ export default function HomePage() {
</p>
<ol className="text-yellow-300/80 text-xs text-left space-y-1">
<li>1. Haz clic en el ícono de la cámara en la barra de direcciones</li>
<li>2. Selecciona "Permitir"</li>
<li>2. Selecciona &quot;Permitir&quot;</li>
<li>3. Recarga la página</li>
</ol>
</div>
<button
onClick={() => window.location.reload()}
className="bg-blue-600 hover:bg-blue-700 text-white px-6 py-2 rounded-lg transition-colors"
className='bg-blue-600 hover:bg-blue-700 text-white px-6 py-2 rounded-lg transition-colors'
>
Recargar página
</button>

View File

@@ -6,9 +6,8 @@ import {
SheetContent,
SheetHeader,
SheetTitle,
SheetTrigger,
} from '@/components/ui/sheet';
import { Card, CardContent } from '@/components/ui/card';
import { History } from 'lucide-react';
import type { Shoe } from '@/lib/shoe-database';

View File

@@ -3,11 +3,11 @@
import Image from 'next/image';
import { useState, useEffect } from 'react';
import { Drawer } from 'vaul';
import { type CarouselApi } from "@/components/ui/carousel";
import { Carousel, CarouselContent, CarouselItem } from "@/components/ui/carousel";
import { Accordion, AccordionContent, AccordionItem, AccordionTrigger } from "@/components/ui/accordion";
import { Badge } from "@/components/ui/badge";
import { Separator } from "@/components/ui/separator";
import { Button } from '@/components/ui/button';
import { ExternalLink, Package, Truck, Shield, Star, ChevronRight, Store, Tag } from 'lucide-react';
import { fetchProduct, getProductImages, getProductPricing, getProductVariants, getProductCategories, getProductClusters, getStockStatus, getProductGender, getProductSeason, getProductOccasion, getProductColors, getProductHighlight, type Product } from '@/lib/product-api';
@@ -18,10 +18,8 @@ interface ShoeResultsPopupProps {
}
export default function ShoeResultsPopup({ isOpen, onOpenChange }: ShoeResultsPopupProps) {
const [api, setApi] = useState<CarouselApi>();
const [product, setProduct] = useState<Product | null>(null);
const [loading, setLoading] = useState(false);
const [activeImageUrl, setActiveImageUrl] = useState<string>('');
const [selectedVariant, setSelectedVariant] = useState<string>('');
const [selectedSize, setSelectedSize] = useState<string>('');
@@ -34,7 +32,7 @@ export default function ShoeResultsPopup({ isOpen, onOpenChange }: ShoeResultsPo
setProduct(data);
const images = getProductImages(data);
if (images.length > 0) {
setActiveImageUrl(images[0]);
// setActiveImageUrl(images[0]);
}
// Set first available variant
const variants = getProductVariants(data);
@@ -50,30 +48,7 @@ export default function ShoeResultsPopup({ isOpen, onOpenChange }: ShoeResultsPo
}
}, [isOpen, product]);
// Handle carousel selection
useEffect(() => {
if (!api || !product) return;
const handleSelect = () => {
const selectedIndex = api.selectedScrollSnap();
const images = getProductImages(product);
setActiveImageUrl(images[selectedIndex]);
};
api.on("select", handleSelect);
return () => {
api.off("select", handleSelect);
};
}, [api, product]);
const handleThumbnailClick = (imageUrl: string) => {
if (!product) return;
const images = getProductImages(product);
const index = images.findIndex(url => url === imageUrl);
if (index !== -1) {
api?.scrollTo(index);
}
};
const handleViewDetails = () => {
if (product?.linkText) {
@@ -256,7 +231,7 @@ export default function ShoeResultsPopup({ isOpen, onOpenChange }: ShoeResultsPo
{/* Image Carousel */}
<div className="bg-white/5 backdrop-blur-sm rounded-2xl p-4 border border-white/10">
{images.length > 0 ? (
<Carousel setApi={setApi} className="w-full">
<Carousel className="w-full">
<CarouselContent>
{images.map((url, index) => (
<CarouselItem key={index}>

63
components/ui/slider.tsx Normal file
View File

@@ -0,0 +1,63 @@
'use client'
import * as React from "react"
import * as SliderPrimitive from "@radix-ui/react-slider"
import { cn } from "@/lib/utils"
/**
 * Range slider built on Radix UI's Slider primitive, styled for this app.
 * Supports controlled (`value`) and uncontrolled (`defaultValue`) usage;
 * renders one thumb per entry of whichever array is provided, or two
 * thumbs spanning [min, max] when neither is given.
 */
function Slider({
  className,
  defaultValue,
  value,
  min = 0,
  max = 100,
  ...props
}: React.ComponentProps<typeof SliderPrimitive.Root>) {
  // Resolve how many thumbs to render: the controlled value wins, then the
  // uncontrolled default, then a two-thumb [min, max] range.
  const thumbValues = React.useMemo(() => {
    if (Array.isArray(value)) return value
    if (Array.isArray(defaultValue)) return defaultValue
    return [min, max]
  }, [value, defaultValue, min, max])

  return (
    <SliderPrimitive.Root
      data-slot="slider"
      defaultValue={defaultValue}
      value={value}
      min={min}
      max={max}
      className={cn(
        "relative flex w-full touch-none items-center select-none data-[disabled]:opacity-50 data-[orientation=vertical]:h-full data-[orientation=vertical]:min-h-44 data-[orientation=vertical]:w-auto data-[orientation=vertical]:flex-col",
        className
      )}
      {...props}
    >
      <SliderPrimitive.Track
        data-slot="slider-track"
        className={cn(
          "bg-muted relative grow overflow-hidden rounded-full data-[orientation=horizontal]:h-1.5 data-[orientation=horizontal]:w-full data-[orientation=vertical]:h-full data-[orientation=vertical]:w-1.5"
        )}
      >
        <SliderPrimitive.Range
          data-slot="slider-range"
          className={cn(
            "bg-primary absolute data-[orientation=horizontal]:h-full data-[orientation=vertical]:w-full"
          )}
        />
      </SliderPrimitive.Track>
      {/* One thumb per resolved value; index keys are stable here because
          thumb count only changes when the value arrays change shape. */}
      {thumbValues.map((_, index) => (
        <SliderPrimitive.Thumb
          data-slot="slider-thumb"
          key={index}
          className="border-primary bg-background ring-ring/50 block size-4 shrink-0 rounded-full border shadow-sm transition-[color,box-shadow] hover:ring-4 focus-visible:ring-4 focus-visible:outline-hidden disabled:pointer-events-none disabled:opacity-50"
        />
      ))}
    </SliderPrimitive.Root>
  )
}
export { Slider }

31
components/ui/switch.tsx Normal file
View File

@@ -0,0 +1,31 @@
"use client"
import * as React from "react"
import * as SwitchPrimitive from "@radix-ui/react-switch"
import { cn } from "@/lib/utils"
/**
 * Toggle switch built on Radix UI's Switch primitive, styled for this app.
 * All Radix root props (checked, onCheckedChange, disabled, …) pass through.
 */
function Switch({
  className,
  ...props
}: React.ComponentProps<typeof SwitchPrimitive.Root>) {
  // Track (root) styling, merged with any caller-supplied classes.
  const trackClassName = cn(
    "peer data-[state=checked]:bg-primary data-[state=unchecked]:bg-input focus-visible:border-ring focus-visible:ring-ring/50 dark:data-[state=unchecked]:bg-input/80 inline-flex h-[1.15rem] w-8 shrink-0 items-center rounded-full border border-transparent shadow-xs transition-all outline-none focus-visible:ring-[3px] disabled:cursor-not-allowed disabled:opacity-50",
    className
  )
  // Thumb styling; the translate-x classes slide it when checked.
  const thumbClassName = cn(
    "bg-background dark:data-[state=unchecked]:bg-foreground dark:data-[state=checked]:bg-primary-foreground pointer-events-none block size-4 rounded-full ring-0 transition-transform data-[state=checked]:translate-x-[calc(100%-2px)] data-[state=unchecked]:translate-x-0"
  )
  return (
    <SwitchPrimitive.Root data-slot="switch" className={trackClassName} {...props}>
      <SwitchPrimitive.Thumb data-slot="switch-thumb" className={thumbClassName} />
    </SwitchPrimitive.Root>
  )
}
export { Switch }

View File

@@ -1,4 +1,4 @@
import { SHOE_DATABASE, type Shoe } from "./shoe-database";
import type { Shoe } from "./shoe-database";
/**
* Simulates detecting a shoe from a list of possible shoes.

425
lib/ml/detection-engine.ts Normal file
View File

@@ -0,0 +1,425 @@
import type { DetectionConfig, DetectionResult, DetectionMetrics, DetectionMode } from './types';
import { DetectionWorkerManager } from './detection-worker-manager';
import { detectDeviceCapabilities, getRecommendedConfig } from './device-capabilities';
// Extend window interface for TensorFlow.js
declare global {
interface Window {
tf: any;
}
}
/**
* Main detection engine that coordinates continuous and trigger detection
*/
export class DetectionEngine {
private workerManager: DetectionWorkerManager;
private config: DetectionConfig;
private model: any = null; // TensorFlow.js model instance
// Detection state
private isRunning = false;
private detectionMode: DetectionMode = 'hybrid';
private frameSkipCounter = 0;
// Temporal filtering
private detectionHistory: DetectionResult[] = [];
private lastValidDetection: DetectionResult | null = null;
// Performance tracking
private metrics: DetectionMetrics = {
fps: 0,
inferenceTime: 0,
memoryUsage: 0,
detectionCount: 0,
falsePositiveRate: 0,
timestamp: Date.now()
};
// Event callbacks
private onDetectionCallback?: (detection: DetectionResult | null) => void;
private onMetricsCallback?: (metrics: DetectionMetrics) => void;
private lastDetectionCallbackTime?: number;
constructor() {
  // Worker-backed inference pipeline.
  this.workerManager = new DetectionWorkerManager();
  // Probe the device and derive a configuration tuned to it.
  const deviceCaps = detectDeviceCapabilities();
  this.config = getRecommendedConfig(deviceCaps);
  console.log('Detection engine initialized', { capabilities: deviceCaps, config: this.config });
}
/**
 * Initialize the detection engine with a specific model.
 *
 * @param modelVariant Optional model-size override; defaults to the
 *   device-recommended variant already stored in `this.config`.
 * @param onProgress Optional model-load progress callback (presumably a
 *   0..1 fraction — confirm against DetectionWorkerManager.loadModel).
 * @throws Re-throws any worker load/configure failure after logging it.
 */
async initialize(modelVariant?: 'quantized' | 'standard' | 'full', onProgress?: (progress: number) => void): Promise<void> {
  // `??` rather than `||`: both sides are non-empty string literals, but
  // nullish coalescing states the intent precisely.
  const variant = modelVariant ?? this.config.modelVariant;
  // Keep the stored config consistent with the model we actually load, so
  // the worker is not configured with a stale variant when an override was
  // passed (previously `this.config.modelVariant` was left unchanged).
  this.config = { ...this.config, modelVariant: variant };
  try {
    // Load the model into the worker
    await this.workerManager.loadModel(variant, onProgress);
    // Configure the worker with current settings
    await this.workerManager.configure(this.config);
    console.log(`Detection engine initialized with ${variant} model`);
  } catch (error) {
    console.error('Failed to initialize detection engine:', error);
    throw error;
  }
}
/**
 * Begin detection against the given video element. The continuous frame
 * loop only runs when `enableContinuous` is set; otherwise the engine sits
 * in trigger-only mode. No-op (with a warning) if already running.
 */
startContinuousDetection(videoElement: HTMLVideoElement): void {
  if (this.isRunning) {
    console.warn('Detection already running');
    return;
  }
  this.isRunning = true;
  const continuous = this.config.enableContinuous;
  this.detectionMode = continuous ? 'continuous' : 'trigger';
  if (continuous) {
    this.runContinuousLoop(videoElement);
  }
  console.log(`Started detection in ${this.detectionMode} mode`);
}
/**
 * Halt the continuous loop and reset per-run state. The in-flight
 * requestAnimationFrame iteration (if any) exits on its next
 * `isRunning` check.
 */
stopContinuousDetection(): void {
  this.isRunning = false;
  // Drop the temporal-filter history so a restart begins fresh.
  this.detectionHistory = [];
  this.frameSkipCounter = 0;
  console.log('Stopped continuous detection');
}
/**
 * Perform a single on-demand ("trigger") detection at higher quality than
 * the continuous loop. TensorFlow.js (WebGL backend) and the graph model
 * are lazily loaded into the main thread on first use and cached.
 *
 * NOTE(review): the raw model output is logged but not decoded yet — a
 * fixed high-confidence result is returned instead. Replace with real
 * post-processing once the output format is wired up.
 *
 * @param videoElement Live video to snapshot for inference.
 * @returns The detection result (currently synthetic).
 * @throws Re-throws TF/model load or inference failures after logging.
 */
async triggerDetection(videoElement: HTMLVideoElement): Promise<DetectionResult | null> {
  const startTime = performance.now();
  try {
    console.log('🎯 Starting trigger detection (high quality)');
    // Load TensorFlow.js if not already loaded
    if (!window.tf) {
      const tf = await import('@tensorflow/tfjs');
      await import('@tensorflow/tfjs-backend-webgl');
      await tf.setBackend('webgl');
      await tf.ready();
      window.tf = tf;
      console.log('✅ TensorFlow.js loaded in main thread');
    }
    // Load model if not already loaded; cache it on the instance
    if (!this.model) {
      console.log('📥 Loading model in main thread...');
      this.model = await window.tf.loadGraphModel('/models/model.json');
      console.log('✅ Model loaded in main thread');
    }
    // Capture and preprocess the current frame as a 300x300 batch-of-1 tensor
    const tensor = window.tf.tidy(() => {
      const canvas = document.createElement('canvas');
      const ctx = canvas.getContext('2d')!;
      canvas.width = 300;
      canvas.height = 300;
      ctx.drawImage(videoElement, 0, 0, 300, 300);
      const img = window.tf.browser.fromPixels(canvas);
      return img.expandDims(0);
    });
    console.log('📸 Trigger detection - Input tensor shape:', tensor.shape);
    // Run model inference; always release the input tensor, even when
    // executeAsync throws — otherwise it leaks GPU memory
    let result;
    try {
      result = await this.model.executeAsync(tensor);
    } finally {
      tensor.dispose();
    }
    console.log('🔬 Trigger detection - Model output:', result);
    // The output tensor(s) are not consumed yet — dispose them explicitly.
    // executeAsync allocates outside any tidy() scope, so skipping this
    // leaked one set of output tensors per trigger.
    window.tf.dispose(result);
    // Return high-confidence detection for manual triggers
    const triggerDetection: DetectionResult = {
      bbox: [0.25, 0.25, 0.5, 0.5], // Different position than continuous
      confidence: 0.92, // Higher confidence for trigger
      class: 'shoe',
      timestamp: Date.now()
    };
    // Update metrics
    this.metrics.inferenceTime = performance.now() - startTime;
    this.metrics.detectionCount++;
    this.metrics.timestamp = Date.now();
    console.log('✅ Trigger detection completed:', triggerDetection);
    // Surface the trigger result through the normal detection callback so
    // the UI overlay updates immediately
    if (this.onDetectionCallback) {
      this.onDetectionCallback(triggerDetection);
    }
    return triggerDetection;
  } catch (error) {
    console.error('❌ Trigger detection failed:', error);
    throw error;
  }
}
/**
 * Continuous detection loop.
 *
 * Runs once per animation frame (honouring config.frameSkip), performs a
 * lower-quality inference pass, applies temporal filtering, and forwards a
 * debounced result to the detection subscriber. Reschedules itself while
 * `isRunning` is true; inference errors are logged and the loop carries on.
 */
private async runContinuousLoop(videoElement: HTMLVideoElement): Promise<void> {
  if (!this.isRunning) return;
  // Frame skipping logic
  this.frameSkipCounter++;
  if (this.frameSkipCounter < this.config.frameSkip) {
    // Skip this frame, schedule next iteration
    requestAnimationFrame(() => this.runContinuousLoop(videoElement));
    return;
  }
  this.frameSkipCounter = 0;
  console.log('🔄 Running continuous detection frame...');
  try {
    // Load TensorFlow.js if not already loaded
    if (!window.tf) {
      const tf = await import('@tensorflow/tfjs');
      await import('@tensorflow/tfjs-backend-webgl');
      await tf.setBackend('webgl');
      await tf.ready();
      window.tf = tf;
    }
    // Load model if not already loaded
    if (!this.model) {
      this.model = await window.tf.loadGraphModel('/models/model.json');
    }
    const startTime = performance.now();
    // Capture and preprocess image (lower quality for continuous)
    const tensor = window.tf.tidy(() => {
      const canvas = document.createElement('canvas');
      const ctx = canvas.getContext('2d')!;
      canvas.width = 300;
      canvas.height = 300;
      ctx.drawImage(videoElement, 0, 0, 300, 300);
      const img = window.tf.browser.fromPixels(canvas);
      return img.expandDims(0);
    });
    // Run model inference
    const result = await this.model.executeAsync(tensor);
    tensor.dispose();
    // BUGFIX: executeAsync allocates output tensors outside any tf.tidy scope;
    // without explicit disposal this loop leaks GPU/heap memory on every frame.
    if (Array.isArray(result)) {
      result.forEach((t) => t.dispose());
    } else {
      result.dispose();
    }
    // Return low-confidence detection for continuous mode (below popup threshold)
    // NOTE(review): the model output is currently unused and this placeholder
    // detection is reported instead — confirm before shipping.
    const mockDetection: DetectionResult = {
      bbox: [0.1, 0.1, 0.3, 0.3],
      confidence: 0.5, // Medium confidence - shows bounding box but won't trigger popup
      class: 'shoe',
      timestamp: Date.now()
    };
    const inferenceTime = performance.now() - startTime;
    console.log('⚡ Continuous detection completed:', { time: inferenceTime, confidence: mockDetection.confidence });
    // Apply temporal filtering
    const validDetection = this.applyTemporalFiltering(mockDetection);
    // Update metrics
    this.updateMetrics(inferenceTime);
    // Trigger callbacks (only if we have a valid detection), debounced so the
    // UI receives at most one update every 500ms.
    if (this.onDetectionCallback && validDetection) {
      const now = Date.now();
      if (!this.lastDetectionCallbackTime || now - this.lastDetectionCallbackTime > 500) {
        this.onDetectionCallback(validDetection);
        this.lastDetectionCallbackTime = now;
      }
    }
  } catch (error) {
    console.error('Continuous detection error:', error);
  }
  // Schedule next iteration
  if (this.isRunning) {
    requestAnimationFrame(() => this.runContinuousLoop(videoElement));
  }
}
/**
 * Grabs the current video frame as raw ImageData via an off-screen canvas.
 *
 * @param videoElement Video to sample.
 * @param highQuality  true → 640x480 (trigger scans), false → 320x240 (continuous).
 * @returns The captured frame's pixel data.
 */
private captureVideoFrame(videoElement: HTMLVideoElement, highQuality: boolean): ImageData {
  // Resolution depends on detection mode: trigger scans get a larger capture.
  const [width, height]: [number, number] = highQuality ? [640, 480] : [320, 240];
  const scratch = document.createElement('canvas');
  const context = scratch.getContext('2d')!;
  scratch.width = width;
  scratch.height = height;
  // Render the video frame into the scratch canvas, then read it back out.
  context.drawImage(videoElement, 0, 0, width, height);
  const frame = context.getImageData(0, 0, width, height);
  scratch.remove();
  return frame;
}
/**
 * Temporal-consistency filter: a detection only becomes "valid" once seen
 * repeatedly within a short window, and brief dropouts keep showing the last
 * valid box instead of flickering.
 *
 * @param detection This frame's raw detection, or null when nothing was seen.
 * @returns The detection to display, or null when nothing is stable enough.
 */
private applyTemporalFiltering(detection: DetectionResult | null): DetectionResult | null {
  const now = Date.now();
  if (detection === null) {
    // Nothing this frame: prune stale history (older than 1s) and coast on
    // the last valid detection while at least two recent sightings remain.
    this.detectionHistory = this.detectionHistory.filter((d) => now - d.timestamp < 1000);
    return this.detectionHistory.length >= 2 ? this.lastValidDetection : null;
  }
  // Record the sighting, keeping only the last three seconds of history.
  this.detectionHistory = [...this.detectionHistory, detection].filter(
    (d) => now - d.timestamp < 3000
  );
  // Two sightings inside the last 500ms ⇒ stable enough to accept as valid.
  const recent = this.detectionHistory.filter((d) => now - d.timestamp < 500);
  if (recent.length >= 2) {
    this.lastValidDetection = detection;
    return detection;
  }
  // Not consistent yet — keep whatever was last accepted (possibly null).
  return this.lastValidDetection;
}
/**
 * Folds the latest inference timing into a fresh metrics snapshot and
 * notifies the metrics subscriber, if one is registered.
 *
 * @param inferenceTime Duration of the last inference pass in milliseconds.
 */
private updateMetrics(inferenceTime: number): void {
  const snapshot = {
    fps: 0, // Placeholder, as PerformanceMonitor is removed
    inferenceTime,
    memoryUsage: this.getMemoryUsage(),
    detectionCount: this.metrics.detectionCount + 1,
    falsePositiveRate: this.calculateFalsePositiveRate(),
    timestamp: Date.now()
  };
  this.metrics = snapshot;
  this.onMetricsCallback?.(this.metrics);
}
/**
 * Best-effort JS heap usage in MB via the non-standard `performance.memory`
 * API (Chromium-only); returns 0 where the API is unavailable.
 */
private getMemoryUsage(): number {
  const perf = performance as Performance & { memory?: { usedJSHeapSize: number } };
  const usedBytes = perf.memory?.usedJSHeapSize;
  return usedBytes ? usedBytes / (1024 * 1024) : 0;
}
/**
 * Rough false-positive estimate: a crowded recent history implies a slightly
 * higher assumed rate. Result is clamped to [0, 1].
 * (A real implementation would need proper ground-truth tracking.)
 */
private calculateFalsePositiveRate(): number {
  const estimate = this.detectionHistory.length > 10 ? 0.1 : 0.05;
  return Math.min(1, Math.max(0, estimate));
}
/**
 * Set detection callback.
 * Registers the single subscriber invoked whenever a detection result (or
 * null) is produced; calling again replaces the previous subscriber.
 */
onDetection(callback: (detection: DetectionResult | null) => void): void {
  this.onDetectionCallback = callback;
}
/**
 * Set metrics callback.
 * Registers the single subscriber notified after each metrics refresh;
 * calling again replaces the previous subscriber.
 */
onMetrics(callback: (metrics: DetectionMetrics) => void): void {
  this.onMetricsCallback = callback;
}
/**
 * Merges partial settings into the active configuration and pushes the
 * merged configuration to the worker.
 *
 * @param newConfig Subset of DetectionConfig fields to override.
 */
async updateConfig(newConfig: Partial<DetectionConfig>): Promise<void> {
  const merged: DetectionConfig = { ...this.config, ...newConfig };
  this.config = merged;
  await this.workerManager.configure(merged);
  console.log('Configuration updated:', merged);
}
/**
 * Returns a defensive copy of the active configuration (callers cannot
 * mutate the engine's internal state through it).
 */
getConfig(): DetectionConfig {
  return Object.assign({}, this.config);
}
/**
 * Returns a defensive copy of the latest metrics snapshot.
 */
getMetrics(): DetectionMetrics {
  return Object.assign({}, this.metrics);
}
/**
 * Check if detection is running.
 * True between startContinuousDetection() and stopContinuousDetection().
 */
isDetectionRunning(): boolean {
  return this.isRunning;
}
/**
 * Destroy the detection engine: halts the continuous loop and tears down
 * the worker. The engine instance cannot be reused afterwards.
 */
destroy(): void {
  this.stopContinuousDetection();
  this.workerManager.destroy();
}
}

View File

@@ -0,0 +1,214 @@
import type { DetectionConfig, DetectionResult, DetectionMetrics, WorkerMessage, WorkerResponse } from './types';
import { ModelCache } from './model-cache';
import { MODEL_VARIANTS } from './model-config';
/**
 * Manages the detection worker and handles communication.
 *
 * Requests are correlated with responses via an incrementing message id;
 * each in-flight request lives in `pendingMessages` until the worker
 * replies, errors, or the request times out.
 */
export class DetectionWorkerManager {
  private worker: Worker | null = null;
  private messageId = 0;
  private pendingMessages = new Map<string, { resolve: (value: unknown) => void; reject: (reason?: unknown) => void }>();
  private modelCache = new ModelCache();
  private isWorkerReady = false;

  constructor() {
    this.initializeWorker();
  }

  /** Spins up the module worker and performs the INITIALIZE handshake. */
  private async initializeWorker() {
    try {
      // Create worker from the detection worker file
      this.worker = new Worker(
        new URL('../../workers/detection-worker.ts', import.meta.url),
        { type: 'module' }
      );
      this.worker.onmessage = (event: MessageEvent<WorkerResponse>) => {
        this.handleWorkerMessage(event.data);
      };
      this.worker.onerror = (error) => {
        console.error('Worker error:', error);
        this.isWorkerReady = false;
      };
      // NOTE(review): ready is flagged before the INITIALIZE round-trip
      // completes (sendMessage itself requires isWorkerReady to be true).
      // Confirm the worker tolerates early DETECT/LOAD_MODEL messages.
      this.isWorkerReady = true;
      console.log('Detection worker initialized');
      await this.sendMessage('INITIALIZE', undefined);
    } catch (error) {
      console.error('Failed to initialize worker:', error);
      this.isWorkerReady = false;
    }
  }

  /** Routes a worker response to the pending request with the matching id. */
  private handleWorkerMessage(message: WorkerResponse) {
    const { type, id } = message;
    const pending = this.pendingMessages.get(id);
    if (!pending) {
      console.warn('Received response for unknown message ID:', id);
      return;
    }
    this.pendingMessages.delete(id);
    if (type === 'ERROR') {
      pending.reject(new Error((message as { error: string }).error));
    } else if (type === 'DETECTION_RESULT') {
      const detectionMessage = message as { result: DetectionResult | null };
      pending.resolve({ result: detectionMessage.result });
    } else if (type === 'INITIALIZED') {
      pending.resolve(undefined);
    } else if (type === 'METRICS_UPDATE') {
      pending.resolve({ metrics: (message as { metrics: Partial<DetectionMetrics> }).metrics });
    } else if (type === 'LOADED_MODEL') {
      pending.resolve(undefined);
    } else if (type === 'CONFIGURED') {
      pending.resolve(undefined);
    } else {
      // Unknown-but-correlated response: hand the raw message to the caller.
      pending.resolve(message);
    }
  }

  /**
   * Sends one request to the worker and resolves with its response.
   * @throws When the worker is unavailable, the type is unknown, or no
   *         response arrives within the timeout.
   */
  private async sendMessage<T>(type: WorkerMessage['type'], payload: unknown): Promise<T> {
    if (!this.worker || !this.isWorkerReady) {
      throw new Error('Worker not available');
    }
    const id = (this.messageId++).toString();
    return new Promise((resolve, reject) => {
      this.pendingMessages.set(id, { resolve, reject });
      let message: WorkerMessage & { id: string };
      if (type === 'INITIALIZE') {
        message = { type, id };
      } else if (type === 'DETECT') {
        // BUGFIX: payload is `unknown`; strict mode forbids property access
        // without a narrowing cast per message shape.
        const { imageData } = payload as { imageData: ImageData };
        message = { type, imageData, id };
      } else if (type === 'UPDATE_CONFIG' || type === 'CONFIGURE') {
        message = { type, config: payload as DetectionConfig, id };
      } else if (type === 'LOAD_MODEL') {
        const { variant, modelData } = payload as { variant: 'quantized' | 'standard' | 'full'; modelData: ArrayBuffer };
        message = { type, variant, modelData, id };
      } else {
        // BUGFIX: remove the orphaned pending entry before failing; throwing
        // here previously left it in the map until the timeout fired.
        this.pendingMessages.delete(id);
        reject(new Error(`Unknown message type for sendMessage: ${type}`));
        return;
      }
      this.worker!.postMessage(message);
      // Time out after 90 seconds (first model load can be slow).
      // BUGFIX: the old comment claimed 30 seconds while the code used 90000ms.
      setTimeout(() => {
        if (this.pendingMessages.has(id)) {
          this.pendingMessages.delete(id);
          reject(new Error('Worker message timeout'));
        }
      }, 90000);
    });
  }

  /**
   * Load a model into the worker.
   * Fetches from the IndexedDB cache (downloading on miss) and transfers the
   * bytes to the worker.
   * @param variant    Which model build to load.
   * @param onProgress Optional download-progress callback (0-100).
   */
  async loadModel(variant: 'quantized' | 'standard' | 'full', onProgress?: (progress: number) => void): Promise<void> {
    const modelInfo = MODEL_VARIANTS[variant];
    try {
      // Get model data from cache or download
      const modelData = await this.modelCache.getModel(variant, modelInfo, onProgress);
      // Send model data to worker
      await this.sendMessage('LOAD_MODEL', {
        variant,
        modelData
      });
      console.log(`Model ${variant} loaded successfully`);
    } catch (error) {
      console.error(`Failed to load model ${variant}:`, error);
      throw error;
    }
  }

  /**
   * Configure the detection settings.
   */
  async configure(config: DetectionConfig): Promise<void> {
    await this.sendMessage('CONFIGURE', config);
  }

  /**
   * Perform detection on an image.
   * @returns Zero-or-one results wrapped in an array (the worker reports a
   *          single best detection or null).
   */
  async detect(imageData: ImageData): Promise<DetectionResult[]> {
    if (!this.isWorkerReady) {
      throw new Error('Worker not ready');
    }
    try {
      const results = await this.sendMessage<{ result: DetectionResult | null }>('DETECT', { imageData });
      // Handle the case where results or results.result is undefined
      if (!results || results.result === undefined || results.result === null) {
        return [];
      }
      return [results.result];
    } catch (error) {
      console.error('Detection failed:', error);
      throw error;
    }
  }

  /**
   * Get worker metrics.
   * BUGFIX: the previous implementation sent a 'getMetrics' message, which is
   * not part of the WorkerMessage protocol and therefore always failed with a
   * confusing error. The worker pushes METRICS_UPDATE messages instead, so
   * polling is unsupported; fail fast with a descriptive error.
   */
  async getMetrics(): Promise<object> {
    throw new Error('Polling metrics is not supported; listen for METRICS_UPDATE pushes instead.');
  }

  /**
   * Check if worker is ready.
   */
  isReady(): boolean {
    return this.isWorkerReady;
  }

  /**
   * Terminate the worker and reject every in-flight request.
   */
  destroy() {
    if (this.worker) {
      this.worker.terminate();
      this.worker = null;
      this.isWorkerReady = false;
    }
    // Reject all pending messages
    this.pendingMessages.forEach(({ reject }) => {
      reject(new Error('Worker terminated'));
    });
    this.pendingMessages.clear();
  }
}
/**
 * Singleton instance manager.
 * The shared manager is created lazily and can be torn down and recreated.
 */
let workerManager: DetectionWorkerManager | null = null;

/** Returns the shared DetectionWorkerManager, creating it on first access. */
export function getDetectionWorkerManager(): DetectionWorkerManager {
  workerManager ??= new DetectionWorkerManager();
  return workerManager;
}

/** Tears down and forgets the shared manager (next access recreates it). */
export function destroyDetectionWorkerManager() {
  if (workerManager === null) return;
  workerManager.destroy();
  workerManager = null;
}

View File

@@ -0,0 +1,60 @@
import type { DeviceCapabilities, DeviceTier } from './types';
import { DEFAULT_CONFIG } from './model-config';
/**
 * Detects device capabilities to suggest an optimal performance configuration.
 * Probes WebGL support, logical CPU count, and (where the browser exposes it)
 * device memory, then buckets the device into a low/mid/high tier.
 */
export function detectDeviceCapabilities(): DeviceCapabilities {
  let hasWebGL = false;
  try {
    const probe = document.createElement('canvas');
    hasWebGL = !!(
      window.WebGLRenderingContext &&
      (probe.getContext('webgl') || probe.getContext('experimental-webgl'))
    );
  } catch {
    hasWebGL = false;
  }
  const cpuCores = navigator.hardwareConcurrency || 2; // Default to 2 if undefined
  // @ts-expect-error - deviceMemory is not in all browsers
  const memory = navigator.deviceMemory || 1; // Default to 1GB if undefined
  // Tier thresholds: 8 cores + 4GB ⇒ high, 4 cores + 2GB ⇒ mid, else low.
  const tier: DeviceTier =
    cpuCores >= 8 && memory >= 4 ? 'high'
    : cpuCores >= 4 && memory >= 2 ? 'mid'
    : 'low';
  return { tier, hasWebGL, cpuCores, memory };
}
/**
* Gets a recommended configuration based on the detected device tier.
* @param capabilities The detected device capabilities.
* @returns A partial DetectionConfig with recommended settings.
*/
export function getRecommendedConfig(capabilities: DeviceCapabilities): Partial<typeof DEFAULT_CONFIG> {
switch (capabilities.tier) {
case 'high':
return {
modelVariant: 'standard',
frameSkip: 3,
confidenceThreshold: 0.6,
};
case 'mid':
return {
modelVariant: 'standard',
frameSkip: 5,
confidenceThreshold: 0.5,
};
case 'low':
default:
return {
modelVariant: 'quantized',
frameSkip: 8,
confidenceThreshold: 0.4,
};
}
}

View File

@@ -0,0 +1,108 @@
import type { DetectionResult } from './types';
import { CLASS_LABELS, VALIDATION_RULES } from './model-config';
/**
 * A temporal filter to smooth detections and reduce flickering.
 *
 * Keeps a fixed-size sliding window of the last `consistencyFrames` results;
 * a detection is only reported once every slot in the window is occupied,
 * and then the most confident recent detection wins.
 */
class TemporalFilter {
  // Sliding window of the most recent results (null = no detection that frame).
  // BUGFIX: removed the unused `frameCount` field that was never read.
  private history: (DetectionResult | null)[] = [];

  constructor(private consistencyFrames: number) {
    this.history = new Array(consistencyFrames).fill(null);
  }

  /**
   * Pushes the newest result into the window and returns the smoothed output.
   * @param detection This frame's detection, or null when nothing was found.
   * @returns The most confident detection when the last `consistencyFrames`
   *          frames all contained one; otherwise null.
   */
  add(detection: DetectionResult | null): DetectionResult | null {
    this.history.shift();
    this.history.push(detection);
    const recentDetections = this.history.filter(d => d !== null);
    if (recentDetections.length >= this.consistencyFrames) {
      // Return the most confident detection from the recent history
      return recentDetections.reduce((prev, current) => (prev!.confidence > current!.confidence ? prev : current));
    }
    return null;
  }
}
/**
 * The InferencePipeline class handles post-processing of model outputs,
 * including filtering, validation, and temporal smoothing to prevent false positives.
 */
export class InferencePipeline {
  // Smooths per-frame results over VALIDATION_RULES.temporalConsistencyFrames frames.
  private temporalFilter: TemporalFilter;
  constructor() {
    this.temporalFilter = new TemporalFilter(VALIDATION_RULES.temporalConsistencyFrames);
  }
  /**
   * Processes the raw output from the TensorFlow.js model.
   * @param boxes Raw bounding boxes, flattened 4 values per detection as
   *              [y_min, x_min, y_max, x_max] (normalized coordinates).
   * @param scores Raw confidence scores, one per detection.
   * @param classes Raw class indices (looked up in CLASS_LABELS).
   * @param confidenceThreshold The current confidence threshold.
   * @returns A single, validated DetectionResult or null.
   */
  process(boxes: number[], scores: number[], classes: number[], confidenceThreshold: number): DetectionResult | null {
    const detections: DetectionResult[] = [];
    for (let i = 0; i < scores.length; i++) {
      const score = scores[i];
      if (score < confidenceThreshold) continue;
      const classIndex = classes[i];
      const className = CLASS_LABELS[classIndex];
      // Only 'shoe' detections survive; everything else is discarded.
      if (className !== 'shoe') continue;
      // Extract bounding box [y_min, x_min, y_max, x_max]
      const [yMin, xMin, yMax, xMax] = boxes.slice(i * 4, (i + 1) * 4);
      // Convert to [x, y, width, height] form used by DetectionResult.
      const bbox: [number, number, number, number] = [xMin, yMin, xMax - xMin, yMax - yMin];
      const detection: DetectionResult = {
        bbox,
        confidence: score,
        class: className,
      };
      if (this.isValid(detection)) {
        detections.push(detection);
      }
    }
    if (detections.length === 0) {
      // Still feed the filter so its sliding window decays.
      return this.temporalFilter.add(null);
    }
    // Get the single best detection
    const bestDetection = detections.reduce((prev, current) => (prev.confidence > current.confidence ? prev : current));
    return this.temporalFilter.add(bestDetection);
  }
  /**
   * Validates a detection against a set of rules.
   * @param detection The detection to validate.
   * @returns True if the detection is valid, false otherwise.
   */
  private isValid(detection: DetectionResult): boolean {
    const { bbox } = detection;
    const [, , width, height] = bbox;
    // Bounding box size validation (relative to a 320x320 input)
    // NOTE(review): DEFAULT_CONFIG.inputSize is [300, 300]; the 320 constant
    // here looks inconsistent — confirm which input size the model uses.
    const boxPixelWidth = width * 320;
    const boxPixelHeight = height * 320;
    if (boxPixelWidth < VALIDATION_RULES.minBoundingBoxSize || boxPixelHeight < VALIDATION_RULES.minBoundingBoxSize) {
      return false;
    }
    // Aspect ratio validation
    const aspectRatio = boxPixelWidth / boxPixelHeight;
    if (aspectRatio < VALIDATION_RULES.aspectRatioRange[0] || aspectRatio > VALIDATION_RULES.aspectRatioRange[1]) {
      return false;
    }
    return true;
  }
}

269
lib/ml/model-cache.ts Normal file
View File

@@ -0,0 +1,269 @@
import type { ModelInfo } from './types';
// IndexedDB database/schema identifiers for the model cache.
const DB_NAME = 'ShoeDetectionModels';
const DB_VERSION = 1;
const STORE_NAME = 'models';
/**
 * Shape of one model entry persisted in IndexedDB.
 */
export interface CachedModel {
  id: string; // object-store keyPath: `shoe-detection-<variant>`
  variant: 'quantized' | 'standard' | 'full';
  data: ArrayBuffer; // raw model bytes as downloaded
  metadata: ModelInfo;
  timestamp: number; // cache-write time (ms epoch); used for age-based eviction
  version: string; // model version; a mismatch forces a re-download
}
/**
 * IndexedDB-based model cache for TensorFlow.js models.
 * Opening the database starts in the constructor; every public method awaits
 * that initialization before touching the store.
 */
export class ModelCache {
  private db: IDBDatabase | null = null;
  // Promise for the in-flight open; awaited by ensureReady().
  private initPromise: Promise<void> | null = null;
  constructor() {
    this.initPromise = this.init();
  }
  /**
   * Initialize IndexedDB
   */
  private async init(): Promise<void> {
    return new Promise((resolve, reject) => {
      const request = indexedDB.open(DB_NAME, DB_VERSION);
      request.onerror = () => {
        console.error('Failed to open IndexedDB:', request.error);
        reject(request.error);
      };
      request.onsuccess = () => {
        this.db = request.result;
        resolve();
      };
      request.onupgradeneeded = (event) => {
        const db = (event.target as IDBOpenDBRequest).result;
        // Create models store
        if (!db.objectStoreNames.contains(STORE_NAME)) {
          const store = db.createObjectStore(STORE_NAME, { keyPath: 'id' });
          store.createIndex('variant', 'variant', { unique: false });
          store.createIndex('timestamp', 'timestamp', { unique: false });
        }
      };
    });
  }
  /**
   * Ensure database is ready
   * @throws If the database failed to open.
   */
  private async ensureReady(): Promise<void> {
    if (this.initPromise) {
      await this.initPromise;
    }
    if (!this.db) {
      throw new Error('Database not initialized');
    }
  }
  /**
   * Cache a model in IndexedDB
   * Overwrites any existing entry for the same variant (keyed put).
   */
  async cacheModel(variant: 'quantized' | 'standard' | 'full', modelData: ArrayBuffer, metadata: ModelInfo): Promise<void> {
    await this.ensureReady();
    return new Promise((resolve, reject) => {
      const transaction = this.db!.transaction([STORE_NAME], 'readwrite');
      const store = transaction.objectStore(STORE_NAME);
      const cachedModel: CachedModel = {
        id: `shoe-detection-${variant}`,
        variant,
        data: modelData,
        metadata,
        timestamp: Date.now(),
        version: metadata.version
      };
      const request = store.put(cachedModel);
      request.onsuccess = () => {
        console.log(`Model ${variant} cached successfully`);
        resolve();
      };
      request.onerror = () => {
        console.error(`Failed to cache model ${variant}:`, request.error);
        reject(request.error);
      };
    });
  }
  /**
   * Retrieve a cached model
   * @returns The cached entry or null when no entry exists for the variant.
   */
  async getCachedModel(variant: 'quantized' | 'standard' | 'full'): Promise<CachedModel | null> {
    await this.ensureReady();
    return new Promise((resolve, reject) => {
      const transaction = this.db!.transaction([STORE_NAME], 'readonly');
      const store = transaction.objectStore(STORE_NAME);
      const request = store.get(`shoe-detection-${variant}`);
      request.onsuccess = () => {
        resolve(request.result || null);
      };
      request.onerror = () => {
        reject(request.error);
      };
    });
  }
  /**
   * Check if a model is cached and up to date
   * Errors are treated as a cache miss (returns false).
   */
  async isModelCached(variant: 'quantized' | 'standard' | 'full', requiredVersion: string): Promise<boolean> {
    try {
      const cached = await this.getCachedModel(variant);
      return cached !== null && cached.version === requiredVersion;
    } catch (error) {
      console.error('Error checking cached model:', error);
      return false;
    }
  }
  /**
   * Download and cache a model
   * Streams the response so onProgress can be reported; progress is only
   * emitted when the server sends a content-length header.
   */
  async downloadAndCacheModel(variant: 'quantized' | 'standard' | 'full', modelInfo: ModelInfo, onProgress?: (progress: number) => void): Promise<ArrayBuffer> {
    console.log(`Downloading model ${variant} from ${modelInfo.url}`);
    const response = await fetch(modelInfo.url);
    if (!response.ok) {
      throw new Error(`Failed to download model: ${response.statusText}`);
    }
    const contentLength = response.headers.get('content-length');
    const total = contentLength ? parseInt(contentLength, 10) : 0;
    let loaded = 0;
    const reader = response.body?.getReader();
    const chunks: Uint8Array[] = [];
    if (!reader) {
      throw new Error('Failed to get response reader');
    }
    while (true) {
      const { done, value } = await reader.read();
      if (done) break;
      chunks.push(value);
      loaded += value.length;
      if (onProgress && total > 0) {
        onProgress((loaded / total) * 100);
      }
    }
    // Combine chunks into single ArrayBuffer
    const totalLength = chunks.reduce((acc, chunk) => acc + chunk.length, 0);
    const result = new Uint8Array(totalLength);
    let offset = 0;
    for (const chunk of chunks) {
      result.set(chunk, offset);
      offset += chunk.length;
    }
    const modelData = result.buffer;
    // Cache the model
    await this.cacheModel(variant, modelData, modelInfo);
    return modelData;
  }
  /**
   * Get or download a model
   * Serves from the cache when the stored version matches; otherwise falls
   * back to a fresh download (which also refreshes the cache).
   */
  async getModel(variant: 'quantized' | 'standard' | 'full', modelInfo: ModelInfo, onProgress?: (progress: number) => void): Promise<ArrayBuffer> {
    // Check if model is already cached
    const isCache = await this.isModelCached(variant, modelInfo.version);
    if (isCache) {
      console.log(`Using cached model ${variant}`);
      const cached = await this.getCachedModel(variant);
      return cached!.data;
    }
    // Download and cache the model
    return await this.downloadAndCacheModel(variant, modelInfo, onProgress);
  }
  /**
   * Clear old cached models
   * @param maxAge Maximum entry age in ms (default: 7 days).
   */
  async clearOldModels(maxAge: number = 7 * 24 * 60 * 60 * 1000): Promise<void> {
    await this.ensureReady();
    const cutoffTime = Date.now() - maxAge;
    return new Promise((resolve, reject) => {
      const transaction = this.db!.transaction([STORE_NAME], 'readwrite');
      const store = transaction.objectStore(STORE_NAME);
      const index = store.index('timestamp');
      const range = IDBKeyRange.upperBound(cutoffTime);
      const request = index.openCursor(range);
      request.onsuccess = (event) => {
        const cursor = (event.target as IDBRequest).result;
        if (cursor) {
          cursor.delete();
          cursor.continue();
        } else {
          console.log('Old models cleared');
          resolve();
        }
      };
      request.onerror = () => {
        reject(request.error);
      };
    });
  }
  /**
   * Get cache storage usage
   * @returns Total cached bytes, entry count, and the cached variant names.
   */
  async getCacheStats(): Promise<{ totalSize: number; modelCount: number; models: string[] }> {
    await this.ensureReady();
    return new Promise((resolve, reject) => {
      const transaction = this.db!.transaction([STORE_NAME], 'readonly');
      const store = transaction.objectStore(STORE_NAME);
      const request = store.getAll();
      request.onsuccess = () => {
        const models = request.result as CachedModel[];
        const totalSize = models.reduce((sum, model) => sum + model.data.byteLength, 0);
        const modelNames = models.map(m => m.variant);
        resolve({
          totalSize,
          modelCount: models.length,
          models: modelNames
        });
      };
      request.onerror = () => {
        reject(request.error);
      };
    });
  }
}

57
lib/ml/model-config.ts Normal file
View File

@@ -0,0 +1,57 @@
import type { DetectionConfig, ModelInfo } from './types';
/**
 * Configuration for different model variants.
 * I've used the models from the original repo and another one from TensorFlow Hub.
 * NOTE(review): all three variants currently point at the same
 * /models/model.json with identical sizes — presumably placeholders until
 * distinct model builds exist; confirm before relying on variant switching.
 */
export const MODEL_VARIANTS: Record<'quantized' | 'standard' | 'full', ModelInfo> = {
  quantized: {
    variant: 'quantized',
    url: '/models/model.json',
    size: 2 * 1024 * 1024, // ~2MB
    name: 'SSD-MobileNetV2 Quantized',
    description: 'Fastest, for continuous detection.'
  },
  standard: {
    variant: 'standard',
    url: '/models/model.json',
    size: 2 * 1024 * 1024, // Same model, different configs
    name: 'SSD-MobileNetV2 Standard',
    description: 'Balanced speed and accuracy.'
  },
  full: {
    variant: 'full',
    url: '/models/model.json',
    size: 2 * 1024 * 1024, // Same model, different configs
    name: 'SSD-MobileNetV2 Full',
    description: 'Most accurate, for on-demand scan.'
  }
};
/**
 * Default detection configuration.
 * Used by the detection engine until device-tier recommendations override it.
 */
export const DEFAULT_CONFIG: DetectionConfig = {
  frameSkip: 5,
  confidenceThreshold: 0.3, // Match the working implementation (30%)
  modelVariant: 'standard',
  maxDetections: 1,
  inputSize: [300, 300], // Match the working implementation
  enableContinuous: true,
  enableTrigger: true,
};
/**
 * Class labels for the models.
 * IMPORTANT: This must match the order of the model's output classes.
 */
export const CLASS_LABELS = ['shoe'];
/**
 * Rules to validate detections and reduce false positives.
 * NOTE(review): InferencePipeline applies minBoundingBoxSize against a
 * 320x320 frame while inputSize above is [300, 300] — confirm which is right.
 */
export const VALIDATION_RULES = {
  minBoundingBoxSize: 30, // Minimum pixel width/height of a bounding box
  aspectRatioRange: [0.5, 2.0], // Plausible aspect ratio (width / height) for a shoe
  temporalConsistencyFrames: 3, // Must be detected in N consecutive frames
};

78
lib/ml/types.ts Normal file
View File

@@ -0,0 +1,78 @@
/**
 * This file contains all the TypeScript interfaces for the ML detection system.
 */
/**
 * Configuration for the detection engine.
 */
export interface DetectionConfig {
  frameSkip: number; // run inference only every Nth animation frame
  confidenceThreshold: number; // 0..1; detections scoring below are discarded
  modelVariant: 'quantized' | 'standard' | 'full';
  maxDetections: number; // upper bound on detections reported per frame
  inputSize: [number, number]; // [width, height] fed to the model
  enableContinuous: boolean; // allow the continuous detection loop
  enableTrigger: boolean; // allow on-demand trigger scans
}
/**
 * Information about a specific model variant.
 */
export interface ModelInfo {
  variant: 'quantized' | 'standard' | 'full';
  url: string; // where the model.json graph is served from
  size: number; // in bytes
  name: string; // human-readable display name
  description: string;
}
/**
 * Represents a single detected object.
 */
export interface DetectionResult {
  bbox: [number, number, number, number]; // [x, y, width, height]
  confidence: number; // model score in [0, 1]
  class: string; // class label, e.g. 'shoe'
  // BUGFIX: the detection engine stamps results with Date.now() and its
  // temporal filter reads this field, but it was missing here, so those
  // object literals fail excess-property checking under strict mode.
  // Optional because the worker inference pipeline does not set it.
  timestamp?: number;
}
/**
 * Defines the performance tier of the user's device.
 * Drives the model-variant / frame-skip presets in getRecommendedConfig.
 */
export type DeviceTier = 'low' | 'mid' | 'high';
/**
 * Holds information about the device's capabilities.
 */
export interface DeviceCapabilities {
  tier: DeviceTier;
  hasWebGL: boolean; // WebGL available (needed by the tfjs webgl backend)
  cpuCores: number; // navigator.hardwareConcurrency (fallback: 2)
  memory: number; // in GB (navigator.deviceMemory where exposed; fallback: 1)
}
/**
 * Performance metrics for the detection engine.
 */
export interface DetectionMetrics {
  fps: number;
  inferenceTime: number; // last inference duration in ms
  memoryUsage: number; // in MB
  // BUGFIX: the engine's updateMetrics()/triggerDetection() read and write
  // these three fields, but they were missing from this interface, making
  // those accesses and object literals compile errors under strict mode.
  // NOTE(review): confirm the engine's initial metrics object populates them.
  detectionCount: number; // cumulative detections since engine start
  falsePositiveRate: number; // rough estimate in [0, 1]
  timestamp: number; // ms epoch of the last metrics refresh
}
/**
 * Types for messages sent to and from the detection worker.
 * Each request type pairs with a response below; DetectionWorkerManager
 * attaches an `id` field at send time to correlate request and response.
 */
export type WorkerMessage =
  | { type: 'INITIALIZE' }
  | { type: 'DETECT'; imageData: ImageData }
  | { type: 'UPDATE_CONFIG'; config: DetectionConfig }
  | { type: 'LOAD_MODEL'; variant: 'quantized' | 'standard' | 'full'; modelData: ArrayBuffer }
  | { type: 'CONFIGURE'; config: DetectionConfig };
// Responses the worker can emit; ERROR may answer any request type.
export type WorkerResponse =
  | { type: 'INITIALIZED' }
  | { type: 'DETECTION_RESULT'; result: DetectionResult | null }
  | { type: 'METRICS_UPDATE'; metrics: Partial<DetectionMetrics> }
  | { type: 'ERROR'; error: string }
  | { type: 'LOADED_MODEL' }
  | { type: 'CONFIGURED' };

281
lib/ml/use-detection.ts Normal file
View File

@@ -0,0 +1,281 @@
import { useEffect, useRef, useState, useCallback } from 'react';
import type { DetectionConfig, DetectionResult, DetectionMetrics } from './types';
import { DetectionEngine } from './detection-engine';
// Options accepted by the useDetection hook.
interface UseDetectionOptions {
  modelVariant?: 'quantized' | 'standard' | 'full'; // defaults to 'standard'
  enableContinuous?: boolean; // permit startContinuous() (default true)
  enableTrigger?: boolean; // permit triggerDetection() (default true)
  onDetection?: (detection: DetectionResult | null) => void; // fired on every result
  onError?: (error: Error) => void; // fired on init/detect/config failures
}
// Shape returned by useDetection: reactive state plus imperative actions.
interface UseDetectionReturn {
  // State
  isLoading: boolean; // true while the engine/model is initializing
  isDetecting: boolean; // true while the continuous loop runs
  currentDetection: DetectionResult | null;
  metrics: DetectionMetrics | null;
  error: string | null; // last error message, cleared on success
  // Actions
  initialize: (videoElement: HTMLVideoElement) => Promise<void>;
  startContinuous: () => void;
  stopContinuous: () => void;
  triggerDetection: () => Promise<DetectionResult | null>;
  updateConfig: (config: Partial<DetectionConfig>) => Promise<void>;
  // Config
  config: DetectionConfig | null; // null until initialize() completes
}
/**
 * React hook for shoe detection functionality.
 *
 * Wires a DetectionEngine to React state: exposes loading/error/detection
 * state plus imperative actions (initialize, start/stop continuous loop,
 * one-shot trigger, config updates). The engine is created on the first
 * initialize() call and destroyed on unmount.
 */
export function useDetection(options: UseDetectionOptions = {}): UseDetectionReturn {
  const {
    modelVariant = 'standard',
    enableContinuous = true,
    enableTrigger = true,
    onDetection,
    onError
  } = options;
  // State
  const [isLoading, setIsLoading] = useState(false);
  const [isDetecting, setIsDetecting] = useState(false);
  const [currentDetection, setCurrentDetection] = useState<DetectionResult | null>(null);
  const [metrics, setMetrics] = useState<DetectionMetrics | null>(null);
  const [error, setError] = useState<string | null>(null);
  const [config, setConfig] = useState<DetectionConfig | null>(null);
  // Refs
  const detectionEngineRef = useRef<DetectionEngine | null>(null);
  const videoElementRef = useRef<HTMLVideoElement | null>(null);
  const initializationPromiseRef = useRef<Promise<void> | null>(null);
  // Initialize detection engine
  const initialize = useCallback(async (videoElement: HTMLVideoElement): Promise<void> => {
    // Prevent multiple initializations
    if (initializationPromiseRef.current) {
      // BUGFIX: re-initializing with a different <video> element used to be
      // silently ignored, leaving startContinuous()/triggerDetection() bound
      // to the stale element. Rebind before reusing the in-flight promise.
      videoElementRef.current = videoElement;
      return initializationPromiseRef.current;
    }
    setIsLoading(true);
    setError(null);
    const initPromise = (async () => {
      try {
        // Create detection engine
        const engine = new DetectionEngine();
        detectionEngineRef.current = engine;
        videoElementRef.current = videoElement;
        // Set up event listeners
        engine.onDetection((detection) => {
          setCurrentDetection(detection);
          onDetection?.(detection);
        });
        engine.onMetrics((newMetrics) => {
          setMetrics(newMetrics);
        });
        // Initialize with progress tracking
        await engine.initialize(modelVariant, (progress) => {
          // You could add progress state here if needed
          console.log(`Model loading: ${progress.toFixed(1)}%`);
        });
        // Get initial configuration
        const initialConfig = engine.getConfig();
        setConfig(initialConfig);
        console.log('Detection hook initialized successfully');
      } catch (err) {
        const error = err instanceof Error ? err : new Error('Unknown initialization error');
        console.error('Detection initialization failed:', error);
        setError(error.message);
        onError?.(error);
        throw error;
      } finally {
        setIsLoading(false);
      }
    })();
    initializationPromiseRef.current = initPromise;
    return initPromise;
  }, [modelVariant, onDetection, onError]);
  // Start continuous detection (no-op with a warning when unavailable/disabled)
  const startContinuous = useCallback(() => {
    if (!detectionEngineRef.current || !videoElementRef.current) {
      console.warn('Detection engine or video element not available');
      return;
    }
    if (!enableContinuous) {
      console.warn('Continuous detection is disabled');
      return;
    }
    try {
      detectionEngineRef.current.startContinuousDetection(videoElementRef.current);
      setIsDetecting(true);
      setError(null);
    } catch (err) {
      const error = err instanceof Error ? err : new Error('Failed to start continuous detection');
      console.error('Start continuous detection failed:', error);
      setError(error.message);
      onError?.(error);
    }
  }, [enableContinuous, onError]);
  // Stop continuous detection and clear the on-screen detection
  const stopContinuous = useCallback(() => {
    if (!detectionEngineRef.current) {
      return;
    }
    try {
      detectionEngineRef.current.stopContinuousDetection();
      setIsDetecting(false);
      setCurrentDetection(null);
    } catch (err) {
      console.error('Stop continuous detection failed:', err);
    }
  }, []);
  // Trigger single detection (throws when uninitialized or disabled)
  const triggerDetection = useCallback(async (): Promise<DetectionResult | null> => {
    if (!detectionEngineRef.current || !videoElementRef.current) {
      throw new Error('Detection engine or video element not available');
    }
    if (!enableTrigger) {
      throw new Error('Trigger detection is disabled');
    }
    try {
      setError(null);
      const detection = await detectionEngineRef.current.triggerDetection(videoElementRef.current);
      // Update current detection state
      setCurrentDetection(detection);
      onDetection?.(detection);
      return detection;
    } catch (err) {
      const error = err instanceof Error ? err : new Error('Trigger detection failed');
      console.error('Trigger detection failed:', error);
      setError(error.message);
      onError?.(error);
      throw error;
    }
  }, [enableTrigger, onDetection, onError]);
  // Update configuration and mirror the merged result into React state
  const updateConfig = useCallback(async (newConfig: Partial<DetectionConfig>): Promise<void> => {
    if (!detectionEngineRef.current) {
      throw new Error('Detection engine not available');
    }
    try {
      await detectionEngineRef.current.updateConfig(newConfig);
      const updatedConfig = detectionEngineRef.current.getConfig();
      setConfig(updatedConfig);
      setError(null);
    } catch (err) {
      const error = err instanceof Error ? err : new Error('Failed to update configuration');
      console.error('Update config failed:', error);
      setError(error.message);
      onError?.(error);
      throw error;
    }
  }, [onError]);
  // Cleanup on unmount: destroy the engine and drop all refs
  useEffect(() => {
    return () => {
      if (detectionEngineRef.current) {
        detectionEngineRef.current.destroy();
        detectionEngineRef.current = null;
      }
      initializationPromiseRef.current = null;
      videoElementRef.current = null;
    };
  }, []);
  return {
    // State
    isLoading,
    isDetecting,
    currentDetection,
    metrics,
    error,
    // Actions
    initialize,
    startContinuous,
    stopContinuous,
    triggerDetection,
    updateConfig,
    // Config
    config
  };
}
/**
 * Hook for detection metrics monitoring.
 * Subscribes to the engine's metric updates and seeds state with the
 * current snapshot; returns null until an engine is provided.
 */
export function useDetectionMetrics(detectionEngine: DetectionEngine | null) {
  const [metrics, setMetrics] = useState<DetectionMetrics | null>(null);
  useEffect(() => {
    if (!detectionEngine) return;
    detectionEngine.onMetrics(setMetrics);
    // Seed with the metrics available right now.
    setMetrics(detectionEngine.getMetrics());
    // NOTE(review): onMetrics appears to offer no unsubscribe here — confirm
    // the engine drops listeners on destroy, otherwise this can leak.
  }, [detectionEngine]);
  return metrics;
}
/**
 * Hook for performance monitoring and adjustments.
 *
 * Polls the engine's metrics every 5 seconds and derives human-readable
 * tuning recommendations. State is only replaced when the recommendation
 * list actually changes, so consumers do not re-render on every poll tick.
 */
export function usePerformanceOptimization(detectionEngine: DetectionEngine | null) {
  const [recommendations, setRecommendations] = useState<string[]>([]);
  useEffect(() => {
    if (!detectionEngine) return;
    const interval = setInterval(() => {
      const metrics = detectionEngine.getMetrics();
      const newRecommendations: string[] = [];
      if (metrics.fps < 15) {
        newRecommendations.push('Consider increasing frame skip or switching to a lighter model');
      }
      if (metrics.inferenceTime > 100) {
        newRecommendations.push('Inference time is high, consider switching to quantized model');
      }
      // NOTE(review): threshold presumably means 100 MB — confirm the unit
      // reported by DetectionMetrics.memoryUsage.
      if (metrics.memoryUsage > 100) {
        newRecommendations.push('High memory usage detected');
      }
      // Keep the previous array reference when nothing changed so the state
      // update is a no-op and does not trigger a re-render every 5 seconds.
      setRecommendations(prev =>
        prev.length === newRecommendations.length && prev.every((r, i) => r === newRecommendations[i])
          ? prev
          : newRecommendations
      );
    }, 5000); // Check every 5 seconds
    return () => clearInterval(interval);
  }, [detectionEngine]);
  return recommendations;
}

491
package-lock.json generated
View File

@@ -13,6 +13,8 @@
"@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-separator": "^1.1.7",
"@radix-ui/react-slot": "^1.2.3",
"@tensorflow/tfjs": "^4.22.0",
"@tensorflow/tfjs-backend-webgl": "^4.22.0",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"embla-carousel-react": "^8.6.0",
@@ -1901,6 +1903,128 @@
"tailwindcss": "4.1.12"
}
},
"node_modules/@tensorflow/tfjs": {
"version": "4.22.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs/-/tfjs-4.22.0.tgz",
"integrity": "sha512-0TrIrXs6/b7FLhLVNmfh8Sah6JgjBPH4mZ8JGb7NU6WW+cx00qK5BcAZxw7NCzxj6N8MRAIfHq+oNbPUNG5VAg==",
"license": "Apache-2.0",
"dependencies": {
"@tensorflow/tfjs-backend-cpu": "4.22.0",
"@tensorflow/tfjs-backend-webgl": "4.22.0",
"@tensorflow/tfjs-converter": "4.22.0",
"@tensorflow/tfjs-core": "4.22.0",
"@tensorflow/tfjs-data": "4.22.0",
"@tensorflow/tfjs-layers": "4.22.0",
"argparse": "^1.0.10",
"chalk": "^4.1.0",
"core-js": "3.29.1",
"regenerator-runtime": "^0.13.5",
"yargs": "^16.0.3"
},
"bin": {
"tfjs-custom-module": "dist/tools/custom_module/cli.js"
}
},
"node_modules/@tensorflow/tfjs-backend-cpu": {
"version": "4.22.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-cpu/-/tfjs-backend-cpu-4.22.0.tgz",
"integrity": "sha512-1u0FmuLGuRAi8D2c3cocHTASGXOmHc/4OvoVDENJayjYkS119fcTcQf4iHrtLthWyDIPy3JiPhRrZQC9EwnhLw==",
"license": "Apache-2.0",
"dependencies": {
"@types/seedrandom": "^2.4.28",
"seedrandom": "^3.0.5"
},
"engines": {
"yarn": ">= 1.3.2"
},
"peerDependencies": {
"@tensorflow/tfjs-core": "4.22.0"
}
},
"node_modules/@tensorflow/tfjs-backend-webgl": {
"version": "4.22.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-backend-webgl/-/tfjs-backend-webgl-4.22.0.tgz",
"integrity": "sha512-H535XtZWnWgNwSzv538czjVlbJebDl5QTMOth4RXr2p/kJ1qSIXE0vZvEtO+5EC9b00SvhplECny2yDewQb/Yg==",
"license": "Apache-2.0",
"dependencies": {
"@tensorflow/tfjs-backend-cpu": "4.22.0",
"@types/offscreencanvas": "~2019.3.0",
"@types/seedrandom": "^2.4.28",
"seedrandom": "^3.0.5"
},
"engines": {
"yarn": ">= 1.3.2"
},
"peerDependencies": {
"@tensorflow/tfjs-core": "4.22.0"
}
},
"node_modules/@tensorflow/tfjs-converter": {
"version": "4.22.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-converter/-/tfjs-converter-4.22.0.tgz",
"integrity": "sha512-PT43MGlnzIo+YfbsjM79Lxk9lOq6uUwZuCc8rrp0hfpLjF6Jv8jS84u2jFb+WpUeuF4K33ZDNx8CjiYrGQ2trQ==",
"license": "Apache-2.0",
"peerDependencies": {
"@tensorflow/tfjs-core": "4.22.0"
}
},
"node_modules/@tensorflow/tfjs-core": {
"version": "4.22.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-core/-/tfjs-core-4.22.0.tgz",
"integrity": "sha512-LEkOyzbknKFoWUwfkr59vSB68DMJ4cjwwHgicXN0DUi3a0Vh1Er3JQqCI1Hl86GGZQvY8ezVrtDIvqR1ZFW55A==",
"license": "Apache-2.0",
"dependencies": {
"@types/long": "^4.0.1",
"@types/offscreencanvas": "~2019.7.0",
"@types/seedrandom": "^2.4.28",
"@webgpu/types": "0.1.38",
"long": "4.0.0",
"node-fetch": "~2.6.1",
"seedrandom": "^3.0.5"
},
"engines": {
"yarn": ">= 1.3.2"
}
},
"node_modules/@tensorflow/tfjs-core/node_modules/@types/offscreencanvas": {
"version": "2019.7.3",
"resolved": "https://registry.npmjs.org/@types/offscreencanvas/-/offscreencanvas-2019.7.3.tgz",
"integrity": "sha512-ieXiYmgSRXUDeOntE1InxjWyvEelZGP63M+cGuquuRLuIKKT1osnkXjxev9B7d1nXSug5vpunx+gNlbVxMlC9A==",
"license": "MIT"
},
"node_modules/@tensorflow/tfjs-data": {
"version": "4.22.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-data/-/tfjs-data-4.22.0.tgz",
"integrity": "sha512-dYmF3LihQIGvtgJrt382hSRH4S0QuAp2w1hXJI2+kOaEqo5HnUPG0k5KA6va+S1yUhx7UBToUKCBHeLHFQRV4w==",
"license": "Apache-2.0",
"dependencies": {
"@types/node-fetch": "^2.1.2",
"node-fetch": "~2.6.1",
"string_decoder": "^1.3.0"
},
"peerDependencies": {
"@tensorflow/tfjs-core": "4.22.0",
"seedrandom": "^3.0.5"
}
},
"node_modules/@tensorflow/tfjs-layers": {
"version": "4.22.0",
"resolved": "https://registry.npmjs.org/@tensorflow/tfjs-layers/-/tfjs-layers-4.22.0.tgz",
"integrity": "sha512-lybPj4ZNj9iIAPUj7a8ZW1hg8KQGfqWLlCZDi9eM/oNKCCAgchiyzx8OrYoWmRrB+AM6VNEeIT+2gZKg5ReihA==",
"license": "Apache-2.0 AND MIT",
"peerDependencies": {
"@tensorflow/tfjs-core": "4.22.0"
}
},
"node_modules/@tensorflow/tfjs/node_modules/argparse": {
"version": "1.0.10",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz",
"integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==",
"license": "MIT",
"dependencies": {
"sprintf-js": "~1.0.2"
}
},
"node_modules/@tybys/wasm-util": {
"version": "0.10.0",
"resolved": "https://registry.npmjs.org/@tybys/wasm-util/-/wasm-util-0.10.0.tgz",
@@ -1933,16 +2057,37 @@
"dev": true,
"license": "MIT"
},
"node_modules/@types/long": {
"version": "4.0.2",
"resolved": "https://registry.npmjs.org/@types/long/-/long-4.0.2.tgz",
"integrity": "sha512-MqTGEo5bj5t157U6fA/BiDynNkn0YknVdh48CMPkTSpFTVmvao5UQmm7uEF6xBEo7qIMAlY/JSleYaE6VOdpaA==",
"license": "MIT"
},
"node_modules/@types/node": {
"version": "20.19.11",
"resolved": "https://registry.npmjs.org/@types/node/-/node-20.19.11.tgz",
"integrity": "sha512-uug3FEEGv0r+jrecvUUpbY8lLisvIjg6AAic6a2bSP5OEOLeJsDSnvhCDov7ipFFMXS3orMpzlmi0ZcuGkBbow==",
"dev": true,
"license": "MIT",
"dependencies": {
"undici-types": "~6.21.0"
}
},
"node_modules/@types/node-fetch": {
"version": "2.6.13",
"resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.13.tgz",
"integrity": "sha512-QGpRVpzSaUs30JBSGPjOg4Uveu384erbHBoT1zeONvyCfwQxIkUshLAOqN/k9EjGviPRmWTTe6aH2qySWKTVSw==",
"license": "MIT",
"dependencies": {
"@types/node": "*",
"form-data": "^4.0.4"
}
},
"node_modules/@types/offscreencanvas": {
"version": "2019.3.0",
"resolved": "https://registry.npmjs.org/@types/offscreencanvas/-/offscreencanvas-2019.3.0.tgz",
"integrity": "sha512-esIJx9bQg+QYF0ra8GnvfianIY8qWB0GBx54PK5Eps6m+xTj86KLavHv6qDhzKcu5UUOgNfJ2pWaIIV7TRUd9Q==",
"license": "MIT"
},
"node_modules/@types/react": {
"version": "19.1.12",
"resolved": "https://registry.npmjs.org/@types/react/-/react-19.1.12.tgz",
@@ -1963,6 +2108,12 @@
"@types/react": "^19.0.0"
}
},
"node_modules/@types/seedrandom": {
"version": "2.4.34",
"resolved": "https://registry.npmjs.org/@types/seedrandom/-/seedrandom-2.4.34.tgz",
"integrity": "sha512-ytDiArvrn/3Xk6/vtylys5tlY6eo7Ane0hvcx++TKo6RxQXuVfW0AF/oeWqAj9dN29SyhtawuXstgmPlwNcv/A==",
"license": "MIT"
},
"node_modules/@typescript-eslint/eslint-plugin": {
"version": "8.41.0",
"resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.41.0.tgz",
@@ -2520,6 +2671,12 @@
"win32"
]
},
"node_modules/@webgpu/types": {
"version": "0.1.38",
"resolved": "https://registry.npmjs.org/@webgpu/types/-/types-0.1.38.tgz",
"integrity": "sha512-7LrhVKz2PRh+DD7+S+PVaFd5HxaWQvoMqBbsV9fNJO1pjUs1P8bM2vQVNfk+3URTqbuTI7gkXi0rfsN0IadoBA==",
"license": "BSD-3-Clause"
},
"node_modules/acorn": {
"version": "8.15.0",
"resolved": "https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz",
@@ -2560,11 +2717,19 @@
"url": "https://github.com/sponsors/epoberezkin"
}
},
"node_modules/ansi-regex": {
"version": "5.0.1",
"resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
"integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==",
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/ansi-styles": {
"version": "4.3.0",
"resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
"integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
"dev": true,
"license": "MIT",
"dependencies": {
"color-convert": "^2.0.1"
@@ -2782,6 +2947,12 @@
"node": ">= 0.4"
}
},
"node_modules/asynckit": {
"version": "0.4.0",
"resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
"integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==",
"license": "MIT"
},
"node_modules/available-typed-arrays": {
"version": "1.0.7",
"resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz",
@@ -2883,7 +3054,6 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
"integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
@@ -2944,7 +3114,6 @@
"version": "4.1.2",
"resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
"integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ansi-styles": "^4.1.0",
@@ -2985,6 +3154,17 @@
"integrity": "sha512-IV3Ou0jSMzZrd3pZ48nLkT9DA7Ag1pnPzaiQhpW7c3RbcqqzvzzVu+L8gfqMp/8IM2MQtSiqaCxrrcfu8I8rMA==",
"license": "MIT"
},
"node_modules/cliui": {
"version": "7.0.4",
"resolved": "https://registry.npmjs.org/cliui/-/cliui-7.0.4.tgz",
"integrity": "sha512-OcRE68cOsVMXp1Yvonl/fzkQOyjLSu/8bhPDfQt0e0/Eb283TKP20Fs2MqoPsr9SwA595rRCA+QMzYc9nBP+JQ==",
"license": "ISC",
"dependencies": {
"string-width": "^4.2.0",
"strip-ansi": "^6.0.0",
"wrap-ansi": "^7.0.0"
}
},
"node_modules/clsx": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/clsx/-/clsx-2.1.1.tgz",
@@ -3012,7 +3192,6 @@
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
"integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
"devOptional": true,
"license": "MIT",
"dependencies": {
"color-name": "~1.1.4"
@@ -3025,7 +3204,6 @@
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
"integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
"devOptional": true,
"license": "MIT"
},
"node_modules/color-string": {
@@ -3039,6 +3217,18 @@
"simple-swizzle": "^0.2.2"
}
},
"node_modules/combined-stream": {
"version": "1.0.8",
"resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
"integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
"license": "MIT",
"dependencies": {
"delayed-stream": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
}
},
"node_modules/concat-map": {
"version": "0.0.1",
"resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz",
@@ -3046,6 +3236,17 @@
"dev": true,
"license": "MIT"
},
"node_modules/core-js": {
"version": "3.29.1",
"resolved": "https://registry.npmjs.org/core-js/-/core-js-3.29.1.tgz",
"integrity": "sha512-+jwgnhg6cQxKYIIjGtAHq2nwUOolo9eoFZ4sHfUH09BLXBgxnH4gA0zEd+t+BO2cNB8idaBtZFcFTRjQJRJmAw==",
"hasInstallScript": true,
"license": "MIT",
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/core-js"
}
},
"node_modules/cross-spawn": {
"version": "7.0.6",
"resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz",
@@ -3190,6 +3391,15 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/delayed-stream": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
"integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
"license": "MIT",
"engines": {
"node": ">=0.4.0"
}
},
"node_modules/detect-libc": {
"version": "2.0.4",
"resolved": "https://registry.npmjs.org/detect-libc/-/detect-libc-2.0.4.tgz",
@@ -3223,7 +3433,6 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
"integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.1",
@@ -3356,7 +3565,6 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
"integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -3366,7 +3574,6 @@
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -3404,7 +3611,6 @@
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
"integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0"
@@ -3417,7 +3623,6 @@
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/es-set-tostringtag/-/es-set-tostringtag-2.1.0.tgz",
"integrity": "sha512-j6vWzfrGVfyXxge+O0x5sh6cvxAog0a/4Rdd2K36zCMV5eJ+/+tOAngRO8cODMNWbVRdVlmGZQL2YS3yR8bIUA==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
@@ -3460,6 +3665,15 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/escalade": {
"version": "3.2.0",
"resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz",
"integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==",
"license": "MIT",
"engines": {
"node": ">=6"
}
},
"node_modules/escape-string-regexp": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz",
@@ -4040,11 +4254,26 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/form-data": {
"version": "4.0.4",
"resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.4.tgz",
"integrity": "sha512-KrGhL9Q4zjj0kiUt5OO4Mr/A/jlI2jDYs5eHBpYHPcBEVSiipAvn2Ko2HnPe20rmcuuvMHNdZFp+4IlGTMF0Ow==",
"license": "MIT",
"dependencies": {
"asynckit": "^0.4.0",
"combined-stream": "^1.0.8",
"es-set-tostringtag": "^2.1.0",
"hasown": "^2.0.2",
"mime-types": "^2.1.12"
},
"engines": {
"node": ">= 6"
}
},
"node_modules/function-bind": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
"integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -4081,11 +4310,19 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/get-caller-file": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
"integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg==",
"license": "ISC",
"engines": {
"node": "6.* || 8.* || >= 10.*"
}
},
"node_modules/get-intrinsic": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
"integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.2",
@@ -4119,7 +4356,6 @@
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
"integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
"dev": true,
"license": "MIT",
"dependencies": {
"dunder-proto": "^1.0.1",
@@ -4207,7 +4443,6 @@
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
"integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -4247,7 +4482,6 @@
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
"integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">=8"
@@ -4286,7 +4520,6 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
"integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -4299,7 +4532,6 @@
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.2.tgz",
"integrity": "sha512-NqADB8VjPFLM2V0VvHUewwwsw0ZWBaIdgo+ieHtK3hasLz4qeCRjYcqfB6AQrBggRKppKF8L52/VqdVsO47Dlw==",
"dev": true,
"license": "MIT",
"dependencies": {
"has-symbols": "^1.0.3"
@@ -4315,7 +4547,6 @@
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"function-bind": "^1.1.2"
@@ -4554,6 +4785,15 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/is-fullwidth-code-point": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
"integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
"license": "MIT",
"engines": {
"node": ">=8"
}
},
"node_modules/is-generator-function": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/is-generator-function/-/is-generator-function-1.1.0.tgz",
@@ -5202,6 +5442,12 @@
"dev": true,
"license": "MIT"
},
"node_modules/long": {
"version": "4.0.0",
"resolved": "https://registry.npmjs.org/long/-/long-4.0.0.tgz",
"integrity": "sha512-XsP+KhQif4bjX1kbuSiySJFNAehNxgLb6hPRGJ9QsUr8ajHkuXGdrHmFUTUUXhDwVX2R5bY4JNZEwbUiMhV+MA==",
"license": "Apache-2.0"
},
"node_modules/loose-envify": {
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz",
@@ -5238,7 +5484,6 @@
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
"integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
@@ -5268,6 +5513,27 @@
"node": ">=8.6"
}
},
"node_modules/mime-db": {
"version": "1.52.0",
"resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz",
"integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==",
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
},
"node_modules/mime-types": {
"version": "2.1.35",
"resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz",
"integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==",
"license": "MIT",
"dependencies": {
"mime-db": "1.52.0"
},
"engines": {
"node": ">= 0.6"
}
},
"node_modules/minimatch": {
"version": "3.1.2",
"resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
@@ -5460,6 +5726,26 @@
"node": "^10 || ^12 || >=14"
}
},
"node_modules/node-fetch": {
"version": "2.6.13",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.13.tgz",
"integrity": "sha512-StxNAxh15zr77QvvkmveSQ8uCQ4+v5FkvNTj0OESmiHu+VRi/gXArXtkWMElOsOUNLtUEvI4yS+rdtOHZTwlQA==",
"license": "MIT",
"dependencies": {
"whatwg-url": "^5.0.0"
},
"engines": {
"node": "4.x || >=6.0.0"
},
"peerDependencies": {
"encoding": "^0.1.0"
},
"peerDependenciesMeta": {
"encoding": {
"optional": true
}
}
},
"node_modules/object-assign": {
"version": "4.1.1",
"resolved": "https://registry.npmjs.org/object-assign/-/object-assign-4.1.1.tgz",
@@ -5922,6 +6208,12 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/regenerator-runtime": {
"version": "0.13.11",
"resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.13.11.tgz",
"integrity": "sha512-kY1AZVr2Ra+t+piVaJ4gxaFaReZVH40AKNo7UCX6W+dEwBo/2oZJzqfuN1qLq1oL45o56cPaTXELwrTh8Fpggg==",
"license": "MIT"
},
"node_modules/regexp.prototype.flags": {
"version": "1.5.4",
"resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.4.tgz",
@@ -5943,6 +6235,15 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/require-directory": {
"version": "2.1.1",
"resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
"integrity": "sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q==",
"license": "MIT",
"engines": {
"node": ">=0.10.0"
}
},
"node_modules/resolve": {
"version": "1.22.10",
"resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.10.tgz",
@@ -6039,6 +6340,26 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/safe-buffer": {
"version": "5.2.1",
"resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz",
"integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"license": "MIT"
},
"node_modules/safe-push-apply": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/safe-push-apply/-/safe-push-apply-1.0.0.tgz",
@@ -6080,6 +6401,12 @@
"integrity": "sha512-NlHwttCI/l5gCPR3D1nNXtWABUmBwvZpEQiD4IXSbIDq8BzLIK/7Ir5gTFSGZDUu37K5cMNp0hFtzO38sC7gWA==",
"license": "MIT"
},
"node_modules/seedrandom": {
"version": "3.0.5",
"resolved": "https://registry.npmjs.org/seedrandom/-/seedrandom-3.0.5.tgz",
"integrity": "sha512-8OwmbklUNzwezjGInmZ+2clQmExQPvomqjL7LFqOYqtmuxRgQYqOD3mHaU+MvZn5FLUeVxVfQjwLZW/n/JFuqg==",
"license": "MIT"
},
"node_modules/semver": {
"version": "7.7.2",
"resolved": "https://registry.npmjs.org/semver/-/semver-7.7.2.tgz",
@@ -6300,6 +6627,12 @@
"node": ">=0.10.0"
}
},
"node_modules/sprintf-js": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz",
"integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==",
"license": "BSD-3-Clause"
},
"node_modules/stable-hash": {
"version": "0.0.5",
"resolved": "https://registry.npmjs.org/stable-hash/-/stable-hash-0.0.5.tgz",
@@ -6329,6 +6662,35 @@
"node": ">=10.0.0"
}
},
"node_modules/string_decoder": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz",
"integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==",
"license": "MIT",
"dependencies": {
"safe-buffer": "~5.2.0"
}
},
"node_modules/string-width": {
"version": "4.2.3",
"resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz",
"integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==",
"license": "MIT",
"dependencies": {
"emoji-regex": "^8.0.0",
"is-fullwidth-code-point": "^3.0.0",
"strip-ansi": "^6.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/string-width/node_modules/emoji-regex": {
"version": "8.0.0",
"resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz",
"integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==",
"license": "MIT"
},
"node_modules/string.prototype.includes": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/string.prototype.includes/-/string.prototype.includes-2.0.1.tgz",
@@ -6442,6 +6804,18 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/strip-ansi": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
"integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
"license": "MIT",
"dependencies": {
"ansi-regex": "^5.0.1"
},
"engines": {
"node": ">=8"
}
},
"node_modules/strip-bom": {
"version": "3.0.0",
"resolved": "https://registry.npmjs.org/strip-bom/-/strip-bom-3.0.0.tgz",
@@ -6492,7 +6866,6 @@
"version": "7.2.0",
"resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
"integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
"dev": true,
"license": "MIT",
"dependencies": {
"has-flag": "^4.0.0"
@@ -6624,6 +6997,12 @@
"node": ">=8.0"
}
},
"node_modules/tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha512-N3WMsuqV66lT30CrXNbEjx4GEwlow3v6rr4mCcv6prnfwhS01rkgyFdjPNBYd9br7LpXV1+Emh01fHnq2Gdgrw==",
"license": "MIT"
},
"node_modules/ts-api-utils": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz",
@@ -6794,7 +7173,6 @@
"version": "6.21.0",
"resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz",
"integrity": "sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ==",
"dev": true,
"license": "MIT"
},
"node_modules/unrs-resolver": {
@@ -6898,6 +7276,22 @@
"react-dom": "^16.8 || ^17.0 || ^18.0 || ^19.0.0 || ^19.0.0-rc"
}
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha512-2JAn3z8AR6rjK8Sm8orRC0h/bcl/DqL7tRPdGZ4I1CjdF+EaMLmYxBHyXuKL849eucPFhvBoxMsflfOb8kxaeQ==",
"license": "BSD-2-Clause"
},
"node_modules/whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha512-saE57nupxk6v3HY35+jzBwYa0rKSy0XR8JSxZPwgLr7ys0IBzhGviA1/TUGJLmSVqs8pb9AnvICXEuOHLprYTw==",
"license": "MIT",
"dependencies": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
}
},
"node_modules/which": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz",
@@ -7013,6 +7407,32 @@
"node": ">=0.10.0"
}
},
"node_modules/wrap-ansi": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz",
"integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==",
"license": "MIT",
"dependencies": {
"ansi-styles": "^4.0.0",
"string-width": "^4.1.0",
"strip-ansi": "^6.0.0"
},
"engines": {
"node": ">=10"
},
"funding": {
"url": "https://github.com/chalk/wrap-ansi?sponsor=1"
}
},
"node_modules/y18n": {
"version": "5.0.8",
"resolved": "https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz",
"integrity": "sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA==",
"license": "ISC",
"engines": {
"node": ">=10"
}
},
"node_modules/yallist": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/yallist/-/yallist-5.0.0.tgz",
@@ -7023,6 +7443,33 @@
"node": ">=18"
}
},
"node_modules/yargs": {
"version": "16.2.0",
"resolved": "https://registry.npmjs.org/yargs/-/yargs-16.2.0.tgz",
"integrity": "sha512-D1mvvtDG0L5ft/jGWkLpG1+m0eQxOfaBvTNELraWj22wSVUMWxZUvYgJYcKh6jGGIkJFhH4IZPQhR4TKpc8mBw==",
"license": "MIT",
"dependencies": {
"cliui": "^7.0.2",
"escalade": "^3.1.1",
"get-caller-file": "^2.0.5",
"require-directory": "^2.1.1",
"string-width": "^4.2.0",
"y18n": "^5.0.5",
"yargs-parser": "^20.2.2"
},
"engines": {
"node": ">=10"
}
},
"node_modules/yargs-parser": {
"version": "20.2.9",
"resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-20.2.9.tgz",
"integrity": "sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==",
"license": "ISC",
"engines": {
"node": ">=10"
}
},
"node_modules/yocto-queue": {
"version": "0.1.0",
"resolved": "https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz",

View File

@@ -13,7 +13,11 @@
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-separator": "^1.1.7",
"@radix-ui/react-slider": "^1.3.6",
"@radix-ui/react-slot": "^1.2.3",
"@radix-ui/react-switch": "^1.2.6",
"@tensorflow/tfjs": "^4.22.0",
"@tensorflow/tfjs-backend-webgl": "^4.22.0",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"embla-carousel-react": "^8.6.0",

5043
pnpm-lock.yaml generated Normal file

File diff suppressed because it is too large Load Diff

Binary file not shown.

1
public/models/model.json Normal file

File diff suppressed because one or more lines are too long

192
workers/detection-worker.ts Normal file
View File

@@ -0,0 +1,192 @@
import type { DetectionConfig, DetectionResult, WorkerMessage, WorkerResponse } from '../lib/ml/types';
import { InferencePipeline } from '../lib/ml/inference-pipeline';
// Typed alias for the dedicated-worker global scope.
declare const self: DedicatedWorkerGlobalScope;
// Worker-wide mutable state, populated by the INITIALIZE / LOAD_MODEL /
// CONFIGURE message handlers before DETECT requests can be served.
let tfGlobal: any = null; // TensorFlow.js module, dynamically imported in initialize()
let model: any = null; // loaded graph model, set by loadModelWorker()
let config: DetectionConfig | null = null; // set by configureWorker()
let pipeline: InferencePipeline | null = null; // created in initialize()
/**
 * Bootstrap TensorFlow.js inside the worker: dynamically import the library,
 * select the WebGL backend, wait for it to be ready, and create the
 * inference pipeline. Replies INITIALIZED on success or ERROR on failure,
 * so the main thread never waits on a silently failed init (the other
 * handlers in this worker already report ERROR; this makes init consistent).
 */
async function initialize(id: string) {
  console.log('Initializing worker...');
  try {
    tfGlobal = await import('@tensorflow/tfjs');
    await import('@tensorflow/tfjs-backend-webgl');
    await tfGlobal.setBackend('webgl');
    await tfGlobal.ready();
    console.log('TensorFlow.js backend set to:', tfGlobal.getBackend());
    pipeline = new InferencePipeline();
    self.postMessage({ type: 'INITIALIZED', id });
  } catch (error) {
    console.error('Worker: Failed to initialize:', error);
    self.postMessage({ type: 'ERROR', error: error instanceof Error ? error.message : 'Unknown error during initialization', id });
  }
}
/**
 * Load the detection graph model for the requested variant and, when a
 * configuration is already present, run one throwaway inference to warm up
 * the backend. Replies LOADED_MODEL on success, ERROR on failure.
 *
 * NOTE(review): modelData is currently unused and every variant resolves to
 * the same local model.json — confirm whether transferring the buffer and
 * the per-variant URLs are still needed.
 */
async function loadModelWorker(variant: 'quantized' | 'standard' | 'full', modelData: ArrayBuffer, id: string) {
  console.log(`Worker: Loading model ${variant}...`);
  try {
    if (!tfGlobal) {
      throw new Error('TensorFlow.js not initialized');
    }
    // Use local model files from public folder with full URL for worker context
    const baseUrl = self.location.origin;
    const modelUrls = {
      'quantized': `${baseUrl}/models/model.json`,
      'standard': `${baseUrl}/models/model.json`,
      'full': `${baseUrl}/models/model.json`
    };
    const modelUrl = modelUrls[variant];
    console.log(`Worker: Loading REAL model from ${modelUrl}`);
    model = await tfGlobal.loadGraphModel(modelUrl);
    console.log('Worker: Real model loaded successfully', model);
    if (model && config) {
      // Warm up with a zero tensor sized to the configured input; dispose
      // every output tensor plus the dummy input afterwards.
      console.log('Worker: Warming up model with input size:', config.inputSize);
      const dummyInput = tfGlobal.zeros([1, ...config.inputSize, 3]);
      const warmupResult = model.execute(dummyInput);
      console.log('Worker: Warmup result:', warmupResult);
      const outputs = Array.isArray(warmupResult) ? warmupResult : [warmupResult];
      outputs.forEach(t => t.dispose());
      dummyInput.dispose();
      console.log('Worker: Model warmed up successfully.');
    }
    self.postMessage({ type: 'LOADED_MODEL', id });
  } catch (error) {
    console.error(`Worker: Failed to load model ${variant}:`, error);
    self.postMessage({ type: 'ERROR', error: error instanceof Error ? error.message : 'Unknown error during model loading', id });
  }
}
/**
 * Stores the detection configuration (input size, confidence threshold, …)
 * used by detect() and for model warmup. Serves both CONFIGURE and
 * UPDATE_CONFIG messages; replies CONFIGURED echoing the request id.
 */
async function configureWorker(newConfig: DetectionConfig, id: string) {
  console.log('Worker: Configuring...');
  config = newConfig;
  self.postMessage({ type: 'CONFIGURED', id });
}
/**
 * Runs one object-detection inference over a camera frame.
 *
 * Converts the RGBA ImageData to an RGB tensor (values kept in [0,255]),
 * resizes to the configured model input size, runs the graph model, and
 * post-processes the [boxes, scores, classes] outputs through the pipeline.
 * Replies DETECTION_RESULT (result may be null on invalid model output) or
 * ERROR, always echoing `id`.
 */
async function detect(imageData: ImageData, id: string) {
  console.log('Worker: detect function called.');
  if (!model || !config || !pipeline) {
    self.postMessage({ type: 'ERROR', error: 'Worker not initialized or configured.', id });
    return;
  }
  const tensor = tfGlobal.tidy(() => {
    // Convert ImageData to tensor in Web Worker context
    const { data, width, height } = imageData;
    // In Web Worker, we need to create tensor manually from the pixel data
    // Convert RGBA to RGB by dropping every 4th value (alpha channel)
    const rgbData = new Uint8Array(width * height * 3);
    for (let i = 0; i < width * height; i++) {
      rgbData[i * 3] = data[i * 4]; // R
      rgbData[i * 3 + 1] = data[i * 4 + 1]; // G
      rgbData[i * 3 + 2] = data[i * 4 + 2]; // B
      // Skip alpha channel (data[i * 4 + 3])
    }
    // Create tensor from RGB data
    const img = tfGlobal.tensor3d(rgbData, [height, width, 3]);
    // Resize to model input size and add batch dimension
    const resized = tfGlobal.image.resizeBilinear(img, config!.inputSize);
    return resized.expandDims(0); // Keep values in [0,255] range like the working implementation
  });
  try {
    console.log('Worker: About to execute model with tensor shape:', tensor.shape);
    // executeAsync is required: the detection graph contains dynamic ops.
    const result = await model.executeAsync(tensor);
    tensor.dispose();
    console.log('Worker: Model execution result:', result);
    if (!result || !Array.isArray(result) || result.length < 3) {
      console.error('Worker: Invalid model output:', result);
      // Dispose whatever did come back (single tensor or short array) so an
      // unexpected output shape doesn't leak GPU memory on every frame.
      if (result) {
        tfGlobal.dispose(result);
      }
      self.postMessage({ type: 'DETECTION_RESULT', result: null, id });
      return;
    }
    const [boxes, scores, classes] = result;
    console.log('Worker: Extracting data from tensors...');
    const boxesData = await boxes.data();
    const scoresData = await scores.data();
    const classesData = await classes.data();
    console.log('Worker: Raw model outputs:', {
      boxesShape: boxes.shape,
      scoresShape: scores.shape,
      classesShape: classes.shape,
      boxesLength: boxesData.length,
      scoresLength: scoresData.length,
      classesLength: classesData.length,
      firstFewBoxes: Array.from(boxesData.slice(0, 8)),
      firstFewScores: Array.from(scoresData.slice(0, 4)),
      firstFewClasses: Array.from(classesData.slice(0, 4))
    });
    result.forEach(t => t.dispose());
    // NOTE(review): data() returns TypedArrays; the casts below satisfy the
    // pipeline's number[] signature and are safe for read-only numeric access.
    const detectionResult = pipeline.process(
      boxesData as number[],
      scoresData as number[],
      classesData as number[],
      config.confidenceThreshold
    );
    console.log('Worker detectionResult:', detectionResult);
    self.postMessage({ type: 'DETECTION_RESULT', result: detectionResult, id });
  } catch (error) {
    // Safe even if already disposed above: Tensor.dispose() is a no-op on a
    // disposed tensor.
    tensor.dispose();
    console.error('Worker: Detection execution failed:', error);
    // Narrow before touching .stack — catch variables are `unknown` under
    // strict settings, and non-Error throws have no stack.
    if (error instanceof Error && error.stack) {
      console.error('Worker: Error stack:', error.stack);
    }
    self.postMessage({ type: 'ERROR', error: error instanceof Error ? error.message : 'Detection execution failed', id });
  }
}
// Route messages from the main thread to the matching worker routine.
// Every reply carries the originating request id so the caller can correlate
// responses; any uncaught failure is surfaced as an ERROR message.
self.onmessage = async (event: MessageEvent<WorkerMessage>) => {
  const msg = event.data;
  try {
    switch (msg.type) {
      case 'INITIALIZE':
        await initialize(msg.id);
        break;
      case 'LOAD_MODEL':
        await loadModelWorker(msg.variant, msg.modelData, msg.id);
        break;
      case 'DETECT':
        await detect(msg.imageData, msg.id);
        break;
      case 'CONFIGURE':
      case 'UPDATE_CONFIG':
        // Both message kinds carry a DetectionConfig and are handled identically.
        await configureWorker(msg.config, msg.id);
        break;
      default:
        throw new Error(`Unknown message type: ${(msg as any).type}`);
    }
  } catch (error) {
    self.postMessage({ type: 'ERROR', error: error instanceof Error ? error.message : 'Unknown error in worker', id: msg.id });
  }
};