feat: Add Three.js visualization entry point and data processor

Add viz.html as the main entry point that loads Three.js from CDN and
orchestrates all visualization components (scene, body model, signal
viz, environment, HUD). Add data-processor.js that transforms API
WebSocket messages into geometry updates and provides demo mode with
pre-recorded pose cycling when the server is unavailable.

https://claude.ai/code/session_01Ki7pvEZtJDvqJkmyn6B714
This commit is contained in:
Claude
2026-02-28 06:29:28 +00:00
parent dd382824fe
commit a8ac309258
2 changed files with 734 additions and 0 deletions

View File

@@ -0,0 +1,380 @@
// Data Processor - WiFi DensePose 3D Visualization
// Transforms API data into Three.js geometry updates

/**
 * Transforms WebSocket API messages from the pose server into
 * visualization-ready data, and synthesizes demo data (cycling through
 * pre-recorded COCO poses) when no server is available.
 */
export class DataProcessor {
    constructor() {
        // Demo mode state
        this.demoMode = false;
        this.demoElapsed = 0;       // seconds accumulated across generateDemoData() calls
        this.demoPoseIndex = 0;
        this.demoPoseCycleTime = 4; // seconds per pose transition
        // Pre-recorded demo poses (COCO 17-keypoint format, normalized [0,1])
        // Each pose: array of {x, y, confidence} for 17 keypoints
        this.demoPoses = this._buildDemoPoses();
        // Smoothing buffers (reserved for temporal smoothing of live persons)
        this._lastProcessedPersons = [];
        this._smoothingFactor = 0.3;
    }

    /**
     * Process an incoming WebSocket message into visualization-ready data.
     * @param {object|null} message - Raw message; only type 'pose_data' is handled.
     * @returns {object|null} {persons, zoneOccupancy, signalData, metadata},
     *   or null when the message is falsy. Unknown types yield the empty shell.
     */
    processMessage(message) {
        if (!message) return null;
        const result = {
            persons: [],
            zoneOccupancy: {},
            signalData: null,
            metadata: {
                isRealData: false,
                timestamp: null,
                processingTime: 0,
                frameId: null,
                sensingMode: 'Mock'
            }
        };
        if (message.type === 'pose_data') {
            // The API has shipped the payload under both keys.
            const payload = message.data || message.payload;
            if (payload) {
                result.persons = this._extractPersons(payload);
                result.zoneOccupancy = this._extractZoneOccupancy(payload, message.zone_id);
                result.signalData = this._extractSignalData(payload);
                result.metadata.isRealData = payload.metadata?.mock_data === false;
                result.metadata.timestamp = message.timestamp;
                // ?? (not ||) so an explicit 0 ms processing time is preserved
                result.metadata.processingTime = payload.metadata?.processing_time_ms ?? 0;
                result.metadata.frameId = payload.metadata?.frame_id;
                result.metadata.sensingMode = this._resolveSensingMode(payload.metadata);
            }
        }
        return result;
    }

    // Map payload metadata to a HUD sensing-mode label.
    _resolveSensingMode(meta) {
        if (meta?.source === 'csi') return 'CSI';
        if (meta?.source === 'rssi') return 'RSSI';
        // Missing metadata or mock_data !== false counts as mock;
        // only an explicit mock_data === false (with no source) implies CSI.
        return meta?.mock_data !== false ? 'Mock' : 'CSI';
    }

    // Extract person data with keypoints in COCO format. Handles both the
    // nested (payload.pose.persons) and flat (payload.persons) layouts.
    _extractPersons(payload) {
        const rawPersons = (payload.pose && payload.pose.persons) || payload.persons || [];
        return rawPersons.map((person, idx) => this._toPerson(person, idx));
    }

    // Normalize one raw person record into the internal person shape.
    _toPerson(person, idx) {
        return {
            // ?? so falsy-but-valid ids (e.g. numeric 0) are not replaced
            id: person.id ?? `person_${idx}`,
            confidence: person.confidence ?? 0,
            keypoints: this._normalizeKeypoints(person.keypoints),
            bbox: person.bbox || null,
            body_parts: person.densepose_parts || person.body_parts || null
        };
    }

    /**
     * Normalize keypoints to {x, y, confidence} objects in [0,1] range.
     * Accepts either [x, y, conf] triples or {x, y, confidence|score} objects.
     * @param {Array|undefined} keypoints
     * @returns {Array<{x:number,y:number,confidence:number}>}
     */
    _normalizeKeypoints(keypoints) {
        if (!keypoints || keypoints.length === 0) return [];
        return keypoints.map(kp => {
            if (Array.isArray(kp)) {
                // ?? keeps an explicit confidence of 0 (|| would turn it into 0.5)
                return { x: kp[0], y: kp[1], confidence: kp[2] ?? 0.5 };
            }
            return {
                x: kp.x ?? 0,
                y: kp.y ?? 0,
                confidence: kp.confidence ?? kp.score ?? 0.5
            };
        });
    }

    // Extract zone occupancy: copies any server-provided zone_summary, then
    // overlays the person count for the message's own zone (if any).
    _extractZoneOccupancy(payload, zoneId) {
        const occupancy = {};
        if (payload.zone_summary) {
            Object.assign(occupancy, payload.zone_summary);
        }
        if (zoneId && payload.pose?.persons?.length > 0) {
            occupancy[zoneId] = payload.pose.persons.length;
        }
        return occupancy;
    }

    // Extract signal/CSI data if present; returns null when the payload
    // carries neither signal_data nor csi_data.
    _extractSignalData(payload) {
        const sig = payload.signal_data || payload.csi_data;
        if (!sig) return null;
        return {
            amplitude: sig.amplitude || null,
            phase: sig.phase || null,
            doppler: sig.doppler || sig.doppler_spectrum || null,
            // ?? preserves a legitimate motion energy of 0
            motionEnergy: sig.motion_energy ?? null
        };
    }

    /**
     * Generate demo data that cycles through the pre-recorded poses,
     * smoothstep-interpolating between consecutive poses.
     * @param {number} deltaTime - Seconds since the previous frame.
     * @returns {object} Same shape as processMessage()'s result.
     */
    generateDemoData(deltaTime) {
        this.demoElapsed += deltaTime;
        const totalPoses = this.demoPoses.length;
        const cycleProgress = (this.demoElapsed % (this.demoPoseCycleTime * totalPoses)) / this.demoPoseCycleTime;
        const currentPoseIdx = Math.floor(cycleProgress) % totalPoses;
        const nextPoseIdx = (currentPoseIdx + 1) % totalPoses;
        const t = cycleProgress - Math.floor(cycleProgress); // interpolation factor [0,1]
        const smoothT = t * t * (3 - 2 * t); // smoothstep easing
        const currentPose = this.demoPoses[currentPoseIdx];
        const nextPose = this.demoPoses[nextPoseIdx];
        const interpolatedKeypoints = currentPose.map((kp, i) => {
            const next = nextPose[i];
            return {
                x: kp.x + (next.x - kp.x) * smoothT,
                y: kp.y + (next.y - kp.y) * smoothT,
                // Phase-shifted sine gives each keypoint its own confidence flutter
                confidence: 0.7 + Math.sin(this.demoElapsed * 2 + i * 0.5) * 0.2
            };
        });
        // Simulate slow overall confidence variation
        const baseConf = 0.65 + Math.sin(this.demoElapsed * 0.5) * 0.2;
        // Pick the active zone from the hip midpoint (COCO indices 11/12)
        const hipX = (interpolatedKeypoints[11].x + interpolatedKeypoints[12].x) / 2;
        let activeZone = 'zone_2';
        if (hipX < 0.35) activeZone = 'zone_1';
        else if (hipX > 0.65) activeZone = 'zone_3';
        return {
            persons: [{
                id: 'demo_person_0',
                confidence: Math.max(0, Math.min(1, baseConf)),
                keypoints: interpolatedKeypoints,
                bbox: null,
                body_parts: this._generateDemoBodyParts(this.demoElapsed)
            }],
            zoneOccupancy: {
                [activeZone]: 1
            },
            signalData: null, // SignalVisualization generates its own demo data
            metadata: {
                isRealData: false,
                timestamp: new Date().toISOString(),
                processingTime: 8 + Math.random() * 5,
                frameId: `demo_${Math.floor(this.demoElapsed * 30)}`,
                sensingMode: 'Mock'
            }
        };
    }

    // Simulate DensePose part confidences (parts 1..24) as a slow wave
    // across the part indices plus a little noise, clamped to [0,1].
    _generateDemoBodyParts(elapsed) {
        const parts = {};
        for (let i = 1; i <= 24; i++) {
            const wave = 0.4 + Math.sin(elapsed * 1.2 + i * 0.5) * 0.3 + Math.random() * 0.1;
            parts[i] = Math.max(0, Math.min(1, wave));
        }
        return parts;
    }

    // Build the fixed pose library: normalized COCO-17 keypoints
    // ({x, y, confidence} each), returned in playback order.
    _buildDemoPoses() {
        // Standing at center
        const standing = [
            { x: 0.50, y: 0.12, confidence: 0.9 }, // 0: nose
            { x: 0.48, y: 0.10, confidence: 0.8 }, // 1: left_eye
            { x: 0.52, y: 0.10, confidence: 0.8 }, // 2: right_eye
            { x: 0.46, y: 0.12, confidence: 0.7 }, // 3: left_ear
            { x: 0.54, y: 0.12, confidence: 0.7 }, // 4: right_ear
            { x: 0.42, y: 0.22, confidence: 0.9 }, // 5: left_shoulder
            { x: 0.58, y: 0.22, confidence: 0.9 }, // 6: right_shoulder
            { x: 0.38, y: 0.38, confidence: 0.85 }, // 7: left_elbow
            { x: 0.62, y: 0.38, confidence: 0.85 }, // 8: right_elbow
            { x: 0.36, y: 0.52, confidence: 0.8 }, // 9: left_wrist
            { x: 0.64, y: 0.52, confidence: 0.8 }, // 10: right_wrist
            { x: 0.45, y: 0.50, confidence: 0.9 }, // 11: left_hip
            { x: 0.55, y: 0.50, confidence: 0.9 }, // 12: right_hip
            { x: 0.44, y: 0.70, confidence: 0.85 }, // 13: left_knee
            { x: 0.56, y: 0.70, confidence: 0.85 }, // 14: right_knee
            { x: 0.44, y: 0.90, confidence: 0.8 }, // 15: left_ankle
            { x: 0.56, y: 0.90, confidence: 0.8 } // 16: right_ankle
        ];
        // Walking - left leg forward
        const walkLeft = [
            { x: 0.50, y: 0.12, confidence: 0.9 },
            { x: 0.48, y: 0.10, confidence: 0.8 },
            { x: 0.52, y: 0.10, confidence: 0.8 },
            { x: 0.46, y: 0.12, confidence: 0.7 },
            { x: 0.54, y: 0.12, confidence: 0.7 },
            { x: 0.42, y: 0.22, confidence: 0.9 },
            { x: 0.58, y: 0.22, confidence: 0.9 },
            { x: 0.40, y: 0.35, confidence: 0.85 },
            { x: 0.60, y: 0.40, confidence: 0.85 },
            { x: 0.42, y: 0.48, confidence: 0.8 },
            { x: 0.56, y: 0.55, confidence: 0.8 },
            { x: 0.45, y: 0.50, confidence: 0.9 },
            { x: 0.55, y: 0.50, confidence: 0.9 },
            { x: 0.40, y: 0.68, confidence: 0.85 },
            { x: 0.58, y: 0.72, confidence: 0.85 },
            { x: 0.38, y: 0.88, confidence: 0.8 },
            { x: 0.56, y: 0.90, confidence: 0.8 }
        ];
        // Walking - right leg forward
        const walkRight = [
            { x: 0.50, y: 0.12, confidence: 0.9 },
            { x: 0.48, y: 0.10, confidence: 0.8 },
            { x: 0.52, y: 0.10, confidence: 0.8 },
            { x: 0.46, y: 0.12, confidence: 0.7 },
            { x: 0.54, y: 0.12, confidence: 0.7 },
            { x: 0.42, y: 0.22, confidence: 0.9 },
            { x: 0.58, y: 0.22, confidence: 0.9 },
            { x: 0.38, y: 0.40, confidence: 0.85 },
            { x: 0.62, y: 0.35, confidence: 0.85 },
            { x: 0.36, y: 0.55, confidence: 0.8 },
            { x: 0.60, y: 0.48, confidence: 0.8 },
            { x: 0.45, y: 0.50, confidence: 0.9 },
            { x: 0.55, y: 0.50, confidence: 0.9 },
            { x: 0.47, y: 0.72, confidence: 0.85 },
            { x: 0.52, y: 0.68, confidence: 0.85 },
            { x: 0.47, y: 0.90, confidence: 0.8 },
            { x: 0.50, y: 0.88, confidence: 0.8 }
        ];
        // Arms raised
        const armsUp = [
            { x: 0.50, y: 0.12, confidence: 0.9 },
            { x: 0.48, y: 0.10, confidence: 0.8 },
            { x: 0.52, y: 0.10, confidence: 0.8 },
            { x: 0.46, y: 0.12, confidence: 0.7 },
            { x: 0.54, y: 0.12, confidence: 0.7 },
            { x: 0.42, y: 0.22, confidence: 0.9 },
            { x: 0.58, y: 0.22, confidence: 0.9 },
            { x: 0.38, y: 0.15, confidence: 0.85 },
            { x: 0.62, y: 0.15, confidence: 0.85 },
            { x: 0.36, y: 0.05, confidence: 0.8 },
            { x: 0.64, y: 0.05, confidence: 0.8 },
            { x: 0.45, y: 0.50, confidence: 0.9 },
            { x: 0.55, y: 0.50, confidence: 0.9 },
            { x: 0.44, y: 0.70, confidence: 0.85 },
            { x: 0.56, y: 0.70, confidence: 0.85 },
            { x: 0.44, y: 0.90, confidence: 0.8 },
            { x: 0.56, y: 0.90, confidence: 0.8 }
        ];
        // Sitting
        const sitting = [
            { x: 0.50, y: 0.22, confidence: 0.9 },
            { x: 0.48, y: 0.20, confidence: 0.8 },
            { x: 0.52, y: 0.20, confidence: 0.8 },
            { x: 0.46, y: 0.22, confidence: 0.7 },
            { x: 0.54, y: 0.22, confidence: 0.7 },
            { x: 0.42, y: 0.32, confidence: 0.9 },
            { x: 0.58, y: 0.32, confidence: 0.9 },
            { x: 0.38, y: 0.45, confidence: 0.85 },
            { x: 0.62, y: 0.45, confidence: 0.85 },
            { x: 0.40, y: 0.55, confidence: 0.8 },
            { x: 0.60, y: 0.55, confidence: 0.8 },
            { x: 0.45, y: 0.55, confidence: 0.9 },
            { x: 0.55, y: 0.55, confidence: 0.9 },
            { x: 0.42, y: 0.58, confidence: 0.85 },
            { x: 0.58, y: 0.58, confidence: 0.85 },
            { x: 0.38, y: 0.90, confidence: 0.8 },
            { x: 0.62, y: 0.90, confidence: 0.8 }
        ];
        // Waving (left hand up, right hand at side)
        const waving = [
            { x: 0.50, y: 0.12, confidence: 0.9 },
            { x: 0.48, y: 0.10, confidence: 0.8 },
            { x: 0.52, y: 0.10, confidence: 0.8 },
            { x: 0.46, y: 0.12, confidence: 0.7 },
            { x: 0.54, y: 0.12, confidence: 0.7 },
            { x: 0.42, y: 0.22, confidence: 0.9 },
            { x: 0.58, y: 0.22, confidence: 0.9 },
            { x: 0.35, y: 0.12, confidence: 0.85 },
            { x: 0.62, y: 0.38, confidence: 0.85 },
            { x: 0.30, y: 0.04, confidence: 0.8 },
            { x: 0.64, y: 0.52, confidence: 0.8 },
            { x: 0.45, y: 0.50, confidence: 0.9 },
            { x: 0.55, y: 0.50, confidence: 0.9 },
            { x: 0.44, y: 0.70, confidence: 0.85 },
            { x: 0.56, y: 0.70, confidence: 0.85 },
            { x: 0.44, y: 0.90, confidence: 0.8 },
            { x: 0.56, y: 0.90, confidence: 0.8 }
        ];
        // "standing" between gestures makes the interpolation look natural
        return [standing, walkLeft, standing, walkRight, armsUp, standing, sitting, standing, waving, standing];
    }

    /**
     * Generate a cols x rows confidence heatmap from person hip positions.
     * Persons are projected into room coordinates centred on the origin;
     * each contributes a Gaussian falloff, and overlapping contributions
     * are combined with max().
     * @param {Array} persons - Processed persons (need >= 13 keypoints).
     * @param {number} cols - Grid columns.
     * @param {number} rows - Grid rows.
     * @param {number} roomWidth - Room extent along x (same units as output).
     * @param {number} roomDepth - Room extent along z.
     * @returns {Float32Array} Row-major cols*rows grid of confidences.
     */
    generateConfidenceHeatmap(persons, cols, rows, roomWidth, roomDepth) {
        const positions = (persons || []).map(p => {
            // Need at least the hip keypoints (COCO indices 11 and 12)
            if (!p.keypoints || p.keypoints.length < 13) return null;
            const hipX = (p.keypoints[11].x + p.keypoints[12].x) / 2;
            const hipY = (p.keypoints[11].y + p.keypoints[12].y) / 2;
            return {
                x: (hipX - 0.5) * roomWidth,
                z: (hipY - 0.5) * roomDepth,
                confidence: p.confidence ?? 0
            };
        }).filter(Boolean);
        const map = new Float32Array(cols * rows);
        const cellW = roomWidth / cols;
        const cellD = roomDepth / rows;
        for (const pos of positions) {
            for (let r = 0; r < rows; r++) {
                for (let c = 0; c < cols; c++) {
                    const cx = (c + 0.5) * cellW - roomWidth / 2;
                    const cz = (r + 0.5) * cellD - roomDepth / 2;
                    const dx = cx - pos.x;
                    const dz = cz - pos.z;
                    // Gaussian falloff exp(-d^2 / 2); the sqrt in the original
                    // was redundant since only the squared distance is needed.
                    const distSq = dx * dx + dz * dz;
                    const conf = Math.exp(-distSq * 0.5) * pos.confidence;
                    map[r * cols + c] = Math.max(map[r * cols + c], conf);
                }
            }
        }
        return map;
    }

    // Release the pose library; the processor must not be used afterwards.
    dispose() {
        this.demoPoses = [];
    }
}

354
ui/viz.html Normal file
View File

@@ -0,0 +1,354 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>WiFi DensePose - 3D Visualization</title>
<style>
/* Global reset; the page is a single full-screen canvas app */
* { margin: 0; padding: 0; box-sizing: border-box; }
html, body {
width: 100%;
height: 100%;
overflow: hidden;
background: #050510;
font-family: 'Courier New', 'Consolas', monospace;
color: #88bbdd;
}
/* Full-viewport host for the Three.js canvas and HUD overlays */
#viz-container {
width: 100%;
height: 100%;
position: relative;
}
/* Boot screen shown until init() completes; faded out via .hidden */
#loading-overlay {
position: absolute;
top: 0; left: 0; right: 0; bottom: 0;
background: #050510;
display: flex;
flex-direction: column;
align-items: center;
justify-content: center;
z-index: 9999;
transition: opacity 0.6s ease;
}
/* hideLoading() adds this class, then removes the node after the fade */
#loading-overlay.hidden {
opacity: 0;
pointer-events: none;
}
.loading-title {
font-size: 22px;
color: #aaddff;
margin-bottom: 16px;
letter-spacing: 4px;
text-transform: uppercase;
}
/* Progress bar track; the fill width is driven by setLoadingProgress() */
.loading-bar-track {
width: 280px;
height: 3px;
background: #112233;
border-radius: 2px;
overflow: hidden;
margin-bottom: 12px;
}
.loading-bar-fill {
height: 100%;
width: 0%;
background: linear-gradient(90deg, #0066ff, #00ccff);
border-radius: 2px;
transition: width 0.3s ease;
}
.loading-status {
font-size: 11px;
color: #446688;
}
/* Stats.js panel positioning */
#stats-container {
position: absolute;
top: 40px;
right: 140px;
z-index: 200;
}
</style>
</head>
<body>
<div id="viz-container">
<!-- Loading overlay -->
<div id="loading-overlay">
<div class="loading-title">WiFi DensePose</div>
<div class="loading-bar-track">
<div class="loading-bar-fill" id="loading-fill"></div>
</div>
<div class="loading-status" id="loading-status">Initializing...</div>
</div>
<!-- Stats.js container (initStats() appends the panel DOM here) -->
<div id="stats-container"></div>
</div>
<!-- Three.js and OrbitControls from CDN.
     Pinned to r147: three.js removed the non-module examples/js directory
     (which provides this OrbitControls.js) in r148, and removed the UMD
     build three.min.js entirely in r160 — so three@0.160.0 would 404 on
     both of these URLs. r147 is the last release that ships both files. -->
<script src="https://unpkg.com/three@0.147.0/build/three.min.js"></script>
<script src="https://unpkg.com/three@0.147.0/examples/js/controls/OrbitControls.js"></script>
<!-- Stats.js for performance monitoring -->
<script src="https://unpkg.com/stats.js@0.17.0/build/stats.min.js"></script>
<!-- Application modules loaded as ES modules via importmap workaround -->
<script type="module">
// Import all visualization modules (project-local ES modules)
import { Scene } from './components/scene.js';
import { BodyModel, BodyModelManager } from './components/body-model.js';
import { SignalVisualization } from './components/signal-viz.js';
import { Environment } from './components/environment.js';
import { DashboardHUD } from './components/dashboard-hud.js';
import { WebSocketClient } from './services/websocket-client.js';
import { DataProcessor } from './services/data-processor.js';
// -- Application State --
// Single shared mutable state object; every field is populated by init()
// and read by the per-frame update loop and the event handlers below.
const state = {
scene: null,              // Scene wrapper (renderer, camera, update loop)
environment: null,        // room, grid, APs, zones
bodyModelManager: null,   // per-person 3D body models
signalViz: null,          // signal/CSI visualization layer
hud: null,                // dashboard overlay
wsClient: null,           // WebSocket connection to the pose server
dataProcessor: null,      // message -> geometry transformer (+ demo mode)
stats: null,              // Stats.js FPS meter
isDemoMode: true,         // start in demo mode until real data arrives
startTime: Date.now()
};
// -- Loading Progress --
// Update the boot progress bar width (percent) and its status caption.
// Both elements are optional; missing nodes are silently skipped.
function setLoadingProgress(pct, msg) {
    const fillEl = document.getElementById('loading-fill');
    if (fillEl) {
        fillEl.style.width = pct + '%';
    }
    const statusEl = document.getElementById('loading-status');
    if (statusEl) {
        statusEl.textContent = msg;
    }
}
// Fade out the loading overlay (via the .hidden class), then detach it
// from the DOM once the 0.6s CSS opacity transition has finished.
function hideLoading() {
    const overlay = document.getElementById('loading-overlay');
    overlay?.classList.add('hidden');
    setTimeout(() => {
        overlay?.parentNode?.removeChild(overlay);
    }, 700);
}
// -- Initialize Stats.js --
// Create a Stats.js FPS meter and mount it inside #stats-container
// (which is absolutely positioned by the page CSS).
function initStats() {
    const fpsMeter = new Stats();
    fpsMeter.showPanel(0); // panel 0 = frames per second
    fpsMeter.dom.style.position = 'relative';
    document.getElementById('stats-container').appendChild(fpsMeter.dom);
    return fpsMeter;
}
// -- Main Initialization --
// Boot sequence: build each subsystem in dependency order, wire the
// WebSocket client, register the per-frame callback, and start rendering.
// Progress is mirrored to the loading overlay; any constructor failure is
// caught, logged, and surfaced on the overlay instead of crashing.
async function init() {
const container = document.getElementById('viz-container');
try {
setLoadingProgress(10, 'Creating 3D scene...');
// 1. Scene setup
state.scene = new Scene(container);
setLoadingProgress(25, 'Building environment...');
// 2. Environment (room, grid, APs, zones)
state.environment = new Environment(state.scene.getScene());
setLoadingProgress(40, 'Preparing body models...');
// 3. Body model manager
state.bodyModelManager = new BodyModelManager(state.scene.getScene());
setLoadingProgress(55, 'Setting up signal visualization...');
// 4. Signal visualization
state.signalViz = new SignalVisualization(state.scene.getScene());
setLoadingProgress(65, 'Creating HUD...');
// 5. Dashboard HUD
state.hud = new DashboardHUD(container);
setLoadingProgress(75, 'Initializing data processor...');
// 6. Data processor
state.dataProcessor = new DataProcessor();
setLoadingProgress(80, 'Setting up Stats.js...');
// 7. Stats.js
state.stats = initStats();
setLoadingProgress(85, 'Connecting to server...');
// 8. WebSocket client; demo mode stays on until real pose data arrives
// (see handleWebSocketMessage / handleConnectionStateChange)
state.wsClient = new WebSocketClient({
url: 'ws://localhost:8000/ws/pose',
onMessage: (msg) => handleWebSocketMessage(msg),
onStateChange: (newState, oldState) => handleConnectionStateChange(newState, oldState),
onError: (err) => console.error('[VIZ] WebSocket error:', err)
});
// Attempt connection (will fall back to demo mode if server unavailable)
state.wsClient.connect();
setLoadingProgress(95, 'Starting render loop...');
// 9. Register the main update loop
state.scene.onUpdate((delta, elapsed) => {
mainUpdate(delta, elapsed);
});
// Start rendering
state.scene.start();
setLoadingProgress(100, 'Ready');
// Hide loading after a brief moment
setTimeout(hideLoading, 400);
console.log('[VIZ] Initialization complete');
} catch (err) {
console.error('[VIZ] Initialization failed:', err);
setLoadingProgress(100, 'Error: ' + err.message);
}
}
// -- Main Update Loop (called every frame) --
// Drives demo-data generation (when in demo mode), the per-frame model
// and environment updates, and the HUD refresh. Live data is applied in
// handleWebSocketMessage instead, so only demo mode produces vizData here.
function mainUpdate(delta, elapsed) {
    state.stats?.begin();
    // In demo mode, synthesize pose and signal data locally each frame
    const vizData = state.isDemoMode
        ? state.dataProcessor.generateDemoData(delta)
        : null;
    if (state.isDemoMode) {
        state.signalViz.updateSignalData(SignalVisualization.generateDemoData(elapsed));
    }
    if (vizData) {
        state.bodyModelManager.update(vizData.persons, delta);
        state.environment.updateZoneOccupancy(vizData.zoneOccupancy);
        const heatmap = state.dataProcessor.generateConfidenceHeatmap(
            vizData.persons, 20, 15, 8, 6
        );
        state.environment.updateConfidenceHeatmap(heatmap);
    }
    // Ambient animations (AP pulse, signal paths) run regardless of source
    state.environment.update(delta, elapsed);
    state.signalViz.update(delta, elapsed);
    if (state.hud) {
        state.hud.tickFPS();
        const { latency, messageCount, uptime } = state.wsClient.getMetrics();
        const live = state.wsClient.isRealData;
        state.hud.updateState({
            connectionStatus: state.wsClient.state,
            isRealData: live && !state.isDemoMode,
            latency,
            messageCount,
            uptime,
            personCount: state.bodyModelManager.getActiveCount(),
            confidence: state.bodyModelManager.getAverageConfidence(),
            sensingMode: state.isDemoMode ? 'Mock' : (live ? 'CSI' : 'Mock')
        });
    }
    state.stats?.end();
}
// -- Handle incoming WebSocket messages --
// Transform each server message via the DataProcessor and, when it
// contains detected people, push the result into the 3D scene (leaving
// demo mode on the first real detection).
function handleWebSocketMessage(message) {
    const processed = state.dataProcessor.processMessage(message);
    if (!processed || processed.persons.length === 0) return;
    if (state.isDemoMode) {
        state.isDemoMode = false;
        console.log('[VIZ] Switched to live data mode');
    }
    // Apply the processed frame to every visualization layer
    state.bodyModelManager.update(processed.persons, 0.016);
    state.environment.updateZoneOccupancy(processed.zoneOccupancy);
    if (processed.signalData) {
        state.signalViz.updateSignalData(processed.signalData);
    }
    const heatmap = state.dataProcessor.generateConfidenceHeatmap(
        processed.persons, 20, 15, 8, 6
    );
    state.environment.updateConfidenceHeatmap(heatmap);
}
// -- Handle WebSocket connection state changes --
// Log every transition; on connect wait for data (demo mode stays active
// until the first real detection), and on error/disconnect fall back to
// demo mode so the scene keeps animating.
function handleConnectionStateChange(newState, oldState) {
    console.log(`[VIZ] Connection: ${oldState} -> ${newState}`);
    if (newState === 'connected') {
        console.log('[VIZ] Connected to server, waiting for data...');
        return;
    }
    const linkDown = newState === 'error' || newState === 'disconnected';
    if (linkDown && !state.isDemoMode) {
        state.isDemoMode = true;
        console.log('[VIZ] Switched to demo mode (server unavailable)');
    }
}
// -- Cleanup on page unload --
window.addEventListener('beforeunload', () => {
if (state.wsClient) state.wsClient.dispose();
if (state.bodyModelManager) state.bodyModelManager.dispose();
if (state.signalViz) state.signalViz.dispose();
if (state.environment) state.environment.dispose();
if (state.hud) state.hud.dispose();
if (state.scene) state.scene.dispose();
});
// -- Keyboard shortcuts --
document.addEventListener('keydown', (e) => {
switch (e.key.toLowerCase()) {
case 'r':
// Reset camera
if (state.scene) state.scene.resetCamera();
break;
case 'd':
// Toggle demo mode
state.isDemoMode = !state.isDemoMode;
console.log(`[VIZ] Demo mode: ${state.isDemoMode ? 'ON' : 'OFF'}`);
break;
case 'c':
// Force reconnect
if (state.wsClient) {
state.wsClient.disconnect();
state.wsClient.autoReconnect = true;
state.wsClient.reconnectAttempts = 0;
state.wsClient.connect();
}
break;
}
});
// -- Start --
init();
</script>
</body>
</html>