Files
wifi-densepose/v1/tests/integration/live_sense_monitor.py
ruv b7e0f07e6e feat: Sensing-only UI mode with Gaussian splat visualization and Rust migration ADR
- Add Python WebSocket sensing server (ws_server.py) with ESP32 UDP CSI
  and Windows RSSI auto-detect collectors on port 8765
- Add Three.js Gaussian splat renderer with custom GLSL shaders for
  real-time WiFi signal field visualization (blue→green→red gradient)
- Add SensingTab component with RSSI sparkline, feature meters, and
  motion classification badge
- Add sensing.service.js WebSocket client with reconnect and simulation fallback
- Implement sensing-only mode: suppress all DensePose API calls when
  FastAPI backend (port 8000) is not running, clean console output
- ADR-019: Document sensing-only UI architecture and data flow
- ADR-020: Migrate AI/model inference to Rust with RuVector ONNX Runtime,
  replacing ~2.7GB Python stack with ~50MB static binary
- Add ruvnet/ruvector as upstream remote for RuVector crate ecosystem

Co-Authored-By: claude-flow <ruv@ruv.net>
2026-02-28 14:37:29 -05:00

114 lines
4.1 KiB
Python

#!/usr/bin/env python3
"""
Live WiFi sensing monitor — collects RSSI from Windows WiFi and classifies
presence/motion in real-time using the ADR-013 commodity sensing pipeline.
Usage:
python v1/tests/integration/live_sense_monitor.py
Walk around the room (especially between laptop and router) to trigger detection.
Press Ctrl+C to stop.
"""
import sys
import time
from v1.src.sensing.rssi_collector import WindowsWifiCollector
from v1.src.sensing.feature_extractor import RssiFeatureExtractor
from v1.src.sensing.classifier import PresenceClassifier
# Tuning constants for the live monitor loop.
SAMPLE_RATE: float = 2.0 # Hz (netsh is slow, 2 Hz is practical max)
WINDOW_SEC: float = 15.0 # Analysis window
REPORT_INTERVAL: float = 3.0 # Print classification every N seconds
def main():
    """Run the live sensing loop until Ctrl+C.

    Wires up the ADR-013 pipeline (collector -> extractor -> classifier),
    prints one classification report every REPORT_INTERVAL seconds, and
    prints a run summary on interrupt. Always stops the collector on exit.
    """
    collector = WindowsWifiCollector(interface="Wi-Fi", sample_rate_hz=SAMPLE_RATE)
    extractor = RssiFeatureExtractor(window_seconds=WINDOW_SEC)
    classifier = PresenceClassifier(
        presence_variance_threshold=0.3,  # Lower threshold for netsh quantization
        motion_energy_threshold=0.05,
    )
    _print_banner()
    collector.start()
    try:
        # Monotonic clock for pacing: wall-clock jumps (NTP sync, DST)
        # cannot stall or flood the report loop. -inf forces an immediate
        # first report, matching the original time.time()-based behavior.
        last_report = float("-inf")
        while True:
            time.sleep(0.5)
            now = time.monotonic()
            if now - last_report < REPORT_INTERVAL:
                continue
            last_report = now
            _report_once(collector, extractor, classifier)
    except KeyboardInterrupt:
        print()
        print("-" * 65)
        print(" Stopped. Final sample count:", len(collector.get_samples()))
        _print_summary(collector, extractor, classifier)
        print("=" * 65)
    finally:
        # Ensure the background collector thread is torn down on any exit.
        collector.stop()


def _print_banner():
    """Print the startup header describing the pipeline and settings."""
    print("=" * 65)
    print(" WiFi-DensePose Live Sensing Monitor (ADR-013)")
    print(" Pipeline: WindowsWifiCollector -> Extractor -> Classifier")
    print("=" * 65)
    print(f" Sample rate: {SAMPLE_RATE} Hz")
    print(f" Window: {WINDOW_SEC}s")
    print(f" Report every: {REPORT_INTERVAL}s")
    print()
    print(" Collecting baseline... walk around after 15s to test detection.")
    print(" Press Ctrl+C to stop.")
    print("-" * 65)


def _report_once(collector, extractor, classifier):
    """Pull the current sample window and print one classification line.

    Prints a buffering notice instead when fewer than 4 samples are
    available (the extractor needs a minimal window to be meaningful).
    """
    samples = collector.get_samples()
    n = len(samples)
    if n < 4:
        print(f" [{time.strftime('%H:%M:%S')}] Buffering... ({n} samples)")
        return
    rssi_vals = [s.rssi_dbm for s in samples]
    features = extractor.extract(samples)
    result = classifier.classify(features)
    # Motion bar visualization: variance scaled onto a 40-char gauge,
    # clamped to [0, 40].
    bar_len = min(40, max(0, int(features.variance * 20)))
    bar = "#" * bar_len + "." * (40 - bar_len)
    level_icon = {
        "absent": " ",
        "present_still": "🧍",
        "active": "🏃",
    }.get(result.motion_level.value, "??")
    print(
        f" [{time.strftime('%H:%M:%S')}] "
        f"RSSI: {features.mean:6.1f} dBm | "
        f"var: {features.variance:6.3f} | "
        f"motion_e: {features.motion_band_power:7.4f} | "
        f"breath_e: {features.breathing_band_power:7.4f} | "
        f"{result.motion_level.value:14s} {level_icon} "
        f"({result.confidence:.0%})"
    )
    print(f" [{bar}] n={n} rssi=[{min(rssi_vals):.0f}..{max(rssi_vals):.0f}]")


def _print_summary(collector, extractor, classifier):
    """Print an end-of-run summary over all collected samples.

    Silently skips the summary when fewer than 4 samples were gathered,
    mirroring the per-report minimum window.
    """
    samples = collector.get_samples()
    if len(samples) < 4:
        return
    features = extractor.extract(samples)
    result = classifier.classify(features)
    rssi_vals = [s.rssi_dbm for s in samples]
    print()
    print(" SUMMARY")
    print(f" Duration: {samples[-1].timestamp - samples[0].timestamp:.1f}s")
    print(f" Total samples: {len(samples)}")
    print(f" RSSI range: {min(rssi_vals):.1f} to {max(rssi_vals):.1f} dBm")
    print(f" RSSI variance: {features.variance:.4f}")
    print(f" Motion energy: {features.motion_band_power:.4f}")
    print(f" Breath energy: {features.breathing_band_power:.4f}")
    print(f" Change points: {features.n_change_points}")
    print(f" Final verdict: {result.motion_level.value} ({result.confidence:.0%})")
# Script entry point: run the live monitor when executed directly.
if __name__ == "__main__":
    main()