feat: Complete Rust port of WiFi-DensePose with modular crates

Major changes:
- Organized Python v1 implementation into v1/ subdirectory
- Created Rust workspace with 9 modular crates:
  - wifi-densepose-core: Core types, traits, errors
  - wifi-densepose-signal: CSI processing, phase sanitization, FFT
  - wifi-densepose-nn: Neural network inference (ONNX/Candle/tch)
  - wifi-densepose-api: Axum-based REST/WebSocket API
  - wifi-densepose-db: SQLx database layer
  - wifi-densepose-config: Configuration management
  - wifi-densepose-hardware: Hardware abstraction
  - wifi-densepose-wasm: WebAssembly bindings
  - wifi-densepose-cli: Command-line interface

Documentation:
- ADR-001: Workspace structure
- ADR-002: Signal processing library selection
- ADR-003: Neural network inference strategy
- DDD domain model with bounded contexts

Testing:
- 69 tests passing across all crates
- Signal processing: 45 tests
- Neural networks: 21 tests
- Core: 3 doc tests

Performance targets:
- 10x faster CSI processing (~0.5ms vs ~5ms)
- 5x lower memory usage (~100MB vs ~500MB)
- WASM support for browser deployment
This commit is contained in:
Claude
2026-01-13 03:11:16 +00:00
parent 5101504b72
commit 6ed69a3d48
427 changed files with 90993 additions and 0 deletions

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,376 +0,0 @@
#!/usr/bin/env python3
"""
API Endpoint Testing Script
Tests all WiFi-DensePose API endpoints and provides debugging information.
"""
import asyncio
import json
import sys
import time
import traceback
from datetime import datetime, timedelta
from typing import Dict, List, Any, Optional
import aiohttp
import websockets
from colorama import Fore, Style, init
# Initialize colorama for colored output
init(autoreset=True)
class APITester:
    """Comprehensive API endpoint tester.

    Exercises the WiFi-DensePose REST and WebSocket endpoints of a running
    server and accumulates per-test details plus aggregate counters in
    ``self.results``.  Use as an async context manager so the aiohttp
    session is opened and closed deterministically.
    """

    def __init__(self, base_url: str = "http://localhost:8000"):
        self.base_url = base_url
        self.session = None  # aiohttp.ClientSession, created in __aenter__
        self.results = {
            "total_tests": 0,
            "passed": 0,
            "failed": 0,
            "errors": [],
            "test_details": []
        }

    async def __aenter__(self):
        """Async context manager entry: open the HTTP session."""
        self.session = aiohttp.ClientSession()
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb):
        """Async context manager exit: close the HTTP session."""
        if self.session:
            await self.session.close()

    def log_success(self, message: str):
        """Log success message (green)."""
        print(f"{Fore.GREEN}{message}{Style.RESET_ALL}")

    def log_error(self, message: str):
        """Log error message (red)."""
        print(f"{Fore.RED}{message}{Style.RESET_ALL}")

    def log_info(self, message: str):
        """Log info message (blue)."""
        print(f"{Fore.BLUE} {message}{Style.RESET_ALL}")

    def log_warning(self, message: str):
        """Log warning message (yellow)."""
        print(f"{Fore.YELLOW}{message}{Style.RESET_ALL}")

    def _websocket_base(self) -> str:
        """Return the WebSocket base URL derived from ``self.base_url``.

        Bug fix: the WebSocket tests previously hardcoded
        ``ws://localhost:8000`` and silently ignored a custom ``base_url``.
        """
        if self.base_url.startswith("https://"):
            return "wss://" + self.base_url[len("https://"):]
        if self.base_url.startswith("http://"):
            return "ws://" + self.base_url[len("http://"):]
        return self.base_url

    async def test_endpoint(
        self,
        method: str,
        endpoint: str,
        expected_status: int = 200,
        data: Optional[Dict] = None,
        params: Optional[Dict] = None,
        headers: Optional[Dict] = None,
        description: str = ""
    ) -> Dict[str, Any]:
        """Test a single REST endpoint and record the outcome.

        Returns the per-test result dict (also appended to
        ``self.results["test_details"]``).  Exceptions are swallowed and
        recorded as failures so one broken endpoint cannot abort the run.
        """
        self.results["total_tests"] += 1
        test_name = f"{method.upper()} {endpoint}"
        try:
            url = f"{self.base_url}{endpoint}"
            # Prepare request; explicit None checks so empty dicts are still
            # forwarded (the old truthiness test silently dropped them).
            kwargs = {}
            if data is not None:
                kwargs["json"] = data
            if params is not None:
                kwargs["params"] = params
            if headers is not None:
                kwargs["headers"] = headers
            # Make request
            start_time = time.time()
            async with self.session.request(method, url, **kwargs) as response:
                response_time = (time.time() - start_time) * 1000
                response_text = await response.text()
                # Try to parse JSON response; fall back to the raw body.
                try:
                    response_data = json.loads(response_text) if response_text else {}
                except json.JSONDecodeError:
                    response_data = {"raw_response": response_text}
                # Check status code
                status_ok = response.status == expected_status
                test_result = {
                    "test_name": test_name,
                    "description": description,
                    "url": url,
                    "method": method.upper(),
                    "expected_status": expected_status,
                    "actual_status": response.status,
                    "response_time_ms": round(response_time, 2),
                    "response_data": response_data,
                    "success": status_ok,
                    "timestamp": datetime.now().isoformat()
                }
                if status_ok:
                    self.results["passed"] += 1
                    self.log_success(f"{test_name} - {response.status} ({response_time:.1f}ms)")
                    if description:
                        print(f" {description}")
                else:
                    self.results["failed"] += 1
                    self.log_error(f"{test_name} - Expected {expected_status}, got {response.status}")
                    if description:
                        print(f" {description}")
                    print(f" Response: {response_text[:200]}...")
                self.results["test_details"].append(test_result)
                return test_result
        except Exception as e:
            # Connection errors, timeouts etc. become failed-test records.
            self.results["failed"] += 1
            error_msg = f"{test_name} - Exception: {str(e)}"
            self.log_error(error_msg)
            test_result = {
                "test_name": test_name,
                "description": description,
                "url": f"{self.base_url}{endpoint}",
                "method": method.upper(),
                "expected_status": expected_status,
                "actual_status": None,
                "response_time_ms": None,
                "response_data": None,
                "success": False,
                "error": str(e),
                "traceback": traceback.format_exc(),
                "timestamp": datetime.now().isoformat()
            }
            self.results["errors"].append(error_msg)
            self.results["test_details"].append(test_result)
            return test_result

    async def test_websocket_endpoint(self, endpoint: str, description: str = "") -> Dict[str, Any]:
        """Test a WebSocket endpoint: connect, subscribe, await one message."""
        self.results["total_tests"] += 1
        test_name = f"WebSocket {endpoint}"
        # Bug fix: derive the ws:// URL from base_url instead of hardcoding
        # ws://localhost:8000.
        ws_url = f"{self._websocket_base()}{endpoint}"
        try:
            start_time = time.time()
            async with websockets.connect(ws_url) as websocket:
                # Send a test message
                test_message = {"type": "subscribe", "zone_ids": ["zone_1"]}
                await websocket.send(json.dumps(test_message))
                # Wait for response
                response = await asyncio.wait_for(websocket.recv(), timeout=3)
                response_time = (time.time() - start_time) * 1000
                try:
                    response_data = json.loads(response)
                except json.JSONDecodeError:
                    response_data = {"raw_response": response}
                test_result = {
                    "test_name": test_name,
                    "description": description,
                    "url": ws_url,
                    "method": "WebSocket",
                    "response_time_ms": round(response_time, 2),
                    "response_data": response_data,
                    "success": True,
                    "timestamp": datetime.now().isoformat()
                }
                self.results["passed"] += 1
                self.log_success(f"{test_name} - Connected ({response_time:.1f}ms)")
                if description:
                    print(f" {description}")
                self.results["test_details"].append(test_result)
                return test_result
        except Exception as e:
            self.results["failed"] += 1
            error_msg = f"{test_name} - Exception: {str(e)}"
            self.log_error(error_msg)
            test_result = {
                "test_name": test_name,
                "description": description,
                "url": ws_url,
                "method": "WebSocket",
                "response_time_ms": None,
                "response_data": None,
                "success": False,
                "error": str(e),
                "traceback": traceback.format_exc(),
                "timestamp": datetime.now().isoformat()
            }
            self.results["errors"].append(error_msg)
            self.results["test_details"].append(test_result)
            return test_result

    async def run_all_tests(self):
        """Run all API endpoint tests in sequence."""
        print(f"{Fore.CYAN}{'='*60}")
        print(f"{Fore.CYAN}WiFi-DensePose API Endpoint Testing")
        print(f"{Fore.CYAN}{'='*60}{Style.RESET_ALL}")
        print()
        # Test Health Endpoints
        print(f"{Fore.MAGENTA}Testing Health Endpoints:{Style.RESET_ALL}")
        await self.test_endpoint("GET", "/health/health", description="System health check")
        await self.test_endpoint("GET", "/health/ready", description="Readiness check")
        print()
        # Test Pose Estimation Endpoints
        print(f"{Fore.MAGENTA}Testing Pose Estimation Endpoints:{Style.RESET_ALL}")
        await self.test_endpoint("GET", "/api/v1/pose/current", description="Current pose estimation")
        await self.test_endpoint("GET", "/api/v1/pose/current",
                                 params={"zone_ids": ["zone_1"], "confidence_threshold": 0.7},
                                 description="Current pose estimation with parameters")
        await self.test_endpoint("POST", "/api/v1/pose/analyze", description="Pose analysis (requires auth)")
        await self.test_endpoint("GET", "/api/v1/pose/zones/zone_1/occupancy", description="Zone occupancy")
        await self.test_endpoint("GET", "/api/v1/pose/zones/summary", description="All zones summary")
        print()
        # Test Historical Data Endpoints
        print(f"{Fore.MAGENTA}Testing Historical Data Endpoints:{Style.RESET_ALL}")
        end_time = datetime.now()
        start_time = end_time - timedelta(hours=1)
        historical_data = {
            "start_time": start_time.isoformat(),
            "end_time": end_time.isoformat(),
            "zone_ids": ["zone_1"],
            "aggregation_interval": 300
        }
        await self.test_endpoint("POST", "/api/v1/pose/historical",
                                 data=historical_data,
                                 description="Historical pose data (requires auth)")
        await self.test_endpoint("GET", "/api/v1/pose/activities", description="Recent activities")
        await self.test_endpoint("GET", "/api/v1/pose/activities",
                                 params={"zone_id": "zone_1", "limit": 5},
                                 description="Activities for specific zone")
        print()
        # Test Calibration Endpoints
        print(f"{Fore.MAGENTA}Testing Calibration Endpoints:{Style.RESET_ALL}")
        await self.test_endpoint("GET", "/api/v1/pose/calibration/status", description="Calibration status (requires auth)")
        await self.test_endpoint("POST", "/api/v1/pose/calibrate", description="Start calibration (requires auth)")
        print()
        # Test Statistics Endpoints
        print(f"{Fore.MAGENTA}Testing Statistics Endpoints:{Style.RESET_ALL}")
        await self.test_endpoint("GET", "/api/v1/pose/stats", description="Pose statistics")
        await self.test_endpoint("GET", "/api/v1/pose/stats",
                                 params={"hours": 12}, description="Pose statistics (12 hours)")
        print()
        # Test Stream Endpoints
        print(f"{Fore.MAGENTA}Testing Stream Endpoints:{Style.RESET_ALL}")
        await self.test_endpoint("GET", "/api/v1/stream/status", description="Stream status")
        await self.test_endpoint("POST", "/api/v1/stream/start", description="Start streaming (requires auth)")
        await self.test_endpoint("POST", "/api/v1/stream/stop", description="Stop streaming (requires auth)")
        print()
        # Test WebSocket Endpoints
        print(f"{Fore.MAGENTA}Testing WebSocket Endpoints:{Style.RESET_ALL}")
        await self.test_websocket_endpoint("/api/v1/stream/pose", description="Pose WebSocket")
        await self.test_websocket_endpoint("/api/v1/stream/events", description="Events WebSocket")
        print()
        # Test Documentation Endpoints
        print(f"{Fore.MAGENTA}Testing Documentation Endpoints:{Style.RESET_ALL}")
        await self.test_endpoint("GET", "/docs", description="API documentation")
        await self.test_endpoint("GET", "/openapi.json", description="OpenAPI schema")
        print()
        # Test API Info Endpoints
        print(f"{Fore.MAGENTA}Testing API Info Endpoints:{Style.RESET_ALL}")
        await self.test_endpoint("GET", "/", description="Root endpoint")
        await self.test_endpoint("GET", "/api/v1/info", description="API information")
        await self.test_endpoint("GET", "/api/v1/status", description="API status")
        print()
        # Test Error Cases
        print(f"{Fore.MAGENTA}Testing Error Cases:{Style.RESET_ALL}")
        await self.test_endpoint("GET", "/nonexistent", expected_status=404,
                                 description="Non-existent endpoint")
        await self.test_endpoint("POST", "/api/v1/pose/analyze",
                                 data={"invalid": "data"}, expected_status=401,
                                 description="Unauthorized request (no auth)")
        print()

    def print_summary(self):
        """Print the test summary, persist details to disk, and return
        True when every test passed."""
        print(f"{Fore.CYAN}{'='*60}")
        print(f"{Fore.CYAN}Test Summary")
        print(f"{Fore.CYAN}{'='*60}{Style.RESET_ALL}")
        total = self.results["total_tests"]
        passed = self.results["passed"]
        failed = self.results["failed"]
        success_rate = (passed / total * 100) if total > 0 else 0
        print(f"Total Tests: {total}")
        print(f"{Fore.GREEN}Passed: {passed}{Style.RESET_ALL}")
        print(f"{Fore.RED}Failed: {failed}{Style.RESET_ALL}")
        print(f"Success Rate: {success_rate:.1f}%")
        print()
        if self.results["errors"]:
            print(f"{Fore.RED}Errors:{Style.RESET_ALL}")
            for error in self.results["errors"]:
                print(f" - {error}")
            print()
        # Save detailed results to file; assumes a scripts/ directory exists
        # relative to the CWD — a failure here only warns.
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        results_file = f"scripts/api_test_results_{timestamp}.json"
        try:
            with open(results_file, 'w') as f:
                json.dump(self.results, f, indent=2, default=str)
            print(f"Detailed results saved to: {results_file}")
        except Exception as e:
            self.log_warning(f"Could not save results file: {e}")
        return failed == 0
async def main():
    """Run the full endpoint suite and exit with a matching status code."""
    try:
        async with APITester() as tester:
            await tester.run_all_tests()
            # Exit code mirrors the overall result.
            sys.exit(0 if tester.print_summary() else 1)
    except KeyboardInterrupt:
        print(f"\n{Fore.YELLOW}Tests interrupted by user{Style.RESET_ALL}")
        sys.exit(1)
    except Exception as e:
        print(f"\n{Fore.RED}Fatal error: {e}{Style.RESET_ALL}")
        traceback.print_exc()
        sys.exit(1)


if __name__ == "__main__":
    # Fail fast with an install hint when optional dependencies are missing.
    try:
        import aiohttp
        import websockets
        import colorama
    except ImportError as e:
        print(f"Missing required package: {e}")
        print("Install with: pip install aiohttp websockets colorama")
        sys.exit(1)
    asyncio.run(main())

View File

@@ -1,366 +0,0 @@
#!/usr/bin/env python3
"""
Test script for WiFi-DensePose monitoring functionality
"""
import asyncio
import aiohttp
import json
import sys
from datetime import datetime
from typing import Dict, Any, List
import time
class MonitoringTester:
    """Test monitoring endpoints and metrics collection.

    Each ``test_*`` coroutine hits one endpoint of a running server,
    validates the response structure, and appends a record to
    ``self.results``.  ``run_all_tests`` orchestrates the suite and writes
    a JSON summary to ``monitoring_test_results.json``.
    """

    def __init__(self, base_url: str = "http://localhost:8000"):
        self.base_url = base_url
        self.session = None  # aiohttp.ClientSession, created in setup()
        self.results = []

    async def setup(self):
        """Setup test session."""
        self.session = aiohttp.ClientSession()

    async def teardown(self):
        """Cleanup test session."""
        if self.session:
            await self.session.close()

    async def test_health_endpoint(self):
        """Test the /health endpoint."""
        print("\n[TEST] Health Endpoint")
        try:
            async with self.session.get(f"{self.base_url}/health") as response:
                status = response.status
                data = await response.json()
                print(f"Status: {status}")
                print(f"Response: {json.dumps(data, indent=2)}")
                self.results.append({
                    "test": "health_endpoint",
                    "status": "passed" if status == 200 else "failed",
                    "response_code": status,
                    "data": data
                })
                # Verify structure
                assert "status" in data
                assert "timestamp" in data
                assert "components" in data
                assert "system_metrics" in data
                print("✅ Health endpoint test passed")
        except Exception as e:
            print(f"❌ Health endpoint test failed: {e}")
            self.results.append({
                "test": "health_endpoint",
                "status": "failed",
                "error": str(e)
            })

    async def test_ready_endpoint(self):
        """Test the /ready endpoint."""
        print("\n[TEST] Readiness Endpoint")
        try:
            async with self.session.get(f"{self.base_url}/ready") as response:
                status = response.status
                data = await response.json()
                print(f"Status: {status}")
                print(f"Response: {json.dumps(data, indent=2)}")
                self.results.append({
                    "test": "ready_endpoint",
                    "status": "passed" if status == 200 else "failed",
                    "response_code": status,
                    "data": data
                })
                # Verify structure
                assert "ready" in data
                assert "timestamp" in data
                assert "checks" in data
                assert "message" in data
                print("✅ Readiness endpoint test passed")
        except Exception as e:
            print(f"❌ Readiness endpoint test failed: {e}")
            self.results.append({
                "test": "ready_endpoint",
                "status": "failed",
                "error": str(e)
            })

    async def test_liveness_endpoint(self):
        """Test the /live endpoint."""
        print("\n[TEST] Liveness Endpoint")
        try:
            async with self.session.get(f"{self.base_url}/live") as response:
                status = response.status
                data = await response.json()
                print(f"Status: {status}")
                print(f"Response: {json.dumps(data, indent=2)}")
                self.results.append({
                    "test": "liveness_endpoint",
                    "status": "passed" if status == 200 else "failed",
                    "response_code": status,
                    "data": data
                })
                # Verify structure
                assert "status" in data
                assert "timestamp" in data
                print("✅ Liveness endpoint test passed")
        except Exception as e:
            print(f"❌ Liveness endpoint test failed: {e}")
            self.results.append({
                "test": "liveness_endpoint",
                "status": "failed",
                "error": str(e)
            })

    async def test_metrics_endpoint(self):
        """Test the /metrics endpoint."""
        print("\n[TEST] Metrics Endpoint")
        try:
            async with self.session.get(f"{self.base_url}/metrics") as response:
                status = response.status
                data = await response.json()
                print(f"Status: {status}")
                print(f"Response: {json.dumps(data, indent=2)}")
                self.results.append({
                    "test": "metrics_endpoint",
                    "status": "passed" if status == 200 else "failed",
                    "response_code": status,
                    "data": data
                })
                # Verify structure
                assert "timestamp" in data
                assert "metrics" in data
                # Check for system metrics
                metrics = data.get("metrics", {})
                assert "cpu" in metrics
                assert "memory" in metrics
                assert "disk" in metrics
                assert "network" in metrics
                print("✅ Metrics endpoint test passed")
        except Exception as e:
            print(f"❌ Metrics endpoint test failed: {e}")
            self.results.append({
                "test": "metrics_endpoint",
                "status": "failed",
                "error": str(e)
            })

    async def test_version_endpoint(self):
        """Test the /version endpoint."""
        print("\n[TEST] Version Endpoint")
        try:
            async with self.session.get(f"{self.base_url}/version") as response:
                status = response.status
                data = await response.json()
                print(f"Status: {status}")
                print(f"Response: {json.dumps(data, indent=2)}")
                self.results.append({
                    "test": "version_endpoint",
                    "status": "passed" if status == 200 else "failed",
                    "response_code": status,
                    "data": data
                })
                # Verify structure
                assert "name" in data
                assert "version" in data
                assert "environment" in data
                assert "timestamp" in data
                print("✅ Version endpoint test passed")
        except Exception as e:
            print(f"❌ Version endpoint test failed: {e}")
            self.results.append({
                "test": "version_endpoint",
                "status": "failed",
                "error": str(e)
            })

    async def test_metrics_collection(self):
        """Test metrics collection over time."""
        print("\n[TEST] Metrics Collection Over Time")
        try:
            # Collect metrics 3 times with 2-second intervals
            metrics_snapshots = []
            for i in range(3):
                async with self.session.get(f"{self.base_url}/metrics") as response:
                    data = await response.json()
                    metrics_snapshots.append({
                        "timestamp": time.time(),
                        "metrics": data.get("metrics", {})
                    })
                if i < 2:
                    await asyncio.sleep(2)
            # Verify metrics are changing
            cpu_values = [
                snapshot["metrics"].get("cpu", {}).get("percent", 0)
                for snapshot in metrics_snapshots
            ]
            print(f"CPU usage over time: {cpu_values}")
            # Check if at least some metrics are non-zero
            all_zeros = all(v == 0 for v in cpu_values)
            assert not all_zeros, "All CPU metrics are zero"
            self.results.append({
                "test": "metrics_collection",
                "status": "passed",
                "snapshots": len(metrics_snapshots),
                "cpu_values": cpu_values
            })
            print("✅ Metrics collection test passed")
        except Exception as e:
            print(f"❌ Metrics collection test failed: {e}")
            self.results.append({
                "test": "metrics_collection",
                "status": "failed",
                "error": str(e)
            })

    async def test_system_load(self):
        """Test system under load to verify monitoring."""
        print("\n[TEST] System Load Monitoring")
        try:
            # Generate some load by making multiple concurrent requests
            print("Generating load with 20 concurrent requests...")
            tasks = []
            for i in range(20):
                tasks.append(self.session.get(f"{self.base_url}/health"))
            start_time = time.time()
            responses = await asyncio.gather(*tasks, return_exceptions=True)
            duration = time.time() - start_time
            success_count = sum(
                1 for r in responses
                if not isinstance(r, Exception) and r.status == 200
            )
            # Bug fix: close every successful response so its connection is
            # returned to the pool — the original leaked all 20 responses.
            for r in responses:
                if not isinstance(r, Exception):
                    r.close()
            print(f"Completed {len(responses)} requests in {duration:.2f}s")
            print(f"Success rate: {success_count}/{len(responses)}")
            # Check metrics after load
            async with self.session.get(f"{self.base_url}/metrics") as response:
                data = await response.json()
                metrics = data.get("metrics", {})
                print(f"CPU after load: {metrics.get('cpu', {}).get('percent', 0)}%")
                print(f"Memory usage: {metrics.get('memory', {}).get('percent', 0)}%")
            self.results.append({
                "test": "system_load",
                "status": "passed",
                "requests": len(responses),
                "success_rate": f"{success_count}/{len(responses)}",
                "duration": duration
            })
            print("✅ System load monitoring test passed")
        except Exception as e:
            print(f"❌ System load monitoring test failed: {e}")
            self.results.append({
                "test": "system_load",
                "status": "failed",
                "error": str(e)
            })

    async def run_all_tests(self):
        """Run all monitoring tests; return True when everything passed."""
        print("=== WiFi-DensePose Monitoring Tests ===")
        print(f"Base URL: {self.base_url}")
        print(f"Started at: {datetime.now().isoformat()}")
        await self.setup()
        try:
            # Run all tests
            await self.test_health_endpoint()
            await self.test_ready_endpoint()
            await self.test_liveness_endpoint()
            await self.test_metrics_endpoint()
            await self.test_version_endpoint()
            await self.test_metrics_collection()
            await self.test_system_load()
        finally:
            # Always close the HTTP session, even when a test raises.
            await self.teardown()
        # Print summary
        print("\n=== Test Summary ===")
        passed = sum(1 for r in self.results if r["status"] == "passed")
        failed = sum(1 for r in self.results if r["status"] == "failed")
        print(f"Total tests: {len(self.results)}")
        print(f"Passed: {passed}")
        print(f"Failed: {failed}")
        if failed > 0:
            print("\nFailed tests:")
            for result in self.results:
                if result["status"] == "failed":
                    print(f" - {result['test']}: {result.get('error', 'Unknown error')}")
        # Save results
        with open("monitoring_test_results.json", "w") as f:
            json.dump({
                "timestamp": datetime.now().isoformat(),
                "base_url": self.base_url,
                "summary": {
                    "total": len(self.results),
                    "passed": passed,
                    "failed": failed
                },
                "results": self.results
            }, f, indent=2)
        print("\nResults saved to monitoring_test_results.json")
        return failed == 0
async def main():
    """CLI entry point: optional base URL as the first argument."""
    target = sys.argv[1] if len(sys.argv) > 1 else "http://localhost:8000"
    ok = await MonitoringTester(target).run_all_tests()
    sys.exit(0 if ok else 1)


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -1,157 +0,0 @@
#!/usr/bin/env python3
"""
WebSocket Streaming Test Script
Tests real-time pose data streaming via WebSocket
"""
import asyncio
import json
import websockets
from datetime import datetime
async def test_pose_streaming():
    """Test pose data streaming via WebSocket.

    Connects, confirms the handshake, pings, then listens for roughly ten
    seconds and reports each message received.
    """
    uri = "ws://localhost:8000/api/v1/stream/pose?zone_ids=zone_1,zone_2&min_confidence=0.3&max_fps=10"
    print(f"[{datetime.now()}] Connecting to WebSocket...")
    try:
        async with websockets.connect(uri) as ws:
            print(f"[{datetime.now()}] Connected successfully!")
            # The server sends a confirmation frame first.
            confirmation = json.loads(await ws.recv())
            print(f"[{datetime.now()}] Connection confirmed:")
            print(json.dumps(confirmation, indent=2))
            # Send a ping message
            await ws.send(json.dumps({"type": "ping"}))
            print(f"[{datetime.now()}] Sent ping message")
            print(f"[{datetime.now()}] Listening for pose updates...")
            loop = asyncio.get_event_loop()
            deadline = loop.time() + 10
            received = 0
            while loop.time() < deadline:
                try:
                    frame = await asyncio.wait_for(ws.recv(), timeout=1.0)
                except asyncio.TimeoutError:
                    # No message within the poll window; keep listening.
                    continue
                except Exception as e:
                    print(f"[{datetime.now()}] Error receiving message: {e}")
                    continue
                payload = json.loads(frame)
                received += 1
                kind = payload.get("type", "unknown")
                if kind == "pose_update":
                    print(f"[{datetime.now()}] Pose update received:")
                    print(f" - Frame ID: {payload.get('frame_id')}")
                    print(f" - Persons detected: {len(payload.get('persons', []))}")
                    print(f" - Zone summary: {payload.get('zone_summary', {})}")
                elif kind == "pong":
                    print(f"[{datetime.now()}] Pong received")
                else:
                    print(f"[{datetime.now()}] Message type '{kind}' received")
            print(f"\n[{datetime.now()}] Test completed!")
            print(f"Total messages received: {received}")
            # Tell the server we are leaving before closing the socket.
            await ws.send(json.dumps({"type": "disconnect"}))
    except Exception as e:
        print(f"[{datetime.now()}] WebSocket error: {e}")
async def test_event_streaming():
    """Test event streaming via WebSocket: request status, read five frames."""
    uri = "ws://localhost:8000/api/v1/stream/events?event_types=motion,presence&zone_ids=zone_1"
    print(f"\n[{datetime.now()}] Testing event streaming...")
    print(f"[{datetime.now()}] Connecting to WebSocket...")
    try:
        async with websockets.connect(uri) as ws:
            print(f"[{datetime.now()}] Connected successfully!")
            # First frame from the server confirms the subscription.
            confirmation = json.loads(await ws.recv())
            print(f"[{datetime.now()}] Connection confirmed:")
            print(json.dumps(confirmation, indent=2))
            await ws.send(json.dumps({"type": "get_status"}))
            print(f"[{datetime.now()}] Requested status")
            # Read up to five events, tolerating quiet periods.
            for _ in range(5):
                try:
                    frame = await asyncio.wait_for(ws.recv(), timeout=2.0)
                    event = json.loads(frame)
                    print(f"[{datetime.now()}] Event received: {event.get('type')}")
                except asyncio.TimeoutError:
                    print(f"[{datetime.now()}] No event received (timeout)")
    except Exception as e:
        print(f"[{datetime.now()}] WebSocket error: {e}")
async def test_websocket_errors():
    """Test WebSocket error handling for bad endpoints and bad payloads."""
    print(f"\n[{datetime.now()}] Testing error handling...")
    # Case 1: connecting to a non-existent endpoint should be rejected.
    try:
        async with websockets.connect("ws://localhost:8000/api/v1/stream/invalid") as ws:
            print("Connected to invalid endpoint (unexpected)")
    except Exception as e:
        print(f"[{datetime.now()}] Expected error for invalid endpoint: {type(e).__name__}")
    # Case 2: malformed JSON should yield an error frame, not a crash.
    try:
        async with websockets.connect("ws://localhost:8000/api/v1/stream/pose") as ws:
            await ws.send("invalid json {")
            reply = json.loads(await ws.recv())
            if reply.get("type") == "error":
                print(f"[{datetime.now()}] Received expected error for invalid JSON")
    except Exception as e:
        print(f"[{datetime.now()}] Error testing invalid JSON: {e}")
async def main():
    """Run all WebSocket tests."""
    banner = "=" * 60
    print(banner)
    print("WiFi-DensePose WebSocket Streaming Tests")
    print(banner)
    # Run each streaming scenario in order.
    for scenario in (test_pose_streaming, test_event_streaming, test_websocket_errors):
        await scenario()
    print("\n" + banner)
    print("All tests completed!")
    print(banner)


if __name__ == "__main__":
    asyncio.run(main())

View File

@@ -1,398 +0,0 @@
#!/bin/bash
# WiFi-DensePose Deployment Validation Script
# This script validates that all deployment components are functioning correctly
set -euo pipefail

# Configuration
NAMESPACE="wifi-densepose"        # namespace holding the application workloads
MONITORING_NAMESPACE="monitoring" # namespace holding Prometheus/Grafana
TIMEOUT=300                       # seconds to wait for deployments to become ready

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Logging functions
# Each takes a single message argument and prints it with a colored prefix.
log_info() {
    echo -e "${BLUE}[INFO]${NC} $1"
}
log_success() {
    echo -e "${GREEN}[SUCCESS]${NC} $1"
}
log_warning() {
    echo -e "${YELLOW}[WARNING]${NC} $1"
}
log_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}
# Check if kubectl is available and configured
check_kubectl() {
    log_info "Checking kubectl configuration..."
    # Fail early if the binary is missing entirely.
    if ! command -v kubectl &> /dev/null; then
        log_error "kubectl is not installed or not in PATH"
        return 1
    fi
    # cluster-info also verifies credentials/connectivity, not just a kubeconfig.
    if ! kubectl cluster-info &> /dev/null; then
        log_error "kubectl is not configured or cluster is not accessible"
        return 1
    fi
    log_success "kubectl is configured and cluster is accessible"
    return 0
}
# Validate namespace exists
# $1 - name of the namespace to check
validate_namespace() {
    local ns=$1
    log_info "Validating namespace: $ns"
    if kubectl get namespace "$ns" &> /dev/null; then
        log_success "Namespace $ns exists"
        return 0
    else
        log_error "Namespace $ns does not exist"
        return 1
    fi
}
# Validate deployments are ready
# Waits up to $TIMEOUT seconds for each deployment in $NAMESPACE to become
# available, then cross-checks ready replicas against the desired count.
validate_deployments() {
    log_info "Validating deployments in namespace: $NAMESPACE"
    local deployments
    deployments=$(kubectl get deployments -n "$NAMESPACE" -o jsonpath='{.items[*].metadata.name}')
    if [ -z "$deployments" ]; then
        log_warning "No deployments found in namespace $NAMESPACE"
        return 1
    fi
    local failed=0
    for deployment in $deployments; do
        log_info "Checking deployment: $deployment"
        if kubectl wait --for=condition=available --timeout="${TIMEOUT}s" "deployment/$deployment" -n "$NAMESPACE" &> /dev/null; then
            local ready_replicas
            ready_replicas=$(kubectl get deployment "$deployment" -n "$NAMESPACE" -o jsonpath='{.status.readyReplicas}')
            local desired_replicas
            desired_replicas=$(kubectl get deployment "$deployment" -n "$NAMESPACE" -o jsonpath='{.spec.replicas}')
            if [ "$ready_replicas" = "$desired_replicas" ]; then
                log_success "Deployment $deployment is ready ($ready_replicas/$desired_replicas replicas)"
            else
                log_warning "Deployment $deployment has $ready_replicas/$desired_replicas replicas ready"
                failed=1
            fi
        else
            log_error "Deployment $deployment is not ready within ${TIMEOUT}s"
            failed=1
        fi
    done
    return $failed
}
# Validate services are accessible
# A service is considered healthy when it has at least one ready endpoint IP.
validate_services() {
    log_info "Validating services in namespace: $NAMESPACE"
    local services
    services=$(kubectl get services -n "$NAMESPACE" -o jsonpath='{.items[*].metadata.name}')
    if [ -z "$services" ]; then
        log_warning "No services found in namespace $NAMESPACE"
        return 1
    fi
    local failed=0
    for service in $services; do
        log_info "Checking service: $service"
        local endpoints
        endpoints=$(kubectl get endpoints "$service" -n "$NAMESPACE" -o jsonpath='{.subsets[*].addresses[*].ip}')
        if [ -n "$endpoints" ]; then
            log_success "Service $service has endpoints: $endpoints"
        else
            log_error "Service $service has no endpoints"
            failed=1
        fi
    done
    return $failed
}
# Validate ingress configuration
# Missing ingress resources are tolerated (returns 0); configured ingresses
# must have hosts, while a missing load-balancer address only warns.
validate_ingress() {
    log_info "Validating ingress configuration in namespace: $NAMESPACE"
    local ingresses
    ingresses=$(kubectl get ingress -n "$NAMESPACE" -o jsonpath='{.items[*].metadata.name}')
    if [ -z "$ingresses" ]; then
        log_warning "No ingress resources found in namespace $NAMESPACE"
        return 0
    fi
    local failed=0
    for ingress in $ingresses; do
        log_info "Checking ingress: $ingress"
        local hosts
        hosts=$(kubectl get ingress "$ingress" -n "$NAMESPACE" -o jsonpath='{.spec.rules[*].host}')
        if [ -n "$hosts" ]; then
            log_success "Ingress $ingress configured for hosts: $hosts"
            # Check if ingress has an IP/hostname assigned
            local address
            address=$(kubectl get ingress "$ingress" -n "$NAMESPACE" -o jsonpath='{.status.loadBalancer.ingress[0].ip}{.status.loadBalancer.ingress[0].hostname}')
            if [ -n "$address" ]; then
                log_success "Ingress $ingress has address: $address"
            else
                log_warning "Ingress $ingress does not have an assigned address yet"
            fi
        else
            log_error "Ingress $ingress has no configured hosts"
            failed=1
        fi
    done
    return $failed
}
# Validate ConfigMaps and Secrets
# Informational only: absence of either resource just warns (always returns 0).
validate_config() {
    log_info "Validating ConfigMaps and Secrets in namespace: $NAMESPACE"
    # Check ConfigMaps
    local configmaps
    configmaps=$(kubectl get configmaps -n "$NAMESPACE" -o jsonpath='{.items[*].metadata.name}')
    if [ -n "$configmaps" ]; then
        log_success "ConfigMaps found: $configmaps"
    else
        log_warning "No ConfigMaps found in namespace $NAMESPACE"
    fi
    # Check Secrets; the auto-generated default-token secret is filtered out.
    local secrets
    secrets=$(kubectl get secrets -n "$NAMESPACE" -o jsonpath='{.items[*].metadata.name}' | tr ' ' '\n' | grep -v "default-token" | tr '\n' ' ')
    if [ -n "$secrets" ]; then
        log_success "Secrets found: $secrets"
    else
        log_warning "No custom secrets found in namespace $NAMESPACE"
    fi
    return 0
}
# Validate HPA configuration
# NOTE(review): $failed is declared but never set to 1 in this function, so
# this check can currently only warn — it never fails the validation run.
validate_hpa() {
    log_info "Validating Horizontal Pod Autoscaler in namespace: $NAMESPACE"
    local hpas
    hpas=$(kubectl get hpa -n "$NAMESPACE" -o jsonpath='{.items[*].metadata.name}')
    if [ -z "$hpas" ]; then
        log_warning "No HPA resources found in namespace $NAMESPACE"
        return 0
    fi
    local failed=0
    for hpa in $hpas; do
        log_info "Checking HPA: $hpa"
        local current_replicas
        current_replicas=$(kubectl get hpa "$hpa" -n "$NAMESPACE" -o jsonpath='{.status.currentReplicas}')
        local desired_replicas
        desired_replicas=$(kubectl get hpa "$hpa" -n "$NAMESPACE" -o jsonpath='{.status.desiredReplicas}')
        if [ -n "$current_replicas" ] && [ -n "$desired_replicas" ]; then
            log_success "HPA $hpa: current=$current_replicas, desired=$desired_replicas"
        else
            log_warning "HPA $hpa metrics not available yet"
        fi
    done
    return $failed
}
# Test application health endpoints
# Port-forwards each app pod and curls its /health endpoint through the tunnel.
# NOTE(review): assumes the container listens on port 8080 — confirm against
# the deployment spec (other scripts in this repo target port 8000).
test_health_endpoints() {
    log_info "Testing application health endpoints..."
    # Get application pods
    local pods
    pods=$(kubectl get pods -n "$NAMESPACE" -l app=wifi-densepose -o jsonpath='{.items[*].metadata.name}')
    if [ -z "$pods" ]; then
        log_error "No application pods found"
        return 1
    fi
    local failed=0
    for pod in $pods; do
        log_info "Testing health endpoint for pod: $pod"
        # Port forward in the background, give it a moment to establish,
        # then probe the local end of the tunnel.
        kubectl port-forward "pod/$pod" 8080:8080 -n "$NAMESPACE" &
        local pf_pid=$!
        sleep 2
        if curl -f http://localhost:8080/health &> /dev/null; then
            log_success "Health endpoint for pod $pod is responding"
        else
            log_error "Health endpoint for pod $pod is not responding"
            failed=1
        fi
        # Tear down the port-forward; it may already have exited.
        kill $pf_pid 2>/dev/null || true
        sleep 1
    done
    return $failed
}
# Validate monitoring stack
# Best-effort: a missing monitoring namespace or component only warns/logs;
# this check always returns 0.
validate_monitoring() {
    log_info "Validating monitoring stack in namespace: $MONITORING_NAMESPACE"
    if ! validate_namespace "$MONITORING_NAMESPACE"; then
        log_warning "Monitoring namespace not found, skipping monitoring validation"
        return 0
    fi
    # Check Prometheus
    if kubectl get deployment prometheus-server -n "$MONITORING_NAMESPACE" &> /dev/null; then
        if kubectl wait --for=condition=available --timeout=60s deployment/prometheus-server -n "$MONITORING_NAMESPACE" &> /dev/null; then
            log_success "Prometheus is running"
        else
            log_error "Prometheus is not ready"
        fi
    else
        log_warning "Prometheus deployment not found"
    fi
    # Check Grafana
    if kubectl get deployment grafana -n "$MONITORING_NAMESPACE" &> /dev/null; then
        if kubectl wait --for=condition=available --timeout=60s deployment/grafana -n "$MONITORING_NAMESPACE" &> /dev/null; then
            log_success "Grafana is running"
        else
            log_error "Grafana is not ready"
        fi
    else
        log_warning "Grafana deployment not found"
    fi
    return 0
}
# Validate logging stack
# Checks the Fluentd DaemonSet in kube-system; warnings only, never fails.
validate_logging() {
    log_info "Validating logging stack..."
    # Check Fluentd DaemonSet
    if kubectl get daemonset fluentd -n kube-system &> /dev/null; then
        local desired
        desired=$(kubectl get daemonset fluentd -n kube-system -o jsonpath='{.status.desiredNumberScheduled}')
        local ready
        ready=$(kubectl get daemonset fluentd -n kube-system -o jsonpath='{.status.numberReady}')
        if [ "$desired" = "$ready" ]; then
            log_success "Fluentd DaemonSet is ready ($ready/$desired nodes)"
        else
            log_warning "Fluentd DaemonSet has $ready/$desired pods ready"
        fi
    else
        log_warning "Fluentd DaemonSet not found"
    fi
    return 0
}
# Check resource usage
# Purely informational; tolerates an absent metrics server.
check_resource_usage() {
    log_info "Checking resource usage..."
    # Check node resource usage
    log_info "Node resource usage:"
    kubectl top nodes 2>/dev/null || log_warning "Metrics server not available for node metrics"
    # Check pod resource usage
    log_info "Pod resource usage in namespace $NAMESPACE:"
    kubectl top pods -n "$NAMESPACE" 2>/dev/null || log_warning "Metrics server not available for pod metrics"
    return 0
}
# Generate validation report
# $1 - total number of checks run
# $2 - number of failed checks
# Returns 0 only when every check passed.
generate_report() {
    local total_checks=$1
    local failed_checks=$2
    local passed_checks=$((total_checks - failed_checks))
    echo ""
    log_info "=== Deployment Validation Report ==="
    echo "Total checks: $total_checks"
    echo "Passed: $passed_checks"
    echo "Failed: $failed_checks"
    if [ $failed_checks -eq 0 ]; then
        log_success "All validation checks passed! 🎉"
        return 0
    else
        log_error "Some validation checks failed. Please review the output above."
        return 1
    fi
}
# Main validation function
# Runs every check in sequence (eval is required because some entries carry
# arguments), counts failures, and exits via generate_report's status.
main() {
    log_info "Starting WiFi-DensePose deployment validation..."
    local total_checks=0
    local failed_checks=0
    # Run validation checks
    checks=(
        "check_kubectl"
        "validate_namespace $NAMESPACE"
        "validate_deployments"
        "validate_services"
        "validate_ingress"
        "validate_config"
        "validate_hpa"
        "test_health_endpoints"
        "validate_monitoring"
        "validate_logging"
        "check_resource_usage"
    )
    for check in "${checks[@]}"; do
        total_checks=$((total_checks + 1))
        echo ""
        if ! eval "$check"; then
            failed_checks=$((failed_checks + 1))
        fi
    done
    # Generate final report
    generate_report $total_checks $failed_checks
}
# Run main function
main "$@"

View File

@@ -1,458 +0,0 @@
#!/bin/bash
# WiFi-DensePose Integration Validation Script
# This script validates the complete system integration
set -e  # Exit on any error
# Colors for output (ANSI escape sequences, rendered by `echo -e` below)
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
NC='\033[0m' # No Color
# Configuration
# SCRIPT_DIR: absolute path of the directory containing this script.
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
# PROJECT_ROOT: repository root (parent of the scripts directory).
PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
# VENV_PATH: virtual environment used for every Python invocation below.
VENV_PATH="${PROJECT_ROOT}/.venv"
# TEST_DB_PATH: throwaway sqlite database; removed by cleanup().
TEST_DB_PATH="${PROJECT_ROOT}/test_integration.db"
# LOG_FILE: every log/success/warning/error line is also appended here.
LOG_FILE="${PROJECT_ROOT}/integration_validation.log"
# --- Logging helpers ---

# Timestamped informational message, echoed and appended to the log file.
log() {
    local stamp
    stamp="$(date +'%Y-%m-%d %H:%M:%S')"
    echo -e "${BLUE}[${stamp}]${NC} $1" | tee -a "$LOG_FILE"
}
# Green success marker, mirrored to the log file.
success() {
    printf '%b\n' "${GREEN}✅ $1${NC}" | tee -a "$LOG_FILE"
}
# Yellow warning marker, mirrored to the log file.
warning() {
    printf '%b\n' "${YELLOW}⚠️ $1${NC}" | tee -a "$LOG_FILE"
}
# Red error marker, mirrored to the log file (does not exit by itself).
error() {
    printf '%b\n' "${RED}❌ $1${NC}" | tee -a "$LOG_FILE"
}
# Release all test resources; safe to call repeatedly (runs on the EXIT trap).
cleanup() {
    log "Cleaning up test resources..."

    # Stop any running servers. NOTE(review): pkill -f matches the full
    # command line, so these patterns could match unrelated processes (or
    # even this script, if its path contains "wifi-densepose") — confirm.
    pkill -f "wifi-densepose" || true
    pkill -f "uvicorn.*src.app" || true

    # Remove the test database. Use an if-statement rather than the old
    # `[ -f ... ] && rm -f ...`: under `set -e` that && list returns 1 when
    # the file is absent, which aborts the EXIT trap early (skipping the
    # log cleanup and success message) and can turn a successful run into
    # a failing exit status.
    if [ -f "$TEST_DB_PATH" ]; then
        rm -f "$TEST_DB_PATH"
    fi

    # Remove test logs (best effort).
    find "$PROJECT_ROOT" -name "*.log" -path "*/test*" -delete 2>/dev/null || true

    success "Cleanup completed"
}
# Verify the local toolchain: Python version, virtualenv, dependencies.
# Side effect: activates the virtualenv for the remainder of the script.
check_prerequisites() {
    log "Checking prerequisites..."

    # Require Python >= 3.9. Compare version numbers numerically instead of
    # grepping `python3 --version`: the old regex (Python 3\.(9|10|11|12))
    # wrongly rejected valid newer interpreters such as 3.13+.
    if ! python3 -c 'import sys; sys.exit(0 if sys.version_info >= (3, 9) else 1)'; then
        error "Python 3.9+ is required"
        exit 1
    fi
    success "Python version check passed"

    # Create the virtual environment on first run.
    if [ ! -d "$VENV_PATH" ]; then
        warning "Virtual environment not found, creating one..."
        python3 -m venv "$VENV_PATH"
    fi
    success "Virtual environment check passed"

    # Activate virtual environment for all subsequent python/pip calls.
    source "$VENV_PATH/bin/activate"

    # Install project dependencies if they are missing (fastapi is used
    # as the sentinel package).
    if ! pip list | grep -q "fastapi"; then
        warning "Dependencies not installed, installing..."
        pip install -e ".[dev]"
    fi
    success "Dependencies check passed"
}
# Ensure every required file and directory of the package exists.
# Exits with status 1 on the first missing item.
validate_package_structure() {
    log "Validating package structure..."

    local item

    # Top-level application files that must be present.
    local -a required_files=(
        "src/__init__.py"
        "src/main.py"
        "src/app.py"
        "src/config.py"
        "src/logger.py"
        "src/cli.py"
        "pyproject.toml"
        "setup.py"
        "MANIFEST.in"
    )
    for item in "${required_files[@]}"; do
        if [ ! -f "$PROJECT_ROOT/$item" ]; then
            error "Required file missing: $item"
            exit 1
        fi
    done
    success "Package structure validation passed"

    # Package sub-directories that must be present.
    local -a required_dirs=(
        "src/config"
        "src/core"
        "src/api"
        "src/services"
        "src/middleware"
        "src/database"
        "src/tasks"
        "src/commands"
        "tests/unit"
        "tests/integration"
    )
    for item in "${required_dirs[@]}"; do
        if [ ! -d "$PROJECT_ROOT/$item" ]; then
            error "Required directory missing: $item"
            exit 1
        fi
    done
    success "Directory structure validation passed"
}
# Verify that the package and each of its core modules can be imported
# inside the virtualenv. Exits with status 1 on the first failure.
validate_imports() {
    log "Validating Python imports..."
    cd "$PROJECT_ROOT"
    source "$VENV_PATH/bin/activate"

    # The top-level package must import and expose a version.
    if ! python -c "import src; print(f'Package version: {src.__version__}')"; then
        error "Failed to import main package"
        exit 1
    fi
    success "Main package import passed"

    # Every core module must be importable on its own.
    local -a core_modules=(
        "src.app"
        "src.config.settings"
        "src.logger"
        "src.cli"
        "src.core.csi_processor"
        "src.core.phase_sanitizer"
        "src.core.pose_estimator"
        "src.core.router_interface"
        "src.services.orchestrator"
        "src.database.connection"
        "src.database.models"
    )
    local module
    for module in "${core_modules[@]}"; do
        if ! python -c "import $module" 2>/dev/null; then
            error "Failed to import module: $module"
            exit 1
        fi
    done
    success "Core modules import passed"
}
# Load the application settings inside the venv and print the key values.
# Exits with status 1 if settings cannot be constructed.
validate_configuration() {
log "Validating configuration..."
cd "$PROJECT_ROOT"
source "$VENV_PATH/bin/activate"
# Test configuration loading end-to-end via the real get_settings() factory.
if ! python -c "
from src.config.settings import get_settings
settings = get_settings()
print(f'Environment: {settings.environment}')
print(f'Debug: {settings.debug}')
print(f'API Version: {settings.api_version}')
"; then
error "Configuration validation failed"
exit 1
fi
success "Configuration validation passed"
}
# Exercise the async database layer against a throwaway sqlite file:
# initialize, test the connection, read connection stats, close.
# The embedded script overrides database_url, so no real database is touched.
validate_database() {
log "Validating database integration..."
cd "$PROJECT_ROOT"
source "$VENV_PATH/bin/activate"
# Test database connection and models via the real manager factory.
if ! python -c "
import asyncio
from src.config.settings import get_settings
from src.database.connection import get_database_manager
async def test_db():
settings = get_settings()
settings.database_url = 'sqlite+aiosqlite:///test_integration.db'
db_manager = get_database_manager(settings)
await db_manager.initialize()
await db_manager.test_connection()
# Test connection stats
stats = await db_manager.get_connection_stats()
print(f'Database connected: {stats[\"database\"][\"connected\"]}')
await db_manager.close_all_connections()
print('Database validation passed')
asyncio.run(test_db())
"; then
error "Database validation failed"
exit 1
fi
success "Database validation passed"
}
# Boot the API server and smoke-test its core HTTP endpoints.
# Exits with status 1 (after killing the server) on any failure.
validate_api_endpoints() {
    log "Validating API endpoints..."
    cd "$PROJECT_ROOT"
    source "$VENV_PATH/bin/activate"

    # Run the app against an isolated sqlite database in test mode.
    export WIFI_DENSEPOSE_ENVIRONMENT=test
    export WIFI_DENSEPOSE_DATABASE_URL="sqlite+aiosqlite:///test_integration.db"
    python -m uvicorn src.app:app --host 127.0.0.1 --port 8888 --log-level error &
    local server_pid=$!

    # Poll the health endpoint instead of the old fixed `sleep 5`: a fixed
    # delay is flaky on slow machines (server not up yet) and wasteful on
    # fast ones. Allow up to 30 seconds for startup.
    local waited=0
    until curl -s -f "http://127.0.0.1:8888/health" > /dev/null 2>&1; do
        if [ "$waited" -ge 30 ]; then
            error "Server did not become ready within 30 seconds"
            kill "$server_pid" 2>/dev/null || true
            exit 1
        fi
        sleep 1
        waited=$((waited + 1))
    done

    # Each endpoint must answer with a success status (curl -f).
    local -a endpoints=(
        "http://127.0.0.1:8888/health"
        "http://127.0.0.1:8888/metrics"
        "http://127.0.0.1:8888/api/v1/devices"
        "http://127.0.0.1:8888/api/v1/sessions"
    )
    local endpoint
    for endpoint in "${endpoints[@]}"; do
        if ! curl -s -f "$endpoint" > /dev/null; then
            error "API endpoint failed: $endpoint"
            kill "$server_pid" 2>/dev/null || true
            exit 1
        fi
    done

    # Shut the server down and reap it so no zombie is left behind.
    kill "$server_pid" 2>/dev/null || true
    wait "$server_pid" 2>/dev/null || true
    success "API endpoints validation passed"
}
# Exercise the command-line interface: help, version, config validation.
# Exits with status 1 on the first failing command.
validate_cli() {
    log "Validating CLI interface..."
    cd "$PROJECT_ROOT"
    source "$VENV_PATH/bin/activate"

    # Help output must render without error.
    if python -m src.cli --help > /dev/null; then
        success "CLI help command passed"
    else
        error "CLI help command failed"
        exit 1
    fi

    # Version subcommand must run cleanly.
    if python -m src.cli version > /dev/null; then
        success "CLI version command passed"
    else
        error "CLI version command failed"
        exit 1
    fi

    # Config validation needs the test environment variables in place.
    export WIFI_DENSEPOSE_ENVIRONMENT=test
    export WIFI_DENSEPOSE_DATABASE_URL="sqlite+aiosqlite:///test_integration.db"
    if python -m src.cli config validate > /dev/null; then
        success "CLI config validation passed"
    else
        error "CLI config validation failed"
        exit 1
    fi
}
# Instantiate each background-task manager (cleanup, monitoring, backup)
# and confirm its get_stats() output contains a "manager" entry.
# The embedded script points database_url at the throwaway sqlite file.
validate_background_tasks() {
log "Validating background tasks..."
cd "$PROJECT_ROOT"
source "$VENV_PATH/bin/activate"
# Test task managers via their real factory functions.
if ! python -c "
import asyncio
from src.config.settings import get_settings
from src.tasks.cleanup import get_cleanup_manager
from src.tasks.monitoring import get_monitoring_manager
from src.tasks.backup import get_backup_manager
async def test_tasks():
settings = get_settings()
settings.database_url = 'sqlite+aiosqlite:///test_integration.db'
# Test cleanup manager
cleanup_manager = get_cleanup_manager(settings)
cleanup_stats = cleanup_manager.get_stats()
print(f'Cleanup manager initialized: {\"manager\" in cleanup_stats}')
# Test monitoring manager
monitoring_manager = get_monitoring_manager(settings)
monitoring_stats = monitoring_manager.get_stats()
print(f'Monitoring manager initialized: {\"manager\" in monitoring_stats}')
# Test backup manager
backup_manager = get_backup_manager(settings)
backup_stats = backup_manager.get_stats()
print(f'Backup manager initialized: {\"manager\" in backup_stats}')
print('Background tasks validation passed')
asyncio.run(test_tasks())
"; then
error "Background tasks validation failed"
exit 1
fi
success "Background tasks validation passed"
}
# Run the pytest integration suite against the isolated test environment.
# Exits with status 1 if any test fails.
run_integration_tests() {
    log "Running integration tests..."
    cd "$PROJECT_ROOT"
    source "$VENV_PATH/bin/activate"

    # Point the suite at the throwaway sqlite database.
    export WIFI_DENSEPOSE_ENVIRONMENT=test
    export WIFI_DENSEPOSE_DATABASE_URL="sqlite+aiosqlite:///test_integration.db"

    if python -m pytest tests/integration/ -v --tb=short; then
        success "Integration tests passed"
    else
        error "Integration tests failed"
        exit 1
    fi
}
# Build the distribution artifacts and verify them with twine.
# Exits with status 1 if the build or the check fails.
validate_package_build() {
    log "Validating package build..."
    cd "$PROJECT_ROOT"
    source "$VENV_PATH/bin/activate"

    # Build tooling is not part of the runtime dependencies.
    pip install build twine

    # Remove stale artifacts before building: `twine check dist/*` would
    # otherwise also inspect leftovers from previous runs and could pass
    # or fail on the wrong files.
    rm -rf build/ dist/ *.egg-info/

    if ! python -m build; then
        error "Package build failed"
        exit 1
    fi
    success "Package build passed"

    # Verify the freshly built wheel/sdist metadata.
    if ! python -m twine check dist/*; then
        error "Package check failed"
        exit 1
    fi
    success "Package check passed"

    # Clean up build artifacts.
    rm -rf build/ dist/ *.egg-info/
}
# Write integration_report.md at the project root. The heredoc delimiter
# is unquoted, so the $(...) command substitutions inside the report body
# (date, python version, package version, environment) are expanded now,
# at generation time; backticks are escaped so they survive as markdown.
generate_report() {
log "Generating integration report..."
cat > "$PROJECT_ROOT/integration_report.md" << EOF
# WiFi-DensePose Integration Validation Report
**Date:** $(date)
**Status:** ✅ PASSED
## Validation Results
### Prerequisites
- ✅ Python version check
- ✅ Virtual environment setup
- ✅ Dependencies installation
### Package Structure
- ✅ Required files present
- ✅ Directory structure valid
- ✅ Python imports working
### Core Components
- ✅ Configuration management
- ✅ Database integration
- ✅ API endpoints
- ✅ CLI interface
- ✅ Background tasks
### Testing
- ✅ Integration tests passed
- ✅ Package build successful
## System Information
**Python Version:** $(python --version)
**Package Version:** $(python -c "import src; print(src.__version__)")
**Environment:** $(python -c "from src.config.settings import get_settings; print(get_settings().environment)")
## Next Steps
The WiFi-DensePose system has been successfully integrated and validated.
You can now:
1. Start the server: \`wifi-densepose start\`
2. Check status: \`wifi-densepose status\`
3. View configuration: \`wifi-densepose config show\`
4. Run tests: \`pytest tests/\`
For more information, see the documentation in the \`docs/\` directory.
EOF
success "Integration report generated: integration_report.md"
}
# Orchestrate the full validation pipeline from prerequisites to report.
main() {
    log "Starting WiFi-DensePose integration validation..."

    # Always release test resources, even when a step aborts the script.
    trap cleanup EXIT

    # Each step exits the script itself on failure (and `set -e` backstops
    # anything that slips through), so a plain sequential loop suffices.
    local -a steps=(
        check_prerequisites
        validate_package_structure
        validate_imports
        validate_configuration
        validate_database
        validate_api_endpoints
        validate_cli
        validate_background_tasks
        run_integration_tests
        validate_package_build
        generate_report
    )
    local step
    for step in "${steps[@]}"; do
        "$step"
    done

    success "🎉 All integration validations passed!"
    log "Integration validation completed successfully"
}
# Entry point: forward all CLI arguments to main.
main "$@"