feat: Complete Rust port of WiFi-DensePose with modular crates

Major changes:
- Organized Python v1 implementation into v1/ subdirectory
- Created Rust workspace with 9 modular crates:
  - wifi-densepose-core: Core types, traits, errors
  - wifi-densepose-signal: CSI processing, phase sanitization, FFT
  - wifi-densepose-nn: Neural network inference (ONNX/Candle/tch)
  - wifi-densepose-api: Axum-based REST/WebSocket API
  - wifi-densepose-db: SQLx database layer
  - wifi-densepose-config: Configuration management
  - wifi-densepose-hardware: Hardware abstraction
  - wifi-densepose-wasm: WebAssembly bindings
  - wifi-densepose-cli: Command-line interface

Documentation:
- ADR-001: Workspace structure
- ADR-002: Signal processing library selection
- ADR-003: Neural network inference strategy
- DDD domain model with bounded contexts

Testing:
- 69 tests passing across all crates
- Signal processing: 45 tests
- Neural networks: 21 tests
- Core: 3 doc tests

Performance targets:
- 10x faster CSI processing (~0.5ms vs ~5ms)
- 5x lower memory usage (~100MB vs ~500MB)
- WASM support for browser deployment
This commit is contained in:
Claude
2026-01-13 03:11:16 +00:00
parent 5101504b72
commit 6ed69a3d48
427 changed files with 90993 additions and 0 deletions

View File

@@ -0,0 +1,640 @@
"""
Database connection management for WiFi-DensePose API
"""
import asyncio
import logging
from typing import Optional, Dict, Any, AsyncGenerator
from contextlib import asynccontextmanager
from datetime import datetime
from sqlalchemy import create_engine, event, pool, text
from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker
from sqlalchemy.orm import sessionmaker, Session
from sqlalchemy.pool import QueuePool, NullPool
from sqlalchemy.exc import SQLAlchemyError, DisconnectionError
import redis.asyncio as redis
from redis.exceptions import ConnectionError as RedisConnectionError
from src.config.settings import Settings
from src.logger import get_logger
logger = get_logger(__name__)
class DatabaseConnectionError(Exception):
    """Raised when a backing store (PostgreSQL, SQLite fallback, or Redis)
    cannot be reached, initialized, or passes its connectivity test."""
class DatabaseManager:
    """Owns every data-store connection used by the API.

    Responsibilities:
      * Build async + sync SQLAlchemy engines for PostgreSQL, falling back
        to SQLite when ``settings.enable_database_failsafe`` is set.
      * Build an optional Redis client when ``settings.redis_enabled``.
      * Hand out sessions via context managers, and report health and
        connection statistics.

    ``initialize()`` is idempotent and is invoked lazily by the session
    helpers, so callers may use sessions without initializing explicitly.
    """

    def __init__(self, settings: Settings):
        self.settings = settings
        # Engines, session factories and the Redis client are created
        # lazily by initialize(); None means "not yet initialized".
        self._async_engine = None
        self._sync_engine = None
        self._async_session_factory = None
        self._sync_session_factory = None
        self._redis_client = None
        self._initialized = False
        # Pool tuning is snapshotted from settings at construction time.
        self._connection_pool_size = settings.db_pool_size
        self._max_overflow = settings.db_max_overflow
        self._pool_timeout = settings.db_pool_timeout
        self._pool_recycle = settings.db_pool_recycle

    async def initialize(self):
        """Initialize database connections (idempotent).

        Raises:
            DatabaseConnectionError: if PostgreSQL/SQLite initialization
                fails, or a *required* Redis connection cannot be made.
        """
        if self._initialized:
            return
        logger.info("Initializing database connections")
        try:
            # Initialize PostgreSQL connections
            await self._initialize_postgresql()
            # Initialize Redis connection
            await self._initialize_redis()
            self._initialized = True
            logger.info("Database connections initialized successfully")
        except Exception as e:
            logger.error(f"Failed to initialize database connections: {e}")
            raise DatabaseConnectionError(f"Database initialization failed: {e}")

    async def _initialize_postgresql(self):
        """Initialize PostgreSQL connections with SQLite failsafe.

        Tries PostgreSQL first; on failure either raises (failsafe
        disabled) or falls back to a local SQLite database.
        """
        postgresql_failed = False
        try:
            # Try PostgreSQL first
            await self._initialize_postgresql_primary()
            logger.info("PostgreSQL connections initialized")
            return
        except Exception as e:
            postgresql_failed = True
            logger.error(f"PostgreSQL initialization failed: {e}")
            if not self.settings.enable_database_failsafe:
                raise DatabaseConnectionError(f"PostgreSQL connection failed and failsafe disabled: {e}")
            logger.warning("Falling back to SQLite database")
        # Fallback to SQLite if PostgreSQL failed and failsafe is enabled
        if postgresql_failed and self.settings.enable_database_failsafe:
            await self._initialize_sqlite_fallback()
            logger.info("SQLite fallback database initialized")

    async def _initialize_postgresql_primary(self):
        """Initialize primary PostgreSQL connections.

        Builds both an asyncpg-backed async engine (request path) and a
        plain sync engine (migrations/admin tasks), then verifies
        connectivity with a test query.

        Raises:
            ValueError: if no usable connection parameters are configured.
        """
        # Build database URL: prefer an explicit database_url, otherwise
        # assemble one from the individual host/name/user settings.
        if self.settings.database_url and "postgresql" in self.settings.database_url:
            db_url = self.settings.database_url
            async_db_url = self.settings.database_url.replace("postgresql://", "postgresql+asyncpg://")
        elif self.settings.db_host and self.settings.db_name and self.settings.db_user:
            db_url = (
                f"postgresql://{self.settings.db_user}:{self.settings.db_password}"
                f"@{self.settings.db_host}:{self.settings.db_port}/{self.settings.db_name}"
            )
            async_db_url = (
                f"postgresql+asyncpg://{self.settings.db_user}:{self.settings.db_password}"
                f"@{self.settings.db_host}:{self.settings.db_port}/{self.settings.db_name}"
            )
        else:
            raise ValueError("PostgreSQL connection parameters not configured")
        # Create async engine (don't specify poolclass for async engines)
        self._async_engine = create_async_engine(
            async_db_url,
            pool_size=self._connection_pool_size,
            max_overflow=self._max_overflow,
            pool_timeout=self._pool_timeout,
            pool_recycle=self._pool_recycle,
            pool_pre_ping=True,  # validate connections before handing them out
            echo=self.settings.db_echo,
            future=True,
        )
        # Create sync engine for migrations and admin tasks
        # (half-sized pool: the sync path carries far less traffic).
        self._sync_engine = create_engine(
            db_url,
            poolclass=QueuePool,
            pool_size=max(2, self._connection_pool_size // 2),
            max_overflow=self._max_overflow // 2,
            pool_timeout=self._pool_timeout,
            pool_recycle=self._pool_recycle,
            pool_pre_ping=True,
            echo=self.settings.db_echo,
            future=True,
        )
        # Create session factories; expire_on_commit=False keeps ORM
        # objects usable after the surrounding context manager commits.
        self._async_session_factory = async_sessionmaker(
            self._async_engine,
            class_=AsyncSession,
            expire_on_commit=False,
        )
        self._sync_session_factory = sessionmaker(
            self._sync_engine,
            expire_on_commit=False,
        )
        # Add connection event listeners
        self._setup_connection_events()
        # Test connections
        await self._test_postgresql_connection()

    async def _initialize_sqlite_fallback(self):
        """Initialize SQLite fallback database at ``settings.sqlite_fallback_path``."""
        import os
        # Ensure directory exists
        sqlite_path = self.settings.sqlite_fallback_path
        os.makedirs(os.path.dirname(sqlite_path), exist_ok=True)
        # Build SQLite URLs (aiosqlite driver for the async engine)
        db_url = f"sqlite:///{sqlite_path}"
        async_db_url = f"sqlite+aiosqlite:///{sqlite_path}"
        # Create async engine for SQLite
        self._async_engine = create_async_engine(
            async_db_url,
            echo=self.settings.db_echo,
            future=True,
        )
        # Create sync engine for SQLite
        self._sync_engine = create_engine(
            db_url,
            poolclass=NullPool,  # SQLite doesn't need connection pooling
            echo=self.settings.db_echo,
            future=True,
        )
        # Create session factories
        self._async_session_factory = async_sessionmaker(
            self._async_engine,
            class_=AsyncSession,
            expire_on_commit=False,
        )
        self._sync_session_factory = sessionmaker(
            self._sync_engine,
            expire_on_commit=False,
        )
        # Add connection event listeners
        self._setup_connection_events()
        # Test SQLite connection
        await self._test_sqlite_connection()

    async def _test_sqlite_connection(self):
        """Test SQLite connection with a trivial SELECT.

        Raises:
            DatabaseConnectionError: if the test query fails.
        """
        try:
            async with self._async_engine.begin() as conn:
                result = await conn.execute(text("SELECT 1"))
                result.fetchone()  # Don't await this - fetchone() is not async
            logger.debug("SQLite connection test successful")
        except Exception as e:
            logger.error(f"SQLite connection test failed: {e}")
            raise DatabaseConnectionError(f"SQLite connection test failed: {e}")

    async def _initialize_redis(self):
        """Initialize Redis connection with failsafe.

        No-op when Redis is disabled. On failure: raises if Redis is
        required, otherwise logs and continues with ``_redis_client=None``.
        """
        if not self.settings.redis_enabled:
            logger.info("Redis disabled, skipping initialization")
            return
        try:
            # Build Redis URL: explicit URL wins over host/port/db settings.
            if self.settings.redis_url:
                redis_url = self.settings.redis_url
            else:
                redis_url = (
                    f"redis://{self.settings.redis_host}:{self.settings.redis_port}"
                    f"/{self.settings.redis_db}"
                )
            # Create Redis client
            self._redis_client = redis.from_url(
                redis_url,
                password=self.settings.redis_password,
                encoding="utf-8",
                decode_responses=True,
                max_connections=self.settings.redis_max_connections,
                retry_on_timeout=True,
                socket_timeout=self.settings.redis_socket_timeout,
                socket_connect_timeout=self.settings.redis_connect_timeout,
            )
            # Test Redis connection
            await self._test_redis_connection()
            logger.info("Redis connection initialized")
        except Exception as e:
            logger.error(f"Failed to initialize Redis: {e}")
            if self.settings.redis_required:
                raise DatabaseConnectionError(f"Redis connection failed and is required: {e}")
            elif self.settings.enable_redis_failsafe:
                logger.warning("Redis initialization failed, continuing without Redis (failsafe enabled)")
                self._redis_client = None
            else:
                logger.warning("Redis initialization failed but not required, continuing without Redis")
                self._redis_client = None

    def _setup_connection_events(self):
        """Setup database connection event listeners.

        NOTE(review): listeners are registered on the *sync* engine only;
        the async engine's connections are not covered — confirm whether
        that is intended.
        """
        @event.listens_for(self._sync_engine, "connect")
        def set_sqlite_pragma(dbapi_connection, connection_record):
            """Set database-specific settings on connection."""
            # SQLite disables foreign-key enforcement by default; turn it on.
            if "sqlite" in str(self._sync_engine.url):
                cursor = dbapi_connection.cursor()
                cursor.execute("PRAGMA foreign_keys=ON")
                cursor.close()

        @event.listens_for(self._sync_engine, "checkout")
        def receive_checkout(dbapi_connection, connection_record, connection_proxy):
            """Log connection checkout."""
            logger.debug("Database connection checked out")

        @event.listens_for(self._sync_engine, "checkin")
        def receive_checkin(dbapi_connection, connection_record):
            """Log connection checkin."""
            logger.debug("Database connection checked in")

        @event.listens_for(self._sync_engine, "invalidate")
        def receive_invalidate(dbapi_connection, connection_record, exception):
            """Handle connection invalidation."""
            logger.warning(f"Database connection invalidated: {exception}")

    async def _test_postgresql_connection(self):
        """Test PostgreSQL connection with a trivial SELECT.

        Raises:
            DatabaseConnectionError: if the test query fails.
        """
        try:
            async with self._async_engine.begin() as conn:
                result = await conn.execute(text("SELECT 1"))
                result.fetchone()  # Don't await this - fetchone() is not async
            logger.debug("PostgreSQL connection test successful")
        except Exception as e:
            logger.error(f"PostgreSQL connection test failed: {e}")
            raise DatabaseConnectionError(f"PostgreSQL connection test failed: {e}")

    async def _test_redis_connection(self):
        """Test Redis connection with PING.

        Only raises when ``settings.redis_required`` is set; otherwise a
        failure is logged and swallowed.
        """
        if not self._redis_client:
            return
        try:
            await self._redis_client.ping()
            logger.debug("Redis connection test successful")
        except Exception as e:
            logger.error(f"Redis connection test failed: {e}")
            if self.settings.redis_required:
                raise DatabaseConnectionError(f"Redis connection test failed: {e}")

    @asynccontextmanager
    async def get_async_session(self) -> AsyncGenerator[AsyncSession, None]:
        """Get async database session.

        Commits on clean exit, rolls back (and re-raises) on error, and
        always closes the session.
        """
        if not self._initialized:
            await self.initialize()
        if not self._async_session_factory:
            raise DatabaseConnectionError("Async session factory not initialized")
        session = self._async_session_factory()
        try:
            yield session
            await session.commit()
        except Exception as e:
            await session.rollback()
            logger.error(f"Database session error: {e}")
            raise
        finally:
            await session.close()

    @asynccontextmanager
    async def get_sync_session(self) -> AsyncGenerator[Session, None]:
        """Get sync database session.

        NOTE(review): despite yielding a *sync* Session, this is an async
        context manager (so lazy initialize() can be awaited); the sync
        commit/rollback/close calls below block the event loop — confirm
        callers only use this for short admin/migration work.
        """
        if not self._initialized:
            await self.initialize()
        if not self._sync_session_factory:
            raise DatabaseConnectionError("Sync session factory not initialized")
        session = self._sync_session_factory()
        try:
            yield session
            session.commit()
        except Exception as e:
            session.rollback()
            logger.error(f"Database session error: {e}")
            raise
        finally:
            session.close()

    async def get_redis_client(self) -> Optional[redis.Redis]:
        """Get Redis client, or None when Redis is disabled/unavailable."""
        if not self._initialized:
            await self.initialize()
        return self._redis_client

    async def health_check(self) -> Dict[str, Any]:
        """Perform database health check.

        Returns:
            Dict with per-component status plus an ``overall`` value of
            ``healthy`` / ``degraded`` (working but on a failsafe) /
            ``unhealthy``.
        """
        health_status = {
            "database": {"status": "unknown", "details": {}},
            "redis": {"status": "unknown", "details": {}},
            "overall": "unknown"
        }
        # Check Database (PostgreSQL or SQLite)
        try:
            # NOTE(review): datetime.utcnow() is deprecated since Python
            # 3.12; consider datetime.now(timezone.utc) for timing here.
            start_time = datetime.utcnow()
            async with self.get_async_session() as session:
                result = await session.execute(text("SELECT 1"))
                result.fetchone()  # Don't await this - fetchone() is not async
            response_time = (datetime.utcnow() - start_time).total_seconds()
            # Determine database type and status
            is_sqlite = self.is_using_sqlite_fallback()
            db_type = "sqlite_fallback" if is_sqlite else "postgresql"
            details = {
                "type": db_type,
                "response_time_ms": round(response_time * 1000, 2),
            }
            # Add pool info for PostgreSQL
            if not is_sqlite and hasattr(self._async_engine, 'pool'):
                details.update({
                    "pool_size": self._async_engine.pool.size(),
                    "checked_out": self._async_engine.pool.checkedout(),
                    "overflow": self._async_engine.pool.overflow(),
                })
            # Add failsafe info
            if is_sqlite:
                details["failsafe_active"] = True
                details["fallback_path"] = self.settings.sqlite_fallback_path
            health_status["database"] = {
                "status": "healthy",
                "details": details
            }
        except Exception as e:
            health_status["database"] = {
                "status": "unhealthy",
                "details": {"error": str(e)}
            }
        # Check Redis
        if self._redis_client:
            try:
                start_time = datetime.utcnow()
                await self._redis_client.ping()
                response_time = (datetime.utcnow() - start_time).total_seconds()
                info = await self._redis_client.info()
                health_status["redis"] = {
                    "status": "healthy",
                    "details": {
                        "response_time_ms": round(response_time * 1000, 2),
                        "connected_clients": info.get("connected_clients", 0),
                        "used_memory": info.get("used_memory_human", "unknown"),
                        "uptime": info.get("uptime_in_seconds", 0),
                    }
                }
            except Exception as e:
                health_status["redis"] = {
                    "status": "unhealthy",
                    "details": {"error": str(e)}
                }
        else:
            health_status["redis"] = {
                "status": "disabled",
                "details": {"message": "Redis not enabled"}
            }
        # Determine overall status
        database_healthy = health_status["database"]["status"] == "healthy"
        redis_healthy = (
            health_status["redis"]["status"] in ["healthy", "disabled"] or
            not self.settings.redis_required
        )
        # Check if using failsafe modes
        using_sqlite_fallback = self.is_using_sqlite_fallback()
        redis_unavailable = not self.is_redis_available() and self.settings.redis_enabled
        if database_healthy and redis_healthy:
            if using_sqlite_fallback or redis_unavailable:
                health_status["overall"] = "degraded"  # Working but using failsafe
            else:
                health_status["overall"] = "healthy"
        elif database_healthy:
            health_status["overall"] = "degraded"
        else:
            health_status["overall"] = "unhealthy"
        return health_status

    async def get_connection_stats(self) -> Dict[str, Any]:
        """Get database connection statistics.

        NOTE(review): the SQL stats are reported under the "postgresql"
        key even when the SQLite fallback engine is active.
        """
        stats = {
            "postgresql": {},
            "redis": {}
        }
        # PostgreSQL stats
        if self._async_engine:
            pool = self._async_engine.pool
            stats["postgresql"] = {
                "pool_size": pool.size(),
                "checked_out": pool.checkedout(),
                "overflow": pool.overflow(),
                "checked_in": pool.checkedin(),
                "total_connections": pool.size() + pool.overflow(),
                "available_connections": pool.size() - pool.checkedout(),
            }
        # Redis stats
        if self._redis_client:
            try:
                info = await self._redis_client.info()
                stats["redis"] = {
                    "connected_clients": info.get("connected_clients", 0),
                    "blocked_clients": info.get("blocked_clients", 0),
                    "total_connections_received": info.get("total_connections_received", 0),
                    "rejected_connections": info.get("rejected_connections", 0),
                }
            except Exception as e:
                stats["redis"] = {"error": str(e)}
        return stats

    async def close_connections(self):
        """Close all database connections and mark the manager uninitialized."""
        logger.info("Closing database connections")
        # Close PostgreSQL connections
        if self._async_engine:
            await self._async_engine.dispose()
            logger.debug("Async PostgreSQL engine disposed")
        if self._sync_engine:
            self._sync_engine.dispose()
            logger.debug("Sync PostgreSQL engine disposed")
        # Close Redis connection
        if self._redis_client:
            await self._redis_client.close()
            logger.debug("Redis connection closed")
        self._initialized = False
        logger.info("Database connections closed")

    def is_using_sqlite_fallback(self) -> bool:
        """Check if currently using SQLite fallback database."""
        if not self._async_engine:
            return False
        # Engine URL contains "sqlite" only when the fallback was activated.
        return "sqlite" in str(self._async_engine.url)

    def is_redis_available(self) -> bool:
        """Check if Redis is available (client was created successfully)."""
        return self._redis_client is not None

    async def test_connection(self) -> bool:
        """Test database connection for CLI validation.

        Returns:
            True when the SQL store (and Redis, if enabled) respond;
            False on any failure (the error is logged, never raised).
        """
        try:
            if not self._initialized:
                await self.initialize()
            # Test database connection (PostgreSQL or SQLite)
            async with self.get_async_session() as session:
                result = await session.execute(text("SELECT 1"))
                result.fetchone()  # Don't await this - fetchone() is not async
            # Test Redis connection if enabled
            if self._redis_client:
                await self._redis_client.ping()
            return True
        except Exception as e:
            logger.error(f"Database connection test failed: {e}")
            return False

    async def reset_connections(self):
        """Reset all database connections (full close + re-initialize)."""
        logger.info("Resetting database connections")
        await self.close_connections()
        await self.initialize()
        logger.info("Database connections reset")
# Process-wide singleton; built lazily on first request.
_db_manager: Optional[DatabaseManager] = None


def get_database_manager(settings: Settings) -> DatabaseManager:
    """Return the shared DatabaseManager, creating it on first call.

    Note: *settings* is only consulted when the singleton is first built;
    subsequent calls return the existing instance unchanged.
    """
    global _db_manager
    if _db_manager is not None:
        return _db_manager
    _db_manager = DatabaseManager(settings)
    return _db_manager
async def get_async_session(settings: Settings) -> AsyncGenerator[AsyncSession, None]:
    """Dependency that yields a managed async database session.

    The session is committed/rolled-back/closed by the manager's
    context manager; callers just use the yielded session.
    """
    manager = get_database_manager(settings)
    async with manager.get_async_session() as db_session:
        yield db_session
async def get_redis_client(settings: Settings) -> Optional[redis.Redis]:
    """Dependency returning the shared Redis client (None when Redis is
    disabled or its failsafe swallowed an initialization error)."""
    return await get_database_manager(settings).get_redis_client()
class DatabaseHealthCheck:
    """Probes PostgreSQL and Redis through a DatabaseManager and reports
    per-component plus overall status dictionaries."""

    def __init__(self, db_manager: DatabaseManager):
        self.db_manager = db_manager

    async def check_postgresql(self) -> Dict[str, Any]:
        """Run ``SELECT version()`` and report status, version and latency."""
        try:
            started = datetime.utcnow()
            async with self.db_manager.get_async_session() as session:
                outcome = await session.execute(text("SELECT version()"))
                version = outcome.fetchone()[0]  # fetchone() is synchronous
                elapsed = (datetime.utcnow() - started).total_seconds()
            return {
                "status": "healthy",
                "version": version,
                "response_time_ms": round(elapsed * 1000, 2),
            }
        except Exception as e:
            return {
                "status": "unhealthy",
                "error": str(e),
            }

    async def check_redis(self) -> Dict[str, Any]:
        """PING Redis and report status, server version and latency."""
        client = await self.db_manager.get_redis_client()
        if not client:
            return {
                "status": "disabled",
                "message": "Redis not configured"
            }
        try:
            started = datetime.utcnow()
            pong = await client.ping()
            elapsed = (datetime.utcnow() - started).total_seconds()
            server_info = await client.info("server")
            return {
                "status": "healthy",
                "ping": pong,
                "version": server_info.get("redis_version", "unknown"),
                "response_time_ms": round(elapsed * 1000, 2),
            }
        except Exception as e:
            return {
                "status": "unhealthy",
                "error": str(e),
            }

    async def full_health_check(self) -> Dict[str, Any]:
        """Combine both probes into a single timestamped report.

        Overall status: "unhealthy" when SQL is down, "degraded" when
        only Redis is down, otherwise "healthy".
        """
        pg_report = await self.check_postgresql()
        redis_report = await self.check_redis()
        if pg_report["status"] != "healthy":
            overall = "unhealthy"
        elif redis_report["status"] == "unhealthy":
            overall = "degraded"
        else:
            overall = "healthy"
        return {
            "overall_status": overall,
            "postgresql": pg_report,
            "redis": redis_report,
            "timestamp": datetime.utcnow().isoformat(),
        }

View File

@@ -0,0 +1,370 @@
"""
Initial database migration for WiFi-DensePose API
Revision ID: 001_initial
Revises:
Create Date: 2025-01-07 07:58:00.000000
"""
from alembic import op
import sqlalchemy as sa
from sqlalchemy.dialects import postgresql
# revision identifiers, used by Alembic to order and link migrations.
revision = '001_initial'  # this migration's unique id
down_revision = None      # first migration in the chain: no parent
branch_labels = None
depends_on = None
def upgrade():
    """Create initial database schema.

    Creates the six core tables (devices, sessions, csi_data,
    pose_detections, system_metrics, audit_logs) with their indexes and
    constraints, installs ``updated_at`` triggers, and seeds demo data.
    Uses PostgreSQL-specific types (UUID, ARRAY) and plpgsql — this
    migration targets PostgreSQL only.
    """
    # Create devices table: one row per physical WiFi device/router.
    op.create_table(
        'devices',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('device_type', sa.String(length=50), nullable=False),
        sa.Column('mac_address', sa.String(length=17), nullable=False),
        sa.Column('ip_address', sa.String(length=45), nullable=True),  # 45 chars fits IPv6
        sa.Column('status', sa.String(length=20), nullable=False),
        sa.Column('firmware_version', sa.String(length=50), nullable=True),
        sa.Column('hardware_version', sa.String(length=50), nullable=True),
        sa.Column('location_name', sa.String(length=255), nullable=True),
        sa.Column('room_id', sa.String(length=100), nullable=True),
        sa.Column('coordinates_x', sa.Float(), nullable=True),
        sa.Column('coordinates_y', sa.Float(), nullable=True),
        sa.Column('coordinates_z', sa.Float(), nullable=True),
        sa.Column('config', sa.JSON(), nullable=True),
        sa.Column('capabilities', postgresql.ARRAY(sa.String()), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('tags', postgresql.ARRAY(sa.String()), nullable=True),
        sa.CheckConstraint("status IN ('active', 'inactive', 'maintenance', 'error')", name='check_device_status'),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('mac_address')
    )
    # Create indexes for devices table
    op.create_index('idx_device_mac_address', 'devices', ['mac_address'])
    op.create_index('idx_device_status', 'devices', ['status'])
    op.create_index('idx_device_type', 'devices', ['device_type'])
    # Create sessions table: a recording session bound to one device.
    op.create_table(
        'sessions',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('started_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('ended_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('duration_seconds', sa.Integer(), nullable=True),
        sa.Column('status', sa.String(length=20), nullable=False),
        sa.Column('config', sa.JSON(), nullable=True),
        sa.Column('device_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('tags', postgresql.ARRAY(sa.String()), nullable=True),
        sa.Column('metadata', sa.JSON(), nullable=True),
        sa.Column('total_frames', sa.Integer(), nullable=False),
        sa.Column('processed_frames', sa.Integer(), nullable=False),
        sa.Column('error_count', sa.Integer(), nullable=False),
        sa.CheckConstraint("status IN ('active', 'completed', 'failed', 'cancelled')", name='check_session_status'),
        sa.CheckConstraint('total_frames >= 0', name='check_total_frames_positive'),
        sa.CheckConstraint('processed_frames >= 0', name='check_processed_frames_positive'),
        sa.CheckConstraint('error_count >= 0', name='check_error_count_positive'),
        sa.ForeignKeyConstraint(['device_id'], ['devices.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # Create indexes for sessions table
    op.create_index('idx_session_device_id', 'sessions', ['device_id'])
    op.create_index('idx_session_status', 'sessions', ['status'])
    op.create_index('idx_session_started_at', 'sessions', ['started_at'])
    # Create csi_data table: raw Channel State Information frames.
    op.create_table(
        'csi_data',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('sequence_number', sa.Integer(), nullable=False),
        sa.Column('timestamp_ns', sa.BigInteger(), nullable=False),  # nanosecond capture time
        sa.Column('device_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('session_id', postgresql.UUID(as_uuid=True), nullable=True),
        sa.Column('amplitude', postgresql.ARRAY(sa.Float()), nullable=False),
        sa.Column('phase', postgresql.ARRAY(sa.Float()), nullable=False),
        sa.Column('frequency', sa.Float(), nullable=False),
        sa.Column('bandwidth', sa.Float(), nullable=False),
        sa.Column('rssi', sa.Float(), nullable=True),
        sa.Column('snr', sa.Float(), nullable=True),
        sa.Column('noise_floor', sa.Float(), nullable=True),
        sa.Column('tx_antenna', sa.Integer(), nullable=True),
        sa.Column('rx_antenna', sa.Integer(), nullable=True),
        sa.Column('num_subcarriers', sa.Integer(), nullable=False),
        sa.Column('processing_status', sa.String(length=20), nullable=False),
        sa.Column('processed_at', sa.DateTime(timezone=True), nullable=True),
        sa.Column('quality_score', sa.Float(), nullable=True),
        sa.Column('is_valid', sa.Boolean(), nullable=False),
        sa.Column('metadata', sa.JSON(), nullable=True),
        sa.CheckConstraint('frequency > 0', name='check_frequency_positive'),
        sa.CheckConstraint('bandwidth > 0', name='check_bandwidth_positive'),
        sa.CheckConstraint('num_subcarriers > 0', name='check_subcarriers_positive'),
        sa.CheckConstraint("processing_status IN ('pending', 'processing', 'completed', 'failed')", name='check_processing_status'),
        sa.ForeignKeyConstraint(['device_id'], ['devices.id'], ),
        sa.ForeignKeyConstraint(['session_id'], ['sessions.id'], ),
        sa.PrimaryKeyConstraint('id'),
        # Dedupe guard: a device cannot record two frames with the same
        # sequence number and timestamp.
        sa.UniqueConstraint('device_id', 'sequence_number', 'timestamp_ns', name='uq_csi_device_seq_time')
    )
    # Create indexes for csi_data table
    op.create_index('idx_csi_device_id', 'csi_data', ['device_id'])
    op.create_index('idx_csi_session_id', 'csi_data', ['session_id'])
    op.create_index('idx_csi_timestamp', 'csi_data', ['timestamp_ns'])
    op.create_index('idx_csi_sequence', 'csi_data', ['sequence_number'])
    op.create_index('idx_csi_processing_status', 'csi_data', ['processing_status'])
    # Create pose_detections table: inference results per frame.
    op.create_table(
        'pose_detections',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('frame_number', sa.Integer(), nullable=False),
        sa.Column('timestamp_ns', sa.BigInteger(), nullable=False),
        sa.Column('session_id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('person_count', sa.Integer(), nullable=False),
        sa.Column('keypoints', sa.JSON(), nullable=True),
        sa.Column('bounding_boxes', sa.JSON(), nullable=True),
        sa.Column('detection_confidence', sa.Float(), nullable=True),
        sa.Column('pose_confidence', sa.Float(), nullable=True),
        sa.Column('overall_confidence', sa.Float(), nullable=True),
        sa.Column('processing_time_ms', sa.Float(), nullable=True),
        sa.Column('model_version', sa.String(length=50), nullable=True),
        sa.Column('algorithm', sa.String(length=100), nullable=True),
        sa.Column('image_quality', sa.Float(), nullable=True),
        sa.Column('pose_quality', sa.Float(), nullable=True),
        sa.Column('is_valid', sa.Boolean(), nullable=False),
        sa.Column('metadata', sa.JSON(), nullable=True),
        sa.CheckConstraint('person_count >= 0', name='check_person_count_positive'),
        # Confidences are normalized probabilities in [0, 1].
        sa.CheckConstraint('detection_confidence >= 0 AND detection_confidence <= 1', name='check_detection_confidence_range'),
        sa.CheckConstraint('pose_confidence >= 0 AND pose_confidence <= 1', name='check_pose_confidence_range'),
        sa.CheckConstraint('overall_confidence >= 0 AND overall_confidence <= 1', name='check_overall_confidence_range'),
        sa.ForeignKeyConstraint(['session_id'], ['sessions.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    # Create indexes for pose_detections table
    op.create_index('idx_pose_session_id', 'pose_detections', ['session_id'])
    op.create_index('idx_pose_timestamp', 'pose_detections', ['timestamp_ns'])
    op.create_index('idx_pose_frame', 'pose_detections', ['frame_number'])
    op.create_index('idx_pose_person_count', 'pose_detections', ['person_count'])
    # Create system_metrics table: generic time-series metric storage.
    op.create_table(
        'system_metrics',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('metric_name', sa.String(length=255), nullable=False),
        sa.Column('metric_type', sa.String(length=50), nullable=False),
        sa.Column('value', sa.Float(), nullable=False),
        sa.Column('unit', sa.String(length=50), nullable=True),
        sa.Column('labels', sa.JSON(), nullable=True),
        sa.Column('tags', postgresql.ARRAY(sa.String()), nullable=True),
        sa.Column('source', sa.String(length=255), nullable=True),
        sa.Column('component', sa.String(length=100), nullable=True),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('metadata', sa.JSON(), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Create indexes for system_metrics table
    op.create_index('idx_metric_name', 'system_metrics', ['metric_name'])
    op.create_index('idx_metric_type', 'system_metrics', ['metric_type'])
    op.create_index('idx_metric_created_at', 'system_metrics', ['created_at'])
    op.create_index('idx_metric_source', 'system_metrics', ['source'])
    op.create_index('idx_metric_component', 'system_metrics', ['component'])
    # Create audit_logs table: who did what, with before/after state.
    op.create_table(
        'audit_logs',
        sa.Column('id', postgresql.UUID(as_uuid=True), nullable=False),
        sa.Column('created_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('updated_at', sa.DateTime(timezone=True), server_default=sa.text('now()'), nullable=False),
        sa.Column('event_type', sa.String(length=100), nullable=False),
        sa.Column('event_name', sa.String(length=255), nullable=False),
        sa.Column('description', sa.Text(), nullable=True),
        sa.Column('user_id', sa.String(length=255), nullable=True),
        sa.Column('session_id', sa.String(length=255), nullable=True),
        sa.Column('ip_address', sa.String(length=45), nullable=True),
        sa.Column('user_agent', sa.Text(), nullable=True),
        sa.Column('resource_type', sa.String(length=100), nullable=True),
        sa.Column('resource_id', sa.String(length=255), nullable=True),
        sa.Column('before_state', sa.JSON(), nullable=True),
        sa.Column('after_state', sa.JSON(), nullable=True),
        sa.Column('changes', sa.JSON(), nullable=True),
        sa.Column('success', sa.Boolean(), nullable=False),
        sa.Column('error_message', sa.Text(), nullable=True),
        sa.Column('metadata', sa.JSON(), nullable=True),
        sa.Column('tags', postgresql.ARRAY(sa.String()), nullable=True),
        sa.PrimaryKeyConstraint('id')
    )
    # Create indexes for audit_logs table
    op.create_index('idx_audit_event_type', 'audit_logs', ['event_type'])
    op.create_index('idx_audit_user_id', 'audit_logs', ['user_id'])
    op.create_index('idx_audit_resource', 'audit_logs', ['resource_type', 'resource_id'])
    op.create_index('idx_audit_created_at', 'audit_logs', ['created_at'])
    op.create_index('idx_audit_success', 'audit_logs', ['success'])
    # Create triggers for updated_at columns: a single shared plpgsql
    # function stamps NEW.updated_at on every UPDATE.
    op.execute("""
        CREATE OR REPLACE FUNCTION update_updated_at_column()
        RETURNS TRIGGER AS $$
        BEGIN
            NEW.updated_at = now();
            RETURN NEW;
        END;
        $$ language 'plpgsql';
    """)
    # Add triggers to all tables with updated_at column
    tables_with_updated_at = [
        'devices', 'sessions', 'csi_data', 'pose_detections',
        'system_metrics', 'audit_logs'
    ]
    for table in tables_with_updated_at:
        op.execute(f"""
            CREATE TRIGGER update_{table}_updated_at
            BEFORE UPDATE ON {table}
            FOR EACH ROW
            EXECUTE FUNCTION update_updated_at_column();
        """)
    # Insert initial data (demo device/session, seed metrics, audit entry)
    _insert_initial_data()
def downgrade():
    """Drop all tables and functions."""
    # Detach the per-table updated_at triggers before removing the
    # shared trigger function they all reference.
    for table in [
        'devices', 'sessions', 'csi_data', 'pose_detections',
        'system_metrics', 'audit_logs'
    ]:
        op.execute(f"DROP TRIGGER IF EXISTS update_{table}_updated_at ON {table};")
    op.execute("DROP FUNCTION IF EXISTS update_updated_at_column();")
    # Children first: drop in reverse dependency order so no foreign-key
    # constraint is violated mid-teardown.
    op.drop_table('audit_logs')
    op.drop_table('system_metrics')
    op.drop_table('pose_detections')
    op.drop_table('csi_data')
    op.drop_table('sessions')
    op.drop_table('devices')
def _insert_initial_data():
    """Seed the freshly created schema with demo/bootstrap rows.

    Inserts one demo device, one demo session linked to it, a set of
    baseline system metrics, and an audit-log entry recording this
    migration. All interpolated values are hard-coded constants defined in
    this function, so the raw-SQL f-strings carry no injection risk.
    """
    # Insert sample device
    op.execute("""
        INSERT INTO devices (
            id, name, device_type, mac_address, ip_address, status,
            firmware_version, hardware_version, location_name, room_id,
            coordinates_x, coordinates_y, coordinates_z,
            config, capabilities, description, tags
        ) VALUES (
            gen_random_uuid(),
            'Demo Router',
            'router',
            '00:11:22:33:44:55',
            '192.168.1.1',
            'active',
            '1.0.0',
            'v1.0',
            'Living Room',
            'room_001',
            0.0,
            0.0,
            2.5,
            '{"channel": 6, "power": 20, "bandwidth": 80}',
            ARRAY['wifi6', 'csi', 'beamforming'],
            'Demo WiFi router for testing',
            ARRAY['demo', 'testing']
        );
    """)
    # Insert sample session; device_id is resolved via a name lookup on the
    # demo device inserted above.
    op.execute("""
        INSERT INTO sessions (
            id, name, description, started_at, status, config,
            device_id, tags, metadata, total_frames, processed_frames, error_count
        ) VALUES (
            gen_random_uuid(),
            'Demo Session',
            'Initial demo session for testing',
            now(),
            'active',
            '{"duration": 3600, "sampling_rate": 100}',
            (SELECT id FROM devices WHERE name = 'Demo Router' LIMIT 1),
            ARRAY['demo', 'initial'],
            '{"purpose": "testing", "environment": "lab"}',
            0,
            0,
            0
        );
    """)
    # Insert initial system metrics
    # Tuples are (metric_name, metric_type, value, unit, source, component).
    metrics_data = [
        ('system_startup', 'counter', 1.0, 'count', 'system', 'application'),
        ('database_connections', 'gauge', 0.0, 'count', 'database', 'postgresql'),
        ('api_requests_total', 'counter', 0.0, 'count', 'api', 'http'),
        ('memory_usage', 'gauge', 0.0, 'bytes', 'system', 'memory'),
        ('cpu_usage', 'gauge', 0.0, 'percent', 'system', 'cpu'),
    ]
    for metric_name, metric_type, value, unit, source, component in metrics_data:
        # Interpolated values come only from the constant list above.
        op.execute(f"""
            INSERT INTO system_metrics (
                id, metric_name, metric_type, value, unit, source, component,
                description, metadata
            ) VALUES (
                gen_random_uuid(),
                '{metric_name}',
                '{metric_type}',
                {value},
                '{unit}',
                '{source}',
                '{component}',
                'Initial {metric_name} metric',
                '{{"initial": true, "version": "1.0.0"}}'
            );
        """)
    # Insert initial audit log documenting that this migration ran.
    op.execute("""
        INSERT INTO audit_logs (
            id, event_type, event_name, description, user_id, success,
            resource_type, metadata
        ) VALUES (
            gen_random_uuid(),
            'system',
            'database_migration',
            'Initial database schema created',
            'system',
            true,
            'database',
            '{"migration": "001_initial", "version": "1.0.0"}'
        );
    """)

View File

@@ -0,0 +1,109 @@
"""Alembic environment configuration for WiFi-DensePose API."""
import asyncio
import os
import sys
from logging.config import fileConfig
from pathlib import Path
from sqlalchemy import pool
from sqlalchemy.engine import Connection
from sqlalchemy.ext.asyncio import async_engine_from_config
from alembic import context
# Add the project root to the Python path
project_root = Path(__file__).parent.parent.parent.parent
sys.path.insert(0, str(project_root))
# Import the models and settings
from src.database.models import Base
from src.config.settings import get_settings
# The Alembic Config object exposes the values of the alembic.ini file
# currently in use.
config = context.config
# Configure Python logging from the ini file, when one is present.
if config.config_file_name is not None:
    fileConfig(config.config_file_name)
# The models' MetaData object, required for 'autogenerate' support.
target_metadata = Base.metadata
# Other values from the config, defined by the needs of env.py, can be
# acquired via e.g.:
# my_important_option = config.get_main_option("my_important_option")
def get_database_url():
    """Resolve the SQLAlchemy database URL for migrations.

    Prefers the URL from application settings; if settings cannot be loaded
    for any reason, falls back to a local SQLite database so Alembic can
    still run.
    """
    try:
        return get_settings().get_database_url()
    except Exception:
        # Best-effort fallback when settings are unavailable.
        return "sqlite:///./data/wifi_densepose_fallback.db"
def run_migrations_offline() -> None:
    """Run migrations in 'offline' mode.

    Configures the Alembic context with just a URL instead of an Engine,
    so no DBAPI needs to be available; context.execute() calls emit SQL to
    the script output rather than to a live connection.
    """
    context.configure(
        url=get_database_url(),
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )
    with context.begin_transaction():
        context.run_migrations()
def do_run_migrations(connection: Connection) -> None:
    """Configure Alembic against an open connection and execute migrations."""
    context.configure(
        connection=connection,
        target_metadata=target_metadata,
    )
    with context.begin_transaction():
        context.run_migrations()
async def run_async_migrations() -> None:
    """Build an async engine from the Alembic config and run migrations on it."""
    section = config.get_section(config.config_ini_section)
    section["sqlalchemy.url"] = get_database_url()
    engine = async_engine_from_config(
        section,
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,  # migrations need only one short-lived connection
    )
    async with engine.connect() as connection:
        await connection.run_sync(do_run_migrations)
    await engine.dispose()
def run_migrations_online() -> None:
    """Run migrations in 'online' mode by driving the async engine from a fresh event loop."""
    asyncio.run(run_async_migrations())
# Entry point: Alembic sets offline mode for SQL-script generation
# (e.g. `alembic upgrade --sql`); otherwise run against a live database.
if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()

View File

@@ -0,0 +1,26 @@
"""${message}
Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}
"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}
# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}
def upgrade() -> None:
"""Upgrade database schema."""
${upgrades if upgrades else "pass"}
def downgrade() -> None:
"""Downgrade database schema."""
${downgrades if downgrades else "pass"}

View File

@@ -0,0 +1,60 @@
"""
Database type compatibility helpers for WiFi-DensePose API
"""
from typing import Type, Any
from sqlalchemy import String, Text, JSON
from sqlalchemy.dialects.postgresql import ARRAY as PostgreSQL_ARRAY
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.sql import sqltypes
class ArrayType(sqltypes.TypeDecorator):
    """Array column type portable across PostgreSQL and SQLite.

    On PostgreSQL values map to a native ARRAY of ``item_type``; on every
    other backend they are stored as JSON lists.
    """

    impl = Text
    cache_ok = True

    def __init__(self, item_type: Type = String):
        super().__init__()
        self.item_type = item_type

    def load_dialect_impl(self, dialect):
        """Pick the concrete column type for the active dialect."""
        if dialect.name == 'postgresql':
            return dialect.type_descriptor(PostgreSQL_ARRAY(self.item_type))
        # Non-PostgreSQL backends fall back to a JSON column.
        return dialect.type_descriptor(JSON)

    def process_bind_param(self, value, dialect):
        """Normalize a value on its way into the database."""
        if value is None or dialect.name == 'postgresql':
            return value
        # JSON storage path: coerce any other iterable to a plain list.
        return value if isinstance(value, (list, type(None))) else list(value)

    def process_result_value(self, value, dialect):
        """Normalize a value on its way out of the database."""
        if value is None or dialect.name == 'postgresql':
            return value
        # JSON storage path: guarantee callers always receive a list.
        return value if isinstance(value, list) else []
def get_array_type(item_type: Type = String) -> ArrayType:
    """Build a dialect-portable array column type for *item_type*.

    Returns an ``ArrayType`` instance (the previous ``-> Type`` annotation
    was incorrect: a type *instance*, not a class, is returned).
    """
    return ArrayType(item_type)
# Convenience pre-built instances for the two array shapes the models use.
StringArray = ArrayType(String)  # portable list-of-str column
FloatArray = ArrayType(sqltypes.Float)  # portable list-of-float column

498
v1/src/database/models.py Normal file
View File

@@ -0,0 +1,498 @@
"""
SQLAlchemy models for WiFi-DensePose API
"""
import uuid
from datetime import datetime
from typing import Optional, Dict, Any, List
from enum import Enum

from sqlalchemy import (
    Column, String, Integer, BigInteger, Float, Boolean, DateTime, Text, JSON,
    ForeignKey, Index, UniqueConstraint, CheckConstraint
)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, validates
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.sql import func

# Import custom array type for compatibility
from src.database.model_types import StringArray, FloatArray
Base = declarative_base()
class TimestampMixin:
    """Mixin adding timezone-aware created_at/updated_at audit columns."""
    # created_at defaults to the database's now(); updated_at is refreshed
    # by SQLAlchemy on each ORM UPDATE via onupdate.
    created_at = Column(DateTime(timezone=True), server_default=func.now(), nullable=False)
    updated_at = Column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now(), nullable=False)
class UUIDMixin:
    """Mixin adding a UUID primary key generated client-side via uuid4."""
    id = Column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4, nullable=False)
class DeviceStatus(str, Enum):
    """Device status enumeration.

    str-valued so members compare equal to the raw strings stored in the
    `devices.status` column.
    """
    ACTIVE = "active"
    INACTIVE = "inactive"
    MAINTENANCE = "maintenance"
    ERROR = "error"
class SessionStatus(str, Enum):
    """Session status enumeration.

    str-valued so members compare equal to the raw strings stored in the
    `sessions.status` column.
    """
    ACTIVE = "active"
    COMPLETED = "completed"
    FAILED = "failed"
    CANCELLED = "cancelled"
class ProcessingStatus(str, Enum):
    """Processing status enumeration.

    str-valued so members compare equal to the raw strings stored in the
    `csi_data.processing_status` column.
    """
    PENDING = "pending"
    PROCESSING = "processing"
    COMPLETED = "completed"
    FAILED = "failed"
class Device(Base, UUIDMixin, TimestampMixin):
    """Device model for WiFi routers and sensors."""
    __tablename__ = "devices"
    # Basic device information
    name = Column(String(255), nullable=False)
    device_type = Column(String(50), nullable=False)  # router, sensor, etc.
    mac_address = Column(String(17), unique=True, nullable=False)
    ip_address = Column(String(45), nullable=True)  # IPv4 or IPv6
    # Device status and configuration
    status = Column(String(20), default=DeviceStatus.INACTIVE, nullable=False)
    firmware_version = Column(String(50), nullable=True)
    hardware_version = Column(String(50), nullable=True)
    # Location information
    location_name = Column(String(255), nullable=True)
    room_id = Column(String(100), nullable=True)
    coordinates_x = Column(Float, nullable=True)
    coordinates_y = Column(Float, nullable=True)
    coordinates_z = Column(Float, nullable=True)
    # Configuration
    config = Column(JSON, nullable=True)
    capabilities = Column(StringArray, nullable=True)
    # Metadata
    description = Column(Text, nullable=True)
    tags = Column(StringArray, nullable=True)
    # Relationships
    sessions = relationship("Session", back_populates="device", cascade="all, delete-orphan")
    csi_data = relationship("CSIData", back_populates="device", cascade="all, delete-orphan")
    # Constraints and indexes
    __table_args__ = (
        Index("idx_device_mac_address", "mac_address"),
        Index("idx_device_status", "status"),
        Index("idx_device_type", "device_type"),
        CheckConstraint("status IN ('active', 'inactive', 'maintenance', 'error')", name="check_device_status"),
    )

    @validates('mac_address')
    def validate_mac_address(self, key, address):
        """Validate and normalize a colon-separated MAC address.

        Accepts exactly six two-character hexadecimal groups (e.g.
        "00:11:22:33:44:55") and returns the address lowercased.

        Raises:
            ValueError: if the address is not a valid MAC.
        """
        if address and len(address) == 17:
            parts = address.split(':')
            # Fixed: previously only group count/length were checked, so
            # non-hex pairs such as "zz" were accepted.
            if len(parts) == 6 and all(
                len(part) == 2
                and all(ch in '0123456789abcdefABCDEF' for ch in part)
                for part in parts
            ):
                return address.lower()
        raise ValueError("Invalid MAC address format")

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the device to a JSON-friendly dictionary."""
        # Fixed: use `is not None` instead of truthiness so a legitimate
        # 0.0 coordinate (device at the origin) does not drop the dict.
        has_coordinates = any(
            c is not None
            for c in (self.coordinates_x, self.coordinates_y, self.coordinates_z)
        )
        return {
            "id": str(self.id),
            "name": self.name,
            "device_type": self.device_type,
            "mac_address": self.mac_address,
            "ip_address": self.ip_address,
            "status": self.status,
            "firmware_version": self.firmware_version,
            "hardware_version": self.hardware_version,
            "location_name": self.location_name,
            "room_id": self.room_id,
            "coordinates": {
                "x": self.coordinates_x,
                "y": self.coordinates_y,
                "z": self.coordinates_z,
            } if has_coordinates else None,
            "config": self.config,
            "capabilities": self.capabilities,
            "description": self.description,
            "tags": self.tags,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "updated_at": self.updated_at.isoformat() if self.updated_at else None,
        }
class Session(Base, UUIDMixin, TimestampMixin):
    """Session model for tracking data collection sessions.

    A session groups the CSI frames and pose detections captured from a
    single device over a bounded time window.
    """
    __tablename__ = "sessions"
    # Session identification
    name = Column(String(255), nullable=False)
    description = Column(Text, nullable=True)
    # Session timing
    started_at = Column(DateTime(timezone=True), nullable=True)
    ended_at = Column(DateTime(timezone=True), nullable=True)
    duration_seconds = Column(Integer, nullable=True)
    # Session status and configuration
    status = Column(String(20), default=SessionStatus.ACTIVE, nullable=False)
    config = Column(JSON, nullable=True)
    # Device relationship
    device_id = Column(UUID(as_uuid=True), ForeignKey("devices.id"), nullable=False)
    device = relationship("Device", back_populates="sessions")
    # Data relationships
    csi_data = relationship("CSIData", back_populates="session", cascade="all, delete-orphan")
    pose_detections = relationship("PoseDetection", back_populates="session", cascade="all, delete-orphan")
    # Metadata
    tags = Column(StringArray, nullable=True)
    # Fixed: attribute must be meta_data ("metadata" is reserved on
    # Declarative classes), but the DB column name must be "metadata" to
    # match the schema created by the initial migration.
    meta_data = Column("metadata", JSON, nullable=True)
    # Statistics
    total_frames = Column(Integer, default=0, nullable=False)
    processed_frames = Column(Integer, default=0, nullable=False)
    error_count = Column(Integer, default=0, nullable=False)
    # Constraints and indexes
    __table_args__ = (
        Index("idx_session_device_id", "device_id"),
        Index("idx_session_status", "status"),
        Index("idx_session_started_at", "started_at"),
        CheckConstraint("status IN ('active', 'completed', 'failed', 'cancelled')", name="check_session_status"),
        CheckConstraint("total_frames >= 0", name="check_total_frames_positive"),
        CheckConstraint("processed_frames >= 0", name="check_processed_frames_positive"),
        CheckConstraint("error_count >= 0", name="check_error_count_positive"),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the session to a JSON-friendly dictionary."""
        return {
            "id": str(self.id),
            "name": self.name,
            "description": self.description,
            "started_at": self.started_at.isoformat() if self.started_at else None,
            "ended_at": self.ended_at.isoformat() if self.ended_at else None,
            "duration_seconds": self.duration_seconds,
            "status": self.status,
            "config": self.config,
            "device_id": str(self.device_id),
            "tags": self.tags,
            "metadata": self.meta_data,
            "total_frames": self.total_frames,
            "processed_frames": self.processed_frames,
            "error_count": self.error_count,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "updated_at": self.updated_at.isoformat() if self.updated_at else None,
        }
class CSIData(Base, UUIDMixin, TimestampMixin):
    """CSI (Channel State Information) data model.

    One row per captured CSI frame: per-subcarrier amplitude/phase arrays
    plus radio metadata (frequency, bandwidth, RSSI, SNR, antennas).
    """
    __tablename__ = "csi_data"
    # Data identification
    sequence_number = Column(Integer, nullable=False)
    # Fixed: nanosecond epoch timestamps (~1.7e18) far exceed the 32-bit
    # range of a plain INTEGER on PostgreSQL, so a 64-bit column is needed.
    timestamp_ns = Column(BigInteger, nullable=False)  # Nanosecond timestamp
    # Device and session relationships
    device_id = Column(UUID(as_uuid=True), ForeignKey("devices.id"), nullable=False)
    session_id = Column(UUID(as_uuid=True), ForeignKey("sessions.id"), nullable=True)
    device = relationship("Device", back_populates="csi_data")
    session = relationship("Session", back_populates="csi_data")
    # CSI data
    amplitude = Column(FloatArray, nullable=False)
    phase = Column(FloatArray, nullable=False)
    frequency = Column(Float, nullable=False)  # MHz
    bandwidth = Column(Float, nullable=False)  # MHz
    # Signal characteristics
    rssi = Column(Float, nullable=True)  # dBm
    snr = Column(Float, nullable=True)  # dB
    noise_floor = Column(Float, nullable=True)  # dBm
    # Antenna information
    tx_antenna = Column(Integer, nullable=True)
    rx_antenna = Column(Integer, nullable=True)
    num_subcarriers = Column(Integer, nullable=False)
    # Processing status
    processing_status = Column(String(20), default=ProcessingStatus.PENDING, nullable=False)
    processed_at = Column(DateTime(timezone=True), nullable=True)
    # Quality metrics
    quality_score = Column(Float, nullable=True)
    is_valid = Column(Boolean, default=True, nullable=False)
    # Fixed: attribute stays meta_data ("metadata" is reserved on
    # Declarative classes) but the DB column is named "metadata" to match
    # the migration schema.
    meta_data = Column("metadata", JSON, nullable=True)
    # Constraints and indexes
    __table_args__ = (
        Index("idx_csi_device_id", "device_id"),
        Index("idx_csi_session_id", "session_id"),
        Index("idx_csi_timestamp", "timestamp_ns"),
        Index("idx_csi_sequence", "sequence_number"),
        Index("idx_csi_processing_status", "processing_status"),
        UniqueConstraint("device_id", "sequence_number", "timestamp_ns", name="uq_csi_device_seq_time"),
        CheckConstraint("frequency > 0", name="check_frequency_positive"),
        CheckConstraint("bandwidth > 0", name="check_bandwidth_positive"),
        CheckConstraint("num_subcarriers > 0", name="check_subcarriers_positive"),
        CheckConstraint("processing_status IN ('pending', 'processing', 'completed', 'failed')", name="check_processing_status"),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the CSI frame to a JSON-friendly dictionary."""
        return {
            "id": str(self.id),
            "sequence_number": self.sequence_number,
            "timestamp_ns": self.timestamp_ns,
            "device_id": str(self.device_id),
            "session_id": str(self.session_id) if self.session_id else None,
            "amplitude": self.amplitude,
            "phase": self.phase,
            "frequency": self.frequency,
            "bandwidth": self.bandwidth,
            "rssi": self.rssi,
            "snr": self.snr,
            "noise_floor": self.noise_floor,
            "tx_antenna": self.tx_antenna,
            "rx_antenna": self.rx_antenna,
            "num_subcarriers": self.num_subcarriers,
            "processing_status": self.processing_status,
            "processed_at": self.processed_at.isoformat() if self.processed_at else None,
            "quality_score": self.quality_score,
            "is_valid": self.is_valid,
            "metadata": self.meta_data,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "updated_at": self.updated_at.isoformat() if self.updated_at else None,
        }
class PoseDetection(Base, UUIDMixin, TimestampMixin):
    """Pose detection results model.

    One row per processed frame, holding keypoints, bounding boxes, and
    confidence/quality scores for all persons detected in that frame.
    """
    __tablename__ = "pose_detections"
    # Detection identification
    frame_number = Column(Integer, nullable=False)
    # Fixed: nanosecond epoch timestamps overflow a 32-bit INTEGER on
    # PostgreSQL, so a 64-bit column is required.
    timestamp_ns = Column(BigInteger, nullable=False)
    # Session relationship
    session_id = Column(UUID(as_uuid=True), ForeignKey("sessions.id"), nullable=False)
    session = relationship("Session", back_populates="pose_detections")
    # Detection results
    person_count = Column(Integer, default=0, nullable=False)
    keypoints = Column(JSON, nullable=True)  # Array of person keypoints
    bounding_boxes = Column(JSON, nullable=True)  # Array of bounding boxes
    # Confidence scores (each constrained to [0, 1] at the DB level)
    detection_confidence = Column(Float, nullable=True)
    pose_confidence = Column(Float, nullable=True)
    overall_confidence = Column(Float, nullable=True)
    # Processing information
    processing_time_ms = Column(Float, nullable=True)
    model_version = Column(String(50), nullable=True)
    algorithm = Column(String(100), nullable=True)
    # Quality metrics
    image_quality = Column(Float, nullable=True)
    pose_quality = Column(Float, nullable=True)
    is_valid = Column(Boolean, default=True, nullable=False)
    # Fixed: attribute stays meta_data ("metadata" is reserved on
    # Declarative classes) but the DB column is named "metadata" to match
    # the migration schema.
    meta_data = Column("metadata", JSON, nullable=True)
    # Constraints and indexes
    __table_args__ = (
        Index("idx_pose_session_id", "session_id"),
        Index("idx_pose_timestamp", "timestamp_ns"),
        Index("idx_pose_frame", "frame_number"),
        Index("idx_pose_person_count", "person_count"),
        CheckConstraint("person_count >= 0", name="check_person_count_positive"),
        CheckConstraint("detection_confidence >= 0 AND detection_confidence <= 1", name="check_detection_confidence_range"),
        CheckConstraint("pose_confidence >= 0 AND pose_confidence <= 1", name="check_pose_confidence_range"),
        CheckConstraint("overall_confidence >= 0 AND overall_confidence <= 1", name="check_overall_confidence_range"),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the detection to a JSON-friendly dictionary."""
        return {
            "id": str(self.id),
            "frame_number": self.frame_number,
            "timestamp_ns": self.timestamp_ns,
            "session_id": str(self.session_id),
            "person_count": self.person_count,
            "keypoints": self.keypoints,
            "bounding_boxes": self.bounding_boxes,
            "detection_confidence": self.detection_confidence,
            "pose_confidence": self.pose_confidence,
            "overall_confidence": self.overall_confidence,
            "processing_time_ms": self.processing_time_ms,
            "model_version": self.model_version,
            "algorithm": self.algorithm,
            "image_quality": self.image_quality,
            "pose_quality": self.pose_quality,
            "is_valid": self.is_valid,
            "metadata": self.meta_data,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "updated_at": self.updated_at.isoformat() if self.updated_at else None,
        }
class SystemMetric(Base, UUIDMixin, TimestampMixin):
    """System metrics model for monitoring.

    Stores one sample per row: a named counter/gauge/histogram value with
    optional labels, tags, and source attribution.
    """
    __tablename__ = "system_metrics"
    # Metric identification
    metric_name = Column(String(255), nullable=False)
    metric_type = Column(String(50), nullable=False)  # counter, gauge, histogram
    # Metric value
    value = Column(Float, nullable=False)
    unit = Column(String(50), nullable=True)
    # Labels and tags
    labels = Column(JSON, nullable=True)
    tags = Column(StringArray, nullable=True)
    # Source information
    source = Column(String(255), nullable=True)
    component = Column(String(100), nullable=True)
    # Metadata
    description = Column(Text, nullable=True)
    # Fixed: attribute stays meta_data ("metadata" is reserved on
    # Declarative classes) but the DB column is named "metadata" to match
    # the migration schema (see the initial migration's INSERTs).
    meta_data = Column("metadata", JSON, nullable=True)
    # Constraints and indexes
    __table_args__ = (
        Index("idx_metric_name", "metric_name"),
        Index("idx_metric_type", "metric_type"),
        Index("idx_metric_created_at", "created_at"),
        Index("idx_metric_source", "source"),
        Index("idx_metric_component", "component"),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the metric sample to a JSON-friendly dictionary."""
        return {
            "id": str(self.id),
            "metric_name": self.metric_name,
            "metric_type": self.metric_type,
            "value": self.value,
            "unit": self.unit,
            "labels": self.labels,
            "tags": self.tags,
            "source": self.source,
            "component": self.component,
            "description": self.description,
            "metadata": self.meta_data,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "updated_at": self.updated_at.isoformat() if self.updated_at else None,
        }
class AuditLog(Base, UUIDMixin, TimestampMixin):
    """Audit log model for tracking system events.

    Records who did what to which resource, before/after state, and the
    outcome (success flag plus optional error message).
    """
    __tablename__ = "audit_logs"
    # Event information
    event_type = Column(String(100), nullable=False)
    event_name = Column(String(255), nullable=False)
    description = Column(Text, nullable=True)
    # User and session information
    user_id = Column(String(255), nullable=True)
    session_id = Column(String(255), nullable=True)
    ip_address = Column(String(45), nullable=True)
    user_agent = Column(Text, nullable=True)
    # Resource information
    resource_type = Column(String(100), nullable=True)
    resource_id = Column(String(255), nullable=True)
    # Event details
    before_state = Column(JSON, nullable=True)
    after_state = Column(JSON, nullable=True)
    changes = Column(JSON, nullable=True)
    # Result information
    success = Column(Boolean, nullable=False)
    error_message = Column(Text, nullable=True)
    # Fixed: attribute stays meta_data ("metadata" is reserved on
    # Declarative classes) but the DB column is named "metadata", matching
    # the migration's audit_logs table definition.
    meta_data = Column("metadata", JSON, nullable=True)
    tags = Column(StringArray, nullable=True)
    # Constraints and indexes
    __table_args__ = (
        Index("idx_audit_event_type", "event_type"),
        Index("idx_audit_user_id", "user_id"),
        Index("idx_audit_resource", "resource_type", "resource_id"),
        Index("idx_audit_created_at", "created_at"),
        Index("idx_audit_success", "success"),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Serialize the audit entry to a JSON-friendly dictionary."""
        return {
            "id": str(self.id),
            "event_type": self.event_type,
            "event_name": self.event_name,
            "description": self.description,
            "user_id": self.user_id,
            "session_id": self.session_id,
            "ip_address": self.ip_address,
            "user_agent": self.user_agent,
            "resource_type": self.resource_type,
            "resource_id": self.resource_id,
            "before_state": self.before_state,
            "after_state": self.after_state,
            "changes": self.changes,
            "success": self.success,
            "error_message": self.error_message,
            "metadata": self.meta_data,
            "tags": self.tags,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "updated_at": self.updated_at.isoformat() if self.updated_at else None,
        }
# Registry mapping model names to their classes; consumed by
# get_model_by_name()/get_all_models() below.
MODEL_REGISTRY = {
    "Device": Device,
    "Session": Session,
    "CSIData": CSIData,
    "PoseDetection": PoseDetection,
    "SystemMetric": SystemMetric,
    "AuditLog": AuditLog,
}
def get_model_by_name(name: str) -> Optional[type]:
    """Return the model class registered under *name*, or None if unknown."""
    return MODEL_REGISTRY.get(name)
def get_all_models() -> List:
    """Return a list of all registered model classes."""
    return list(MODEL_REGISTRY.values())