feat: Complete Rust port of WiFi-DensePose with modular crates
Major changes:
- Organized Python v1 implementation into v1/ subdirectory
- Created Rust workspace with 9 modular crates:
  - wifi-densepose-core: Core types, traits, errors
  - wifi-densepose-signal: CSI processing, phase sanitization, FFT
  - wifi-densepose-nn: Neural network inference (ONNX/Candle/tch)
  - wifi-densepose-api: Axum-based REST/WebSocket API
  - wifi-densepose-db: SQLx database layer
  - wifi-densepose-config: Configuration management
  - wifi-densepose-hardware: Hardware abstraction
  - wifi-densepose-wasm: WebAssembly bindings
  - wifi-densepose-cli: Command-line interface

Documentation:
- ADR-001: Workspace structure
- ADR-002: Signal processing library selection
- ADR-003: Neural network inference strategy
- DDD domain model with bounded contexts

Testing:
- 69 tests passing across all crates:
  - Signal processing: 45 tests
  - Neural networks: 21 tests
  - Core: 3 doc tests

Performance targets:
- 10x faster CSI processing (~0.5ms vs ~5ms)
- 5x lower memory usage (~100MB vs ~500MB)
- WASM support for browser deployment
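The wifi-densepose-nn Cargo.toml below gates three inference backends behind Cargo features. A minimal sketch of how such feature-gated backends are typically unified behind a single trait — the trait and module names here are illustrative assumptions, not the crate's actual API:

```rust
// Illustrative sketch only: names are hypothetical, not the real
// public API of wifi-densepose-nn.
use ndarray::ArrayD;

/// A common interface each compiled-in backend could implement.
pub trait InferenceBackend {
    type Error: std::error::Error + Send + Sync + 'static;

    /// Forward pass over a batch of CSI-derived feature tensors.
    fn infer(&self, input: &ArrayD<f32>) -> Result<ArrayD<f32>, Self::Error>;
}

// Each backend module exists only when its Cargo feature is enabled,
// mirroring the `onnx`, `tch-backend`, and `candle-backend` features.
#[cfg(feature = "onnx")]
pub mod onnx_backend {
    // ort-based implementation would live here
}

#[cfg(feature = "tch-backend")]
pub mod tch_backend {
    // tch (libtorch) implementation would live here
}

#[cfg(feature = "candle-backend")]
pub mod candle_backend {
    // candle implementation would live here
}
```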
@@ -0,0 +1,60 @@
[package]
name = "wifi-densepose-nn"
version.workspace = true
edition.workspace = true
authors.workspace = true
license.workspace = true
repository.workspace = true
documentation.workspace = true
keywords = ["neural-network", "onnx", "inference", "densepose", "deep-learning"]
categories = ["science", "computer-vision"]
description = "Neural network inference for WiFi-DensePose pose estimation"

[features]
default = ["onnx"]
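# ONNX Runtime ("ort") is the default backend; ADR-003 (neural network
# inference strategy) covers the backend selection rationale.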
onnx = ["ort"]
tch-backend = ["tch"]
candle-backend = ["candle-core", "candle-nn"]
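# GPU features: presumably mapped to ONNX Runtime execution providers,
# hence both simply require the `onnx` backend.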
cuda = ["onnx"]
tensorrt = ["onnx"]
all-backends = ["onnx", "tch-backend", "candle-backend"]
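# Non-default backends are opt-in at build time, e.g.:
#   cargo build -p wifi-densepose-nn --no-default-features --features candle-backend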

[dependencies]
# Core utilities
thiserror.workspace = true
anyhow.workspace = true
serde.workspace = true
serde_json.workspace = true
tracing.workspace = true

# Tensor operations
ndarray.workspace = true
num-traits.workspace = true

# ONNX Runtime (default)
ort = { workspace = true, optional = true }

# PyTorch backend (optional)
tch = { workspace = true, optional = true }

# Candle backend (optional)
candle-core = { workspace = true, optional = true }
candle-nn = { workspace = true, optional = true }

# Async runtime
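# Only the lightweight `sync` and `rt` features are needed in the library;
# tests add `rt-multi-thread` and `macros` under [dev-dependencies].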
tokio = { workspace = true, features = ["sync", "rt"] }

# Additional utilities
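# parking_lot: faster locks; once_cell: lazy statics; memmap2 is likely
# used to memory-map model files for zero-copy loading (an assumption).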
parking_lot = "0.12"
once_cell = "1.19"
memmap2 = "0.9"

[dev-dependencies]
criterion.workspace = true
proptest.workspace = true
tokio = { workspace = true, features = ["rt-multi-thread", "macros"] }
tempfile = "3.10"

[[bench]]
name = "inference_bench"
harness = false
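With `harness = false`, Cargo skips the built-in test harness so Criterion can supply its own `main`. A minimal sketch of what `benches/inference_bench.rs` might contain — the benchmarked body is a placeholder assumption, since the crate's model-loading API is not part of this diff:

```rust
// benches/inference_bench.rs — hypothetical sketch; a real benchmark
// would run an actual backend forward pass instead of this placeholder.
use criterion::{criterion_group, criterion_main, Criterion};
use std::hint::black_box;

fn inference_benchmark(c: &mut Criterion) {
    // Stand-in input: a flattened 3x64x64 feature tensor.
    let input = vec![0.5f32; 3 * 64 * 64];

    c.bench_function("forward_pass_placeholder", |b| {
        b.iter(|| {
            // black_box keeps the optimizer from eliding the work.
            let sum: f32 = black_box(&input).iter().sum();
            black_box(sum)
        })
    });
}

criterion_group!(benches, inference_benchmark);
criterion_main!(benches);
```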