feat: Add Python vital signs detection and clean up Rust warnings

Rust changes:
- Fix unused variable warnings in wifi-densepose-nn (densepose.rs, inference.rs, tensor.rs, translator.rs)
- Remove unused imports in wifi-densepose-mat (breathing.rs, pipeline.rs, csi_receiver.rs, debris_model.rs, vital_signs_classifier.rs)
- All tests continue to pass

Python changes:
- Add vital_signs.py module with BreathingDetector and HeartbeatDetector classes
- Mirror Rust wifi-densepose-mat detection functionality
- Update v1 package version to 1.2.0
- Export new vital signs classes from core module
This commit is contained in:
Claude
2026-01-14 17:42:37 +00:00
parent 16c50abca3
commit 7c00482314
13 changed files with 583 additions and 23 deletions

View File

@@ -1,6 +1,6 @@
//! Breathing pattern detection from CSI signals.
use crate::domain::{BreathingPattern, BreathingType, ConfidenceScore};
use crate::domain::{BreathingPattern, BreathingType};
/// Configuration for breathing detection
#[derive(Debug, Clone)]

View File

@@ -3,7 +3,7 @@
//! This module provides both traditional signal-processing-based detection
//! and optional ML-enhanced detection for improved accuracy.
use crate::domain::{ScanZone, VitalSignsReading, ConfidenceScore};
use crate::domain::{ScanZone, VitalSignsReading};
use crate::ml::{MlDetectionConfig, MlDetectionPipeline, MlDetectionResult};
use crate::{DisasterConfig, MatError};
use super::{

View File

@@ -28,8 +28,6 @@ use chrono::{DateTime, Utc};
use std::collections::VecDeque;
use std::io::{BufReader, Read};
use std::path::Path;
use std::sync::Arc;
use tokio::sync::{mpsc, Mutex};
/// Configuration for CSI receivers
#[derive(Debug, Clone)]

View File

@@ -16,13 +16,10 @@
//! - Depth estimation head with uncertainty (mean + variance output)
use super::{DebrisFeatures, DepthEstimate, MlError, MlResult};
use ndarray::{Array1, Array2, Array4, s};
use std::collections::HashMap;
use ndarray::{Array2, Array4};
use std::path::Path;
use std::sync::Arc;
use parking_lot::RwLock;
use thiserror::Error;
use tracing::{debug, info, instrument, warn};
use tracing::{info, instrument, warn};
#[cfg(feature = "onnx")]
use wifi_densepose_nn::{OnnxBackend, OnnxSession, InferenceOptions, Tensor, TensorShape};

View File

@@ -35,7 +35,6 @@ pub use vital_signs_classifier::{
};
use crate::detection::CsiDataBuffer;
use crate::domain::{VitalSignsReading, BreathingPattern, HeartbeatSignature};
use async_trait::async_trait;
use std::path::Path;
use thiserror::Error;

View File

@@ -27,12 +27,8 @@ use crate::domain::{
BreathingPattern, BreathingType, HeartbeatSignature, MovementProfile,
MovementType, SignalStrength, VitalSignsReading,
};
use ndarray::{Array1, Array2, Array4, s};
use std::collections::HashMap;
use std::path::Path;
use std::sync::Arc;
use parking_lot::RwLock;
use tracing::{debug, info, instrument, warn};
use tracing::{info, instrument, warn};
#[cfg(feature = "onnx")]
use wifi_densepose_nn::{OnnxBackend, OnnxSession, InferenceOptions, Tensor, TensorShape};

View File

@@ -252,7 +252,7 @@ impl DensePoseHead {
})?;
let input_arr = input.as_array4()?;
let (batch, _channels, height, width) = input_arr.dim();
let (_batch, _channels, _height, _width) = input_arr.dim();
// Apply shared convolutions
let mut current = input_arr.clone();

View File

@@ -206,7 +206,7 @@ impl Backend for MockBackend {
self.output_shapes.get(name).cloned()
}
fn run(&self, inputs: HashMap<String, Tensor>) -> NnResult<HashMap<String, Tensor>> {
fn run(&self, _inputs: HashMap<String, Tensor>) -> NnResult<HashMap<String, Tensor>> {
let mut outputs = HashMap::new();
for (name, shape) in &self.output_shapes {

View File

@@ -266,7 +266,7 @@ impl Tensor {
}
/// Apply softmax along axis
pub fn softmax(&self, axis: usize) -> NnResult<Tensor> {
pub fn softmax(&self, _axis: usize) -> NnResult<Tensor> {
match self {
Tensor::Float4D(a) => {
let max = a.fold(f32::NEG_INFINITY, |acc, &x| acc.max(x));

View File

@@ -342,7 +342,7 @@ impl ModalityTranslator {
})?;
let input_arr = input.as_array4()?;
let (batch, _channels, height, width) = input_arr.dim();
let (_batch, _channels, _height, _width) = input_arr.dim();
// Encode
let mut encoder_outputs = Vec::new();
@@ -461,7 +461,7 @@ impl ModalityTranslator {
weights: &ConvBlockWeights,
) -> NnResult<Array4<f32>> {
let (batch, in_channels, in_height, in_width) = input.dim();
let (out_channels, _, kernel_h, kernel_w) = weights.conv_weight.dim();
let (out_channels, _, _kernel_h, _kernel_w) = weights.conv_weight.dim();
// Upsample 2x
let out_height = in_height * 2;
@@ -536,7 +536,7 @@ impl ModalityTranslator {
fn apply_attention(
&self,
input: &Array4<f32>,
weights: &AttentionWeights,
_weights: &AttentionWeights,
) -> NnResult<(Array4<f32>, Array4<f32>)> {
let (batch, channels, height, width) = input.dim();
let seq_len = height * width;