Squashed 'vendor/ruvector/' content from commit b64c2172
git-subtree-dir: vendor/ruvector git-subtree-split: b64c21726f2bb37286d9ee36a7869fef60cc6900
This commit is contained in:
159
crates/ruvector-robotics/src/perception/clustering.rs
Normal file
159
crates/ruvector-robotics/src/perception/clustering.rs
Normal file
@@ -0,0 +1,159 @@
|
||||
//! Shared spatial-hash clustering with union-find.
|
||||
//!
|
||||
//! Used by the obstacle detector, scene graph builder, and perception pipeline
|
||||
//! to avoid duplicating the same algorithm.
|
||||
//!
|
||||
//! ## Optimizations
|
||||
//!
|
||||
//! - Union-by-rank prevents tree degeneration, keeping `find` near O(α(n)).
|
||||
//! - Path halving in `find` for efficient path compression.
|
||||
//! - `#[inline]` on hot helpers to ensure inlining in tight loops.
|
||||
|
||||
use crate::bridge::{Point3D, PointCloud};
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Cluster a point cloud using spatial hashing and union-find over adjacent cells.
|
||||
///
|
||||
/// Points are binned into a 3-D grid with the given `cell_size`. Cells that
|
||||
/// share a face, edge, or corner (26-neighbourhood) are merged via union-find.
|
||||
/// Returns the resulting groups as separate point vectors.
|
||||
pub fn cluster_point_cloud(cloud: &PointCloud, cell_size: f64) -> Vec<Vec<Point3D>> {
|
||||
if cloud.points.is_empty() || cell_size <= 0.0 {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
// 1. Map each point to a grid cell.
|
||||
let mut cell_map: HashMap<(i64, i64, i64), Vec<usize>> = HashMap::new();
|
||||
for (idx, p) in cloud.points.iter().enumerate() {
|
||||
let key = cell_key(p, cell_size);
|
||||
cell_map.entry(key).or_default().push(idx);
|
||||
}
|
||||
|
||||
// 2. Build union-find over cells (with rank for balanced merges).
|
||||
let cells: Vec<(i64, i64, i64)> = cell_map.keys().copied().collect();
|
||||
let cell_count = cells.len();
|
||||
let cell_idx: HashMap<(i64, i64, i64), usize> = cells
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(i, &k)| (k, i))
|
||||
.collect();
|
||||
|
||||
let mut parent: Vec<usize> = (0..cell_count).collect();
|
||||
let mut rank: Vec<u8> = vec![0; cell_count];
|
||||
|
||||
for &(cx, cy, cz) in &cells {
|
||||
let a = cell_idx[&(cx, cy, cz)];
|
||||
for dx in -1..=1_i64 {
|
||||
for dy in -1..=1_i64 {
|
||||
for dz in -1..=1_i64 {
|
||||
let neighbor = (cx + dx, cy + dy, cz + dz);
|
||||
if let Some(&b) = cell_idx.get(&neighbor) {
|
||||
uf_union(&mut parent, &mut rank, a, b);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// 3. Group points by their root representative.
|
||||
let mut groups: HashMap<usize, Vec<Point3D>> = HashMap::new();
|
||||
for (key, point_indices) in &cell_map {
|
||||
let ci = cell_idx[key];
|
||||
let root = uf_find(&mut parent, ci);
|
||||
let entry = groups.entry(root).or_default();
|
||||
for &pi in point_indices {
|
||||
entry.push(cloud.points[pi]);
|
||||
}
|
||||
}
|
||||
|
||||
groups.into_values().collect()
|
||||
}
|
||||
|
||||
/// Compute the grid cell key for a point.
|
||||
#[inline]
|
||||
fn cell_key(p: &Point3D, cell_size: f64) -> (i64, i64, i64) {
|
||||
(
|
||||
(p.x as f64 / cell_size).floor() as i64,
|
||||
(p.y as f64 / cell_size).floor() as i64,
|
||||
(p.z as f64 / cell_size).floor() as i64,
|
||||
)
|
||||
}
|
||||
|
||||
/// Path-compressing find (path halving).
///
/// Walks up the parent chain to the root; along the way, each visited node is
/// re-pointed at its grandparent, which keeps trees shallow across calls.
#[inline]
fn uf_find(parent: &mut [usize], mut i: usize) -> usize {
    loop {
        let p = parent[i];
        if p == i {
            return i;
        }
        // Path halving: skip a level by pointing i at its grandparent.
        let grandparent = parent[p];
        parent[i] = grandparent;
        i = grandparent;
    }
}
|
||||
|
||||
/// Union by rank: attaches the shorter tree under the taller root.
|
||||
#[inline]
|
||||
fn uf_union(parent: &mut [usize], rank: &mut [u8], a: usize, b: usize) {
|
||||
let ra = uf_find(parent, a);
|
||||
let rb = uf_find(parent, b);
|
||||
if ra != rb {
|
||||
match rank[ra].cmp(&rank[rb]) {
|
||||
std::cmp::Ordering::Less => parent[ra] = rb,
|
||||
std::cmp::Ordering::Greater => parent[rb] = ra,
|
||||
std::cmp::Ordering::Equal => {
|
||||
parent[rb] = ra;
|
||||
rank[ra] += 1;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Build a `PointCloud` from raw xyz triples (timestamp fixed at 0).
    fn make_cloud(pts: &[[f32; 3]]) -> PointCloud {
        let points: Vec<Point3D> = pts.iter().map(|a| Point3D::new(a[0], a[1], a[2])).collect();
        PointCloud::new(points, 0)
    }

    // An empty cloud must yield no clusters (early-return path).
    #[test]
    fn test_empty_cloud() {
        let cloud = PointCloud::default();
        let clusters = cluster_point_cloud(&cloud, 1.0);
        assert!(clusters.is_empty());
    }

    // Three points within one cell radius of each other collapse into a
    // single cluster containing all three points.
    #[test]
    fn test_single_cluster() {
        let cloud = make_cloud(&[
            [1.0, 1.0, 0.0],
            [1.1, 1.0, 0.0],
            [1.0, 1.1, 0.0],
        ]);
        let clusters = cluster_point_cloud(&cloud, 0.5);
        assert_eq!(clusters.len(), 1);
        assert_eq!(clusters[0].len(), 3);
    }

    // Two well-separated pairs (far beyond a 0.5 m cell) stay distinct.
    #[test]
    fn test_two_clusters() {
        let cloud = make_cloud(&[
            [0.0, 0.0, 0.0],
            [0.1, 0.0, 0.0],
            [10.0, 10.0, 0.0],
            [10.1, 10.0, 0.0],
        ]);
        let clusters = cluster_point_cloud(&cloud, 0.5);
        assert_eq!(clusters.len(), 2);
    }

    // Negative coordinates exercise the floor-based binning: points around
    // (-1, -1) must not merge with a point at (1, 1).
    #[test]
    fn test_negative_coordinates() {
        let cloud = make_cloud(&[
            [-1.0, -1.0, 0.0],
            [-0.9, -1.0, 0.0],
            [1.0, 1.0, 0.0],
        ]);
        let clusters = cluster_point_cloud(&cloud, 0.5);
        assert_eq!(clusters.len(), 2);
    }
}
|
||||
175
crates/ruvector-robotics/src/perception/config.rs
Normal file
175
crates/ruvector-robotics/src/perception/config.rs
Normal file
@@ -0,0 +1,175 @@
|
||||
//! Configuration types for the perception pipeline.
|
||||
//!
|
||||
//! Provides tuning knobs for scene-graph construction, obstacle detection,
|
||||
//! and the top-level perception configuration that bundles them together.
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Scene-graph configuration
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Tuning parameters for scene-graph construction from point clouds.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct SceneGraphConfig {
    /// Maximum distance between two points to be considered part of the same
    /// cluster (metres).
    pub cluster_radius: f64,
    /// Minimum number of points required to form a valid cluster / object.
    pub min_cluster_size: usize,
    /// Hard cap on the number of objects the builder will emit.
    pub max_objects: usize,
    /// Maximum centre-to-centre distance for two objects to be connected by an
    /// edge in the scene graph (metres).
    pub edge_distance_threshold: f64,
}

impl Default for SceneGraphConfig {
    /// Conservative defaults: 0.5 m cluster radius, clusters of at least 3
    /// points, up to 256 objects, edges within 5 m.
    fn default() -> Self {
        Self {
            cluster_radius: 0.5,
            min_cluster_size: 3,
            max_objects: 256,
            edge_distance_threshold: 5.0,
        }
    }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Obstacle configuration
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Tuning parameters for the obstacle detector.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ObstacleConfig {
    /// Minimum number of points that must fall inside a cluster to be
    /// considered an obstacle.
    pub min_obstacle_size: usize,
    /// Maximum range from the robot within which obstacles are detected
    /// (metres).
    pub max_detection_range: f64,
    /// Extra padding added around every detected obstacle (metres).
    pub safety_margin: f64,
}

impl Default for ObstacleConfig {
    /// Defaults: clusters of at least 3 points, 20 m detection range,
    /// 0.2 m safety padding.
    fn default() -> Self {
        Self {
            min_obstacle_size: 3,
            max_detection_range: 20.0,
            safety_margin: 0.2,
        }
    }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Top-level perception configuration
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Aggregated configuration for the full perception pipeline.
#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize)]
pub struct PerceptionConfig {
    /// Scene-graph construction parameters.
    pub scene_graph: SceneGraphConfig,
    /// Obstacle-detector parameters.
    pub obstacle: ObstacleConfig,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Default values must match the documented constants exactly.
    #[test]
    fn test_scene_graph_config_defaults() {
        let cfg = SceneGraphConfig::default();
        assert!((cfg.cluster_radius - 0.5).abs() < f64::EPSILON);
        assert_eq!(cfg.min_cluster_size, 3);
        assert_eq!(cfg.max_objects, 256);
        assert!((cfg.edge_distance_threshold - 5.0).abs() < f64::EPSILON);
    }

    #[test]
    fn test_obstacle_config_defaults() {
        let cfg = ObstacleConfig::default();
        assert_eq!(cfg.min_obstacle_size, 3);
        assert!((cfg.max_detection_range - 20.0).abs() < f64::EPSILON);
        assert!((cfg.safety_margin - 0.2).abs() < f64::EPSILON);
    }

    // The derived Default on PerceptionConfig must delegate to the
    // sub-configs' Default impls.
    #[test]
    fn test_perception_config_defaults() {
        let cfg = PerceptionConfig::default();
        assert_eq!(cfg.scene_graph, SceneGraphConfig::default());
        assert_eq!(cfg.obstacle, ObstacleConfig::default());
    }

    // serde round-trips must be lossless for every config type.
    #[test]
    fn test_scene_graph_config_serde_roundtrip() {
        let cfg = SceneGraphConfig {
            cluster_radius: 1.0,
            min_cluster_size: 5,
            max_objects: 128,
            edge_distance_threshold: 3.0,
        };
        let json = serde_json::to_string(&cfg).unwrap();
        let restored: SceneGraphConfig = serde_json::from_str(&json).unwrap();
        assert_eq!(cfg, restored);
    }

    #[test]
    fn test_obstacle_config_serde_roundtrip() {
        let cfg = ObstacleConfig {
            min_obstacle_size: 10,
            max_detection_range: 50.0,
            safety_margin: 0.5,
        };
        let json = serde_json::to_string(&cfg).unwrap();
        let restored: ObstacleConfig = serde_json::from_str(&json).unwrap();
        assert_eq!(cfg, restored);
    }

    #[test]
    fn test_perception_config_serde_roundtrip() {
        let cfg = PerceptionConfig::default();
        let json = serde_json::to_string_pretty(&cfg).unwrap();
        let restored: PerceptionConfig = serde_json::from_str(&json).unwrap();
        assert_eq!(cfg, restored);
    }

    #[test]
    fn test_config_clone_equality() {
        let a = PerceptionConfig::default();
        let b = a.clone();
        assert_eq!(a, b);
    }

    // Debug output should name all three config types (derived Debug).
    #[test]
    fn test_config_debug_format() {
        let cfg = PerceptionConfig::default();
        let dbg = format!("{:?}", cfg);
        assert!(dbg.contains("PerceptionConfig"));
        assert!(dbg.contains("SceneGraphConfig"));
        assert!(dbg.contains("ObstacleConfig"));
    }

    // Non-default values must be stored verbatim (no clamping on construction).
    #[test]
    fn test_custom_perception_config() {
        let cfg = PerceptionConfig {
            scene_graph: SceneGraphConfig {
                cluster_radius: 2.0,
                min_cluster_size: 10,
                max_objects: 64,
                edge_distance_threshold: 10.0,
            },
            obstacle: ObstacleConfig {
                min_obstacle_size: 5,
                max_detection_range: 100.0,
                safety_margin: 1.0,
            },
        };
        assert!((cfg.scene_graph.cluster_radius - 2.0).abs() < f64::EPSILON);
        assert_eq!(cfg.obstacle.min_obstacle_size, 5);
    }
}
|
||||
782
crates/ruvector-robotics/src/perception/mod.rs
Normal file
782
crates/ruvector-robotics/src/perception/mod.rs
Normal file
@@ -0,0 +1,782 @@
|
||||
//! Perception subsystem: scene graph construction, obstacle detection, and pipeline.
|
||||
//!
|
||||
//! This module sits on top of [`crate::bridge`] types and provides higher-level
|
||||
//! perception building blocks used by the cognitive architecture.
|
||||
|
||||
pub mod clustering;
|
||||
pub mod config;
|
||||
pub mod obstacle_detector;
|
||||
pub mod scene_graph;
|
||||
pub mod sensor_fusion;
|
||||
|
||||
pub use config::{ObstacleConfig, PerceptionConfig, SceneGraphConfig};
|
||||
pub use obstacle_detector::{ClassifiedObstacle, DetectedObstacle, ObstacleClass, ObstacleDetector};
|
||||
pub use scene_graph::PointCloudSceneGraphBuilder;
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::bridge::{
|
||||
Obstacle, Point3D, PointCloud, SceneEdge, SceneGraph, SceneObject, Trajectory,
|
||||
};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Error type
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Errors emitted by perception pipeline operations.
#[derive(Debug, thiserror::Error)]
pub enum PerceptionError {
    /// The caller supplied an invalid argument (e.g. a non-positive radius,
    /// distance threshold, `dt`, or a zero step count).
    #[error("Invalid input: {0}")]
    InvalidInput(String),
    /// An internal processing step failed.
    // NOTE(review): not constructed anywhere in this module's visible code;
    // presumably reserved for downstream/extended processing — confirm.
    #[error("Processing failed: {0}")]
    ProcessingFailed(String),
}

/// Convenience alias used throughout the perception module.
pub type Result<T> = std::result::Result<T, PerceptionError>;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Anomaly type
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// A point-cloud anomaly detected via z-score outlier analysis.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Anomaly {
    /// World position of the anomalous point.
    pub position: [f64; 3],
    /// Absolute z-score of the point's distance from the cloud centroid.
    pub score: f64,
    /// Human-readable summary of the anomaly.
    pub description: String,
    /// Timestamp copied from the source cloud (`timestamp_us`).
    pub timestamp: i64,
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// SceneGraphBuilder
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Builds a [`SceneGraph`] from detected obstacles or raw point clouds.
///
/// The builder clusters scene objects, computes spatial edges between nearby
/// objects, and produces a timestamped scene graph.
#[derive(Debug, Clone)]
pub struct SceneGraphBuilder {
    // Maximum centre-to-centre distance (metres) for two objects to share an edge.
    edge_distance_threshold: f64,
    // Hard cap on how many objects a built graph may contain.
    max_objects: usize,
}

impl Default for SceneGraphBuilder {
    /// Defaults mirror [`SceneGraphConfig::default`]: 5 m edge threshold,
    /// at most 256 objects.
    fn default() -> Self {
        Self {
            edge_distance_threshold: 5.0,
            max_objects: 256,
        }
    }
}
|
||||
|
||||
impl SceneGraphBuilder {
|
||||
/// Create a new builder with explicit parameters.
|
||||
pub fn new(edge_distance_threshold: f64, max_objects: usize) -> Self {
|
||||
Self {
|
||||
edge_distance_threshold,
|
||||
max_objects,
|
||||
}
|
||||
}
|
||||
|
||||
/// Build a scene graph from a list of [`SceneObject`]s.
|
||||
///
|
||||
/// Edges are created between every pair of objects whose centers are within
|
||||
/// `edge_distance_threshold`.
|
||||
pub fn build(&self, mut objects: Vec<SceneObject>, timestamp: i64) -> SceneGraph {
|
||||
objects.truncate(self.max_objects);
|
||||
|
||||
let threshold_sq = self.edge_distance_threshold * self.edge_distance_threshold;
|
||||
let mut edges = Vec::new();
|
||||
for i in 0..objects.len() {
|
||||
for j in (i + 1)..objects.len() {
|
||||
let dx = objects[i].center[0] - objects[j].center[0];
|
||||
let dy = objects[i].center[1] - objects[j].center[1];
|
||||
let dz = objects[i].center[2] - objects[j].center[2];
|
||||
let dist_sq = dx * dx + dy * dy + dz * dz;
|
||||
if dist_sq <= threshold_sq {
|
||||
// Only compute sqrt for edges that pass the filter.
|
||||
let dist = dist_sq.sqrt();
|
||||
let relation = if dist < 1.0 {
|
||||
"adjacent".to_string()
|
||||
} else if dist < 3.0 {
|
||||
"near".to_string()
|
||||
} else {
|
||||
"visible".to_string()
|
||||
};
|
||||
edges.push(SceneEdge {
|
||||
from: objects[i].id,
|
||||
to: objects[j].id,
|
||||
distance: dist,
|
||||
relation,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
SceneGraph::new(objects, edges, timestamp)
|
||||
}
|
||||
|
||||
/// Build a scene graph from detected obstacles.
|
||||
pub fn build_from_obstacles(
|
||||
&self,
|
||||
obstacles: &[DetectedObstacle],
|
||||
timestamp: i64,
|
||||
) -> SceneGraph {
|
||||
let objects: Vec<SceneObject> = obstacles
|
||||
.iter()
|
||||
.enumerate()
|
||||
.map(|(i, obs)| {
|
||||
let mut obj = SceneObject::new(i, obs.center, obs.extent);
|
||||
obj.confidence = 1.0 - (obs.min_distance as f32 / 30.0).min(0.9);
|
||||
obj.label = format!("obstacle_{}", i);
|
||||
obj
|
||||
})
|
||||
.collect();
|
||||
self.build(objects, timestamp)
|
||||
}
|
||||
|
||||
/// Merge two scene graphs into one, re-computing edges.
|
||||
pub fn merge(&self, a: &SceneGraph, b: &SceneGraph) -> SceneGraph {
|
||||
let mut objects = a.objects.clone();
|
||||
let offset = objects.len();
|
||||
for obj in &b.objects {
|
||||
let mut new_obj = obj.clone();
|
||||
new_obj.id += offset;
|
||||
objects.push(new_obj);
|
||||
}
|
||||
let timestamp = a.timestamp.max(b.timestamp);
|
||||
self.build(objects, timestamp)
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// PerceptionPipeline
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// End-to-end perception pipeline that processes sensor frames into scene
/// graphs and obstacle lists.
///
/// Supports two construction modes:
/// - [`PerceptionPipeline::new`] for config-driven construction
/// - [`PerceptionPipeline::with_thresholds`] for threshold-driven
///   construction (obstacle cell-size and anomaly z-score)
#[derive(Debug, Clone)]
pub struct PerceptionPipeline {
    // Clusters point clouds into detected obstacles.
    detector: ObstacleDetector,
    // Turns obstacle lists / object lists into scene graphs.
    graph_builder: SceneGraphBuilder,
    // Monotonic count of frames seen by `process`.
    frames_processed: u64,
    // Clustering cell size (metres) used by `detect_obstacles`.
    obstacle_threshold: f64,
    // Z-score cut-off used by `detect_anomalies`.
    anomaly_threshold: f64,
}
|
||||
|
||||
impl PerceptionPipeline {
    /// Create a new pipeline with the given configuration.
    pub fn new(config: PerceptionConfig) -> Self {
        // Derive the clustering cell size from the safety margin; floored at
        // 0.5 below so a tiny margin cannot degenerate the spatial hash.
        let obstacle_threshold = config.obstacle.safety_margin * 5.0;
        let detector = ObstacleDetector::new(config.obstacle);
        let graph_builder = SceneGraphBuilder::new(
            config.scene_graph.edge_distance_threshold,
            config.scene_graph.max_objects,
        );
        Self {
            detector,
            graph_builder,
            frames_processed: 0,
            obstacle_threshold: obstacle_threshold.max(0.5),
            anomaly_threshold: 2.0,
        }
    }

    /// Create a pipeline from explicit thresholds.
    ///
    /// * `obstacle_threshold` -- clustering cell size for obstacle grouping.
    /// * `anomaly_threshold` -- z-score threshold for anomaly detection.
    ///
    /// Unlike [`PerceptionPipeline::new`], the thresholds are stored verbatim
    /// (no 0.5 floor); the detector and graph builder use default configs.
    pub fn with_thresholds(obstacle_threshold: f64, anomaly_threshold: f64) -> Self {
        use crate::perception::config::{ObstacleConfig, SceneGraphConfig};

        let obstacle_cfg = ObstacleConfig::default();
        let scene_cfg = SceneGraphConfig::default();
        let detector = ObstacleDetector::new(obstacle_cfg.clone());
        let graph_builder = SceneGraphBuilder::new(
            scene_cfg.edge_distance_threshold,
            scene_cfg.max_objects,
        );
        Self {
            detector,
            graph_builder,
            frames_processed: 0,
            obstacle_threshold,
            anomaly_threshold,
        }
    }

    /// Process a point cloud relative to a robot position, returning
    /// detected obstacles and a scene graph.
    ///
    /// Increments the frame counter on every call, even for empty clouds.
    pub fn process(
        &mut self,
        cloud: &PointCloud,
        robot_pos: &[f64; 3],
    ) -> (Vec<DetectedObstacle>, SceneGraph) {
        self.frames_processed += 1;
        let obstacles = self.detector.detect(cloud, robot_pos);
        // The graph inherits the cloud's timestamp.
        let graph = self.graph_builder.build_from_obstacles(&obstacles, cloud.timestamp_us);
        (obstacles, graph)
    }

    /// Classify previously detected obstacles.
    pub fn classify(
        &self,
        obstacles: &[DetectedObstacle],
    ) -> Vec<ClassifiedObstacle> {
        self.detector.classify_obstacles(obstacles)
    }

    /// Number of frames processed so far.
    pub fn frames_processed(&self) -> u64 {
        self.frames_processed
    }

    // -- Obstacle detection (bridge-level) ----------------------------------

    /// Detect obstacles in `cloud` relative to `robot_position`.
    ///
    /// Points further than `max_distance` from the robot are ignored.
    /// Returns bridge-level [`Obstacle`] values sorted by distance.
    ///
    /// Single-point clusters are discarded; obstacle ids are re-assigned
    /// after sorting so they are contiguous and ordered by distance.
    pub fn detect_obstacles(
        &self,
        cloud: &PointCloud,
        robot_position: [f64; 3],
        max_distance: f64,
    ) -> Result<Vec<Obstacle>> {
        if cloud.is_empty() {
            return Ok(Vec::new());
        }

        // Floor the cell size at 0.1 so with_thresholds-supplied values near
        // zero cannot produce a degenerate grid.
        let cell_size = self.obstacle_threshold.max(0.1);
        let clusters = clustering::cluster_point_cloud(cloud, cell_size);

        let mut obstacles: Vec<Obstacle> = Vec::new();
        let mut next_id: u64 = 0;

        for cluster in &clusters {
            // Ignore single-point clusters: too little evidence for an obstacle.
            if cluster.len() < 2 {
                continue;
            }

            let (center, radius) = Self::bounding_sphere(cluster);
            let dist = Self::dist_3d(&center, &robot_position);

            if dist > max_distance {
                continue;
            }

            // Confidence is the cluster's share of the whole cloud, kept in
            // [0.1, 1.0].
            let confidence = (cluster.len() as f32 / cloud.points.len() as f32)
                .clamp(0.1, 1.0);

            obstacles.push(Obstacle {
                id: next_id,
                position: center,
                distance: dist,
                radius,
                label: format!("obstacle_{}", next_id),
                confidence,
            });
            next_id += 1;
        }

        // Nearest obstacle first; NaN distances compare as equal.
        obstacles.sort_by(|a, b| {
            a.distance
                .partial_cmp(&b.distance)
                .unwrap_or(std::cmp::Ordering::Equal)
        });

        // Re-assign ids to match the sorted order. Note the labels keep their
        // pre-sort numbering.
        for (i, obs) in obstacles.iter_mut().enumerate() {
            obs.id = i as u64;
        }

        Ok(obstacles)
    }

    // -- Scene-graph construction -------------------------------------------

    /// Build a scene graph from pre-classified objects.
    ///
    /// Edges are created between objects whose centres are within
    /// `max_edge_distance`, labelled "adjacent" / "near" / "far".
    ///
    /// # Errors
    ///
    /// Returns [`PerceptionError::InvalidInput`] if `max_edge_distance` is
    /// not positive.
    pub fn build_scene_graph(
        &self,
        objects: &[SceneObject],
        max_edge_distance: f64,
    ) -> Result<SceneGraph> {
        if max_edge_distance <= 0.0 {
            return Err(PerceptionError::InvalidInput(
                "max_edge_distance must be positive".to_string(),
            ));
        }

        let mut edges: Vec<SceneEdge> = Vec::new();

        let max_dist_sq = max_edge_distance * max_edge_distance;
        for i in 0..objects.len() {
            for j in (i + 1)..objects.len() {
                let dx = objects[i].center[0] - objects[j].center[0];
                let dy = objects[i].center[1] - objects[j].center[1];
                let dz = objects[i].center[2] - objects[j].center[2];
                let d_sq = dx * dx + dy * dy + dz * dz;
                if d_sq <= max_dist_sq {
                    // sqrt only for pairs that pass the squared-distance filter.
                    let d = d_sq.sqrt();
                    // Relation buckets scale with the threshold (thirds),
                    // unlike SceneGraphBuilder::build's fixed 1 m / 3 m cuts.
                    let relation = if d < max_edge_distance * 0.33 {
                        "adjacent"
                    } else if d < max_edge_distance * 0.66 {
                        "near"
                    } else {
                        "far"
                    };
                    edges.push(SceneEdge {
                        from: objects[i].id,
                        to: objects[j].id,
                        distance: d,
                        relation: relation.to_string(),
                    });
                }
            }
        }

        Ok(SceneGraph::new(objects.to_vec(), edges, 0))
    }

    // -- Trajectory prediction ----------------------------------------------

    /// Predict a future trajectory via linear extrapolation.
    ///
    /// Returns a [`Trajectory`] with `steps` waypoints, each separated by
    /// `dt` seconds.
    ///
    /// # Errors
    ///
    /// Returns [`PerceptionError::InvalidInput`] if `steps` is zero or `dt`
    /// is not positive.
    pub fn predict_trajectory(
        &self,
        position: [f64; 3],
        velocity: [f64; 3],
        steps: usize,
        dt: f64,
    ) -> Result<Trajectory> {
        if steps == 0 {
            return Err(PerceptionError::InvalidInput(
                "steps must be > 0".to_string(),
            ));
        }
        if dt <= 0.0 {
            return Err(PerceptionError::InvalidInput(
                "dt must be positive".to_string(),
            ));
        }

        let mut waypoints = Vec::with_capacity(steps);
        let mut timestamps = Vec::with_capacity(steps);

        // Waypoints start one step in the future (i = 1..=steps).
        for i in 1..=steps {
            let t = i as f64 * dt;
            waypoints.push([
                position[0] + velocity[0] * t,
                position[1] + velocity[1] * t,
                position[2] + velocity[2] * t,
            ]);
            // Seconds -> microseconds.
            timestamps.push((t * 1_000_000.0) as i64);
        }

        // Confidence decays 10% per predicted second, floored at 0.1.
        let confidence = (1.0 - (steps as f64 * dt * 0.1)).max(0.1);
        Ok(Trajectory::new(waypoints, timestamps, confidence))
    }

    // -- Attention focusing -------------------------------------------------

    /// Filter points from `cloud` that lie within `radius` of `center`.
    ///
    /// # Errors
    ///
    /// Returns [`PerceptionError::InvalidInput`] if `radius` is not positive.
    pub fn focus_attention(
        &self,
        cloud: &PointCloud,
        center: [f64; 3],
        radius: f64,
    ) -> Result<Vec<Point3D>> {
        if radius <= 0.0 {
            return Err(PerceptionError::InvalidInput(
                "radius must be positive".to_string(),
            ));
        }

        // Compare squared distances to avoid a sqrt per point.
        let r2 = radius * radius;
        let focused: Vec<Point3D> = cloud
            .points
            .iter()
            .filter(|p| {
                let dx = p.x as f64 - center[0];
                let dy = p.y as f64 - center[1];
                let dz = p.z as f64 - center[2];
                dx * dx + dy * dy + dz * dz <= r2
            })
            .copied()
            .collect();

        Ok(focused)
    }

    // -- Anomaly detection --------------------------------------------------

    /// Detect anomalous points using z-score outlier analysis.
    ///
    /// For each point the distance from the cloud centroid is computed;
    /// points whose z-score exceeds `anomaly_threshold` are returned.
    ///
    /// Clouds with fewer than two points, or with (near-)zero spread in
    /// centroid distance, yield no anomalies.
    pub fn detect_anomalies(&self, cloud: &PointCloud) -> Result<Vec<Anomaly>> {
        if cloud.points.len() < 2 {
            return Ok(Vec::new());
        }

        let n = cloud.points.len() as f64;

        // Pass 1: compute centroid.
        let (mut cx, mut cy, mut cz) = (0.0_f64, 0.0_f64, 0.0_f64);
        for p in &cloud.points {
            cx += p.x as f64;
            cy += p.y as f64;
            cz += p.z as f64;
        }
        cx /= n;
        cy /= n;
        cz /= n;

        // Pass 2: compute distances and running mean + variance (Welford's).
        let mut distances: Vec<f64> = Vec::with_capacity(cloud.points.len());
        let mut w_mean = 0.0_f64;
        let mut w_m2 = 0.0_f64;
        for (i, p) in cloud.points.iter().enumerate() {
            let dx = p.x as f64 - cx;
            let dy = p.y as f64 - cy;
            let dz = p.z as f64 - cz;
            let d = (dx * dx + dy * dy + dz * dz).sqrt();
            distances.push(d);
            let delta = d - w_mean;
            w_mean += delta / (i + 1) as f64;
            w_m2 += delta * (d - w_mean);
        }

        // Population variance (divide by n, not n - 1).
        let variance = w_m2 / n;
        let std_dev = variance.sqrt();

        // Degenerate spread: every point is equidistant from the centroid,
        // so z-scores are undefined.
        if std_dev < f64::EPSILON {
            return Ok(Vec::new());
        }

        let mut anomalies = Vec::new();
        for (i, p) in cloud.points.iter().enumerate() {
            let z = (distances[i] - w_mean) / std_dev;
            if z.abs() > self.anomaly_threshold {
                anomalies.push(Anomaly {
                    position: [p.x as f64, p.y as f64, p.z as f64],
                    score: z.abs(),
                    description: format!(
                        "outlier at ({:.2}, {:.2}, {:.2}) z={:.2}",
                        p.x, p.y, p.z, z
                    ),
                    timestamp: cloud.timestamp_us,
                });
            }
        }

        Ok(anomalies)
    }

    // -- private helpers ----------------------------------------------------

    /// Centroid-based bounding sphere: returns the points' mean position and
    /// the distance from it to the farthest point. Not the minimal enclosing
    /// sphere, but a cheap O(n) approximation.
    fn bounding_sphere(points: &[Point3D]) -> ([f64; 3], f64) {
        debug_assert!(!points.is_empty(), "bounding_sphere called with empty slice");
        let n = points.len() as f64;
        let (mut sx, mut sy, mut sz) = (0.0_f64, 0.0_f64, 0.0_f64);
        for p in points {
            sx += p.x as f64;
            sy += p.y as f64;
            sz += p.z as f64;
        }
        let center = [sx / n, sy / n, sz / n];

        // Compare squared distances and take a single sqrt at the end.
        let radius_sq = points
            .iter()
            .map(|p| {
                let dx = p.x as f64 - center[0];
                let dy = p.y as f64 - center[1];
                let dz = p.z as f64 - center[2];
                dx * dx + dy * dy + dz * dz
            })
            .fold(0.0_f64, f64::max);

        (center, radius_sq.sqrt())
    }

    /// Euclidean distance between two 3-D points.
    #[inline]
    fn dist_3d(a: &[f64; 3], b: &[f64; 3]) -> f64 {
        ((a[0] - b[0]).powi(2) + (a[1] - b[1]).powi(2) + (a[2] - b[2]).powi(2)).sqrt()
    }
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::bridge::Point3D;
|
||||
|
||||
fn make_cloud(pts: &[[f32; 3]]) -> PointCloud {
|
||||
let points: Vec<Point3D> =
|
||||
pts.iter().map(|a| Point3D::new(a[0], a[1], a[2])).collect();
|
||||
PointCloud::new(points, 1000)
|
||||
}
|
||||
|
||||
// -- SceneGraphBuilder (inline) -----------------------------------------
|
||||
|
||||
#[test]
|
||||
fn test_scene_graph_builder_basic() {
|
||||
let builder = SceneGraphBuilder::default();
|
||||
let objects = vec![
|
||||
SceneObject::new(0, [0.0, 0.0, 0.0], [0.5, 0.5, 0.5]),
|
||||
SceneObject::new(1, [2.0, 0.0, 0.0], [0.5, 0.5, 0.5]),
|
||||
SceneObject::new(2, [100.0, 0.0, 0.0], [0.5, 0.5, 0.5]),
|
||||
];
|
||||
let graph = builder.build(objects, 0);
|
||||
assert_eq!(graph.objects.len(), 3);
|
||||
assert_eq!(graph.edges.len(), 1);
|
||||
assert_eq!(graph.edges[0].from, 0);
|
||||
assert_eq!(graph.edges[0].to, 1);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_scene_graph_builder_merge() {
|
||||
let builder = SceneGraphBuilder::new(10.0, 256);
|
||||
let a = SceneGraph::new(
|
||||
vec![SceneObject::new(0, [0.0, 0.0, 0.0], [0.5, 0.5, 0.5])],
|
||||
vec![],
|
||||
100,
|
||||
);
|
||||
let b = SceneGraph::new(
|
||||
vec![SceneObject::new(0, [1.0, 0.0, 0.0], [0.5, 0.5, 0.5])],
|
||||
vec![],
|
||||
200,
|
||||
);
|
||||
let merged = builder.merge(&a, &b);
|
||||
assert_eq!(merged.objects.len(), 2);
|
||||
assert_eq!(merged.timestamp, 200);
|
||||
assert!(!merged.edges.is_empty());
|
||||
}
|
||||
|
||||
// -- PerceptionPipeline.process (config-driven) -------------------------
|
||||
|
||||
#[test]
|
||||
fn test_perception_pipeline_process() {
|
||||
let config = PerceptionConfig::default();
|
||||
let mut pipeline = PerceptionPipeline::new(config);
|
||||
|
||||
let cloud = make_cloud(&[
|
||||
[1.0, 0.0, 0.0],
|
||||
[1.1, 0.0, 0.0],
|
||||
[1.2, 0.0, 0.0],
|
||||
[5.0, 5.0, 0.0],
|
||||
[5.1, 5.0, 0.0],
|
||||
[5.2, 5.0, 0.0],
|
||||
]);
|
||||
|
||||
let (obstacles, graph) = pipeline.process(&cloud, &[0.0, 0.0, 0.0]);
|
||||
assert!(!obstacles.is_empty());
|
||||
assert!(!graph.objects.is_empty());
|
||||
assert_eq!(pipeline.frames_processed(), 1);
|
||||
}
|
||||
|
||||
    // -- detect_obstacles ---------------------------------------------------

    // An empty cloud must produce an empty (but Ok) result.
    #[test]
    fn test_detect_obstacles_empty() {
        let pipe = PerceptionPipeline::with_thresholds(0.5, 2.0);
        let cloud = PointCloud::default();
        let result = pipe.detect_obstacles(&cloud, [0.0; 3], 10.0).unwrap();
        assert!(result.is_empty());
    }

    // Three close points at x≈2 form one obstacle roughly 2 units away,
    // with a non-empty classification label.
    #[test]
    fn test_detect_obstacles_single_cluster() {
        let pipe = PerceptionPipeline::with_thresholds(1.0, 2.0);
        let cloud = make_cloud(&[
            [2.0, 0.0, 0.0],
            [2.1, 0.0, 0.0],
            [2.0, 0.1, 0.0],
        ]);
        let obs = pipe.detect_obstacles(&cloud, [0.0; 3], 10.0).unwrap();
        assert_eq!(obs.len(), 1);
        assert!(obs[0].distance > 1.0);
        assert!(obs[0].distance < 3.0);
        assert!(!obs[0].label.is_empty());
    }

    // A cluster ~50 units away is beyond the 5.0 max range and is dropped.
    #[test]
    fn test_detect_obstacles_filters_distant() {
        let pipe = PerceptionPipeline::with_thresholds(1.0, 2.0);
        let cloud = make_cloud(&[
            [50.0, 0.0, 0.0],
            [50.1, 0.0, 0.0],
            [50.0, 0.1, 0.0],
        ]);
        let obs = pipe.detect_obstacles(&cloud, [0.0; 3], 5.0).unwrap();
        assert!(obs.is_empty());
    }
|
||||
|
||||
    // -- build_scene_graph --------------------------------------------------

    // Two objects 2.0 apart with a 5.0 threshold: exactly one edge.
    #[test]
    fn test_build_scene_graph_basic() {
        let pipe = PerceptionPipeline::with_thresholds(0.5, 2.0);
        let objects = vec![
            SceneObject::new(0, [0.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
            SceneObject::new(1, [2.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
        ];
        let graph = pipe.build_scene_graph(&objects, 5.0).unwrap();
        assert_eq!(graph.objects.len(), 2);
        assert_eq!(graph.edges.len(), 1);
    }

    // A negative edge-distance threshold is rejected with an error.
    #[test]
    fn test_build_scene_graph_invalid_distance() {
        let pipe = PerceptionPipeline::with_thresholds(0.5, 2.0);
        let result = pipe.build_scene_graph(&[], -1.0);
        assert!(result.is_err());
    }

    // Objects 100 apart exceed the 5.0 threshold, so no edges are created.
    #[test]
    fn test_build_scene_graph_no_edges() {
        let pipe = PerceptionPipeline::with_thresholds(0.5, 2.0);
        let objects = vec![
            SceneObject::new(0, [0.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
            SceneObject::new(1, [100.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
        ];
        let graph = pipe.build_scene_graph(&objects, 5.0).unwrap();
        assert!(graph.edges.is_empty());
    }
|
||||
|
||||
    // -- predict_trajectory -------------------------------------------------

    // Constant velocity (1, 0, 0) at dt = 1.0 gives waypoints at x = 1, 2, 3.
    #[test]
    fn test_predict_trajectory_linear() {
        let pipe = PerceptionPipeline::with_thresholds(0.5, 2.0);
        let traj = pipe
            .predict_trajectory([0.0, 0.0, 0.0], [1.0, 0.0, 0.0], 3, 1.0)
            .unwrap();
        assert_eq!(traj.len(), 3);
        assert!((traj.waypoints[0][0] - 1.0).abs() < 1e-9);
        assert!((traj.waypoints[1][0] - 2.0).abs() < 1e-9);
        assert!((traj.waypoints[2][0] - 3.0).abs() < 1e-9);
    }

    // Zero prediction steps is an invalid request.
    #[test]
    fn test_predict_trajectory_zero_steps() {
        let pipe = PerceptionPipeline::with_thresholds(0.5, 2.0);
        let result = pipe.predict_trajectory([0.0; 3], [1.0, 0.0, 0.0], 0, 1.0);
        assert!(result.is_err());
    }

    // A negative time step is an invalid request.
    #[test]
    fn test_predict_trajectory_negative_dt() {
        let pipe = PerceptionPipeline::with_thresholds(0.5, 2.0);
        let result = pipe.predict_trajectory([0.0; 3], [1.0, 0.0, 0.0], 5, -0.1);
        assert!(result.is_err());
    }
|
||||
|
||||
    // -- focus_attention ----------------------------------------------------

    // Radius 2.0 around the origin keeps the points at x = 0 and x = 1,
    // dropping the one at x = 10.
    #[test]
    fn test_focus_attention_filters() {
        let pipe = PerceptionPipeline::with_thresholds(0.5, 2.0);
        let cloud = make_cloud(&[
            [0.0, 0.0, 0.0],
            [1.0, 0.0, 0.0],
            [10.0, 0.0, 0.0],
        ]);
        let focused = pipe
            .focus_attention(&cloud, [0.0, 0.0, 0.0], 2.0)
            .unwrap();
        assert_eq!(focused.len(), 2);
    }

    // A negative attention radius is rejected with an error.
    #[test]
    fn test_focus_attention_invalid_radius() {
        let pipe = PerceptionPipeline::with_thresholds(0.5, 2.0);
        let cloud = PointCloud::default();
        let result = pipe.focus_attention(&cloud, [0.0; 3], -1.0);
        assert!(result.is_err());
    }
|
||||
|
||||
    // -- detect_anomalies ---------------------------------------------------

    // One point far from a tight line of 20 points must be reported with a
    // high anomaly score (> 2 sigma).
    #[test]
    fn test_detect_anomalies_outlier() {
        let pipe = PerceptionPipeline::with_thresholds(0.5, 2.0);
        let mut pts: Vec<[f32; 3]> =
            (0..20).map(|i| [i as f32 * 0.1, 0.0, 0.0]).collect();
        pts.push([100.0, 100.0, 100.0]);
        let cloud = make_cloud(&pts);
        let anomalies = pipe.detect_anomalies(&cloud).unwrap();
        assert!(!anomalies.is_empty());
        assert!(anomalies.iter().any(|a| a.score > 2.0));
    }

    // Identical points have zero spread, so nothing can be anomalous.
    #[test]
    fn test_detect_anomalies_no_outliers() {
        let pipe = PerceptionPipeline::with_thresholds(0.5, 2.0);
        let cloud = make_cloud(&[
            [1.0, 1.0, 1.0],
            [1.0, 1.0, 1.0],
            [1.0, 1.0, 1.0],
        ]);
        let anomalies = pipe.detect_anomalies(&cloud).unwrap();
        assert!(anomalies.is_empty());
    }

    // A single-point cloud has no statistics to compare against.
    #[test]
    fn test_detect_anomalies_small_cloud() {
        let pipe = PerceptionPipeline::with_thresholds(0.5, 2.0);
        let cloud = make_cloud(&[[1.0, 0.0, 0.0]]);
        let anomalies = pipe.detect_anomalies(&cloud).unwrap();
        assert!(anomalies.is_empty());
    }
|
||||
|
||||
    // -- edge cases & integration ------------------------------------------

    // The pipeline's Debug impl should at least include the type name.
    #[test]
    fn test_pipeline_debug() {
        let pipe = PerceptionPipeline::with_thresholds(0.5, 2.0);
        let dbg = format!("{:?}", pipe);
        assert!(dbg.contains("PerceptionPipeline"));
    }

    // With threshold 10, objects 1.0 apart (< 1/3 of threshold) should get
    // the "adjacent" relation.
    #[test]
    fn test_scene_graph_edge_relations() {
        let pipe = PerceptionPipeline::with_thresholds(0.5, 2.0);
        let objects = vec![
            SceneObject::new(0, [0.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
            SceneObject::new(1, [1.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
            SceneObject::new(2, [6.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
            SceneObject::new(3, [9.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
        ];
        let graph = pipe.build_scene_graph(&objects, 10.0).unwrap();
        let adj = graph.edges.iter().find(|e| e.from == 0 && e.to == 1);
        assert!(adj.is_some());
        assert_eq!(adj.unwrap().relation, "adjacent");
    }

    // Trajectory timestamps are expressed in microseconds.
    #[test]
    fn test_trajectory_timestamps_are_microseconds() {
        let pipe = PerceptionPipeline::with_thresholds(0.5, 2.0);
        let traj = pipe
            .predict_trajectory([0.0; 3], [1.0, 0.0, 0.0], 2, 0.5)
            .unwrap();
        // 0.5s = 500_000 us, 1.0s = 1_000_000 us
        assert_eq!(traj.timestamps[0], 500_000);
        assert_eq!(traj.timestamps[1], 1_000_000);
    }
|
||||
}
|
||||
376
crates/ruvector-robotics/src/perception/obstacle_detector.rs
Normal file
376
crates/ruvector-robotics/src/perception/obstacle_detector.rs
Normal file
@@ -0,0 +1,376 @@
|
||||
//! Obstacle detection from point clouds.
|
||||
//!
|
||||
//! Uses spatial-hash clustering to group nearby points into obstacle
|
||||
//! candidates, then filters and classifies them based on geometry.
|
||||
|
||||
use crate::bridge::{Point3D, PointCloud};
|
||||
use crate::perception::clustering;
|
||||
use crate::perception::config::ObstacleConfig;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Public types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Classification category for an obstacle.
///
/// Assigned by geometric heuristics only (extent aspect ratio); see
/// `ObstacleDetector::classify_obstacles` for the exact thresholds.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum ObstacleClass {
    /// Obstacle appears wall-like / elongated in at least one axis.
    Static,
    /// Compact obstacle that could be a moving object.
    Dynamic,
    /// Cannot determine class from geometry alone.
    Unknown,
}
|
||||
|
||||
/// Raw detection result before classification.
#[derive(Debug, Clone)]
pub struct DetectedObstacle {
    /// Centroid of the cluster.
    pub center: [f64; 3],
    /// Axis-aligned bounding-box half-extents (safety margin included).
    pub extent: [f64; 3],
    /// Number of points in the cluster.
    pub point_count: usize,
    /// Closest distance from the cluster centroid to the robot.
    pub min_distance: f64,
}
|
||||
|
||||
/// A detected obstacle with an attached classification.
#[derive(Debug, Clone)]
pub struct ClassifiedObstacle {
    /// The underlying geometric detection.
    pub obstacle: DetectedObstacle,
    /// Heuristic class assigned from the obstacle's extents.
    pub class: ObstacleClass,
    /// Heuristic confidence in `class`, in the range (0, 1].
    pub confidence: f32,
}
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Detector
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Detects and classifies obstacles from point-cloud data.
///
/// Stateless apart from its configuration; `detect` may be called on any
/// number of clouds.
#[derive(Debug, Clone)]
pub struct ObstacleDetector {
    /// Thresholds controlling clustering, filtering, and safety margins.
    config: ObstacleConfig,
}
|
||||
|
||||
impl ObstacleDetector {
|
||||
/// Create a new detector with the given configuration.
|
||||
pub fn new(config: ObstacleConfig) -> Self {
|
||||
Self { config }
|
||||
}
|
||||
|
||||
/// Detect obstacles in a point cloud relative to a robot position.
|
||||
///
|
||||
/// The algorithm:
|
||||
/// 1. Discretise points into a spatial hash grid (cell size =
|
||||
/// `safety_margin * 5`).
|
||||
/// 2. Group cells using a simple flood-fill on the 26-neighbourhood.
|
||||
/// 3. Filter clusters smaller than `min_obstacle_size`.
|
||||
/// 4. Compute bounding box and centroid per cluster.
|
||||
/// 5. Filter by `max_detection_range` from the robot.
|
||||
/// 6. Sort results by distance (ascending).
|
||||
pub fn detect(
|
||||
&self,
|
||||
cloud: &PointCloud,
|
||||
robot_pos: &[f64; 3],
|
||||
) -> Vec<DetectedObstacle> {
|
||||
if cloud.is_empty() {
|
||||
return Vec::new();
|
||||
}
|
||||
|
||||
let cell_size = (self.config.safety_margin * 5.0).max(0.5);
|
||||
let clusters = clustering::cluster_point_cloud(cloud, cell_size);
|
||||
|
||||
let mut obstacles: Vec<DetectedObstacle> = clusters
|
||||
.into_iter()
|
||||
.filter(|pts| pts.len() >= self.config.min_obstacle_size)
|
||||
.filter_map(|pts| self.cluster_to_obstacle(&pts, robot_pos))
|
||||
.filter(|o| o.min_distance <= self.config.max_detection_range)
|
||||
.collect();
|
||||
|
||||
obstacles.sort_by(|a, b| {
|
||||
a.min_distance
|
||||
.partial_cmp(&b.min_distance)
|
||||
.unwrap_or(std::cmp::Ordering::Equal)
|
||||
});
|
||||
|
||||
obstacles
|
||||
}
|
||||
|
||||
/// Classify a list of detected obstacles using simple geometric
|
||||
/// heuristics.
|
||||
///
|
||||
/// * **Static** -- the ratio of the largest to smallest extent is > 3
|
||||
/// (wall-like).
|
||||
/// * **Dynamic** -- the largest-to-smallest ratio is <= 2 (compact).
|
||||
/// * **Unknown** -- everything else.
|
||||
pub fn classify_obstacles(
|
||||
&self,
|
||||
obstacles: &[DetectedObstacle],
|
||||
) -> Vec<ClassifiedObstacle> {
|
||||
obstacles
|
||||
.iter()
|
||||
.map(|o| {
|
||||
let (class, confidence) = self.classify_single(o);
|
||||
ClassifiedObstacle {
|
||||
obstacle: o.clone(),
|
||||
class,
|
||||
confidence,
|
||||
}
|
||||
})
|
||||
.collect()
|
||||
}
|
||||
|
||||
// -- private helpers ----------------------------------------------------
|
||||
|
||||
fn cluster_to_obstacle(
|
||||
&self,
|
||||
points: &[Point3D],
|
||||
robot_pos: &[f64; 3],
|
||||
) -> Option<DetectedObstacle> {
|
||||
if points.is_empty() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let (mut min_x, mut min_y, mut min_z) = (f64::MAX, f64::MAX, f64::MAX);
|
||||
let (mut max_x, mut max_y, mut max_z) = (f64::MIN, f64::MIN, f64::MIN);
|
||||
let (mut sum_x, mut sum_y, mut sum_z) = (0.0_f64, 0.0_f64, 0.0_f64);
|
||||
|
||||
for p in points {
|
||||
let (px, py, pz) = (p.x as f64, p.y as f64, p.z as f64);
|
||||
min_x = min_x.min(px);
|
||||
min_y = min_y.min(py);
|
||||
min_z = min_z.min(pz);
|
||||
max_x = max_x.max(px);
|
||||
max_y = max_y.max(py);
|
||||
max_z = max_z.max(pz);
|
||||
sum_x += px;
|
||||
sum_y += py;
|
||||
sum_z += pz;
|
||||
}
|
||||
|
||||
let n = points.len() as f64;
|
||||
let center = [sum_x / n, sum_y / n, sum_z / n];
|
||||
let extent = [
|
||||
(max_x - min_x) / 2.0 + self.config.safety_margin,
|
||||
(max_y - min_y) / 2.0 + self.config.safety_margin,
|
||||
(max_z - min_z) / 2.0 + self.config.safety_margin,
|
||||
];
|
||||
|
||||
let dist = ((center[0] - robot_pos[0]).powi(2)
|
||||
+ (center[1] - robot_pos[1]).powi(2)
|
||||
+ (center[2] - robot_pos[2]).powi(2))
|
||||
.sqrt();
|
||||
|
||||
Some(DetectedObstacle {
|
||||
center,
|
||||
extent,
|
||||
point_count: points.len(),
|
||||
min_distance: dist,
|
||||
})
|
||||
}
|
||||
|
||||
fn classify_single(&self, obstacle: &DetectedObstacle) -> (ObstacleClass, f32) {
|
||||
let exts = &obstacle.extent;
|
||||
let max_ext = exts[0].max(exts[1]).max(exts[2]);
|
||||
let min_ext = exts[0].min(exts[1]).min(exts[2]);
|
||||
|
||||
if min_ext < f64::EPSILON {
|
||||
return (ObstacleClass::Unknown, 0.3);
|
||||
}
|
||||
|
||||
let ratio = max_ext / min_ext;
|
||||
|
||||
if ratio > 3.0 {
|
||||
// Elongated -- likely a wall or static structure.
|
||||
let confidence = (ratio / 10.0).min(1.0) as f32;
|
||||
(ObstacleClass::Static, confidence.max(0.6))
|
||||
} else if ratio <= 2.0 {
|
||||
// Compact -- possibly a moving object.
|
||||
let confidence = (1.0 - (ratio - 1.0) / 2.0).max(0.5) as f32;
|
||||
(ObstacleClass::Dynamic, confidence)
|
||||
} else {
|
||||
(ObstacleClass::Unknown, 0.4)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Build a `PointCloud` fixture (timestamp 0) from raw XYZ triples.
    fn make_cloud(raw: &[[f32; 3]]) -> PointCloud {
        let points: Vec<Point3D> = raw.iter().map(|p| Point3D::new(p[0], p[1], p[2])).collect();
        PointCloud::new(points, 0)
    }

    // Empty input yields no detections.
    #[test]
    fn test_detect_empty_cloud() {
        let det = ObstacleDetector::new(ObstacleConfig::default());
        let cloud = PointCloud::default();
        let result = det.detect(&cloud, &[0.0, 0.0, 0.0]);
        assert!(result.is_empty());
    }

    // Four tightly packed points form exactly one obstacle.
    #[test]
    fn test_detect_single_cluster() {
        let det = ObstacleDetector::new(ObstacleConfig {
            min_obstacle_size: 3,
            max_detection_range: 100.0,
            safety_margin: 0.1,
        });
        let cloud = make_cloud(&[
            [1.0, 1.0, 0.0],
            [1.1, 1.0, 0.0],
            [1.0, 1.1, 0.0],
            [1.1, 1.1, 0.0],
        ]);
        let result = det.detect(&cloud, &[0.0, 0.0, 0.0]);
        assert_eq!(result.len(), 1);
        assert!(result[0].min_distance > 0.0);
        assert_eq!(result[0].point_count, 4);
    }

    // Obstacles beyond max_detection_range are dropped.
    #[test]
    fn test_detect_filters_by_range() {
        let det = ObstacleDetector::new(ObstacleConfig {
            min_obstacle_size: 3,
            max_detection_range: 1.0,
            safety_margin: 0.1,
        });
        // Cluster at ~10 units away -- should be filtered out.
        let cloud = make_cloud(&[
            [10.0, 0.0, 0.0],
            [10.1, 0.0, 0.0],
            [10.0, 0.1, 0.0],
        ]);
        let result = det.detect(&cloud, &[0.0, 0.0, 0.0]);
        assert!(result.is_empty());
    }

    // Clusters below min_obstacle_size are dropped.
    #[test]
    fn test_detect_filters_small_clusters() {
        let det = ObstacleDetector::new(ObstacleConfig {
            min_obstacle_size: 5,
            max_detection_range: 100.0,
            safety_margin: 0.1,
        });
        // Only 3 points -- below minimum.
        let cloud = make_cloud(&[
            [1.0, 1.0, 0.0],
            [1.1, 1.0, 0.0],
            [1.0, 1.1, 0.0],
        ]);
        let result = det.detect(&cloud, &[0.0, 0.0, 0.0]);
        assert!(result.is_empty());
    }

    // Results come back sorted by ascending distance from the robot.
    #[test]
    fn test_detect_sorted_by_distance() {
        let det = ObstacleDetector::new(ObstacleConfig {
            min_obstacle_size: 3,
            max_detection_range: 100.0,
            safety_margin: 0.1,
        });
        let cloud = make_cloud(&[
            // Far cluster
            [10.0, 0.0, 0.0],
            [10.1, 0.0, 0.0],
            [10.0, 0.1, 0.0],
            // Near cluster
            [1.0, 0.0, 0.0],
            [1.1, 0.0, 0.0],
            [1.0, 0.1, 0.0],
        ]);
        let result = det.detect(&cloud, &[0.0, 0.0, 0.0]);
        assert!(result.len() >= 1);
        if result.len() >= 2 {
            assert!(result[0].min_distance <= result[1].min_distance);
        }
    }

    // Aspect ratio 20:1 exceeds the 3:1 static threshold.
    #[test]
    fn test_classify_static_obstacle() {
        let det = ObstacleDetector::new(ObstacleConfig::default());
        // Wall-like: very elongated in X, thin in Y and Z.
        let obstacle = DetectedObstacle {
            center: [5.0, 0.0, 0.0],
            extent: [10.0, 0.5, 0.5],
            point_count: 50,
            min_distance: 5.0,
        };
        let classified = det.classify_obstacles(&[obstacle]);
        assert_eq!(classified.len(), 1);
        assert_eq!(classified[0].class, ObstacleClass::Static);
        assert!(classified[0].confidence >= 0.5);
    }

    // Ratio 1:1 (cube) is well inside the <= 2 dynamic band.
    #[test]
    fn test_classify_dynamic_obstacle() {
        let det = ObstacleDetector::new(ObstacleConfig::default());
        // Compact: roughly equal extents.
        let obstacle = DetectedObstacle {
            center: [3.0, 0.0, 0.0],
            extent: [1.0, 1.0, 1.0],
            point_count: 20,
            min_distance: 3.0,
        };
        let classified = det.classify_obstacles(&[obstacle]);
        assert_eq!(classified.len(), 1);
        assert_eq!(classified[0].class, ObstacleClass::Dynamic);
    }

    // Ratio 3:1 falls in the ambiguous (2, 3] band -> Unknown.
    #[test]
    fn test_classify_unknown_obstacle() {
        let det = ObstacleDetector::new(ObstacleConfig::default());
        // Intermediate ratio.
        let obstacle = DetectedObstacle {
            center: [5.0, 0.0, 0.0],
            extent: [3.0, 1.1, 1.0],
            point_count: 15,
            min_distance: 5.0,
        };
        let classified = det.classify_obstacles(&[obstacle]);
        assert_eq!(classified.len(), 1);
        assert_eq!(classified[0].class, ObstacleClass::Unknown);
    }

    // Classification of an empty slice is an empty vector.
    #[test]
    fn test_classify_empty_list() {
        let det = ObstacleDetector::new(ObstacleConfig::default());
        let classified = det.classify_obstacles(&[]);
        assert!(classified.is_empty());
    }

    // Debug output should mention the type name.
    #[test]
    fn test_obstacle_detector_debug() {
        let det = ObstacleDetector::new(ObstacleConfig::default());
        let dbg = format!("{:?}", det);
        assert!(dbg.contains("ObstacleDetector"));
    }

    // Two clusters ~70 units apart from the robot each survive the
    // 200-unit range filter and stay separate.
    #[test]
    fn test_detect_two_separated_clusters() {
        let det = ObstacleDetector::new(ObstacleConfig {
            min_obstacle_size: 3,
            max_detection_range: 200.0,
            safety_margin: 0.1,
        });
        let cloud = make_cloud(&[
            // Cluster A around (0, 0, 0)
            [0.0, 0.0, 0.0],
            [0.1, 0.0, 0.0],
            [0.0, 0.1, 0.0],
            // Cluster B around (100, 100, 0) -- very far away
            [100.0, 100.0, 0.0],
            [100.1, 100.0, 0.0],
            [100.0, 100.1, 0.0],
        ]);
        let result = det.detect(&cloud, &[50.0, 50.0, 0.0]);
        assert_eq!(result.len(), 2);
    }
}
|
||||
373
crates/ruvector-robotics/src/perception/scene_graph.rs
Normal file
373
crates/ruvector-robotics/src/perception/scene_graph.rs
Normal file
@@ -0,0 +1,373 @@
|
||||
//! Scene-graph construction from point clouds and object lists.
|
||||
//!
|
||||
//! The [`SceneGraphBuilder`] turns raw sensor data into a structured
|
||||
//! [`SceneGraph`] of objects and spatial relationships.
|
||||
|
||||
use std::collections::HashMap;
|
||||
|
||||
use crate::bridge::{Point3D, PointCloud, SceneEdge, SceneGraph, SceneObject};
|
||||
use crate::perception::clustering;
|
||||
use crate::perception::config::SceneGraphConfig;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Builder
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/// Builds [`SceneGraph`] instances from point clouds or pre-classified
/// object lists using spatial-hash clustering with union-find.
///
/// Stateless apart from its configuration; safe to reuse across frames.
#[derive(Debug, Clone)]
pub struct PointCloudSceneGraphBuilder {
    /// Clustering radius, size limits, and edge-distance threshold.
    config: SceneGraphConfig,
}
|
||||
|
||||
impl PointCloudSceneGraphBuilder {
|
||||
/// Create a new builder with the given configuration.
|
||||
pub fn new(config: SceneGraphConfig) -> Self {
|
||||
Self { config }
|
||||
}
|
||||
|
||||
/// Build a scene graph by clustering a raw point cloud.
|
||||
///
|
||||
/// 1. Points are discretised into a spatial hash grid.
|
||||
/// 2. Adjacent cells are merged via union-find.
|
||||
/// 3. Each cluster above `min_cluster_size` becomes a `SceneObject`.
|
||||
/// 4. Edges are created between objects whose centres are within
|
||||
/// `edge_distance_threshold`.
|
||||
pub fn build_from_point_cloud(&self, cloud: &PointCloud) -> SceneGraph {
|
||||
if cloud.is_empty() {
|
||||
return SceneGraph::default();
|
||||
}
|
||||
|
||||
let clusters = clustering::cluster_point_cloud(cloud, self.config.cluster_radius);
|
||||
|
||||
// Convert clusters to SceneObjects (cap at max_objects).
|
||||
let mut objects: Vec<SceneObject> = clusters
|
||||
.into_iter()
|
||||
.filter(|pts| pts.len() >= self.config.min_cluster_size)
|
||||
.take(self.config.max_objects)
|
||||
.enumerate()
|
||||
.map(|(id, pts)| Self::cluster_to_object(id, &pts))
|
||||
.collect();
|
||||
|
||||
objects.sort_by(|a, b| a.id.cmp(&b.id));
|
||||
let edges = self.create_edges(&objects);
|
||||
|
||||
SceneGraph::new(objects, edges, cloud.timestamp_us)
|
||||
}
|
||||
|
||||
/// Build a scene graph from a pre-existing list of objects.
|
||||
///
|
||||
/// Edges are created between objects whose centres are within
|
||||
/// `edge_distance_threshold`.
|
||||
pub fn build_from_objects(&self, objects: &[SceneObject]) -> SceneGraph {
|
||||
let objects_vec: Vec<SceneObject> = objects
|
||||
.iter()
|
||||
.take(self.config.max_objects)
|
||||
.cloned()
|
||||
.collect();
|
||||
|
||||
let edges = self.create_edges(&objects_vec);
|
||||
SceneGraph::new(objects_vec, edges, 0)
|
||||
}
|
||||
|
||||
/// Merge multiple scene graphs into one, deduplicating objects that share
|
||||
/// the same `id`.
|
||||
pub fn merge_scenes(&self, scenes: &[SceneGraph]) -> SceneGraph {
|
||||
let mut seen_ids: HashMap<usize, SceneObject> = HashMap::new();
|
||||
let mut latest_ts: i64 = 0;
|
||||
|
||||
for scene in scenes {
|
||||
latest_ts = latest_ts.max(scene.timestamp);
|
||||
for obj in &scene.objects {
|
||||
// Keep the first occurrence of each id.
|
||||
seen_ids.entry(obj.id).or_insert_with(|| obj.clone());
|
||||
}
|
||||
}
|
||||
|
||||
let mut objects: Vec<SceneObject> = seen_ids.into_values().collect();
|
||||
objects.sort_by(|a, b| a.id.cmp(&b.id));
|
||||
|
||||
let truncated: Vec<SceneObject> = objects
|
||||
.into_iter()
|
||||
.take(self.config.max_objects)
|
||||
.collect();
|
||||
|
||||
let edges = self.create_edges(&truncated);
|
||||
SceneGraph::new(truncated, edges, latest_ts)
|
||||
}
|
||||
|
||||
// -- private helpers ----------------------------------------------------
|
||||
|
||||
fn cluster_to_object(id: usize, points: &[Point3D]) -> SceneObject {
|
||||
debug_assert!(!points.is_empty(), "cluster_to_object called with empty slice");
|
||||
let (mut min_x, mut min_y, mut min_z) = (f64::MAX, f64::MAX, f64::MAX);
|
||||
let (mut max_x, mut max_y, mut max_z) = (f64::MIN, f64::MIN, f64::MIN);
|
||||
let (mut sum_x, mut sum_y, mut sum_z) = (0.0_f64, 0.0_f64, 0.0_f64);
|
||||
|
||||
for p in points {
|
||||
let (px, py, pz) = (p.x as f64, p.y as f64, p.z as f64);
|
||||
min_x = min_x.min(px);
|
||||
min_y = min_y.min(py);
|
||||
min_z = min_z.min(pz);
|
||||
max_x = max_x.max(px);
|
||||
max_y = max_y.max(py);
|
||||
max_z = max_z.max(pz);
|
||||
sum_x += px;
|
||||
sum_y += py;
|
||||
sum_z += pz;
|
||||
}
|
||||
|
||||
let n = points.len() as f64;
|
||||
let center = [sum_x / n, sum_y / n, sum_z / n];
|
||||
let extent = [
|
||||
(max_x - min_x) / 2.0,
|
||||
(max_y - min_y) / 2.0,
|
||||
(max_z - min_z) / 2.0,
|
||||
];
|
||||
|
||||
SceneObject {
|
||||
id,
|
||||
center,
|
||||
extent,
|
||||
confidence: 1.0,
|
||||
label: format!("cluster_{}", id),
|
||||
velocity: None,
|
||||
}
|
||||
}
|
||||
|
||||
fn create_edges(&self, objects: &[SceneObject]) -> Vec<SceneEdge> {
|
||||
let mut edges = Vec::new();
|
||||
let threshold = self.config.edge_distance_threshold;
|
||||
|
||||
for i in 0..objects.len() {
|
||||
for j in (i + 1)..objects.len() {
|
||||
let d = Self::distance_3d(&objects[i].center, &objects[j].center);
|
||||
if d <= threshold {
|
||||
let relation = if d < threshold * 0.33 {
|
||||
"adjacent"
|
||||
} else if d < threshold * 0.66 {
|
||||
"near"
|
||||
} else {
|
||||
"far"
|
||||
};
|
||||
edges.push(SceneEdge {
|
||||
from: objects[i].id,
|
||||
to: objects[j].id,
|
||||
distance: d,
|
||||
relation: relation.to_string(),
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
edges
|
||||
}
|
||||
|
||||
fn distance_3d(a: &[f64; 3], b: &[f64; 3]) -> f64 {
|
||||
((a[0] - b[0]).powi(2) + (a[1] - b[1]).powi(2) + (a[2] - b[2]).powi(2)).sqrt()
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Tests
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Build a `PointCloud` fixture (timestamp 1000) from raw XYZ triples.
    fn make_cloud(raw: &[[f32; 3]]) -> PointCloud {
        let points: Vec<Point3D> = raw.iter().map(|p| Point3D::new(p[0], p[1], p[2])).collect();
        PointCloud::new(points, 1000)
    }

    // An empty cloud yields a default (empty) graph.
    #[test]
    fn test_empty_cloud() {
        let builder = PointCloudSceneGraphBuilder::new(SceneGraphConfig::default());
        let graph = builder.build_from_point_cloud(&PointCloud::default());
        assert!(graph.objects.is_empty());
        assert!(graph.edges.is_empty());
    }

    // Four tightly packed points form one object and therefore no edges.
    #[test]
    fn test_single_cluster() {
        let builder = PointCloudSceneGraphBuilder::new(SceneGraphConfig {
            cluster_radius: 1.0,
            min_cluster_size: 3,
            max_objects: 10,
            edge_distance_threshold: 5.0,
        });
        let cloud = make_cloud(&[
            [0.0, 0.0, 0.0],
            [0.1, 0.0, 0.0],
            [0.0, 0.1, 0.0],
            [0.1, 0.1, 0.0],
        ]);
        let graph = builder.build_from_point_cloud(&cloud);
        assert_eq!(graph.objects.len(), 1);
        assert!(graph.edges.is_empty()); // Only one object, no edges.
    }

    // Two clusters 10 apart with a 50-unit threshold must be connected.
    #[test]
    fn test_room_point_cloud() {
        // Simulate a room with two walls (clusters far apart).
        let builder = PointCloudSceneGraphBuilder::new(SceneGraphConfig {
            cluster_radius: 0.5,
            min_cluster_size: 3,
            max_objects: 10,
            edge_distance_threshold: 50.0,
        });

        let mut points = Vec::new();
        // Wall 1: cluster around (0, 0, 0)
        for i in 0..5 {
            points.push([i as f32 * 0.1, 0.0, 0.0]);
        }
        // Wall 2: cluster around (10, 0, 0)
        for i in 0..5 {
            points.push([10.0 + i as f32 * 0.1, 0.0, 0.0]);
        }

        let cloud = make_cloud(&points);
        let graph = builder.build_from_point_cloud(&cloud);
        assert_eq!(graph.objects.len(), 2);
        // Both walls should be connected since threshold is 50.
        assert!(!graph.edges.is_empty());
    }

    // Clusters far beyond the edge threshold stay unconnected.
    #[test]
    fn test_separated_clusters_no_edge() {
        let builder = PointCloudSceneGraphBuilder::new(SceneGraphConfig {
            cluster_radius: 0.5,
            min_cluster_size: 3,
            max_objects: 10,
            edge_distance_threshold: 2.0,
        });

        let cloud = make_cloud(&[
            // Cluster A
            [0.0, 0.0, 0.0],
            [0.1, 0.0, 0.0],
            [0.0, 0.1, 0.0],
            // Cluster B -- far away (100 units)
            [100.0, 0.0, 0.0],
            [100.1, 0.0, 0.0],
            [100.0, 0.1, 0.0],
        ]);

        let graph = builder.build_from_point_cloud(&cloud);
        assert_eq!(graph.objects.len(), 2);
        // Should NOT have edges -- clusters are 100 units apart, threshold is 2.
        assert!(graph.edges.is_empty());
    }

    // build_from_objects keeps all objects and links only nearby pairs.
    #[test]
    fn test_build_from_objects() {
        let builder = PointCloudSceneGraphBuilder::new(SceneGraphConfig {
            edge_distance_threshold: 5.0,
            ..SceneGraphConfig::default()
        });

        let objects = vec![
            SceneObject::new(0, [0.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
            SceneObject::new(1, [3.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
            SceneObject::new(2, [100.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
        ];

        let graph = builder.build_from_objects(&objects);
        assert_eq!(graph.objects.len(), 3);

        // Objects 0 and 1 are 3.0 apart (within threshold),
        // object 2 is 100.0 away (outside threshold).
        assert_eq!(graph.edges.len(), 1);
        assert_eq!(graph.edges[0].from, 0);
        assert_eq!(graph.edges[0].to, 1);
    }

    // Duplicate ids across merged graphs collapse to one object each.
    #[test]
    fn test_merge_deduplication() {
        let builder = PointCloudSceneGraphBuilder::new(SceneGraphConfig {
            edge_distance_threshold: 10.0,
            ..SceneGraphConfig::default()
        });

        let scene_a = SceneGraph::new(
            vec![
                SceneObject::new(0, [0.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
                SceneObject::new(1, [2.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
            ],
            vec![],
            100,
        );

        let scene_b = SceneGraph::new(
            vec![
                SceneObject::new(1, [2.0, 0.0, 0.0], [1.0, 1.0, 1.0]), // duplicate id
                SceneObject::new(2, [4.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
            ],
            vec![],
            200,
        );

        let merged = builder.merge_scenes(&[scene_a, scene_b]);
        // Should have 3 unique objects: ids 0, 1, 2.
        assert_eq!(merged.objects.len(), 3);
        assert_eq!(merged.timestamp, 200);
    }

    // Merging picks the maximum timestamp regardless of input order.
    #[test]
    fn test_merge_preserves_latest_timestamp() {
        let builder = PointCloudSceneGraphBuilder::new(SceneGraphConfig::default());
        let s1 = SceneGraph::new(vec![], vec![], 50);
        let s2 = SceneGraph::new(vec![], vec![], 300);
        let s3 = SceneGraph::new(vec![], vec![], 100);
        let merged = builder.merge_scenes(&[s1, s2, s3]);
        assert_eq!(merged.timestamp, 300);
    }

    // With threshold 30: d < 9.9 => "adjacent", d < 19.8 => "near",
    // otherwise (up to 30) => "far".
    #[test]
    fn test_edge_relations() {
        let builder = PointCloudSceneGraphBuilder::new(SceneGraphConfig {
            edge_distance_threshold: 30.0,
            ..SceneGraphConfig::default()
        });

        let objects = vec![
            SceneObject::new(0, [0.0, 0.0, 0.0], [1.0, 1.0, 1.0]),
            SceneObject::new(1, [5.0, 0.0, 0.0], [1.0, 1.0, 1.0]), // ~5 < 9.9 => adjacent
            SceneObject::new(2, [15.0, 0.0, 0.0], [1.0, 1.0, 1.0]), // ~15 < 19.8 => near
            SceneObject::new(3, [25.0, 0.0, 0.0], [1.0, 1.0, 1.0]), // ~25 < 30 => far
        ];

        let graph = builder.build_from_objects(&objects);

        // Check that adjacent relation exists for objects 0 and 1.
        let edge_0_1 = graph
            .edges
            .iter()
            .find(|e| e.from == 0 && e.to == 1);
        assert!(edge_0_1.is_some());
        assert_eq!(edge_0_1.unwrap().relation, "adjacent");
    }

    // max_objects caps the number of emitted objects.
    #[test]
    fn test_max_objects_cap() {
        let builder = PointCloudSceneGraphBuilder::new(SceneGraphConfig {
            cluster_radius: 0.5,
            min_cluster_size: 1,
            max_objects: 2,
            edge_distance_threshold: 100.0,
        });

        let cloud = make_cloud(&[
            [0.0, 0.0, 0.0],
            [50.0, 0.0, 0.0],
            [100.0, 0.0, 0.0],
        ]);

        let graph = builder.build_from_point_cloud(&cloud);
        // min_cluster_size=1, so each point is its own cluster.
        // max_objects=2, so at most 2 objects.
        assert!(graph.objects.len() <= 2);
    }
}
|
||||
189
crates/ruvector-robotics/src/perception/sensor_fusion.rs
Normal file
189
crates/ruvector-robotics/src/perception/sensor_fusion.rs
Normal file
@@ -0,0 +1,189 @@
|
||||
//! Multi-sensor point cloud fusion.
|
||||
//!
|
||||
//! Aligns and merges point clouds from multiple sensors into a single
|
||||
//! unified cloud, using nearest-timestamp matching and optional confidence
|
||||
//! weighting.
|
||||
|
||||
use crate::bridge::{Point3D, PointCloud};
|
||||
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Configuration for the sensor fusion module.
///
/// Controls the temporal gating, intensity weighting, and optional
/// voxel-grid down-sampling performed by `fuse_clouds`.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FusionConfig {
    /// Maximum timestamp delta (µs) for two frames to be considered
    /// synchronised. Frames further apart (relative to the first cloud in
    /// the slice, which acts as the reference) are discarded.
    pub max_time_delta_us: i64,
    /// Whether to apply confidence weighting based on point density.
    /// When enabled, each non-empty cloud's intensities are scaled by
    /// 1/sqrt(point count), so denser clouds contribute lower per-point weight.
    pub density_weighting: bool,
    /// Minimum voxel size for down-sampling the fused cloud. Set to 0.0
    /// to disable.
    pub voxel_size: f64,
}
||||
|
||||
impl Default for FusionConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
max_time_delta_us: 50_000, // 50 ms
|
||||
density_weighting: false,
|
||||
voxel_size: 0.0,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Fuse multiple point clouds into a single unified cloud.
|
||||
///
|
||||
/// Clouds whose timestamps are further than `config.max_time_delta_us` from
|
||||
/// the *reference* (first cloud in the slice) are skipped. When
|
||||
/// `voxel_size > 0`, the merged cloud is down-sampled via voxel-grid
|
||||
/// filtering.
|
||||
pub fn fuse_clouds(clouds: &[PointCloud], config: &FusionConfig) -> PointCloud {
|
||||
if clouds.is_empty() {
|
||||
return PointCloud::default();
|
||||
}
|
||||
|
||||
let reference_ts = clouds[0].timestamp_us;
|
||||
|
||||
// Pre-allocate merged vectors based on total point count of eligible clouds.
|
||||
let total_cap: usize = clouds
|
||||
.iter()
|
||||
.filter(|c| (c.timestamp_us - reference_ts).abs() <= config.max_time_delta_us)
|
||||
.map(|c| c.points.len())
|
||||
.sum();
|
||||
let mut merged_points: Vec<Point3D> = Vec::with_capacity(total_cap);
|
||||
let mut merged_intensities: Vec<f32> = Vec::with_capacity(total_cap);
|
||||
|
||||
for cloud in clouds {
|
||||
let dt = (cloud.timestamp_us - reference_ts).abs();
|
||||
if dt > config.max_time_delta_us {
|
||||
continue;
|
||||
}
|
||||
|
||||
merged_points.extend_from_slice(&cloud.points);
|
||||
|
||||
if config.density_weighting && !cloud.is_empty() {
|
||||
let weight = 1.0 / (cloud.points.len() as f32).sqrt();
|
||||
merged_intensities.extend(cloud.intensities.iter().map(|i| i * weight));
|
||||
} else {
|
||||
merged_intensities.extend_from_slice(&cloud.intensities);
|
||||
}
|
||||
}
|
||||
|
||||
if config.voxel_size > 0.0 && !merged_points.is_empty() {
|
||||
let (dp, di) = voxel_downsample(&merged_points, &merged_intensities, config.voxel_size);
|
||||
merged_points = dp;
|
||||
merged_intensities = di;
|
||||
}
|
||||
|
||||
let mut result = PointCloud::new(merged_points, reference_ts);
|
||||
result.intensities = merged_intensities;
|
||||
result
|
||||
}
|
||||
|
||||
/// Simple voxel grid down-sampling: keep one representative point per voxel.
|
||||
fn voxel_downsample(
|
||||
points: &[Point3D],
|
||||
intensities: &[f32],
|
||||
cell_size: f64,
|
||||
) -> (Vec<Point3D>, Vec<f32>) {
|
||||
use std::collections::HashMap;
|
||||
|
||||
let mut voxels: HashMap<(i64, i64, i64), (Point3D, f32, usize)> = HashMap::new();
|
||||
|
||||
for (i, p) in points.iter().enumerate() {
|
||||
let key = (
|
||||
(p.x as f64 / cell_size).floor() as i64,
|
||||
(p.y as f64 / cell_size).floor() as i64,
|
||||
(p.z as f64 / cell_size).floor() as i64,
|
||||
);
|
||||
let intensity = intensities.get(i).copied().unwrap_or(1.0);
|
||||
let entry = voxels.entry(key).or_insert((*p, intensity, 0));
|
||||
entry.2 += 1;
|
||||
// Running average position.
|
||||
let n = entry.2 as f32;
|
||||
entry.0.x = entry.0.x + (p.x - entry.0.x) / n;
|
||||
entry.0.y = entry.0.y + (p.y - entry.0.y) / n;
|
||||
entry.0.z = entry.0.z + (p.z - entry.0.z) / n;
|
||||
entry.1 = entry.1 + (intensity - entry.1) / n;
|
||||
}
|
||||
|
||||
let mut out_pts = Vec::with_capacity(voxels.len());
|
||||
let mut out_int = Vec::with_capacity(voxels.len());
|
||||
for (pt, inten, _) in voxels.into_values() {
|
||||
out_pts.push(pt);
|
||||
out_int.push(inten);
|
||||
}
|
||||
(out_pts, out_int)
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    /// Build a cloud at timestamp `ts` from raw xyz triples.
    fn make_cloud(pts: &[[f32; 3]], ts: i64) -> PointCloud {
        let points: Vec<Point3D> = pts.iter().map(|&[x, y, z]| Point3D::new(x, y, z)).collect();
        PointCloud::new(points, ts)
    }

    #[test]
    fn test_fuse_empty() {
        assert!(fuse_clouds(&[], &FusionConfig::default()).is_empty());
    }

    #[test]
    fn test_fuse_single() {
        let cloud = make_cloud(&[[1.0, 0.0, 0.0]], 1000);
        assert_eq!(fuse_clouds(&[cloud], &FusionConfig::default()).len(), 1);
    }

    #[test]
    fn test_fuse_two_clouds() {
        let first = make_cloud(&[[1.0, 0.0, 0.0]], 1000);
        let second = make_cloud(&[[2.0, 0.0, 0.0]], 1010);
        let fused = fuse_clouds(&[first, second], &FusionConfig::default());
        assert_eq!(fused.len(), 2);
    }

    #[test]
    fn test_fuse_skips_stale() {
        // The second frame is 100 ms from the reference; the window is 50 ms,
        // so only the first frame's point survives.
        let fresh = make_cloud(&[[1.0, 0.0, 0.0]], 0);
        let stale = make_cloud(&[[2.0, 0.0, 0.0]], 100_000);
        let config = FusionConfig { max_time_delta_us: 50_000, ..Default::default() };
        assert_eq!(fuse_clouds(&[fresh, stale], &config).len(), 1);
    }

    #[test]
    fn test_voxel_downsample() {
        // First two points share a 1 m voxel; the third lands in its own.
        let cloud = make_cloud(
            &[[0.0, 0.0, 0.0], [0.01, 0.01, 0.01], [5.0, 5.0, 5.0]],
            0,
        );
        let config = FusionConfig { voxel_size: 1.0, ..Default::default() };
        assert_eq!(fuse_clouds(&[cloud], &config).len(), 2);
    }

    #[test]
    fn test_density_weighting() {
        let cloud = make_cloud(&[[1.0, 0.0, 0.0]], 0);
        let config = FusionConfig { density_weighting: true, ..Default::default() };
        let fused = fuse_clouds(&[cloud], &config);
        assert_eq!(fused.len(), 1);
        // One point gives weight 1/sqrt(1) = 1.0, leaving intensity unchanged.
        assert!((fused.intensities[0] - 1.0).abs() < 1e-6);
    }

    #[test]
    fn test_fuse_preserves_timestamp() {
        let first = make_cloud(&[[1.0, 0.0, 0.0]], 5000);
        let second = make_cloud(&[[2.0, 0.0, 0.0]], 5010);
        let fused = fuse_clouds(&[first, second], &FusionConfig::default());
        assert_eq!(fused.timestamp_us, 5000);
    }
}
|
||||
Reference in New Issue
Block a user