Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'

This commit is contained in:
ruv
2026-02-28 14:39:40 -05:00
7854 changed files with 3522914 additions and 0 deletions

View File

@@ -0,0 +1,125 @@
//! Bridge configuration: distance metrics and tuning knobs.
use serde::{Deserialize, Serialize};
/// Distance metric used for spatial search operations.
#[derive(Debug, Clone, Copy, Default, PartialEq, Eq, Serialize, Deserialize)]
pub enum DistanceMetric {
    /// Straight-line (L2) distance. This is the default metric.
    #[default]
    Euclidean,
    /// Angular dissimilarity (1 - cosine similarity); ignores vector magnitude.
    Cosine,
    /// Sum of absolute per-axis differences (L1 / taxicab distance).
    Manhattan,
}
/// Top-level configuration for the robotics bridge.
///
/// Plain data: all fields are public and serde-serializable; see
/// `Default` for the stock values.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct BridgeConfig {
    /// Dimensionality of the vector space (typically 3 for XYZ).
    pub dimensions: usize,
    /// Metric used when computing distances.
    pub distance_metric: DistanceMetric,
    /// Maximum number of points the spatial index will accept.
    pub max_points: usize,
    /// Default *k* value for nearest-neighbour queries.
    pub search_k: usize,
}
impl Default for BridgeConfig {
    /// Sensible defaults: 3-D Euclidean space, up to one million points,
    /// and 10-nearest-neighbour queries.
    fn default() -> Self {
        let dimensions = 3;
        let distance_metric = DistanceMetric::Euclidean;
        let max_points = 1_000_000;
        let search_k = 10;
        Self { dimensions, distance_metric, max_points, search_k }
    }
}
impl BridgeConfig {
/// Create a new configuration with explicit values.
pub fn new(
dimensions: usize,
distance_metric: DistanceMetric,
max_points: usize,
search_k: usize,
) -> Self {
Self {
dimensions,
distance_metric,
max_points,
search_k,
}
}
}
#[cfg(test)]
mod tests {
    // Unit tests for `BridgeConfig` / `DistanceMetric`: defaults,
    // explicit construction, and serde round-trips.
    use super::*;
    #[test]
    fn test_default_distance_metric() {
        assert_eq!(DistanceMetric::default(), DistanceMetric::Euclidean);
    }
    #[test]
    fn test_default_bridge_config() {
        let cfg = BridgeConfig::default();
        assert_eq!(cfg.dimensions, 3);
        assert_eq!(cfg.distance_metric, DistanceMetric::Euclidean);
        assert_eq!(cfg.max_points, 1_000_000);
        assert_eq!(cfg.search_k, 10);
    }
    // Serialization must be lossless in both directions.
    #[test]
    fn test_config_serde_roundtrip_json() {
        let cfg = BridgeConfig::new(128, DistanceMetric::Cosine, 500_000, 20);
        let json = serde_json::to_string(&cfg).unwrap();
        let restored: BridgeConfig = serde_json::from_str(&json).unwrap();
        assert_eq!(cfg, restored);
    }
    #[test]
    fn test_config_serde_roundtrip_default() {
        let cfg = BridgeConfig::default();
        let json = serde_json::to_string_pretty(&cfg).unwrap();
        let restored: BridgeConfig = serde_json::from_str(&json).unwrap();
        assert_eq!(cfg, restored);
    }
    #[test]
    fn test_distance_metric_serde_variants() {
        // Every variant must survive a JSON round-trip.
        for metric in [
            DistanceMetric::Euclidean,
            DistanceMetric::Cosine,
            DistanceMetric::Manhattan,
        ] {
            let json = serde_json::to_string(&metric).unwrap();
            let restored: DistanceMetric = serde_json::from_str(&json).unwrap();
            assert_eq!(metric, restored);
        }
    }
    #[test]
    fn test_config_new() {
        let cfg = BridgeConfig::new(64, DistanceMetric::Manhattan, 250_000, 5);
        assert_eq!(cfg.dimensions, 64);
        assert_eq!(cfg.distance_metric, DistanceMetric::Manhattan);
        assert_eq!(cfg.max_points, 250_000);
        assert_eq!(cfg.search_k, 5);
    }
    #[test]
    fn test_config_clone_eq() {
        let a = BridgeConfig::default();
        let b = a.clone();
        assert_eq!(a, b);
    }
    #[test]
    fn test_config_debug_format() {
        // Debug output should name the type and the active metric variant.
        let cfg = BridgeConfig::default();
        let dbg = format!("{:?}", cfg);
        assert!(dbg.contains("BridgeConfig"));
        assert!(dbg.contains("Euclidean"));
    }
}

View File

@@ -0,0 +1,390 @@
//! Type conversion functions between robotics domain types and flat vector
//! representations suitable for indexing, serialization, or ML inference.
use crate::bridge::{OccupancyGrid, Point3D, PointCloud, Pose, RobotState, SceneGraph};
// Quaternion is used in tests for constructing Pose values.
#[cfg(test)]
use crate::bridge::Quaternion;
use std::fmt;
/// Errors that can occur during type conversions.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ConversionError {
    /// The input vector length does not match the expected dimensionality.
    /// `expected` is the required length, `got` the length actually seen.
    LengthMismatch { expected: usize, got: usize },
    /// The input collection was empty when a non-empty one was required.
    EmptyInput,
}
impl fmt::Display for ConversionError {
    /// Human-readable rendering used when the error is reported.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::EmptyInput => f.write_str("empty input"),
            Self::LengthMismatch { expected, got } => {
                write!(f, "length mismatch: expected {expected}, got {got}")
            }
        }
    }
}
// Marker impl: `Display` + `Debug` above provide everything `Error` needs.
impl std::error::Error for ConversionError {}
/// Convert a [`PointCloud`] to a `Vec` of `[x, y, z]` vectors.
pub fn point_cloud_to_vectors(cloud: &PointCloud) -> Vec<Vec<f32>> {
    let mut out = Vec::with_capacity(cloud.points.len());
    for p in &cloud.points {
        out.push(vec![p.x, p.y, p.z]);
    }
    out
}
/// Convert a [`PointCloud`] to `[x, y, z, intensity]` vectors.
///
/// Returns [`ConversionError::LengthMismatch`] when the intensity array length
/// does not match the number of points.
pub fn point_cloud_to_vectors_with_intensity(
    cloud: &PointCloud,
) -> Result<Vec<Vec<f32>>, ConversionError> {
    let (n_points, n_intensities) = (cloud.points.len(), cloud.intensities.len());
    if n_points != n_intensities {
        return Err(ConversionError::LengthMismatch {
            expected: n_points,
            got: n_intensities,
        });
    }
    let mut out = Vec::with_capacity(n_points);
    for (p, &intensity) in cloud.points.iter().zip(cloud.intensities.iter()) {
        out.push(vec![p.x, p.y, p.z, intensity]);
    }
    Ok(out)
}
/// Reconstruct a [`PointCloud`] from `[x, y, z]` vectors.
///
/// Each inner vector **must** have exactly 3 elements; the first offending
/// vector (in order) produces a [`ConversionError::LengthMismatch`].
pub fn vectors_to_point_cloud(
    vectors: &[Vec<f32>],
    timestamp: i64,
) -> Result<PointCloud, ConversionError> {
    if vectors.is_empty() {
        return Err(ConversionError::EmptyInput);
    }
    // `collect` over `Result` short-circuits on the first bad vector,
    // matching the original early-return behaviour.
    let points = vectors
        .iter()
        .map(|v| match v.as_slice() {
            [x, y, z] => Ok(Point3D::new(*x, *y, *z)),
            other => Err(ConversionError::LengthMismatch {
                expected: 3,
                got: other.len(),
            }),
        })
        .collect::<Result<Vec<_>, _>>()?;
    Ok(PointCloud::new(points, timestamp))
}
/// Flatten a [`RobotState`] into `[px, py, pz, vx, vy, vz, ax, ay, az]`.
pub fn robot_state_to_vector(state: &RobotState) -> Vec<f64> {
    state
        .position
        .iter()
        .chain(state.velocity.iter())
        .chain(state.acceleration.iter())
        .copied()
        .collect()
}
/// Reconstruct a [`RobotState`] from a 9-element vector and a timestamp.
///
/// Layout is `[px, py, pz, vx, vy, vz, ax, ay, az]`; any other length
/// yields [`ConversionError::LengthMismatch`].
pub fn vector_to_robot_state(
    v: &[f64],
    timestamp: i64,
) -> Result<RobotState, ConversionError> {
    match v {
        // Fixed-length slice pattern: binds all nine components at once.
        &[px, py, pz, vx, vy, vz, ax, ay, az] => Ok(RobotState {
            position: [px, py, pz],
            velocity: [vx, vy, vz],
            acceleration: [ax, ay, az],
            timestamp_us: timestamp,
        }),
        other => Err(ConversionError::LengthMismatch {
            expected: 9,
            got: other.len(),
        }),
    }
}
/// Flatten a [`Pose`] into `[px, py, pz, qx, qy, qz, qw]`.
pub fn pose_to_vector(pose: &Pose) -> Vec<f64> {
    let q = &pose.orientation;
    let mut out = Vec::with_capacity(7);
    out.extend_from_slice(&pose.position);
    out.extend_from_slice(&[q.x, q.y, q.z, q.w]);
    out
}
/// Extract occupied cells (value > 0.5) as `[world_x, world_y, value]` vectors.
pub fn occupancy_grid_to_vectors(grid: &OccupancyGrid) -> Vec<Vec<f32>> {
    // Hoist the invariant casts out of the per-cell loop.
    let res = grid.resolution as f32;
    let (origin_x, origin_y) = (grid.origin[0] as f32, grid.origin[1] as f32);
    let mut occupied = Vec::new();
    for y in 0..grid.height {
        for x in 0..grid.width {
            let val = grid.get(x, y).unwrap_or(0.0);
            if val > 0.5 {
                let wx = origin_x + x as f32 * res;
                let wy = origin_y + y as f32 * res;
                occupied.push(vec![wx, wy, val]);
            }
        }
    }
    occupied
}
/// Node feature matrix: one `[cx, cy, cz, ex, ey, ez, confidence]` row per object.
type NodeFeatures = Vec<Vec<f64>>;
/// Edge list: `(from_index, to_index, distance)` tuples.
type EdgeList = Vec<(usize, usize, f64)>;
/// Convert a [`SceneGraph`] into node feature vectors and an edge list.
///
/// Each node vector is `[cx, cy, cz, ex, ey, ez, confidence]`.
/// Each edge tuple is `(from_index, to_index, distance)`.
// NOTE: this doc block previously sat above the `NodeFeatures` alias, so
// rustdoc attached it to the alias instead of the function; moved here.
pub fn scene_graph_to_adjacency(
    scene: &SceneGraph,
) -> (NodeFeatures, EdgeList) {
    let nodes: Vec<Vec<f64>> = scene
        .objects
        .iter()
        .map(|o| {
            vec![
                o.center[0],
                o.center[1],
                o.center[2],
                o.extent[0],
                o.extent[1],
                o.extent[2],
                o.confidence as f64,
            ]
        })
        .collect();
    let edges: Vec<(usize, usize, f64)> = scene
        .edges
        .iter()
        .map(|e| (e.from, e.to, e.distance))
        .collect();
    (nodes, edges)
}
#[cfg(test)]
mod tests {
    // Unit tests for the converters: round-trips, error paths, and a
    // 10k-point scale check.
    use super::*;
    use crate::bridge::{OccupancyGrid, SceneEdge, SceneObject};
    // --- PointCloud <-> vector conversions ---
    #[test]
    fn test_point_cloud_to_vectors_basic() {
        let cloud = PointCloud::new(
            vec![Point3D::new(1.0, 2.0, 3.0), Point3D::new(4.0, 5.0, 6.0)],
            100,
        );
        let vecs = point_cloud_to_vectors(&cloud);
        assert_eq!(vecs.len(), 2);
        assert_eq!(vecs[0], vec![1.0, 2.0, 3.0]);
        assert_eq!(vecs[1], vec![4.0, 5.0, 6.0]);
    }
    #[test]
    fn test_point_cloud_to_vectors_empty() {
        let cloud = PointCloud::default();
        let vecs = point_cloud_to_vectors(&cloud);
        assert!(vecs.is_empty());
    }
    #[test]
    fn test_point_cloud_with_intensity_ok() {
        let cloud = PointCloud::new(vec![Point3D::new(1.0, 2.0, 3.0)], 0);
        let vecs = point_cloud_to_vectors_with_intensity(&cloud).unwrap();
        assert_eq!(vecs[0], vec![1.0, 2.0, 3.0, 1.0]);
    }
    #[test]
    fn test_point_cloud_with_intensity_mismatch() {
        let mut cloud = PointCloud::new(vec![Point3D::new(1.0, 2.0, 3.0)], 0);
        cloud.intensities = vec![];
        let err = point_cloud_to_vectors_with_intensity(&cloud).unwrap_err();
        assert_eq!(err, ConversionError::LengthMismatch { expected: 1, got: 0 });
    }
    #[test]
    fn test_vectors_to_point_cloud_ok() {
        let vecs = vec![vec![1.0, 2.0, 3.0], vec![4.0, 5.0, 6.0]];
        let cloud = vectors_to_point_cloud(&vecs, 42).unwrap();
        assert_eq!(cloud.len(), 2);
        assert_eq!(cloud.timestamp(), 42);
        assert_eq!(cloud.points[0].x, 1.0);
    }
    #[test]
    fn test_vectors_to_point_cloud_empty() {
        let vecs: Vec<Vec<f32>> = vec![];
        let err = vectors_to_point_cloud(&vecs, 0).unwrap_err();
        assert_eq!(err, ConversionError::EmptyInput);
    }
    #[test]
    fn test_vectors_to_point_cloud_wrong_dim() {
        let vecs = vec![vec![1.0, 2.0]];
        let err = vectors_to_point_cloud(&vecs, 0).unwrap_err();
        assert_eq!(err, ConversionError::LengthMismatch { expected: 3, got: 2 });
    }
    #[test]
    fn test_point_cloud_roundtrip() {
        let pts = vec![Point3D::new(1.0, 2.0, 3.0), Point3D::new(-1.0, 0.0, 5.5)];
        let original = PointCloud::new(pts, 999);
        let vecs = point_cloud_to_vectors(&original);
        let restored = vectors_to_point_cloud(&vecs, 999).unwrap();
        assert_eq!(restored.len(), original.len());
    }
    // --- RobotState conversions ---
    #[test]
    fn test_robot_state_to_vector() {
        let state = RobotState {
            position: [1.0, 2.0, 3.0],
            velocity: [4.0, 5.0, 6.0],
            acceleration: [7.0, 8.0, 9.0],
            timestamp_us: 0,
        };
        let v = robot_state_to_vector(&state);
        assert_eq!(v, vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0]);
    }
    #[test]
    fn test_vector_to_robot_state_ok() {
        let v = vec![1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0, 9.0];
        let state = vector_to_robot_state(&v, 123).unwrap();
        assert_eq!(state.position, [1.0, 2.0, 3.0]);
        assert_eq!(state.velocity, [4.0, 5.0, 6.0]);
        assert_eq!(state.acceleration, [7.0, 8.0, 9.0]);
        assert_eq!(state.timestamp_us, 123);
    }
    #[test]
    fn test_vector_to_robot_state_wrong_len() {
        let v = vec![1.0, 2.0, 3.0];
        let err = vector_to_robot_state(&v, 0).unwrap_err();
        assert_eq!(err, ConversionError::LengthMismatch { expected: 9, got: 3 });
    }
    #[test]
    fn test_robot_state_roundtrip() {
        let original = RobotState {
            position: [10.0, 20.0, 30.0],
            velocity: [-1.0, -2.0, -3.0],
            acceleration: [0.1, 0.2, 0.3],
            timestamp_us: 555,
        };
        let v = robot_state_to_vector(&original);
        let restored = vector_to_robot_state(&v, 555).unwrap();
        assert_eq!(original, restored);
    }
    // --- Pose conversions ---
    #[test]
    fn test_pose_to_vector() {
        let pose = Pose {
            position: [1.0, 2.0, 3.0],
            orientation: Quaternion::new(0.1, 0.2, 0.3, 0.9),
            frame_id: "map".into(),
        };
        let v = pose_to_vector(&pose);
        assert_eq!(v.len(), 7);
        assert!((v[0] - 1.0).abs() < f64::EPSILON);
        assert!((v[3] - 0.1).abs() < f64::EPSILON);
        assert!((v[6] - 0.9).abs() < f64::EPSILON);
    }
    #[test]
    fn test_pose_to_vector_identity() {
        let pose = Pose::default();
        let v = pose_to_vector(&pose);
        assert_eq!(v, vec![0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0]);
    }
    // --- OccupancyGrid conversions ---
    #[test]
    fn test_occupancy_grid_to_vectors_no_occupied() {
        let grid = OccupancyGrid::new(5, 5, 0.1);
        let vecs = occupancy_grid_to_vectors(&grid);
        assert!(vecs.is_empty());
    }
    #[test]
    fn test_occupancy_grid_to_vectors_some_occupied() {
        let mut grid = OccupancyGrid::new(3, 3, 0.5);
        grid.set(1, 2, 0.9);
        grid.set(0, 0, 0.3); // below threshold
        let vecs = occupancy_grid_to_vectors(&grid);
        assert_eq!(vecs.len(), 1);
        let v = &vecs[0];
        assert!((v[2] - 0.9).abs() < f32::EPSILON);
    }
    #[test]
    fn test_occupancy_grid_with_origin() {
        let mut grid = OccupancyGrid::new(2, 2, 1.0);
        grid.origin = [10.0, 20.0, 0.0];
        grid.set(1, 0, 0.8);
        let vecs = occupancy_grid_to_vectors(&grid);
        assert_eq!(vecs.len(), 1);
        assert!((vecs[0][0] - 11.0).abs() < f32::EPSILON); // wx = 10 + 1*1
        assert!((vecs[0][1] - 20.0).abs() < f32::EPSILON); // wy = 20 + 0*1
    }
    // --- SceneGraph conversions ---
    #[test]
    fn test_scene_graph_to_adjacency_empty() {
        let scene = SceneGraph::default();
        let (nodes, edges) = scene_graph_to_adjacency(&scene);
        assert!(nodes.is_empty());
        assert!(edges.is_empty());
    }
    #[test]
    fn test_scene_graph_to_adjacency() {
        let o1 = SceneObject::new(0, [1.0, 2.0, 3.0], [0.5, 0.5, 0.5]);
        let o2 = SceneObject::new(1, [4.0, 5.0, 6.0], [1.0, 1.0, 1.0]);
        let edge = SceneEdge {
            from: 0,
            to: 1,
            distance: 5.196,
            relation: "near".into(),
        };
        let scene = SceneGraph::new(vec![o1, o2], vec![edge], 0);
        let (nodes, edges) = scene_graph_to_adjacency(&scene);
        assert_eq!(nodes.len(), 2);
        assert_eq!(nodes[0].len(), 7);
        assert!((nodes[0][0] - 1.0).abs() < f64::EPSILON);
        assert!((nodes[0][6] - 1.0).abs() < f64::EPSILON); // confidence
        assert_eq!(edges.len(), 1);
        assert_eq!(edges[0].0, 0);
        assert_eq!(edges[0].1, 1);
    }
    // --- Scale and error formatting ---
    #[test]
    fn test_point_cloud_10k_roundtrip() {
        let points: Vec<Point3D> = (0..10_000)
            .map(|i| {
                let f = i as f32;
                Point3D::new(f * 0.1, f * 0.2, f * 0.3)
            })
            .collect();
        let cloud = PointCloud::new(points, 1_000_000);
        let vecs = point_cloud_to_vectors(&cloud);
        assert_eq!(vecs.len(), 10_000);
        let restored = vectors_to_point_cloud(&vecs, 1_000_000).unwrap();
        assert_eq!(restored.len(), 10_000);
    }
    #[test]
    fn test_conversion_error_display() {
        let e1 = ConversionError::LengthMismatch { expected: 3, got: 5 };
        assert!(format!("{e1}").contains("3") && format!("{e1}").contains("5"));
        assert!(format!("{}", ConversionError::EmptyInput).contains("empty"));
    }
}

View File

@@ -0,0 +1,272 @@
//! Gaussian splatting types and point-cloud-to-Gaussian conversion.
//!
//! Provides a [`GaussianSplat`] representation that maps each point cloud
//! cluster to a 3D Gaussian with position, colour, opacity, scale, and
//! optional temporal trajectory. The serialised format is compatible with
//! the `vwm-viewer` Canvas2D renderer.
use crate::bridge::{Point3D, PointCloud};
use crate::perception::clustering;
use serde::{Deserialize, Serialize};
/// A single 3-D Gaussian suitable for splatting-based rendering.
///
/// Produced from clustered point-cloud data by [`gaussians_from_cloud`] and
/// serialised for the viewer by [`to_viewer_json`].
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct GaussianSplat {
    /// Centre of the Gaussian in world coordinates.
    pub center: [f64; 3],
    /// RGB colour in \[0, 1\].
    pub color: [f32; 3],
    /// Opacity in \[0, 1\].
    pub opacity: f32,
    /// Anisotropic scale along each axis.
    pub scale: [f32; 3],
    /// Number of raw points that contributed to this Gaussian.
    pub point_count: usize,
    /// Semantic label (e.g. `"obstacle"`, `"ground"`); empty when unlabelled.
    pub label: String,
    /// Temporal trajectory: each entry is a position at a successive timestep.
    /// Empty for static Gaussians.
    pub trajectory: Vec<[f64; 3]>,
}
/// A collection of Gaussians derived from one or more point cloud frames.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GaussianSplatCloud {
    /// The Gaussians making up this cloud.
    pub gaussians: Vec<GaussianSplat>,
    /// Timestamp in microseconds.
    pub timestamp_us: i64,
    /// Coordinate frame identifier.
    pub frame_id: String,
}
impl GaussianSplatCloud {
    /// Number of Gaussians.
    pub fn len(&self) -> usize {
        self.gaussians.len()
    }
    /// Returns `true` when the cloud contains no Gaussians.
    pub fn is_empty(&self) -> bool {
        self.gaussians.is_empty()
    }
}
/// Configuration for point-cloud → Gaussian conversion.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct GaussianConfig {
    /// Clustering cell size in metres. Smaller = more Gaussians.
    /// A value <= 0 disables conversion (see [`gaussians_from_cloud`]).
    pub cell_size: f64,
    /// Minimum number of points to form a Gaussian.
    pub min_cluster_size: usize,
    /// Default colour for unlabelled Gaussians `[R, G, B]`.
    pub default_color: [f32; 3],
    /// Base opacity for generated Gaussians; scaled down for sparse clusters.
    pub base_opacity: f32,
}
impl Default for GaussianConfig {
    /// Stock settings: 0.5 m cells, clusters of at least 2 points,
    /// a muted blue at 0.7 base opacity.
    fn default() -> Self {
        Self {
            cell_size: 0.5,
            min_cluster_size: 2,
            default_color: [0.3, 0.5, 0.8],
            base_opacity: 0.7,
        }
    }
}
/// Convert a [`PointCloud`] into a [`GaussianSplatCloud`] by clustering nearby
/// points and computing per-cluster statistics.
///
/// An empty cloud or a non-positive `cell_size` yields an empty splat cloud
/// (timestamp and frame id are still carried over).
pub fn gaussians_from_cloud(
    cloud: &PointCloud,
    config: &GaussianConfig,
) -> GaussianSplatCloud {
    let gaussians = if cloud.is_empty() || config.cell_size <= 0.0 {
        Vec::new()
    } else {
        clustering::cluster_point_cloud(cloud, config.cell_size)
            .into_iter()
            .filter(|cluster| cluster.len() >= config.min_cluster_size)
            .map(|cluster| cluster_to_gaussian(&cluster, config))
            .collect()
    };
    GaussianSplatCloud {
        gaussians,
        timestamp_us: cloud.timestamp_us,
        frame_id: cloud.frame_id.clone(),
    }
}
/// Collapse one cluster of points into a single Gaussian: mean position,
/// per-axis standard deviation as scale, and density-scaled opacity.
fn cluster_to_gaussian(points: &[Point3D], config: &GaussianConfig) -> GaussianSplat {
    let n = points.len() as f64;
    // First pass: per-axis sums for the mean.
    let mut sum = [0.0_f64; 3];
    for p in points {
        sum[0] += p.x as f64;
        sum[1] += p.y as f64;
        sum[2] += p.z as f64;
    }
    let center = [sum[0] / n, sum[1] / n, sum[2] / n];
    // Second pass: per-axis variance around the mean.
    let mut var = [0.0_f64; 3];
    for p in points {
        let dx = p.x as f64 - center[0];
        let dy = p.y as f64 - center[1];
        let dz = p.z as f64 - center[2];
        var[0] += dx * dx;
        var[1] += dy * dy;
        var[2] += dz * dz;
    }
    // Standard deviation per axis, floored at 0.01 so the splat is visible.
    let scale = [
        (var[0] / n).sqrt().max(0.01) as f32,
        (var[1] / n).sqrt().max(0.01) as f32,
        (var[2] / n).sqrt().max(0.01) as f32,
    ];
    // Opacity proportional to cluster density, clamped to [0.1, base].
    let opacity = (config.base_opacity * (points.len() as f32 / 50.0).min(1.0)).max(0.1);
    GaussianSplat {
        center,
        color: config.default_color,
        opacity,
        scale,
        point_count: points.len(),
        label: String::new(),
        trajectory: Vec::new(),
    }
}
/// Serialise a [`GaussianSplatCloud`] to the JSON format expected by the
/// `vwm-viewer` Canvas2D renderer.
pub fn to_viewer_json(cloud: &GaussianSplatCloud) -> serde_json::Value {
let gs: Vec<serde_json::Value> = cloud
.gaussians
.iter()
.map(|g| {
let positions: Vec<Vec<f64>> = if g.trajectory.is_empty() {
vec![g.center.to_vec()]
} else {
g.trajectory.iter().map(|p| p.to_vec()).collect()
};
serde_json::json!({
"positions": positions,
"color": g.color,
"opacity": g.opacity,
"scale": g.scale,
"label": g.label,
"point_count": g.point_count,
})
})
.collect();
serde_json::json!({
"gaussians": gs,
"timestamp_us": cloud.timestamp_us,
"frame_id": cloud.frame_id,
"count": cloud.len(),
})
}
#[cfg(test)]
mod tests {
    // Unit tests for the point-cloud → Gaussian pipeline and its JSON output.
    use super::*;
    /// Build a `PointCloud` from raw `[x, y, z]` triples.
    fn make_cloud(pts: &[[f32; 3]], ts: i64) -> PointCloud {
        let points: Vec<Point3D> = pts.iter().map(|a| Point3D::new(a[0], a[1], a[2])).collect();
        PointCloud::new(points, ts)
    }
    #[test]
    fn test_empty_cloud() {
        let cloud = PointCloud::default();
        let gs = gaussians_from_cloud(&cloud, &GaussianConfig::default());
        assert!(gs.is_empty());
    }
    #[test]
    fn test_single_cluster() {
        let cloud = make_cloud(
            &[[1.0, 0.0, 0.0], [1.1, 0.0, 0.0], [1.0, 0.1, 0.0]],
            1000,
        );
        let gs = gaussians_from_cloud(&cloud, &GaussianConfig::default());
        assert_eq!(gs.len(), 1);
        let g = &gs.gaussians[0];
        assert_eq!(g.point_count, 3);
        // The centroid must lie inside the cluster's spatial extent.
        assert!(g.center[0] > 0.9 && g.center[0] < 1.2);
    }
    #[test]
    fn test_two_clusters() {
        let cloud = make_cloud(
            &[
                [0.0, 0.0, 0.0], [0.1, 0.0, 0.0],
                [10.0, 10.0, 0.0], [10.1, 10.0, 0.0],
            ],
            2000,
        );
        let gs = gaussians_from_cloud(&cloud, &GaussianConfig::default());
        assert_eq!(gs.len(), 2);
    }
    #[test]
    fn test_min_cluster_size_filtering() {
        // Two isolated single-point clusters, both below the minimum size.
        let cloud = make_cloud(
            &[[0.0, 0.0, 0.0], [10.0, 10.0, 0.0]],
            0,
        );
        let config = GaussianConfig { min_cluster_size: 3, ..Default::default() };
        let gs = gaussians_from_cloud(&cloud, &config);
        assert!(gs.is_empty());
    }
    #[test]
    fn test_scale_reflects_spread() {
        // Use a larger cell size so all three points end up in one cluster.
        let cloud = make_cloud(
            &[[0.0, 0.0, 0.0], [0.3, 0.0, 0.0], [0.15, 0.0, 0.0]],
            0,
        );
        let gs = gaussians_from_cloud(&cloud, &GaussianConfig::default());
        assert_eq!(gs.len(), 1);
        let g = &gs.gaussians[0];
        // X-axis spread > Y/Z spread (Y/Z should be clamped minimum 0.01).
        assert!(g.scale[0] > g.scale[1]);
    }
    #[test]
    fn test_viewer_json_format() {
        let cloud = make_cloud(&[[1.0, 2.0, 3.0], [1.1, 2.0, 3.0]], 5000);
        let gs = gaussians_from_cloud(&cloud, &GaussianConfig::default());
        let json = to_viewer_json(&gs);
        assert_eq!(json["count"], 1);
        assert_eq!(json["timestamp_us"], 5000);
        let arr = json["gaussians"].as_array().unwrap();
        assert_eq!(arr.len(), 1);
        assert!(arr[0]["positions"].is_array());
        assert!(arr[0]["color"].is_array());
    }
    #[test]
    fn test_serde_roundtrip() {
        let cloud = make_cloud(&[[0.0, 0.0, 0.0], [0.1, 0.1, 0.0]], 0);
        let gs = gaussians_from_cloud(&cloud, &GaussianConfig::default());
        let json = serde_json::to_string(&gs).unwrap();
        let restored: GaussianSplatCloud = serde_json::from_str(&json).unwrap();
        assert_eq!(restored.len(), gs.len());
    }
    #[test]
    fn test_zero_cell_size() {
        // A non-positive cell size cannot define a clustering grid.
        let cloud = make_cloud(&[[1.0, 0.0, 0.0]], 0);
        let config = GaussianConfig { cell_size: 0.0, ..Default::default() };
        let gs = gaussians_from_cloud(&cloud, &config);
        assert!(gs.is_empty());
    }
}

View File

@@ -0,0 +1,506 @@
//! Flat spatial index with brute-force nearest-neighbour and radius search.
//!
//! Supports Euclidean, Manhattan, and Cosine distance metrics. Designed as a
//! lightweight, dependency-free baseline that can be swapped for an HNSW
//! implementation when the dataset outgrows brute-force search.
//!
//! ## Optimizations
//!
//! - Points stored in a flat `Vec<f32>` buffer (stride = dimensions) for cache
//! locality and zero per-point heap allocation.
//! - Euclidean kNN uses **squared** distances for comparison, deferring the
//! `sqrt` to only the final `k` results.
//! - Cosine distance computed in a single fused loop (dot, norm_a, norm_b).
use crate::bridge::config::DistanceMetric;
use crate::bridge::PointCloud;
#[cfg(test)]
use crate::bridge::Point3D;
use std::cmp::Ordering;
use std::collections::BinaryHeap;
use std::fmt;
/// Entry for the max-heap used by kNN search.
///
/// Ordered by `distance` so that the [`BinaryHeap`] (a max-heap) keeps the
/// *worst* current candidate on top, ready to be evicted when a closer
/// point is found.
#[derive(PartialEq)]
struct MaxDistEntry {
    index: usize,
    distance: f32,
}
// NOTE(review): `PartialEq` is derived, so `NaN != NaN`, yet `Eq` is asserted
// below. `Ord` defines a total order that handles NaN, which is what the heap
// actually uses, but the `Eq` claim is technically unsound for NaN — confirm
// this is acceptable.
impl Eq for MaxDistEntry {}
impl PartialOrd for MaxDistEntry {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        // Delegate to the total order defined in `Ord`.
        Some(self.cmp(other))
    }
}
impl Ord for MaxDistEntry {
    fn cmp(&self, other: &Self) -> Ordering {
        // Larger distance = higher priority in the max-heap.
        // NaN is treated as maximally distant so it gets evicted first.
        self.distance
            .partial_cmp(&other.distance)
            .unwrap_or_else(|| {
                // `partial_cmp` on f32 only returns `None` when at least one
                // side is NaN; disambiguate the three NaN cases explicitly.
                if self.distance.is_nan() && other.distance.is_nan() {
                    Ordering::Equal
                } else if self.distance.is_nan() {
                    Ordering::Greater
                } else {
                    Ordering::Less
                }
            })
    }
}
/// Errors returned by [`SpatialIndex`] operations.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum IndexError {
    /// Query or insertion vector has a different dimensionality than the index.
    DimensionMismatch { expected: usize, got: usize },
    /// The index contains no points.
    EmptyIndex,
}
impl fmt::Display for IndexError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::DimensionMismatch { expected, got } => {
                write!(f, "dimension mismatch: expected {expected}, got {got}")
            }
            Self::EmptyIndex => write!(f, "index is empty"),
        }
    }
}
// Marker impl: `Display` + `Debug` above satisfy the `Error` requirements.
impl std::error::Error for IndexError {}
/// Squared Euclidean distance (avoids sqrt for comparison-only use).
#[inline]
fn euclidean_distance_sq(a: &[f32], b: &[f32]) -> f32 {
    let mut acc = 0.0_f32;
    for (x, y) in a.iter().zip(b.iter()) {
        let diff = x - y;
        acc += diff * diff;
    }
    acc
}
/// True Euclidean (L2) distance: square root of the squared form.
#[inline]
fn euclidean_distance(a: &[f32], b: &[f32]) -> f32 {
    euclidean_distance_sq(a, b).sqrt()
}
/// Manhattan (L1) distance: sum of absolute per-axis differences.
#[inline]
fn manhattan_distance(a: &[f32], b: &[f32]) -> f32 {
    let mut total = 0.0_f32;
    for (x, y) in a.iter().zip(b.iter()) {
        total += (x - y).abs();
    }
    total
}
/// Cosine distance in a single fused loop (dot + both norms together).
#[inline]
fn cosine_distance(a: &[f32], b: &[f32]) -> f32 {
    let mut dot = 0.0_f32;
    let mut norm_a = 0.0_f32;
    let mut norm_b = 0.0_f32;
    a.iter().zip(b.iter()).for_each(|(x, y)| {
        dot += x * y;
        norm_a += x * x;
        norm_b += y * y;
    });
    let denom = norm_a.sqrt() * norm_b.sqrt();
    if denom < f32::EPSILON {
        // Zero-vectors are maximally dissimilar.
        1.0
    } else {
        // Clamp guards against floating-point drift outside [-1, 1].
        (1.0 - (dot / denom).clamp(-1.0, 1.0)).max(0.0)
    }
}
/// A flat spatial index that stores points in a contiguous `f32` buffer.
///
/// Points are stored in a flat `Vec<f32>` with stride equal to `dimensions`,
/// eliminating per-point heap allocations and improving cache locality.
/// Search is brute-force (see module docs for the intended HNSW upgrade path).
#[derive(Debug, Clone)]
pub struct SpatialIndex {
    // Stride of the flat buffer; fixed at construction.
    dimensions: usize,
    // Distance metric applied by all searches.
    metric: DistanceMetric,
    /// Flat buffer: point `i` occupies `data[i*dimensions .. (i+1)*dimensions]`.
    data: Vec<f32>,
}
impl SpatialIndex {
    /// Create a new index with the given dimensionality and Euclidean metric.
    pub fn new(dimensions: usize) -> Self {
        Self {
            dimensions,
            metric: DistanceMetric::Euclidean,
            data: Vec::new(),
        }
    }
    /// Create a new index with an explicit distance metric.
    pub fn with_metric(dimensions: usize, metric: DistanceMetric) -> Self {
        Self {
            dimensions,
            metric,
            data: Vec::new(),
        }
    }
    /// Insert all points from a [`PointCloud`] into the index.
    ///
    /// Point clouds are 3-dimensional, so this is a no-op unless the index
    /// was created with `dimensions == 3`. (A previous revision appended the
    /// triples unconditionally, which misaligned the flat buffer on any
    /// non-3-D index and silently corrupted every subsequent search.) The
    /// silent skip mirrors the contract of [`Self::insert_vectors`].
    pub fn insert_point_cloud(&mut self, cloud: &PointCloud) {
        if self.dimensions != 3 {
            return;
        }
        self.data.reserve(cloud.points.len() * 3);
        for p in &cloud.points {
            self.data.extend_from_slice(&[p.x, p.y, p.z]);
        }
    }
    /// Insert pre-built vectors. Vectors whose length does not match the
    /// index dimensionality are silently skipped.
    pub fn insert_vectors(&mut self, vectors: &[Vec<f32>]) {
        for v in vectors {
            if v.len() == self.dimensions {
                self.data.extend_from_slice(v);
            }
        }
    }
    /// Find the `k` nearest neighbours to `query`.
    ///
    /// Returns `(index, distance)` pairs sorted by ascending distance.
    /// Uses a max-heap of size `k` for O(n log k) instead of O(n log n).
    ///
    /// For the Euclidean metric, squared distances are used internally and
    /// only the final `k` results are square-rooted.
    ///
    /// # Errors
    ///
    /// * [`IndexError::EmptyIndex`] when the index holds no points.
    /// * [`IndexError::DimensionMismatch`] when `query.len()` differs from
    ///   the index dimensionality.
    pub fn search_nearest(
        &self,
        query: &[f32],
        k: usize,
    ) -> Result<Vec<(usize, f32)>, IndexError> {
        let n = self.len();
        if n == 0 {
            return Err(IndexError::EmptyIndex);
        }
        if query.len() != self.dimensions {
            return Err(IndexError::DimensionMismatch {
                expected: self.dimensions,
                got: query.len(),
            });
        }
        // Euclidean comparisons can use squared distances; sqrt is deferred
        // to the final k results.
        let use_sq = matches!(self.metric, DistanceMetric::Euclidean);
        let mut heap: BinaryHeap<MaxDistEntry> = BinaryHeap::with_capacity(k + 1);
        let dim = self.dimensions;
        for i in 0..n {
            let p = &self.data[i * dim..(i + 1) * dim];
            let d = if use_sq {
                euclidean_distance_sq(query, p)
            } else {
                self.compute_distance(query, p)
            };
            if heap.len() < k {
                heap.push(MaxDistEntry {
                    index: i,
                    distance: d,
                });
            } else if let Some(top) = heap.peek() {
                // Evict the current worst candidate only when this point
                // is strictly closer.
                if d < top.distance {
                    heap.pop();
                    heap.push(MaxDistEntry {
                        index: i,
                        distance: d,
                    });
                }
            }
        }
        let mut result: Vec<(usize, f32)> = heap
            .into_iter()
            .map(|e| {
                let dist = if use_sq { e.distance.sqrt() } else { e.distance };
                (e.index, dist)
            })
            .collect();
        result.sort_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(Ordering::Equal));
        Ok(result)
    }
    /// Find all points within `radius` of `center`.
    ///
    /// Returns `(index, distance)` pairs in arbitrary order. An empty index
    /// yields an empty result rather than an error.
    /// For the Euclidean metric, squared distances are compared against
    /// `radius²` internally.
    ///
    /// # Errors
    ///
    /// [`IndexError::DimensionMismatch`] when `center.len()` differs from
    /// the index dimensionality.
    pub fn search_radius(
        &self,
        center: &[f32],
        radius: f32,
    ) -> Result<Vec<(usize, f32)>, IndexError> {
        let n = self.len();
        if n == 0 {
            return Ok(Vec::new());
        }
        if center.len() != self.dimensions {
            return Err(IndexError::DimensionMismatch {
                expected: self.dimensions,
                got: center.len(),
            });
        }
        let use_sq = matches!(self.metric, DistanceMetric::Euclidean);
        let threshold = if use_sq { radius * radius } else { radius };
        let dim = self.dimensions;
        let mut results = Vec::new();
        for i in 0..n {
            let p = &self.data[i * dim..(i + 1) * dim];
            let d = if use_sq {
                euclidean_distance_sq(center, p)
            } else {
                self.compute_distance(center, p)
            };
            if d <= threshold {
                let dist = if use_sq { d.sqrt() } else { d };
                results.push((i, dist));
            }
        }
        Ok(results)
    }
    /// Number of points stored in the index.
    pub fn len(&self) -> usize {
        if self.dimensions == 0 {
            0
        } else {
            self.data.len() / self.dimensions
        }
    }
    /// Returns `true` if the index contains no points.
    pub fn is_empty(&self) -> bool {
        self.data.is_empty()
    }
    /// Remove all points from the index.
    pub fn clear(&mut self) {
        self.data.clear();
    }
    /// The dimensionality of this index.
    pub fn dimensions(&self) -> usize {
        self.dimensions
    }
    /// The distance metric in use.
    pub fn metric(&self) -> DistanceMetric {
        self.metric
    }
    /// Dispatch to the metric-specific distance function.
    #[inline]
    fn compute_distance(&self, a: &[f32], b: &[f32]) -> f32 {
        match self.metric {
            DistanceMetric::Euclidean => euclidean_distance(a, b),
            DistanceMetric::Manhattan => manhattan_distance(a, b),
            DistanceMetric::Cosine => cosine_distance(a, b),
        }
    }
}
#[cfg(test)]
mod tests {
    // Unit tests for `SpatialIndex`: construction, insertion, kNN and
    // radius search, distance primitives, and a 10k-point scale check.
    use super::*;
    /// Build a 3-D `PointCloud` from raw `[x, y, z]` triples.
    fn make_cloud(pts: &[[f32; 3]]) -> PointCloud {
        let points: Vec<Point3D> = pts.iter().map(|p| Point3D::new(p[0], p[1], p[2])).collect();
        PointCloud::new(points, 0)
    }
    // --- Construction & insertion ---
    #[test]
    fn test_new_index_is_empty() {
        let idx = SpatialIndex::new(3);
        assert!(idx.is_empty());
        assert_eq!(idx.len(), 0);
        assert_eq!(idx.dimensions(), 3);
    }
    #[test]
    fn test_with_metric() {
        let idx = SpatialIndex::with_metric(4, DistanceMetric::Cosine);
        assert_eq!(idx.metric(), DistanceMetric::Cosine);
        assert_eq!(idx.dimensions(), 4);
    }
    #[test]
    fn test_insert_point_cloud() {
        let mut idx = SpatialIndex::new(3);
        let cloud = make_cloud(&[[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]]);
        idx.insert_point_cloud(&cloud);
        assert_eq!(idx.len(), 2);
    }
    #[test]
    fn test_insert_vectors() {
        let mut idx = SpatialIndex::new(3);
        idx.insert_vectors(&[vec![1.0, 2.0, 3.0], vec![4.0, 5.0, 6.0]]);
        assert_eq!(idx.len(), 2);
    }
    #[test]
    fn test_insert_vectors_skips_wrong_dim() {
        let mut idx = SpatialIndex::new(3);
        idx.insert_vectors(&[vec![1.0, 2.0], vec![4.0, 5.0, 6.0]]);
        assert_eq!(idx.len(), 1); // only the 3-d vector was inserted
    }
    // --- Nearest-neighbour search ---
    #[test]
    fn test_search_nearest_basic() {
        let mut idx = SpatialIndex::new(3);
        idx.insert_vectors(&[
            vec![0.0, 0.0, 0.0],
            vec![1.0, 0.0, 0.0],
            vec![10.0, 0.0, 0.0],
        ]);
        let results = idx.search_nearest(&[0.5, 0.0, 0.0], 2).unwrap();
        assert_eq!(results.len(), 2);
        // closest should be index 0 or 1
        assert!(results[0].0 == 0 || results[0].0 == 1);
    }
    #[test]
    fn test_search_nearest_returns_sorted() {
        let mut idx = SpatialIndex::new(3);
        idx.insert_vectors(&[
            vec![10.0, 0.0, 0.0],
            vec![1.0, 0.0, 0.0],
            vec![5.0, 0.0, 0.0],
        ]);
        let results = idx.search_nearest(&[0.0, 0.0, 0.0], 3).unwrap();
        assert!(results[0].1 <= results[1].1);
        assert!(results[1].1 <= results[2].1);
    }
    #[test]
    fn test_search_nearest_k_larger_than_len() {
        let mut idx = SpatialIndex::new(3);
        idx.insert_vectors(&[vec![1.0, 2.0, 3.0]]);
        let results = idx.search_nearest(&[0.0, 0.0, 0.0], 10).unwrap();
        assert_eq!(results.len(), 1);
    }
    #[test]
    fn test_search_nearest_empty_index() {
        let idx = SpatialIndex::new(3);
        let err = idx.search_nearest(&[0.0, 0.0, 0.0], 1).unwrap_err();
        assert_eq!(err, IndexError::EmptyIndex);
    }
    #[test]
    fn test_search_nearest_dim_mismatch() {
        let mut idx = SpatialIndex::new(3);
        idx.insert_vectors(&[vec![1.0, 2.0, 3.0]]);
        let err = idx.search_nearest(&[0.0, 0.0], 1).unwrap_err();
        assert_eq!(err, IndexError::DimensionMismatch { expected: 3, got: 2 });
    }
    // --- Radius search ---
    #[test]
    fn test_search_radius_basic() {
        let mut idx = SpatialIndex::new(3);
        idx.insert_vectors(&[
            vec![0.0, 0.0, 0.0],
            vec![1.0, 0.0, 0.0],
            vec![10.0, 0.0, 0.0],
        ]);
        let results = idx.search_radius(&[0.0, 0.0, 0.0], 1.5).unwrap();
        assert_eq!(results.len(), 2); // indices 0 and 1
    }
    #[test]
    fn test_search_radius_empty_index() {
        let idx = SpatialIndex::new(3);
        let results = idx.search_radius(&[0.0, 0.0, 0.0], 1.0).unwrap();
        assert!(results.is_empty());
    }
    #[test]
    fn test_search_radius_no_results() {
        let mut idx = SpatialIndex::new(3);
        idx.insert_vectors(&[vec![100.0, 100.0, 100.0]]);
        let results = idx.search_radius(&[0.0, 0.0, 0.0], 1.0).unwrap();
        assert!(results.is_empty());
    }
    #[test]
    fn test_clear() {
        let mut idx = SpatialIndex::new(3);
        idx.insert_vectors(&[vec![1.0, 2.0, 3.0]]);
        assert!(!idx.is_empty());
        idx.clear();
        assert!(idx.is_empty());
        assert_eq!(idx.len(), 0);
    }
    // --- Distance primitives ---
    #[test]
    fn test_euclidean_distance() {
        let d = euclidean_distance(&[0.0, 0.0, 0.0], &[3.0, 4.0, 0.0]);
        assert!((d - 5.0).abs() < 1e-5);
    }
    #[test]
    fn test_manhattan_distance() {
        let d = manhattan_distance(&[0.0, 0.0, 0.0], &[3.0, 4.0, 1.0]);
        assert!((d - 8.0).abs() < 1e-5);
    }
    #[test]
    fn test_cosine_distance_identical() {
        let d = cosine_distance(&[1.0, 0.0, 0.0], &[1.0, 0.0, 0.0]);
        assert!(d.abs() < 1e-5);
    }
    #[test]
    fn test_cosine_distance_orthogonal() {
        let d = cosine_distance(&[1.0, 0.0, 0.0], &[0.0, 1.0, 0.0]);
        assert!((d - 1.0).abs() < 1e-5);
    }
    #[test]
    fn test_cosine_distance_zero_vector() {
        let d = cosine_distance(&[0.0, 0.0, 0.0], &[1.0, 2.0, 3.0]);
        assert!((d - 1.0).abs() < 1e-5);
    }
    #[test]
    fn test_manhattan_metric_search() {
        let mut idx = SpatialIndex::with_metric(3, DistanceMetric::Manhattan);
        idx.insert_vectors(&[
            vec![0.0, 0.0, 0.0],
            vec![1.0, 1.0, 1.0],
            vec![10.0, 10.0, 10.0],
        ]);
        let results = idx.search_nearest(&[0.0, 0.0, 0.0], 1).unwrap();
        assert_eq!(results[0].0, 0);
        assert!(results[0].1.abs() < 1e-5);
    }
    // --- Scale & formatting ---
    #[test]
    fn test_stress_10k_points() {
        let mut idx = SpatialIndex::new(3);
        let vecs: Vec<Vec<f32>> = (0..10_000)
            .map(|i| vec![i as f32 * 0.01, i as f32 * 0.02, i as f32 * 0.03])
            .collect();
        idx.insert_vectors(&vecs);
        assert_eq!(idx.len(), 10_000);
        let results = idx.search_nearest(&[0.0, 0.0, 0.0], 5).unwrap();
        assert_eq!(results.len(), 5);
        assert_eq!(results[0].0, 0);
    }
    #[test]
    fn test_index_error_display() {
        let e = IndexError::DimensionMismatch { expected: 3, got: 5 };
        assert!(format!("{e}").contains("3"));
        assert!(format!("{}", IndexError::EmptyIndex).contains("empty"));
    }
}

View File

@@ -0,0 +1,383 @@
//! Core robotics types, converters, spatial indexing, and perception pipeline.
//!
//! This module provides the foundational types that all other robotics modules
//! build upon: point clouds, robot state, scene graphs, poses, and trajectories.
pub mod config;
pub mod converters;
pub mod gaussian;
pub mod indexing;
pub mod pipeline;
pub mod search;
use serde::{Deserialize, Serialize};
// Re-exports
pub use config::{BridgeConfig, DistanceMetric};
pub use converters::ConversionError;
pub use gaussian::{GaussianConfig, GaussianSplat, GaussianSplatCloud};
pub use indexing::{IndexError, SpatialIndex};
pub use pipeline::{PerceptionResult, PipelineConfig, PipelineStats};
pub use search::{AlertSeverity, Neighbor, ObstacleAlert, SearchResult};
// ---------------------------------------------------------------------------
// Core types
// ---------------------------------------------------------------------------
/// 3D point used in point clouds and spatial operations.
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub struct Point3D {
    pub x: f32,
    pub y: f32,
    pub z: f32,
}
impl Point3D {
    /// Construct a point from its three coordinates.
    pub fn new(x: f32, y: f32, z: f32) -> Self {
        Self { x, y, z }
    }
    /// Euclidean distance between `self` and `other`.
    pub fn distance_to(&self, other: &Point3D) -> f32 {
        let (dx, dy, dz) = (self.x - other.x, self.y - other.y, self.z - other.z);
        (dx * dx + dy * dy + dz * dz).sqrt()
    }
    /// Widen the coordinates into an `[f64; 3]` array.
    pub fn as_f64_array(&self) -> [f64; 3] {
        [f64::from(self.x), f64::from(self.y), f64::from(self.z)]
    }
    /// Build a point from an `[f64; 3]`, narrowing each coordinate to `f32`.
    pub fn from_f64_array(arr: &[f64; 3]) -> Self {
        Self {
            x: arr[0] as f32,
            y: arr[1] as f32,
            z: arr[2] as f32,
        }
    }
    /// Collect the coordinates into a `Vec<f32>` in `[x, y, z]` order.
    pub fn to_vec(&self) -> Vec<f32> {
        vec![self.x, self.y, self.z]
    }
}
/// A unit quaternion representing a 3-D rotation.
#[derive(Debug, Clone, Copy, PartialEq, Serialize, Deserialize)]
pub struct Quaternion {
    pub x: f64,
    pub y: f64,
    pub z: f64,
    pub w: f64,
}
impl Default for Quaternion {
    /// The identity rotation (vector part zero, `w = 1`).
    fn default() -> Self {
        Self::identity()
    }
}
impl Quaternion {
    /// The identity rotation.
    pub fn identity() -> Self {
        Self { x: 0.0, y: 0.0, z: 0.0, w: 1.0 }
    }
    /// Construct a quaternion from raw `(x, y, z, w)` components.
    pub fn new(x: f64, y: f64, z: f64, w: f64) -> Self {
        Self { x, y, z, w }
    }
}
/// A 6-DOF pose: position + orientation.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Pose {
pub position: [f64; 3],
pub orientation: Quaternion,
pub frame_id: String,
}
impl Default for Pose {
fn default() -> Self {
Self {
position: [0.0; 3],
orientation: Quaternion::identity(),
frame_id: String::new(),
}
}
}
/// A collection of 3D points from a sensor (LiDAR, depth camera, etc.).
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct PointCloud {
    pub points: Vec<Point3D>,
    pub intensities: Vec<f32>,
    pub normals: Option<Vec<Point3D>>,
    pub timestamp_us: i64,
    pub frame_id: String,
}
impl PointCloud {
    /// Build a cloud from `points`, assigning every point intensity 1.0,
    /// no normals, and an empty frame id.
    pub fn new(points: Vec<Point3D>, timestamp: i64) -> Self {
        let intensities = vec![1.0; points.len()];
        Self {
            points,
            intensities,
            normals: None,
            timestamp_us: timestamp,
            frame_id: String::new(),
        }
    }
    /// Number of points in the cloud.
    pub fn len(&self) -> usize {
        self.points.len()
    }
    /// True when the cloud holds no points.
    pub fn is_empty(&self) -> bool {
        self.points.is_empty()
    }
    /// Capture timestamp in microseconds.
    pub fn timestamp(&self) -> i64 {
        self.timestamp_us
    }
}
/// Robot state: position, velocity, acceleration, timestamp.
///
/// `Default` is the all-zero state at timestamp 0. The previous
/// hand-written `Default` impl produced exactly the derived value
/// (zero arrays, zero `i64`), so it is replaced by the derive.
#[derive(Debug, Clone, Default, PartialEq, Serialize, Deserialize)]
pub struct RobotState {
    /// Position, `[x, y, z]`.
    pub position: [f64; 3],
    /// Linear velocity. Units are set by the producer — presumably m/s; confirm.
    pub velocity: [f64; 3],
    /// Linear acceleration. Units set by the producer — presumably m/s²; confirm.
    pub acceleration: [f64; 3],
    /// Capture time in microseconds.
    pub timestamp_us: i64,
}
/// A synchronised bundle of sensor observations captured at one instant.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct SensorFrame {
    /// Point cloud for this tick, if a cloud sensor produced one.
    pub cloud: Option<PointCloud>,
    /// Robot state estimate, if available (the pipeline's preferred position source).
    pub state: Option<RobotState>,
    /// Robot pose, if available (used as the fallback position source).
    pub pose: Option<Pose>,
    /// Capture time of the whole bundle in microseconds.
    pub timestamp_us: i64,
}
/// A 2-D occupancy grid map.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct OccupancyGrid {
    pub width: usize,
    pub height: usize,
    pub resolution: f64,
    pub data: Vec<f32>,
    pub origin: [f64; 3],
}
impl OccupancyGrid {
    /// Allocate a `width * height` grid with every cell set to 0.0.
    ///
    /// # Panics
    /// Panics if `width * height` overflows `usize`.
    pub fn new(width: usize, height: usize, resolution: f64) -> Self {
        let cells = width.checked_mul(height).expect("grid size overflow");
        Self {
            width,
            height,
            resolution,
            data: vec![0.0; cells],
            origin: [0.0; 3],
        }
    }
    /// Get the occupancy value at `(x, y)`, or `None` if out of bounds.
    pub fn get(&self, x: usize, y: usize) -> Option<f32> {
        self.in_bounds(x, y).then(|| self.data[y * self.width + x])
    }
    /// Set the occupancy value at `(x, y)`. Out-of-bounds writes are ignored.
    pub fn set(&mut self, x: usize, y: usize, value: f32) {
        if self.in_bounds(x, y) {
            self.data[y * self.width + x] = value;
        }
    }
    /// Row-major bounds check shared by `get` and `set`.
    fn in_bounds(&self, x: usize, y: usize) -> bool {
        x < self.width && y < self.height
    }
}
/// An object detected in a scene with bounding information.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SceneObject {
    pub id: usize,
    pub center: [f64; 3],
    pub extent: [f64; 3],
    pub confidence: f32,
    pub label: String,
    pub velocity: Option<[f64; 3]>,
}
impl SceneObject {
    /// Create an object with full confidence (1.0), no label, and no
    /// velocity estimate.
    pub fn new(id: usize, center: [f64; 3], extent: [f64; 3]) -> Self {
        Self {
            id,
            center,
            extent,
            confidence: 1.0,
            label: String::default(),
            velocity: None,
        }
    }
}
/// An edge in a scene graph connecting two objects.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SceneEdge {
    /// Id of the source object (matches `SceneObject::id`).
    pub from: usize,
    /// Id of the destination object.
    pub to: usize,
    /// Distance between the two objects. Units set by the producer — presumably metres; confirm.
    pub distance: f64,
    /// Free-form relation label; semantics are defined by the producer.
    pub relation: String,
}
/// A scene graph representing spatial relationships between objects.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct SceneGraph {
    pub objects: Vec<SceneObject>,
    pub edges: Vec<SceneEdge>,
    pub timestamp: i64,
}
impl SceneGraph {
    /// Assemble a graph from pre-built object and edge lists.
    pub fn new(objects: Vec<SceneObject>, edges: Vec<SceneEdge>, timestamp: i64) -> Self {
        Self {
            objects,
            edges,
            timestamp,
        }
    }
}
/// A predicted trajectory consisting of waypoints.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Trajectory {
    pub waypoints: Vec<[f64; 3]>,
    pub timestamps: Vec<i64>,
    pub confidence: f64,
}
impl Trajectory {
    /// Build a trajectory from parallel waypoint and timestamp lists.
    pub fn new(waypoints: Vec<[f64; 3]>, timestamps: Vec<i64>, confidence: f64) -> Self {
        Self { waypoints, timestamps, confidence }
    }
    /// Number of waypoints.
    pub fn len(&self) -> usize {
        self.waypoints.len()
    }
    /// True when the trajectory holds no waypoints.
    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }
}
/// An obstacle detected by the perception pipeline.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Obstacle {
    /// Detection id from a monotonically increasing counter — unique per
    /// detection, not per physical object (re-detections get fresh ids).
    pub id: u64,
    /// Obstacle position, `[x, y, z]`.
    pub position: [f64; 3],
    /// Distance from the robot at detection time.
    pub distance: f64,
    /// Approximate obstacle radius (the pipeline emits a fixed 0.1 for points).
    pub radius: f64,
    /// Classification label (empty when unclassified).
    pub label: String,
    /// Detection confidence (the pipeline currently always emits 1.0).
    pub confidence: f32,
}
/// Bridge error type.
#[derive(Debug, thiserror::Error)]
pub enum BridgeError {
    /// Input data failed validation; payload is a human-readable reason.
    #[error("Invalid data: {0}")]
    InvalidData(String),
    /// A type/format conversion failed; payload describes the failure.
    #[error("Conversion error: {0}")]
    ConversionError(String),
    /// JSON (de)serialization failed; wraps the underlying `serde_json` error.
    #[error("Serialization error: {0}")]
    SerializationError(#[from] serde_json::Error),
}
/// Convenience alias for results whose error type is [`BridgeError`].
pub type Result<T> = std::result::Result<T, BridgeError>;
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_point3d_distance() {
        // 3-4-5 right triangle in the XY plane.
        let origin = Point3D::new(0.0, 0.0, 0.0);
        let target = Point3D::new(3.0, 4.0, 0.0);
        assert!((origin.distance_to(&target) - 5.0).abs() < 1e-6);
    }
    #[test]
    fn test_point_cloud() {
        let cloud = PointCloud::new(vec![Point3D::new(1.0, 2.0, 3.0)], 100);
        assert_eq!(cloud.len(), 1);
        assert_eq!(cloud.timestamp(), 100);
    }
    #[test]
    fn test_robot_state_default() {
        let state = RobotState::default();
        assert_eq!(state.position, [0.0; 3]);
        assert_eq!(state.velocity, [0.0; 3]);
    }
    #[test]
    fn test_scene_graph() {
        let object = SceneObject::new(0, [1.0, 2.0, 3.0], [0.5, 0.5, 0.5]);
        let graph = SceneGraph::new(vec![object], Vec::new(), 0);
        assert_eq!(graph.objects.len(), 1);
    }
    #[test]
    fn test_quaternion_identity() {
        // Identity rotation: zero vector part, unit scalar part.
        let q = Quaternion::identity();
        assert_eq!((q.x, q.w), (0.0, 1.0));
    }
    #[test]
    fn test_pose_default() {
        let pose = Pose::default();
        assert_eq!(pose.position, [0.0; 3]);
        assert_eq!(pose.orientation.w, 1.0);
    }
    #[test]
    fn test_sensor_frame_default() {
        // A default frame carries no cloud, state, or pose.
        let frame = SensorFrame::default();
        assert!(frame.cloud.is_none());
        assert!(frame.state.is_none());
        assert!(frame.pose.is_none());
    }
    #[test]
    fn test_occupancy_grid() {
        let mut grid = OccupancyGrid::new(10, 10, 0.05);
        grid.set(3, 4, 0.8);
        assert!((grid.get(3, 4).unwrap() - 0.8).abs() < f32::EPSILON);
        // (10, 10) is just past the last valid cell in both axes.
        assert!(grid.get(10, 10).is_none());
    }
    #[test]
    fn test_trajectory() {
        let waypoints = vec![[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]];
        let trajectory = Trajectory::new(waypoints, vec![100, 200], 0.95);
        assert_eq!(trajectory.len(), 2);
        assert!(!trajectory.is_empty());
    }
    #[test]
    fn test_serde_roundtrip() {
        let object = SceneObject::new(0, [1.0, 2.0, 3.0], [0.5, 0.5, 0.5]);
        let json = serde_json::to_string(&object).unwrap();
        let restored: SceneObject = serde_json::from_str(&json).unwrap();
        assert_eq!(object.id, restored.id);
    }
}

View File

@@ -0,0 +1,392 @@
//! Lightweight perception pipeline that ingests [`SensorFrame`]s, maintains a
//! [`SpatialIndex`], detects obstacles, and predicts linear trajectories.
use crate::bridge::indexing::SpatialIndex;
use crate::bridge::{Obstacle, PointCloud, SensorFrame, Trajectory};
use serde::{Deserialize, Serialize};
use std::time::Instant;
/// Configuration for the perception pipeline.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PipelineConfig {
    /// Points within this radius (metres) of the robot are classified as obstacles.
    pub obstacle_radius: f64,
    /// Whether to predict linear trajectories from consecutive frames.
    pub track_trajectories: bool,
}
impl Default for PipelineConfig {
    /// 2 m obstacle radius with trajectory tracking enabled.
    fn default() -> Self {
        Self { obstacle_radius: 2.0, track_trajectories: true }
    }
}
/// Output of a single pipeline frame.
///
/// Produced by [`PerceptionPipeline::process_frame`].
#[derive(Debug, Clone)]
pub struct PerceptionResult {
    /// Detected obstacles in the current frame.
    pub obstacles: Vec<Obstacle>,
    /// Linear trajectory prediction (if enabled and enough history exists).
    pub trajectory_prediction: Option<Trajectory>,
    /// Wall-clock latency of this frame in microseconds.
    pub frame_latency_us: u64,
}
/// Cumulative pipeline statistics.
#[derive(Debug, Clone, Default)]
pub struct PipelineStats {
    /// Number of frames processed so far.
    pub frames_processed: u64,
    /// Running average latency in microseconds (incremental mean over all frames).
    pub avg_latency_us: f64,
    /// Cumulative number of obstacle detections across all frames.
    ///
    /// Incremented by each frame's obstacle count, so an object detected in
    /// several frames is counted once per frame — not "distinct objects".
    pub objects_tracked: u64,
}
/// A stateful perception pipeline.
///
/// Call [`process_frame`](Self::process_frame) once per sensor tick.
pub struct PerceptionPipeline {
    /// Tuning knobs (obstacle radius, trajectory tracking toggle).
    config: PipelineConfig,
    /// Spatial index rebuilt from each frame's point cloud.
    index: SpatialIndex,
    /// Running counters and averages over all processed frames.
    stats: PipelineStats,
    /// Positions from previous frames for trajectory prediction (capped at 1000).
    position_history: Vec<[f64; 3]>,
    /// Timestamp (microseconds) of the most recently processed frame.
    last_timestamp: i64,
    /// Source of per-detection obstacle ids; incremented for every detection.
    obstacle_counter: u64,
}
impl PerceptionPipeline {
    /// Create a pipeline with the given configuration.
    ///
    /// The spatial index is fixed at 3 dimensions (XYZ).
    pub fn new(config: PipelineConfig) -> Self {
        Self {
            config,
            index: SpatialIndex::new(3),
            stats: PipelineStats::default(),
            position_history: Vec::new(),
            last_timestamp: 0,
            obstacle_counter: 0,
        }
    }
    /// Process a single [`SensorFrame`] and return perception results.
    ///
    /// Per frame: rebuilds the spatial index from the frame's cloud, resolves
    /// the robot position (state preferred, pose as fallback), detects
    /// obstacles within `config.obstacle_radius`, optionally records the
    /// position and predicts a linear trajectory, then updates running stats.
    pub fn process_frame(&mut self, frame: &SensorFrame) -> PerceptionResult {
        let start = Instant::now();
        // -- 1. Rebuild spatial index from the point cloud -------------------
        // The index only ever reflects the latest frame's cloud.
        self.index.clear();
        if let Some(ref cloud) = frame.cloud {
            self.index.insert_point_cloud(cloud);
        }
        // -- 2. Determine robot position (prefer state, fallback to pose) ----
        let robot_pos: Option<[f64; 3]> = frame
            .state
            .as_ref()
            .map(|s| s.position)
            .or_else(|| frame.pose.as_ref().map(|p| p.position));
        // -- 3. Detect obstacles ---------------------------------------------
        let obstacles = self.detect_obstacles(robot_pos, frame.cloud.as_ref());
        // -- 4. Trajectory prediction ----------------------------------------
        let trajectory_prediction = if self.config.track_trajectories {
            if let Some(pos) = robot_pos {
                // Cap history to prevent unbounded memory growth.
                // Dropping the oldest half keeps the two most recent samples
                // (all that prediction needs) and amortises the O(n) drain.
                if self.position_history.len() >= 1000 {
                    self.position_history.drain(..500);
                }
                self.position_history.push(pos);
                self.predict_trajectory(frame.timestamp_us)
            } else {
                // No position source in this frame: nothing to extrapolate.
                None
            }
        } else {
            None
        };
        // -- 5. Update stats -------------------------------------------------
        let elapsed_us = start.elapsed().as_micros() as u64;
        self.stats.frames_processed += 1;
        // Counts detections, not distinct objects: re-detections add up.
        self.stats.objects_tracked += obstacles.len() as u64;
        let n = self.stats.frames_processed as f64;
        // Incremental running mean — avoids storing per-frame latencies.
        self.stats.avg_latency_us =
            self.stats.avg_latency_us * ((n - 1.0) / n) + elapsed_us as f64 / n;
        self.last_timestamp = frame.timestamp_us;
        PerceptionResult {
            obstacles,
            trajectory_prediction,
            frame_latency_us: elapsed_us,
        }
    }
    /// Return cumulative statistics.
    pub fn stats(&self) -> &PipelineStats {
        &self.stats
    }
    /// Return a reference to the current spatial index.
    pub fn index(&self) -> &SpatialIndex {
        &self.index
    }
    /// Radius-search the index around `robot_pos` and wrap each hit in an
    /// [`Obstacle`] carrying a fresh monotonically increasing id.
    ///
    /// Returns an empty list when there is no robot position, no non-empty
    /// cloud, or the index search fails.
    fn detect_obstacles(
        &mut self,
        robot_pos: Option<[f64; 3]>,
        cloud: Option<&PointCloud>,
    ) -> Vec<Obstacle> {
        let robot_pos = match robot_pos {
            Some(p) => p,
            None => return Vec::new(),
        };
        let cloud = match cloud {
            Some(c) if !c.is_empty() => c,
            _ => return Vec::new(),
        };
        // The index stores f32; narrow the f64 robot position for the query.
        let query = [
            robot_pos[0] as f32,
            robot_pos[1] as f32,
            robot_pos[2] as f32,
        ];
        let radius = self.config.obstacle_radius as f32;
        // Search errors (e.g. an empty index) are treated as "no obstacles".
        let neighbours = match self.index.search_radius(&query, radius) {
            Ok(n) => n,
            Err(_) => return Vec::new(),
        };
        neighbours
            .into_iter()
            .map(|(idx, dist)| {
                // `idx` is assumed to index the cloud the index was built
                // from this frame — TODO confirm SpatialIndex's contract.
                let pt = &cloud.points[idx];
                self.obstacle_counter += 1;
                Obstacle {
                    id: self.obstacle_counter,
                    position: [pt.x as f64, pt.y as f64, pt.z as f64],
                    distance: dist as f64,
                    radius: 0.1, // point-level radius
                    label: String::new(),
                    confidence: 1.0,
                }
            })
            .collect()
    }
    /// Constant-velocity extrapolation from the last two recorded positions.
    ///
    /// Returns `None` until at least two positions have been observed.
    ///
    /// NOTE(review): the "velocity" is the raw per-frame position delta (not
    /// divided by the actual inter-frame interval), while the emitted
    /// timestamps assume fixed 100 ms steps — consistent only if frames
    /// arrive at a uniform rate; confirm with the frame producer.
    fn predict_trajectory(&self, current_ts: i64) -> Option<Trajectory> {
        if self.position_history.len() < 2 {
            return None;
        }
        let n = self.position_history.len();
        let prev = &self.position_history[n - 2];
        let curr = &self.position_history[n - 1];
        let vel = [
            curr[0] - prev[0],
            curr[1] - prev[1],
            curr[2] - prev[2],
        ];
        // Predict 5 steps into the future with constant velocity.
        let steps = 5;
        let dt_us: i64 = 100_000; // 100 ms per step
        let mut waypoints = Vec::with_capacity(steps);
        let mut timestamps = Vec::with_capacity(steps);
        for i in 1..=steps {
            let t = i as f64;
            waypoints.push([
                curr[0] + vel[0] * t,
                curr[1] + vel[1] * t,
                curr[2] + vel[2] * t,
            ]);
            timestamps.push(current_ts + dt_us * i as i64);
        }
        // 0.8 is a fixed heuristic confidence for linear extrapolation.
        Some(Trajectory::new(waypoints, timestamps, 0.8))
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::bridge::{Point3D, PointCloud, RobotState, SensorFrame};
    /// Assemble a frame carrying `points` plus a robot state at `position`.
    fn make_frame(points: Vec<Point3D>, position: [f64; 3], ts: i64) -> SensorFrame {
        let state = RobotState {
            position,
            timestamp_us: ts,
            ..RobotState::default()
        };
        SensorFrame {
            cloud: Some(PointCloud::new(points, ts)),
            state: Some(state),
            pose: None,
            timestamp_us: ts,
        }
    }
    #[test]
    fn test_empty_frame() {
        let mut pipe = PerceptionPipeline::new(PipelineConfig::default());
        let out = pipe.process_frame(&SensorFrame::default());
        assert!(out.obstacles.is_empty());
        assert!(out.trajectory_prediction.is_none());
        assert_eq!(pipe.stats().frames_processed, 1);
    }
    #[test]
    fn test_obstacle_detection() {
        // Robot at the origin; one point in range, one far outside.
        let mut pipe = PerceptionPipeline::new(PipelineConfig {
            obstacle_radius: 5.0,
            ..Default::default()
        });
        let frame = make_frame(
            vec![Point3D::new(1.0, 0.0, 0.0), Point3D::new(100.0, 0.0, 0.0)],
            [0.0, 0.0, 0.0],
            1000,
        );
        let out = pipe.process_frame(&frame);
        assert_eq!(out.obstacles.len(), 1);
        assert!((out.obstacles[0].distance - 1.0).abs() < 0.01);
    }
    #[test]
    fn test_no_obstacles_when_far() {
        let mut pipe = PerceptionPipeline::new(PipelineConfig {
            obstacle_radius: 0.5,
            ..Default::default()
        });
        let frame = make_frame(vec![Point3D::new(10.0, 10.0, 10.0)], [0.0, 0.0, 0.0], 1000);
        assert!(pipe.process_frame(&frame).obstacles.is_empty());
    }
    #[test]
    fn test_trajectory_prediction() {
        let mut pipe = PerceptionPipeline::new(PipelineConfig {
            track_trajectories: true,
            obstacle_radius: 0.1,
            ..Default::default()
        });
        // One frame is not enough history to extrapolate from.
        let first = pipe.process_frame(&make_frame(vec![], [0.0, 0.0, 0.0], 1000));
        assert!(first.trajectory_prediction.is_none());
        // Second frame at x = 1 implies a per-frame velocity of (1, 0, 0).
        let second = pipe.process_frame(&make_frame(vec![], [1.0, 0.0, 0.0], 2000));
        let traj = second.trajectory_prediction.unwrap();
        assert_eq!(traj.len(), 5);
        // The first predicted waypoint extrapolates to ~(2, 0, 0).
        assert!((traj.waypoints[0][0] - 2.0).abs() < 1e-6);
    }
    #[test]
    fn test_trajectory_disabled() {
        let mut pipe = PerceptionPipeline::new(PipelineConfig {
            track_trajectories: false,
            ..Default::default()
        });
        pipe.process_frame(&make_frame(vec![], [0.0, 0.0, 0.0], 0));
        let out = pipe.process_frame(&make_frame(vec![], [1.0, 0.0, 0.0], 1000));
        assert!(out.trajectory_prediction.is_none());
    }
    #[test]
    fn test_stats_accumulate() {
        let mut pipe = PerceptionPipeline::new(PipelineConfig::default());
        for _ in 0..5 {
            pipe.process_frame(&SensorFrame::default());
        }
        assert_eq!(pipe.stats().frames_processed, 5);
    }
    #[test]
    fn test_obstacle_ids_increment() {
        let mut pipe = PerceptionPipeline::new(PipelineConfig {
            obstacle_radius: 100.0,
            ..Default::default()
        });
        let frame = make_frame(
            vec![Point3D::new(1.0, 0.0, 0.0), Point3D::new(2.0, 0.0, 0.0)],
            [0.0, 0.0, 0.0],
            0,
        );
        let out = pipe.process_frame(&frame);
        assert_eq!(out.obstacles.len(), 2);
        // Ids come from a monotonically increasing counter.
        assert!(out.obstacles[0].id < out.obstacles[1].id);
    }
    #[test]
    fn test_pipeline_config_serde() {
        let cfg = PipelineConfig {
            obstacle_radius: 3.5,
            track_trajectories: false,
        };
        let encoded = serde_json::to_string(&cfg).unwrap();
        let restored: PipelineConfig = serde_json::from_str(&encoded).unwrap();
        assert!((restored.obstacle_radius - 3.5).abs() < f64::EPSILON);
        assert!(!restored.track_trajectories);
    }
    #[test]
    fn test_index_is_rebuilt_per_frame() {
        let mut pipe = PerceptionPipeline::new(PipelineConfig {
            obstacle_radius: 100.0,
            ..Default::default()
        });
        let small = make_frame(vec![Point3D::new(1.0, 0.0, 0.0)], [0.0, 0.0, 0.0], 0);
        pipe.process_frame(&small);
        assert_eq!(pipe.index().len(), 1);
        let bigger = make_frame(
            vec![
                Point3D::new(1.0, 0.0, 0.0),
                Point3D::new(2.0, 0.0, 0.0),
                Point3D::new(3.0, 0.0, 0.0),
            ],
            [0.0, 0.0, 0.0],
            1000,
        );
        pipe.process_frame(&bigger);
        // Only the latest frame's cloud should remain indexed.
        assert_eq!(pipe.index().len(), 3);
    }
}

View File

@@ -0,0 +1,174 @@
//! Search result types used by the bridge and perception layers.
use serde::{Deserialize, Serialize};
/// Severity level for obstacle proximity alerts.
///
/// Serialized by serde using the variant name (see the roundtrip tests below).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum AlertSeverity {
    /// Immediate collision risk.
    Critical,
    /// Object is approaching but not imminent.
    Warning,
    /// Informational -- object detected at moderate range.
    Info,
}
/// A single nearest-neighbour result.
///
/// Appears in [`SearchResult::neighbors`], sorted by ascending distance.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct Neighbor {
    /// Unique identifier of the indexed point.
    pub id: u64,
    /// Distance from the query point to this neighbour.
    /// Units/metric depend on the index that produced the result.
    pub distance: f32,
    /// 3-D position of the neighbour.
    pub position: [f32; 3],
}
/// Result of a spatial search query.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct SearchResult {
    /// Identifier of the query that produced this result.
    pub query_id: u64,
    /// Nearest neighbours, sorted by ascending distance.
    /// May be empty when nothing matched.
    pub neighbors: Vec<Neighbor>,
    /// Wall-clock latency of the search in microseconds.
    pub latency_us: u64,
}
/// An alert generated when an obstacle is within a safety threshold.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct ObstacleAlert {
    /// Identifier of the obstacle that triggered the alert.
    pub obstacle_id: u64,
    /// Distance to the obstacle in metres.
    pub distance: f32,
    /// Unit direction vector pointing from the robot towards the obstacle.
    /// Producers are expected to normalise this; it is not re-checked here.
    pub direction: [f32; 3],
    /// Severity of this alert.
    pub severity: AlertSeverity,
}
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_neighbor_serde_roundtrip() {
        let original = Neighbor {
            id: 42,
            distance: 1.5,
            position: [1.0, 2.0, 3.0],
        };
        let encoded = serde_json::to_string(&original).unwrap();
        let decoded: Neighbor = serde_json::from_str(&encoded).unwrap();
        assert_eq!(original, decoded);
    }
    #[test]
    fn test_search_result_serde_roundtrip() {
        let first = Neighbor {
            id: 1,
            distance: 0.5,
            position: [0.0, 0.0, 0.0],
        };
        let second = Neighbor {
            id: 2,
            distance: 1.2,
            position: [1.0, 1.0, 1.0],
        };
        let original = SearchResult {
            query_id: 7,
            neighbors: vec![first, second],
            latency_us: 150,
        };
        let encoded = serde_json::to_string(&original).unwrap();
        let decoded: SearchResult = serde_json::from_str(&encoded).unwrap();
        assert_eq!(original, decoded);
    }
    #[test]
    fn test_obstacle_alert_serde_roundtrip() {
        let original = ObstacleAlert {
            obstacle_id: 99,
            distance: 0.3,
            direction: [1.0, 0.0, 0.0],
            severity: AlertSeverity::Critical,
        };
        let encoded = serde_json::to_string(&original).unwrap();
        let decoded: ObstacleAlert = serde_json::from_str(&encoded).unwrap();
        assert_eq!(original, decoded);
    }
    #[test]
    fn test_alert_severity_all_variants() {
        // Every variant must survive a JSON roundtrip.
        let variants = [
            AlertSeverity::Critical,
            AlertSeverity::Warning,
            AlertSeverity::Info,
        ];
        for variant in variants {
            let encoded = serde_json::to_string(&variant).unwrap();
            let decoded: AlertSeverity = serde_json::from_str(&encoded).unwrap();
            assert_eq!(variant, decoded);
        }
    }
    #[test]
    fn test_search_result_empty_neighbors() {
        let empty = SearchResult {
            query_id: 0,
            neighbors: Vec::new(),
            latency_us: 0,
        };
        let encoded = serde_json::to_string(&empty).unwrap();
        let decoded: SearchResult = serde_json::from_str(&encoded).unwrap();
        assert!(decoded.neighbors.is_empty());
    }
    #[test]
    fn test_neighbor_debug_format() {
        let neighbor = Neighbor {
            id: 1,
            distance: 0.0,
            position: [0.0, 0.0, 0.0],
        };
        assert!(format!("{neighbor:?}").contains("Neighbor"));
    }
    #[test]
    fn test_obstacle_alert_direction_preserved() {
        let alert = ObstacleAlert {
            obstacle_id: 1,
            distance: 5.0,
            direction: [0.577, 0.577, 0.577],
            severity: AlertSeverity::Warning,
        };
        let encoded = serde_json::to_string(&alert).unwrap();
        let decoded: ObstacleAlert = serde_json::from_str(&encoded).unwrap();
        for (before, after) in alert.direction.iter().zip(decoded.direction.iter()) {
            assert!((before - after).abs() < 1e-6);
        }
    }
    #[test]
    fn test_search_result_json_structure() {
        let result = SearchResult {
            query_id: 10,
            neighbors: vec![Neighbor {
                id: 5,
                distance: 2.5,
                position: [3.0, 4.0, 5.0],
            }],
            latency_us: 42,
        };
        let json = serde_json::to_string_pretty(&result).unwrap();
        // Field names must appear verbatim in the serialized form.
        for key in ["query_id", "neighbors", "latency_us"] {
            assert!(json.contains(key));
        }
    }
}