Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'

This commit is contained in:
ruv
2026-02-28 14:39:40 -05:00
7854 changed files with 3522914 additions and 0 deletions

View File

@@ -0,0 +1,85 @@
//! Example: composable behavior trees for robot task execution.
//!
//! Demonstrates:
//! - Building a patrol behavior with sequence/selector/decorator nodes
//! - Ticking the tree and observing status changes
//! - Using the blackboard for inter-node communication
use ruvector_robotics::cognitive::{
BehaviorNode, BehaviorStatus, BehaviorTree, DecoratorType,
};
fn main() {
    println!("=== Behavior Tree Demo ===\n");

    // Patrol behavior, children ordered by priority:
    //   Selector [
    //       Sequence [has_obstacle → avoid_obstacle]
    //       Sequence [has_target → move_to_target → interact]
    //       Repeat(3) [ patrol_waypoint ]
    //   ]
    let root = BehaviorNode::Selector(vec![
        // Highest priority: react to obstacles.
        BehaviorNode::Sequence(vec![
            BehaviorNode::Condition("has_obstacle".into()),
            BehaviorNode::Action("avoid_obstacle".into()),
        ]),
        // Next: pursue a target when one is known.
        BehaviorNode::Sequence(vec![
            BehaviorNode::Condition("has_target".into()),
            BehaviorNode::Action("move_to_target".into()),
            BehaviorNode::Action("interact".into()),
        ]),
        // Fallback: repeat the patrol action three times.
        BehaviorNode::Decorator(
            DecoratorType::Repeat(3),
            Box::new(BehaviorNode::Action("patrol_waypoint".into())),
        ),
    ]);
    let mut tree = BehaviorTree::new(root);

    // Scenario 1: no condition holds, so the selector falls through to patrol.
    println!("--- Scenario 1: Patrolling ---");
    for action in ["patrol_waypoint", "avoid_obstacle", "move_to_target", "interact"] {
        tree.set_action_result(action, BehaviorStatus::Success);
    }
    let status = tree.tick();
    println!(" Tick 1 result: {:?}", status);
    println!(" (Should patrol since no conditions are true)\n");

    // Scenario 2: the obstacle branch now preempts everything else.
    println!("--- Scenario 2: Obstacle detected ---");
    tree.set_condition("has_obstacle", true);
    let status = tree.tick();
    println!(" Tick 2 result: {:?}", status);
    println!(" (Should avoid obstacle via selector priority)\n");

    // Scenario 3: obstacle gone, target branch takes over.
    println!("--- Scenario 3: Target acquired ---");
    tree.set_condition("has_obstacle", false);
    tree.set_condition("has_target", true);
    let status = tree.tick();
    println!(" Tick 3 result: {:?}", status);
    println!(" (Should move to target and interact)\n");

    // Scenario 4: a Timeout decorator fails a task that keeps running too long.
    println!("--- Scenario 4: Timeout behavior ---");
    let timeout_root = BehaviorNode::Decorator(
        DecoratorType::Timeout(2),
        Box::new(BehaviorNode::Action("long_task".into())),
    );
    let mut timed = BehaviorTree::new(timeout_root);
    timed.set_action_result("long_task", BehaviorStatus::Running);
    for tick in 1..=4 {
        let result = timed.tick();
        let suffix = if result == BehaviorStatus::Failure { " (TIMED OUT)" } else { "" };
        println!(" Tick {}: {:?}{}", tick, result, suffix);
    }

    // NOTE(review): this reports the patrol tree's count, not the timeout tree's.
    println!("\nFinal tick count: {}", tree.context().tick_count);
}

View File

@@ -0,0 +1,83 @@
//! Example: full perceive-think-act-learn cognitive loop.
//!
//! Demonstrates:
//! - Feeding percepts into the `CognitiveCore`
//! - Producing decisions via the think phase
//! - Executing actions and incorporating feedback
use ruvector_robotics::cognitive::{
ActionType, CognitiveConfig, CognitiveCore, CognitiveMode, Outcome, Percept,
};
fn main() {
    println!("=== Cognitive Loop Demo ===\n");

    // Deliberative core; percepts under the attention threshold are ignored.
    let config = CognitiveConfig {
        mode: CognitiveMode::Deliberative,
        attention_threshold: 0.4,
        learning_rate: 0.05,
        max_percepts: 50,
    };
    let mut core = CognitiveCore::new(config);

    // Five synthetic readings: (source, data, confidence).
    let readings = [
        ("lidar", vec![1.0, 0.5, 0.0], 0.9),
        ("camera", vec![0.0, 1.0, 0.2], 0.7),
        ("lidar", vec![2.0, 0.0, 1.5], 0.95),
        ("ir_sensor", vec![0.1, 0.1, 0.1], 0.3), // below threshold
        ("camera", vec![3.0, 2.0, 0.0], 0.85),
    ];

    for (cycle, (source, data, confidence)) in readings.iter().enumerate() {
        println!("--- Cycle {} ---", cycle + 1);

        // Perceive: feed the reading into the percept buffer.
        let state = core.perceive(Percept {
            source: source.to_string(),
            data: data.clone(),
            confidence: *confidence,
            timestamp: (cycle as i64 + 1) * 1000,
        });
        println!(
            " Perceived: source={}, confidence={:.2}, state={:?}",
            source, confidence, state
        );
        println!(" Buffer size: {}", core.percept_count());

        // Think → Act → Learn, or report why no decision came out.
        match core.think() {
            Some(decision) => {
                println!(" Decision: {}", decision.reasoning);
                println!(" Priority: {}", decision.action.priority);

                // Act: turn the decision into a concrete command.
                let cmd = core.act(decision);
                match &cmd.action {
                    ActionType::Move(pos) => println!(" Action: Move to [{:.1}, {:.1}, {:.1}]", pos[0], pos[1], pos[2]),
                    ActionType::Wait(ms) => println!(" Action: Wait {}ms", ms),
                    _ => println!(" Action: {:?}", cmd.action),
                }

                // Learn: reward high-confidence cycles, penalize the rest.
                let success = *confidence > 0.8;
                core.learn(Outcome {
                    success,
                    reward: if success { 1.0 } else { -0.5 },
                    description: format!("cycle_{}", cycle),
                });
                println!(
                    " Learned: success={}, cumulative_reward={:.4}",
                    success,
                    core.cumulative_reward()
                );
            }
            None => {
                println!(" No decision (empty buffer or below threshold)");
                println!(" State: {:?}", core.state());
            }
        }
        println!();
    }
    println!("Total decisions made: {}", core.decision_count());
}

View File

@@ -0,0 +1,79 @@
//! Example: obstacle detection from point cloud data.
//!
//! Demonstrates:
//! - Creating a `PointCloud` from synthetic sensor data
//! - Running the `ObstacleDetector` to find clusters
//! - Classifying obstacles as Static, Dynamic, or Unknown
use ruvector_robotics::bridge::{Point3D, PointCloud};
use ruvector_robotics::perception::{ObstacleDetector, ObstacleClass};
use ruvector_robotics::perception::config::ObstacleConfig;
fn main() {
    println!("=== Obstacle Detection Demo ===\n");

    // Assemble a synthetic warehouse scene.
    let mut scene = Vec::new();

    // Wall: two parallel rows of 30 points each (static, elongated shape).
    for i in 0..30 {
        let y = i as f32 * 0.3;
        scene.push(Point3D::new(5.0, y, 0.0));
        scene.push(Point3D::new(5.1, y, 0.0));
    }

    // Person: a compact 4x4 cluster of points (dynamic shape).
    for dx in 0..4 {
        for dy in 0..4 {
            scene.push(Point3D::new(
                2.0 + dx as f32 * 0.15,
                3.0 + dy as f32 * 0.15,
                0.0,
            ));
        }
    }

    // Two stray debris points (may fall under min_obstacle_size).
    scene.push(Point3D::new(8.0, 1.0, 0.0));
    scene.push(Point3D::new(8.1, 1.0, 0.0));

    let cloud = PointCloud::new(scene, 1_000_000);
    let robot = [0.0, 0.0, 0.0];
    println!("Point cloud: {} points", cloud.len());
    println!("Robot position: {:?}\n", robot);

    let detector = ObstacleDetector::new(ObstacleConfig {
        min_obstacle_size: 3,
        max_detection_range: 30.0,
        safety_margin: 0.5,
    });

    // Cluster the cloud into obstacles relative to the robot.
    let obstacles = detector.detect(&cloud, &robot);
    println!("Detected {} obstacle clusters:\n", obstacles.len());
    for (i, obs) in obstacles.iter().enumerate() {
        println!(
            " Obstacle {}: center=[{:.2}, {:.2}, {:.2}], extent=[{:.2}, {:.2}, {:.2}], points={}, dist={:.2}m",
            i, obs.center[0], obs.center[1], obs.center[2],
            obs.extent[0], obs.extent[1], obs.extent[2],
            obs.point_count, obs.min_distance,
        );
    }

    // Label each cluster by its geometry.
    let classified = detector.classify_obstacles(&obstacles);
    println!("\nClassification:");
    for (i, c) in classified.iter().enumerate() {
        let label = match c.class {
            ObstacleClass::Static => "STATIC (wall/barrier)",
            ObstacleClass::Dynamic => "DYNAMIC (movable)",
            ObstacleClass::Unknown => "UNKNOWN",
        };
        println!(
            " Obstacle {}: {} (confidence: {:.2})",
            i, label, c.confidence
        );
    }
}

View File

@@ -0,0 +1,79 @@
//! Example: spatial indexing for fast nearest-neighbor search.
//!
//! Demonstrates:
//! - Creating a `SpatialIndex` from a point cloud
//! - kNN search for finding nearest neighbors
//! - Radius search for proximity queries
//! - Multiple distance metrics (Euclidean, Cosine, Manhattan)
use ruvector_robotics::bridge::{DistanceMetric, Point3D, PointCloud, SpatialIndex};
fn main() {
    println!("=== Spatial Indexing Demo ===\n");

    // Synthetic warehouse layout: a 5x10 grid of shelf positions.
    let mut layout = Vec::new();
    for row in 0..5 {
        for col in 0..10 {
            layout.push(Point3D::new(col as f32 * 2.0, row as f32 * 3.0, 0.0));
        }
    }

    // Three elevated points representing items sitting on shelves.
    layout.push(Point3D::new(4.0, 6.0, 2.5));
    layout.push(Point3D::new(4.1, 6.1, 2.6));
    layout.push(Point3D::new(8.0, 3.0, 1.8));

    let cloud = PointCloud::new(layout, 1_000_000);
    println!("Point cloud: {} points", cloud.len());

    // Default (Euclidean) index over 3-dimensional vectors.
    let mut index = SpatialIndex::new(3);
    index.insert_point_cloud(&cloud);
    println!("Index size: {} vectors\n", index.len());

    // kNN: the five closest points to the robot.
    let query = [4.0, 6.0, 0.0];
    println!("--- kNN Search (k=5, query={:?}) ---", query);
    match index.search_nearest(&query, 5) {
        Ok(neighbors) => {
            for (rank, (idx, dist)) in neighbors.iter().enumerate() {
                println!(" #{}: index={}, distance={:.4}", rank + 1, idx, dist);
            }
        }
        Err(e) => println!(" Error: {}", e),
    }

    // Radius query: everything within 3.0 units of the robot.
    println!("\n--- Radius Search (r=3.0, query={:?}) ---", query);
    match index.search_radius(&query, 3.0) {
        Ok(neighbors) => {
            println!(" Found {} points within radius 3.0", neighbors.len());
            for (idx, dist) in neighbors.iter().take(8) {
                println!(" index={}, distance={:.4}", idx, dist);
            }
            if neighbors.len() > 8 {
                println!(" ... and {} more", neighbors.len() - 8);
            }
        }
        Err(e) => println!(" Error: {}", e),
    }

    // Same point set under a cosine-distance metric.
    println!("\n--- Cosine Distance Index ---");
    let mut cos_index = SpatialIndex::with_metric(3, DistanceMetric::Cosine);
    cos_index.insert_point_cloud(&cloud);
    match cos_index.search_nearest(&[1.0, 1.0, 0.0], 3) {
        Ok(neighbors) => {
            println!(" Nearest 3 by cosine distance to [1, 1, 0]:");
            for (idx, dist) in &neighbors {
                println!(" index={}, cosine_dist={:.4}", idx, dist);
            }
        }
        Err(e) => println!(" Error: {}", e),
    }
}

View File

@@ -0,0 +1,122 @@
//! Example: multi-robot swarm coordination.
//!
//! Demonstrates:
//! - Registering robots with capabilities
//! - Task assignment based on capability matching
//! - Formation computation (line, circle, grid)
//! - Consensus voting
use ruvector_robotics::cognitive::{
Formation, FormationType, RobotCapabilities, SwarmConfig, SwarmCoordinator, SwarmTask,
};
fn main() {
    println!("=== Swarm Coordination Demo ===\n");

    let mut coordinator = SwarmCoordinator::new(SwarmConfig {
        max_robots: 10,
        communication_range: 100.0,
        consensus_threshold: 0.5,
    });

    // Four robots with differing speed, payload, and sensor suites.
    let fleet = vec![
        RobotCapabilities {
            id: 1,
            max_speed: 2.0,
            payload: 10.0,
            sensors: vec!["lidar".into(), "camera".into()],
        },
        RobotCapabilities {
            id: 2,
            max_speed: 1.5,
            payload: 50.0,
            sensors: vec!["lidar".into(), "gripper".into()],
        },
        RobotCapabilities {
            id: 3,
            max_speed: 3.0,
            payload: 5.0,
            sensors: vec!["camera".into(), "thermal".into()],
        },
        RobotCapabilities {
            id: 4,
            max_speed: 1.0,
            payload: 100.0,
            sensors: vec!["lidar".into(), "gripper".into(), "camera".into()],
        },
    ];
    for robot in &fleet {
        let registered = coordinator.register_robot(robot.clone());
        println!("Registered robot {} (speed={}, sensors={:?}): {}",
            robot.id, robot.max_speed, robot.sensors, registered);
    }
    println!("\nActive robots: {}\n", coordinator.robot_count());

    // Match tasks to robots by required capability.
    println!("--- Task Assignment ---");
    let tasks = vec![
        SwarmTask {
            id: 100,
            description: "Inspect corridor A".into(),
            location: [10.0, 0.0, 0.0],
            required_capabilities: vec!["camera".into()],
            priority: 5,
        },
        SwarmTask {
            id: 101,
            description: "Move pallet B".into(),
            location: [5.0, 5.0, 0.0],
            required_capabilities: vec!["gripper".into()],
            priority: 8,
        },
        SwarmTask {
            id: 102,
            description: "Map area C".into(),
            location: [0.0, 10.0, 0.0],
            required_capabilities: vec!["lidar".into()],
            priority: 3,
        },
    ];
    let assignments = coordinator.assign_tasks(&tasks);
    for assignment in &assignments {
        // Look the task back up so we can show its description alongside the ETA.
        let task = tasks.iter().find(|t| t.id == assignment.task_id).unwrap();
        println!(
            " Robot {} -> Task {} ({}) [ETA: {:.1}s]",
            assignment.robot_id, assignment.task_id, task.description, assignment.estimated_completion
        );
    }

    // Compute robot positions for three formation shapes.
    println!("\n--- Formations ---");
    let shapes = [
        ("Line", FormationType::Line),
        ("Circle", FormationType::Circle),
        ("Grid", FormationType::Grid),
    ];
    for (name, formation_type) in shapes {
        let formation = Formation {
            formation_type,
            spacing: 3.0,
            center: [0.0, 0.0, 0.0],
        };
        let positions = coordinator.compute_formation(&formation);
        println!(" {} formation:", name);
        for (i, pos) in positions.iter().enumerate() {
            println!(" Robot {}: [{:.2}, {:.2}, {:.2}]", i, pos[0], pos[1], pos[2]);
        }
    }

    // Put a proposal to a swarm-wide vote.
    println!("\n--- Consensus Voting ---");
    let result = coordinator.propose_consensus("Explore sector 7");
    println!(
        " Proposal: '{}' -> {} (for={}, against={})",
        result.proposal,
        if result.accepted { "ACCEPTED" } else { "REJECTED" },
        result.votes_for,
        result.votes_against,
    );
}