Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'

This commit is contained in:
ruv
2026-02-28 14:39:40 -05:00
7854 changed files with 3522914 additions and 0 deletions

View File

@@ -0,0 +1,732 @@
//! FusionGraph: Unified Vector + Graph Layer
//!
//! Implements the core fusion layer that merges vector similarity edges
//! with graph relation edges into a unified weighted graph for minimum-cut analysis.
use std::collections::{HashMap, HashSet};
/// Unique identifier for fusion nodes
pub type NodeId = u64;
/// Origin of an edge in the fusion graph
///
/// The origin selects which configured fusion weight is applied when the
/// edge's capacity is computed (see `FusionConfig` and
/// `FusionGraph::compute_capacity`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum EdgeOrigin {
    /// Edge derived from vector similarity
    Vector,
    /// Edge from explicit graph relation
    Graph,
    /// Edge learned from access patterns
    SelfLearn,
}
/// Type of graph relation
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum RelationType {
    /// Hierarchical parent-child relationship
    ParentChild,
    /// Reference or citation
    References,
    /// Co-occurrence in same context
    CoOccurs,
    /// Similarity link
    SimilarTo,
    /// Custom relation type
    Custom(u8),
}
impl RelationType {
    /// Get weight factor for this relation type
    ///
    /// Structural links weigh most, custom relations least; the factor
    /// scales a graph edge's raw strength during capacity fusion.
    pub fn weight_factor(&self) -> f64 {
        use RelationType::*;
        match self {
            Custom(_) => 0.5,
            CoOccurs => 0.6,
            References => 0.8,
            SimilarTo => 0.9,
            ParentChild => 1.0,
        }
    }
}
/// A node in the fusion graph
#[derive(Debug, Clone)]
pub struct FusionNode {
    /// Unique identifier
    pub id: NodeId,
    /// Dense vector representation
    pub vector: Vec<f32>,
    /// Metadata as key-value pairs
    pub meta: HashMap<String, String>,
    /// Creation timestamp (seconds since the Unix epoch; 0 if the system
    /// clock reads before the epoch)
    pub created_ts: u64,
    /// Whether the node is active
    pub active: bool,
}
impl FusionNode {
    /// Create a new fusion node
    ///
    /// The node starts active with empty metadata; `created_ts` is taken
    /// from the system clock at construction time.
    pub fn new(id: NodeId, vector: Vec<f32>) -> Self {
        Self {
            id,
            vector,
            meta: HashMap::new(),
            created_ts: std::time::SystemTime::now()
                .duration_since(std::time::UNIX_EPOCH)
                .unwrap_or_default()
                .as_secs(),
            active: true,
        }
    }
    /// Add metadata (builder-style; an existing key is overwritten)
    pub fn with_meta(mut self, key: &str, value: &str) -> Self {
        self.meta.insert(key.to_string(), value.to_string());
        self
    }
    /// Compute cosine similarity with another node
    ///
    /// Returns 0.0 when the dimensions differ or either vector has zero
    /// norm. Accumulation is done in `f64` for precision.
    pub fn similarity(&self, other: &FusionNode) -> f64 {
        if self.vector.len() != other.vector.len() {
            return 0.0;
        }
        // Iterator zip avoids the three per-element bounds checks the
        // indexed loop paid, and typically auto-vectorizes.
        let mut dot = 0.0f64;
        let mut norm_a = 0.0f64;
        let mut norm_b = 0.0f64;
        for (&a, &b) in self.vector.iter().zip(other.vector.iter()) {
            let (a, b) = (f64::from(a), f64::from(b));
            dot += a * b;
            norm_a += a * a;
            norm_b += b * b;
        }
        if norm_a == 0.0 || norm_b == 0.0 {
            return 0.0;
        }
        dot / (norm_a.sqrt() * norm_b.sqrt())
    }
}
/// An edge in the fusion graph
#[derive(Debug, Clone)]
pub struct FusionEdge {
    /// Source node
    pub src: NodeId,
    /// Destination node
    pub dst: NodeId,
    /// Origin of this edge
    pub origin: EdgeOrigin,
    /// Relation type (for Graph origin)
    pub relation_type: Option<RelationType>,
    /// Raw strength before fusion
    pub raw_strength: f64,
    /// Computed capacity after fusion
    pub capacity: f64,
}
impl FusionEdge {
    /// Create a vector similarity edge
    ///
    /// Capacity is provisionally set to the raw similarity; the graph
    /// recomputes it with the fusion weights on insertion.
    pub fn from_vector(src: NodeId, dst: NodeId, similarity: f64) -> Self {
        Self {
            origin: EdgeOrigin::Vector,
            relation_type: None,
            raw_strength: similarity,
            capacity: similarity, // Will be recomputed
            src,
            dst,
        }
    }
    /// Create a graph relation edge
    pub fn from_graph(src: NodeId, dst: NodeId, rel_type: RelationType, strength: f64) -> Self {
        Self {
            origin: EdgeOrigin::Graph,
            relation_type: Some(rel_type),
            raw_strength: strength,
            capacity: strength, // Will be recomputed
            src,
            dst,
        }
    }
    /// Create a self-learned edge
    pub fn from_learning(src: NodeId, dst: NodeId, strength: f64) -> Self {
        Self {
            origin: EdgeOrigin::SelfLearn,
            relation_type: None,
            raw_strength: strength,
            capacity: strength,
            src,
            dst,
        }
    }
    /// Get edge key for deduplication
    ///
    /// Normalizes the endpoint pair so (a, b) and (b, a) map to the same
    /// key, making the edge effectively undirected for lookup purposes.
    pub fn key(&self) -> (NodeId, NodeId) {
        (self.src.min(self.dst), self.src.max(self.dst))
    }
}
/// Configuration for fusion graph
#[derive(Debug, Clone)]
pub struct FusionConfig {
    /// Weight for vector similarity edges (w_v)
    pub vector_weight: f64,
    /// Weight for graph relation edges (w_g)
    pub graph_weight: f64,
    /// Weight for self-learned edges
    pub learn_weight: f64,
    /// Minimum similarity threshold for vector edges
    pub similarity_threshold: f64,
    /// Top-k similar nodes to connect
    pub top_k: usize,
    /// Enable automatic brittleness detection
    pub enable_monitoring: bool,
}
impl Default for FusionConfig {
    /// Defaults favour vector evidence (0.6) over explicit relations (0.4),
    /// connect each node to its 10 nearest neighbours above 0.7 similarity,
    /// and keep structural monitoring on.
    fn default() -> Self {
        Self {
            similarity_threshold: 0.7,
            top_k: 10,
            enable_monitoring: true,
            vector_weight: 0.6,
            graph_weight: 0.4,
            learn_weight: 0.3,
        }
    }
}
/// Result of a fusion query
///
/// Bundles the ranked node IDs with a connectivity assessment of the
/// induced result subgraph, so callers can tell when a result set is at
/// risk of being fragmented.
#[derive(Debug, Clone)]
pub struct FusionResult {
    /// Retrieved node IDs, ordered by descending query similarity
    pub nodes: Vec<NodeId>,
    /// Minimum cut value for the result subgraph (INFINITY when fewer
    /// than two nodes were returned)
    pub min_cut: f64,
    /// Partition if cut is low
    pub partition: Option<(Vec<NodeId>, Vec<NodeId>)>,
    /// Brittleness warning
    pub brittleness_warning: Option<String>,
    /// Number of cut edges
    // NOTE(review): populated from the truncated cut VALUE, not an actual
    // edge count — see FusionGraph::query; confirm intended semantics.
    pub num_cut_edges: usize,
}
/// The fusion graph combining vector and graph layers
///
/// Maintains nodes, fused edges, an undirected adjacency index, and a
/// continuously refreshed (approximate) minimum-cut estimate used for
/// brittleness detection.
#[derive(Debug)]
pub struct FusionGraph {
    /// Configuration
    config: FusionConfig,
    /// All nodes by ID
    nodes: HashMap<NodeId, FusionNode>,
    /// All edges
    edges: Vec<FusionEdge>,
    /// Adjacency list (node -> set of neighbor nodes); undirected: both
    /// endpoints of every edge list each other
    adjacency: HashMap<NodeId, HashSet<NodeId>>,
    /// Edge lookup (normalized key -> edge index); keys come from
    /// `FusionEdge::key`, so (a, b) and (b, a) collapse to one entry
    edge_index: HashMap<(NodeId, NodeId), usize>,
    /// Next available node ID
    next_id: NodeId,
    /// Current minimum cut estimate (INFINITY when undefined)
    min_cut_estimate: f64,
    /// Boundary edges in current cut
    boundary_edges: Vec<(NodeId, NodeId)>,
}
impl FusionGraph {
    /// Create a new fusion graph with default config
    pub fn new() -> Self {
        Self::with_config(FusionConfig::default())
    }
    /// Create a fusion graph with custom config
    pub fn with_config(config: FusionConfig) -> Self {
        Self {
            config,
            nodes: HashMap::new(),
            edges: Vec::new(),
            adjacency: HashMap::new(),
            edge_index: HashMap::new(),
            next_id: 1,
            // INFINITY means "no cut defined yet" (fewer than 2 nodes).
            min_cut_estimate: f64::INFINITY,
            boundary_edges: Vec::new(),
        }
    }
    /// Get configuration
    pub fn config(&self) -> &FusionConfig {
        &self.config
    }
    /// Number of nodes
    pub fn num_nodes(&self) -> usize {
        self.nodes.len()
    }
    /// Number of edges
    pub fn num_edges(&self) -> usize {
        self.edges.len()
    }
    /// Current minimum cut estimate
    pub fn min_cut(&self) -> f64 {
        self.min_cut_estimate
    }
    /// Get boundary edges
    pub fn boundary_edges(&self) -> &[(NodeId, NodeId)] {
        &self.boundary_edges
    }
    /// Ingest a new node
    ///
    /// Assigns the next sequential ID, links the node to its `top_k`
    /// most similar existing nodes at or above `similarity_threshold`,
    /// and refreshes the min-cut estimate. Returns the new ID.
    pub fn ingest_node(&mut self, vector: Vec<f32>) -> NodeId {
        let id = self.next_id;
        self.next_id += 1;
        let node = FusionNode::new(id, vector);
        // Find similar nodes and create vector edges
        // (similarity is computed against the graph BEFORE insertion, so
        // the node never matches itself).
        let similar_nodes = self.find_similar_nodes(&node);
        self.nodes.insert(id, node);
        self.adjacency.insert(id, HashSet::new());
        // Add edges to similar nodes
        for (neighbor_id, similarity) in similar_nodes {
            if similarity >= self.config.similarity_threshold {
                self.add_vector_edge(id, neighbor_id, similarity);
            }
        }
        // Recompute min-cut estimate
        self.update_min_cut_estimate();
        id
    }
    /// Ingest a node with explicit ID
    ///
    /// Keeps the auto-ID counter ahead of any explicitly supplied ID.
    // NOTE(review): if `id` already exists, the node and its adjacency set
    // are overwritten but previously indexed edges remain — confirm that
    // re-ingestion with an existing ID is intended to behave this way.
    pub fn ingest_node_with_id(&mut self, id: NodeId, vector: Vec<f32>) {
        let node = FusionNode::new(id, vector);
        // Find similar nodes
        let similar_nodes = self.find_similar_nodes(&node);
        self.nodes.insert(id, node);
        self.adjacency.insert(id, HashSet::new());
        // Add edges to similar nodes
        for (neighbor_id, similarity) in similar_nodes {
            if similarity >= self.config.similarity_threshold {
                self.add_vector_edge(id, neighbor_id, similarity);
            }
        }
        self.next_id = self.next_id.max(id + 1);
        self.update_min_cut_estimate();
    }
    /// Add a graph relation edge
    ///
    /// Silently ignored when either endpoint is unknown.
    pub fn add_relation(
        &mut self,
        src: NodeId,
        dst: NodeId,
        rel_type: RelationType,
        strength: f64,
    ) {
        if !self.nodes.contains_key(&src) || !self.nodes.contains_key(&dst) {
            return;
        }
        let edge = FusionEdge::from_graph(src, dst, rel_type, strength);
        self.add_edge_internal(edge);
        self.update_min_cut_estimate();
    }
    /// Add a self-learned edge from access patterns
    ///
    /// Silently ignored when either endpoint is unknown.
    pub fn add_learned_edge(&mut self, src: NodeId, dst: NodeId, strength: f64) {
        if !self.nodes.contains_key(&src) || !self.nodes.contains_key(&dst) {
            return;
        }
        let edge = FusionEdge::from_learning(src, dst, strength);
        self.add_edge_internal(edge);
        self.update_min_cut_estimate();
    }
    /// Delete a node and its edges
    ///
    /// Returns false when the node did not exist. Removing edges shifts
    /// indices, so the edge index is rebuilt from scratch (O(E)).
    pub fn delete_node(&mut self, id: NodeId) -> bool {
        if self.nodes.remove(&id).is_none() {
            return false;
        }
        // Remove all edges involving this node
        self.edges.retain(|e| e.src != id && e.dst != id);
        // Rebuild edge index
        self.edge_index.clear();
        for (i, edge) in self.edges.iter().enumerate() {
            self.edge_index.insert(edge.key(), i);
        }
        // Update adjacency
        self.adjacency.remove(&id);
        for neighbors in self.adjacency.values_mut() {
            neighbors.remove(&id);
        }
        self.update_min_cut_estimate();
        true
    }
    /// Query nodes by similarity with brittleness awareness
    ///
    /// Ranks active nodes by cosine similarity to `query_vector`, takes
    /// the top `limit`, and attaches a connectivity assessment of the
    /// induced result subgraph.
    pub fn query(&self, query_vector: &[f32], limit: usize) -> FusionResult {
        let mut scores: Vec<(NodeId, f64)> = Vec::new();
        for node in self.nodes.values() {
            if !node.active {
                continue;
            }
            let sim = self.cosine_similarity(&node.vector, query_vector);
            if sim > 0.0 {
                scores.push((node.id, sim));
            }
        }
        // Sort by similarity descending
        scores.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
        let result_nodes: Vec<NodeId> = scores.iter().take(limit).map(|(id, _)| *id).collect();
        // Compute min-cut for the result subgraph
        let (subgraph_cut, partition) = self.compute_subgraph_cut(&result_nodes);
        // Warn when the weighted cut falls below 2.0 for a non-trivial
        // result set.
        let brittleness_warning = if subgraph_cut < 2.0 && result_nodes.len() > 2 {
            Some(format!(
                "Low connectivity (λ={:.2}): results may be fragmented",
                subgraph_cut
            ))
        } else {
            None
        };
        // NOTE(review): this truncates the cut VALUE (a weighted capacity
        // sum) to usize, which is not literally an edge count — confirm.
        let num_cut_edges = if subgraph_cut < f64::INFINITY {
            subgraph_cut as usize
        } else {
            0
        };
        FusionResult {
            nodes: result_nodes,
            min_cut: subgraph_cut,
            partition,
            brittleness_warning,
            num_cut_edges,
        }
    }
    /// Get all edges for a node
    pub fn get_node_edges(&self, id: NodeId) -> Vec<&FusionEdge> {
        self.edges
            .iter()
            .filter(|e| e.src == id || e.dst == id)
            .collect()
    }
    /// Get node by ID
    pub fn get_node(&self, id: NodeId) -> Option<&FusionNode> {
        self.nodes.get(&id)
    }
    /// Get all edges
    pub fn get_edges(&self) -> &[FusionEdge] {
        &self.edges
    }
    /// Get adjacency for export to mincut
    pub fn to_weighted_edges(&self) -> Vec<(u64, u64, f64)> {
        self.edges
            .iter()
            .map(|e| (e.src, e.dst, e.capacity))
            .collect()
    }
    // Private helper methods

    /// Rank all other active nodes by cosine similarity to `node` and
    /// return the top `config.top_k` as (id, similarity) pairs.
    fn find_similar_nodes(&self, node: &FusionNode) -> Vec<(NodeId, f64)> {
        let mut similarities: Vec<(NodeId, f64)> = self
            .nodes
            .values()
            .filter(|n| n.id != node.id && n.active)
            .map(|n| (n.id, node.similarity(n)))
            .collect();
        similarities.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap_or(std::cmp::Ordering::Equal));
        similarities.truncate(self.config.top_k);
        similarities
    }
    fn add_vector_edge(&mut self, src: NodeId, dst: NodeId, similarity: f64) {
        let edge = FusionEdge::from_vector(src, dst, similarity);
        self.add_edge_internal(edge);
    }
    /// Insert an edge, deduplicating on the normalized endpoint key.
    fn add_edge_internal(&mut self, mut edge: FusionEdge) {
        let key = edge.key();
        // Check for existing edge
        if let Some(&idx) = self.edge_index.get(&key) {
            // Merge: take max capacity
            // (capacities from different origins are NOT summed; the
            // stronger edge wins and replaces the weaker one entirely).
            let new_capacity = self.compute_capacity(&edge);
            edge.capacity = new_capacity;
            if edge.capacity > self.edges[idx].capacity {
                self.edges[idx] = edge;
            }
            return;
        }
        // Compute capacity with fusion weights
        edge.capacity = self.compute_capacity(&edge);
        // Add to adjacency (both directions: the graph is undirected)
        self.adjacency
            .entry(edge.src)
            .or_insert_with(HashSet::new)
            .insert(edge.dst);
        self.adjacency
            .entry(edge.dst)
            .or_insert_with(HashSet::new)
            .insert(edge.src);
        // Add edge
        let idx = self.edges.len();
        self.edge_index.insert(key, idx);
        self.edges.push(edge);
    }
    /// Compute edge capacity with fusion formula:
    /// c(u,v) = w_v * f_v(similarity) + w_g * f_g(relation_strength, relation_type)
    ///
    /// Each edge carries a single origin, so in practice only one term of
    /// the formula applies per edge.
    fn compute_capacity(&self, edge: &FusionEdge) -> f64 {
        match edge.origin {
            EdgeOrigin::Vector => {
                // f_v(s) = s^2 for similarity (emphasizes high similarity)
                let f_v = edge.raw_strength * edge.raw_strength;
                self.config.vector_weight * f_v
            }
            EdgeOrigin::Graph => {
                // f_g(strength, type) = strength * type_factor
                let type_factor = edge.relation_type.map(|r| r.weight_factor()).unwrap_or(1.0);
                let f_g = edge.raw_strength * type_factor;
                self.config.graph_weight * f_g
            }
            EdgeOrigin::SelfLearn => {
                // Learned edges use learn weight
                self.config.learn_weight * edge.raw_strength
            }
        }
    }
    /// Refresh `min_cut_estimate` and `boundary_edges`.
    fn update_min_cut_estimate(&mut self) {
        if self.nodes.len() < 2 {
            self.min_cut_estimate = f64::INFINITY;
            self.boundary_edges.clear();
            return;
        }
        // Simple min-cut approximation via minimum degree
        // Real implementation would use the full mincut algorithm
        // NOTE(review): the `degree > 0.0` filter skips isolated nodes, so
        // a graph with a fully disconnected node still reports a positive
        // estimate (or INFINITY when no node has edges) — confirm this is
        // the intended behavior for disconnection detection.
        let mut min_degree = f64::INFINITY;
        let mut min_node = None;
        for (&node_id, neighbors) in &self.adjacency {
            // Weighted degree: sum of capacities of incident edges.
            let degree: f64 = neighbors
                .iter()
                .filter_map(|&n| {
                    let key = if node_id < n {
                        (node_id, n)
                    } else {
                        (n, node_id)
                    };
                    self.edge_index.get(&key).map(|&i| self.edges[i].capacity)
                })
                .sum();
            if degree < min_degree && degree > 0.0 {
                min_degree = degree;
                min_node = Some(node_id);
            }
        }
        self.min_cut_estimate = min_degree;
        // Update boundary edges (edges incident to minimum degree node)
        self.boundary_edges.clear();
        if let Some(node_id) = min_node {
            if let Some(neighbors) = self.adjacency.get(&node_id) {
                for &n in neighbors {
                    self.boundary_edges.push((node_id, n));
                }
            }
        }
    }
    /// Approximate the minimum cut of the subgraph induced by `nodes`.
    ///
    /// Uses the minimum weighted degree as the cut value and proposes the
    /// trivial partition {min-degree node} vs. {rest}.
    fn compute_subgraph_cut(&self, nodes: &[NodeId]) -> (f64, Option<(Vec<NodeId>, Vec<NodeId>)>) {
        if nodes.len() < 2 {
            return (f64::INFINITY, None);
        }
        let node_set: HashSet<_> = nodes.iter().copied().collect();
        // Compute induced subgraph edges
        let mut subgraph_adj: HashMap<NodeId, Vec<(NodeId, f64)>> = HashMap::new();
        for &node in nodes {
            subgraph_adj.insert(node, Vec::new());
        }
        for edge in &self.edges {
            if node_set.contains(&edge.src) && node_set.contains(&edge.dst) {
                // Record the edge under both endpoints (undirected view).
                subgraph_adj
                    .entry(edge.src)
                    .or_default()
                    .push((edge.dst, edge.capacity));
                subgraph_adj
                    .entry(edge.dst)
                    .or_default()
                    .push((edge.src, edge.capacity));
            }
        }
        // Find minimum degree (approximation)
        let mut min_cut = f64::INFINITY;
        let mut min_node = None;
        for (&node, neighbors) in &subgraph_adj {
            let degree: f64 = neighbors.iter().map(|(_, w)| w).sum();
            if degree < min_cut && degree > 0.0 {
                min_cut = degree;
                min_node = Some(node);
            }
        }
        // Generate partition
        let partition = min_node.map(|n| {
            let s = vec![n];
            let t: Vec<_> = nodes.iter().copied().filter(|&x| x != n).collect();
            (s, t)
        });
        (min_cut, partition)
    }
    /// Cosine similarity between two raw slices, accumulated in f64.
    /// Mirrors `FusionNode::similarity`; returns 0.0 on length mismatch
    /// or zero-norm input.
    fn cosine_similarity(&self, a: &[f32], b: &[f32]) -> f64 {
        if a.len() != b.len() {
            return 0.0;
        }
        let mut dot = 0.0;
        let mut norm_a = 0.0;
        let mut norm_b = 0.0;
        for i in 0..a.len() {
            dot += f64::from(a[i]) * f64::from(b[i]);
            norm_a += f64::from(a[i]) * f64::from(a[i]);
            norm_b += f64::from(b[i]) * f64::from(b[i]);
        }
        if norm_a == 0.0 || norm_b == 0.0 {
            return 0.0;
        }
        dot / (norm_a.sqrt() * norm_b.sqrt())
    }
}
impl Default for FusionGraph {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_fusion_graph_creation() {
        // A fresh graph starts empty.
        let graph = FusionGraph::new();
        assert_eq!(graph.num_nodes(), 0);
        assert_eq!(graph.num_edges(), 0);
    }

    #[test]
    fn test_node_ingestion() {
        // Auto-assigned IDs start at 1.
        let mut graph = FusionGraph::new();
        let id = graph.ingest_node(vec![1.0, 0.0, 0.0]);
        assert_eq!(id, 1);
        assert_eq!(graph.num_nodes(), 1);
    }

    #[test]
    fn test_similarity_edges() {
        // Ingesting a near-duplicate vector above the threshold must
        // create at least one vector-similarity edge.
        let mut graph = FusionGraph::with_config(FusionConfig {
            similarity_threshold: 0.5,
            ..Default::default()
        });
        // Similar vectors
        graph.ingest_node_with_id(1, vec![1.0, 0.0, 0.0]);
        graph.ingest_node_with_id(2, vec![0.9, 0.1, 0.0]); // Similar to 1
        assert!(graph.num_edges() > 0);
    }

    #[test]
    fn test_relation_edge() {
        // Orthogonal vectors create no similarity edge; the explicit
        // relation is the only edge.
        let mut graph = FusionGraph::new();
        graph.ingest_node_with_id(1, vec![1.0, 0.0]);
        graph.ingest_node_with_id(2, vec![0.0, 1.0]);
        graph.add_relation(1, 2, RelationType::References, 0.8);
        assert_eq!(graph.num_edges(), 1);
    }

    #[test]
    fn test_query() {
        let mut graph = FusionGraph::with_config(FusionConfig {
            similarity_threshold: 0.3,
            ..Default::default()
        });
        graph.ingest_node_with_id(1, vec![1.0, 0.0, 0.0]);
        graph.ingest_node_with_id(2, vec![0.9, 0.1, 0.0]);
        graph.ingest_node_with_id(3, vec![0.0, 1.0, 0.0]);
        let result = graph.query(&[1.0, 0.0, 0.0], 2);
        assert!(!result.nodes.is_empty());
        // Node 1 matches the query vector exactly and must be returned.
        assert!(result.nodes.contains(&1));
    }

    #[test]
    fn test_capacity_computation() {
        let config = FusionConfig {
            vector_weight: 0.6,
            graph_weight: 0.4,
            ..Default::default()
        };
        let graph = FusionGraph::with_config(config);
        let vector_edge = FusionEdge::from_vector(1, 2, 0.9);
        let capacity = graph.compute_capacity(&vector_edge);
        assert!((capacity - 0.486).abs() < 0.01); // 0.6 * 0.9^2 = 0.486
        let graph_edge = FusionEdge::from_graph(1, 2, RelationType::ParentChild, 1.0);
        let capacity = graph.compute_capacity(&graph_edge);
        assert!((capacity - 0.4).abs() < 0.01); // 0.4 * 1.0 * 1.0 = 0.4
    }
}

View File

@@ -0,0 +1,19 @@
//! Vector-Graph Fusion Module
//!
//! Unified retrieval substrate combining vector similarity and graph relations
//! with minimum-cut brittleness detection for robust knowledge retrieval.
mod fusion_graph;
mod optimizer;
mod structural_monitor;
pub use fusion_graph::{
EdgeOrigin, FusionConfig, FusionEdge, FusionGraph, FusionNode, FusionResult, RelationType,
};
pub use optimizer::{
LearningGate, MaintenancePlan, MaintenanceTask, OptimizationResult, Optimizer, OptimizerAction,
};
pub use structural_monitor::{
BrittlenessSignal, MonitorConfig as StructuralMonitorConfig, MonitorState, StructuralMonitor,
Trigger, TriggerType,
};

View File

@@ -0,0 +1,511 @@
//! Optimizer: Maintenance Planning and Actions
//!
//! Provides optimization actions and maintenance planning based on
//! structural monitor signals.
use super::structural_monitor::{BrittlenessSignal, StructuralMonitor, TriggerType};
use std::collections::HashMap;
/// Optimization action types
///
/// Emitted by the `Optimizer` in response to structural-monitor triggers;
/// each variant describes one maintenance operation on the fusion graph.
#[derive(Debug, Clone, PartialEq)]
pub enum OptimizerAction {
    /// Reindex: rebuild vector similarity edges
    Reindex {
        /// Affected nodes (empty means all nodes — see `action_for_trigger`)
        nodes: Vec<u64>,
        /// New similarity threshold
        new_threshold: Option<f64>,
    },
    /// Rewire: adjust edge capacities
    Rewire {
        /// Edges to strengthen, as (src, dst, multiplier)
        strengthen: Vec<(u64, u64, f64)>,
        /// Edges to weaken, as (src, dst, multiplier)
        weaken: Vec<(u64, u64, f64)>,
    },
    /// Split shard: divide a partition
    SplitShard {
        /// Shard ID to split
        shard_id: u64,
        /// Split point (if applicable)
        split_at: Option<u64>,
    },
    /// Merge shards: combine partitions
    MergeShards {
        /// Shard IDs to merge
        shard_ids: Vec<u64>,
    },
    /// Learning gate: enable/disable self-learning
    LearningGate {
        /// Whether to enable learning
        enable: bool,
        /// Learning rate adjustment
        rate_multiplier: f64,
    },
    /// No operation needed
    NoOp,
}
/// Learning gate controller
#[derive(Debug, Clone)]
pub struct LearningGate {
/// Whether learning is enabled
pub enabled: bool,
/// Current learning rate
pub learning_rate: f64,
/// Base learning rate
pub base_rate: f64,
/// Minimum rate before disabling
pub min_rate: f64,
/// Maximum rate
pub max_rate: f64,
}
impl Default for LearningGate {
fn default() -> Self {
Self {
enabled: true,
learning_rate: 0.01,
base_rate: 0.01,
min_rate: 0.001,
max_rate: 0.1,
}
}
}
impl LearningGate {
/// Create new learning gate
pub fn new(base_rate: f64) -> Self {
Self {
learning_rate: base_rate,
base_rate,
..Default::default()
}
}
/// Adjust learning rate based on signal
pub fn adjust(&mut self, signal: BrittlenessSignal) {
match signal {
BrittlenessSignal::Healthy => {
// Increase learning rate when stable
self.learning_rate = (self.learning_rate * 1.1).min(self.max_rate);
}
BrittlenessSignal::Warning => {
// Keep current rate
}
BrittlenessSignal::Critical | BrittlenessSignal::Disconnected => {
// Reduce learning to avoid further instability
self.learning_rate = (self.learning_rate * 0.5).max(self.min_rate);
if self.learning_rate <= self.min_rate {
self.enabled = false;
}
}
}
}
/// Reset to defaults
pub fn reset(&mut self) {
self.enabled = true;
self.learning_rate = self.base_rate;
}
}
/// A maintenance task
#[derive(Debug, Clone)]
pub struct MaintenanceTask {
    /// Task ID
    pub id: u64,
    /// Action to perform
    pub action: OptimizerAction,
    /// Priority (higher = more urgent)
    pub priority: u8,
    /// Estimated cost (1-10)
    pub cost: u8,
    /// Expected benefit description
    pub benefit: String,
    /// Whether the task is critical
    pub critical: bool,
}
impl MaintenanceTask {
    /// Create new maintenance task
    ///
    /// Cost, criticality, and the benefit text are all derived from the
    /// action in a single pass; only disabling the learning gate is
    /// flagged critical.
    pub fn new(id: u64, action: OptimizerAction, priority: u8) -> Self {
        let (cost, critical, benefit) = match &action {
            OptimizerAction::Reindex { nodes, .. } => (
                if nodes.len() > 100 { 8 } else { 4 },
                false,
                "Refresh vector similarity edges".to_string(),
            ),
            OptimizerAction::Rewire {
                strengthen, weaken, ..
            } => (
                (strengthen.len() + weaken.len()).min(10) as u8,
                false,
                "Adjust edge weights for better balance".to_string(),
            ),
            OptimizerAction::SplitShard { .. } => (
                6,
                false,
                "Reduce partition size for better locality".to_string(),
            ),
            OptimizerAction::MergeShards { shard_ids } => (
                shard_ids.len().min(10) as u8,
                false,
                "Combine sparse partitions for density".to_string(),
            ),
            OptimizerAction::LearningGate { enable: true, .. } => (
                1,
                false,
                "Re-enable learning for adaptation".to_string(),
            ),
            OptimizerAction::LearningGate { enable: false, .. } => (
                2,
                true,
                "Pause learning to stabilize".to_string(),
            ),
            OptimizerAction::NoOp => (0, false, "No action needed".to_string()),
        };
        Self {
            id,
            action,
            priority,
            cost,
            benefit,
            critical,
        }
    }
}
/// A maintenance plan
#[derive(Debug, Clone, Default)]
pub struct MaintenancePlan {
    /// Ordered list of tasks
    pub tasks: Vec<MaintenanceTask>,
    /// Total estimated cost
    pub total_cost: u32,
    /// Plan generation timestamp
    pub created_at: u64,
    /// Human-readable summary
    pub summary: String,
}
impl MaintenancePlan {
    /// Create a new plan
    ///
    /// Starts empty, stamped with the current Unix time.
    pub fn new() -> Self {
        let now = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();
        Self {
            created_at: now,
            ..Self::default()
        }
    }
    /// Add a task to the plan
    ///
    /// Accumulates its cost and refreshes the summary line.
    pub fn add_task(&mut self, task: MaintenanceTask) {
        let cost = u32::from(task.cost);
        self.tasks.push(task);
        self.total_cost += cost;
        self.update_summary();
    }
    /// Sort tasks by priority (highest first)
    pub fn prioritize(&mut self) {
        // Stable sort: tasks of equal priority keep insertion order.
        self.tasks.sort_by_key(|task| std::cmp::Reverse(task.priority));
    }
    /// Get critical tasks only
    pub fn critical_tasks(&self) -> Vec<&MaintenanceTask> {
        self.tasks.iter().filter(|task| task.critical).collect()
    }
    /// Check if plan is empty
    pub fn is_empty(&self) -> bool {
        self.tasks.is_empty()
    }
    /// Rebuild the one-line summary from the current task list.
    fn update_summary(&mut self) {
        let total = self.tasks.len();
        let critical_count = self.tasks.iter().filter(|t| t.critical).count();
        self.summary = format!(
            "{} tasks ({} critical), total cost: {}",
            total, critical_count, self.total_cost
        );
    }
}
/// Result of optimization analysis
///
/// Produced by `Optimizer::analyze`; combines the health signal, the
/// single highest-urgency action, the full ordered plan, and a numeric
/// metrics snapshot keyed by metric name.
#[derive(Debug, Clone)]
pub struct OptimizationResult {
    /// Current graph health signal
    pub signal: BrittlenessSignal,
    /// Recommended immediate action (NoOp when nothing urgent)
    pub immediate_action: OptimizerAction,
    /// Full maintenance plan
    pub plan: MaintenancePlan,
    /// Metrics snapshot (lambda_est, lambda_trend, cut_volatility,
    /// boundary_edges, learning_rate)
    pub metrics: HashMap<String, f64>,
}
/// The optimizer that plans maintenance actions
///
/// Stateful: it owns the learning gate, hands out monotonically
/// increasing task IDs, and caches the most recent analysis result.
#[derive(Debug)]
pub struct Optimizer {
    /// Learning gate controller
    learning_gate: LearningGate,
    /// Task ID counter (monotonically increasing, starts at 1)
    next_task_id: u64,
    /// Last optimization result
    last_result: Option<OptimizationResult>,
}
impl Optimizer {
    /// Create new optimizer
    pub fn new() -> Self {
        Self {
            learning_gate: LearningGate::default(),
            next_task_id: 1,
            last_result: None,
        }
    }
    /// Get the learning gate
    pub fn learning_gate(&self) -> &LearningGate {
        &self.learning_gate
    }
    /// Get mutable learning gate
    pub fn learning_gate_mut(&mut self) -> &mut LearningGate {
        &mut self.learning_gate
    }
    /// Analyze monitor state and generate optimization plan
    ///
    /// Side effects: adjusts the learning gate, advances the task ID
    /// counter, and caches the result (retrievable via `last_result`).
    pub fn analyze(&mut self, monitor: &StructuralMonitor) -> OptimizationResult {
        let signal = monitor.signal();
        let state = monitor.state();
        // Adjust learning gate based on signal
        self.learning_gate.adjust(signal);
        // Build maintenance plan
        let mut plan = MaintenancePlan::new();
        let mut immediate_action = OptimizerAction::NoOp;
        // Check triggers and add tasks
        for trigger in monitor.triggers() {
            let (action, priority) = self.action_for_trigger(trigger.trigger_type, state);
            // The FIRST trigger with priority >= 8 becomes the immediate
            // action; later high-priority triggers only join the plan.
            if priority >= 8 && matches!(immediate_action, OptimizerAction::NoOp) {
                immediate_action = action.clone();
            }
            let task = MaintenanceTask::new(self.next_task_id, action, priority);
            self.next_task_id += 1;
            plan.add_task(task);
        }
        // Add proactive maintenance based on signal
        // (only when no trigger produced a task of its own).
        if matches!(signal, BrittlenessSignal::Warning) && plan.is_empty() {
            let task = MaintenanceTask::new(
                self.next_task_id,
                OptimizerAction::Rewire {
                    // Mildly strengthen every current boundary edge.
                    strengthen: state
                        .boundary_edges
                        .iter()
                        .map(|&(u, v)| (u, v, 1.2))
                        .collect(),
                    weaken: Vec::new(),
                },
                5,
            );
            self.next_task_id += 1;
            plan.add_task(task);
        }
        // Sort by priority
        plan.prioritize();
        // Collect metrics
        let mut metrics = HashMap::new();
        metrics.insert("lambda_est".to_string(), state.lambda_est);
        metrics.insert("lambda_trend".to_string(), state.lambda_trend);
        metrics.insert("cut_volatility".to_string(), state.cut_volatility);
        metrics.insert(
            "boundary_edges".to_string(),
            state.boundary_edges.len() as f64,
        );
        metrics.insert(
            "learning_rate".to_string(),
            self.learning_gate.learning_rate,
        );
        let result = OptimizationResult {
            signal,
            immediate_action,
            plan,
            metrics,
        };
        self.last_result = Some(result.clone());
        result
    }
    /// Get the last optimization result
    pub fn last_result(&self) -> Option<&OptimizationResult> {
        self.last_result.as_ref()
    }
    /// Generate action for a trigger type
    ///
    /// Returns the (action, priority) pair; priorities range from 4
    /// (OverClustering) to 10 (Disconnected).
    fn action_for_trigger(
        &self,
        trigger_type: TriggerType,
        state: &super::structural_monitor::MonitorState,
    ) -> (OptimizerAction, u8) {
        match trigger_type {
            TriggerType::IslandingRisk => {
                // Strengthen boundary edges to prevent islanding
                let strengthen: Vec<_> = state
                    .boundary_edges
                    .iter()
                    .map(|&(u, v)| (u, v, 1.5))
                    .collect();
                (
                    OptimizerAction::Rewire {
                        strengthen,
                        weaken: Vec::new(),
                    },
                    9,
                )
            }
            TriggerType::Instability => {
                // Pause learning to stabilize
                (
                    OptimizerAction::LearningGate {
                        enable: false,
                        rate_multiplier: 0.5,
                    },
                    7,
                )
            }
            TriggerType::Degradation => {
                // Reindex to refresh connections
                (
                    OptimizerAction::Reindex {
                        nodes: Vec::new(), // All nodes
                        new_threshold: Some(0.6), // Lower threshold
                    },
                    6,
                )
            }
            TriggerType::OverClustering => {
                // Merge shards
                (
                    OptimizerAction::MergeShards {
                        shard_ids: vec![0, 1], // Placeholder
                    },
                    4,
                )
            }
            TriggerType::Disconnected => {
                // Critical: attempt to reconnect
                (
                    OptimizerAction::Reindex {
                        nodes: Vec::new(),
                        new_threshold: Some(0.5), // Very low threshold
                    },
                    10,
                )
            }
        }
    }
}
impl Default for Optimizer {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_optimizer_creation() {
        // A fresh optimizer starts with learning enabled.
        let optimizer = Optimizer::new();
        assert!(optimizer.learning_gate().enabled);
    }

    #[test]
    fn test_learning_gate_adjustment() {
        let mut gate = LearningGate::default();
        // Healthy should increase rate
        let initial_rate = gate.learning_rate;
        gate.adjust(BrittlenessSignal::Healthy);
        assert!(gate.learning_rate > initial_rate);
        // Critical should decrease rate
        // (halving undoes more than the 1.1x increase above).
        gate.adjust(BrittlenessSignal::Critical);
        assert!(gate.learning_rate < initial_rate);
    }

    #[test]
    fn test_maintenance_plan() {
        let mut plan = MaintenancePlan::new();
        assert!(plan.is_empty());
        let task = MaintenanceTask::new(1, OptimizerAction::NoOp, 5);
        plan.add_task(task);
        assert!(!plan.is_empty());
        assert_eq!(plan.tasks.len(), 1);
    }

    #[test]
    fn test_plan_prioritization() {
        // prioritize() must order tasks by descending priority.
        let mut plan = MaintenancePlan::new();
        plan.add_task(MaintenanceTask::new(1, OptimizerAction::NoOp, 3));
        plan.add_task(MaintenanceTask::new(2, OptimizerAction::NoOp, 9));
        plan.add_task(MaintenanceTask::new(3, OptimizerAction::NoOp, 5));
        plan.prioritize();
        assert_eq!(plan.tasks[0].priority, 9);
        assert_eq!(plan.tasks[1].priority, 5);
        assert_eq!(plan.tasks[2].priority, 3);
    }

    #[test]
    fn test_optimizer_analyze() {
        let mut optimizer = Optimizer::new();
        let mut monitor = StructuralMonitor::new();
        // Healthy observation
        monitor.observe(5.0, vec![]);
        let result = optimizer.analyze(&monitor);
        assert_eq!(result.signal, BrittlenessSignal::Healthy);
        // Critical observation
        monitor.observe(0.5, vec![(1, 2)]);
        let result = optimizer.analyze(&monitor);
        assert_eq!(result.signal, BrittlenessSignal::Critical);
        assert!(!result.plan.is_empty());
    }

    #[test]
    fn test_action_generation() {
        let optimizer = Optimizer::new();
        let state = super::super::structural_monitor::MonitorState {
            lambda_est: 0.5,
            boundary_edges: vec![(1, 2)],
            ..Default::default()
        };
        // IslandingRisk must yield a high-priority Rewire.
        let (action, priority) = optimizer.action_for_trigger(TriggerType::IslandingRisk, &state);
        assert!(priority >= 8);
        assert!(matches!(action, OptimizerAction::Rewire { .. }));
    }
}

View File

@@ -0,0 +1,440 @@
//! Structural Monitor: Brittleness Detection
//!
//! Monitors the fusion graph's structural health by tracking minimum-cut
//! trends, volatility, and generating actionable triggers.
use std::collections::VecDeque;
/// Configuration for the structural monitor
#[derive(Debug, Clone)]
pub struct MonitorConfig {
    /// Window size for trend analysis
    pub window_size: usize,
    /// Threshold for low-cut warning (λ_low)
    pub lambda_low: f64,
    /// Threshold for critical warning (λ_critical)
    pub lambda_critical: f64,
    /// Volatility threshold for instability warning
    pub volatility_threshold: f64,
    /// Trend slope threshold for degradation warning
    pub trend_slope_threshold: f64,
}
impl Default for MonitorConfig {
    /// Defaults: a 100-observation window, warning below λ=3.0, critical
    /// below λ=1.0, and degradation flagged when the trend slope drops
    /// past -0.1.
    fn default() -> Self {
        Self {
            lambda_critical: 1.0,
            lambda_low: 3.0,
            window_size: 100,
            trend_slope_threshold: -0.1,
            volatility_threshold: 0.5,
        }
    }
}
/// Current state of the monitor
#[derive(Debug, Clone)]
pub struct MonitorState {
    /// Current minimum-cut estimate
    pub lambda_est: f64,
    /// Moving average slope (trend)
    pub lambda_trend: f64,
    /// Variance over window (volatility)
    pub cut_volatility: f64,
    /// Top-k edges crossing current cut
    pub boundary_edges: Vec<(u64, u64)>,
    /// Number of observations in window
    pub observation_count: usize,
    /// Last update timestamp
    pub last_update_ts: u64,
}
impl Default for MonitorState {
    /// Initial state: no observations yet, so the cut estimate is
    /// INFINITY ("undefined") and every other field is zeroed/empty.
    fn default() -> Self {
        Self {
            lambda_trend: 0.0,
            cut_volatility: 0.0,
            observation_count: 0,
            last_update_ts: 0,
            boundary_edges: Vec::new(),
            lambda_est: f64::INFINITY,
        }
    }
}
/// Type of trigger condition
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum TriggerType {
    /// Min-cut below critical threshold (islanding risk)
    IslandingRisk,
    /// Volatility above threshold (unstable structure)
    Instability,
    /// Negative trend (degrading connectivity)
    Degradation,
    /// Cut increased significantly (over-clustering)
    OverClustering,
    /// Graph became disconnected
    Disconnected,
}
/// A trigger event from the monitor
#[derive(Debug, Clone)]
pub struct Trigger {
    /// Type of trigger
    pub trigger_type: TriggerType,
    /// Current minimum-cut value
    pub lambda_current: f64,
    /// Threshold that was crossed
    pub threshold: f64,
    /// Severity (0.0 - 1.0)
    pub severity: f64,
    /// Recommended action
    pub recommendation: String,
    /// Timestamp
    pub timestamp: u64,
}
impl Trigger {
    /// Create a new trigger
    ///
    /// Severity and the recommendation text are fixed per trigger type;
    /// the timestamp is stamped from the system clock.
    pub fn new(trigger_type: TriggerType, lambda: f64, threshold: f64) -> Self {
        let (severity, recommendation) = match trigger_type {
            TriggerType::Disconnected => (
                1.0,
                "Critical: graph has disconnected components".to_string(),
            ),
            TriggerType::IslandingRisk => (
                0.8,
                "Consider adding bridge edges or merging sparse partitions".to_string(),
            ),
            TriggerType::Instability => (
                0.6,
                "Structure is volatile; consider stabilizing with explicit relations".to_string(),
            ),
            TriggerType::Degradation => (
                0.5,
                "Connectivity trending down; review recent deletions".to_string(),
            ),
            TriggerType::OverClustering => (
                0.3,
                "May have too many clusters; consider relaxing similarity threshold".to_string(),
            ),
        };
        let timestamp = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();
        Self {
            trigger_type,
            lambda_current: lambda,
            threshold,
            severity,
            recommendation,
            timestamp,
        }
    }
}
/// Signal from the monitor indicating graph health
///
/// Variants are declared from healthiest to worst; the classification is
/// derived from the current min-cut estimate against the configured
/// thresholds (see `StructuralMonitor::signal`).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum BrittlenessSignal {
    /// Healthy: good connectivity
    Healthy,
    /// Warning: connectivity getting low
    Warning,
    /// Critical: at risk of fragmentation
    Critical,
    /// Disconnected: already fragmented
    Disconnected,
}
impl BrittlenessSignal {
/// Get string representation
pub fn as_str(&self) -> &'static str {
match self {
BrittlenessSignal::Healthy => "healthy",
BrittlenessSignal::Warning => "warning",
BrittlenessSignal::Critical => "critical",
BrittlenessSignal::Disconnected => "disconnected",
}
}
}
/// The structural monitor
///
/// Keeps a bounded sliding window of minimum-cut observations and derives a
/// health signal, a trend slope, and a volatility measure from it.
#[derive(Debug)]
pub struct StructuralMonitor {
    /// Configuration
    config: MonitorConfig,
    /// Current state
    state: MonitorState,
    /// History of lambda values for trend analysis
    /// (trimmed to at most `config.window_size` entries)
    lambda_history: VecDeque<f64>,
    /// Active triggers
    /// (replaced wholesale on every `observe` call)
    active_triggers: Vec<Trigger>,
    /// Total observations processed
    /// (lifetime counter; unlike the window, never trimmed)
    total_observations: u64,
}
impl StructuralMonitor {
    /// Create a new monitor with the default [`MonitorConfig`].
    pub fn new() -> Self {
        Self::with_config(MonitorConfig::default())
    }

    /// Create a monitor with a custom configuration.
    pub fn with_config(config: MonitorConfig) -> Self {
        Self {
            config,
            state: MonitorState::default(),
            lambda_history: VecDeque::new(),
            active_triggers: Vec::new(),
            total_observations: 0,
        }
    }

    /// Get the current monitor state.
    pub fn state(&self) -> &MonitorState {
        &self.state
    }

    /// Classify the current min-cut estimate into a health signal.
    ///
    /// The initial `lambda_est` is `f64::INFINITY` (no observations yet),
    /// which is above every threshold and therefore reports `Healthy`.
    pub fn signal(&self) -> BrittlenessSignal {
        if self.state.lambda_est == 0.0 {
            BrittlenessSignal::Disconnected
        } else if self.state.lambda_est < self.config.lambda_critical {
            BrittlenessSignal::Critical
        } else if self.state.lambda_est < self.config.lambda_low {
            BrittlenessSignal::Warning
        } else {
            BrittlenessSignal::Healthy
        }
    }

    /// Get the triggers raised by the most recent observation.
    pub fn triggers(&self) -> &[Trigger] {
        &self.active_triggers
    }

    /// Update the monitor with a new minimum-cut observation.
    ///
    /// Pushes `lambda` into the sliding window, refreshes the derived
    /// statistics (trend slope and volatility), and returns the triggers
    /// raised by this observation. The same set replaces the currently
    /// active triggers.
    pub fn observe(&mut self, lambda: f64, boundary_edges: Vec<(u64, u64)>) -> Vec<Trigger> {
        let mut new_triggers = Vec::new();
        // Update history: bounded sliding window of the last `window_size` values.
        self.lambda_history.push_back(lambda);
        if self.lambda_history.len() > self.config.window_size {
            self.lambda_history.pop_front();
        }
        // Update state snapshot for this observation.
        self.state.lambda_est = lambda;
        self.state.boundary_edges = boundary_edges;
        self.state.observation_count = self.lambda_history.len();
        self.state.last_update_ts = std::time::SystemTime::now()
            .duration_since(std::time::UNIX_EPOCH)
            .unwrap_or_default()
            .as_secs();
        self.total_observations += 1;
        // Compute trend (linear regression slope) over the window.
        self.state.lambda_trend = self.compute_trend();
        // Compute volatility (sample standard deviation) over the window.
        self.state.cut_volatility = self.compute_volatility();
        // Trigger checks. A zero (or negative-infinite) cut means the graph is
        // disconnected. Parentheses make the intended grouping explicit —
        // `&&` binds tighter than `||` (clippy::precedence); behavior unchanged.
        if lambda == 0.0 || (lambda.is_infinite() && lambda.is_sign_negative()) {
            new_triggers.push(Trigger::new(TriggerType::Disconnected, lambda, 0.0));
        } else if lambda < self.config.lambda_critical {
            new_triggers.push(Trigger::new(
                TriggerType::IslandingRisk,
                lambda,
                self.config.lambda_critical,
            ));
        }
        if self.state.cut_volatility > self.config.volatility_threshold {
            new_triggers.push(Trigger::new(
                TriggerType::Instability,
                lambda,
                self.config.volatility_threshold,
            ));
        }
        if self.state.lambda_trend < self.config.trend_slope_threshold {
            new_triggers.push(Trigger::new(
                TriggerType::Degradation,
                lambda,
                self.config.trend_slope_threshold,
            ));
        }
        // Replace the active set, reusing its allocation where possible.
        self.active_triggers.clone_from(&new_triggers);
        new_triggers
    }

    /// Check if immediate action is needed (any trigger is currently active).
    pub fn needs_action(&self) -> bool {
        !self.active_triggers.is_empty()
    }

    /// Get a one-line, human-readable summary of the current health state.
    pub fn report(&self) -> String {
        let signal = self.signal();
        // ±0.01 dead-band so tiny slopes read as "stable".
        let trend_dir = if self.state.lambda_trend > 0.01 {
            "↑ improving"
        } else if self.state.lambda_trend < -0.01 {
            "↓ degrading"
        } else {
            "→ stable"
        };
        format!(
            "Signal: {} | λ={:.2} | Trend: {} ({:.3}) | Volatility: {:.3} | Boundary: {} edges",
            signal.as_str(),
            self.state.lambda_est,
            trend_dir,
            self.state.lambda_trend,
            self.state.cut_volatility,
            self.state.boundary_edges.len()
        )
    }

    /// Reset the monitor's state, history, and active triggers.
    ///
    /// NOTE(review): `total_observations` is not cleared here — it behaves as
    /// a lifetime counter. Confirm that is intended.
    pub fn reset(&mut self) {
        self.state = MonitorState::default();
        self.lambda_history.clear();
        self.active_triggers.clear();
    }

    /// Ordinary least-squares slope of lambda over observation index
    /// (units: lambda change per observation). Returns 0.0 with fewer than
    /// two samples or a degenerate denominator.
    fn compute_trend(&self) -> f64 {
        let n = self.lambda_history.len();
        if n < 2 {
            return 0.0;
        }
        let n_f64 = n as f64;
        let sum_x: f64 = (0..n).map(|i| i as f64).sum();
        let sum_y: f64 = self.lambda_history.iter().sum();
        let sum_xy: f64 = self
            .lambda_history
            .iter()
            .enumerate()
            .map(|(i, &y)| i as f64 * y)
            .sum();
        let sum_xx: f64 = (0..n).map(|i| (i * i) as f64).sum();
        let denominator = n_f64 * sum_xx - sum_x * sum_x;
        if denominator.abs() < 1e-10 {
            return 0.0;
        }
        (n_f64 * sum_xy - sum_x * sum_y) / denominator
    }

    /// Sample standard deviation of the windowed lambda values.
    ///
    /// Despite the "volatility" name, this is the standard deviation — the
    /// square root of the Bessel-corrected (n-1) variance — not the variance
    /// itself. Returns 0.0 with fewer than two samples.
    fn compute_volatility(&self) -> f64 {
        let n = self.lambda_history.len();
        if n < 2 {
            return 0.0;
        }
        let mean: f64 = self.lambda_history.iter().sum::<f64>() / n as f64;
        let variance: f64 = self
            .lambda_history
            .iter()
            .map(|&x| (x - mean) * (x - mean))
            .sum::<f64>()
            / (n - 1) as f64;
        variance.sqrt()
    }
}
impl Default for StructuralMonitor {
fn default() -> Self {
Self::new()
}
}
#[cfg(test)]
mod tests {
    use super::*;
    // These tests exercise the monitor against the *default* MonitorConfig;
    // the chosen lambda values (5.0 / 2.0 / 0.5) are picked to land in the
    // healthy / warning / critical bands respectively.
    #[test]
    fn test_monitor_creation() {
        let monitor = StructuralMonitor::new();
        // Initial state has lambda_est = INFINITY (no observations yet)
        // which counts as Healthy since it's above all thresholds
        assert_eq!(monitor.signal(), BrittlenessSignal::Healthy);
    }
    #[test]
    fn test_healthy_observation() {
        let mut monitor = StructuralMonitor::new();
        monitor.observe(5.0, vec![]);
        assert_eq!(monitor.signal(), BrittlenessSignal::Healthy);
    }
    #[test]
    fn test_warning_observation() {
        let mut monitor = StructuralMonitor::new();
        monitor.observe(2.0, vec![]);
        assert_eq!(monitor.signal(), BrittlenessSignal::Warning);
    }
    #[test]
    fn test_critical_observation() {
        let mut monitor = StructuralMonitor::new();
        monitor.observe(0.5, vec![]);
        assert_eq!(monitor.signal(), BrittlenessSignal::Critical);
    }
    #[test]
    fn test_trigger_generation() {
        let mut monitor = StructuralMonitor::new();
        // A sub-critical lambda must raise at least an IslandingRisk trigger.
        let triggers = monitor.observe(0.5, vec![(1, 2)]);
        assert!(!triggers.is_empty());
        assert!(triggers
            .iter()
            .any(|t| t.trigger_type == TriggerType::IslandingRisk));
    }
    #[test]
    fn test_trend_computation() {
        let mut monitor = StructuralMonitor::new();
        // Simulate decreasing trend
        for i in (0..10).rev() {
            monitor.observe(i as f64, vec![]);
        }
        // Regression slope over a strictly decreasing window is negative.
        assert!(monitor.state().lambda_trend < 0.0);
    }
    #[test]
    fn test_volatility_computation() {
        let mut monitor = StructuralMonitor::new();
        // Simulate volatile observations
        for i in 0..10 {
            let value = if i % 2 == 0 { 5.0 } else { 1.0 };
            monitor.observe(value, vec![]);
        }
        assert!(monitor.state().cut_volatility > 0.0);
    }
    #[test]
    fn test_report() {
        let mut monitor = StructuralMonitor::new();
        monitor.observe(3.5, vec![(1, 2), (2, 3)]);
        let report = monitor.report();
        // Report embeds the signal label and the lambda formatted to 2 d.p.
        assert!(report.contains("healthy"));
        assert!(report.contains("3.50"));
    }
}

View File

@@ -0,0 +1,753 @@
//! Subpolynomial-Time Dynamic Minimum Cut Demo
//!
//! This example demonstrates the key features of the ruvector-mincut crate:
//! 1. Basic minimum cut computation
//! 2. Dynamic updates (insert/delete edges)
//! 3. Exact vs approximate modes
//! 4. Real-time monitoring
//! 5. Network resilience analysis
//! 6. Performance scaling
//! 7. Vector-Graph Fusion with brittleness detection
use rand::prelude::*;
use ruvector_mincut::prelude::*;
use ruvector_mincut::{EventType, MonitorBuilder};
use std::sync::atomic::{AtomicU64, Ordering};
use std::sync::Arc;
use std::time::Instant;
mod fusion;
use fusion::{
BrittlenessSignal, FusionConfig, FusionGraph, Optimizer, OptimizerAction, RelationType,
StructuralMonitor, StructuralMonitorConfig,
};
fn main() {
println!("╔══════════════════════════════════════════════════════════════╗");
println!("║ Subpolynomial-Time Dynamic Minimum Cut Algorithm Demo ║");
println!("║ ruvector-mincut v0.1.0 + Vector-Graph Fusion ║");
println!("╚══════════════════════════════════════════════════════════════╝\n");
// Demo 1: Basic usage
demo_basic_usage();
println!("\n{}\n", "".repeat(64));
// Demo 2: Dynamic updates
demo_dynamic_updates();
println!("\n{}\n", "".repeat(64));
// Demo 3: Exact vs approximate
demo_exact_vs_approximate();
println!("\n{}\n", "".repeat(64));
// Demo 4: Real-time monitoring
demo_monitoring();
println!("\n{}\n", "".repeat(64));
// Demo 5: Network resilience
demo_network_resilience();
println!("\n{}\n", "".repeat(64));
// Demo 6: Performance scaling
demo_performance_scaling();
println!("\n{}\n", "".repeat(64));
// Demo 7: Vector-Graph Fusion
demo_vector_graph_fusion();
println!("\n{}\n", "".repeat(64));
// Demo 8: Brittleness Detection
demo_brittleness_detection();
println!("\n{}\n", "".repeat(64));
// Demo 9: Self-Learning Optimization
demo_self_learning_optimization();
println!("\n╔══════════════════════════════════════════════════════════════╗");
println!("║ Demo Complete! ║");
println!("╚══════════════════════════════════════════════════════════════╝");
}
/// Demo 1: Basic minimum cut computation
///
/// Builds a unit-weight triangle, queries its minimum cut, and prints the
/// partition, the cut edges, and overall graph statistics.
fn demo_basic_usage() {
    println!("📊 DEMO 1: Basic Minimum Cut Computation");
    println!("Creating a triangle graph with vertices 1, 2, 3...\n");
    // Create a triangle graph: 1-2, 2-3, 3-1
    let mincut = MinCutBuilder::new()
        .exact()
        .with_edges(vec![(1, 2, 1.0), (2, 3, 1.0), (3, 1, 1.0)])
        .build()
        .expect("Failed to build mincut");
    println!("Graph created:");
    println!(" • Vertices: {}", mincut.num_vertices());
    println!(" • Edges: {}", mincut.num_edges());
    println!(" • Connected: {}", mincut.is_connected());
    // Query the minimum cut
    let result = mincut.min_cut();
    println!("\nMinimum cut result:");
    println!(" • Value: {}", result.value);
    println!(" • Is exact: {}", result.is_exact);
    println!(" • Approximation ratio: {}", result.approximation_ratio);
    // Partition and cut-edge set are optional payloads on the result.
    if let Some((s, t)) = result.partition {
        println!(" • Partition S: {:?}", s);
        println!(" • Partition T: {:?}", t);
    }
    if let Some(cut_edges) = result.cut_edges {
        println!(" • Number of cut edges: {}", cut_edges.len());
        for edge in &cut_edges {
            println!(
                " - Edge ({}, {}) with weight {}",
                edge.source, edge.target, edge.weight
            );
        }
    }
    // Get graph statistics
    // NOTE(review): `read()` looks like a read-guard on a shared graph
    // handle (RwLock-style) — confirm against the ruvector-mincut API.
    let graph = mincut.graph();
    let stats = graph.read().stats();
    println!("\nGraph statistics:");
    println!(" • Total weight: {}", stats.total_weight);
    println!(" • Min degree: {}", stats.min_degree);
    println!(" • Max degree: {}", stats.max_degree);
    println!(" • Avg degree: {:.2}", stats.avg_degree);
}
/// Demo 2: Dynamic edge insertions and deletions
///
/// Starts from an empty graph, mutates it edge by edge, and prints how the
/// maintained minimum cut changes after every update. Note that
/// `insert_edge`/`delete_edge` return the new cut value directly.
fn demo_dynamic_updates() {
    println!("🔄 DEMO 2: Dynamic Updates");
    println!("Starting with an empty graph and adding edges dynamically...\n");
    let mut mincut = MinCutBuilder::new()
        .exact()
        .build()
        .expect("Failed to build mincut");
    println!("Initial state:");
    println!(" • Min cut: {}", mincut.min_cut_value());
    // Insert edges one by one
    println!("\nInserting edge (1, 2)...");
    let cut = mincut.insert_edge(1, 2, 1.0).expect("Insert failed");
    println!(" • New min cut: {}", cut);
    println!("Inserting edge (2, 3)...");
    let cut = mincut.insert_edge(2, 3, 1.0).expect("Insert failed");
    println!(" • New min cut: {}", cut);
    println!("Inserting edge (3, 1)...");
    let cut = mincut.insert_edge(3, 1, 1.0).expect("Insert failed");
    println!(" • New min cut: {} (triangle formed)", cut);
    // Add a fourth vertex
    println!("\nAdding vertex 4 with edge to vertex 3...");
    println!("Inserting edge (3, 4)...");
    let cut = mincut.insert_edge(3, 4, 2.0).expect("Insert failed");
    println!(" • New min cut: {}", cut);
    // Now delete an edge from the triangle
    println!("\nDeleting edge (3, 1)...");
    let cut = mincut.delete_edge(3, 1).expect("Delete failed");
    println!(" • New min cut: {} (triangle broken)", cut);
    // Add it back
    println!("\nRe-inserting edge (1, 3)...");
    let cut = mincut.insert_edge(1, 3, 1.5).expect("Insert failed");
    println!(" • New min cut: {} (different weight this time)", cut);
    // Check algorithm statistics
    let stats = mincut.stats();
    println!("\nAlgorithm statistics:");
    println!(
        " • Total insertions: {} (including re-insertion)",
        stats.insertions
    );
    println!(" • Total deletions: {}", stats.deletions);
    println!(" • Total queries: {}", stats.queries);
    println!(" • Avg update time: {:.2} μs", stats.avg_update_time_us);
    println!(" • Avg query time: {:.2} μs", stats.avg_query_time_us);
}
/// Demo 3: Exact vs approximate algorithms
///
/// Builds the same bridge graph (two triangles joined by one edge) in exact
/// and in (1+ε)-approximate mode, then compares the cut values, the relative
/// error, and the build-time speedup.
fn demo_exact_vs_approximate() {
    println!("⚖️ DEMO 3: Exact vs Approximate Algorithms");
    println!("Comparing exact and approximate modes on the same graph...\n");
    // Create test graph: a bridge graph (two triangles connected by an edge)
    // The weight-1.0 bridge is the expected minimum cut.
    let edges = vec![
        // Triangle 1
        (1, 2, 2.0),
        (2, 3, 2.0),
        (3, 1, 2.0),
        // Bridge
        (3, 4, 1.0),
        // Triangle 2
        (4, 5, 2.0),
        (5, 6, 2.0),
        (6, 4, 2.0),
    ];
    // Exact mode
    println!("Building with exact algorithm...");
    let start = Instant::now();
    let exact_mincut = MinCutBuilder::new()
        .exact()
        .with_edges(edges.clone())
        .build()
        .expect("Failed to build exact");
    let exact_time = start.elapsed();
    let exact_result = exact_mincut.min_cut();
    println!("Exact algorithm:");
    println!(" • Build time: {:?}", exact_time);
    println!(" • Min cut value: {}", exact_result.value);
    println!(" • Is exact: {}", exact_result.is_exact);
    println!(
        " • Approximation ratio: {}",
        exact_result.approximation_ratio
    );
    // Approximate mode with ε = 0.1 (10% approximation)
    println!("\nBuilding with approximate algorithm (ε = 0.1)...");
    let start = Instant::now();
    let approx_mincut = MinCutBuilder::new()
        .approximate(0.1)
        .with_edges(edges.clone())
        .build()
        .expect("Failed to build approximate");
    let approx_time = start.elapsed();
    let approx_result = approx_mincut.min_cut();
    println!("Approximate algorithm:");
    println!(" • Build time: {:?}", approx_time);
    println!(" • Min cut value: {}", approx_result.value);
    println!(" • Is exact: {}", approx_result.is_exact);
    println!(
        " • Approximation ratio: {}",
        approx_result.approximation_ratio
    );
    // Compare results
    // Error is the percent deviation relative to the exact cut value.
    println!("\nComparison:");
    println!(" • Exact value: {}", exact_result.value);
    println!(" • Approximate value: {}", approx_result.value);
    let error = ((approx_result.value - exact_result.value) / exact_result.value * 100.0).abs();
    println!(" • Error: {:.2}%", error);
    println!(
        " • Speedup: {:.2}x",
        exact_time.as_secs_f64() / approx_time.as_secs_f64()
    );
}
/// Demo 4: Real-time monitoring with thresholds
///
/// Registers per-event-type callbacks and two thresholds on a monitor, feeds
/// it a sequence of cut-value changes via `notify(old, new, changed_edge)`,
/// and prints the aggregated metrics.
fn demo_monitoring() {
    println!("📡 DEMO 4: Real-time Monitoring");
    println!("Setting up event monitoring with thresholds...\n");
    // Create counters for different event types
    // (shared with the callbacks below via Arc + atomic increments)
    let cut_increased_count = Arc::new(AtomicU64::new(0));
    let cut_decreased_count = Arc::new(AtomicU64::new(0));
    let threshold_count = Arc::new(AtomicU64::new(0));
    let disconnected_count = Arc::new(AtomicU64::new(0));
    // Build monitor with thresholds
    // Each `move` closure captures its own clone of the relevant counter.
    let inc_clone = cut_increased_count.clone();
    let dec_clone = cut_decreased_count.clone();
    let thr_clone = threshold_count.clone();
    let dis_clone = disconnected_count.clone();
    // NOTE(review): the "{}{}" formats below print old and new value with no
    // separator — an arrow glyph may have been lost in this copy; confirm.
    let monitor = MonitorBuilder::new()
        .threshold_below(1.5, "critical")
        .threshold_above(5.0, "warning")
        .on_event_type(EventType::CutIncreased, "inc_cb", move |event| {
            inc_clone.fetch_add(1, Ordering::SeqCst);
            println!(
                " [EVENT] Cut increased: {}{}",
                event.old_value, event.new_value
            );
        })
        .on_event_type(EventType::CutDecreased, "dec_cb", move |event| {
            dec_clone.fetch_add(1, Ordering::SeqCst);
            println!(
                " [EVENT] Cut decreased: {}{}",
                event.old_value, event.new_value
            );
        })
        .on_event_type(EventType::ThresholdCrossedBelow, "thr_cb", move |event| {
            thr_clone.fetch_add(1, Ordering::SeqCst);
            println!(
                " [ALERT] Threshold crossed below: {} (threshold: {:?})",
                event.new_value, event.threshold
            );
        })
        .on_event_type(EventType::Disconnected, "dis_cb", move |_event| {
            dis_clone.fetch_add(1, Ordering::SeqCst);
            println!(" [CRITICAL] Graph became disconnected!");
        })
        .build();
    println!("Monitor configured with:");
    println!(" • Critical threshold: < 1.5");
    println!(" • Warning threshold: > 5.0");
    println!(" • 4 event callbacks registered\n");
    // Simulate a series of graph changes
    // notify(old_value, new_value, changed_edge); short sleeps let any
    // asynchronous callback delivery interleave with the output.
    println!("Simulating graph updates...\n");
    monitor.notify(0.0, 2.0, Some((1, 2)));
    std::thread::sleep(std::time::Duration::from_millis(10));
    monitor.notify(2.0, 3.0, Some((2, 3)));
    std::thread::sleep(std::time::Duration::from_millis(10));
    monitor.notify(3.0, 1.0, None);
    std::thread::sleep(std::time::Duration::from_millis(10));
    monitor.notify(1.0, 6.0, Some((3, 4)));
    std::thread::sleep(std::time::Duration::from_millis(10));
    monitor.notify(6.0, 0.0, None);
    std::thread::sleep(std::time::Duration::from_millis(10));
    // Get metrics
    let metrics = monitor.metrics();
    println!("\nMonitoring metrics:");
    println!(" • Total events: {}", metrics.total_events);
    println!(
        " • Cut increased events: {}",
        cut_increased_count.load(Ordering::SeqCst)
    );
    println!(
        " • Cut decreased events: {}",
        cut_decreased_count.load(Ordering::SeqCst)
    );
    println!(
        " • Threshold violations: {}",
        threshold_count.load(Ordering::SeqCst)
    );
    println!(
        " • Disconnection events: {}",
        disconnected_count.load(Ordering::SeqCst)
    );
    println!(" • Min observed cut: {}", metrics.min_observed);
    println!(" • Max observed cut: {}", metrics.max_observed);
    println!(" • Average cut: {:.2}", metrics.avg_cut);
    // Print event breakdown
    println!("\nEvents by type:");
    for (event_type, count) in &metrics.events_by_type {
        println!("{}: {}", event_type, count);
    }
}
/// Demo 5: Network resilience analysis
///
/// Models a 6-node mesh with redundant cross-links and interprets its
/// minimum cut as the number of simultaneous link failures the network can
/// survive; then prints the critical (cut) edges and the resulting split.
fn demo_network_resilience() {
    println!("🛡️ DEMO 5: Network Resilience Analysis");
    println!("Analyzing a network's resistance to failures...\n");
    // Create a network topology: a mesh with redundant paths
    println!("Building a mesh network (6 nodes, 9 edges)...");
    let mincut = MinCutBuilder::new()
        .exact()
        .with_edges(vec![
            // Core ring
            (1, 2, 1.0),
            (2, 3, 1.0),
            (3, 4, 1.0),
            (4, 5, 1.0),
            (5, 6, 1.0),
            (6, 1, 1.0),
            // Cross connections for redundancy
            (1, 3, 1.0),
            (2, 4, 1.0),
            (3, 5, 1.0),
        ])
        .build()
        .expect("Failed to build network");
    let graph = mincut.graph();
    let stats = graph.read().stats();
    println!("\nNetwork topology:");
    println!(" • Nodes: {}", stats.num_vertices);
    println!(" • Links: {}", stats.num_edges);
    println!(" • Avg degree: {:.2}", stats.avg_degree);
    println!(" • Min cut: {}", mincut.min_cut_value());
    // With unit weights, the cut value equals the number of edge-disjoint
    // failures needed to disconnect the network (survives cut - 1 failures).
    println!("\nResilience interpretation:");
    let min_cut = mincut.min_cut_value();
    if min_cut == 0.0 {
        println!(" ❌ Network is disconnected - no resilience");
    } else if min_cut == 1.0 {
        println!(" ⚠️ Single point of failure - low resilience");
    } else if min_cut == 2.0 {
        println!(" ⚡ Moderate resilience - can survive 1 failure");
    } else {
        println!(
            " ✅ High resilience - can survive {} failures",
            min_cut as u32 - 1
        );
    }
    // Simulate edge failures
    println!("\nSimulating link failures...");
    let result = mincut.min_cut();
    if let Some(cut_edges) = result.cut_edges {
        println!("\nCritical edges (minimum cut set):");
        for (i, edge) in cut_edges.iter().enumerate() {
            println!(
                " {}. ({}, {}) - weight {}",
                i + 1,
                edge.source,
                edge.target,
                edge.weight
            );
        }
        println!(
            "\nRemoving these {} edge(s) would disconnect the network!",
            cut_edges.len()
        );
    }
    // Identify the partition
    if let Some((s, t)) = result.partition {
        println!("\nNetwork would split into:");
        println!(" • Component A: {} nodes {:?}", s.len(), s);
        println!(" • Component B: {} nodes {:?}", t.len(), t);
    }
}
/// Demo 6: Performance scaling analysis
///
/// Times build and query on random connected graphs of growing size, then
/// measures per-operation insert/delete cost on a 100-vertex path graph.
/// NOTE(review): the separator on the table header line is an empty string
/// repeated 60 times in this copy — likely a lost box-drawing glyph.
fn demo_performance_scaling() {
    println!("📈 DEMO 6: Performance Scaling");
    println!("Measuring performance at different graph sizes...\n");
    let sizes = vec![10, 50, 100, 200];
    println!(
        "{:<10} {:<15} {:<15} {:<15}",
        "Vertices", "Edges", "Build Time", "Query Time"
    );
    println!("{}", "".repeat(60));
    for n in sizes {
        // Create a random graph
        let mut rng = rand::thread_rng();
        let mut edges = Vec::new();
        // Create a path to ensure connectivity
        for i in 0..n - 1 {
            edges.push((i, i + 1, rng.gen_range(1.0..10.0)));
        }
        // Add random edges for density
        // (duplicates/self-loops are simply skipped, so edge count may vary)
        let num_random_edges = n / 2;
        for _ in 0..num_random_edges {
            let u = rng.gen_range(0..n);
            let v = rng.gen_range(0..n);
            if u != v {
                edges.push((u, v, rng.gen_range(1.0..10.0)));
            }
        }
        // Build and measure
        let start = Instant::now();
        let mincut = MinCutBuilder::new().exact().with_edges(edges).build();
        let build_time = start.elapsed();
        if let Ok(mincut) = mincut {
            let start = Instant::now();
            let _cut = mincut.min_cut_value();
            let query_time = start.elapsed();
            println!(
                "{:<10} {:<15} {:<15?} {:<15?}",
                n,
                mincut.num_edges(),
                build_time,
                query_time
            );
        }
    }
    println!("\n💡 Key observations:");
    println!(" • Query time is O(1) - constant regardless of size");
    println!(" • Build time grows subpolynomially: O(n^{{o(1)}})");
    println!(" • Update time (insert/delete) is also subpolynomial");
    // Demonstrate update performance
    println!("\nMeasuring update performance on n=100 graph...");
    let mut edges = Vec::new();
    for i in 0..99 {
        edges.push((i, i + 1, 1.0));
    }
    let mut mincut = MinCutBuilder::new()
        .exact()
        .with_edges(edges)
        .build()
        .expect("Build failed");
    // Measure insertions
    let start = Instant::now();
    for i in 0..10 {
        let _ = mincut.insert_edge(i, i + 50, 1.0);
    }
    let insert_time = start.elapsed();
    // Measure deletions
    let start = Instant::now();
    for i in 0..10 {
        let _ = mincut.delete_edge(i, i + 1);
    }
    let delete_time = start.elapsed();
    println!("\nUpdate performance (10 operations):");
    println!(" • Total insertion time: {:?}", insert_time);
    println!(" • Avg per insertion: {:?}", insert_time / 10);
    println!(" • Total deletion time: {:?}", delete_time);
    println!(" • Avg per deletion: {:?}", delete_time / 10);
    let stats = mincut.stats();
    println!("\nAggregate statistics:");
    println!(" • Total updates: {}", stats.insertions + stats.deletions);
    println!(" • Avg update time: {:.2} μs", stats.avg_update_time_us);
}
/// Demo 7: Vector-Graph Fusion
///
/// Builds a `FusionGraph` from toy 4-d embeddings, layers explicit relations
/// on top of the similarity edges, and runs a brittleness-aware query.
fn demo_vector_graph_fusion() {
    println!("🔗 DEMO 7: Vector-Graph Fusion");
    println!("Combining vector similarity with graph relations...\n");
    // Create fusion graph with custom config
    // (vector and graph contributions are blended 60/40; only pairs above
    // the 0.5 similarity threshold become vector edges)
    let config = FusionConfig {
        vector_weight: 0.6,
        graph_weight: 0.4,
        similarity_threshold: 0.5,
        top_k: 5,
        ..Default::default()
    };
    let mut fusion = FusionGraph::with_config(config);
    // Ingest document vectors (simulating embeddings)
    // Three near-duplicates per topic A/B, plus an isolated topic C.
    println!("Ingesting document vectors...");
    let docs = vec![
        (1, vec![1.0, 0.0, 0.0, 0.0]), // Topic A
        (2, vec![0.9, 0.1, 0.0, 0.0]), // Similar to Topic A
        (3, vec![0.8, 0.2, 0.0, 0.0]), // Similar to Topic A
        (4, vec![0.0, 1.0, 0.0, 0.0]), // Topic B
        (5, vec![0.0, 0.9, 0.1, 0.0]), // Similar to Topic B
        (6, vec![0.0, 0.0, 1.0, 0.0]), // Topic C (isolated)
    ];
    for (id, vec) in &docs {
        fusion.ingest_node_with_id(*id, vec.clone());
    }
    println!(" • Nodes: {}", fusion.num_nodes());
    println!(" • Edges from similarity: {}", fusion.num_edges());
    // Add explicit relations
    println!("\nAdding explicit graph relations...");
    fusion.add_relation(1, 4, RelationType::References, 0.8);
    fusion.add_relation(2, 5, RelationType::CoOccurs, 0.6);
    println!(" • Total edges: {}", fusion.num_edges());
    println!(" • Min cut estimate: {:.2}", fusion.min_cut());
    // Show fusion edge capacities
    println!("\nEdge capacity computation:");
    println!(" c(u,v) = w_v * f_v(similarity) + w_g * f_g(strength, type)");
    println!(" where w_v=0.6, w_g=0.4");
    for edge in fusion.get_edges().iter().take(5) {
        let origin = match edge.origin {
            fusion::EdgeOrigin::Vector => "Vector",
            fusion::EdgeOrigin::Graph => "Graph",
            fusion::EdgeOrigin::SelfLearn => "Learned",
        };
        println!(
            " • ({}, {}) [{:>7}]: raw={:.2}, capacity={:.4}",
            edge.src, edge.dst, origin, edge.raw_strength, edge.capacity
        );
    }
    // Query with brittleness awareness
    // The result carries a warning when the retrieved subgraph's min-cut
    // indicates fragile connectivity.
    println!("\nQuerying for Topic A documents...");
    let result = fusion.query(&[1.0, 0.0, 0.0, 0.0], 4);
    println!(" • Retrieved: {:?}", result.nodes);
    println!(" • Subgraph min-cut: {:.2}", result.min_cut);
    if let Some(warning) = result.brittleness_warning {
        println!(" • ⚠️ {}", warning);
    } else {
        println!(" • ✓ Good connectivity");
    }
}
/// Demo 8: Brittleness Detection
///
/// Feeds a declining sequence of min-cut observations into a
/// `StructuralMonitor` and prints the health signal and any triggers raised
/// at each step, followed by the trend/volatility summary.
fn demo_brittleness_detection() {
    println!("🔍 DEMO 8: Brittleness Detection");
    println!("Monitoring graph health with structural analysis...\n");
    // NOTE(review): `StructuralMonitorConfig` is imported from the fusion
    // module — presumably a re-export/alias of its MonitorConfig; confirm.
    let mut monitor = StructuralMonitor::with_config(StructuralMonitorConfig {
        window_size: 10,
        lambda_low: 3.0,
        lambda_critical: 1.5,
        volatility_threshold: 0.5,
        trend_slope_threshold: -0.2,
    });
    // Simulate a series of graph states
    // (a monotone decline from healthy to critical)
    println!("Simulating graph evolution...\n");
    let observations = vec![
        (5.0, "Healthy: strong connectivity"),
        (4.5, "Slight decrease"),
        (4.2, "Continuing decline"),
        (3.8, "Below warning threshold"),
        (2.5, "Warning: low connectivity"),
        (1.8, "Approaching critical"),
        (1.0, "Critical: islanding risk!"),
    ];
    for (lambda, description) in observations {
        let triggers = monitor.observe(lambda, vec![(1, 2), (2, 3)]);
        let signal = monitor.signal();
        // NOTE(review): the Disconnected icon is an empty string in this
        // copy — likely a lost glyph; confirm against the original encoding.
        let signal_icon = match signal {
            BrittlenessSignal::Healthy => "🟢",
            BrittlenessSignal::Warning => "🟡",
            BrittlenessSignal::Critical => "🔴",
            BrittlenessSignal::Disconnected => "",
        };
        println!(
            "{} λ={:.1}: {} [{}]",
            signal_icon,
            lambda,
            description,
            signal.as_str()
        );
        for trigger in triggers {
            println!(
                " ⚡ TRIGGER: {:?} (severity: {:.0}%)",
                trigger.trigger_type,
                trigger.severity * 100.0
            );
            println!("{}", trigger.recommendation);
        }
    }
    // Show trend analysis
    println!("\nTrend Analysis:");
    let state = monitor.state();
    // NOTE(review): both arms below are identical empty strings in this copy
    // — almost certainly lost up/down arrow glyphs (↑/↓); confirm.
    let trend_dir = if state.lambda_trend > 0.0 {
        ""
    } else {
        ""
    };
    println!(
        " • Trend slope: {}{:.3} per observation",
        trend_dir,
        state.lambda_trend.abs()
    );
    println!(" • Volatility: {:.3}", state.cut_volatility);
    println!(" • Boundary edges: {}", state.boundary_edges.len());
    println!("\nMonitor Report:");
    println!(" {}", monitor.report());
}
/// Demo 9: Self-Learning Optimization
///
/// Runs an `Optimizer` over a sequence of monitor states (healthy, degraded,
/// critical, recovering) and prints the recommended immediate action, the
/// maintenance plan, and the learning-gate status after each step.
fn demo_self_learning_optimization() {
    println!("🧠 DEMO 9: Self-Learning Optimization");
    println!("Adaptive maintenance planning with learning gate...\n");
    let mut monitor = StructuralMonitor::new();
    let mut optimizer = Optimizer::new();
    // Simulate different graph health states
    let scenarios = vec![
        (5.0, "Healthy graph"),
        (2.5, "Degrading connectivity"),
        (0.8, "Critical brittleness"),
        (3.5, "Recovering"),
        (4.0, "Stable again"),
    ];
    println!("Scenario-based optimization:\n");
    for (lambda, description) in scenarios {
        println!("━━━ {} (λ={}) ━━━", description, lambda);
        // Update monitor
        monitor.observe(lambda, vec![(1, 2)]);
        // Get optimization result
        // (the optimizer reads the monitor's signal/trend, so observe first)
        let result = optimizer.analyze(&monitor);
        println!(
            "Signal: {} | Learning rate: {:.4}",
            result.signal.as_str(),
            optimizer.learning_gate().learning_rate
        );
        // Show immediate action if any
        match &result.immediate_action {
            OptimizerAction::NoOp => {
                println!("Immediate action: None needed");
            }
            OptimizerAction::Rewire { strengthen, .. } => {
                println!(
                    "Immediate action: Rewire ({} edges to strengthen)",
                    strengthen.len()
                );
            }
            OptimizerAction::Reindex { new_threshold, .. } => {
                println!("Immediate action: Reindex (threshold: {:?})", new_threshold);
            }
            OptimizerAction::LearningGate {
                enable,
                rate_multiplier,
            } => {
                println!(
                    "Immediate action: {} learning (rate x{})",
                    if *enable { "Enable" } else { "Disable" },
                    rate_multiplier
                );
            }
            // Fallback keeps the demo working if OptimizerAction grows.
            _ => {
                println!("Immediate action: {:?}", result.immediate_action);
            }
        }
        // Show maintenance plan
        // (only the two highest-priority tasks are printed)
        if !result.plan.is_empty() {
            println!("Maintenance plan: {}", result.plan.summary);
            for task in result.plan.tasks.iter().take(2) {
                println!(" • [P{}] {}", task.priority, task.benefit);
            }
        }
        println!();
    }
    // Show metrics summary
    println!("Final Optimization Metrics:");
    if let Some(result) = optimizer.last_result() {
        for (key, value) in &result.metrics {
            println!("{}: {:.4}", key, value);
        }
    }
    println!("\nLearning Gate Status:");
    let gate = optimizer.learning_gate();
    println!(" • Enabled: {}", gate.enabled);
    println!(" • Current rate: {:.4}", gate.learning_rate);
    println!(" • Base rate: {:.4}", gate.base_rate);
}