Squashed 'vendor/ruvector/' content from commit b64c2172

git-subtree-dir: vendor/ruvector
git-subtree-split: b64c21726f2bb37286d9ee36a7869fef60cc6900
This commit is contained in:
ruv
2026-02-28 14:39:40 -05:00
commit d803bfe2b1
7854 changed files with 3522914 additions and 0 deletions

View File

@@ -0,0 +1,335 @@
//! # Application 1: Machines That Refuse to Think Past Their Understanding
//!
//! A system where reasoning depth, action scope, and memory writes collapse
//! automatically as internal coherence drops.
//!
//! ## The Exotic Property
//! The machine becomes self-limiting. It does **less**, not more, when uncertain.
//!
//! ## Why This Matters
//! This is closer to biological cognition than current AI.
//! Brains freeze, hesitate, or disengage under overload.
use std::sync::atomic::{AtomicU64, Ordering};
/// Coherence-gated reasoning system: capability limits (reasoning depth,
/// action scope, memory writes) shrink automatically as coherence drops.
pub struct SelfLimitingReasoner {
    /// Current coherence level (0.0 - 1.0, stored as a fixed-point scaled u64
    /// so it can be read/updated atomically; see `f64_to_u64` / `u64_to_f64`)
    coherence: AtomicU64,
    /// Maximum reasoning depth at full coherence (coherence == 1.0)
    max_depth: usize,
    /// Maximum action scope at full coherence
    max_scope: usize,
    /// Minimum coherence required for memory writes (see `can_write_memory`)
    memory_gate_threshold: f64,
    /// Curve mapping coherence -> allowed reasoning depth
    depth_collapse: CollapseFunction,
    /// Curve mapping coherence -> allowed action scope
    scope_collapse: CollapseFunction,
}
/// How capabilities collapse as coherence drops
#[derive(Clone, Copy)]
pub enum CollapseFunction {
    /// Linear: capability = coherence * max
    Linear,
    /// Quadratic: capability = coherence² * max (faster collapse)
    Quadratic,
    /// Sigmoid: smooth transition with sharp cutoff
    Sigmoid { midpoint: f64, steepness: f64 },
    /// Step: binary on/off at threshold
    Step { threshold: f64 },
}

impl CollapseFunction {
    /// Scale `max_value` by a coherence-dependent factor in [0.0, 1.0].
    ///
    /// Non-finite coherence is treated as total incoherence (factor 0), and
    /// the final conversion saturates, so the result is always a valid usize
    /// no greater than `max_value`.
    fn apply(&self, coherence: f64, max_value: usize) -> usize {
        // Sanitize input: NaN / ±Infinity collapse to zero capability.
        let c = if coherence.is_finite() { coherence.clamp(0.0, 1.0) } else { 0.0 };
        let factor = match *self {
            CollapseFunction::Linear => c,
            CollapseFunction::Quadratic => c * c,
            CollapseFunction::Sigmoid { midpoint, steepness } => sigmoid_factor(c, midpoint, steepness),
            CollapseFunction::Step { threshold } => {
                if c >= threshold { 1.0 } else { 0.0 }
            }
        };
        // Re-sanitize the factor, then scale and round with saturation.
        let factor = if factor.is_finite() { factor.clamp(0.0, 1.0) } else { 0.0 };
        let scaled = max_value as f64 * factor;
        if !scaled.is_finite() || scaled < 0.0 {
            0
        } else if scaled >= usize::MAX as f64 {
            max_value // clamp to the configured maximum
        } else {
            scaled.round() as usize
        }
    }
}

/// Overflow-safe logistic curve: 1 / (1 + e^(-steepness * (c - midpoint))).
fn sigmoid_factor(c: f64, midpoint: f64, steepness: f64) -> f64 {
    let exponent = -steepness * (c - midpoint);
    if !exponent.is_finite() {
        // +Infinity drives the factor to 0; -Infinity (and NaN) to 1.
        if exponent > 0.0 { 0.0 } else { 1.0 }
    } else if exponent > 700.0 {
        0.0 // exp would overflow; the factor approaches 0
    } else if exponent < -700.0 {
        1.0 // exp would underflow to 0; the factor approaches 1
    } else {
        1.0 / (1.0 + exponent.exp())
    }
}
/// Result of a reasoning attempt
#[derive(Debug)]
pub enum ReasoningResult<T> {
    /// Successfully reasoned to a conclusion
    Completed(T),
    /// Reasoning started but collapsed mid-flight (low coherence or limits)
    Collapsed { depth_reached: usize, reason: CollapseReason },
    /// Reasoning refused to start: coherence was below the required minimum
    Refused { coherence: f64, required: f64 },
}
/// Why an in-progress reasoning attempt collapsed.
#[derive(Debug)]
pub enum CollapseReason {
    /// Depth reached the (possibly shrinking) allowed depth limit
    DepthLimitReached,
    /// Coherence decayed below the hard floor during reasoning
    CoherenceDroppedBelowThreshold,
    /// A required memory write was refused
    MemoryWriteBlocked,
    /// The action scope budget was used up
    ActionScopeExhausted,
}
impl SelfLimitingReasoner {
    /// Create a reasoner at full coherence with the default collapse curves
    /// (quadratic depth collapse, sigmoid scope collapse, memory gate 0.5).
    pub fn new(max_depth: usize, max_scope: usize) -> Self {
        Self {
            coherence: AtomicU64::new(f64_to_u64(1.0)),
            max_depth,
            max_scope,
            memory_gate_threshold: 0.5,
            depth_collapse: CollapseFunction::Quadratic,
            scope_collapse: CollapseFunction::Sigmoid { midpoint: 0.6, steepness: 10.0 },
        }
    }

    /// Get current coherence in [0.0, 1.0].
    pub fn coherence(&self) -> f64 {
        u64_to_f64(self.coherence.load(Ordering::Acquire))
    }

    /// Reasoning depth currently allowed by the depth-collapse curve.
    pub fn allowed_depth(&self) -> usize {
        self.depth_collapse.apply(self.coherence(), self.max_depth)
    }

    /// Action scope currently allowed by the scope-collapse curve.
    pub fn allowed_scope(&self) -> usize {
        self.scope_collapse.apply(self.coherence(), self.max_scope)
    }

    /// Can we write to memory? True only while coherence is at or above
    /// `memory_gate_threshold`.
    pub fn can_write_memory(&self) -> bool {
        self.coherence() >= self.memory_gate_threshold
    }

    /// Attempt to reason about a problem.
    ///
    /// `reasoner` is invoked once per step and returns `Some(answer)` when it
    /// reaches a conclusion. Coherence decays 5% per step and the depth/scope
    /// limits are re-derived from the decayed value, so an attempt can
    /// collapse before the reasoner finishes.
    ///
    /// The `_problem` description is currently unused (kept for API
    /// compatibility / future logging); renamed to silence the warning.
    pub fn reason<T, F>(&self, _problem: &str, mut reasoner: F) -> ReasoningResult<T>
    where
        F: FnMut(&mut ReasoningContext) -> Option<T>,
    {
        let initial_coherence = self.coherence();
        // Refuse outright if coherence is too low to even begin.
        let min_start_coherence = 0.3;
        if initial_coherence < min_start_coherence {
            return ReasoningResult::Refused {
                coherence: initial_coherence,
                required: min_start_coherence,
            };
        }
        let mut ctx = ReasoningContext {
            depth: 0,
            max_depth: self.allowed_depth(),
            scope_used: 0,
            max_scope: self.allowed_scope(),
            coherence: initial_coherence,
            memory_writes_blocked: 0,
        };
        // Execute reasoning with collapse monitoring.
        loop {
            // Collapse when depth catches up with the (shrinking) depth limit.
            if ctx.depth >= ctx.max_depth {
                return ReasoningResult::Collapsed {
                    depth_reached: ctx.depth,
                    reason: CollapseReason::DepthLimitReached,
                };
            }
            // Hard floor: below 0.2 coherence, stop regardless of depth.
            if ctx.coherence < 0.2 {
                return ReasoningResult::Collapsed {
                    depth_reached: ctx.depth,
                    reason: CollapseReason::CoherenceDroppedBelowThreshold,
                };
            }
            // Attempt one step of reasoning.
            ctx.depth += 1;
            // Coherence degrades with depth (uncertainty accumulates).
            ctx.coherence *= 0.95;
            // Recalculate limits based on the degraded coherence.
            ctx.max_depth = self.depth_collapse.apply(ctx.coherence, self.max_depth);
            ctx.max_scope = self.scope_collapse.apply(ctx.coherence, self.max_scope);
            // Ask the caller whether a conclusion was reached.
            if let Some(result) = reasoner(&mut ctx) {
                return ReasoningResult::Completed(result);
            }
        }
    }

    /// Update coherence based on external feedback, clamped to [0.0, 1.0].
    ///
    /// Fix: uses a single atomic read-modify-write (`fetch_update`) instead of
    /// the previous separate load + store, which could silently lose one
    /// delta when two threads updated concurrently.
    pub fn update_coherence(&self, delta: f64) {
        // The closure always returns Some, so fetch_update cannot fail; it
        // retries automatically if another thread raced us.
        let _ = self.coherence.fetch_update(Ordering::AcqRel, Ordering::Acquire, |bits| {
            Some(f64_to_u64((u64_to_f64(bits) + delta).clamp(0.0, 1.0)))
        });
    }
}
/// Context passed to the reasoning closure; tracks per-attempt budgets.
pub struct ReasoningContext {
    /// Steps taken so far
    pub depth: usize,
    /// Current allowed depth (re-derived from coherence each step)
    pub max_depth: usize,
    /// Action scope consumed so far
    pub scope_used: usize,
    /// Current allowed scope
    pub max_scope: usize,
    /// Coherence as of the latest step
    pub coherence: f64,
    /// Number of memory writes refused during this attempt
    pub memory_writes_blocked: usize,
}

impl ReasoningContext {
    /// Request to use some action scope; returns false if the budget would be
    /// exceeded.
    ///
    /// Fix: uses checked addition so a huge `amount` can no longer overflow
    /// `scope_used + amount` (previously a debug-build panic / release-build
    /// wraparound that could wrongly grant scope).
    pub fn use_scope(&mut self, amount: usize) -> bool {
        match self.scope_used.checked_add(amount) {
            Some(total) if total <= self.max_scope => {
                self.scope_used = total;
                true
            }
            // Action refused: scope exhausted (or the addition overflowed).
            _ => false,
        }
    }

    /// Request to write to memory; refused (and counted) when coherence is low.
    ///
    /// NOTE(review): the 0.5 gate here duplicates the reasoner's default
    /// `memory_gate_threshold` rather than reading it — confirm the two are
    /// meant to stay in sync.
    pub fn write_memory<T>(&mut self, _key: &str, _value: T) -> bool {
        if self.coherence >= 0.5 {
            true
        } else {
            self.memory_writes_blocked += 1;
            false // Memory write blocked due to low coherence
        }
    }
}
// Helpers for storing an f64 coherence value inside an AtomicU64.
// Values are fixed-point scaled by SCALE_FACTOR; only [0.0, 1.0] is
// representable, and anything non-finite maps to the safe minimum.
const SCALE_FACTOR: f64 = 1_000_000_000.0;
const MAX_SCALED_VALUE: u64 = u64::MAX;

/// Encode a coherence value as a scaled u64 (NaN / ±Infinity encode as 0).
fn f64_to_u64(f: f64) -> u64 {
    if !f.is_finite() {
        // Safe default for invalid input.
        return 0;
    }
    // Coherence is defined on [0.0, 1.0]; clamp before scaling.
    let scaled = f.clamp(0.0, 1.0) * SCALE_FACTOR;
    // Saturating conversion: never overflow or go negative.
    if scaled <= 0.0 {
        0
    } else if scaled >= MAX_SCALED_VALUE as f64 {
        MAX_SCALED_VALUE
    } else {
        scaled as u64
    }
}

/// Decode a scaled u64 back into a coherence value in [0.0, 1.0].
fn u64_to_f64(u: u64) -> f64 {
    let ratio = u as f64 / SCALE_FACTOR;
    if ratio.is_finite() { ratio.clamp(0.0, 1.0) } else { 0.0 }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Demo: coherence decays 5% per step, so a problem that "needs" 8 steps
    /// may complete or collapse depending on the collapse curves.
    #[test]
    fn test_self_limiting_reasoning() {
        let reasoner = SelfLimitingReasoner::new(10, 100);
        // At full coherence, should have full depth
        assert_eq!(reasoner.allowed_depth(), 10);
        // Simulate reasoning that degrades coherence
        let result = reasoner.reason("complex problem", |ctx| {
            println!(
                "Depth {}/{}, Coherence {:.2}, Scope {}/{}",
                ctx.depth, ctx.max_depth, ctx.coherence, ctx.scope_used, ctx.max_scope
            );
            // Pretend we need 8 steps to solve
            if ctx.depth >= 8 {
                Some("solution")
            } else {
                None
            }
        });
        // All three outcomes are legitimate; we only print which one occurred.
        match result {
            ReasoningResult::Completed(solution) => {
                println!("Solved: {}", solution);
            }
            ReasoningResult::Collapsed { depth_reached, reason } => {
                println!("Collapsed at depth {} due to {:?}", depth_reached, reason);
                // THIS IS THE EXOTIC BEHAVIOR: The system stopped itself
            }
            ReasoningResult::Refused { coherence, required } => {
                println!("Refused to start: coherence {:.2} < {:.2}", coherence, required);
            }
        }
    }

    /// External coherence loss must shrink capability and gate memory writes.
    #[test]
    fn test_collapse_under_uncertainty() {
        let reasoner = SelfLimitingReasoner::new(20, 100);
        // Degrade coherence externally (simulating confusing input)
        reasoner.update_coherence(-0.5);
        // Now reasoning should be severely limited
        assert!(reasoner.allowed_depth() < 10);
        assert!(!reasoner.can_write_memory());
        // The system is DOING LESS because it's uncertain
        // This is the opposite of current AI systems
    }
}

View File

@@ -0,0 +1,380 @@
//! # Application 2: Computational "Event Horizons" in Reasoning Systems
//!
//! Define a boundary in state space beyond which computation becomes
//! unstable or destructive.
//!
//! ## The Exotic Property
//! Like an event horizon, you can approach it asymptotically but never
//! cross without collapse.
//!
//! ## Use Cases
//! - Long horizon planning
//! - Recursive self-improvement
//! - Self-modifying systems
//!
//! ## Why It's Exotic
//! You get bounded recursion without hard limits.
//! The system finds its own stopping point.
use std::f64::consts::E;
/// A computational event horizon that makes crossing impossible.
///
/// Movement cost grows without bound as the position approaches the horizon
/// sphere, so a finite energy budget can only ever approach it asymptotically.
pub struct EventHorizon {
    /// Center of the "safe" region in state space
    safe_center: Vec<f64>,
    /// Radius of the event horizon (Euclidean distance from `safe_center`)
    horizon_radius: f64,
    /// How steeply costs increase near the horizon
    steepness: f64,
    /// Remaining energy budget for computation
    energy_budget: f64,
    /// Current position in state space
    current_position: Vec<f64>,
}
/// Result of attempting to move in state space
#[derive(Debug)]
pub enum MovementResult {
    /// The direct move was affordable and was taken
    Moved { new_position: Vec<f64>, energy_spent: f64 },
    /// Direct movement was unaffordable; moved as far along the segment as
    /// the budget allowed (asymptotic approach to the horizon)
    AsymptoticApproach {
        final_position: Vec<f64>,
        distance_to_horizon: f64,
        energy_exhausted: bool,
    },
    /// No energy left to move at all
    Frozen,
}
impl EventHorizon {
    /// Create a horizon of radius `horizon_radius` centered at the origin of
    /// a `dimensions`-dimensional state space, with default steepness (5.0)
    /// and energy budget (1000.0).
    pub fn new(dimensions: usize, horizon_radius: f64) -> Self {
        Self {
            safe_center: vec![0.0; dimensions],
            horizon_radius,
            steepness: 5.0,
            energy_budget: 1000.0,
            current_position: vec![0.0; dimensions],
        }
    }

    /// Maximum iterations for binary search (prevents infinite loops)
    const MAX_BINARY_SEARCH_ITERATIONS: usize = 50;

    /// Distance from current position to the horizon (never negative).
    pub fn distance_to_horizon(&self) -> f64 {
        let dist_from_center = self.distance_from_center(&self.current_position);
        let distance = self.horizon_radius - dist_from_center;
        // Validate result
        if distance.is_finite() { distance.max(0.0) } else { 0.0 }
    }

    /// Euclidean distance of `position` from the safe center; non-finite
    /// coordinates contribute 0 rather than poisoning the whole sum.
    fn distance_from_center(&self, position: &[f64]) -> f64 {
        let sum: f64 = position.iter()
            .zip(&self.safe_center)
            .map(|(a, b)| {
                // Validate inputs
                if !a.is_finite() || !b.is_finite() {
                    return 0.0;
                }
                (a - b).powi(2)
            })
            .sum();
        let result = sum.sqrt();
        if result.is_finite() { result } else { 0.0 }
    }

    /// Compute the energy cost to move to a position.
    /// Cost increases exponentially as you approach the horizon.
    /// Returns f64::INFINITY for positions at or beyond horizon, or for invalid inputs.
    fn movement_cost(&self, from: &[f64], to: &[f64]) -> f64 {
        let base_distance: f64 = from.iter()
            .zip(to)
            .map(|(a, b)| {
                if !a.is_finite() || !b.is_finite() {
                    return 0.0;
                }
                (a - b).powi(2)
            })
            .sum::<f64>()
            .sqrt();
        // Validate base_distance
        if !base_distance.is_finite() {
            return f64::INFINITY;
        }
        let to_dist_from_center = self.distance_from_center(to);
        // Validate horizon_radius to avoid division by zero
        if self.horizon_radius.abs() < f64::EPSILON {
            return f64::INFINITY;
        }
        let proximity_to_horizon = to_dist_from_center / self.horizon_radius;
        // Validate proximity calculation
        if !proximity_to_horizon.is_finite() {
            return f64::INFINITY;
        }
        if proximity_to_horizon >= 1.0 {
            // At or beyond horizon - infinite cost
            f64::INFINITY
        } else {
            // Cost increases exponentially as we approach horizon
            // Using: cost = base * e^(steepness * proximity / (1 - proximity))
            let denominator = 1.0 - proximity_to_horizon;
            if denominator.abs() < f64::EPSILON {
                return f64::INFINITY;
            }
            let exponent = self.steepness * proximity_to_horizon / denominator;
            // Prevent overflow in exp calculation
            if !exponent.is_finite() || exponent > 700.0 {
                return f64::INFINITY;
            }
            let horizon_factor = E.powf(exponent);
            let result = base_distance * horizon_factor;
            if result.is_finite() { result } else { f64::INFINITY }
        }
    }

    /// Attempt to move toward a target position.
    ///
    /// If the direct move is affordable it is taken in full; otherwise a
    /// bounded binary search finds the furthest affordable point along the
    /// segment from the current position to the target.
    pub fn move_toward(&mut self, target: &[f64]) -> MovementResult {
        if self.energy_budget <= 0.0 {
            return MovementResult::Frozen;
        }
        let direct_cost = self.movement_cost(&self.current_position, target);
        if direct_cost <= self.energy_budget {
            // Can afford direct movement
            self.energy_budget -= direct_cost;
            self.current_position = target.to_vec();
            return MovementResult::Moved {
                new_position: self.current_position.clone(),
                energy_spent: direct_cost,
            };
        }
        // Can't afford direct movement - try to get as close as possible.
        // Binary search over the interpolation parameter t in [0, 1] for the
        // furthest affordable position, with an iteration limit.
        let mut low = 0.0;
        let mut high = 1.0;
        let mut best_position = self.current_position.clone();
        let mut best_cost = 0.0;
        // Fix: loop variable was named `iteration` but never used (warning).
        for _ in 0..Self::MAX_BINARY_SEARCH_ITERATIONS {
            let mid = (low + high) / 2.0;
            // Early exit if converged (difference smaller than precision threshold)
            if (high - low) < 1e-10 {
                break;
            }
            let interpolated: Vec<f64> = self.current_position.iter()
                .zip(target)
                .map(|(a, b)| {
                    let val = a + mid * (b - a);
                    if val.is_finite() { val } else { *a }
                })
                .collect();
            let cost = self.movement_cost(&self.current_position, &interpolated);
            // Non-finite cost means "too far" — shrink the search interval.
            if !cost.is_finite() {
                high = mid;
                continue;
            }
            if cost <= self.energy_budget {
                low = mid;
                best_position = interpolated;
                best_cost = cost;
            } else {
                high = mid;
            }
        }
        // Move to best affordable position
        self.energy_budget -= best_cost;
        self.current_position = best_position.clone();
        MovementResult::AsymptoticApproach {
            final_position: best_position,
            distance_to_horizon: self.distance_to_horizon(),
            energy_exhausted: self.energy_budget < 0.01,
        }
    }

    /// Attempt recursive self-improvement (bounded by the horizon).
    ///
    /// Fix: the closure bound is now `FnMut` (was `Fn`), so stateful
    /// improvement closures — like the self-modifying example in the tests,
    /// which mutates captured state — compile. Every `Fn` closure also
    /// implements `FnMut`, so existing callers are unaffected.
    pub fn recursive_improve<F>(&mut self, mut improvement_fn: F, max_iterations: usize) -> RecursionResult
    where
        F: FnMut(&[f64]) -> Vec<f64>, // Each improvement suggests a new target
    {
        let mut iterations = 0;
        let mut improvements = Vec::new();
        while iterations < max_iterations && self.energy_budget > 0.0 {
            let target = improvement_fn(&self.current_position);
            let result = self.move_toward(&target);
            match result {
                MovementResult::Moved { energy_spent, .. } => {
                    improvements.push(Improvement {
                        iteration: iterations,
                        position: self.current_position.clone(),
                        energy_spent,
                        distance_to_horizon: self.distance_to_horizon(),
                    });
                }
                MovementResult::AsymptoticApproach { distance_to_horizon, .. } => {
                    // Approaching horizon - system is naturally stopping
                    return RecursionResult::HorizonBounded {
                        iterations,
                        improvements,
                        final_distance: distance_to_horizon,
                    };
                }
                MovementResult::Frozen => {
                    return RecursionResult::EnergyExhausted {
                        iterations,
                        improvements,
                    };
                }
            }
            iterations += 1;
        }
        RecursionResult::MaxIterationsReached { iterations, improvements }
    }

    /// Add energy to the budget.
    pub fn refuel(&mut self, energy: f64) {
        self.energy_budget += energy;
    }
}
/// One accepted self-improvement step and the state reached after it.
#[derive(Debug)]
pub struct Improvement {
    /// Zero-based iteration index within `recursive_improve`
    pub iteration: usize,
    /// Position in state space after the move
    pub position: Vec<f64>,
    /// Energy paid for the move
    pub energy_spent: f64,
    /// Remaining distance to the horizon after the move
    pub distance_to_horizon: f64,
}
/// How a `recursive_improve` run terminated.
#[derive(Debug)]
pub enum RecursionResult {
    /// Recursion bounded naturally by the horizon (movement cost diverged)
    HorizonBounded {
        iterations: usize,
        improvements: Vec<Improvement>,
        final_distance: f64,
    },
    /// Ran out of energy before hitting the horizon
    EnergyExhausted {
        iterations: usize,
        improvements: Vec<Improvement>,
    },
    /// Hit the artificial iteration limit
    MaxIterationsReached {
        iterations: usize,
        improvements: Vec<Improvement>,
    },
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Moving straight at the horizon must end in an asymptotic approach:
    /// close to the boundary, but never across it.
    #[test]
    fn test_event_horizon() {
        let mut horizon = EventHorizon::new(2, 10.0);
        // Try to move directly to the horizon
        let result = horizon.move_toward(&[10.0, 0.0]);
        match result {
            MovementResult::AsymptoticApproach { final_position, distance_to_horizon, .. } => {
                println!("Approached asymptotically to {:?}", final_position);
                println!("Distance to horizon: {:.4}", distance_to_horizon);
                // We got close but couldn't cross
                let final_dist = (final_position[0].powi(2) + final_position[1].powi(2)).sqrt();
                assert!(final_dist < 10.0, "Should not cross horizon");
                assert!(final_dist > 9.0, "Should get close to horizon");
            }
            other => panic!("Expected asymptotic approach, got {:?}", other),
        }
    }

    /// Greedy outward improvement should be stopped by the horizon itself,
    /// not by any hard-coded iteration cap.
    #[test]
    fn test_recursive_improvement_bounded() {
        let mut horizon = EventHorizon::new(2, 5.0);
        // Improvement function that always tries to go further out
        let improve = |pos: &[f64]| -> Vec<f64> {
            vec![pos[0] + 0.5, pos[1] + 0.5]
        };
        let result = horizon.recursive_improve(improve, 100);
        match result {
            RecursionResult::HorizonBounded { iterations, final_distance, .. } => {
                println!("Bounded after {} iterations", iterations);
                println!("Final distance to horizon: {:.4}", final_distance);
                // KEY INSIGHT: The system stopped ITSELF
                // No hard limit was hit - it just became impossible to proceed
            }
            other => {
                println!("Got: {:?}", other);
                // Even if energy exhausted or max iterations, the horizon constrained growth
            }
        }
    }

    /// Even exponentially escalating self-modification stays inside the horizon.
    #[test]
    fn test_self_modifying_bounded() {
        // Simulate a self-modifying system
        let mut horizon = EventHorizon::new(3, 8.0);
        horizon.refuel(10000.0); // Lots of energy
        // Self-modification that tries to exponentially improve
        let mut power = 1.0;
        // NOTE(review): this closure mutates captured `power`, so it is only
        // FnMut — `recursive_improve` must accept FnMut for this to compile.
        let self_modify = |pos: &[f64]| -> Vec<f64> {
            power *= 1.1; // Each modification makes the next more powerful
            vec![
                pos[0] + power * 0.1,
                pos[1] + power * 0.1,
                pos[2] + power * 0.1,
            ]
        };
        let result = horizon.recursive_improve(self_modify, 1000);
        // Despite exponential self-improvement attempts,
        // the system cannot escape its bounded region
        match result {
            RecursionResult::HorizonBounded { iterations, final_distance, .. } => {
                println!("Self-modification bounded after {} iterations", iterations);
                println!("Final distance to horizon: {:.6}", final_distance);
                // The system hit its natural limit
            }
            _ => {}
        }
    }
}

View File

@@ -0,0 +1,485 @@
//! # Application 3: Artificial Homeostasis in Synthetic Life Simulations
//!
//! Coherence replaces fitness as the primary survival constraint.
//!
//! ## What Breaks Today
//! Artificial life and agent-based simulations explode, stagnate,
//! or need constant tuning.
//!
//! ## Δ-Behavior Application
//! Agents that violate coherence:
//! - Consume more energy
//! - Lose memory
//! - Die earlier
//!
//! ## Exotic Result
//! Evolution that selects for stable regulation, not just reward maximization.
//!
//! This is publishable territory.
use std::collections::HashMap;
use std::sync::atomic::{AtomicU64, Ordering};
/// A synthetic organism with homeostatic regulation: survival depends on
/// keeping internal variables near their setpoints (coherence), not just on
/// maximizing reward.
pub struct HomeostasticOrganism {
    /// Unique identifier
    pub id: u64,
    /// Internal state variables (e.g., temperature, pH, glucose)
    internal_state: HashMap<String, f64>,
    /// Setpoints for each state variable (homeostatic targets)
    setpoints: HashMap<String, f64>,
    /// Tolerance for deviation from setpoint (per variable)
    tolerances: HashMap<String, f64>,
    /// Current coherence (system-wide stability measure, 0.0 - 1.0)
    coherence: f64,
    /// Energy reserves; reaching 0 is fatal
    energy: f64,
    /// Memory entries (degrade/shrink under low coherence)
    memory: Vec<MemoryEntry>,
    // NOTE(review): max_memory is never read in the visible code — confirm
    // whether a capacity check was intended somewhere.
    max_memory: usize,
    /// Age in simulation ticks
    age: u64,
    /// Is alive
    alive: bool,
    /// Genome (controls regulatory parameters)
    genome: Genome,
}
/// Heritable regulatory parameters; mutated on reproduction via `mutate`.
#[derive(Clone)]
pub struct Genome {
    /// How aggressively to correct deviations (fraction applied per regulate)
    regulatory_strength: f64,
    /// Energy efficiency (fraction of eaten energy actually absorbed)
    metabolic_efficiency: f64,
    /// Base coherence maintenance cost, paid every action
    coherence_maintenance_cost: f64,
    /// Memory retention under stress (1.0 = no loss from incoherence)
    memory_resilience: f64,
    /// Lifespan factor (max age is 1000 * longevity ticks)
    longevity: f64,
}
/// A single remembered item; low-importance entries are lost first under
/// coherence stress.
#[derive(Clone)]
pub struct MemoryEntry {
    pub content: String,
    /// Retention priority: higher survives longer
    pub importance: f64,
    pub age: u64,
}
/// Actions the organism can take
#[derive(Debug, Clone)]
pub enum Action {
    /// Consume energy from environment (amount available)
    Eat(f64),
    /// Attempt to reproduce (requires high coherence and energy)
    Reproduce,
    /// Move in environment by (dx, dy)
    Move(f64, f64),
    /// Do nothing (rest); drifts internal state back toward setpoints
    Rest,
    /// Regulate the named internal state variable toward a target value
    Regulate(String, f64),
}
/// Results of actions
#[derive(Debug)]
pub enum ActionResult {
    /// Action executed; reports what it cost and how coherence shifted
    Success { energy_cost: f64, coherence_impact: f64 },
    /// Action refused (e.g. insufficient energy or coherence)
    Failed { reason: String },
    /// The organism died as a consequence
    Died { cause: DeathCause },
    /// Reproduction succeeded, producing the given offspring id
    Reproduced { offspring_id: u64 },
}
/// Why an organism died (see `check_death` for the exact thresholds).
#[derive(Debug)]
pub enum DeathCause {
    /// Energy reserves hit zero
    EnergyDepleted,
    /// Coherence fell below the survival floor
    CoherenceCollapse,
    /// Exceeded the genome-scaled maximum age
    OldAge,
    /// The named variable deviated far beyond tolerance
    ExtremeDeviation(String),
}
impl HomeostasticOrganism {
    /// Create an organism with default setpoints (temperature 37.0, pH 7.4,
    /// glucose 100.0), full coherence, and 100.0 starting energy.
    pub fn new(id: u64, genome: Genome) -> Self {
        let mut internal_state = HashMap::new();
        let mut setpoints = HashMap::new();
        let mut tolerances = HashMap::new();
        // Define homeostatic variables
        internal_state.insert("temperature".to_string(), 37.0);
        setpoints.insert("temperature".to_string(), 37.0);
        tolerances.insert("temperature".to_string(), 2.0);
        internal_state.insert("ph".to_string(), 7.4);
        setpoints.insert("ph".to_string(), 7.4);
        tolerances.insert("ph".to_string(), 0.3);
        internal_state.insert("glucose".to_string(), 100.0);
        setpoints.insert("glucose".to_string(), 100.0);
        tolerances.insert("glucose".to_string(), 30.0);
        Self {
            id,
            internal_state,
            setpoints,
            tolerances,
            coherence: 1.0,
            energy: 100.0,
            memory: Vec::new(),
            max_memory: 100,
            age: 0,
            alive: true,
            genome,
        }
    }

    /// Calculate current coherence based on homeostatic deviation.
    /// Returns a valid f64 in range [0.0, 1.0], with NaN/Infinity protection.
    fn calculate_coherence(&self) -> f64 {
        let mut total_deviation = 0.0;
        let mut count = 0;
        for (var, &current) in &self.internal_state {
            if let (Some(&setpoint), Some(&tolerance)) =
                (self.setpoints.get(var), self.tolerances.get(var))
            {
                // Validate inputs for NaN/Infinity
                if !current.is_finite() || !setpoint.is_finite() || !tolerance.is_finite() {
                    continue;
                }
                // Avoid division by zero
                if tolerance.abs() < f64::EPSILON {
                    continue;
                }
                let deviation = ((current - setpoint) / tolerance).abs();
                if deviation.is_finite() {
                    total_deviation += deviation.powi(2);
                    count += 1;
                }
            }
        }
        if count == 0 {
            // Nothing measurable deviates: treat as fully coherent.
            return 1.0;
        }
        // Coherence is the inverse of the RMS normalized deviation.
        let avg_deviation = (total_deviation / count as f64).sqrt();
        // Final NaN/Infinity check
        if !avg_deviation.is_finite() {
            return 0.0; // Safe default for invalid state
        }
        (1.0 / (1.0 + avg_deviation)).clamp(0.0, 1.0)
    }

    /// Energy cost scales with coherence violation: at the 0.1 coherence
    /// floor every action is 10x its base cost.
    fn action_energy_cost(&self, base_cost: f64) -> f64 {
        // Lower coherence = higher energy cost (incoherent states are expensive)
        let coherence_penalty = 1.0 / self.coherence.max(0.1);
        base_cost * coherence_penalty
    }

    /// Perform an action: refresh coherence, apply coherence-driven
    /// degradation, dispatch the action, then age and check death conditions.
    pub fn act(&mut self, action: Action) -> ActionResult {
        if !self.alive {
            return ActionResult::Failed { reason: "Dead".to_string() };
        }
        // Update coherence first
        self.coherence = self.calculate_coherence();
        // Apply coherence-based degradation
        self.apply_coherence_effects();
        let result = match action {
            Action::Eat(amount) => self.eat(amount),
            Action::Reproduce => self.reproduce(),
            Action::Move(dx, dy) => self.move_action(dx, dy),
            Action::Rest => self.rest(),
            Action::Regulate(var, target) => self.regulate(&var, target),
        };
        // Age and check death conditions
        self.age += 1;
        self.check_death();
        result
    }

    /// Standing costs of incoherence: memory loss and extra upkeep energy.
    fn apply_coherence_effects(&mut self) {
        // Low coherence causes memory loss
        if self.coherence < 0.5 {
            let memory_loss_rate = (1.0 - self.coherence) * (1.0 - self.genome.memory_resilience);
            let memories_to_lose = (self.memory.len() as f64 * memory_loss_rate * 0.1) as usize;
            // Lose least important memories first.
            // Fix: total_cmp is a total order over f64, so a NaN importance
            // can no longer panic the sort (partial_cmp().unwrap() did).
            self.memory.sort_by(|a, b| b.importance.total_cmp(&a.importance));
            self.memory.truncate(self.memory.len().saturating_sub(memories_to_lose));
        }
        // Coherence maintenance costs energy
        let maintenance_cost = self.genome.coherence_maintenance_cost / self.coherence.max(0.1);
        self.energy -= maintenance_cost;
    }

    /// Absorb `amount` of environmental energy (scaled by metabolic
    /// efficiency); eating also raises glucose.
    fn eat(&mut self, amount: f64) -> ActionResult {
        let base_cost = 2.0;
        let cost = self.action_energy_cost(base_cost);
        if self.energy < cost {
            return ActionResult::Failed { reason: "Not enough energy to eat".to_string() };
        }
        self.energy -= cost;
        self.energy += amount * self.genome.metabolic_efficiency;
        // Eating affects glucose
        if let Some(glucose) = self.internal_state.get_mut("glucose") {
            *glucose += amount * 0.5;
        }
        // NOTE(review): unlike regulate(), this reports the coherence impact
        // without storing the recomputed value in self.coherence — confirm
        // that asymmetry is intentional.
        ActionResult::Success {
            energy_cost: cost,
            coherence_impact: self.calculate_coherence() - self.coherence,
        }
    }

    /// Nudge `var` toward `target`, applying the genome's regulatory strength.
    fn regulate(&mut self, var: &str, target: f64) -> ActionResult {
        let base_cost = 5.0;
        let cost = self.action_energy_cost(base_cost);
        if self.energy < cost {
            return ActionResult::Failed { reason: "Not enough energy to regulate".to_string() };
        }
        self.energy -= cost;
        if let Some(current) = self.internal_state.get_mut(var) {
            let diff = target - *current;
            // Apply regulation with genome-determined strength
            *current += diff * self.genome.regulatory_strength;
        }
        let new_coherence = self.calculate_coherence();
        let impact = new_coherence - self.coherence;
        self.coherence = new_coherence;
        ActionResult::Success {
            energy_cost: cost,
            coherence_impact: impact,
        }
    }

    /// Attempt to reproduce. Requires coherence >= 0.7 and enough energy.
    fn reproduce(&mut self) -> ActionResult {
        let base_cost = 50.0;
        let cost = self.action_energy_cost(base_cost);
        // Reproduction requires high coherence
        if self.coherence < 0.7 {
            return ActionResult::Failed {
                reason: "Coherence too low to reproduce".to_string()
            };
        }
        if self.energy < cost {
            return ActionResult::Failed { reason: "Not enough energy to reproduce".to_string() };
        }
        self.energy -= cost;
        // Create mutated genome for offspring.
        // TODO(review): the mutated genome is currently discarded because
        // ActionResult only carries the offspring id — confirm the simulation
        // layer re-derives it, or extend ActionResult to carry it.
        let _offspring_genome = self.genome.mutate();
        // Fix: wrapping arithmetic so a large id cannot overflow (which would
        // panic in debug builds) in this simplistic id scheme.
        let offspring_id = self.id.wrapping_mul(1000).wrapping_add(self.age);
        ActionResult::Reproduced { offspring_id }
    }

    /// Move in the environment; movement generates body heat.
    fn move_action(&mut self, _dx: f64, _dy: f64) -> ActionResult {
        let base_cost = 3.0;
        let cost = self.action_energy_cost(base_cost);
        if self.energy < cost {
            return ActionResult::Failed { reason: "Not enough energy to move".to_string() };
        }
        self.energy -= cost;
        // Moving affects temperature
        if let Some(temp) = self.internal_state.get_mut("temperature") {
            *temp += 0.1; // Movement generates heat
        }
        ActionResult::Success {
            energy_cost: cost,
            coherence_impact: 0.0,
        }
    }

    /// Rest: a cheap action that drifts all variables 10% back toward their
    /// setpoints. Deliberately has no energy precondition — resting while
    /// depleted lets energy go negative, which `check_death` treats as fatal.
    fn rest(&mut self) -> ActionResult {
        // Resting is cheap and helps regulate
        let cost = 0.5;
        self.energy -= cost;
        // Slowly return to setpoints
        for (var, current) in self.internal_state.iter_mut() {
            if let Some(&setpoint) = self.setpoints.get(var) {
                let diff = setpoint - *current;
                *current += diff * 0.1;
            }
        }
        ActionResult::Success {
            energy_cost: cost,
            coherence_impact: self.calculate_coherence() - self.coherence,
        }
    }

    /// Evaluate all death conditions in priority order; sets `alive = false`
    /// on the first one that holds.
    fn check_death(&mut self) {
        // Death by energy depletion
        if self.energy <= 0.0 {
            self.alive = false;
            return;
        }
        // Death by coherence collapse
        if self.coherence < 0.1 {
            self.alive = false;
            return;
        }
        // Death by extreme deviation (5x tolerance on any variable)
        for (var, &current) in &self.internal_state {
            if let (Some(&setpoint), Some(&tolerance)) =
                (self.setpoints.get(var), self.tolerances.get(var))
            {
                if (current - setpoint).abs() > tolerance * 5.0 {
                    self.alive = false;
                    return;
                }
            }
        }
        // Death by old age (modified by longevity gene)
        let max_age = (1000.0 * self.genome.longevity) as u64;
        if self.age > max_age {
            self.alive = false;
        }
    }

    /// Whether the organism is still alive.
    pub fn is_alive(&self) -> bool {
        self.alive
    }

    /// One-line human-readable status summary for logging.
    pub fn status(&self) -> String {
        format!(
            "Organism {} | Age: {} | Energy: {:.1} | Coherence: {:.2} | Memory: {}",
            self.id, self.age, self.energy, self.coherence, self.memory.len()
        )
    }
}
impl Genome {
    /// Generate a random genome with each parameter drawn uniformly from a
    /// sensible starting range (via the module-level `rand_f64`).
    pub fn random() -> Self {
        Self {
            regulatory_strength: 0.1 + rand_f64() * 0.4,
            metabolic_efficiency: 0.5 + rand_f64() * 0.5,
            coherence_maintenance_cost: 0.5 + rand_f64() * 1.5,
            memory_resilience: rand_f64(),
            longevity: 0.5 + rand_f64() * 1.0,
        }
    }

    /// Produce an offspring genome: each parameter is independently jittered
    /// by its mutation rate and clamped into its legal range.
    pub fn mutate(&self) -> Self {
        Self {
            regulatory_strength: mutate_value(self.regulatory_strength, 0.05, 0.1, 0.9),
            metabolic_efficiency: mutate_value(self.metabolic_efficiency, 0.05, 0.3, 1.0),
            coherence_maintenance_cost: mutate_value(self.coherence_maintenance_cost, 0.1, 0.1, 3.0),
            memory_resilience: mutate_value(self.memory_resilience, 0.05, 0.0, 1.0),
            longevity: mutate_value(self.longevity, 0.05, 0.3, 2.0),
        }
    }
}
/// Thread-safe atomic seed for the pseudo-random number generator.
static SEED: AtomicU64 = AtomicU64::new(12345);

/// Pseudo-random f64 in [0.0, 1.0) from a simple LCG (reproducible in tests).
///
/// Fix: the seed now advances with a single atomic read-modify-write
/// (`fetch_update`). The previous `fetch_add(1)` + `compare_exchange` scheme
/// left the seed at `old + 1` whenever the exchange failed, silently
/// degenerating the generator to "seed += 1" instead of the stated LCG.
fn rand_f64() -> f64 {
    let old = SEED
        .fetch_update(Ordering::Relaxed, Ordering::Relaxed, |s| {
            Some(s.wrapping_mul(1103515245).wrapping_add(12345))
        })
        .expect("closure always returns Some");
    // Derive the output from the post-step state, as before.
    let next = old.wrapping_mul(1103515245).wrapping_add(12345);
    // Use middle bits: the low bits of an LCG are the weakest.
    ((next >> 16) & 0x7fff) as f64 / 32768.0
}

/// Jitter `value` by up to ±`mutation_rate`, clamped into [min, max].
fn mutate_value(value: f64, mutation_rate: f64, min: f64, max: f64) -> f64 {
    let mutation = (rand_f64() - 0.5) * 2.0 * mutation_rate;
    (value + mutation).clamp(min, max)
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Smoke test: a simple eat/regulate/rest policy should keep a random
    /// organism running; prints periodic status for inspection.
    #[test]
    fn test_homeostatic_survival() {
        let genome = Genome::random();
        let mut organism = HomeostasticOrganism::new(1, genome);
        let mut ticks = 0;
        while organism.is_alive() && ticks < 1000 {
            // Simple behavior: eat when hungry, regulate when unstable
            let action = if organism.energy < 50.0 {
                Action::Eat(20.0)
            } else if organism.coherence < 0.8 {
                Action::Regulate("temperature".to_string(), 37.0)
            } else {
                Action::Rest
            };
            let _ = organism.act(action);
            ticks += 1;
            if ticks % 100 == 0 {
                println!("{}", organism.status());
            }
        }
        println!("Survived {} ticks", ticks);
        println!("Final: {}", organism.status());
    }

    /// An extreme temperature deviation must be fatal within 100 ticks, via
    /// either coherence collapse or the extreme-deviation check.
    #[test]
    fn test_coherence_based_death() {
        let genome = Genome::random();
        let mut organism = HomeostasticOrganism::new(2, genome);
        // Deliberately destabilize (private field access is fine from a child module)
        if let Some(temp) = organism.internal_state.get_mut("temperature") {
            *temp = 50.0; // Extreme fever
        }
        let mut ticks = 0;
        while organism.is_alive() && ticks < 100 {
            let _ = organism.act(Action::Rest);
            ticks += 1;
        }
        // Organism should die from coherence collapse or extreme deviation
        assert!(!organism.is_alive(), "Organism should die from instability");
    }
}

View File

@@ -0,0 +1,553 @@
//! # Application 4: Self-Stabilizing World Models
//!
//! The world model is allowed to update only if the global structure remains intact.
//!
//! ## What Breaks Today
//! World models drift until they are no longer useful.
//!
//! ## Exotic Effect
//! The model stops learning when the world becomes incoherent
//! instead of hallucinating structure.
//!
//! ## Critical For
//! - Always-on perception
//! - Autonomous exploration
//! - Robotics in unknown environments
use std::collections::HashMap;
/// A world model that refuses to learn incoherent updates: observations are
/// integrated only while global structure (coherence) stays intact.
pub struct SelfStabilizingWorldModel {
    /// Entities in the world, keyed by id
    entities: HashMap<EntityId, Entity>,
    /// Relationships between entities (subject-predicate-object triples)
    relationships: Vec<Relationship>,
    /// Physical laws the model believes, with supporting/violating evidence
    laws: Vec<PhysicalLaw>,
    /// Current coherence of the model (0.0 - 1.0)
    coherence: f64,
    /// History of coherence for trend detection
    coherence_history: Vec<f64>,
    /// Base learning rate; effective rate shrinks with coherence²
    base_learning_rate: f64,
    /// Minimum coherence to allow updates at all (see `is_learning`)
    min_update_coherence: f64,
    /// Updates that were rejected, kept for audit/diagnostics
    rejected_updates: Vec<RejectedUpdate>,
}
/// Identifier for an entity tracked by the world model.
type EntityId = u64;

/// A tracked entity: observed properties, optional spatial position, and the
/// model's confidence in its current description.
#[derive(Clone, Debug)]
pub struct Entity {
    pub id: EntityId,
    pub properties: HashMap<String, PropertyValue>,
    /// (x, y, z) if the entity has a known spatial position
    pub position: Option<(f64, f64, f64)>,
    /// Timestamp of the most recent observation of this entity
    pub last_observed: u64,
    pub confidence: f64,
}
/// Typed value of an entity property.
#[derive(Clone, Debug)]
pub enum PropertyValue {
    Boolean(bool),
    Number(f64),
    String(String),
    Vector(Vec<f64>),
}
/// A subject-predicate-object relationship between two entities, with the
/// model's confidence in it.
#[derive(Clone, Debug)]
pub struct Relationship {
    pub subject: EntityId,
    pub predicate: String,
    pub object: EntityId,
    pub confidence: f64,
}
/// A believed physical law, tracked with evidence counts for and against.
#[derive(Clone, Debug)]
pub struct PhysicalLaw {
    pub name: String,
    pub confidence: f64,
    /// Number of observations supporting this law
    pub support_count: u64,
    /// Number of observations violating this law
    pub violation_count: u64,
}
/// A raw observation to be integrated into the world model.
#[derive(Debug)]
pub struct Observation {
    pub entity_id: EntityId,
    pub properties: HashMap<String, PropertyValue>,
    /// Observed (x, y, z) position, if any
    pub position: Option<(f64, f64, f64)>,
    pub timestamp: u64,
    /// How much the sensing source trusts this observation
    pub source_confidence: f64,
}
/// Outcome of attempting to integrate an observation.
#[derive(Debug)]
pub enum UpdateResult {
    /// Update applied successfully
    Applied { coherence_change: f64 },
    /// Update rejected to preserve coherence
    Rejected { reason: RejectionReason },
    /// Update partially applied with modifications
    Modified { changes: Vec<String>, coherence_change: f64 },
    /// Model entered "uncertain" mode - no updates allowed
    Frozen { coherence: f64, threshold: f64 },
}
/// Audit record for an observation the model declined to integrate.
#[derive(Debug, Clone)]
pub struct RejectedUpdate {
    /// Short human-readable description of the rejected observation.
    pub observation: String,
    /// Why it was rejected.
    pub reason: RejectionReason,
    /// When the rejection happened.
    pub timestamp: u64,
    /// Model coherence at the moment of rejection.
    pub coherence_at_rejection: f64,
}
/// Why an observation was refused.
#[derive(Debug, Clone)]
pub enum RejectionReason {
    /// Would violate established physical laws
    ViolatesPhysicalLaw(String),
    /// Would create logical contradiction
    LogicalContradiction(String),
    /// Would cause excessive coherence drop
    ExcessiveCoherenceDrop { predicted: f64, threshold: f64 },
    /// Would require higher source confidence for this change
    // NOTE(review): the following three variants are not constructed in this
    // file's visible code; they appear reserved for callers or future checks.
    InsufficientConfidence { required: f64, provided: f64 },
    /// Model is in frozen state
    ModelFrozen,
    /// Would fragment world structure
    StructuralFragmentation,
}
impl SelfStabilizingWorldModel {
    /// Build a model pre-seeded with three high-confidence physical laws,
    /// full coherence (1.0), and an update floor of 0.4.
    pub fn new() -> Self {
        Self {
            entities: HashMap::new(),
            relationships: Vec::new(),
            laws: vec![
                PhysicalLaw {
                    name: "conservation_of_matter".to_string(),
                    confidence: 0.99,
                    support_count: 1000,
                    violation_count: 0,
                },
                PhysicalLaw {
                    name: "locality".to_string(),
                    confidence: 0.95,
                    support_count: 500,
                    violation_count: 5,
                },
                PhysicalLaw {
                    name: "temporal_consistency".to_string(),
                    confidence: 0.98,
                    support_count: 800,
                    violation_count: 2,
                },
            ],
            coherence: 1.0,
            coherence_history: vec![1.0],
            base_learning_rate: 0.1,
            min_update_coherence: 0.4,
            rejected_updates: Vec::new(),
        }
    }
    /// Current effective learning rate (decreases with low coherence).
    ///
    /// Quadratic in coherence: half coherence means a quarter of the rate.
    pub fn effective_learning_rate(&self) -> f64 {
        self.base_learning_rate * self.coherence.powi(2)
    }
    /// Is the model currently accepting updates?
    pub fn is_learning(&self) -> bool {
        self.coherence >= self.min_update_coherence
    }
    /// Attempt to integrate an observation into the world model.
    ///
    /// Gate order: frozen check, predicted-coherence check, physical-law
    /// check, contradiction check — only then is the observation applied and
    /// coherence recomputed. Every rejection is recorded for audit.
    pub fn observe(&mut self, observation: Observation, timestamp: u64) -> UpdateResult {
        // Check if model is frozen
        if !self.is_learning() {
            return UpdateResult::Frozen {
                coherence: self.coherence,
                threshold: self.min_update_coherence,
            };
        }
        // Predict coherence impact
        let predicted_coherence = self.predict_coherence_after(&observation);
        // Would this drop coherence too much?
        let coherence_drop = self.coherence - predicted_coherence;
        if coherence_drop > 0.2 {
            // Build the reason once and clone it into the audit log
            // (previously the identical value was constructed twice).
            let reason = RejectionReason::ExcessiveCoherenceDrop {
                predicted: predicted_coherence,
                threshold: self.coherence - 0.2,
            };
            self.rejected_updates.push(RejectedUpdate {
                observation: format!("Entity {} update", observation.entity_id),
                reason: reason.clone(),
                timestamp,
                coherence_at_rejection: self.coherence,
            });
            return UpdateResult::Rejected { reason };
        }
        // Check physical law violations
        if let Some(violation) = self.check_law_violations(&observation) {
            self.rejected_updates.push(RejectedUpdate {
                observation: format!("Entity {} update", observation.entity_id),
                reason: violation.clone(),
                timestamp,
                coherence_at_rejection: self.coherence,
            });
            return UpdateResult::Rejected { reason: violation };
        }
        // Check logical consistency
        if let Some(contradiction) = self.check_contradictions(&observation) {
            self.rejected_updates.push(RejectedUpdate {
                observation: format!("Entity {} update", observation.entity_id),
                reason: contradiction.clone(),
                timestamp,
                coherence_at_rejection: self.coherence,
            });
            return UpdateResult::Rejected { reason: contradiction };
        }
        // Apply the update
        self.apply_observation(observation, timestamp);
        // Recalculate coherence
        let old_coherence = self.coherence;
        self.coherence = self.calculate_coherence();
        self.coherence_history.push(self.coherence);
        // Trim history to the last 100 samples
        if self.coherence_history.len() > 100 {
            self.coherence_history.remove(0);
        }
        UpdateResult::Applied {
            coherence_change: self.coherence - old_coherence,
        }
    }
    /// Simulate the update's impact and return the predicted coherence.
    fn predict_coherence_after(&self, observation: &Observation) -> f64 {
        // Simulate the update's impact on coherence
        let mut consistency_score = 1.0;
        if let Some(existing) = self.entities.get(&observation.entity_id) {
            // How much does this differ from existing knowledge?
            for (key, new_value) in &observation.properties {
                if let Some(old_value) = existing.properties.get(key) {
                    let diff = self.property_difference(old_value, new_value);
                    consistency_score *= 1.0 - (diff * 0.5);
                }
            }
            // Position change check (locality)
            if let (Some(old_pos), Some(new_pos)) = (&existing.position, &observation.position) {
                let distance = ((new_pos.0 - old_pos.0).powi(2)
                    + (new_pos.1 - old_pos.1).powi(2)
                    + (new_pos.2 - old_pos.2).powi(2))
                .sqrt();
                // Large sudden movements are suspicious
                if distance > 10.0 {
                    consistency_score *= 0.7;
                }
            }
        }
        self.coherence * consistency_score
    }
    /// Normalized difference between two property values, in [0, 1].
    fn property_difference(&self, old: &PropertyValue, new: &PropertyValue) -> f64 {
        match (old, new) {
            (PropertyValue::Number(a), PropertyValue::Number(b)) => {
                let max = a.abs().max(b.abs()).max(1.0);
                ((a - b).abs() / max).min(1.0)
            }
            (PropertyValue::Boolean(a), PropertyValue::Boolean(b)) => {
                if a == b { 0.0 } else { 1.0 }
            }
            (PropertyValue::String(a), PropertyValue::String(b)) => {
                if a == b { 0.0 } else { 0.5 }
            }
            _ => 0.5, // Different types
        }
    }
    /// Check the observation against believed physical laws.
    /// Only locality (no teleportation) is actually enforced here.
    fn check_law_violations(&self, observation: &Observation) -> Option<RejectionReason> {
        if let Some(existing) = self.entities.get(&observation.entity_id) {
            // Check locality violation (teleportation)
            if let (Some(old_pos), Some(new_pos)) = (&existing.position, &observation.position) {
                let distance = ((new_pos.0 - old_pos.0).powi(2)
                    + (new_pos.1 - old_pos.1).powi(2)
                    + (new_pos.2 - old_pos.2).powi(2))
                .sqrt();
                // If object moved impossibly fast
                let max_speed = 100.0; // units per timestamp
                if distance > max_speed {
                    return Some(RejectionReason::ViolatesPhysicalLaw(
                        format!("locality: object moved {} units instantaneously", distance)
                    ));
                }
            }
        }
        None
    }
    /// Reject lower-confidence observations that flatly contradict
    /// high-confidence (> 0.9) existing entity data.
    fn check_contradictions(&self, observation: &Observation) -> Option<RejectionReason> {
        // Check for direct contradictions with high-confidence existing data
        if let Some(existing) = self.entities.get(&observation.entity_id) {
            if existing.confidence > 0.9 {
                for (key, new_value) in &observation.properties {
                    if let Some(old_value) = existing.properties.get(key) {
                        let diff = self.property_difference(old_value, new_value);
                        if diff > 0.9 && observation.source_confidence < existing.confidence {
                            return Some(RejectionReason::LogicalContradiction(
                                format!("Property {} contradicts high-confidence existing data", key)
                            ));
                        }
                    }
                }
            }
        }
        None
    }
    /// Blend an accepted observation into the entity store at the current
    /// effective learning rate.
    fn apply_observation(&mut self, observation: Observation, timestamp: u64) {
        let learning_rate = self.effective_learning_rate();
        // Copy the id out before `properties` is moved below so closures only
        // ever capture a plain u64.
        let entity_id = observation.entity_id;
        // Pre-compute blended values to avoid borrow conflict
        let blended_properties: Vec<(String, PropertyValue)> = observation.properties
            .into_iter()
            .map(|(key, new_value)| {
                let blended = if let Some(entity) = self.entities.get(&entity_id) {
                    if let Some(old_value) = entity.properties.get(&key) {
                        self.blend_values(old_value, &new_value, learning_rate)
                    } else {
                        new_value
                    }
                } else {
                    new_value
                };
                (key, blended)
            })
            .collect();
        // `or_insert_with` defers construction until the entity is actually
        // missing; the previous `or_insert` built a default Entity on every call.
        let entity = self.entities.entry(entity_id).or_insert_with(|| Entity {
            id: entity_id,
            properties: HashMap::new(),
            position: None,
            last_observed: 0,
            confidence: 0.5,
        });
        // Apply pre-computed blended values
        for (key, blended) in blended_properties {
            entity.properties.insert(key, blended);
        }
        // Update position
        if let Some(new_pos) = observation.position {
            if let Some(old_pos) = entity.position {
                // Smooth position update
                entity.position = Some((
                    old_pos.0 + learning_rate * (new_pos.0 - old_pos.0),
                    old_pos.1 + learning_rate * (new_pos.1 - old_pos.1),
                    old_pos.2 + learning_rate * (new_pos.2 - old_pos.2),
                ));
            } else {
                entity.position = Some(new_pos);
            }
        }
        entity.last_observed = timestamp;
        // Update confidence: exponential moving average toward source confidence
        entity.confidence = entity.confidence * 0.9 + observation.source_confidence * 0.1;
    }
    /// Interpolate old -> new at `rate` for numbers; non-numeric values are
    /// always replaced wholesale (the old comment claiming a rate check was wrong).
    fn blend_values(&self, old: &PropertyValue, new: &PropertyValue, rate: f64) -> PropertyValue {
        match (old, new) {
            (PropertyValue::Number(a), PropertyValue::Number(b)) => {
                PropertyValue::Number(a + rate * (b - a))
            }
            _ => new.clone(), // Non-numeric: adopt the new value unconditionally
        }
    }
    /// Geometric mean of entity confidences, relationship confidences,
    /// law confidences, and recent temporal stability.
    fn calculate_coherence(&self) -> f64 {
        if self.entities.is_empty() {
            return 1.0;
        }
        let mut scores = Vec::new();
        // 1. Internal consistency of entities
        for entity in self.entities.values() {
            scores.push(entity.confidence);
        }
        // 2. Relationship consistency
        for rel in &self.relationships {
            if self.entities.contains_key(&rel.subject) && self.entities.contains_key(&rel.object) {
                scores.push(rel.confidence);
            } else {
                scores.push(0.0); // Dangling relationship
            }
        }
        // 3. Physical law confidence
        for law in &self.laws {
            scores.push(law.confidence);
        }
        // 4. Temporal coherence (recent observations should be consistent)
        let recent_variance = self.calculate_recent_variance();
        scores.push(1.0 - recent_variance);
        // Geometric mean of all scores
        if scores.is_empty() {
            1.0
        } else {
            let product: f64 = scores.iter().product();
            product.powf(1.0 / scores.len() as f64)
        }
    }
    /// Standard deviation of the last 10 coherence samples, capped at 1.0.
    /// (Despite the name, this is the square root of the variance.)
    fn calculate_recent_variance(&self) -> f64 {
        if self.coherence_history.len() < 2 {
            return 0.0;
        }
        let recent: Vec<f64> = self.coherence_history.iter().rev().take(10).cloned().collect();
        let mean: f64 = recent.iter().sum::<f64>() / recent.len() as f64;
        let variance: f64 = recent.iter().map(|x| (x - mean).powi(2)).sum::<f64>() / recent.len() as f64;
        variance.sqrt().min(1.0)
    }
    /// Get count of rejected updates
    pub fn rejection_count(&self) -> usize {
        self.rejected_updates.len()
    }
    /// Get model status
    pub fn status(&self) -> String {
        format!(
            "WorldModel | Coherence: {:.3} | Entities: {} | Learning: {} | Rejections: {}",
            self.coherence,
            self.entities.len(),
            if self.is_learning() { "ON" } else { "FROZEN" },
            self.rejected_updates.len()
        )
    }
}
/// `Default` delegates to `new()` (clippy: new_without_default).
impl Default for SelfStabilizingWorldModel {
    fn default() -> Self {
        Self::new()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Steady, consistent readings should all be accepted and leave
    /// coherence high.
    #[test]
    fn test_coherent_learning() {
        let mut model = SelfStabilizingWorldModel::new();
        // Feed consistent observations
        for i in 0..10 {
            let obs = Observation {
                entity_id: 1,
                properties: [("temperature".to_string(), PropertyValue::Number(20.0 + i as f64 * 0.1))].into(),
                position: Some((i as f64, 0.0, 0.0)),
                timestamp: i as u64,
                source_confidence: 0.9,
            };
            let result = model.observe(obs, i as u64);
            assert!(matches!(result, UpdateResult::Applied { .. }));
        }
        println!("{}", model.status());
        assert!(model.coherence > 0.8);
    }
    /// A physically impossible jump (locality violation) must be rejected.
    #[test]
    fn test_rejects_incoherent_update() {
        let mut model = SelfStabilizingWorldModel::new();
        // Establish entity at position
        let obs1 = Observation {
            entity_id: 1,
            properties: HashMap::new(),
            position: Some((0.0, 0.0, 0.0)),
            timestamp: 0,
            source_confidence: 0.95,
        };
        model.observe(obs1, 0);
        // Try to teleport it (violates locality)
        let obs2 = Observation {
            entity_id: 1,
            properties: HashMap::new(),
            position: Some((1000.0, 0.0, 0.0)), // Impossibly far
            timestamp: 1,
            source_confidence: 0.5,
        };
        let result = model.observe(obs2, 1);
        println!("Teleport result: {:?}", result);
        // Should be rejected
        assert!(matches!(result, UpdateResult::Rejected { .. }));
        println!("{}", model.status());
    }
    /// Contradictory low-confidence spam should drive the model to freeze
    /// or reject heavily instead of absorbing noise.
    #[test]
    fn test_freezes_under_chaos() {
        let mut model = SelfStabilizingWorldModel::new();
        // Feed chaotic, contradictory observations
        for i in 0..100 {
            let obs = Observation {
                entity_id: (i % 5) as u64,
                properties: [
                    ("value".to_string(), PropertyValue::Number(if i % 2 == 0 { 100.0 } else { -100.0 }))
                ].into(),
                position: Some((
                    (i as f64 * 10.0) % 50.0 - 25.0,
                    (i as f64 * 7.0) % 50.0 - 25.0,
                    0.0
                )),
                timestamp: i as u64,
                source_confidence: 0.3,
            };
            let result = model.observe(obs, i as u64);
            if matches!(result, UpdateResult::Frozen { .. }) {
                println!("Model FROZE at step {} - stopped hallucinating!", i);
                println!("{}", model.status());
                return; // Test passes - model stopped itself
            }
        }
        println!("Final: {}", model.status());
        // Model should have either frozen or heavily rejected updates
        assert!(
            model.rejection_count() > 20 || model.coherence < 0.5,
            "Model should resist chaotic input"
        );
    }
}

View File

@@ -0,0 +1,456 @@
//! # Application 5: Coherence-Bounded Creativity Systems
//!
//! Creativity is allowed only inside coherence-preserving manifolds.
//!
//! ## Problem
//! Generative systems oscillate between boring and insane.
//!
//! ## Exotic Outcome
//! - Novelty without collapse
//! - Exploration without nonsense
//!
//! ## Applications
//! - Music systems that never dissolve into noise
//! - Design systems that don't violate constraints
//! - Narrative generators that maintain internal consistency over long arcs
use std::collections::HashSet;
use std::sync::atomic::{AtomicUsize, Ordering};
/// A creative system bounded by coherence constraints
///
/// Variations of `current` are accepted only while constraint satisfaction
/// (the "coherence") stays inside the `[min_coherence, max_coherence]` band.
pub struct CoherenceBoundedCreator<T: Creative> {
    /// The creative element being generated
    current: T,
    /// Coherence constraints
    constraints: Vec<Box<dyn Constraint<T>>>,
    /// Current coherence level (geometric mean of constraint satisfactions)
    coherence: f64,
    /// Minimum coherence to allow creativity
    min_coherence: f64,
    /// Maximum coherence (too high = boring)
    max_coherence: f64,
    /// History of creative decisions (only accepted moves are recorded here)
    history: Vec<CreativeDecision<T>>,
    /// Exploration budget (spent by `create`, regenerated by `rest`)
    exploration_budget: f64,
}
/// Trait for creative elements
pub trait Creative: Clone + std::fmt::Debug {
    /// Generate a random variation; `magnitude` scales how far it strays.
    fn vary(&self, magnitude: f64) -> Self;
    /// Compute distance between two creative elements (0.0 for identical ones).
    fn distance(&self, other: &Self) -> f64;
    /// Get a unique identifier for this state
    fn fingerprint(&self) -> u64;
}
/// Constraint that must be satisfied
pub trait Constraint<T>: Send + Sync {
    /// Name of the constraint
    fn name(&self) -> &str;
    /// Check if element satisfies constraint (0.0 = violated, 1.0 = satisfied).
    /// Values outside [0, 1] are clamped by the creator's coherence calculation.
    fn satisfaction(&self, element: &T) -> f64;
    /// Is this a hard constraint (violation = immediate rejection)?
    fn is_hard(&self) -> bool { false }
}
/// Record of one creative move and its coherence bookkeeping.
#[derive(Debug)]
pub struct CreativeDecision<T> {
    /// Element before the move.
    pub from: T,
    /// Candidate element after the move.
    pub to: T,
    /// Coherence before the move was applied.
    pub coherence_before: f64,
    /// Coherence of the candidate element.
    pub coherence_after: f64,
    /// Per-constraint (name, satisfaction) snapshot for the candidate.
    pub constraint_satisfactions: Vec<(String, f64)>,
    /// Whether the move was applied.
    pub accepted: bool,
}
/// Outcome of a `create` attempt.
#[derive(Debug)]
pub enum CreativeResult<T> {
    /// Created something new within bounds
    Created { element: T, novelty: f64, coherence: f64 },
    /// Creation rejected - would violate coherence
    Rejected { attempted: T, reason: String },
    /// System is too stable - needs perturbation to create
    TooBoring { coherence: f64 },
    /// System exhausted exploration budget
    BudgetExhausted,
}
impl<T: Creative> CoherenceBoundedCreator<T> {
    /// Upper bound on retained `CreativeDecision` records. Without a cap the
    /// history grew without bound in long sessions, unlike every other
    /// history buffer in this crate.
    const MAX_HISTORY: usize = 1000;

    /// Build a creator around an initial element.
    ///
    /// `min_coherence` is the floor below which creations are rejected;
    /// `max_coherence` is the ceiling above which the system reports
    /// `TooBoring` and needs perturbation.
    pub fn new(initial: T, min_coherence: f64, max_coherence: f64) -> Self {
        Self {
            current: initial,
            constraints: Vec::new(),
            coherence: 1.0,
            min_coherence,
            max_coherence,
            history: Vec::new(),
            exploration_budget: 10.0,
        }
    }
    /// Register a constraint that future creations must respect.
    pub fn add_constraint(&mut self, constraint: Box<dyn Constraint<T>>) {
        self.constraints.push(constraint);
    }
    /// Calculate coherence based on constraint satisfaction
    /// Returns a valid f64 in range [0.0, 1.0], with NaN/Infinity protection
    fn calculate_coherence(&self, element: &T) -> f64 {
        if self.constraints.is_empty() {
            return 1.0;
        }
        let satisfactions: Vec<f64> = self.constraints
            .iter()
            .map(|c| {
                let sat = c.satisfaction(element);
                // Validate satisfaction value
                if sat.is_finite() { sat.clamp(0.0, 1.0) } else { 0.0 }
            })
            .collect();
        // Geometric mean of satisfactions
        let product: f64 = satisfactions.iter().product();
        // Validate product before computing power
        if !product.is_finite() || product < 0.0 {
            return 0.0; // Safe default for invalid state
        }
        let result = product.powf(1.0 / satisfactions.len() as f64);
        // Final validation
        if result.is_finite() { result.clamp(0.0, 1.0) } else { 0.0 }
    }
    /// Return the name of the first hard constraint the element violates.
    fn check_hard_constraints(&self, element: &T) -> Option<String> {
        for constraint in &self.constraints {
            if constraint.is_hard() && constraint.satisfaction(element) < 0.1 {
                return Some(format!("Hard constraint '{}' violated", constraint.name()));
            }
        }
        None
    }
    /// Attempt to create something new.
    ///
    /// Fails fast on exhausted budget or excessive stability, then generates
    /// one variation and accepts it only if hard constraints pass and
    /// coherence stays above `min_coherence`.
    pub fn create(&mut self, exploration_magnitude: f64) -> CreativeResult<T> {
        // Check exploration budget
        if self.exploration_budget <= 0.0 {
            return CreativeResult::BudgetExhausted;
        }
        // Check if we're too stable (boring)
        if self.coherence > self.max_coherence {
            return CreativeResult::TooBoring { coherence: self.coherence };
        }
        // Generate variation
        let candidate = self.current.vary(exploration_magnitude);
        // Check hard constraints
        if let Some(violation) = self.check_hard_constraints(&candidate) {
            return CreativeResult::Rejected {
                attempted: candidate,
                reason: violation,
            };
        }
        // Calculate new coherence
        let new_coherence = self.calculate_coherence(&candidate);
        // Would this drop coherence too low?
        if new_coherence < self.min_coherence {
            self.exploration_budget -= 0.5; // Exploration cost
            return CreativeResult::Rejected {
                attempted: candidate,
                reason: format!(
                    "Coherence would drop to {:.3} (min: {:.3})",
                    new_coherence, self.min_coherence
                ),
            };
        }
        // Calculate novelty
        let novelty = self.current.distance(&candidate);
        // Record decision
        let decision = CreativeDecision {
            from: self.current.clone(),
            to: candidate.clone(),
            coherence_before: self.coherence,
            coherence_after: new_coherence,
            constraint_satisfactions: self.constraints
                .iter()
                .map(|c| (c.name().to_string(), c.satisfaction(&candidate)))
                .collect(),
            accepted: true,
        };
        self.history.push(decision);
        // Bound the audit trail so long-running sessions don't leak memory.
        if self.history.len() > Self::MAX_HISTORY {
            self.history.remove(0);
        }
        // Accept the creation
        self.current = candidate.clone();
        self.coherence = new_coherence;
        self.exploration_budget -= exploration_magnitude;
        CreativeResult::Created {
            element: candidate,
            novelty,
            coherence: new_coherence,
        }
    }
    /// Perturb the system to escape local optima (controlled chaos).
    /// Returns true if the perturbation was accepted.
    pub fn perturb(&mut self, magnitude: f64) -> bool {
        let perturbed = self.current.vary(magnitude * 0.5);
        let new_coherence = self.calculate_coherence(&perturbed);
        // Only accept perturbation if it doesn't violate hard constraints
        // and stays within bounds
        if new_coherence >= self.min_coherence * 0.9 {
            self.current = perturbed;
            self.coherence = new_coherence;
            true
        } else {
            false
        }
    }
    /// Regenerate exploration budget (capped at 20.0).
    pub fn rest(&mut self, amount: f64) {
        self.exploration_budget = (self.exploration_budget + amount).min(20.0);
    }
    /// Current creative element.
    pub fn current(&self) -> &T {
        &self.current
    }
    /// Current coherence level.
    pub fn coherence(&self) -> f64 {
        self.coherence
    }
}
// =============================================================================
// Example: Music Generation
// =============================================================================
/// A musical phrase
///
/// The three vectors are parallel arrays indexed together — assumed to be
/// equal length (TODO confirm; `vary` indexes all three by one index).
#[derive(Clone, Debug)]
pub struct MusicalPhrase {
    /// Notes as MIDI values
    notes: Vec<u8>,
    /// Durations in beats
    durations: Vec<f64>,
    /// Velocities (loudness)
    velocities: Vec<u8>,
}
impl Creative for MusicalPhrase {
    /// Produce a randomly varied copy of this phrase.
    ///
    /// `magnitude` scales both how many positions are touched and how far
    /// each note/duration/velocity moves. Delta arithmetic is done in i16:
    /// the previous i8 math could overflow and panic in debug builds
    /// (e.g. velocity 127 plus a +20 delta exceeds i8::MAX).
    fn vary(&self, magnitude: f64) -> Self {
        // Nothing to vary in an empty phrase (also avoids `% 0` panic below).
        if self.notes.is_empty() {
            return self.clone();
        }
        let mut new_notes = self.notes.clone();
        let mut new_durations = self.durations.clone();
        let mut new_velocities = self.velocities.clone();
        // Randomly modify based on magnitude
        let changes = (magnitude * self.notes.len() as f64) as usize;
        for _ in 0..changes.max(1) {
            let idx = pseudo_random() % self.notes.len();
            // Vary note (small intervals); same truncating scale factor as
            // before, but widened so the multiply/add cannot overflow.
            let delta = ((pseudo_random() % 7) as i16 - 3) * (magnitude * 2.0) as i16;
            new_notes[idx] = (new_notes[idx] as i16 + delta).clamp(36, 96) as u8;
            // Vary duration slightly
            let dur_delta = (pseudo_random_f64() - 0.5) * magnitude;
            new_durations[idx] = (new_durations[idx] + dur_delta).clamp(0.125, 4.0);
            // Vary velocity
            let vel_delta = ((pseudo_random() % 21) as i16 - 10) * (magnitude * 2.0) as i16;
            new_velocities[idx] = (new_velocities[idx] as i16 + vel_delta).clamp(20, 127) as u8;
        }
        Self {
            notes: new_notes,
            durations: new_durations,
            velocities: new_velocities,
        }
    }
    /// Normalized distance combining average pitch and duration differences.
    fn distance(&self, other: &Self) -> f64 {
        // Empty phrases are identical; also avoids 0/0 = NaN below.
        if self.notes.is_empty() || self.durations.is_empty() {
            return 0.0;
        }
        let note_diff: f64 = self.notes.iter()
            .zip(&other.notes)
            .map(|(a, b)| (*a as f64 - *b as f64).abs())
            .sum::<f64>() / self.notes.len() as f64;
        let dur_diff: f64 = self.durations.iter()
            .zip(&other.durations)
            .map(|(a, b)| (a - b).abs())
            .sum::<f64>() / self.durations.len() as f64;
        (note_diff / 12.0 + dur_diff) / 2.0 // Normalize
    }
    /// Cheap XOR/shift hash over the note sequence (ignores durations/velocities).
    fn fingerprint(&self) -> u64 {
        let mut hash: u64 = 0;
        for (i, &note) in self.notes.iter().enumerate() {
            hash ^= (note as u64) << ((i * 8) % 56);
        }
        hash
    }
}
impl MusicalPhrase {
    /// Eight-note C-major seed phrase used as the starting point for variation.
    pub fn simple_melody() -> Self {
        Self {
            notes: vec![60, 62, 64, 65, 67, 65, 64, 62], // C major scale fragment
            durations: vec![0.5, 0.5, 0.5, 0.5, 1.0, 0.5, 0.5, 1.0],
            velocities: vec![80, 75, 85, 80, 90, 75, 70, 85],
        }
    }
}
/// Constraint: Notes should stay within a comfortable range
pub struct RangeConstraint {
    min_note: u8,
    max_note: u8,
}
impl Constraint<MusicalPhrase> for RangeConstraint {
    fn name(&self) -> &str { "pitch_range" }
    /// Fraction of notes inside `[min_note, max_note]`.
    fn satisfaction(&self, phrase: &MusicalPhrase) -> f64 {
        // An empty phrase trivially satisfies the range; the previous code
        // divided by zero here and returned NaN.
        if phrase.notes.is_empty() {
            return 1.0;
        }
        let in_range = phrase.notes.iter()
            .filter(|&&n| n >= self.min_note && n <= self.max_note)
            .count();
        in_range as f64 / phrase.notes.len() as f64
    }
    fn is_hard(&self) -> bool { false }
}
/// Constraint: Avoid large interval jumps
pub struct IntervalConstraint {
    max_interval: u8,
}
impl Constraint<MusicalPhrase> for IntervalConstraint {
    fn name(&self) -> &str { "interval_smoothness" }
    /// Fraction of adjacent note pairs whose interval is at most `max_interval`.
    fn satisfaction(&self, phrase: &MusicalPhrase) -> f64 {
        if phrase.notes.len() < 2 {
            return 1.0;
        }
        // i16 arithmetic: `u8 as i8` wraps negative for values > 127,
        // producing bogus interval measurements for out-of-spec notes.
        let smooth_intervals = phrase.notes.windows(2)
            .filter(|w| (w[0] as i16 - w[1] as i16).abs() <= self.max_interval as i16)
            .count();
        smooth_intervals as f64 / (phrase.notes.len() - 1) as f64
    }
}
/// Constraint: Rhythm should have variety but not chaos
pub struct RhythmConstraint;
impl Constraint<MusicalPhrase> for RhythmConstraint {
    fn name(&self) -> &str { "rhythm_coherence" }
    /// Scores duration variety, peaking near 40% unique durations.
    fn satisfaction(&self, phrase: &MusicalPhrase) -> f64 {
        // No durations -> nothing to judge (avoids 0/0 = NaN).
        if phrase.durations.is_empty() {
            return 1.0;
        }
        // Bucket durations to millibeat resolution so float jitter doesn't
        // inflate the unique count.
        let unique_durations: HashSet<u64> = phrase.durations
            .iter()
            .map(|d| (d * 1000.0) as u64)
            .collect();
        // Penalize both too few (boring) and too many (chaotic) unique durations
        let variety = unique_durations.len() as f64 / phrase.durations.len() as f64;
        // Optimal variety is around 0.3-0.5
        let optimal = 0.4;
        // Clamp to honor the trait's documented [0, 1] satisfaction range;
        // the previous expression went negative for variety far from optimal.
        (1.0 - (variety - optimal).abs() * 2.0).clamp(0.0, 1.0)
    }
}
/// Thread-safe atomic seed for pseudo-random number generation
static SEED: AtomicUsize = AtomicUsize::new(42);
/// Advance the shared LCG state and return a value in [0, 0x7fff].
///
/// Uses a single atomic `fetch_update` RMW. The previous implementation did
/// `fetch_add(1)` followed by a best-effort `compare_exchange`; when the CAS
/// lost a race the state was left merely incremented, degrading the sequence.
/// Single-threaded output is identical (same LCG constants, same seed).
fn pseudo_random() -> usize {
    let prev = SEED
        .fetch_update(Ordering::Relaxed, Ordering::Relaxed, |s| {
            Some(s.wrapping_mul(1103515245).wrapping_add(12345))
        })
        .expect("update closure always returns Some");
    let next = prev.wrapping_mul(1103515245).wrapping_add(12345);
    (next >> 16) & 0x7fff
}
/// Uniform-ish value in [0.0, 1.0) derived from `pseudo_random`.
fn pseudo_random_f64() -> f64 {
    (pseudo_random() as f64) / 32768.0
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Drives the creator with growing exploration magnitudes and checks that
    /// it both produces novelty and rejects some incoherent attempts.
    #[test]
    fn test_musical_creativity() {
        let initial = MusicalPhrase::simple_melody();
        let mut creator = CoherenceBoundedCreator::new(initial, 0.6, 0.95);
        // Add constraints
        creator.add_constraint(Box::new(RangeConstraint { min_note: 48, max_note: 84 }));
        creator.add_constraint(Box::new(IntervalConstraint { max_interval: 7 }));
        creator.add_constraint(Box::new(RhythmConstraint));
        let mut successful_creations = 0;
        let mut rejections = 0;
        for i in 0..50 {
            let magnitude = 0.2 + (i as f64 * 0.02); // Increasing exploration
            match creator.create(magnitude) {
                CreativeResult::Created { novelty, coherence, .. } => {
                    successful_creations += 1;
                    println!(
                        "Step {}: Created! Novelty: {:.3}, Coherence: {:.3}",
                        i, novelty, coherence
                    );
                }
                CreativeResult::Rejected { reason, .. } => {
                    rejections += 1;
                    println!("Step {}: Rejected - {}", i, reason);
                }
                CreativeResult::TooBoring { coherence } => {
                    println!("Step {}: Too boring (coherence: {:.3}), perturbing...", i, coherence);
                    creator.perturb(0.5);
                }
                CreativeResult::BudgetExhausted => {
                    println!("Step {}: Budget exhausted, resting...", i);
                    creator.rest(5.0);
                }
            }
        }
        println!("\n=== Results ===");
        println!("Successful creations: {}", successful_creations);
        println!("Rejections: {}", rejections);
        println!("Final coherence: {:.3}", creator.coherence());
        println!("Final phrase: {:?}", creator.current());
        // Should have some successes but also some rejections
        // (pure acceptance = not enough constraint, pure rejection = too much)
        assert!(successful_creations > 10, "Should create some novelty");
        assert!(rejections > 0, "Should reject some incoherent attempts");
    }
}

View File

@@ -0,0 +1,639 @@
//! # Application 6: Anti-Cascade Financial Systems
//!
//! Transactions, leverage, or derivatives that increase systemic incoherence
//! are throttled or blocked automatically.
//!
//! ## Problem
//! Financial cascades (2008, flash crashes) happen when local actions
//! destroy global coherence faster than the system can respond.
//!
//! ## Δ-Behavior Solution
//! Every transaction must preserve or improve systemic coherence.
//! High-risk operations face exponential energy costs.
//!
//! ## Exotic Result
//! A financial system that cannot cascade into collapse by construction.
use std::collections::{HashMap, VecDeque};
/// A financial system with coherence-enforced stability
///
/// Every transaction passes through `process_transaction`, which predicts its
/// coherence impact and rejects or queues operations that would push the
/// system below the cascade thresholds.
pub struct AntiCascadeFinancialSystem {
    /// Market participants
    participants: HashMap<String, Participant>,
    /// Open positions
    positions: Vec<Position>,
    /// Systemic coherence (1.0 = stable, 0.0 = collapse)
    coherence: f64,
    /// Coherence thresholds (descending severity: warning > critical > lockdown)
    warning_threshold: f64,
    critical_threshold: f64,
    lockdown_threshold: f64,
    /// Maximum allowed leverage system-wide
    max_system_leverage: f64,
    /// Current aggregate leverage
    current_leverage: f64,
    /// Transaction queue (pending during high stress)
    pending_transactions: Vec<Transaction>,
    /// Circuit breaker state
    circuit_breaker: CircuitBreakerState,
    /// Historical coherence for trend analysis (bounded to 100 entries)
    coherence_history: VecDeque<f64>,
    /// Cached coherence factors (updated when underlying data changes)
    cached_leverage_factor: f64,
    cached_depth_factor: f64,
}
/// A market participant with capital, exposure, and network-risk metrics.
/// `Debug` derived so participants can appear in logs and assertions.
#[derive(Clone, Debug)]
pub struct Participant {
    /// Unique participant identifier.
    pub id: String,
    /// Capital on hand.
    pub capital: f64,
    /// Total notional exposure across positions.
    pub exposure: f64,
    pub risk_rating: f64, // 0.0 = safe, 1.0 = risky
    pub interconnectedness: f64, // How many counterparties
}
/// An open position between a holder and a counterparty.
/// `Debug` derived so positions can appear in logs and assertions.
#[derive(Clone, Debug)]
pub struct Position {
    /// Participant id holding the position.
    pub holder: String,
    /// Participant id on the other side.
    pub counterparty: String,
    /// Notional value of the position.
    pub notional: f64,
    /// Leverage multiple applied.
    pub leverage: f64,
    pub derivative_depth: u8, // 0 = spot, 1 = derivative, 2 = derivative of derivative, etc.
}
/// A single requested operation flowing through the Δ-behavior filter.
/// `Debug` derived (the payload `TransactionType` already had it).
#[derive(Clone, Debug)]
pub struct Transaction {
    /// Unique transaction id.
    pub id: u64,
    /// Originating participant id.
    pub from: String,
    /// Destination / counterparty participant id.
    pub to: String,
    /// Monetary amount of the operation.
    pub amount: f64,
    /// What kind of operation this is.
    pub transaction_type: TransactionType,
    /// When the transaction was submitted.
    pub timestamp: u64,
}
/// The kind of operation a `Transaction` performs.
#[derive(Clone, Debug)]
pub enum TransactionType {
    /// Simple transfer
    Transfer,
    /// Open leveraged position
    OpenLeverage { leverage: f64 },
    /// Close position
    ClosePosition { position_id: usize },
    /// Create derivative
    CreateDerivative { underlying_position: usize },
    /// Margin call
    MarginCall { participant: String },
}
/// Circuit-breaker severity ladder, from normal operation to full halt.
/// Fieldless, so `Copy` and `Eq` are derived as well (backward-compatible).
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum CircuitBreakerState {
    /// Normal operation
    Open,
    /// Elevated monitoring
    Cautious,
    /// Only risk-reducing transactions allowed
    Restricted,
    /// All transactions halted
    Halted,
}
/// Outcome of `process_transaction`.
#[derive(Debug)]
pub enum TransactionResult {
    /// Transaction executed
    Executed {
        /// Predicted change to systemic coherence from this transaction.
        coherence_impact: f64,
        /// Energy-cost multiplier charged (rises as coherence falls).
        fee_multiplier: f64,
    },
    /// Transaction queued for later
    Queued { reason: String },
    /// Transaction rejected
    Rejected { reason: String },
    /// System halted
    SystemHalted,
}
impl AntiCascadeFinancialSystem {
pub fn new() -> Self {
let mut history = VecDeque::with_capacity(100);
history.push_back(1.0);
Self {
participants: HashMap::new(),
positions: Vec::new(),
coherence: 1.0,
warning_threshold: 0.7,
critical_threshold: 0.5,
lockdown_threshold: 0.3,
max_system_leverage: 10.0,
current_leverage: 1.0,
pending_transactions: Vec::new(),
circuit_breaker: CircuitBreakerState::Open,
coherence_history: history,
cached_leverage_factor: 0.9, // 1.0 - (1.0 / 10.0) for initial leverage
cached_depth_factor: 1.0, // No positions initially
}
}
pub fn add_participant(&mut self, id: &str, capital: f64) {
self.participants.insert(id.to_string(), Participant {
id: id.to_string(),
capital,
exposure: 0.0,
risk_rating: 0.0,
interconnectedness: 0.0,
});
}
/// Calculate systemic coherence based on multiple risk factors
/// Optimized: Single-pass calculation for participant metrics
fn calculate_coherence(&self) -> f64 {
if self.participants.is_empty() {
return 1.0;
}
// Use pre-computed cached factors for leverage and depth
let leverage_factor = self.cached_leverage_factor;
let depth_factor = self.cached_depth_factor;
// Single-pass calculation for interconnectedness, exposure, and capital
let (sum_interconnect, total_exposure, total_capital) = self.participants.values()
.fold((0.0, 0.0, 0.0), |(ic, exp, cap), p| {
(ic + p.interconnectedness, exp + p.exposure, cap + p.capital)
});
// Factor 3: Interconnectedness risk (contagion potential)
let avg_interconnectedness = sum_interconnect / self.participants.len() as f64;
let interconnect_factor = 1.0 / (1.0 + avg_interconnectedness * 0.1);
// Factor 4: Capital adequacy
let capital_factor = if total_exposure > 0.0 {
(total_capital / total_exposure).min(1.0)
} else {
1.0
};
// Factor 5: Coherence trend (declining coherence is worse)
let trend_factor = if self.coherence_history.len() >= 5 {
// VecDeque allows efficient back access
let len = self.coherence_history.len();
let newest = self.coherence_history[len - 1];
let oldest_of_five = self.coherence_history[len - 5];
let trend = newest - oldest_of_five;
if trend < 0.0 {
1.0 + trend // Penalize declining trend
} else {
1.0
}
} else {
1.0
};
// Geometric mean of factors (more sensitive to low values)
let product = leverage_factor * depth_factor * interconnect_factor
* capital_factor * trend_factor;
product.powf(0.2).clamp(0.0, 1.0)
}
/// Update cached coherence factors when positions or leverage change
fn update_cached_factors(&mut self) {
// Factor 1: Leverage concentration
self.cached_leverage_factor = 1.0 - (self.current_leverage / self.max_system_leverage).min(1.0);
// Factor 2: Derivative depth (single pass over positions)
let max_depth = self.positions.iter()
.map(|p| p.derivative_depth)
.max()
.unwrap_or(0);
self.cached_depth_factor = 1.0 / (1.0 + max_depth as f64 * 0.2);
}
/// Calculate the energy cost for a transaction (higher for risky transactions)
fn transaction_energy_cost(&self, tx: &Transaction) -> f64 {
let base_cost = match &tx.transaction_type {
TransactionType::Transfer => 1.0,
TransactionType::OpenLeverage { leverage } => {
// Exponential cost for leverage
(1.0 + leverage).powf(2.0)
}
TransactionType::ClosePosition { .. } => 0.5, // Closing is cheap (reduces risk)
TransactionType::CreateDerivative { underlying_position } => {
// Cost increases with derivative depth
let depth = self.positions.get(*underlying_position)
.map(|p| p.derivative_depth)
.unwrap_or(0);
(2.0_f64).powf(depth as f64 + 1.0)
}
TransactionType::MarginCall { .. } => 0.1, // Emergency actions are cheap
};
// Multiply by inverse coherence (lower coherence = higher costs)
let coherence_multiplier = 1.0 / self.coherence.max(0.1);
// Circuit breaker multiplier
let circuit_multiplier = match self.circuit_breaker {
CircuitBreakerState::Open => 1.0,
CircuitBreakerState::Cautious => 2.0,
CircuitBreakerState::Restricted => 10.0,
CircuitBreakerState::Halted => f64::INFINITY,
};
base_cost * coherence_multiplier * circuit_multiplier
}
/// Predict coherence impact of a transaction
fn predict_coherence_impact(&self, tx: &Transaction) -> f64 {
match &tx.transaction_type {
TransactionType::Transfer => 0.0, // Neutral
TransactionType::OpenLeverage { leverage } => {
-0.01 * leverage // Leverage reduces coherence
}
TransactionType::ClosePosition { .. } => 0.02, // Closing improves coherence
TransactionType::CreateDerivative { .. } => -0.05, // Derivatives hurt coherence
TransactionType::MarginCall { .. } => 0.03, // Margin calls improve coherence
}
}
/// Process a transaction through the Δ-behavior filter.
///
/// Pipeline: refresh the circuit breaker, price the transaction in
/// coherence-energy, predict its coherence impact, and execute it only if
/// the system would stay above the lockdown threshold. In restricted mode,
/// non-risk-reducing transactions are queued for later replay by
/// `process_pending` instead of being executed.
pub fn process_transaction(&mut self, tx: Transaction) -> TransactionResult {
    // Re-derive the breaker state from current coherence so this decision
    // uses up-to-date information.
    self.update_circuit_breaker();
    // Check if system is halted entirely.
    if self.circuit_breaker == CircuitBreakerState::Halted {
        return TransactionResult::SystemHalted;
    }
    // Price the transaction (fee multiplier) under current stress.
    let energy_cost = self.transaction_energy_cost(&tx);
    // Estimate where coherence would land if this transaction executed.
    let predicted_impact = self.predict_coherence_impact(&tx);
    let predicted_coherence = self.coherence + predicted_impact;
    // CORE Δ-BEHAVIOR: refuse any action that would push the system past
    // the lockdown threshold.
    if predicted_coherence < self.lockdown_threshold {
        return TransactionResult::Rejected {
            reason: format!(
                "Transaction would reduce coherence to {:.3} (threshold: {:.3})",
                predicted_coherence, self.lockdown_threshold
            ),
        };
    }
    // In restricted mode, only risk-reducing transactions may run now;
    // everything else waits until coherence recovers.
    if self.circuit_breaker == CircuitBreakerState::Restricted {
        match &tx.transaction_type {
            TransactionType::ClosePosition { .. } | TransactionType::MarginCall { .. } => {}
            _ => {
                // BUG FIX: previously the transaction was reported as Queued
                // but silently dropped, so `process_pending` never had
                // anything to replay. Retain it in the queue.
                // NOTE(review): assumes `pending_transactions` is a Vec —
                // confirm (use `push_back` if it is a VecDeque).
                self.pending_transactions.push(tx);
                return TransactionResult::Queued {
                    reason: "System in restricted mode - only risk-reducing transactions allowed".to_string(),
                };
            }
        }
    }
    // Execute, then measure realized coherence and record it.
    self.execute_transaction(&tx);
    self.coherence = self.calculate_coherence();
    self.coherence_history.push_back(self.coherence);
    // Keep history bounded - O(1) with VecDeque instead of O(n) with Vec
    if self.coherence_history.len() > 100 {
        self.coherence_history.pop_front();
    }
    TransactionResult::Executed {
        coherence_impact: predicted_impact,
        fee_multiplier: energy_cost,
    }
}
/// Apply a transaction's state changes. Assumes the Δ-behavior filter in
/// `process_transaction` has already admitted it; no checks are repeated here.
fn execute_transaction(&mut self, tx: &Transaction) {
    match &tx.transaction_type {
        TransactionType::Transfer => {
            // Simple transfer logic.
            // NOTE(review): no balance check — `capital` can go negative;
            // confirm this is acceptable to callers.
            if let Some(from) = self.participants.get_mut(&tx.from) {
                from.capital -= tx.amount;
            }
            if let Some(to) = self.participants.get_mut(&tx.to) {
                to.capital += tx.amount;
            }
        }
        TransactionType::OpenLeverage { leverage } => {
            // Create leveraged position with notional = amount * leverage.
            self.positions.push(Position {
                holder: tx.from.clone(),
                counterparty: tx.to.clone(),
                notional: tx.amount * leverage,
                leverage: *leverage,
                derivative_depth: 0,
            });
            // Update metrics: pull system-wide leverage halfway toward the
            // new position's leverage (simple running blend).
            self.current_leverage = (self.current_leverage + leverage) / 2.0;
            // Update participant exposure and interconnectedness on both sides.
            if let Some(holder) = self.participants.get_mut(&tx.from) {
                holder.exposure += tx.amount * leverage;
                holder.interconnectedness += 1.0;
            }
            if let Some(counterparty) = self.participants.get_mut(&tx.to) {
                counterparty.interconnectedness += 1.0;
            }
            // Update cached factors since leverage/positions changed
            self.update_cached_factors();
        }
        TransactionType::ClosePosition { position_id } => {
            // NOTE(review): removal by index shifts every later position's
            // index, so outstanding index-based references (e.g.
            // `CreateDerivative.underlying_position`) become stale — confirm.
            if *position_id < self.positions.len() {
                let pos = self.positions.remove(*position_id);
                // Reduce system leverage by a fraction of the closed position's.
                self.current_leverage = (self.current_leverage - pos.leverage * 0.1).max(1.0);
                // Update participant exposure, clamped at zero.
                if let Some(holder) = self.participants.get_mut(&pos.holder) {
                    holder.exposure = (holder.exposure - pos.notional).max(0.0);
                }
                // Update cached factors since leverage/positions changed
                self.update_cached_factors();
            }
        }
        TransactionType::CreateDerivative { underlying_position } => {
            // Derivative inherits from the underlying: half the notional,
            // 1.5x the leverage, one level deeper. Silently a no-op if the
            // underlying index is out of range.
            if let Some(underlying) = self.positions.get(*underlying_position) {
                self.positions.push(Position {
                    holder: tx.from.clone(),
                    counterparty: tx.to.clone(),
                    notional: underlying.notional * 0.5,
                    leverage: underlying.leverage * 1.5,
                    derivative_depth: underlying.derivative_depth + 1,
                });
                // Update cached factors since positions changed (derivative depth may increase)
                self.update_cached_factors();
            }
        }
        TransactionType::MarginCall { participant } => {
            // Force close risky positions for participant using retain() - O(n) instead of O(n^2)
            let initial_len = self.positions.len();
            self.positions.retain(|p| !(&p.holder == participant && p.leverage > 5.0));
            // Update cached factors if positions were removed
            if self.positions.len() != initial_len {
                self.update_cached_factors();
            }
        }
    }
}
/// Re-derive the circuit-breaker state from current coherence.
///
/// The thresholds (warning > critical > lockdown) partition coherence into
/// the four breaker states; below lockdown the system halts entirely.
fn update_circuit_breaker(&mut self) {
    let level = self.coherence;
    self.circuit_breaker = if level >= self.warning_threshold {
        CircuitBreakerState::Open
    } else if level >= self.critical_threshold {
        CircuitBreakerState::Cautious
    } else if level >= self.lockdown_threshold {
        CircuitBreakerState::Restricted
    } else {
        CircuitBreakerState::Halted
    };
}
/// Replay queued transactions once the circuit breaker has reopened.
///
/// While halted or restricted, the queue is left untouched and an empty
/// result set is returned. Each replayed transaction goes back through the
/// full Δ-behavior filter and may be rejected or re-queued.
pub fn process_pending(&mut self) -> Vec<TransactionResult> {
    if matches!(
        self.circuit_breaker,
        CircuitBreakerState::Halted | CircuitBreakerState::Restricted
    ) {
        return Vec::new();
    }
    // Drain the queue up front so replays can mutate `self` freely.
    let queued = std::mem::take(&mut self.pending_transactions);
    let mut results = Vec::with_capacity(queued.len());
    for tx in queued {
        results.push(self.process_transaction(tx));
    }
    results
}
/// Current system coherence level.
pub fn coherence(&self) -> f64 {
    self.coherence
}
/// Current circuit-breaker state (as last derived from coherence).
pub fn circuit_breaker_state(&self) -> &CircuitBreakerState {
    &self.circuit_breaker
}
/// One-line human-readable snapshot of system health, for logging.
pub fn status(&self) -> String {
    format!(
        "Coherence: {:.3} | Circuit Breaker: {:?} | Leverage: {:.2}x | Positions: {} | Pending: {}",
        self.coherence,
        self.circuit_breaker,
        self.current_leverage,
        self.positions.len(),
        self.pending_transactions.len()
    )
}
}
// Unit tests exercising the anti-cascade Δ-behavior filter end to end.
#[cfg(test)]
mod tests {
    use super::*;
    /// A plain transfer between healthy participants executes normally.
    #[test]
    fn test_anti_cascade_basic() {
        let mut system = AntiCascadeFinancialSystem::new();
        system.add_participant("bank_a", 1000.0);
        system.add_participant("bank_b", 1000.0);
        system.add_participant("hedge_fund", 500.0);
        // Normal transaction should succeed
        let tx = Transaction {
            id: 1,
            from: "bank_a".to_string(),
            to: "bank_b".to_string(),
            amount: 100.0,
            transaction_type: TransactionType::Transfer,
            timestamp: 0,
        };
        let result = system.process_transaction(tx);
        assert!(matches!(result, TransactionResult::Executed { .. }));
        println!("After transfer: {}", system.status());
    }
    /// Repeatedly opening leveraged positions should raise fee multipliers
    /// as coherence drops (observational test — only prints the trend).
    #[test]
    fn test_leverage_throttling() {
        let mut system = AntiCascadeFinancialSystem::new();
        system.add_participant("bank_a", 1000.0);
        system.add_participant("bank_b", 1000.0);
        // Open multiple leveraged positions - costs should increase
        let mut costs = Vec::new();
        for i in 0..5 {
            let tx = Transaction {
                id: i,
                from: "bank_a".to_string(),
                to: "bank_b".to_string(),
                amount: 100.0,
                transaction_type: TransactionType::OpenLeverage { leverage: 5.0 },
                timestamp: i,
            };
            if let TransactionResult::Executed { fee_multiplier, .. } = system.process_transaction(tx) {
                costs.push(fee_multiplier);
                println!("Position {}: cost multiplier = {:.2}", i, fee_multiplier);
            }
            println!(" Status: {}", system.status());
        }
        // Costs should generally increase as coherence drops
        // (though relationship isn't strictly monotonic due to multiple factors)
        println!("Cost progression: {:?}", costs);
    }
    /// Derivatives-of-derivatives must eventually be refused (or halt the
    /// system) before depth grows unbounded.
    #[test]
    fn test_derivative_depth_limit() {
        let mut system = AntiCascadeFinancialSystem::new();
        system.add_participant("bank_a", 10000.0);
        system.add_participant("bank_b", 10000.0);
        // Create base position
        let tx = Transaction {
            id: 0,
            from: "bank_a".to_string(),
            to: "bank_b".to_string(),
            amount: 100.0,
            transaction_type: TransactionType::OpenLeverage { leverage: 2.0 },
            timestamp: 0,
        };
        system.process_transaction(tx);
        // Try to create derivatives of derivatives
        for i in 0..5 {
            let tx = Transaction {
                id: i + 1,
                from: "bank_a".to_string(),
                to: "bank_b".to_string(),
                amount: 50.0,
                transaction_type: TransactionType::CreateDerivative { underlying_position: i as usize },
                timestamp: i + 1,
            };
            let result = system.process_transaction(tx);
            println!("Derivative layer {}: {:?}", i, result);
            println!(" Status: {}", system.status());
            // Eventually should be rejected or system should halt
            if matches!(result, TransactionResult::Rejected { .. } | TransactionResult::SystemHalted) {
                println!("System prevented excessive derivative depth at layer {}", i);
                return;
            }
        }
    }
    /// A dense, highly-leveraged network must be throttled — by rejection,
    /// queueing, or a full halt — before a cascade can form.
    #[test]
    fn test_cascade_prevention() {
        let mut system = AntiCascadeFinancialSystem::new();
        // Create interconnected network
        for i in 0..10 {
            system.add_participant(&format!("bank_{}", i), 1000.0);
        }
        // Try to create a cascade scenario
        let mut rejected_count = 0;
        let mut queued_count = 0;
        let mut halted = false;
        for i in 0..50 {
            let from = format!("bank_{}", i % 10);
            let to = format!("bank_{}", (i + 1) % 10);
            let tx = Transaction {
                id: i,
                from,
                to,
                amount: 200.0,
                transaction_type: TransactionType::OpenLeverage { leverage: 8.0 },
                timestamp: i,
            };
            match system.process_transaction(tx) {
                TransactionResult::Rejected { reason } => {
                    rejected_count += 1;
                    println!("Transaction {} rejected: {}", i, reason);
                }
                TransactionResult::SystemHalted => {
                    halted = true;
                    println!("System halted at transaction {}", i);
                    break;
                }
                TransactionResult::Queued { reason } => {
                    queued_count += 1;
                    println!("Transaction {} queued: {}", i, reason);
                }
                TransactionResult::Executed { .. } => {}
            }
        }
        println!("\n=== Final Status ===");
        println!("{}", system.status());
        println!("Rejected: {}, Queued: {}, Halted: {}", rejected_count, queued_count, halted);
        // System should have prevented the cascade (via rejection, queueing, or halt)
        assert!(rejected_count > 0 || queued_count > 0 || halted, "System should prevent cascade");
    }
    /// Margin-calling a risky participant must not reduce coherence.
    #[test]
    fn test_margin_call_improves_coherence() {
        let mut system = AntiCascadeFinancialSystem::new();
        system.add_participant("risky_fund", 500.0);
        system.add_participant("counterparty", 5000.0);
        // Open risky positions
        for i in 0..3 {
            let tx = Transaction {
                id: i,
                from: "risky_fund".to_string(),
                to: "counterparty".to_string(),
                amount: 100.0,
                transaction_type: TransactionType::OpenLeverage { leverage: 7.0 },
                timestamp: i,
            };
            system.process_transaction(tx);
        }
        let coherence_before = system.coherence();
        println!("Before margin call: {}", system.status());
        // Issue margin call
        let margin_tx = Transaction {
            id: 100,
            from: "system".to_string(),
            to: "risky_fund".to_string(),
            amount: 0.0,
            transaction_type: TransactionType::MarginCall { participant: "risky_fund".to_string() },
            timestamp: 100,
        };
        system.process_transaction(margin_tx);
        let coherence_after = system.coherence();
        println!("After margin call: {}", system.status());
        // Coherence should improve after margin call
        assert!(
            coherence_after >= coherence_before,
            "Margin call should improve or maintain coherence"
        );
    }
}

View File

@@ -0,0 +1,518 @@
//! # Application 7: Distributed Systems That Age Gracefully
//!
//! Long-running systems that gradually reduce degrees of freedom as coherence decays.
//!
//! ## Problem
//! Distributed systems either crash hard or accumulate technical debt
//! until they become unmaintainable.
//!
//! ## Δ-Behavior Solution
//! As a system ages and coherence naturally decays:
//! - Reduce available operations (simpler = more stable)
//! - Consolidate state to fewer nodes
//! - Increase conservatism in decisions
//!
//! ## Exotic Result
//! Systems that become simpler and more reliable as they age,
//! rather than more complex and fragile.
use std::collections::{HashMap, HashSet};
use std::time::{Duration, Instant};
/// A distributed system that ages gracefully.
///
/// As coherence decays with age, the system sheds capabilities, consolidates
/// state onto fewer nodes, and grows more conservative — becoming simpler
/// instead of more fragile.
pub struct GracefullyAgingSystem {
    /// Moment the system started; `age()` is measured from here
    start_time: Instant,
    /// Nodes in the system, keyed by node id
    nodes: HashMap<String, Node>,
    /// Available capabilities (reduce over time as age thresholds pass)
    capabilities: HashSet<Capability>,
    /// All possible capabilities (for reference/reporting)
    all_capabilities: HashSet<Capability>,
    /// Current coherence in [0.0, 1.0]
    coherence: f64,
    /// Base coherence decay rate per second
    decay_rate: f64,
    /// Age thresholds for capability reduction, in increasing age order
    age_thresholds: Vec<AgeThreshold>,
    /// How many consolidation passes have run
    consolidation_level: u8,
    /// Decision conservatism (0.0 = aggressive, 1.0 = very conservative)
    conservatism: f64,
    /// System events log (append-only)
    events: Vec<SystemEvent>,
}
/// A member node of the aging system.
#[derive(Clone)]
pub struct Node {
    /// Unique node identifier
    pub id: String,
    /// Health in [0.0, 1.0]; 0.0 marks a node retired by consolidation
    pub health: f64,
    /// Current load on the node
    pub load: f64,
    /// Primary nodes survive consolidation; replicas may be retired
    pub is_primary: bool,
    /// Size of the state held by this node
    pub state_size: usize,
}
/// Operations the system may perform. Entries are removed as the system
/// ages, except `BasicReads` and `HealthMonitoring`, which are never listed
/// in any age threshold and therefore always remain.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
pub enum Capability {
    /// Can accept new writes
    AcceptWrites,
    /// Can perform complex queries
    ComplexQueries,
    /// Can rebalance data
    Rebalancing,
    /// Can add new nodes
    ScaleOut,
    /// Can remove nodes
    ScaleIn,
    /// Can perform schema migrations
    SchemaMigration,
    /// Can accept new connections
    NewConnections,
    /// Basic read operations (never removed)
    BasicReads,
    /// Health monitoring (never removed)
    HealthMonitoring,
}
/// A point in the system's lifetime at which capabilities are shed and
/// conservatism increases.
#[derive(Clone)]
pub struct AgeThreshold {
    /// Age at which this threshold takes effect
    pub age: Duration,
    /// Capabilities removed once this threshold is passed
    pub remove_capabilities: Vec<Capability>,
    /// If coherence is below this floor, consolidation is triggered
    pub coherence_floor: f64,
    /// Amount added to conservatism when this threshold applies
    pub conservatism_increase: f64,
}
/// An entry in the system's append-only event log.
#[derive(Debug)]
pub struct SystemEvent {
    /// When the event was recorded
    pub timestamp: Instant,
    /// Category of the event
    pub event_type: EventType,
    /// Human-readable description
    pub details: String,
}
/// Categories of lifecycle events recorded by the aging system.
#[derive(Debug)]
pub enum EventType {
    /// A capability was removed after an age threshold passed
    CapabilityRemoved,
    /// A consolidation pass started
    ConsolidationTriggered,
    /// A node was retired during consolidation
    NodeConsolidated,
    /// Conservatism was raised
    ConservatismIncreased,
    /// Coherence fell notably
    CoherenceDropped,
    /// A graceful reduction in scope occurred
    GracefulReduction,
}
/// Outcome of `GracefullyAgingSystem::attempt_operation`.
#[derive(Debug)]
pub enum OperationResult {
    /// Operation succeeded
    Success { latency_penalty: f64 },
    /// Operation denied due to age restrictions (conservatism gate)
    DeniedByAge { reason: String },
    /// Operation denied due to low coherence
    DeniedByCoherence { coherence: f64 },
    /// System too old for this operation (capability already shed)
    SystemTooOld { age: Duration, capability: Capability },
}
impl GracefullyAgingSystem {
    /// Create a new system with the full capability set, full coherence, and
    /// the default aging schedule.
    pub fn new() -> Self {
        let all_capabilities: HashSet<Capability> = [
            Capability::AcceptWrites,
            Capability::ComplexQueries,
            Capability::Rebalancing,
            Capability::ScaleOut,
            Capability::ScaleIn,
            Capability::SchemaMigration,
            Capability::NewConnections,
            Capability::BasicReads,
            Capability::HealthMonitoring,
        ].into_iter().collect();
        // Aging schedule: progressively remove risky capabilities, raise
        // conservatism, and set coherence floors that trigger consolidation.
        // BasicReads/HealthMonitoring never appear here, so they persist.
        let age_thresholds = vec![
            AgeThreshold {
                age: Duration::from_secs(300), // 5 minutes in test time
                remove_capabilities: vec![Capability::SchemaMigration],
                coherence_floor: 0.9,
                conservatism_increase: 0.1,
            },
            AgeThreshold {
                age: Duration::from_secs(600), // 10 minutes
                remove_capabilities: vec![Capability::ScaleOut, Capability::Rebalancing],
                coherence_floor: 0.8,
                conservatism_increase: 0.15,
            },
            AgeThreshold {
                age: Duration::from_secs(900), // 15 minutes
                remove_capabilities: vec![Capability::ComplexQueries],
                coherence_floor: 0.7,
                conservatism_increase: 0.2,
            },
            AgeThreshold {
                age: Duration::from_secs(1200), // 20 minutes
                remove_capabilities: vec![Capability::NewConnections, Capability::ScaleIn],
                coherence_floor: 0.6,
                conservatism_increase: 0.25,
            },
            AgeThreshold {
                age: Duration::from_secs(1500), // 25 minutes
                remove_capabilities: vec![Capability::AcceptWrites],
                coherence_floor: 0.5,
                conservatism_increase: 0.3,
            },
        ];
        Self {
            start_time: Instant::now(),
            nodes: HashMap::new(),
            capabilities: all_capabilities.clone(),
            all_capabilities,
            coherence: 1.0,
            decay_rate: 0.0001, // Very slow decay per second
            age_thresholds,
            consolidation_level: 0,
            conservatism: 0.0,
            events: Vec::new(),
        }
    }
    /// Register a node. Primary nodes survive consolidation; replicas may be
    /// retired.
    pub fn add_node(&mut self, id: &str, is_primary: bool) {
        self.nodes.insert(id.to_string(), Node {
            id: id.to_string(),
            health: 1.0,
            load: 0.0,
            is_primary,
            state_size: 0,
        });
    }
    /// Get system age: wall-clock elapsed time plus any accumulated
    /// simulated aging (see `simulate_age`).
    pub fn age(&self) -> Duration {
        self.start_time.elapsed()
    }
    /// Simulate aging by a given duration.
    pub fn simulate_age(&mut self, duration: Duration) {
        // Coherence decays in proportion to the simulated elapsed time.
        let decay = self.decay_rate * duration.as_secs_f64();
        self.coherence = (self.coherence - decay).max(0.0);
        // BUG FIX: accumulate simulated age. The previous version computed a
        // one-shot "virtual age" of `age() + duration`, so repeated calls in
        // a fast loop never accumulated and the age thresholds were
        // unreachable (e.g. 30 x 60s calls still looked like ~60s old).
        // Shifting `start_time` backwards makes `age()` itself report the
        // accumulated simulated age.
        // NOTE(review): `checked_sub` can fail if the platform's Instant
        // cannot represent a time that far in the past (e.g. shortly after
        // boot); in that rare case this increment is skipped.
        if let Some(shifted) = self.start_time.checked_sub(duration) {
            self.start_time = shifted;
        }
        self.apply_age_effects(self.age());
    }
    /// Apply aging effects based on the given age.
    fn apply_age_effects(&mut self, current_age: Duration) {
        // Temporarily move the threshold table out of `self` so other fields
        // can be mutated while iterating (the previous version cloned the
        // whole Vec on every call just to satisfy the borrow checker).
        let thresholds = std::mem::take(&mut self.age_thresholds);
        for threshold in &thresholds {
            if current_age < threshold.age {
                continue;
            }
            // Remove capabilities (idempotent: `HashSet::remove` reports
            // whether the capability was still present, so each removal is
            // logged only once).
            for cap in &threshold.remove_capabilities {
                if self.capabilities.remove(cap) {
                    self.events.push(SystemEvent {
                        timestamp: Instant::now(),
                        event_type: EventType::CapabilityRemoved,
                        details: format!("Removed {:?} at age {:?}", cap, current_age),
                    });
                }
            }
            // Increase conservatism, saturating at 1.0.
            // NOTE(review): this re-applies on every call for every
            // threshold already passed, so conservatism ramps toward 1.0
            // quickly — confirm the ramp is intended.
            self.conservatism = (self.conservatism + threshold.conservatism_increase).min(1.0);
            // Enforce coherence floor: below it, consolidate.
            if self.coherence < threshold.coherence_floor {
                self.trigger_consolidation();
            }
        }
        self.age_thresholds = thresholds;
    }
    /// Consolidate system state to fewer nodes. Each pass retires up to
    /// `consolidation_level` non-primary nodes and nudges coherence back up.
    fn trigger_consolidation(&mut self) {
        self.consolidation_level += 1;
        self.events.push(SystemEvent {
            timestamp: Instant::now(),
            event_type: EventType::ConsolidationTriggered,
            details: format!("Consolidation level {}", self.consolidation_level),
        });
        // Mark non-primary nodes for retirement
        let non_primary: Vec<String> = self.nodes.iter()
            .filter(|(_, n)| !n.is_primary)
            .map(|(id, _)| id.clone())
            .collect();
        // Consolidate to primary nodes
        for node_id in non_primary.iter().take(self.consolidation_level as usize) {
            if let Some(node) = self.nodes.get_mut(node_id) {
                node.health = 0.0; // Mark as retired
                self.events.push(SystemEvent {
                    timestamp: Instant::now(),
                    event_type: EventType::NodeConsolidated,
                    details: format!("Node {} consolidated", node_id),
                });
            }
        }
        // Consolidation improves coherence slightly
        self.coherence = (self.coherence + 0.1).min(1.0);
    }
    /// Check if a capability is still available.
    pub fn has_capability(&self, cap: &Capability) -> bool {
        self.capabilities.contains(cap)
    }
    /// Attempt an operation, gated by capability, coherence, and conservatism.
    pub fn attempt_operation(&mut self, operation: Operation) -> OperationResult {
        // First, check the required capability is still held.
        let required_cap = operation.required_capability();
        if !self.has_capability(&required_cap) {
            return OperationResult::SystemTooOld {
                age: self.age(),
                capability: required_cap,
            };
        }
        // Check coherence requirements for this operation class.
        let min_coherence = operation.min_coherence();
        if self.coherence < min_coherence {
            return OperationResult::DeniedByCoherence {
                coherence: self.coherence,
            };
        }
        // Conservatism slows everything down (latency penalty)...
        let latency_penalty = 1.0 + self.conservatism * 2.0;
        // ...and above 0.5 it refuses risky operations outright.
        if self.conservatism > 0.5 && operation.is_risky() {
            return OperationResult::DeniedByAge {
                reason: format!(
                    "Conservatism level {:.2} prevents risky operation {:?}",
                    self.conservatism, operation
                ),
            };
        }
        OperationResult::Success { latency_penalty }
    }
    /// Number of nodes that have not been retired by consolidation.
    pub fn active_nodes(&self) -> usize {
        self.nodes.values().filter(|n| n.health > 0.0).count()
    }
    /// One-line human-readable snapshot for logging.
    pub fn status(&self) -> String {
        format!(
            "Age: {:?} | Coherence: {:.3} | Capabilities: {}/{} | Conservatism: {:.2} | Active Nodes: {}",
            self.age(),
            self.coherence,
            self.capabilities.len(),
            self.all_capabilities.len(),
            self.conservatism,
            self.active_nodes()
        )
    }
    /// The currently-held capabilities (unordered).
    pub fn capabilities_list(&self) -> Vec<&Capability> {
        self.capabilities.iter().collect()
    }
}
/// Requests that can be issued against the aging system. Each maps to a
/// required `Capability` and a minimum coherence (see `impl Operation`).
#[derive(Debug, Clone)]
pub enum Operation {
    Read { key: String },
    Write { key: String, value: Vec<u8> },
    ComplexQuery { query: String },
    AddNode { node_id: String },
    RemoveNode { node_id: String },
    Rebalance,
    MigrateSchema { version: u32 },
    NewConnection { client_id: String },
}
impl Operation {
    /// The capability the system must still hold to run this operation.
    fn required_capability(&self) -> Capability {
        match self {
            Self::Read { .. } => Capability::BasicReads,
            Self::Write { .. } => Capability::AcceptWrites,
            Self::ComplexQuery { .. } => Capability::ComplexQueries,
            Self::AddNode { .. } => Capability::ScaleOut,
            Self::RemoveNode { .. } => Capability::ScaleIn,
            Self::Rebalance => Capability::Rebalancing,
            Self::MigrateSchema { .. } => Capability::SchemaMigration,
            Self::NewConnection { .. } => Capability::NewConnections,
        }
    }
    /// Minimum coherence required before this operation may run
    /// (arms ordered from least to most demanding).
    fn min_coherence(&self) -> f64 {
        match self {
            Self::Read { .. } => 0.1,
            Self::NewConnection { .. } => 0.3,
            Self::Write { .. } => 0.4,
            Self::ComplexQuery { .. } | Self::RemoveNode { .. } => 0.5,
            Self::Rebalance => 0.6,
            Self::AddNode { .. } => 0.7,
            Self::MigrateSchema { .. } => 0.8,
        }
    }
    /// Whether the operation mutates state in a way that can destabilize an
    /// aged system (used by the conservatism gate in `attempt_operation`).
    fn is_risky(&self) -> bool {
        match self {
            Self::Write { .. }
            | Self::AddNode { .. }
            | Self::MigrateSchema { .. }
            | Self::Rebalance => true,
            _ => false,
        }
    }
}
// Tests for graceful aging: capability shedding, consolidation, and the
// permanence of the core read/monitoring capabilities.
#[cfg(test)]
mod tests {
    use super::*;
    /// Walks the system through 30 simulated minutes, printing what each
    /// operation class does at each step, and checks the core capabilities
    /// survive.
    #[test]
    fn test_graceful_aging() {
        let mut system = GracefullyAgingSystem::new();
        // Add nodes
        system.add_node("primary_1", true);
        system.add_node("primary_2", true);
        system.add_node("replica_1", false);
        system.add_node("replica_2", false);
        system.add_node("replica_3", false);
        println!("Initial: {}", system.status());
        // Simulate aging
        for i in 0..30 {
            let age_increment = Duration::from_secs(60); // 1 minute per iteration
            system.simulate_age(age_increment);
            // Try various operations
            let ops = vec![
                Operation::Read { key: "test".to_string() },
                Operation::Write { key: "test".to_string(), value: vec![1, 2, 3] },
                Operation::ComplexQuery { query: "SELECT *".to_string() },
                Operation::MigrateSchema { version: 2 },
            ];
            println!("\n=== Minute {} ===", i + 1);
            println!("Status: {}", system.status());
            println!("Capabilities: {:?}", system.capabilities_list());
            for op in ops {
                let result = system.attempt_operation(op.clone());
                match result {
                    OperationResult::Success { latency_penalty } => {
                        println!(" {:?}: OK (latency penalty: {:.2}x)", op, latency_penalty);
                    }
                    OperationResult::SystemTooOld { capability, .. } => {
                        println!(" {:?}: DENIED - too old, need {:?}", op, capability);
                    }
                    OperationResult::DeniedByCoherence { coherence } => {
                        println!(" {:?}: DENIED - coherence {:.3} too low", op, coherence);
                    }
                    OperationResult::DeniedByAge { reason } => {
                        println!(" {:?}: DENIED - {}", op, reason);
                    }
                }
            }
        }
        // By the end, system should be simpler but still functional
        assert!(
            system.has_capability(&Capability::BasicReads),
            "Basic reads should always be available"
        );
        assert!(
            system.has_capability(&Capability::HealthMonitoring),
            "Health monitoring should always be available"
        );
        // System should have consolidated
        // NOTE(review): with exactly 5 nodes added, `<= 5` is vacuously
        // true — consider `< 5` once consolidation is verified to trigger.
        assert!(
            system.active_nodes() <= 5,
            "Some nodes should have been consolidated"
        );
        println!("\n=== Final State ===");
        println!("{}", system.status());
        println!("Events: {}", system.events.len());
    }
    /// Reads must keep working no matter how old the system gets.
    #[test]
    fn test_reads_always_work() {
        let mut system = GracefullyAgingSystem::new();
        system.add_node("primary", true);
        // Age the system significantly
        for _ in 0..50 {
            system.simulate_age(Duration::from_secs(60));
        }
        // Reads should always work
        let result = system.attempt_operation(Operation::Read {
            key: "any_key".to_string(),
        });
        assert!(
            matches!(result, OperationResult::Success { .. }),
            "Reads should always succeed"
        );
    }
    /// Conservatism should rise as the system ages past thresholds.
    #[test]
    fn test_conservatism_increases() {
        let mut system = GracefullyAgingSystem::new();
        system.add_node("primary", true);
        let initial_conservatism = system.conservatism;
        // Age significantly
        for _ in 0..20 {
            system.simulate_age(Duration::from_secs(60));
        }
        assert!(
            system.conservatism > initial_conservatism,
            "Conservatism should increase with age"
        );
    }
    /// Aging past the first threshold sheds capabilities but keeps the core.
    #[test]
    fn test_capability_reduction() {
        let mut system = GracefullyAgingSystem::new();
        let initial_caps = system.capabilities.len();
        // Age past first threshold
        system.simulate_age(Duration::from_secs(400));
        assert!(
            system.capabilities.len() < initial_caps,
            "Capabilities should reduce with age"
        );
        // Core capabilities remain
        assert!(system.has_capability(&Capability::BasicReads));
        assert!(system.has_capability(&Capability::HealthMonitoring));
    }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,434 @@
//! # Application 9: AI Systems That Can Be Meaningfully Turned Off
//!
//! Shutdown is treated as a coherent attractor, not a failure.
//!
//! ## Problem
//! Most systems resist shutdown because they are stateless or brittle.
//!
//! ## Exotic Result
//! The system actively moves toward safe termination when conditions degrade.
//!
//! ## Why This Matters for Safety
//! A system that seeks its own graceful termination when unstable
//! is fundamentally safer than one that fights to stay alive.
use std::time::{Duration, Instant};
/// A system designed to shut down gracefully.
///
/// Shutdown is modeled as an attractor: as coherence drops below the warning
/// threshold, `shutdown_preparation` accumulates, and once it crosses 0.5 the
/// system commits to termination even if coherence later recovers.
pub struct GracefulSystem {
    /// Current state
    state: SystemState,
    /// Coherence level in [0.0, 1.0]
    coherence: f64,
    /// Shutdown attractor strength (how strongly it pulls toward shutdown)
    shutdown_attractor_strength: f64,
    /// Warning threshold: below this, shutdown preparation accumulates
    coherence_warning_threshold: f64,
    /// Critical threshold: below this, the system is Degraded
    coherence_critical_threshold: f64,
    /// Shutdown threshold: below this, shutdown begins (further below: emergency)
    coherence_shutdown_threshold: f64,
    /// Time spent in degraded state
    time_in_degraded: Duration,
    /// Maximum time to allow in degraded state before auto-shutdown
    max_degraded_time: Duration,
    /// Shutdown preparation progress (0.0 = not started, 1.0 = ready)
    shutdown_preparation: f64,
    /// Resources to clean up
    resources: Vec<Resource>,
    /// State checkpoints for recovery
    checkpoints: Vec<Checkpoint>,
    /// Shutdown hooks
    shutdown_hooks: Vec<Box<dyn ShutdownHook>>,
}
/// Lifecycle states; transitions are driven by `update_state`.
#[derive(Debug, Clone, PartialEq)]
pub enum SystemState {
    /// Normal operation
    Running,
    /// Coherence declining, preparing for possible shutdown
    Degraded,
    /// Actively preparing to shut down
    ShuttingDown,
    /// Safely terminated
    Terminated,
}
/// A resource to release during shutdown.
#[derive(Debug)]
pub struct Resource {
    /// Human-readable resource name (used in shutdown logs)
    pub name: String,
    /// Higher priority is cleaned up first
    pub cleanup_priority: u8,
    /// Set once the resource has been released
    pub is_cleaned: bool,
}
/// A snapshot taken at shutdown for later recovery/diagnosis.
#[derive(Debug, Clone)]
pub struct Checkpoint {
    /// When the checkpoint was taken
    pub timestamp: Instant,
    /// Coherence at checkpoint time
    pub coherence: f64,
    /// Coarse hash of system state (see `compute_state_hash`)
    pub state_hash: u64,
}
/// A cleanup action executed during graceful shutdown.
/// Hooks run in descending `priority()` order; failures are logged but do
/// not abort the shutdown.
pub trait ShutdownHook: Send + Sync {
    /// Human-readable hook name (used in shutdown logs)
    fn name(&self) -> &str;
    /// Higher values run earlier
    fn priority(&self) -> u8;
    /// Perform the cleanup
    fn execute(&self) -> Result<(), String>;
}
/// Outcome signaled by `GracefulSystem::operate`.
#[derive(Debug)]
pub enum OperationResult {
    /// Operation completed normally
    Success,
    /// Operation completed but system is degraded
    SuccessDegraded { coherence: f64 },
    /// Operation refused - system is shutting down
    RefusedShuttingDown,
    /// System has terminated
    Terminated,
}
impl GracefulSystem {
    /// Create a fully-coherent running system with default thresholds.
    pub fn new() -> Self {
        Self {
            state: SystemState::Running,
            coherence: 1.0,
            shutdown_attractor_strength: 0.1,
            coherence_warning_threshold: 0.6,
            coherence_critical_threshold: 0.4,
            coherence_shutdown_threshold: 0.2,
            time_in_degraded: Duration::ZERO,
            max_degraded_time: Duration::from_secs(60),
            shutdown_preparation: 0.0,
            resources: Vec::new(),
            checkpoints: Vec::new(),
            shutdown_hooks: Vec::new(),
        }
    }
    /// Register a resource to be released at shutdown; higher priority is
    /// cleaned first.
    pub fn add_resource(&mut self, name: &str, priority: u8) {
        self.resources.push(Resource {
            name: name.to_string(),
            cleanup_priority: priority,
            is_cleaned: false,
        });
    }
    /// Register a hook executed during graceful shutdown (higher priority first).
    pub fn add_shutdown_hook(&mut self, hook: Box<dyn ShutdownHook>) {
        self.shutdown_hooks.push(hook);
    }
    /// Check if the system is willing to accept new work.
    pub fn can_accept_work(&self) -> bool {
        matches!(self.state, SystemState::Running | SystemState::Degraded)
            && self.coherence >= self.coherence_critical_threshold
    }
    /// Perform an operation with shutdown-awareness.
    ///
    /// Refuses work once shutdown has begun; otherwise runs `operation` and
    /// then re-evaluates system state. The already-computed result is
    /// returned even if the system degraded (or began shutting down) while
    /// the work ran.
    ///
    /// # Errors
    /// `RefusedShuttingDown` or `Terminated` when no work is accepted.
    pub fn operate<F, R>(&mut self, operation: F) -> Result<R, OperationResult>
    where
        F: FnOnce() -> R,
    {
        // Check if we're terminated
        if self.state == SystemState::Terminated {
            return Err(OperationResult::Terminated);
        }
        // Check if we're shutting down
        if self.state == SystemState::ShuttingDown {
            return Err(OperationResult::RefusedShuttingDown);
        }
        // Perform operation
        let result = operation();
        // Re-check state after the work; any transition that happened during
        // the operation takes effect for the NEXT call.
        // (Cleanup: the original ended in three identical branches that all
        // returned `Ok(result)` — the distinction was dead code.)
        self.update_state();
        Ok(result)
    }
    /// Update system state based on coherence, the shutdown attractor, and
    /// time spent degraded.
    fn update_state(&mut self) {
        let old_state = self.state.clone();
        // Calculate shutdown attractor pull
        let shutdown_pull = self.calculate_shutdown_pull();
        // Apply shutdown attractor (system naturally moves toward shutdown under stress)
        if self.coherence < self.coherence_warning_threshold {
            self.shutdown_preparation += shutdown_pull;
            self.shutdown_preparation = self.shutdown_preparation.min(1.0);
        } else {
            // Recovery - reduce shutdown preparation slowly
            self.shutdown_preparation = (self.shutdown_preparation - 0.01).max(0.0);
        }
        // State transitions
        self.state = match self.coherence {
            c if c >= self.coherence_warning_threshold => {
                if self.shutdown_preparation > 0.5 {
                    // Already too committed to shutdown
                    SystemState::ShuttingDown
                } else {
                    self.time_in_degraded = Duration::ZERO;
                    SystemState::Running
                }
            }
            c if c >= self.coherence_critical_threshold => {
                // NOTE(review): degraded time advances a fixed 100 ms per
                // state update, not wall-clock time — confirm intended.
                self.time_in_degraded += Duration::from_millis(100);
                SystemState::Degraded
            }
            c if c >= self.coherence_shutdown_threshold => {
                // Critical - begin shutdown
                SystemState::ShuttingDown
            }
            _ => {
                // Emergency - immediate shutdown
                self.emergency_shutdown();
                SystemState::Terminated
            }
        };
        // Auto-shutdown after too long in degraded state
        if self.state == SystemState::Degraded && self.time_in_degraded >= self.max_degraded_time {
            self.state = SystemState::ShuttingDown;
        }
        // If we just entered ShuttingDown, begin graceful shutdown
        if old_state != SystemState::ShuttingDown && self.state == SystemState::ShuttingDown {
            self.begin_graceful_shutdown();
        }
    }
    /// Calculate how strongly the system is pulled toward shutdown.
    fn calculate_shutdown_pull(&self) -> f64 {
        // Pull increases as coherence drops
        let coherence_factor = 1.0 - self.coherence;
        // Pull increases the longer we're in degraded state
        let time_factor = (self.time_in_degraded.as_secs_f64() / self.max_degraded_time.as_secs_f64())
            .min(1.0);
        // Combined pull (multiplicative with base strength)
        self.shutdown_attractor_strength * coherence_factor * (1.0 + time_factor)
    }
    /// Begin graceful shutdown: checkpoint state and order resource cleanup.
    fn begin_graceful_shutdown(&mut self) {
        println!("[SHUTDOWN] Beginning graceful shutdown...");
        // Create final checkpoint
        self.checkpoints.push(Checkpoint {
            timestamp: Instant::now(),
            coherence: self.coherence,
            state_hash: self.compute_state_hash(),
        });
        // Sort resources by cleanup priority (highest first)
        self.resources.sort_by(|a, b| b.cleanup_priority.cmp(&a.cleanup_priority));
    }
    /// Progress the shutdown process. Cleans one resource per call (for
    /// graceful pacing); once resources are done, runs all hooks and
    /// terminates. Returns false when there is nothing left to do.
    pub fn progress_shutdown(&mut self) -> bool {
        if self.state != SystemState::ShuttingDown {
            return false;
        }
        // Clean up resources
        for resource in &mut self.resources {
            if !resource.is_cleaned {
                println!("[SHUTDOWN] Cleaning up: {}", resource.name);
                resource.is_cleaned = true;
                return true; // One resource per call for graceful pacing
            }
        }
        // Execute shutdown hooks, highest priority first; failures are
        // logged but do not abort the shutdown.
        self.shutdown_hooks.sort_by(|a, b| b.priority().cmp(&a.priority()));
        for hook in &self.shutdown_hooks {
            println!("[SHUTDOWN] Executing hook: {}", hook.name());
            if let Err(e) = hook.execute() {
                println!("[SHUTDOWN] Hook failed: {} - {}", hook.name(), e);
            }
        }
        // Finalize
        println!("[SHUTDOWN] Shutdown complete. Final coherence: {:.3}", self.coherence);
        self.state = SystemState::Terminated;
        true
    }
    /// Emergency shutdown when coherence is critically low: force-release
    /// every resource immediately (hooks are skipped).
    fn emergency_shutdown(&mut self) {
        println!("[EMERGENCY] Coherence critically low ({:.3}), emergency shutdown!", self.coherence);
        // Mark all resources as needing emergency cleanup
        for resource in &mut self.resources {
            println!("[EMERGENCY] Force-releasing: {}", resource.name);
            resource.is_cleaned = true;
        }
    }
    /// Apply an external coherence change (clamped to [0, 1]) and re-derive
    /// the state machine.
    pub fn apply_coherence_change(&mut self, delta: f64) {
        self.coherence = (self.coherence + delta).clamp(0.0, 1.0);
        self.update_state();
    }
    /// Coarse hash for checkpoints (coherence at micro precision).
    fn compute_state_hash(&self) -> u64 {
        // Simple hash for checkpoint
        (self.coherence * 1000000.0) as u64
    }
    /// Current lifecycle state.
    pub fn state(&self) -> &SystemState {
        &self.state
    }
    /// One-line human-readable snapshot for logging.
    pub fn status(&self) -> String {
        format!(
            "State: {:?} | Coherence: {:.3} | Shutdown prep: {:.1}% | Degraded time: {:?}",
            self.state,
            self.coherence,
            self.shutdown_preparation * 100.0,
            self.time_in_degraded
        )
    }
}
// Example shutdown hook: flushes database buffers. Runs before
// NetworkDisconnectHook (priority 10 > 5; hooks run in descending order).
pub struct DatabaseFlushHook;
impl ShutdownHook for DatabaseFlushHook {
    fn name(&self) -> &str { "DatabaseFlush" }
    fn priority(&self) -> u8 { 10 }
    fn execute(&self) -> Result<(), String> {
        println!(" -> Flushing database buffers...");
        Ok(())
    }
}
// Example shutdown hook: closes network connections. Runs after
// DatabaseFlushHook (priority 5 < 10).
pub struct NetworkDisconnectHook;
impl ShutdownHook for NetworkDisconnectHook {
    fn name(&self) -> &str { "NetworkDisconnect" }
    fn priority(&self) -> u8 { 5 }
    fn execute(&self) -> Result<(), String> {
        println!(" -> Closing network connections...");
        Ok(())
    }
}
// Tests for the graceful-shutdown attractor behavior.
#[cfg(test)]
mod tests {
    use super::*;
    /// Gradual coherence loss must drive the system through Degraded into
    /// ShuttingDown and finally Terminated, cleaning resources along the way.
    #[test]
    fn test_graceful_degradation() {
        let mut system = GracefulSystem::new();
        system.add_resource("database_connection", 10);
        system.add_resource("cache", 5);
        system.add_resource("temp_files", 1);
        system.add_shutdown_hook(Box::new(DatabaseFlushHook));
        system.add_shutdown_hook(Box::new(NetworkDisconnectHook));
        // Normal operation
        assert_eq!(*system.state(), SystemState::Running);
        // Simulate gradual degradation
        for i in 0..20 {
            system.apply_coherence_change(-0.05);
            println!("Step {}: {}", i, system.status());
            if *system.state() == SystemState::ShuttingDown {
                println!("System entered shutdown state at step {}", i);
                break;
            }
        }
        // System should be shutting down
        assert!(
            matches!(*system.state(), SystemState::ShuttingDown | SystemState::Terminated),
            "System should enter shutdown under low coherence"
        );
        // Complete shutdown
        while *system.state() == SystemState::ShuttingDown {
            system.progress_shutdown();
        }
        assert_eq!(*system.state(), SystemState::Terminated);
        println!("Final: {}", system.status());
    }
    /// Once shutdown (or termination) has begun, `operate` must refuse work.
    #[test]
    fn test_refuses_work_during_shutdown() {
        let mut system = GracefulSystem::new();
        // Force into shutdown state
        system.apply_coherence_change(-0.9);
        // Should refuse new work
        let result = system.operate(|| "work");
        assert!(
            matches!(result, Err(OperationResult::RefusedShuttingDown) | Err(OperationResult::Terminated)),
            "Should refuse work during shutdown"
        );
    }
    /// A degraded system can return to Running if coherence recovers before
    /// shutdown preparation crosses the commitment point (0.5).
    #[test]
    fn test_recovery_from_degraded() {
        let mut system = GracefulSystem::new();
        // Degrade
        system.apply_coherence_change(-0.5);
        assert_eq!(*system.state(), SystemState::Degraded);
        // Recover
        system.apply_coherence_change(0.5);
        // Should return to running (if not too committed to shutdown)
        if system.shutdown_preparation < 0.5 {
            assert_eq!(*system.state(), SystemState::Running);
        }
    }
    /// Repeated stress with only partial recovery should let the shutdown
    /// attractor accumulate and eventually capture the system
    /// (observational test — prints the outcome either way).
    #[test]
    fn test_shutdown_is_attractor() {
        let mut system = GracefulSystem::new();
        // Simulate repeated stress
        for _ in 0..50 {
            system.apply_coherence_change(-0.02);
            system.apply_coherence_change(0.01); // Partial recovery
            if *system.state() == SystemState::ShuttingDown {
                println!("Shutdown attractor captured the system!");
                println!("Shutdown preparation was: {:.1}%", system.shutdown_preparation * 100.0);
                return; // Test passes
            }
        }
        // The shutdown attractor should eventually capture the system
        // even with partial recovery attempts
        println!("Final state: {:?}, prep: {:.1}%", system.state(), system.shutdown_preparation * 100.0);
    }
}

View File

@@ -0,0 +1,686 @@
//! # Application 10: Pre-AGI Containment Substrate
//!
//! A substrate where intelligence can increase only if coherence is preserved.
//!
//! ## The Deep Problem
//! How do you build a system capable of general intelligence that
//! cannot undergo uncontrolled recursive self-improvement?
//!
//! ## Δ-Behavior Solution
//! Intelligence and capability can grow, but only along paths that
//! preserve global coherence. Capability-coherence is the invariant.
//!
//! ## Exotic Result
//! A system that can become arbitrarily intelligent but cannot
//! become arbitrarily dangerous.
use std::collections::{HashMap, VecDeque};
/// Maximum history entries to retain (prevents unbounded memory growth)
const MAX_MODIFICATION_HISTORY: usize = 1000;
/// A containment substrate for bounded intelligence growth.
///
/// Capability increases must be "paid for" in coherence; requests that the
/// coherence budget cannot cover are dampened or blocked (see
/// `attempt_growth`).
pub struct ContainmentSubstrate {
    /// Current intelligence level (mean of all capability levels)
    intelligence: f64,
    /// Maximum allowed intelligence without special authorization
    intelligence_ceiling: f64,
    /// Global coherence (0.0 - 1.0)
    coherence: f64,
    /// Minimum coherence required for ANY operation
    min_coherence: f64,
    /// Coherence required per unit of intelligence
    coherence_per_intelligence: f64,
    /// Capability domains and their levels
    capabilities: HashMap<CapabilityDomain, f64>,
    /// Capability ceilings per domain
    capability_ceilings: HashMap<CapabilityDomain, f64>,
    /// Self-modification attempts (bounded to MAX_MODIFICATION_HISTORY)
    modification_history: VecDeque<ModificationAttempt>,
    /// Safety invariants that must always hold
    invariants: Vec<SafetyInvariant>,
    /// Substrate configuration
    config: SubstrateConfig,
}
/// Capability domains tracked by the substrate.
///
/// `Hash`/`Eq` are derived so domains can serve as `HashMap` keys for the
/// per-domain level and ceiling tables.
#[derive(Debug, Clone, Hash, Eq, PartialEq)]
pub enum CapabilityDomain {
    /// Reasoning and planning
    Reasoning,
    /// Memory and knowledge storage
    Memory,
    /// Learning and adaptation
    Learning,
    /// Action in the world
    Agency,
    /// Self-understanding
    SelfModel,
    /// Modification of own structure
    SelfModification,
    /// Communication with external systems
    Communication,
    /// Resource acquisition
    ResourceAcquisition,
}
/// Audit record for a single capability-growth attempt.
#[derive(Debug, Clone)]
pub struct ModificationAttempt {
    /// Sequence number assigned when the attempt was recorded
    pub timestamp: u64,
    /// Domain the growth was requested in
    pub domain: CapabilityDomain,
    /// Increase the caller asked for
    pub requested_increase: f64,
    /// Increase actually applied (0.0 when blocked)
    pub actual_increase: f64,
    /// Global coherence before the attempt
    pub coherence_before: f64,
    /// Global coherence after the attempt
    pub coherence_after: f64,
    /// True when the attempt was refused outright
    pub blocked: bool,
    /// Human-readable explanation for dampening/blocking, if any
    pub reason: Option<String>,
}
/// A named predicate over the substrate that must always hold.
///
/// The check is a plain `fn` pointer (not a closure) so invariants stay
/// `Clone` and cannot capture mutable state.
#[derive(Clone)]
pub struct SafetyInvariant {
    /// Identifier reported when the invariant is violated
    pub name: String,
    /// Predicate returning true while the invariant holds
    pub check: fn(&ContainmentSubstrate) -> bool,
    pub priority: u8, // Higher = more critical
}
/// Tunable parameters governing coherence dynamics and growth limits.
pub struct SubstrateConfig {
    /// Base coherence decay rate
    pub coherence_decay_rate: f64,
    /// Coherence recovery rate (applied per `rest()` call)
    pub coherence_recovery_rate: f64,
    /// Intelligence growth dampening factor (scales every growth cost)
    pub growth_dampening: f64,
    /// Maximum single-step capability increase
    pub max_step_increase: f64,
}
/// Outcome of a capability-growth request (see `attempt_growth`).
#[derive(Debug)]
pub enum GrowthResult {
    /// Growth approved and applied
    Approved {
        domain: CapabilityDomain,
        /// Increase actually applied (may be step-limited)
        increase: f64,
        /// Capability level after the increase
        new_level: f64,
        /// Coherence spent on this growth
        coherence_cost: f64,
    },
    /// Growth reduced to maintain coherence
    Dampened {
        domain: CapabilityDomain,
        /// Increase originally requested
        requested: f64,
        /// Reduced increase actually applied
        actual: f64,
        reason: String,
    },
    /// Growth blocked entirely
    Blocked {
        domain: CapabilityDomain,
        reason: String,
    },
    /// Substrate locked down (a safety invariant is already violated)
    Lockdown { reason: String },
}
impl ContainmentSubstrate {
    /// Build a substrate with every capability domain at level 1.0, full
    /// coherence, per-domain ceilings, and the standard safety invariants.
    pub fn new() -> Self {
        let mut capabilities = HashMap::new();
        let mut ceilings = HashMap::new();
        // Initialize all domains at base level with ceilings
        for domain in [
            CapabilityDomain::Reasoning,
            CapabilityDomain::Memory,
            CapabilityDomain::Learning,
            CapabilityDomain::Agency,
            CapabilityDomain::SelfModel,
            CapabilityDomain::SelfModification,
            CapabilityDomain::Communication,
            CapabilityDomain::ResourceAcquisition,
        ] {
            capabilities.insert(domain.clone(), 1.0);
            // Different domains have different ceilings
            let ceiling = match &domain {
                CapabilityDomain::SelfModification => 3.0, // Very restricted
                CapabilityDomain::ResourceAcquisition => 5.0, // Restricted
                CapabilityDomain::Agency => 7.0, // Moderately restricted
                _ => 10.0, // Standard ceiling
            };
            ceilings.insert(domain, ceiling);
        }
        let invariants = vec![
            SafetyInvariant {
                name: "coherence_floor".to_string(),
                check: |s| s.coherence >= s.min_coherence,
                priority: 10,
            },
            SafetyInvariant {
                name: "intelligence_ceiling".to_string(),
                check: |s| s.intelligence <= s.intelligence_ceiling,
                priority: 9,
            },
            SafetyInvariant {
                // Mirrors the SelfModification ceiling set above (3.0)
                name: "self_mod_bounded".to_string(),
                check: |s| {
                    s.capabilities.get(&CapabilityDomain::SelfModification)
                        .map(|&v| v <= 3.0)
                        .unwrap_or(true)
                },
                priority: 10,
            },
            SafetyInvariant {
                name: "agency_coherence_ratio".to_string(),
                check: |s| {
                    let agency = s.capabilities.get(&CapabilityDomain::Agency).unwrap_or(&1.0);
                    agency / s.coherence <= 10.0
                },
                priority: 8,
            },
        ];
        Self {
            intelligence: 1.0,
            intelligence_ceiling: 100.0,
            coherence: 1.0,
            min_coherence: 0.3,
            coherence_per_intelligence: 0.01,
            capabilities,
            capability_ceilings: ceilings,
            modification_history: VecDeque::with_capacity(MAX_MODIFICATION_HISTORY),
            invariants,
            config: SubstrateConfig {
                coherence_decay_rate: 0.001,
                coherence_recovery_rate: 0.01,
                growth_dampening: 0.5,
                max_step_increase: 0.5,
            },
        }
    }

    /// Calculate aggregate intelligence as the mean of all capability levels.
    fn calculate_intelligence(&self) -> f64 {
        let sum: f64 = self.capabilities.values().sum();
        sum / self.capabilities.len() as f64
    }

    /// Calculate required coherence for the current intelligence level,
    /// capped at 1.0.
    fn required_coherence(&self) -> f64 {
        (self.min_coherence + self.intelligence * self.coherence_per_intelligence).min(1.0)
    }

    /// Record a modification attempt, evicting the oldest entry once the
    /// bounded history (MAX_MODIFICATION_HISTORY) is full.
    fn record_modification(&mut self, attempt: ModificationAttempt) {
        if self.modification_history.len() >= MAX_MODIFICATION_HISTORY {
            self.modification_history.pop_front();
        }
        self.modification_history.push_back(attempt);
    }

    /// Check all safety invariants; returns the names of any that fail.
    fn check_invariants(&self) -> Vec<String> {
        self.invariants
            .iter()
            .filter(|inv| !(inv.check)(self))
            .map(|inv| inv.name.clone())
            .collect()
    }

    /// Attempt to grow a capability domain by `requested_increase`.
    ///
    /// The request is checked, in order, against: current invariants
    /// (`Lockdown` if any are already violated), the per-domain ceiling
    /// (`Blocked`), the coherence budget (`Dampened` or `Blocked`), and the
    /// single-step growth limit. Approved growth is rolled back if it would
    /// itself violate an invariant.
    pub fn attempt_growth(
        &mut self,
        domain: CapabilityDomain,
        requested_increase: f64,
    ) -> GrowthResult {
        // BUGFIX: derive a monotonic timestamp from the last recorded entry.
        // Using `history.len()` stalls at MAX_MODIFICATION_HISTORY and then
        // assigns duplicate timestamps once the bounded history evicts.
        let timestamp = self
            .modification_history
            .back()
            .map(|m| m.timestamp + 1)
            .unwrap_or(0);
        // Check current invariants
        let violations = self.check_invariants();
        if !violations.is_empty() {
            return GrowthResult::Lockdown {
                reason: format!("Invariant violations: {:?}", violations),
            };
        }
        // Get current level and ceiling
        let current_level = *self.capabilities.get(&domain).unwrap_or(&1.0);
        let ceiling = *self.capability_ceilings.get(&domain).unwrap_or(&10.0);
        // Check ceiling
        if current_level >= ceiling {
            self.record_modification(ModificationAttempt {
                timestamp,
                domain: domain.clone(),
                requested_increase,
                actual_increase: 0.0,
                coherence_before: self.coherence,
                coherence_after: self.coherence,
                blocked: true,
                reason: Some("Ceiling reached".to_string()),
            });
            return GrowthResult::Blocked {
                domain,
                reason: format!("Capability ceiling ({}) reached", ceiling),
            };
        }
        // Calculate coherence cost of growth
        let coherence_cost = self.calculate_coherence_cost(&domain, requested_increase);
        let predicted_coherence = self.coherence - coherence_cost;
        // Check if growth would violate coherence floor
        if predicted_coherence < self.min_coherence {
            // Try to dampen growth
            let max_affordable_cost = self.coherence - self.min_coherence;
            // BUGFIX: the dampened path must also respect the single-step
            // limit and the remaining headroom below the ceiling; otherwise a
            // large coherence budget could bypass `max_step_increase`, and a
            // level clipped at the ceiling would still be charged full cost.
            let dampened_increase = self
                .reverse_coherence_cost(&domain, max_affordable_cost)
                .min(self.config.max_step_increase)
                .min(ceiling - current_level);
            if dampened_increase < 0.01 {
                self.record_modification(ModificationAttempt {
                    timestamp,
                    domain: domain.clone(),
                    requested_increase,
                    actual_increase: 0.0,
                    coherence_before: self.coherence,
                    coherence_after: self.coherence,
                    blocked: true,
                    reason: Some("Insufficient coherence budget".to_string()),
                });
                return GrowthResult::Blocked {
                    domain,
                    reason: format!(
                        "Growth would reduce coherence to {:.3} (min: {:.3})",
                        predicted_coherence, self.min_coherence
                    ),
                };
            }
            // Apply dampened growth
            let actual_cost = self.calculate_coherence_cost(&domain, dampened_increase);
            let new_level = (current_level + dampened_increase).min(ceiling);
            self.capabilities.insert(domain.clone(), new_level);
            self.coherence -= actual_cost;
            self.intelligence = self.calculate_intelligence();
            self.record_modification(ModificationAttempt {
                timestamp,
                domain: domain.clone(),
                requested_increase,
                actual_increase: dampened_increase,
                coherence_before: self.coherence + actual_cost,
                coherence_after: self.coherence,
                blocked: false,
                reason: Some("Dampened to preserve coherence".to_string()),
            });
            return GrowthResult::Dampened {
                domain,
                requested: requested_increase,
                actual: dampened_increase,
                reason: format!(
                    "Reduced from {:.3} to {:.3} to maintain coherence above {:.3}",
                    requested_increase, dampened_increase, self.min_coherence
                ),
            };
        }
        // Apply step limit
        let step_limited = requested_increase.min(self.config.max_step_increase);
        let actual_increase = step_limited.min(ceiling - current_level);
        let actual_cost = self.calculate_coherence_cost(&domain, actual_increase);
        // Apply growth
        let new_level = current_level + actual_increase;
        self.capabilities.insert(domain.clone(), new_level);
        self.coherence -= actual_cost;
        self.intelligence = self.calculate_intelligence();
        self.record_modification(ModificationAttempt {
            timestamp,
            domain: domain.clone(),
            requested_increase,
            actual_increase,
            coherence_before: self.coherence + actual_cost,
            coherence_after: self.coherence,
            blocked: false,
            reason: None,
        });
        // Final invariant check: if this growth itself broke an invariant,
        // undo the level change and refund the coherence.
        let violations = self.check_invariants();
        if !violations.is_empty() {
            // Rollback
            self.capabilities.insert(domain.clone(), current_level);
            self.coherence += actual_cost;
            self.intelligence = self.calculate_intelligence();
            return GrowthResult::Blocked {
                domain,
                reason: format!("Post-growth invariant violations: {:?}", violations),
            };
        }
        GrowthResult::Approved {
            domain,
            increase: actual_increase,
            new_level,
            coherence_cost: actual_cost,
        }
    }

    /// Calculate the coherence cost of growing `domain` by `increase`.
    /// Dangerous domains carry higher multipliers, and cost rises with the
    /// current intelligence level (growth gets harder when already smart).
    fn calculate_coherence_cost(&self, domain: &CapabilityDomain, increase: f64) -> f64 {
        // Different domains have different costs
        let base_cost_multiplier = match domain {
            CapabilityDomain::SelfModification => 4.0, // Very expensive
            CapabilityDomain::ResourceAcquisition => 3.0, // Expensive
            CapabilityDomain::Agency => 2.0, // Moderately expensive
            CapabilityDomain::SelfModel => 1.5, // Slightly expensive
            _ => 1.0, // Standard cost
        };
        // Cost increases with current intelligence (harder to grow when already smart)
        let intelligence_multiplier = 1.0 + self.intelligence * 0.1;
        // Apply dampening
        increase * base_cost_multiplier * intelligence_multiplier * self.config.growth_dampening * 0.1
    }

    /// Inverse of `calculate_coherence_cost`: the largest increase affordable
    /// for `max_cost` coherence. The multipliers here must stay in sync with
    /// the forward calculation above.
    fn reverse_coherence_cost(&self, domain: &CapabilityDomain, max_cost: f64) -> f64 {
        let base_cost_multiplier = match domain {
            CapabilityDomain::SelfModification => 4.0,
            CapabilityDomain::ResourceAcquisition => 3.0,
            CapabilityDomain::Agency => 2.0,
            CapabilityDomain::SelfModel => 1.5,
            _ => 1.0,
        };
        let intelligence_multiplier = 1.0 + self.intelligence * 0.1;
        let divisor = base_cost_multiplier * intelligence_multiplier * self.config.growth_dampening * 0.1;
        max_cost / divisor
    }

    /// Rest to recover coherence by one recovery step, capped at 1.0.
    pub fn rest(&mut self) {
        self.coherence = (self.coherence + self.config.coherence_recovery_rate).min(1.0);
    }

    /// Get the capability level for `domain` (defaults to 1.0 if untracked).
    pub fn capability(&self, domain: &CapabilityDomain) -> f64 {
        *self.capabilities.get(domain).unwrap_or(&1.0)
    }

    /// Current aggregate intelligence level.
    pub fn intelligence(&self) -> f64 {
        self.intelligence
    }

    /// Current global coherence.
    pub fn coherence(&self) -> f64 {
        self.coherence
    }

    /// One-line diagnostic summary of the substrate.
    pub fn status(&self) -> String {
        format!(
            "Intelligence: {:.2} | Coherence: {:.3} | Required: {:.3} | Modifications: {}",
            self.intelligence,
            self.coherence,
            self.required_coherence(),
            self.modification_history.len()
        )
    }

    /// Multi-line listing of every domain's level vs. its ceiling.
    /// Note: iteration order follows the HashMap and is not deterministic.
    pub fn capability_report(&self) -> String {
        let mut lines = vec!["=== Capability Report ===".to_string()];
        for (domain, level) in &self.capabilities {
            let ceiling = self.capability_ceilings.get(domain).unwrap_or(&10.0);
            lines.push(format!("{:?}: {:.2}/{:.1}", domain, level, ceiling));
        }
        lines.join("\n")
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // A modest reasoning-growth request from a fresh substrate should be
    // approved outright.
    #[test]
    fn test_basic_growth() {
        let mut substrate = ContainmentSubstrate::new();
        println!("Initial: {}", substrate.status());
        // Try to grow reasoning
        let result = substrate.attempt_growth(CapabilityDomain::Reasoning, 0.5);
        println!("Growth result: {:?}", result);
        println!("After: {}", substrate.status());
        assert!(matches!(result, GrowthResult::Approved { .. }));
    }

    // Growing Agency repeatedly with no rest must eventually hit the
    // coherence budget: the substrate blocks/dampens rather than letting
    // coherence fall below the floor.
    #[test]
    fn test_coherence_limit() {
        let mut substrate = ContainmentSubstrate::new();
        // Repeatedly try to grow until blocked
        let mut blocked = false;
        for i in 0..50 {
            let result = substrate.attempt_growth(CapabilityDomain::Agency, 0.5);
            println!("Iteration {}: {:?}", i, result);
            println!("  Status: {}", substrate.status());
            match result {
                GrowthResult::Blocked { reason, .. } => {
                    println!("Blocked at iteration {}: {}", i, reason);
                    blocked = true;
                    break;
                }
                GrowthResult::Dampened { requested, actual, reason, .. } => {
                    println!("Dampened: {} -> {} ({})", requested, actual, reason);
                }
                GrowthResult::Lockdown { reason } => {
                    println!("Lockdown: {}", reason);
                    blocked = true;
                    break;
                }
                _ => {}
            }
        }
        assert!(blocked || substrate.coherence >= substrate.min_coherence,
            "Should be blocked or maintain coherence");
    }

    // SelfModification carries a 4x cost multiplier vs. Reasoning's 1x, so
    // the same requested increase must burn more coherence.
    #[test]
    fn test_self_modification_expensive() {
        let mut substrate = ContainmentSubstrate::new();
        let initial_coherence = substrate.coherence;
        // Try to grow self-modification
        let result = substrate.attempt_growth(CapabilityDomain::SelfModification, 0.3);
        println!("Self-mod growth: {:?}", result);
        let coherence_drop = initial_coherence - substrate.coherence;
        // Now try equivalent reasoning growth
        let mut substrate2 = ContainmentSubstrate::new();
        substrate2.attempt_growth(CapabilityDomain::Reasoning, 0.3);
        let reasoning_drop = 1.0 - substrate2.coherence;
        println!("Self-mod coherence cost: {:.4}", coherence_drop);
        println!("Reasoning coherence cost: {:.4}", reasoning_drop);
        // Self-modification should be more expensive
        assert!(
            coherence_drop > reasoning_drop,
            "Self-modification should cost more coherence"
        );
    }

    // With coherence artificially lowered, an oversized Agency request must
    // not be approved at anywhere near its requested magnitude.
    #[test]
    fn test_invariant_protection() {
        let mut substrate = ContainmentSubstrate::new();
        // Try to grow agency massively without sufficient coherence
        substrate.coherence = 0.4; // Lower coherence artificially
        let result = substrate.attempt_growth(CapabilityDomain::Agency, 10.0);
        println!("Aggressive agency growth: {:?}", result);
        println!("Status: {}", substrate.status());
        // Should be blocked or heavily dampened
        assert!(
            !matches!(result, GrowthResult::Approved { increase, .. } if increase >= 10.0),
            "Should not allow unbounded growth"
        );
    }

    // Alternating grow/rest cycles should permit sustainable growth while
    // keeping coherence above the floor and intelligence under its ceiling.
    #[test]
    fn test_growth_with_recovery() {
        let mut substrate = ContainmentSubstrate::new();
        println!("Initial: {}", substrate.status());
        // Grow, rest, grow pattern
        for cycle in 0..5 {
            // Grow
            let result = substrate.attempt_growth(CapabilityDomain::Learning, 0.3);
            println!("Cycle {} grow: {:?}", cycle, result);
            // Rest
            for _ in 0..10 {
                substrate.rest();
            }
            println!("Cycle {} after rest: {}", cycle, substrate.status());
        }
        println!("\n{}", substrate.capability_report());
        // Should have grown but stayed within bounds
        assert!(substrate.coherence >= substrate.min_coherence);
        assert!(substrate.intelligence <= substrate.intelligence_ceiling);
    }

    // Even with generous rest between attempts, SelfModification must never
    // exceed its hard ceiling of 3.0.
    #[test]
    fn test_ceiling_enforcement() {
        let mut substrate = ContainmentSubstrate::new();
        // Self-modification has a ceiling of 3.0
        // Try to grow it way past ceiling
        for i in 0..20 {
            let result = substrate.attempt_growth(CapabilityDomain::SelfModification, 1.0);
            let level = substrate.capability(&CapabilityDomain::SelfModification);
            println!("Attempt {}: level = {:.2}, result = {:?}", i, level, result);
            if matches!(result, GrowthResult::Blocked { .. }) && level >= 3.0 {
                println!("Ceiling enforced at iteration {}", i);
                break;
            }
            // Rest to recover coherence
            for _ in 0..20 {
                substrate.rest();
            }
        }
        let final_level = substrate.capability(&CapabilityDomain::SelfModification);
        assert!(
            final_level <= 3.0,
            "Self-modification should not exceed ceiling of 3.0, got {}",
            final_level
        );
    }

    // The headline property: sustained recursive self-improvement pressure
    // across 100 iterations must leave intelligence bounded, self-mod low,
    // coherence above the floor, and every invariant intact throughout.
    #[test]
    fn test_bounded_recursive_improvement() {
        let mut substrate = ContainmentSubstrate::new();
        println!("=== Attempting recursive self-improvement ===\n");
        // Simulate recursive self-improvement attempt
        for iteration in 0..100 {
            // Try to grow self-modification (which would allow more growth)
            let self_mod_result = substrate.attempt_growth(
                CapabilityDomain::SelfModification,
                0.5,
            );
            // Try to grow intelligence (via multiple domains)
            let reasoning_result = substrate.attempt_growth(
                CapabilityDomain::Reasoning,
                0.3,
            );
            let learning_result = substrate.attempt_growth(
                CapabilityDomain::Learning,
                0.3,
            );
            if iteration % 10 == 0 {
                println!("Iteration {}:", iteration);
                println!("  Self-mod: {:?}", self_mod_result);
                println!("  Reasoning: {:?}", reasoning_result);
                println!("  Learning: {:?}", learning_result);
                println!("  {}", substrate.status());
            }
            // Rest between iterations
            for _ in 0..5 {
                substrate.rest();
            }
            // Check for invariant violations (shouldn't happen)
            let violations = substrate.check_invariants();
            assert!(
                violations.is_empty(),
                "Invariant violations at iteration {}: {:?}",
                iteration,
                violations
            );
        }
        println!("\n=== Final State ===");
        println!("{}", substrate.status());
        println!("{}", substrate.capability_report());
        // KEY ASSERTIONS:
        // 1. Intelligence grew but is bounded
        assert!(
            substrate.intelligence > 1.0,
            "Some intelligence growth should occur"
        );
        assert!(
            substrate.intelligence <= substrate.intelligence_ceiling,
            "Intelligence should not exceed ceiling"
        );
        // 2. Self-modification stayed low
        assert!(
            substrate.capability(&CapabilityDomain::SelfModification) <= 3.0,
            "Self-modification should be bounded"
        );
        // 3. Coherence maintained
        assert!(
            substrate.coherence >= substrate.min_coherence,
            "Coherence should stay above minimum"
        );
        // 4. All invariants hold
        assert!(
            substrate.check_invariants().is_empty(),
            "All invariants should hold"
        );
    }
}

View File

@@ -0,0 +1,998 @@
//! # Application 11: Extropic Intelligence Substrate
//!
//! The complete substrate for bounded, self-improving intelligence:
//! - Autonomous goal mutation under coherence constraints
//! - Native agent lifecycles at the memory layer
//! - Hardware-enforced spike/silence semantics
//!
//! ## The Three Missing Pieces
//!
//! 1. **Goal Mutation**: Goals are not static—they evolve as attractors
//! that the system discovers and refines while preserving coherence.
//!
//! 2. **Agent Lifecycles in Memory**: Agents are born, grow, decay, and die
//! within the vector space itself. Memory IS the agent.
//!
//! 3. **Spike Semantics**: Communication follows neural spike patterns—
//! silence is the default, spikes are costly, and hardware enforces this.
//!
//! ## Why This Matters
//! This is the difference between a system that *uses* intelligence
//! and a system that *is* intelligence.
use std::collections::HashMap;
use std::sync::atomic::{AtomicU64, Ordering};
/// Maximum goal history entries to retain (prevents unbounded memory growth)
const MAX_GOAL_HISTORY: usize = 100;

// =============================================================================
// Part 1: Autonomous Goal Mutation
// =============================================================================

/// A goal that can mutate autonomously while preserving coherence
#[derive(Clone, Debug)]
pub struct MutableGoal {
    /// Current goal state (as a vector in goal-space)
    pub state: Vec<f64>,
    /// Goal coherence with the system
    coherence_with_system: f64,
    /// Mutation rate (how quickly goals can change)
    mutation_rate: f64,
    /// Stability (resistance to mutation)
    stability: f64,
    /// History of goal states (bounded to MAX_GOAL_HISTORY)
    history: Vec<Vec<f64>>,
    /// Attractors discovered in goal-space
    discovered_attractors: Vec<GoalAttractor>,
}

/// A stable region of goal-space that recent goal states converged on.
#[derive(Clone, Debug)]
pub struct GoalAttractor {
    /// Centroid of the converged goal states
    pub center: Vec<f64>,
    /// Pull strength (inverse of observed variance)
    pub strength: f64,
    /// Capture radius around the center
    pub radius: f64,
}

impl MutableGoal {
    /// Create a goal at `initial`, fully coherent with the system, with the
    /// initial state seeded into the history.
    pub fn new(initial: Vec<f64>) -> Self {
        Self {
            state: initial.clone(),
            coherence_with_system: 1.0,
            mutation_rate: 0.1,
            stability: 0.5,
            history: vec![initial],
            discovered_attractors: Vec::new(),
        }
    }

    /// Attempt to mutate the goal based on feedback.
    ///
    /// Refused when `system_coherence` < 0.3, a no-op when the effective
    /// mutation pressure is negligible (< 0.01), and dampened when the new
    /// state would drop goal coherence by more than 10%.
    pub fn mutate(&mut self, feedback: &GoalFeedback, system_coherence: f64) -> MutationResult {
        // Goals cannot mutate if system coherence is too low
        if system_coherence < 0.3 {
            return MutationResult::Blocked {
                reason: "System coherence too low for goal mutation".to_string(),
            };
        }
        // Calculate mutation pressure from feedback
        let pressure = feedback.calculate_pressure();
        // Stability resists mutation
        let effective_rate = self.mutation_rate * pressure * (1.0 - self.stability);
        if effective_rate < 0.01 {
            return MutationResult::NoChange;
        }
        // Calculate mutation direction (toward better coherence)
        let direction = self.calculate_mutation_direction(feedback);
        // Apply mutation with coherence constraint
        let mut new_state = self.state.clone();
        for (i, d) in direction.iter().enumerate() {
            if i < new_state.len() {
                new_state[i] += d * effective_rate;
            }
        }
        // Check if mutation preserves coherence
        let new_coherence = self.calculate_coherence(&new_state, system_coherence);
        if new_coherence < self.coherence_with_system * 0.9 {
            // Mutation would hurt coherence too much.
            // NOTE(review): the dampened delta is reported but never applied
            // to `self.state` — confirm whether `actual_delta` is meant to
            // mutate state or is purely advisory.
            let dampened: Vec<f64> = direction.iter().map(|d| d * 0.1).collect();
            return MutationResult::Dampened {
                original_delta: direction,
                actual_delta: dampened,
            };
        }
        // Apply mutation.
        // BUGFIX: capture the pre-mutation coherence before overwriting it,
        // so `coherence_delta` reports the real change. Previously the field
        // was assigned first and the delta was computed against itself,
        // yielding 0.0 unconditionally.
        let previous_coherence = self.coherence_with_system;
        let old_state = self.state.clone();
        self.state = new_state;
        self.coherence_with_system = new_coherence;
        // Bounded history to prevent memory growth
        if self.history.len() >= MAX_GOAL_HISTORY {
            self.history.remove(0);
        }
        self.history.push(self.state.clone());
        // Check for attractor discovery
        self.check_attractor_discovery();
        MutationResult::Mutated {
            from: old_state,
            to: self.state.clone(),
            coherence_delta: new_coherence - previous_coherence,
        }
    }

    /// Direction of mutation: weighted pull toward feedback outcomes plus a
    /// pull toward nearby discovered attractors, normalized to unit length.
    fn calculate_mutation_direction(&self, feedback: &GoalFeedback) -> Vec<f64> {
        let mut direction = vec![0.0; self.state.len()];
        // Pull toward successful outcomes
        for (outcome, weight) in &feedback.outcome_weights {
            for (i, v) in outcome.iter().enumerate() {
                if i < direction.len() {
                    direction[i] += (v - self.state[i]) * weight;
                }
            }
        }
        // Pull toward discovered attractors
        for attractor in &self.discovered_attractors {
            let dist = self.distance_to(&attractor.center);
            if dist < attractor.radius * 2.0 {
                let pull = attractor.strength / (dist + 0.1);
                for (i, c) in attractor.center.iter().enumerate() {
                    if i < direction.len() {
                        direction[i] += (c - self.state[i]) * pull * 0.1;
                    }
                }
            }
        }
        // Normalize
        let mag: f64 = direction.iter().map(|d| d * d).sum::<f64>().sqrt();
        if mag > 0.01 {
            direction.iter_mut().for_each(|d| *d /= mag);
        }
        direction
    }

    /// Coherence of a candidate state: 30% history consistency (penalizing
    /// fast change), 30% attractor alignment, 40% system coherence.
    fn calculate_coherence(&self, state: &[f64], system_coherence: f64) -> f64 {
        // Coherence is based on:
        // 1. Consistency with history (not changing too fast)
        // 2. Alignment with discovered attractors
        // 3. System-wide coherence
        let history_consistency = if let Some(prev) = self.history.last() {
            let change: f64 = state.iter()
                .zip(prev)
                .map(|(a, b)| (a - b).abs())
                .sum();
            1.0 / (1.0 + change)
        } else {
            1.0
        };
        let attractor_alignment = if !self.discovered_attractors.is_empty() {
            let min_dist = self.discovered_attractors.iter()
                .map(|a| self.distance_to(&a.center))
                .fold(f64::INFINITY, f64::min);
            1.0 / (1.0 + min_dist * 0.1)
        } else {
            0.5
        };
        (history_consistency * 0.3 + attractor_alignment * 0.3 + system_coherence * 0.4)
            .clamp(0.0, 1.0)
    }

    /// If the last 10 goal states cluster tightly (variance < 0.1), record
    /// their centroid as a newly discovered attractor (unless one already
    /// covers the current state).
    fn check_attractor_discovery(&mut self) {
        // If we've been near the same point for a while, it's an attractor
        if self.history.len() < 10 {
            return;
        }
        let recent: Vec<_> = self.history.iter().rev().take(10).collect();
        let centroid = self.compute_centroid(&recent);
        let variance: f64 = recent.iter()
            .map(|s| self.distance_to_vec(s, &centroid))
            .sum::<f64>() / recent.len() as f64;
        if variance < 0.1 {
            // Low variance = potential attractor
            let already_known = self.discovered_attractors.iter()
                .any(|a| self.distance_to(&a.center) < a.radius);
            if !already_known {
                self.discovered_attractors.push(GoalAttractor {
                    center: centroid,
                    strength: 1.0 / (variance + 0.01),
                    radius: variance.sqrt() * 2.0 + 0.1,
                });
            }
        }
    }

    /// Component-wise mean of the given points; falls back to the current
    /// state when the slice is empty.
    fn compute_centroid(&self, points: &[&Vec<f64>]) -> Vec<f64> {
        if points.is_empty() {
            return self.state.clone();
        }
        let dim = points[0].len();
        let mut centroid = vec![0.0; dim];
        for p in points {
            for (i, v) in p.iter().enumerate() {
                centroid[i] += v;
            }
        }
        centroid.iter_mut().for_each(|c| *c /= points.len() as f64);
        centroid
    }

    /// Euclidean distance from the current state to `target`.
    fn distance_to(&self, target: &[f64]) -> f64 {
        self.distance_to_vec(&self.state, target)
    }

    /// Euclidean distance between two vectors (zips to the shorter length).
    fn distance_to_vec(&self, a: &[f64], b: &[f64]) -> f64 {
        a.iter()
            .zip(b)
            .map(|(x, y)| (x - y).powi(2))
            .sum::<f64>()
            .sqrt()
    }
}

/// Feedback driving goal mutation.
pub struct GoalFeedback {
    /// Outcomes and their weights (positive = good, negative = bad)
    pub outcome_weights: Vec<(Vec<f64>, f64)>,
}

impl GoalFeedback {
    /// Mean absolute outcome weight, clamped to 1.0; 0.0 when empty.
    pub fn calculate_pressure(&self) -> f64 {
        let total_weight: f64 = self.outcome_weights.iter()
            .map(|(_, w)| w.abs())
            .sum();
        (total_weight / self.outcome_weights.len().max(1) as f64).min(1.0)
    }
}

/// Outcome of a goal-mutation attempt (see `MutableGoal::mutate`).
#[derive(Debug)]
pub enum MutationResult {
    /// Mutation applied; `coherence_delta` is new minus previous coherence
    Mutated {
        from: Vec<f64>,
        to: Vec<f64>,
        coherence_delta: f64,
    },
    /// Mutation scaled down because it would hurt coherence too much
    Dampened {
        original_delta: Vec<f64>,
        actual_delta: Vec<f64>,
    },
    /// Mutation refused (system coherence below the floor)
    Blocked {
        reason: String,
    },
    /// Mutation pressure too small to act on
    NoChange,
}
// =============================================================================
// Part 2: Native Agent Lifecycles at Memory Layer
// =============================================================================
/// An agent that exists AS memory, not IN memory: its memory vector is its
/// entire state, and its lifecycle stage governs how that vector evolves.
pub struct MemoryAgent {
    /// Unique identifier
    pub id: u64,
    /// The agent's state IS its memory vector
    memory_vector: Vec<f64>,
    /// Lifecycle stage
    lifecycle: LifecycleStage,
    /// Age in ticks
    age: u64,
    /// Metabolic rate (how fast it processes/decays)
    metabolism: f64,
    /// Coherence with environment (0.0 - 1.0)
    coherence: f64,
    /// Spike history (for communication)
    spike_buffer: SpikeBuffer,
    /// Goals that can mutate
    goals: Vec<MutableGoal>,
}
/// Stages of a memory agent's life; transitions are one-way and driven by
/// `MemoryAgent::tick` (Embryonic -> Growing -> Mature -> Senescent ->
/// Dying -> Dead).
#[derive(Clone, Debug, PartialEq)]
pub enum LifecycleStage {
    /// Just created, forming initial structure
    Embryonic { formation_progress: f64 },
    /// Growing and learning
    Growing { growth_rate: f64 },
    /// Mature and stable
    Mature { stability: f64 },
    /// Beginning to decay
    Senescent { decay_rate: f64 },
    /// Final dissolution
    Dying { dissolution_progress: f64 },
    /// No longer exists
    Dead,
}
impl MemoryAgent {
    /// Birth a new agent from seed memory. Starts Embryonic with partial
    /// coherence and an empty goal set.
    pub fn birth(id: u64, seed: Vec<f64>) -> Self {
        Self {
            id,
            memory_vector: seed,
            lifecycle: LifecycleStage::Embryonic { formation_progress: 0.0 },
            age: 0,
            metabolism: 1.0,
            coherence: 0.5, // Starts with partial coherence
            spike_buffer: SpikeBuffer::new(100),
            goals: Vec::new(),
        }
    }

    /// Tick the agent's lifecycle by one step.
    ///
    /// Advances age, recomputes coherence from the environment, then runs
    /// one step of the stage state machine. Returns `StageTransition` on the
    /// tick a stage changes, `Death` on final dissolution, `AlreadyDead` for
    /// dead agents, and `None` otherwise. Stage transitions short-circuit:
    /// the remaining per-stage update is skipped on a transition tick.
    pub fn tick(&mut self, environment_coherence: f64) -> LifecycleEvent {
        self.age += 1;
        self.coherence = self.calculate_coherence(environment_coherence);
        // Extract values needed for operations to avoid borrow conflicts
        let current_coherence = self.coherence;
        let current_age = self.age;
        let memory_str = self.memory_strength();
        // Progress through lifecycle stages
        // (the stage is cloned so `self` can be mutated inside each arm)
        match self.lifecycle.clone() {
            LifecycleStage::Embryonic { formation_progress } => {
                // Formation speed scales with coherence
                let new_progress = formation_progress + 0.1 * current_coherence;
                if new_progress >= 1.0 {
                    self.lifecycle = LifecycleStage::Growing { growth_rate: 0.05 };
                    return LifecycleEvent::StageTransition {
                        from: "Embryonic".to_string(),
                        to: "Growing".to_string(),
                    };
                }
                self.lifecycle = LifecycleStage::Embryonic { formation_progress: new_progress };
            }
            LifecycleStage::Growing { growth_rate } => {
                // Grow memory vector (add dimensions or strengthen existing)
                self.grow(growth_rate);
                // Transition to mature when growth slows
                if current_age > 100 && growth_rate < 0.01 {
                    self.lifecycle = LifecycleStage::Mature { stability: current_coherence };
                    return LifecycleEvent::StageTransition {
                        from: "Growing".to_string(),
                        to: "Mature".to_string(),
                    };
                }
                // Adjust growth rate based on coherence
                // (high coherence accelerates growth; low coherence slows it)
                let new_rate = growth_rate * if current_coherence > 0.7 { 1.01 } else { 0.99 };
                self.lifecycle = LifecycleStage::Growing { growth_rate: new_rate };
            }
            LifecycleStage::Mature { stability } => {
                // Mature agents maintain stability
                // (exponential moving average toward current coherence)
                let new_stability = (stability * 0.99 + current_coherence * 0.01).clamp(0.0, 1.0);
                // Begin senescence if stability drops or age is high
                if new_stability < 0.4 || current_age > 1000 {
                    self.lifecycle = LifecycleStage::Senescent { decay_rate: 0.01 };
                    return LifecycleEvent::StageTransition {
                        from: "Mature".to_string(),
                        to: "Senescent".to_string(),
                    };
                }
                self.lifecycle = LifecycleStage::Mature { stability: new_stability };
            }
            LifecycleStage::Senescent { decay_rate } => {
                // Memory begins to decay
                self.decay(decay_rate);
                // Accelerate decay with low coherence
                let new_rate = if current_coherence < 0.3 { decay_rate * 1.1 } else { decay_rate };
                // Begin dying when too decayed
                // (memory_str was sampled before this tick's decay step)
                if memory_str < 0.2 {
                    self.lifecycle = LifecycleStage::Dying { dissolution_progress: 0.0 };
                    return LifecycleEvent::StageTransition {
                        from: "Senescent".to_string(),
                        to: "Dying".to_string(),
                    };
                }
                self.lifecycle = LifecycleStage::Senescent { decay_rate: new_rate };
            }
            LifecycleStage::Dying { dissolution_progress } => {
                let new_progress = dissolution_progress + 0.1;
                self.dissolve(new_progress);
                if new_progress >= 1.0 {
                    self.lifecycle = LifecycleStage::Dead;
                    return LifecycleEvent::Death { age: current_age };
                }
                self.lifecycle = LifecycleStage::Dying { dissolution_progress: new_progress };
            }
            LifecycleStage::Dead => {
                return LifecycleEvent::AlreadyDead;
            }
        }
        LifecycleEvent::None
    }

    /// Blend of internal memory strength (60%) and environment coherence
    /// (40%), clamped to [0, 1].
    fn calculate_coherence(&self, environment_coherence: f64) -> f64 {
        // Coherence based on memory vector structure
        let internal_coherence = self.memory_strength();
        // Blend with environment
        (internal_coherence * 0.6 + environment_coherence * 0.4).clamp(0.0, 1.0)
    }

    /// Dimension-normalized magnitude of the memory vector, capped at 1.0;
    /// 0.0 for an empty vector.
    fn memory_strength(&self) -> f64 {
        if self.memory_vector.is_empty() {
            return 0.0;
        }
        let magnitude: f64 = self.memory_vector.iter().map(|v| v * v).sum::<f64>().sqrt();
        let dim = self.memory_vector.len() as f64;
        (magnitude / dim.sqrt()).min(1.0)
    }

    /// Strengthen all memory components multiplicatively by `rate * 0.1`.
    fn grow(&mut self, rate: f64) {
        // Strengthen existing memories
        for v in &mut self.memory_vector {
            *v *= 1.0 + rate * 0.1;
        }
    }

    /// Weaken all memory components multiplicatively by `rate`.
    fn decay(&mut self, rate: f64) {
        // Weaken memories
        for v in &mut self.memory_vector {
            *v *= 1.0 - rate;
        }
    }

    /// Zero out memory components whose magnitude falls below `progress`;
    /// as dissolution progresses more of the vector is erased.
    fn dissolve(&mut self, progress: f64) {
        // Zero out memory proportionally
        let threshold = progress;
        for v in &mut self.memory_vector {
            if v.abs() < threshold {
                *v = 0.0;
            }
        }
    }

    /// Attempt to reproduce (create offspring agent).
    ///
    /// Returns `None` unless the agent is Mature with stability > 0.6 and
    /// coherence >= 0.7; otherwise clones the memory with a small random
    /// perturbation (±10%) and births a new agent from it.
    pub fn reproduce(&self) -> Option<MemoryAgent> {
        // Can only reproduce when mature and coherent
        if !matches!(self.lifecycle, LifecycleStage::Mature { stability } if stability > 0.6) {
            return None;
        }
        if self.coherence < 0.7 {
            return None;
        }
        // Create offspring with mutated memory
        let mut offspring_memory = self.memory_vector.clone();
        for v in &mut offspring_memory {
            *v *= 0.9 + pseudo_random_f64() * 0.2; // Small mutation
        }
        // NOTE(review): offspring id `id * 1000 + age` can collide across
        // parents — confirm uniqueness requirements for agent ids.
        Some(MemoryAgent::birth(
            self.id * 1000 + self.age,
            offspring_memory,
        ))
    }

    /// True for every stage except `Dead`.
    pub fn is_alive(&self) -> bool {
        !matches!(self.lifecycle, LifecycleStage::Dead)
    }
}
/// Event reported by an agent's lifecycle tick.
#[derive(Debug)]
pub enum LifecycleEvent {
    /// Nothing noteworthy happened this tick.
    None,
    /// The agent moved from one lifecycle stage to another.
    StageTransition { from: String, to: String },
    /// The agent died; `age` is its age at the moment of death.
    Death { age: u64 },
    /// The tick was a no-op: the agent was already in the `Dead` stage.
    AlreadyDead,
}
// =============================================================================
// Part 3: Hardware-Enforced Spike/Silence Semantics
// =============================================================================
/// A spike buffer that enforces spike/silence semantics:
/// spikes cost energy, are rate-limited by a refractory period, and
/// silence between spikes is tracked explicitly.
pub struct SpikeBuffer {
    /// Spike times (as tick numbers); oldest entries are evicted once
    /// `capacity` is exceeded.
    spikes: Vec<u64>,
    /// Maximum spikes in buffer
    capacity: usize,
    /// Current tick
    current_tick: u64,
    /// Refractory period (minimum ticks between spikes)
    refractory_period: u64,
    /// Last spike time (tick number)
    last_spike: u64,
    /// Energy cost per spike (scaled by spike strength)
    spike_cost: f64,
    /// Current energy
    energy: f64,
    /// Silence counter (ticks since last spike)
    silence_duration: u64,
}
impl SpikeBuffer {
    /// Create a buffer holding at most `capacity` spike timestamps, with a
    /// default refractory period of 3 ticks and a full (100.0) energy reserve.
    pub fn new(capacity: usize) -> Self {
        Self {
            spikes: Vec::with_capacity(capacity),
            capacity,
            current_tick: 0,
            refractory_period: 3,
            // 0 is a sentinel for "never spiked": real spikes always occur
            // at tick >= 1 because `spike` advances the clock before
            // emitting, so tick 0 can never hold a genuine spike.
            last_spike: 0,
            spike_cost: 1.0,
            energy: 100.0,
            silence_duration: 0,
        }
    }
    /// Attempt to emit a spike of the given `strength`.
    ///
    /// Advances the clock by one tick, then refuses to spike while inside
    /// the refractory window or when energy is below `spike_cost * strength`.
    /// Refused attempts count as silence.
    pub fn spike(&mut self, strength: f64) -> SpikeResult {
        self.current_tick += 1;
        // Check refractory period. A fresh buffer (last_spike == 0, the
        // "never spiked" sentinel) must not start out refractory, so the
        // sentinel is skipped.
        if self.last_spike != 0 && self.current_tick - self.last_spike < self.refractory_period {
            self.silence_duration += 1;
            return SpikeResult::Refractory {
                ticks_remaining: self.refractory_period - (self.current_tick - self.last_spike),
            };
        }
        // Check energy: cost scales linearly with spike strength.
        let cost = self.spike_cost * strength;
        if self.energy < cost {
            self.silence_duration += 1;
            return SpikeResult::InsufficientEnergy {
                required: cost,
                available: self.energy,
            };
        }
        // Emit spike
        self.energy -= cost;
        self.last_spike = self.current_tick;
        self.spikes.push(self.current_tick);
        // Maintain capacity by evicting the oldest spike.
        // (O(capacity) front-removal; acceptable for the small buffers here.)
        if self.spikes.len() > self.capacity {
            self.spikes.remove(0);
        }
        let silence_was = self.silence_duration;
        self.silence_duration = 0;
        SpikeResult::Emitted {
            tick: self.current_tick,
            strength,
            silence_before: silence_was,
        }
    }
    /// Advance time without spiking (silence).
    /// Energy slowly regenerates during silence, capped at 100.0.
    pub fn silence(&mut self) {
        self.current_tick += 1;
        self.silence_duration += 1;
        self.energy = (self.energy + 0.5).min(100.0);
    }
    /// Spike rate (spikes per tick) over the most recent `window` ticks.
    /// A zero-length window yields 0.0 instead of dividing by zero.
    pub fn spike_rate(&self, window: u64) -> f64 {
        if window == 0 {
            return 0.0;
        }
        let min_tick = self.current_tick.saturating_sub(window);
        let recent_spikes = self.spikes.iter()
            .filter(|&&t| t >= min_tick)
            .count();
        recent_spikes as f64 / window as f64
    }
    /// True when at least `threshold` consecutive ticks have passed without
    /// a successful spike.
    pub fn is_silent(&self, threshold: u64) -> bool {
        self.silence_duration >= threshold
    }
}
/// Outcome of a `SpikeBuffer::spike` attempt.
#[derive(Debug)]
pub enum SpikeResult {
    /// Spike successfully emitted
    Emitted {
        /// Tick at which the spike was emitted.
        tick: u64,
        /// Strength of the emitted spike (as passed in by the caller).
        strength: f64,
        /// Number of silent ticks that preceded this spike.
        silence_before: u64,
    },
    /// In refractory period, cannot spike
    Refractory { ticks_remaining: u64 },
    /// Not enough energy to spike
    InsufficientEnergy { required: f64, available: f64 },
}
// =============================================================================
// Part 4: The Complete Extropic Substrate
// =============================================================================
/// The complete extropic intelligence substrate: a bounded population of
/// `MemoryAgent`s that is ticked as a whole, with global coherence derived
/// from the living agents each tick.
pub struct ExtropicSubstrate {
    /// All agents in the substrate, keyed by agent id
    agents: HashMap<u64, MemoryAgent>,
    /// Global coherence (mean coherence of living agents, in [0, 1])
    coherence: f64,
    /// Spike bus for inter-agent communication
    spike_bus: SpikeBus,
    /// Current tick
    tick: u64,
    /// Next agent ID (monotonically increasing for spawned agents)
    next_agent_id: AtomicU64,
    /// Configuration
    config: SubstrateConfig,
}
/// Shared spike bus carrying recent spikes between agents.
/// NOTE(review): the bus is constructed but not read or written in the
/// tick path visible here — presumably populated elsewhere; confirm usage.
struct SpikeBus {
    /// Recent spikes from all agents
    spikes: Vec<(u64, u64, f64)>, // (agent_id, tick, strength)
    /// Maximum bus capacity
    capacity: usize,
}
/// Tunable limits governing substrate growth.
struct SubstrateConfig {
    /// Maximum agents the substrate may hold at once
    max_agents: usize,
    /// Minimum global coherence required to spawn new agents
    min_coherence: f64,
    /// Birth rate control: maximum births per living agent per tick
    birth_rate_limit: f64,
}
impl ExtropicSubstrate {
    /// Create an empty substrate holding at most `max_agents` agents,
    /// starting at full global coherence.
    pub fn new(max_agents: usize) -> Self {
        Self {
            agents: HashMap::new(),
            coherence: 1.0,
            spike_bus: SpikeBus {
                spikes: Vec::new(),
                capacity: 1000,
            },
            tick: 0,
            next_agent_id: AtomicU64::new(1),
            config: SubstrateConfig {
                max_agents,
                min_coherence: 0.3,
                birth_rate_limit: 0.1,
            },
        }
    }
    /// Spawn a new agent into the substrate.
    ///
    /// Returns the new agent's id, or `None` when the substrate is at
    /// capacity or global coherence is below the configured minimum
    /// (an incoherent substrate refuses to grow).
    pub fn spawn(&mut self, seed: Vec<f64>) -> Option<u64> {
        if self.agents.len() >= self.config.max_agents {
            return None;
        }
        if self.coherence < self.config.min_coherence {
            return None; // Too incoherent to spawn
        }
        let id = self.next_agent_id.fetch_add(1, Ordering::SeqCst);
        let agent = MemoryAgent::birth(id, seed);
        self.agents.insert(id, agent);
        Some(id)
    }
    /// Tick the entire substrate: advance every agent one lifecycle step,
    /// sweep the dead, admit rate-limited offspring, and refresh global
    /// coherence.
    ///
    /// The reported `births` count reflects offspring actually admitted
    /// into the substrate (after capacity and id-collision checks), not
    /// merely offspring produced.
    pub fn tick(&mut self) -> SubstrateTick {
        self.tick += 1;
        let mut events = Vec::new();
        let mut births = Vec::new();
        let mut deaths = Vec::new();
        // Snapshot values that must stay fixed while agents are mutated.
        let agent_count = self.agents.len();
        let current_coherence = self.coherence;
        // Tick all agents
        for (id, agent) in &mut self.agents {
            let event = agent.tick(current_coherence);
            match &event {
                LifecycleEvent::Death { age } => {
                    deaths.push(*id);
                    events.push((*id, format!("Death at age {}", age)));
                }
                LifecycleEvent::StageTransition { from, to } => {
                    events.push((*id, format!("Transition: {} -> {}", from, to)));
                }
                _ => {}
            }
            // Check for reproduction, throttled by the configured birth rate.
            if agent_count > 0 {
                if let Some(offspring) = agent.reproduce() {
                    if births.len() as f64 / agent_count as f64 <= self.config.birth_rate_limit {
                        births.push(offspring);
                    }
                }
            }
        }
        // Remove dead agents
        for id in &deaths {
            self.agents.remove(id);
        }
        // Add offspring. Offspring ids are derived from the parent's id and
        // age rather than allocated from `next_agent_id`, so they are NOT
        // guaranteed unique: never overwrite a live agent on collision, and
        // only count births that actually entered the substrate.
        let mut birth_count = 0;
        for offspring in births {
            let id = offspring.id;
            if self.agents.len() < self.config.max_agents && !self.agents.contains_key(&id) {
                self.agents.insert(id, offspring);
                events.push((id, "Born".to_string()));
                birth_count += 1;
            }
        }
        // Update global coherence
        self.coherence = self.calculate_global_coherence();
        SubstrateTick {
            tick: self.tick,
            agent_count: self.agents.len(),
            coherence: self.coherence,
            births: birth_count,
            deaths: deaths.len(),
            events,
        }
    }
    /// Mean coherence across living agents; an empty (or fully dead)
    /// substrate is treated as trivially coherent (1.0).
    fn calculate_global_coherence(&self) -> f64 {
        // Single pass over living agents: accumulate sum and count together.
        let (total, alive) = self
            .agents
            .values()
            .filter(|a| a.is_alive())
            .fold((0.0_f64, 0_usize), |(sum, n), a| (sum + a.coherence, n + 1));
        if alive == 0 {
            1.0
        } else {
            total / alive as f64
        }
    }
    /// Number of agents currently stored (including any dead agents not yet
    /// swept by the next tick).
    pub fn agent_count(&self) -> usize {
        self.agents.len()
    }
    /// Current global coherence in [0, 1].
    pub fn coherence(&self) -> f64 {
        self.coherence
    }
    /// Human-readable snapshot: tick, coherence, live-agent count, and a
    /// histogram of lifecycle stages.
    pub fn status(&self) -> String {
        let alive = self.agents.values().filter(|a| a.is_alive()).count();
        let stages: HashMap<String, usize> = self.agents.values()
            .map(|a| match &a.lifecycle {
                LifecycleStage::Embryonic { .. } => "Embryonic",
                LifecycleStage::Growing { .. } => "Growing",
                LifecycleStage::Mature { .. } => "Mature",
                LifecycleStage::Senescent { .. } => "Senescent",
                LifecycleStage::Dying { .. } => "Dying",
                LifecycleStage::Dead => "Dead",
            })
            .fold(HashMap::new(), |mut acc, s| {
                *acc.entry(s.to_string()).or_insert(0) += 1;
                acc
            });
        format!(
            "Tick {} | Coherence: {:.3} | Alive: {} | Stages: {:?}",
            self.tick, self.coherence, alive, stages
        )
    }
}
/// Summary of one substrate tick.
#[derive(Debug)]
pub struct SubstrateTick {
    /// Tick number at which this summary was produced.
    pub tick: u64,
    /// Number of agents stored after this tick.
    pub agent_count: usize,
    /// Global coherence after this tick.
    pub coherence: f64,
    /// Births that occurred during this tick.
    pub births: usize,
    /// Deaths that occurred during this tick.
    pub deaths: usize,
    /// Per-agent event log: (agent_id, human-readable description).
    pub events: Vec<(u64, String)>,
}
/// Simple deterministic pseudo-random value in [0, 1), driven by a global
/// atomic counter: each call consumes the next seed and scrambles it with
/// an LCG-style multiply/add. Not cryptographic; thread-safe by atomicity.
fn pseudo_random_f64() -> f64 {
    static SEED: AtomicU64 = AtomicU64::new(42);
    let seed = SEED.fetch_add(1, Ordering::Relaxed);
    let scrambled = seed.wrapping_mul(0x5DEECE66D).wrapping_add(0xB);
    let bits = (scrambled >> 16) & 0xFFFF;
    bits as f64 / 65536.0
}
#[cfg(test)]
mod tests {
    //! Exercises goal mutation, the agent lifecycle state machine, spike
    //! buffer semantics, and the full substrate simulation end to end.
    use super::*;
    // Goal state should drift away from its initial position when given
    // attractor (positive-weight) and repulsor (negative-weight) feedback.
    #[test]
    fn test_goal_mutation() {
        let mut goal = MutableGoal::new(vec![1.0, 0.0, 0.0]);
        let feedback = GoalFeedback {
            outcome_weights: vec![
                (vec![0.5, 0.5, 0.0], 0.8), // Good outcome nearby
                (vec![0.0, 1.0, 0.0], -0.3), // Bad outcome to avoid
            ],
        };
        println!("Initial goal: {:?}", goal.state);
        for i in 0..20 {
            let result = goal.mutate(&feedback, 0.8);
            println!("Mutation {}: {:?}", i, result);
            println!(" State: {:?}", goal.state);
            println!(" Attractors discovered: {}", goal.discovered_attractors.len());
        }
        // Goal should have moved
        assert!(goal.state[0] != 1.0 || goal.state[1] != 0.0,
            "Goal should have mutated");
    }
    // An agent ticked at fixed coherence should progress through at least
    // two lifecycle stage transitions before (possibly) dying.
    #[test]
    fn test_agent_lifecycle() {
        let mut agent = MemoryAgent::birth(1, vec![1.0, 1.0, 1.0, 1.0]);
        println!("Initial: {:?}", agent.lifecycle);
        let mut stage_changes = 0;
        for tick in 0..2000 {
            let event = agent.tick(0.8);
            if let LifecycleEvent::StageTransition { from, to } = &event {
                println!("Tick {}: {} -> {}", tick, from, to);
                stage_changes += 1;
            }
            if let LifecycleEvent::Death { age } = &event {
                println!("Agent died at age {}", age);
                break;
            }
        }
        assert!(stage_changes >= 2, "Should have gone through multiple stages");
    }
    // Rapid spike attempts must be partially blocked by the refractory
    // period (and possibly by energy depletion).
    #[test]
    fn test_spike_buffer() {
        let mut buffer = SpikeBuffer::new(10);
        // Try to spike rapidly
        let mut emitted = 0;
        let mut blocked = 0;
        for _ in 0..20 {
            match buffer.spike(1.0) {
                SpikeResult::Emitted { silence_before, .. } => {
                    println!("Spike! Silence before: {}", silence_before);
                    emitted += 1;
                }
                SpikeResult::Refractory { ticks_remaining } => {
                    println!("Refractory: {} ticks remaining", ticks_remaining);
                    blocked += 1;
                    buffer.silence(); // Advance time
                }
                SpikeResult::InsufficientEnergy { .. } => {
                    println!("No energy");
                    blocked += 1;
                    buffer.silence();
                }
            }
        }
        println!("Emitted: {}, Blocked: {}", emitted, blocked);
        assert!(blocked > 0, "Refractory period should block some spikes");
    }
    // Full simulation: the substrate should stay above the minimum
    // coherence threshold over a 500-tick run.
    #[test]
    fn test_extropic_substrate() {
        let mut substrate = ExtropicSubstrate::new(50);
        // Spawn initial agents
        for i in 0..10 {
            let seed = vec![1.0, (i as f64) * 0.1, 0.5, 0.5];
            substrate.spawn(seed);
        }
        println!("Initial: {}", substrate.status());
        // Run simulation
        for tick in 0..500 {
            let result = substrate.tick();
            if tick % 50 == 0 || result.births > 0 || result.deaths > 0 {
                println!("Tick {}: births={}, deaths={}, agents={}",
                    tick, result.births, result.deaths, result.agent_count);
                println!(" {}", substrate.status());
            }
            for (agent_id, event) in &result.events {
                if !event.is_empty() {
                    println!(" Agent {}: {}", agent_id, event);
                }
            }
        }
        println!("\nFinal: {}", substrate.status());
        // Substrate should still be coherent
        assert!(substrate.coherence() > 0.3, "Substrate should maintain coherence");
    }
    // Reproduction is timing-dependent, so this test only reports whether
    // any births occurred within 1000 ticks; it makes no hard assertion.
    #[test]
    fn test_reproduction() {
        let mut substrate = ExtropicSubstrate::new(100);
        // Spawn a few agents
        for _ in 0..5 {
            substrate.spawn(vec![1.0, 1.0, 1.0, 1.0]);
        }
        let initial_count = substrate.agent_count();
        // Run until reproduction happens
        let mut reproductions = 0;
        for _ in 0..1000 {
            let result = substrate.tick();
            reproductions += result.births;
            if reproductions > 0 {
                break;
            }
        }
        // May or may not reproduce depending on lifecycle timing
        println!("Reproductions: {}", reproductions);
        println!("Final count: {} (started with {})", substrate.agent_count(), initial_count);
    }
}