Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'

This commit is contained in:
ruv
2026-02-28 14:39:40 -05:00
7854 changed files with 3522914 additions and 0 deletions

View File

@@ -0,0 +1,898 @@
//! Filter pipeline tests for ruQu coherence gate
//!
//! Tests the three-filter decision pipeline:
//! - Structural filter with min-cut based stability
//! - Shift filter for distribution drift detection
//! - Evidence accumulator for e-value convergence
use ruqu::filters::{
EvidenceAccumulator, EvidenceConfig, EvidenceFilter, FilterConfig, FilterPipeline, RegionMask,
ShiftConfig, ShiftFilter, StructuralConfig, StructuralFilter, SystemState, Verdict,
};
// ============================================================================
// Structural Filter Tests
// ============================================================================
// Unit tests for the structural filter (min-cut based stability check).
mod structural_filter_tests {
    use super::*;

    /// `new(threshold)` stores the min-cut threshold verbatim.
    #[test]
    fn test_structural_filter_basic_creation() {
        let filter = StructuralFilter::new(5.0);
        assert_eq!(filter.threshold(), 5.0);
    }

    /// Construction from a full config must honor the threshold field.
    #[test]
    fn test_structural_filter_with_config() {
        let config = StructuralConfig {
            threshold: 3.5,
            max_cut_size: 500,
            use_subpolynomial: false,
            phi: 0.02,
        };
        let filter = StructuralFilter::with_config(config);
        assert_eq!(filter.threshold(), 3.5);
    }

    /// A triangle's minimum cut severs two unit edges (value 2.0), so the
    /// cut value clears the 1.5 threshold: coherent, no boundary edges.
    #[test]
    fn test_structural_filter_triangle_graph() {
        let mut filter = StructuralFilter::new(1.5);
        // Create a triangle (3-connected)
        filter.insert_edge(1, 2, 1.0).unwrap();
        filter.insert_edge(2, 3, 1.0).unwrap();
        filter.insert_edge(3, 1, 1.0).unwrap();
        let state = SystemState::new(3);
        let result = filter.evaluate(&state);
        // The cut value must at least clear the configured threshold.
        assert!(result.cut_value >= 1.5);
        assert!(result.is_coherent);
        assert!(result.boundary_edges.is_empty());
    }

    /// A single unit edge (cut value 1.0) under a 3.0 threshold must be
    /// flagged incoherent with the offending boundary edge reported.
    #[test]
    fn test_structural_filter_single_edge_below_threshold() {
        let config = StructuralConfig {
            threshold: 3.0,
            use_subpolynomial: false,
            ..Default::default()
        };
        let mut filter = StructuralFilter::with_config(config);
        // Single edge has cut value 1.0
        filter.insert_edge(1, 2, 1.0).unwrap();
        let state = SystemState::new(2);
        let result = filter.evaluate(&state);
        // Should be below threshold
        assert!(!result.is_coherent);
        assert!(!result.boundary_edges.is_empty());
    }

    /// Table-driven check of coherence verdicts for several small graphs;
    /// comparison is `>=`, so a cut exactly at threshold passes.
    #[test]
    fn test_structural_filter_various_cut_values() {
        let test_cases = vec![
            (vec![(1, 2, 1.0)], 1.0, true), // Single edge at threshold (>= passes)
            (vec![(1, 2, 2.0)], 1.0, true), // Single edge weight 2.0 above threshold
            (vec![(1, 2, 1.0), (2, 3, 1.0)], 1.0, true), // Path
            (vec![(1, 2, 0.5)], 1.0, false), // Weak edge below threshold
        ];
        for (edges, threshold, expected_coherent) in test_cases {
            let config = StructuralConfig {
                threshold,
                use_subpolynomial: false,
                ..Default::default()
            };
            let mut filter = StructuralFilter::with_config(config);
            for (u, v, w) in edges {
                filter.insert_edge(u, v, w).unwrap();
            }
            let state = SystemState::new(10);
            let result = filter.evaluate(&state);
            assert_eq!(
                result.is_coherent, expected_coherent,
                "Threshold {}, expected coherent: {}",
                threshold, expected_coherent
            );
        }
    }

    /// Deleting a previously inserted edge must succeed and leave the
    /// reported cut value non-negative.
    #[test]
    fn test_structural_filter_edge_deletion() {
        let config = StructuralConfig {
            threshold: 1.0,
            use_subpolynomial: false,
            ..Default::default()
        };
        let mut filter = StructuralFilter::with_config(config);
        // Build a path: 1-2-3
        filter.insert_edge(1, 2, 1.0).unwrap();
        filter.insert_edge(2, 3, 1.0).unwrap();
        // Remove an edge
        filter.delete_edge(1, 2).unwrap();
        let state = SystemState::new(3);
        let result = filter.evaluate(&state);
        // Cut value should decrease
        assert!(result.cut_value >= 0.0);
    }

    /// Inserting the same edge twice is an error.
    #[test]
    fn test_structural_filter_duplicate_edge_error() {
        let mut filter = StructuralFilter::new(1.0);
        filter.insert_edge(1, 2, 1.0).unwrap();
        let result = filter.insert_edge(1, 2, 1.0);
        assert!(result.is_err());
    }

    /// Deleting an edge that was never inserted is an error.
    #[test]
    fn test_structural_filter_delete_nonexistent_edge() {
        let mut filter = StructuralFilter::new(1.0);
        let result = filter.delete_edge(1, 2);
        assert!(result.is_err());
    }

    /// `evaluate` records its own compute time; sanity-check the bound.
    #[test]
    fn test_structural_filter_compute_time_recorded() {
        let mut filter = StructuralFilter::new(1.0);
        filter.insert_edge(1, 2, 1.0).unwrap();
        let state = SystemState::new(2);
        let result = filter.evaluate(&state);
        // Should have recorded some compute time
        assert!(result.compute_time_us < 1_000_000); // Less than 1 second
    }
}
// ============================================================================
// Shift Filter Tests
// ============================================================================
// Unit tests for the shift filter (distribution-drift detector).
mod shift_filter_tests {
    use super::*;

    /// `new(threshold, window)` stores both parameters verbatim.
    #[test]
    fn test_shift_filter_basic_creation() {
        let filter = ShiftFilter::new(0.5, 100);
        assert_eq!(filter.threshold(), 0.5);
        assert_eq!(filter.window_size(), 100);
    }

    /// Near-constant observations should register as stable, low pressure.
    #[test]
    fn test_shift_filter_stable_observations() {
        let mut filter = ShiftFilter::new(0.5, 100);
        // Add stable observations (low variance)
        for i in 0..100 {
            filter.update(0, 0.5 + (i as f64 % 10.0) * 0.001);
        }
        let state = SystemState::new(10);
        let result = filter.evaluate(&state);
        assert!(result.is_stable);
        assert!(result.pressure < 0.5);
    }

    /// A flat baseline followed by a steep ramp must yield nonzero pressure.
    #[test]
    fn test_shift_filter_drift_detection() {
        let mut filter = ShiftFilter::new(0.3, 100);
        // Start with baseline
        for _ in 0..50 {
            filter.update(0, 0.5);
        }
        // Introduce drift
        for i in 0..50 {
            filter.update(0, 0.5 + i as f64 * 0.1); // Increasing values
        }
        let state = SystemState::new(10);
        let result = filter.evaluate(&state);
        // Should detect drift
        assert!(result.pressure > 0.0);
    }

    /// Per-region tracking: each updated region gets its own shift entry.
    #[test]
    fn test_shift_filter_multiple_regions() {
        let mut filter = ShiftFilter::new(0.5, 100);
        // Different patterns per region
        for i in 0..100 {
            filter.update(0, 0.5); // Stable
            filter.update(1, 0.5 + i as f64 * 0.05); // Drifting
            filter.update(2, 0.5); // Stable
        }
        let state = SystemState::new(10);
        let result = filter.evaluate(&state);
        // All three updated regions must appear in the per-region results.
        assert!(result.region_shifts.len() >= 3);
    }

    /// The affected-regions mask and the pressure figure must agree:
    /// a flagged region implies nonzero overall pressure.
    #[test]
    fn test_shift_filter_affected_regions_mask() {
        let mut filter = ShiftFilter::new(0.2, 100);
        // Create severe drift in regions 0 and 2
        for i in 0..100 {
            filter.update(0, i as f64); // Severe drift
            filter.update(1, 0.5); // Stable
            filter.update(2, i as f64 * 0.5); // Moderate drift
        }
        let state = SystemState::new(10);
        let result = filter.evaluate(&state);
        // Check affected regions
        if result.affected_regions.any() {
            assert!(result.pressure > 0.0);
        }
    }

    /// When drift is detected, a positive lead-time estimate accompanies it.
    #[test]
    fn test_shift_filter_lead_time_estimation() {
        let mut filter = ShiftFilter::new(0.3, 100);
        // Create moderate drift
        for i in 0..100 {
            filter.update(0, 0.5 + i as f64 * 0.02);
        }
        let state = SystemState::new(10);
        let result = filter.evaluate(&state);
        // If drifting, should have lead time estimate
        if !result.is_stable {
            assert!(result.lead_time.is_some());
            assert!(result.lead_time.unwrap() >= 1);
        }
    }

    /// `reset` discards accumulated observations.
    #[test]
    fn test_shift_filter_reset() {
        let mut filter = ShiftFilter::new(0.5, 100);
        // Add observations
        for _ in 0..50 {
            filter.update(0, 1.0);
        }
        // Reset
        filter.reset();
        // New observations should be fresh
        let state = SystemState::new(10);
        let result = filter.evaluate(&state);
        // Should be near-zero pressure after reset
        assert!(result.pressure < 0.5 || result.is_stable);
    }

    /// Cycling a fixed value set exercises the variance path; the region's
    /// shift score must be non-negative.
    #[test]
    fn test_shift_filter_variance_computation() {
        let mut filter = ShiftFilter::new(0.5, 100);
        // Add observations with known variance
        let values = [0.0, 1.0, 2.0, 3.0, 4.0];
        for &v in values.iter().cycle().take(100) {
            filter.update(0, v);
        }
        let state = SystemState::new(10);
        let result = filter.evaluate(&state);
        // Should compute some shift based on variance
        assert!(result.region_shifts[0] >= 0.0);
    }
}
// ============================================================================
// Evidence Accumulator Tests
// ============================================================================
// Unit tests for the e-value evidence accumulator.
mod evidence_accumulator_tests {
    use super::*;

    /// A fresh accumulator sits at the neutral e-value of 1.0 (log 0.0)
    /// with no samples recorded.
    #[test]
    fn test_evidence_accumulator_initial_state() {
        let accumulator = EvidenceAccumulator::new();
        assert_eq!(accumulator.samples_seen(), 0);
        assert_eq!(accumulator.e_value(), 1.0);
        assert_eq!(accumulator.log_e_value(), 0.0);
    }

    /// One likelihood ratio above 1.0 (evidence for H1) lifts the e-value
    /// above neutral and bumps the sample counter.
    #[test]
    fn test_evidence_accumulator_update() {
        let mut accumulator = EvidenceAccumulator::new();
        accumulator.update(2.0);
        assert_eq!(accumulator.samples_seen(), 1);
        assert!(accumulator.e_value() > 1.0);
    }

    /// Repeated strong evidence drives the e-value far above 1.0.
    #[test]
    fn test_evidence_accumulator_convergence_positive() {
        let mut accumulator = EvidenceAccumulator::new();
        (0..20).for_each(|_| accumulator.update(2.0));
        assert!(accumulator.e_value() > 100.0);
    }

    /// Repeated counter-evidence drives the e-value toward zero.
    #[test]
    fn test_evidence_accumulator_convergence_negative() {
        let mut accumulator = EvidenceAccumulator::new();
        (0..20).for_each(|_| accumulator.update(0.5));
        assert!(accumulator.e_value() < 0.1);
    }

    /// Alternating 2.0 and 0.5 ratios roughly cancel, keeping the e-value
    /// near neutral.
    #[test]
    fn test_evidence_accumulator_mixed_evidence() {
        let mut accumulator = EvidenceAccumulator::new();
        for _ in 0..50 {
            accumulator.update(2.0);
            accumulator.update(0.5);
        }
        let e_value = accumulator.e_value();
        assert!(e_value > 0.1);
        assert!(e_value < 10.0);
    }

    /// `reset` restores the pristine state regardless of prior updates.
    #[test]
    fn test_evidence_accumulator_reset() {
        let mut accumulator = EvidenceAccumulator::new();
        (0..10).for_each(|_| accumulator.update(2.0));
        accumulator.reset();
        assert_eq!(accumulator.e_value(), 1.0);
        assert_eq!(accumulator.samples_seen(), 0);
    }

    /// Huge likelihood ratios are clamped so the e-value stays finite.
    #[test]
    fn test_evidence_accumulator_extreme_values_clamped() {
        let mut accumulator = EvidenceAccumulator::new();
        accumulator.update(1e20); // Should be clamped (reportedly to 1e10).
        assert!(accumulator.e_value().is_finite());
    }

    /// With equal prior odds, posterior odds should match the e-value.
    #[test]
    fn test_evidence_accumulator_posterior_odds() {
        let mut accumulator = EvidenceAccumulator::new();
        accumulator.update(4.0); // e-value = 4
        let posterior = accumulator.posterior_odds(1.0);
        assert!((posterior - 4.0).abs() < 0.1);
    }
}
// ============================================================================
// Evidence Filter Tests
// ============================================================================
// Unit tests for the evidence filter (e-value thresholding into verdicts).
mod evidence_filter_tests {
    use super::*;

    /// An e-value above tau_permit (10.0) must yield a Permit verdict.
    #[test]
    fn test_evidence_filter_permit_verdict() {
        let mut filter = EvidenceFilter::new(10.0, 0.1);
        // Add strong evidence
        for _ in 0..10 {
            filter.update(2.0);
        }
        let state = SystemState::new(10);
        let result = filter.evaluate(&state);
        assert!(result.e_value > 10.0);
        assert_eq!(result.verdict, Some(Verdict::Permit));
    }

    /// An e-value below tau_deny (0.1) must yield a Deny verdict.
    #[test]
    fn test_evidence_filter_deny_verdict() {
        let mut filter = EvidenceFilter::new(10.0, 0.1);
        // Add negative evidence
        for _ in 0..10 {
            filter.update(0.5);
        }
        let state = SystemState::new(10);
        let result = filter.evaluate(&state);
        assert!(result.e_value < 0.1);
        assert_eq!(result.verdict, Some(Verdict::Deny));
    }

    /// An e-value between the two thresholds yields no verdict (defer).
    #[test]
    fn test_evidence_filter_defer_verdict() {
        let mut filter = EvidenceFilter::new(10.0, 0.1);
        // Add minimal evidence (stays near 1.0)
        filter.update(1.1);
        filter.update(0.9);
        let state = SystemState::new(10);
        let result = filter.evaluate(&state);
        // Should be between thresholds
        assert!(result.e_value > 0.1 && result.e_value < 10.0);
        assert_eq!(result.verdict, None); // Defer
    }

    /// Constructor stores tau_permit / tau_deny verbatim.
    #[test]
    fn test_evidence_filter_thresholds() {
        let filter = EvidenceFilter::new(20.0, 0.05);
        assert_eq!(filter.tau_permit(), 20.0);
        assert_eq!(filter.tau_deny(), 0.05);
    }

    /// Region-scoped updates must not move the global accumulator.
    #[test]
    fn test_evidence_filter_region_accumulators() {
        let mut filter = EvidenceFilter::new(10.0, 0.1);
        // Update different regions
        filter.update_region(0, 2.0);
        filter.update_region(1, 0.5);
        filter.update_region(2, 1.5);
        let state = SystemState::new(10);
        let result = filter.evaluate(&state);
        // Global accumulator should still be at 1.0
        assert!((result.e_value - 1.0).abs() < 0.1);
    }
}
// ============================================================================
// Filter Pipeline Tests
// ============================================================================
// Tests for the combined three-stage FilterPipeline
// (structural -> shift -> evidence).
mod filter_pipeline_tests {
    use super::*;

    /// Happy path: strong graph, stable observations, and conclusive
    /// evidence should produce a Permit verdict.
    #[test]
    fn test_pipeline_all_filters_pass() {
        let config = FilterConfig {
            structural: StructuralConfig {
                threshold: 1.0,
                use_subpolynomial: false,
                ..Default::default()
            },
            shift: ShiftConfig {
                threshold: 0.5,
                ..Default::default()
            },
            evidence: EvidenceConfig {
                tau_permit: 5.0,
                tau_deny: 0.2,
                ..Default::default()
            },
        };
        let mut pipeline = FilterPipeline::new(config);
        // Build good graph: a triangle comfortably above threshold 1.0.
        pipeline.structural_mut().insert_edge(1, 2, 2.0).unwrap();
        pipeline.structural_mut().insert_edge(2, 3, 2.0).unwrap();
        pipeline.structural_mut().insert_edge(3, 1, 2.0).unwrap();
        // Stable shift: constant observations keep pressure low.
        for _ in 0..30 {
            pipeline.shift_mut().update(0, 0.5);
        }
        // Strong evidence: repeated LR=2.0 updates push the e-value
        // past tau_permit (5.0).
        for _ in 0..5 {
            pipeline.evidence_mut().update(2.0);
        }
        let state = SystemState::new(3);
        let result = pipeline.evaluate(&state);
        assert_eq!(result.verdict, Some(Verdict::Permit));
        assert!(result.recommendations.is_empty() || result.structural.is_coherent);
    }

    /// A weak graph under a high structural threshold must Deny.
    #[test]
    fn test_pipeline_structural_fails() {
        let config = FilterConfig {
            structural: StructuralConfig {
                threshold: 5.0, // High threshold
                use_subpolynomial: false,
                ..Default::default()
            },
            ..Default::default()
        };
        let mut pipeline = FilterPipeline::new(config);
        // Weak graph: a single unit edge, cut value 1.0 < 5.0.
        pipeline.structural_mut().insert_edge(1, 2, 1.0).unwrap();
        let state = SystemState::new(2);
        let result = pipeline.evaluate(&state);
        assert_eq!(result.verdict, Some(Verdict::Deny));
        assert!(!result.structural.is_coherent);
    }

    /// Heavy drift against a low shift threshold must prevent a Permit;
    /// either Defer (cautious) or Deny (escalated) is acceptable.
    #[test]
    fn test_pipeline_shift_triggers_defer() {
        let config = FilterConfig {
            structural: StructuralConfig {
                threshold: 1.0,
                use_subpolynomial: false,
                ..Default::default()
            },
            shift: ShiftConfig {
                threshold: 0.1, // Low threshold
                ..Default::default()
            },
            ..Default::default()
        };
        let mut pipeline = FilterPipeline::new(config);
        // Good structure
        pipeline.structural_mut().insert_edge(1, 2, 2.0).unwrap();
        pipeline.structural_mut().insert_edge(2, 3, 2.0).unwrap();
        // Create drift: a steep monotone ramp on region 0.
        for i in 0..50 {
            pipeline.shift_mut().update(0, i as f64);
        }
        let state = SystemState::new(3);
        let result = pipeline.evaluate(&state);
        // Should defer (or deny) due to shift.
        assert!(matches!(
            result.verdict,
            Some(Verdict::Defer) | Some(Verdict::Deny)
        ));
    }

    /// With structure and shift passing, the evidence filter decides; with
    /// no evidence accumulated it cannot reach either tau, so Defer.
    #[test]
    fn test_pipeline_evidence_determines_permit_deny() {
        let config = FilterConfig {
            structural: StructuralConfig {
                threshold: 1.0,
                use_subpolynomial: false,
                ..Default::default()
            },
            shift: ShiftConfig {
                threshold: 0.9, // Permissive
                ..Default::default()
            },
            evidence: EvidenceConfig {
                tau_permit: 5.0,
                tau_deny: 0.2,
                ..Default::default()
            },
        };
        let mut pipeline = FilterPipeline::new(config);
        // Good structure
        pipeline.structural_mut().insert_edge(1, 2, 2.0).unwrap();
        // Minimal shift
        for _ in 0..20 {
            pipeline.shift_mut().update(0, 0.5);
        }
        // Test with insufficient evidence
        let state = SystemState::new(2);
        let result = pipeline.evaluate(&state);
        // Should be Defer (evidence accumulating) since no evidence added.
        assert!(result.verdict == Some(Verdict::Defer) || result.evidence.verdict.is_none());
    }

    /// `reset` must return the evidence accumulator to its neutral state.
    #[test]
    fn test_pipeline_reset() {
        let config = FilterConfig::default();
        let mut pipeline = FilterPipeline::new(config);
        // Add some state
        for _ in 0..10 {
            pipeline.shift_mut().update(0, 1.0);
            pipeline.evidence_mut().update(2.0);
        }
        // Reset
        pipeline.reset();
        // Evaluate fresh
        let state = SystemState::new(10);
        let result = pipeline.evaluate(&state);
        // Evidence should be back to 1.0
        assert!((result.evidence.e_value - 1.0).abs() < 0.5);
    }

    /// `evaluate` records total wall time; sanity-check the bound.
    #[test]
    fn test_pipeline_total_time_recorded() {
        let config = FilterConfig::default();
        let pipeline = FilterPipeline::new(config);
        let state = SystemState::new(10);
        let result = pipeline.evaluate(&state);
        // Should have recorded time (less than 1 second).
        assert!(result.total_time_us < 1_000_000);
    }

    /// A structural failure must come with a human-readable recommendation
    /// that mentions the structural filter.
    #[test]
    fn test_pipeline_recommendations_generated() {
        let config = FilterConfig {
            structural: StructuralConfig {
                threshold: 10.0, // Very high
                use_subpolynomial: false,
                ..Default::default()
            },
            ..Default::default()
        };
        let mut pipeline = FilterPipeline::new(config);
        pipeline.structural_mut().insert_edge(1, 2, 1.0).unwrap();
        let state = SystemState::new(2);
        let result = pipeline.evaluate(&state);
        // Should have recommendations about structural failure
        assert!(!result.recommendations.is_empty());
        assert!(result.recommendations[0].contains("Structural"));
    }
}
// ============================================================================
// Filter Combination Logic Tests
// ============================================================================
// Tests for how individual filter outcomes combine into one verdict.
mod filter_combination_tests {
    use super::*;

    /// A structural failure must produce Deny even when evidence is strong:
    /// Deny outranks every other outcome.
    #[test]
    fn test_deny_takes_priority() {
        // If any filter denies, overall should deny
        let config = FilterConfig {
            structural: StructuralConfig {
                threshold: 10.0,
                use_subpolynomial: false,
                ..Default::default()
            },
            ..Default::default()
        };
        let mut pipeline = FilterPipeline::new(config);
        pipeline.structural_mut().insert_edge(1, 2, 1.0).unwrap();
        // Even with good evidence
        for _ in 0..10 {
            pipeline.evidence_mut().update(2.0);
        }
        let state = SystemState::new(2);
        let result = pipeline.evaluate(&state);
        assert_eq!(result.verdict, Some(Verdict::Deny));
    }

    /// With extreme evidence thresholds, a single weak update cannot reach
    /// either tau, so the pipeline must Defer.
    #[test]
    fn test_defer_when_evidence_accumulating() {
        let config = FilterConfig {
            structural: StructuralConfig {
                threshold: 1.0,
                use_subpolynomial: false,
                ..Default::default()
            },
            shift: ShiftConfig {
                threshold: 0.9,
                ..Default::default()
            },
            evidence: EvidenceConfig {
                tau_permit: 100.0, // Very high threshold
                tau_deny: 0.001,
                ..Default::default()
            },
        };
        let mut pipeline = FilterPipeline::new(config);
        pipeline.structural_mut().insert_edge(1, 2, 2.0).unwrap();
        // Minimal evidence (not enough to decide)
        pipeline.evidence_mut().update(1.1);
        let state = SystemState::new(2);
        let result = pipeline.evaluate(&state);
        // Should defer - evidence not conclusive
        assert_eq!(result.verdict, Some(Verdict::Defer));
    }
}
// ============================================================================
// Proptest Property-Based Tests
// ============================================================================
// Property-based tests (proptest) for the individual filters.
#[cfg(test)]
mod proptest_filters {
    use super::*;
    use proptest::prelude::*;

    proptest! {
        #![proptest_config(ProptestConfig::with_cases(50))]

        // NOTE(review): despite its name, this property only checks that the
        // cut value is non-negative for arbitrary edge weights; it never
        // compares two weights to establish monotonicity, and the
        // `multiplier >= 1.0` guard is vacuous for the 1.0..5.0 range.
        #[test]
        fn prop_structural_coherence_monotonic_with_weight(
            base_weight in 0.1f64..10.0,
            multiplier in 1.0f64..5.0
        ) {
            let config = StructuralConfig {
                threshold: base_weight,
                use_subpolynomial: false,
                ..Default::default()
            };
            let mut filter = StructuralFilter::with_config(config);
            filter.insert_edge(1, 2, base_weight * multiplier).unwrap();
            let state = SystemState::new(2);
            let result = filter.evaluate(&state);
            // Higher weight should increase cut value
            if multiplier >= 1.0 {
                prop_assert!(result.cut_value >= 0.0);
            }
        }

        // The e-value must stay finite and strictly positive for any
        // sequence of bounded likelihood ratios.
        #[test]
        fn prop_evidence_accumulator_bounded(
            likelihood_ratios in prop::collection::vec(0.1f64..10.0, 1..50)
        ) {
            let mut acc = EvidenceAccumulator::new();
            for lr in likelihood_ratios {
                acc.update(lr);
            }
            // E-value should always be finite and positive
            prop_assert!(acc.e_value().is_finite());
            prop_assert!(acc.e_value() > 0.0);
        }

        // Shift pressure must be finite and non-negative for arbitrary
        // observation streams spread round-robin over 10 regions.
        #[test]
        fn prop_shift_filter_pressure_bounded(
            values in prop::collection::vec(0.0f64..100.0, 10..100)
        ) {
            let mut filter = ShiftFilter::new(0.5, 100);
            for (i, v) in values.iter().enumerate() {
                filter.update(i % 10, *v);
            }
            let state = SystemState::new(10);
            let result = filter.evaluate(&state);
            // Pressure should be bounded [0, inf) but typically reasonable
            prop_assert!(result.pressure >= 0.0);
            prop_assert!(result.pressure.is_finite());
        }
    }
}
// ============================================================================
// Region Mask Tests
// ============================================================================
// Unit tests for the 64-bit RegionMask bitset.
mod region_mask_tests {
    use super::*;

    /// An empty mask has no bits set.
    #[test]
    fn test_region_mask_empty() {
        let mask = RegionMask::empty();
        assert_eq!(mask.count(), 0);
        assert!(!mask.any());
    }

    /// A full mask has all 64 bits set.
    #[test]
    fn test_region_mask_all() {
        let mask = RegionMask::all();
        assert_eq!(mask.count(), 64);
        assert!(mask.any());
    }

    /// Setting then clearing a bit round-trips; neighboring bits are
    /// left untouched.
    #[test]
    fn test_region_mask_set_clear() {
        let mut mask = RegionMask::empty();
        mask.set(5);
        assert!(!mask.is_set(4));
        assert!(mask.is_set(5));
        mask.clear(5);
        assert!(!mask.is_set(5));
    }

    /// Union contains every bit set in either operand; the shared bit is
    /// counted once.
    #[test]
    fn test_region_mask_union() {
        let mut lhs = RegionMask::empty();
        let mut rhs = RegionMask::empty();
        lhs.set(1);
        lhs.set(3);
        rhs.set(2);
        rhs.set(3);
        let combined = lhs.union(&rhs);
        assert_eq!(combined.count(), 3);
        assert!(combined.is_set(1));
        assert!(combined.is_set(2));
        assert!(combined.is_set(3));
    }

    /// Intersection keeps only the bits set in both operands.
    #[test]
    fn test_region_mask_intersection() {
        let mut lhs = RegionMask::empty();
        let mut rhs = RegionMask::empty();
        lhs.set(1);
        lhs.set(3);
        rhs.set(2);
        rhs.set(3);
        let common = lhs.intersection(&rhs);
        assert_eq!(common.count(), 1);
        assert!(common.is_set(3));
        assert!(!common.is_set(1));
        assert!(!common.is_set(2));
    }
}

View File

@@ -0,0 +1,535 @@
//! End-to-end integration tests for ruQu coherence gate
//!
//! Tests full fabric initialization, syndrome ingestion through gate decision,
//! and receipt generation with verification.
use ruqu::{
filters::{
EvidenceConfig, FilterConfig, FilterPipeline, ShiftConfig, StructuralConfig, Verdict,
},
prelude::*,
syndrome::{DetectorBitmap, SyndromeBuffer, SyndromeDelta, SyndromeRound},
tile::{
GateDecision, GateThresholds, PermitToken, ReceiptLog, SyndromeDelta as TileSyndromeDelta,
TileReport, TileZero, WorkerTile,
},
TILE_COUNT, WORKER_TILE_COUNT,
};
// ============================================================================
// Full Fabric Initialization Tests
// ============================================================================
/// All 255 worker tiles (ids 1..=255) must initialize with tick 0 and
/// sequentially assigned tile ids.
#[test]
fn test_fabric_initialization_all_tiles() {
    // Create all 255 worker tiles
    let workers: Vec<WorkerTile> = (1..=255).map(WorkerTile::new).collect();
    assert_eq!(workers.len(), WORKER_TILE_COUNT);
    for (i, worker) in workers.iter().enumerate() {
        assert_eq!(worker.tile_id, (i + 1) as u8);
        assert_eq!(worker.tick, 0);
    }
}
/// TileZero's default thresholds must be positive and consistently ordered
/// (tau_permit above tau_deny), and the receipt log starts empty.
#[test]
fn test_fabric_initialization_with_tilezero() {
    let thresholds = GateThresholds::default();
    let tilezero = TileZero::new(thresholds);
    // Verify default thresholds
    assert!(tilezero.thresholds.structural_min_cut > 0.0);
    assert!(tilezero.thresholds.shift_max > 0.0);
    assert!(tilezero.thresholds.tau_deny > 0.0);
    assert!(tilezero.thresholds.tau_permit > tilezero.thresholds.tau_deny);
    assert!(tilezero.receipt_log.is_empty());
}
/// The exported constants must agree: 256 tiles total, of which 255 are
/// workers (one tile remains for TileZero).
#[test]
fn test_fabric_tile_count_matches_constants() {
    assert_eq!(TILE_COUNT, 256);
    assert_eq!(WORKER_TILE_COUNT, 255);
}
// ============================================================================
// Syndrome Ingestion Through Gate Decision Tests
// ============================================================================
/// A single syndrome delta must advance the tick to 1 and produce a report
/// carrying the worker's id with the VALID status bit set.
#[test]
fn test_syndrome_ingestion_single_round() {
    let mut worker = WorkerTile::new(1);
    // Ingest a syndrome
    let delta = TileSyndromeDelta::new(0, 1, 50);
    let report = worker.tick(&delta);
    assert_eq!(report.tile_id, 1);
    assert_eq!(report.tick, 1);
    assert!(report.status & TileReport::STATUS_VALID != 0);
}
fn test_syndrome_ingestion_multiple_rounds() {
let mut worker = WorkerTile::new(1);
// Process multiple syndrome rounds
for i in 0..100 {
let delta = TileSyndromeDelta::new(i as u16 % 64, (i as u16 + 1) % 64, (i % 256) as u16);
let report = worker.tick(&delta);
assert_eq!(report.tick, i + 1);
}
assert_eq!(worker.tick, 100);
}
/// End-to-end "safe" path: workers ingest a quiet syndrome stream, then
/// TileZero merges healthy reports into a Permit decision.
/// NOTE(review): the reports are synthesized with fixed cut/shift/e-value
/// figures rather than derived from the workers' actual state — confirm
/// whether worker-derived reports were intended here.
#[test]
fn test_full_pipeline_syndrome_to_decision_safe() {
    // Setup tiles
    let thresholds = GateThresholds {
        structural_min_cut: 2.0,
        shift_max: 0.7,
        tau_deny: 0.01,
        tau_permit: 50.0,
        permit_ttl_ns: 4_000_000,
    };
    let mut tilezero = TileZero::new(thresholds);
    // Create workers and process syndromes
    let mut workers: Vec<WorkerTile> = (1..=10).map(WorkerTile::new).collect();
    // Build graph with good connectivity
    for worker in &mut workers {
        // Add edges to create a well-connected graph (a 4-cycle)
        worker.patch_graph.add_edge(0, 1, 100);
        worker.patch_graph.add_edge(1, 2, 100);
        worker.patch_graph.add_edge(2, 3, 100);
        worker.patch_graph.add_edge(3, 0, 100);
        worker.patch_graph.recompute_components();
    }
    // Process syndromes with low values (indicating stability)
    for _ in 0..50 {
        for worker in &mut workers {
            let delta = TileSyndromeDelta::new(0, 1, 50); // Low syndrome value
            worker.tick(&delta);
        }
    }
    // Collect reports
    let reports: Vec<TileReport> = workers
        .iter()
        .map(|w| {
            let mut report = TileReport::new(w.tile_id);
            report.local_cut = 10.0; // Good cut value
            report.shift_score = 0.1; // Low shift
            report.e_value = 200.0; // Strong evidence
            report
        })
        .collect();
    let decision = tilezero.merge_reports(reports);
    assert_eq!(decision, GateDecision::Permit);
}
/// Reports whose local cut falls below the structural minimum must merge
/// into a Deny decision, regardless of good shift and evidence figures.
#[test]
fn test_full_pipeline_syndrome_to_decision_unsafe() {
    let thresholds = GateThresholds::default();
    let mut tilezero = TileZero::new(thresholds);
    // Create reports indicating structural problems
    let reports: Vec<TileReport> = (1..=10)
        .map(|i| {
            let mut report = TileReport::new(i);
            report.local_cut = 1.0; // Below the default structural_min_cut
            report.shift_score = 0.1;
            report.e_value = 200.0;
            report
        })
        .collect();
    let decision = tilezero.merge_reports(reports);
    assert_eq!(decision, GateDecision::Deny);
}
/// A high shift score with otherwise healthy figures must merge into a
/// Defer (cautious) decision rather than an outright Deny.
#[test]
fn test_full_pipeline_syndrome_to_decision_cautious() {
    let thresholds = GateThresholds::default();
    let mut tilezero = TileZero::new(thresholds);
    // Create reports with high shift but good structure
    let reports: Vec<TileReport> = (1..=10)
        .map(|i| {
            let mut report = TileReport::new(i);
            report.local_cut = 10.0;
            report.shift_score = 0.8; // Above the default shift_max
            report.e_value = 200.0;
            report
        })
        .collect();
    let decision = tilezero.merge_reports(reports);
    assert_eq!(decision, GateDecision::Defer);
}
// ============================================================================
// GateDecision Variants Tests
// ============================================================================
/// `Permit` must classify as a permit and not as a deny.
#[test]
fn test_gate_decision_safe_variant() {
    let verdict = GateDecision::Permit;
    assert!(!verdict.is_deny());
    assert!(verdict.is_permit());
}
/// `Defer` is neither a permit nor a deny — the undecided middle ground.
#[test]
fn test_gate_decision_cautious_variant() {
    let verdict = GateDecision::Defer;
    assert!(!verdict.is_deny());
    assert!(!verdict.is_permit());
}
/// `Deny` must classify as a deny and not as a permit.
#[test]
fn test_gate_decision_unsafe_variant() {
    let verdict = GateDecision::Deny;
    assert!(verdict.is_deny());
    assert!(!verdict.is_permit());
}
/// The three decision variants must compare pairwise unequal.
#[test]
fn test_gate_decision_all_variants_distinct() {
    let (permit, defer, deny) = (
        GateDecision::Permit,
        GateDecision::Defer,
        GateDecision::Deny,
    );
    assert_ne!(permit, deny);
    assert_ne!(permit, defer);
    assert_ne!(defer, deny);
}
// ============================================================================
// Receipt Generation and Verification Tests
// ============================================================================
/// Every merge_reports call must append exactly one receipt to the log.
#[test]
fn test_receipt_generation_on_decision() {
    let thresholds = GateThresholds::default();
    let mut tilezero = TileZero::new(thresholds);
    // Make a decision
    let reports: Vec<TileReport> = (1..=5)
        .map(|i| {
            let mut report = TileReport::new(i);
            report.local_cut = 10.0;
            report.shift_score = 0.1;
            report.e_value = 200.0;
            report
        })
        .collect();
    tilezero.merge_reports(reports);
    // Verify receipt was created
    assert_eq!(tilezero.receipt_log.len(), 1);
}
/// Ten decisions produce ten receipts, retrievable by sequence number,
/// with sequence numbers assigned monotonically from 0.
#[test]
fn test_receipt_chain_integrity() {
    let thresholds = GateThresholds::default();
    let mut tilezero = TileZero::new(thresholds);
    // Make multiple decisions
    for _ in 0..10 {
        let reports: Vec<TileReport> = (1..=3)
            .map(|i| {
                let mut report = TileReport::new(i);
                report.local_cut = 10.0;
                report.shift_score = 0.1;
                report.e_value = 200.0;
                report
            })
            .collect();
        tilezero.merge_reports(reports);
    }
    // Verify chain
    assert_eq!(tilezero.receipt_log.len(), 10);
    // Check that entries are chainable by looking up sequences
    for i in 0..10 {
        let entry = tilezero.receipt_log.get(i as u64);
        assert!(entry.is_some());
        assert_eq!(entry.unwrap().sequence, i as u64);
    }
}
/// After a Permit decision, issue_permit must hand out a token echoing the
/// decision and carrying a positive TTL.
#[test]
fn test_permit_token_issuance() {
    let thresholds = GateThresholds::default();
    let mut tilezero = TileZero::new(thresholds);
    // Create reports for permit
    let reports: Vec<TileReport> = (1..=5)
        .map(|i| {
            let mut report = TileReport::new(i);
            report.local_cut = 10.0;
            report.shift_score = 0.1;
            report.e_value = 200.0;
            report
        })
        .collect();
    let decision = tilezero.merge_reports(reports);
    assert_eq!(decision, GateDecision::Permit);
    let token = tilezero.issue_permit(&decision);
    assert_eq!(token.decision, GateDecision::Permit);
    assert!(token.ttl_ns > 0);
}
/// A permit token is valid for timestamps inside its TTL window
/// (here 1_000_000 .. 1_500_000 ns) and invalid past expiry.
#[test]
fn test_permit_token_validity_window() {
    let token = PermitToken {
        decision: GateDecision::Permit,
        sequence: 0,
        timestamp: 1_000_000,
        ttl_ns: 500_000,
        witness_hash: [0u8; 32],
        signature: [1u8; 64], // Non-zero placeholder
    };
    // Timestamps inside the TTL window are accepted...
    for now in [1_200_000, 1_499_999] {
        assert!(token.is_valid(now));
    }
    // ...while timestamps past expiry are rejected.
    for now in [1_500_001, 2_000_000] {
        assert!(!token.is_valid(now));
    }
}
// ============================================================================
// Integration with Filter Pipeline Tests
// ============================================================================
/// Integration: a strong triangle graph, stable observations, and strong
/// evidence must drive the filter pipeline to a Permit verdict.
#[test]
fn test_filter_pipeline_integration_permit() {
    let config = FilterConfig {
        structural: StructuralConfig {
            threshold: 1.0,
            use_subpolynomial: false,
            ..Default::default()
        },
        shift: ShiftConfig {
            threshold: 0.5,
            ..Default::default()
        },
        evidence: EvidenceConfig {
            tau_permit: 5.0,
            tau_deny: 0.2,
            ..Default::default()
        },
    };
    let mut pipeline = FilterPipeline::new(config);
    // Build strong graph
    pipeline.structural_mut().insert_edge(1, 2, 2.0).unwrap();
    pipeline.structural_mut().insert_edge(2, 3, 2.0).unwrap();
    pipeline.structural_mut().insert_edge(3, 1, 2.0).unwrap();
    // Add stable observations
    for _ in 0..20 {
        pipeline.shift_mut().update(0, 0.5);
    }
    // Add strong evidence
    for _ in 0..5 {
        pipeline.evidence_mut().update(2.0);
    }
    let state = ruqu::filters::SystemState::new(3);
    let result = pipeline.evaluate(&state);
    assert_eq!(result.verdict, Some(Verdict::Permit));
}
/// Integration: a single weak edge (cut 1.0) against a 5.0 structural
/// threshold must drive the filter pipeline to a Deny verdict.
#[test]
fn test_filter_pipeline_integration_deny() {
    let config = FilterConfig {
        structural: StructuralConfig {
            threshold: 5.0,
            use_subpolynomial: false,
            ..Default::default()
        },
        ..Default::default()
    };
    let mut pipeline = FilterPipeline::new(config);
    // Build weak graph
    pipeline.structural_mut().insert_edge(1, 2, 1.0).unwrap();
    let state = ruqu::filters::SystemState::new(2);
    let result = pipeline.evaluate(&state);
    assert_eq!(result.verdict, Some(Verdict::Deny));
}
// ============================================================================
// End-to-End Workflow Tests
// ============================================================================
/// Full happy-path workflow: init fabric, build worker graphs, stream
/// syndromes, merge reports, then issue and check a permit token.
/// NOTE(review): `cut_value.max(10.0)` clamps every report to at least
/// 10.0, so the Permit outcome does not actually depend on the workers'
/// computed cut — confirm whether that clamp is intended.
#[test]
fn test_complete_workflow_healthy_system() {
    // 1. Initialize fabric
    let thresholds = GateThresholds::default();
    let mut tilezero = TileZero::new(thresholds);
    let mut workers: Vec<WorkerTile> = (1..=5).map(WorkerTile::new).collect();
    // 2. Build graph structure in each worker (a triangle)
    for worker in &mut workers {
        worker.patch_graph.add_edge(0, 1, 200);
        worker.patch_graph.add_edge(1, 2, 200);
        worker.patch_graph.add_edge(2, 0, 200);
        worker.patch_graph.recompute_components();
    }
    // 3. Simulate syndrome stream
    for cycle in 0..50 {
        for worker in &mut workers {
            let delta = TileSyndromeDelta::new(
                (cycle % 3) as u16,
                ((cycle + 1) % 3) as u16,
                50, // Low syndrome value
            );
            worker.tick(&delta);
        }
    }
    // 4. Collect reports and make decision
    let reports: Vec<TileReport> = workers
        .iter()
        .map(|w| {
            let mut report = TileReport::new(w.tile_id);
            report.local_cut = w.local_cut_state.cut_value.max(10.0);
            report.shift_score = 0.1;
            report.e_value = 200.0;
            report
        })
        .collect();
    let decision = tilezero.merge_reports(reports);
    // 5. Verify outcome
    assert_eq!(decision, GateDecision::Permit);
    assert_eq!(tilezero.receipt_log.len(), 1);
    // 6. Issue and verify permit
    let token = tilezero.issue_permit(&decision);
    assert_eq!(token.decision, GateDecision::Permit);
}
/// Degradation workflow: the cut value decays by 0.5 each cycle; once it
/// drops below the structural minimum the gate must Deny, and every
/// decision (of whatever kind) must be logged.
#[test]
fn test_complete_workflow_degrading_system() {
    let thresholds = GateThresholds::default();
    let mut tilezero = TileZero::new(thresholds);
    // Simulate degradation over time
    for cycle in 0..20 {
        let cut_value = 10.0 - (cycle as f64 * 0.5); // Degrading cut
        let reports: Vec<TileReport> = (1..=5)
            .map(|i| {
                let mut report = TileReport::new(i);
                report.local_cut = cut_value;
                report.shift_score = 0.1 + (cycle as f64 * 0.02); // Rising shift
                report.e_value = 200.0 / (cycle as f64 + 1.0); // Fading evidence
                report
            })
            .collect();
        let decision = tilezero.merge_reports(reports);
        // Eventually should transition from Permit -> Defer -> Deny
        if cut_value < thresholds.structural_min_cut {
            assert_eq!(decision, GateDecision::Deny);
        }
    }
    // Should have logged all decisions
    assert_eq!(tilezero.receipt_log.len(), 20);
}
// ============================================================================
// Proptest Property-Based Tests
// ============================================================================
#[cfg(test)]
mod proptest_integration {
    use super::*;
    use proptest::prelude::*;
    proptest! {
        // 100 randomized cases per property.
        #![proptest_config(ProptestConfig::with_cases(100))]
        // Property: the merged gate decision must agree with the per-report
        // extremes (minimum positive cut, maximum shift) and the default
        // thresholds: low cut forces Deny, otherwise high shift forces Defer.
        #[test]
        fn prop_decision_consistency(
            cut_values in prop::collection::vec(0.0f64..20.0, 1..10),
            shift_values in prop::collection::vec(0.0f64..1.0, 1..10),
            e_values in prop::collection::vec(0.01f64..500.0, 1..10),
        ) {
            let thresholds = GateThresholds::default();
            let mut tilezero = TileZero::new(thresholds);
            // NOTE(review): zip truncates to the shortest of the three input
            // vectors, so the report count is the minimum of the three lengths.
            let reports: Vec<TileReport> = cut_values
                .iter()
                .zip(shift_values.iter())
                .zip(e_values.iter())
                .enumerate()
                .map(|(i, ((cut, shift), e_val))| {
                    let mut report = TileReport::new((i + 1) as u8);
                    report.local_cut = *cut;
                    report.shift_score = *shift;
                    report.e_value = *e_val;
                    report
                })
                .collect();
            let decision = tilezero.merge_reports(reports.clone());
            // Verify decision is consistent with filters.
            // Minimum over strictly-positive cuts only; if every cut is 0.0
            // this stays f64::MAX and the Deny branch below is skipped.
            let min_cut: f64 = reports.iter().map(|r| r.local_cut).filter(|c| *c > 0.0).fold(f64::MAX, |a, b| a.min(b));
            let max_shift: f64 = reports.iter().map(|r| r.shift_score).fold(0.0, |a, b| a.max(b));
            if min_cut < thresholds.structural_min_cut {
                prop_assert_eq!(decision, GateDecision::Deny);
            } else if max_shift >= thresholds.shift_max {
                prop_assert_eq!(decision, GateDecision::Defer);
            }
        }
        // Property: every merge_reports call appends exactly one receipt, so
        // the log length equals the number of decisions made.
        #[test]
        fn prop_receipt_log_always_grows(num_decisions in 1usize..50) {
            let thresholds = GateThresholds::default();
            let mut tilezero = TileZero::new(thresholds);
            for _ in 0..num_decisions {
                let reports: Vec<TileReport> = (1..=3)
                    .map(|i| {
                        let mut report = TileReport::new(i);
                        report.local_cut = 10.0;
                        report.shift_score = 0.1;
                        report.e_value = 200.0;
                        report
                    })
                    .collect();
                tilezero.merge_reports(reports);
            }
            prop_assert_eq!(tilezero.receipt_log.len(), num_decisions);
        }
    }
}

View File

@@ -0,0 +1,942 @@
//! Stress and edge case tests for ruQu coherence gate
//!
//! Tests for high throughput syndrome streaming, memory pressure (64KB budget),
//! rapid decision cycling, and error recovery scenarios.
use ruqu::filters::{
EvidenceAccumulator, EvidenceConfig, EvidenceFilter, FilterConfig, FilterPipeline, ShiftConfig,
ShiftFilter, StructuralConfig, StructuralFilter, SystemState, Verdict,
};
use ruqu::syndrome::{DetectorBitmap, SyndromeBuffer, SyndromeDelta, SyndromeRound};
use ruqu::tile::{
GateDecision, GateThresholds, PatchGraph, ReceiptLog, SyndromeDelta as TileSyndromeDelta,
TileReport, TileZero, WorkerTile, MAX_PATCH_EDGES, MAX_PATCH_VERTICES, SYNDROME_BUFFER_DEPTH,
};
use ruqu::{TILE_MEMORY_BUDGET, WORKER_TILE_COUNT};
use std::time::Instant;
// ============================================================================
// High Throughput Syndrome Streaming Tests
// ============================================================================
mod throughput_tests {
    use super::*;

    /// Streams 10k rounds through a 1024-deep ring buffer and checks that the
    /// newest rounds survive while everything older is evicted.
    #[test]
    fn test_syndrome_stream_10k_rounds() {
        let mut buffer = SyndromeBuffer::new(1024);
        for i in 0..10_000 {
            let mut detectors = DetectorBitmap::new(64);
            if i % 100 == 0 {
                detectors.set(i as usize % 64, true);
            }
            let round = SyndromeRound::new(i, i, i * 1_000, detectors, 0);
            buffer.push(round);
        }
        // Buffer should still function correctly: capacity is respected and
        // only the most recent 1024 rounds (8976..=9999) are retained.
        assert_eq!(buffer.len(), 1024);
        assert!(buffer.get(9_999).is_some());
        assert!(buffer.get(8_975).is_none()); // Evicted
    }

    /// Pushes 100k rounds with varying detector patterns and bounds the wall
    /// time as a coarse performance regression check.
    #[test]
    fn test_syndrome_stream_100k_rounds() {
        let mut buffer = SyndromeBuffer::new(1024);
        let start = Instant::now();
        for i in 0..100_000u64 {
            let mut detectors = DetectorBitmap::new(256);
            // BUG FIX: the original gated this block on `i % 10 == 0`, under
            // which `i % 10` is always 0 and the inner loop never executed,
            // so no round ever had fired detectors. Inverting the condition
            // sets 1..=9 bits on the other rounds, actually exercising
            // non-empty bitmaps as intended.
            if i % 10 != 0 {
                for j in 0..(i % 10) as usize {
                    detectors.set(j, true);
                }
            }
            let round = SyndromeRound::new(i, i, i * 1_000, detectors, 0);
            buffer.push(round);
        }
        let duration = start.elapsed();
        // Performance sanity check - should complete in reasonable time
        assert!(
            duration.as_millis() < 5_000,
            "100k rounds took too long: {:?}",
            duration
        );
        // Data integrity
        assert_eq!(buffer.len(), 1024);
    }

    /// A single worker tile absorbs 10k syndrome deltas within the budget.
    #[test]
    fn test_worker_tile_high_throughput() {
        let mut tile = WorkerTile::new(1);
        let start = Instant::now();
        for i in 0..10_000 {
            let delta =
                TileSyndromeDelta::new((i % 64) as u16, ((i + 1) % 64) as u16, (i % 256) as u16);
            tile.tick(&delta);
        }
        let duration = start.elapsed();
        assert_eq!(tile.tick, 10_000);
        assert!(
            duration.as_millis() < 5_000,
            "10k ticks took too long: {:?}",
            duration
        );
    }

    /// TileZero merges 1000 batches of 50 reports within the time budget and
    /// logs exactly one receipt per merge.
    #[test]
    fn test_tilezero_high_report_throughput() {
        let thresholds = GateThresholds::default();
        let mut tilezero = TileZero::new(thresholds);
        let start = Instant::now();
        for _ in 0..1_000 {
            let reports: Vec<TileReport> = (1..=50)
                .map(|i| {
                    let mut report = TileReport::new(i);
                    report.local_cut = 10.0;
                    report.shift_score = 0.1;
                    report.e_value = 200.0;
                    report
                })
                .collect();
            tilezero.merge_reports(reports);
        }
        let duration = start.elapsed();
        assert_eq!(tilezero.receipt_log.len(), 1_000);
        assert!(
            duration.as_millis() < 5_000,
            "1000 merges took too long: {:?}",
            duration
        );
    }

    /// Bulk XOR/AND/OR over 1024-bit bitmaps should run at word granularity.
    #[test]
    fn test_bitmap_operations_throughput() {
        let mut a = DetectorBitmap::new(1024);
        let mut b = DetectorBitmap::new(1024);
        // Setup: `a` holds the even bits, `b` the odd bits.
        for i in (0..1024).step_by(2) {
            a.set(i, true);
        }
        for i in (1..1024).step_by(2) {
            b.set(i, true);
        }
        let start = Instant::now();
        for _ in 0..100_000 {
            let _ = a.xor(&b);
            let _ = a.and(&b);
            let _ = a.or(&b);
        }
        let duration = start.elapsed();
        // 300k bitmap operations should be fast (SIMD-like)
        assert!(
            duration.as_millis() < 2_000,
            "Bitmap ops took too long: {:?}",
            duration
        );
    }

    /// One million popcounts should compile down to the hardware instruction.
    #[test]
    fn test_popcount_throughput() {
        let mut bitmap = DetectorBitmap::new(1024);
        for i in (0..1024).step_by(3) {
            bitmap.set(i, true);
        }
        let start = Instant::now();
        let mut total = 0usize;
        for _ in 0..1_000_000 {
            total += bitmap.popcount();
        }
        let duration = start.elapsed();
        // 1M popcounts should be very fast (hardware instruction)
        assert!(
            duration.as_millis() < 1_000,
            "Popcount ops took too long: {:?}",
            duration
        );
        assert!(total > 0); // Prevent optimization
    }
}
// ============================================================================
// Memory Pressure Tests (64KB Budget)
// ============================================================================
mod memory_pressure_tests {
    use super::*;
    // Each worker tile must fit the per-tile memory envelope: 64KB target,
    // 128KB hard ceiling enforced here.
    #[test]
    fn test_worker_tile_memory_budget() {
        let size = WorkerTile::memory_size();
        // Target is 64KB per tile, allow up to 128KB
        assert!(
            size <= TILE_MEMORY_BUDGET * 2,
            "WorkerTile exceeds 128KB budget: {} bytes",
            size
        );
        // Log actual size for monitoring
        println!(
            "WorkerTile memory: {} bytes ({:.1}% of 64KB)",
            size,
            (size as f64 / 65536.0) * 100.0
        );
    }
    #[test]
    fn test_patch_graph_memory_budget() {
        let size = PatchGraph::memory_size();
        // PatchGraph should be ~32KB
        assert!(size <= 65536, "PatchGraph exceeds 64KB: {} bytes", size);
        println!("PatchGraph memory: {} bytes", size);
    }
    #[test]
    fn test_syndrome_buffer_memory_budget() {
        let size = ruqu::tile::SyndromBuffer::memory_size();
        // SyndromBuffer should be ~16KB
        assert!(size <= 32768, "SyndromBuffer exceeds 32KB: {} bytes", size);
        println!("SyndromBuffer memory: {} bytes", size);
    }
    // Extrapolates a full 256-tile fabric (255 workers + TileZero) and bounds
    // the aggregate footprint.
    #[test]
    fn test_multiple_tiles_memory() {
        // Simulate 256-tile fabric memory
        let tile_size = WorkerTile::memory_size();
        let total_memory = tile_size * 255; // 255 worker tiles
        // Total should be reasonable (target ~16MB for all tiles)
        let mb = total_memory / (1024 * 1024);
        println!("Total fabric memory (255 tiles): {} MB", mb);
        assert!(mb < 64, "Total fabric memory exceeds 64MB: {} MB", mb);
    }
    // Inserts every edge of a 16-vertex clique; add_edge signals success by
    // returning Some, and the graph's edge counter must match what succeeded.
    #[test]
    fn test_patch_graph_at_capacity() {
        let mut graph = PatchGraph::new();
        // Fill to edge capacity
        let mut edge_count = 0;
        for v1 in 0..16 {
            for v2 in (v1 + 1)..16 {
                if graph.add_edge(v1, v2, 100).is_some() {
                    edge_count += 1;
                }
            }
        }
        // Should handle many edges
        assert!(edge_count > 0);
        assert_eq!(graph.num_edges as usize, edge_count);
    }
    // Builds a path touching vertices up to the limit; the graph must never
    // report more vertices than MAX_PATCH_VERTICES.
    #[test]
    fn test_patch_graph_vertex_limit() {
        let mut graph = PatchGraph::new();
        // Try to use vertices up to limit
        for i in 0..(MAX_PATCH_VERTICES - 1) {
            let v1 = i as u16;
            let v2 = (i + 1) as u16;
            // Guard is always true here (v2 <= MAX_PATCH_VERTICES - 1) but
            // kept as a belt-and-braces bound on the vertex id.
            if v2 < MAX_PATCH_VERTICES as u16 {
                graph.add_edge(v1, v2, 100);
            }
        }
        assert!(graph.num_vertices <= MAX_PATCH_VERTICES as u16);
    }
    // The fixed-depth syndrome buffer saturates at SYNDROME_BUFFER_DEPTH: an
    // extra append must not grow the count past the depth.
    #[test]
    fn test_syndrome_buffer_at_depth() {
        let mut buffer = ruqu::tile::SyndromBuffer::new();
        // Fill to depth
        for i in 0..SYNDROME_BUFFER_DEPTH as u32 {
            let entry = ruqu::tile::SyndromeEntry {
                round: i,
                syndrome: [i as u8; 8],
                flags: 0,
            };
            buffer.append(entry);
        }
        assert_eq!(buffer.count as usize, SYNDROME_BUFFER_DEPTH);
        // Overflow
        let entry = ruqu::tile::SyndromeEntry {
            round: SYNDROME_BUFFER_DEPTH as u32,
            syndrome: [0; 8],
            flags: 0,
        };
        buffer.append(entry);
        assert_eq!(buffer.count as usize, SYNDROME_BUFFER_DEPTH);
    }
    // The receipt log grows without bound: 10k appends all land and older
    // entries stay addressable by index.
    #[test]
    fn test_receipt_log_growth() {
        let mut log = ReceiptLog::new();
        // Log many receipts
        for i in 0..10_000 {
            log.append(GateDecision::Permit, i, i * 1_000, [0u8; 32]);
        }
        assert_eq!(log.len(), 10_000);
        // Should still be searchable
        assert!(log.get(5_000).is_some());
    }
}
// ============================================================================
// Rapid Decision Cycling Tests
// ============================================================================
mod rapid_decision_tests {
    use super::*;
    // Alternating healthy/unhealthy cut values must flip the verdict every
    // cycle, with every one of the 1000 decisions landing in the receipt log.
    #[test]
    fn test_rapid_permit_deny_cycling() {
        let thresholds = GateThresholds {
            structural_min_cut: 5.0,
            ..Default::default()
        };
        let mut tilezero = TileZero::new(thresholds);
        for i in 0..1_000 {
            // Even cycles are comfortably above the cut floor, odd ones below.
            let cut_value = if i % 2 == 0 { 10.0 } else { 1.0 };
            let reports: Vec<TileReport> = (1..=5)
                .map(|j| {
                    let mut report = TileReport::new(j);
                    report.local_cut = cut_value;
                    report.shift_score = 0.1;
                    report.e_value = 200.0;
                    report
                })
                .collect();
            let decision = tilezero.merge_reports(reports);
            if cut_value < 5.0 {
                assert_eq!(decision, GateDecision::Deny);
            } else {
                assert_eq!(decision, GateDecision::Permit);
            }
        }
        assert_eq!(tilezero.receipt_log.len(), 1_000);
    }
    // 10k full pipeline evaluations against a small fixed graph must finish
    // within the 5s budget.
    #[test]
    fn test_rapid_filter_evaluation() {
        let config = FilterConfig {
            structural: StructuralConfig {
                threshold: 2.0,
                use_subpolynomial: false,
                ..Default::default()
            },
            shift: ShiftConfig {
                threshold: 0.5,
                ..Default::default()
            },
            evidence: EvidenceConfig {
                tau_permit: 10.0,
                tau_deny: 0.1,
                ..Default::default()
            },
        };
        let mut pipeline = FilterPipeline::new(config);
        pipeline.structural_mut().insert_edge(1, 2, 3.0).unwrap();
        pipeline.structural_mut().insert_edge(2, 3, 3.0).unwrap();
        let state = SystemState::new(3);
        let start = Instant::now();
        for _ in 0..10_000 {
            let _ = pipeline.evaluate(&state);
        }
        let duration = start.elapsed();
        // 10k evaluations should be fast
        assert!(
            duration.as_millis() < 5_000,
            "10k evaluations took too long: {:?}",
            duration
        );
    }
    // 100k multiplicative updates with a ratio > 1 must both run quickly and
    // drive the accumulated e-value extremely high.
    #[test]
    fn test_evidence_rapid_accumulation() {
        let mut acc = EvidenceAccumulator::new();
        let start = Instant::now();
        for _ in 0..100_000 {
            acc.update(1.1);
        }
        let duration = start.elapsed();
        // 100k updates should be fast
        assert!(
            duration.as_millis() < 1_000,
            "100k evidence updates took too long: {:?}",
            duration
        );
        // E-value should be very high
        assert!(acc.e_value() > 1e10);
    }
    // The shift filter sustains 100k updates cycling through 64 detectors.
    #[test]
    fn test_shift_filter_rapid_updates() {
        let mut filter = ShiftFilter::new(0.5, 100);
        let start = Instant::now();
        for i in 0..100_000 {
            filter.update(i % 64, (i as f64) % 10.0);
        }
        let duration = start.elapsed();
        assert!(
            duration.as_millis() < 2_000,
            "100k shift updates took too long: {:?}",
            duration
        );
    }
    // Sinusoidally varying cut/shift inputs should produce at least one
    // Permit/Defer/Deny transition across 1000 cycles.
    #[test]
    fn test_decision_state_transitions() {
        let thresholds = GateThresholds::default();
        let mut tilezero = TileZero::new(thresholds);
        let mut last_decision = GateDecision::Permit;
        let mut transitions = 0;
        for i in 0..1_000 {
            // Vary parameters to cause state changes
            let cut_value = 5.0 + (i as f64).sin() * 10.0;
            let shift_score = 0.3 + (i as f64).cos().abs() * 0.4;
            let reports: Vec<TileReport> = (1..=5)
                .map(|j| {
                    let mut report = TileReport::new(j);
                    // Floor at 0.1 because the sine can push the cut negative.
                    report.local_cut = cut_value.max(0.1);
                    report.shift_score = shift_score;
                    report.e_value = 200.0;
                    report
                })
                .collect();
            let decision = tilezero.merge_reports(reports);
            if decision != last_decision {
                transitions += 1;
                last_decision = decision;
            }
        }
        // Should have some state transitions
        println!("Decision state transitions: {}", transitions);
        assert!(transitions > 0);
    }
}
// ============================================================================
// Error Recovery Tests
// ============================================================================
mod error_recovery_tests {
    use super::*;

    /// Bad edge operations must error out without corrupting the filter.
    #[test]
    fn test_structural_filter_edge_operation_errors() {
        let mut filter = StructuralFilter::new(5.0);

        // Inserting the same edge twice is rejected.
        filter.insert_edge(1, 2, 1.0).unwrap();
        assert!(filter.insert_edge(1, 2, 1.0).is_err());

        // Deleting an edge that was never inserted is rejected.
        assert!(filter.delete_edge(5, 6).is_err());

        // The filter remains usable after the failed operations.
        let eval = filter.evaluate(&SystemState::new(2));
        assert!(eval.compute_time_us < 1_000_000);
    }

    /// Invalid graph mutations are ignored and leave the graph intact.
    #[test]
    fn test_patch_graph_recovery_from_bad_operations() {
        let mut graph = PatchGraph::new();
        graph.add_edge(0, 1, 100);
        graph.add_edge(1, 2, 100);

        // Self-loop, out-of-range vertex, and removal of a missing edge.
        let _ = graph.add_edge(0, 0, 100);
        let _ = graph.add_edge(MAX_PATCH_VERTICES as u16, 0, 100);
        let _ = graph.remove_edge(5, 6);

        // Only the two valid edges remain and the cut is still computable.
        assert_eq!(graph.num_edges, 2);
        assert!(graph.estimate_local_cut() > 0.0);
    }

    /// Interleaved fills and clears leave the ring buffer consistent.
    #[test]
    fn test_buffer_recovery_from_rapid_operations() {
        let mut buffer = SyndromeBuffer::new(100);
        for cycle in 0..100 {
            for i in 0..50 {
                let id = cycle * 50 + i;
                buffer.push(SyndromeRound::new(
                    id,
                    id,
                    id * 1_000,
                    DetectorBitmap::new(64),
                    0,
                ));
            }
            // Periodically wipe the buffer mid-stream.
            if cycle % 10 == 0 {
                buffer.clear();
            }
        }
        assert!(buffer.len() <= 100);
    }

    /// A reset returns a worker tile to a clean, usable state.
    #[test]
    fn test_worker_tile_reset_recovery() {
        let mut tile = WorkerTile::new(1);

        // Accumulate tick and graph state.
        for _ in 0..100 {
            tile.tick(&TileSyndromeDelta::new(0, 1, 100));
        }
        tile.patch_graph.add_edge(0, 1, 100);
        tile.patch_graph.add_edge(1, 2, 100);

        tile.reset();

        // Everything is zeroed out.
        assert_eq!(tile.tick, 0);
        assert_eq!(tile.patch_graph.num_edges, 0);
        assert_eq!(tile.syndrome_buffer.count, 0);

        // And the tile processes deltas again from a fresh tick counter.
        let report = tile.tick(&TileSyndromeDelta::new(0, 1, 50));
        assert_eq!(report.tick, 1);
    }

    /// Resetting the pipeline restores neutral evidence.
    #[test]
    fn test_filter_pipeline_reset_recovery() {
        let mut pipeline = FilterPipeline::new(FilterConfig::default());

        // Build up shift and evidence state.
        for _ in 0..100 {
            pipeline.shift_mut().update(0, 1.0);
            pipeline.evidence_mut().update(2.0);
        }

        pipeline.reset();

        // After reset the e-value sits near its neutral value of 1.0.
        let result = pipeline.evaluate(&SystemState::new(10));
        assert!((result.evidence.e_value - 1.0).abs() < 0.5);
    }

    /// Huge likelihood ratios must not push the e-value to Inf or NaN.
    #[test]
    fn test_evidence_overflow_protection() {
        let mut acc = EvidenceAccumulator::new();
        for _ in 0..1000 {
            acc.update(1e100); // extreme input, expected to be clamped
        }
        assert!(acc.e_value().is_finite());

        // Reset restores the neutral e-value.
        acc.reset();
        assert_eq!(acc.e_value(), 1.0);
    }

    /// Tiny likelihood ratios must not drive the e-value negative or to NaN.
    #[test]
    fn test_evidence_underflow_protection() {
        let mut acc = EvidenceAccumulator::new();
        for _ in 0..1000 {
            acc.update(1e-100); // extreme input, expected to be clamped
        }
        assert!(acc.e_value().is_finite());
        assert!(acc.e_value() >= 0.0);
    }
}
// ============================================================================
// Concurrent-Style Stress Tests (Sequential Simulation)
// ============================================================================
mod concurrent_stress_tests {
    use super::*;

    /// Ten tiles fed an identical delta stream end up in lockstep.
    #[test]
    fn test_multiple_workers_same_syndrome_pattern() {
        let mut workers: Vec<WorkerTile> = (1..=10).map(WorkerTile::new).collect();
        for round in 0..100 {
            let delta = TileSyndromeDelta::new(
                (round % 64) as u16,
                ((round + 1) % 64) as u16,
                (round % 256) as u16,
            );
            for worker in workers.iter_mut() {
                worker.tick(&delta);
            }
        }
        // Every tile advanced through all 100 rounds.
        assert!(workers.iter().all(|w| w.tick == 100));
    }

    /// Fifty tiles each get a distinct stream; all still advance 100 ticks.
    #[test]
    fn test_multiple_workers_different_patterns() {
        let mut workers: Vec<WorkerTile> = (1..=50).map(WorkerTile::new).collect();
        for round in 0..100 {
            for (offset, worker) in workers.iter_mut().enumerate() {
                // Shift the pattern by the worker's position in the fabric.
                let base = round + offset;
                worker.tick(&TileSyndromeDelta::new(
                    (base % 64) as u16,
                    ((base + 1) % 64) as u16,
                    (base % 256) as u16,
                ));
            }
        }
        assert!(workers.iter().all(|w| w.tick == 100));
    }

    /// TileZero copes with a different report count on every merge.
    #[test]
    fn test_tilezero_varying_report_counts() {
        let mut tilezero = TileZero::new(GateThresholds::default());
        for i in 0..100 {
            let count = 1 + (i % 20);
            let reports: Vec<TileReport> = (1..=count as u8)
                .map(|id| {
                    let mut r = TileReport::new(id);
                    r.local_cut = 10.0;
                    r.shift_score = 0.1;
                    r.e_value = 200.0;
                    r
                })
                .collect();
            tilezero.merge_reports(reports);
        }
        // One receipt per merge regardless of batch size.
        assert_eq!(tilezero.receipt_log.len(), 100);
    }

    /// Buffer, shift-filter, and evidence updates interleave without issue.
    #[test]
    fn test_interleaved_operations() {
        let mut buffer = SyndromeBuffer::new(100);
        let mut filter = ShiftFilter::new(0.5, 100);
        let mut evidence = EvidenceAccumulator::new();
        for i in 0..1_000 {
            // One operation of each kind per iteration.
            buffer.push(SyndromeRound::new(i, i, i * 1_000, DetectorBitmap::new(64), 0));
            filter.update(i as usize % 64, (i as f64) % 10.0);
            evidence.update(1.0 + (i as f64 % 10.0) / 100.0);
            // Periodically read a window back out of the buffer.
            if i % 100 == 0 {
                let _ = buffer.window(10);
            }
        }
        // All three components stay functional.
        assert_eq!(buffer.len(), 100);
        assert!(evidence.e_value() > 1.0);
    }
}
// ============================================================================
// Boundary Condition Tests
// ============================================================================
mod boundary_tests {
    use super::*;
    // Zero-sized inputs: an empty system state and an empty report batch
    // must both produce a well-formed, non-Deny outcome.
    #[test]
    fn test_empty_state_handling() {
        // Empty filter pipeline
        let config = FilterConfig::default();
        let pipeline = FilterPipeline::new(config);
        let state = SystemState::new(0);
        let result = pipeline.evaluate(&state);
        assert!(result.verdict.is_some());
        // Empty tilezero
        let thresholds = GateThresholds::default();
        let mut tilezero = TileZero::new(thresholds);
        let decision = tilezero.merge_reports(vec![]);
        // Empty reports should produce some decision
        assert!(decision == GateDecision::Permit || decision == GateDecision::Defer);
    }
    // Minimal non-empty inputs: one round, one bit, one report.
    #[test]
    fn test_single_element_handling() {
        // Single round buffer
        let mut buffer = SyndromeBuffer::new(1);
        buffer.push(SyndromeRound::new(0, 0, 0, DetectorBitmap::new(64), 0));
        assert_eq!(buffer.len(), 1);
        assert_eq!(buffer.window(1).len(), 1);
        // Single bit bitmap
        let mut bitmap = DetectorBitmap::new(1);
        bitmap.set(0, true);
        assert_eq!(bitmap.fired_count(), 1);
        // Single report
        let thresholds = GateThresholds::default();
        let mut tilezero = TileZero::new(thresholds);
        let mut report = TileReport::new(1);
        report.local_cut = 10.0;
        report.shift_score = 0.1;
        report.e_value = 200.0;
        let decision = tilezero.merge_reports(vec![report]);
        assert_eq!(decision, GateDecision::Permit);
    }
    // Upper extremes: full bitmap, highest tile id, sustained high evidence.
    #[test]
    fn test_maximum_values() {
        // Max detectors
        let mut bitmap = DetectorBitmap::new(1024);
        for i in 0..1024 {
            bitmap.set(i, true);
        }
        assert_eq!(bitmap.fired_count(), 1024);
        // Max tile ID
        let tile = WorkerTile::new(255);
        assert_eq!(tile.tile_id, 255);
        // Very high e-value
        let mut evidence = EvidenceAccumulator::new();
        for _ in 0..100 {
            evidence.update(10.0);
        }
        assert!(evidence.e_value().is_finite());
    }
    // Lower extremes: zero detectors and a collapsing e-value.
    #[test]
    fn test_minimum_values() {
        // Min detector count
        let bitmap = DetectorBitmap::new(0);
        assert_eq!(bitmap.fired_count(), 0);
        // Very low e-value
        let mut evidence = EvidenceAccumulator::new();
        for _ in 0..100 {
            evidence.update(0.1);
        }
        let e = evidence.e_value();
        assert!(e.is_finite());
        assert!(e >= 0.0);
    }
    // A report sitting exactly on every threshold must still produce a
    // non-Deny decision; exact tie-breaking is left to the implementation.
    #[test]
    fn test_threshold_boundaries() {
        let thresholds = GateThresholds {
            structural_min_cut: 5.0,
            shift_max: 0.5,
            tau_deny: 0.01,
            tau_permit: 100.0,
            permit_ttl_ns: 4_000_000,
        };
        let mut tilezero = TileZero::new(thresholds);
        // Exactly at threshold
        let mut report = TileReport::new(1);
        report.local_cut = 5.0; // Exactly at threshold
        report.shift_score = 0.5; // Exactly at threshold
        report.e_value = 100.0; // Exactly at threshold
        let decision = tilezero.merge_reports(vec![report]);
        // At threshold behavior
        assert!(decision == GateDecision::Permit || decision == GateDecision::Defer);
    }
    // A cut just under the structural floor must be a hard Deny.
    #[test]
    fn test_just_below_thresholds() {
        let thresholds = GateThresholds {
            structural_min_cut: 5.0,
            shift_max: 0.5,
            tau_deny: 0.01,
            tau_permit: 100.0,
            permit_ttl_ns: 4_000_000,
        };
        let mut tilezero = TileZero::new(thresholds);
        // Just below structural threshold
        let mut report = TileReport::new(1);
        report.local_cut = 4.99;
        report.shift_score = 0.1;
        report.e_value = 200.0;
        let decision = tilezero.merge_reports(vec![report]);
        assert_eq!(decision, GateDecision::Deny);
    }
}
// ============================================================================
// Proptest Stress Tests
// ============================================================================
#[cfg(test)]
mod proptest_stress {
    use super::*;
    use proptest::prelude::*;
    proptest! {
        // Stress properties are expensive; run only 20 cases each.
        #![proptest_config(ProptestConfig::with_cases(20))]
        // Property: arbitrary push sequences never break the ring buffer's
        // capacity bound or poison its running statistics.
        #[test]
        fn prop_buffer_survives_random_operations(
            pushes in prop::collection::vec(0u64..10000, 100..1000),
            capacity in 10usize..200
        ) {
            let mut buffer = SyndromeBuffer::new(capacity);
            for round_id in pushes {
                let round = SyndromeRound::new(round_id, round_id, round_id * 1000, DetectorBitmap::new(64), 0);
                buffer.push(round);
            }
            // Buffer should be valid
            prop_assert!(buffer.len() <= capacity);
            prop_assert!(!buffer.statistics().avg_firing_rate.is_nan());
        }
        // Property: a worker tile survives any stream of in-range deltas.
        #[test]
        fn prop_worker_survives_random_deltas(
            syndromes in prop::collection::vec((0u16..64, 0u16..64, 0u16..256), 100..500)
        ) {
            let mut worker = WorkerTile::new(1);
            for (src, tgt, val) in syndromes {
                // NOTE(review): tgt is clamped to >= 1, presumably to avoid a
                // degenerate delta targeting vertex 0 - confirm against tick().
                let delta = TileSyndromeDelta::new(src, tgt.max(1), val);
                worker.tick(&delta);
            }
            // Worker should be valid
            prop_assert!(worker.tick > 0);
        }
        // Property: any batch of random reports yields one of the three
        // well-formed gate verdicts.
        #[test]
        fn prop_tilezero_survives_random_reports(
            report_values in prop::collection::vec(
                (0.0f64..20.0, 0.0f64..1.0, 0.01f64..500.0),
                1..50
            )
        ) {
            let thresholds = GateThresholds::default();
            let mut tilezero = TileZero::new(thresholds);
            let reports: Vec<TileReport> = report_values
                .iter()
                .enumerate()
                .map(|(i, (cut, shift, e_val))| {
                    let mut report = TileReport::new((i + 1) as u8);
                    report.local_cut = *cut;
                    report.shift_score = *shift;
                    report.e_value = *e_val;
                    report
                })
                .collect();
            let decision = tilezero.merge_reports(reports);
            // Decision should be valid
            prop_assert!(matches!(decision, GateDecision::Permit | GateDecision::Defer | GateDecision::Deny));
        }
    }
}

View File

@@ -0,0 +1,957 @@
//! Syndrome processing tests for ruQu coherence gate
//!
//! Tests for detector bitmap operations with SIMD-like performance,
//! syndrome buffer ring behavior, delta computation accuracy,
//! and buffer overflow handling.
use ruqu::syndrome::{
BufferStatistics, DetectorBitmap, SyndromeBuffer, SyndromeDelta, SyndromeRound,
};
use ruqu::MAX_DETECTORS;
// ============================================================================
// DetectorBitmap Tests - SIMD-like Performance
// ============================================================================
mod detector_bitmap_tests {
    use super::*;
    #[test]
    fn test_bitmap_creation() {
        let bitmap = DetectorBitmap::new(64);
        assert_eq!(bitmap.detector_count(), 64);
        assert_eq!(bitmap.fired_count(), 0);
        assert!(bitmap.is_empty());
    }
    #[test]
    fn test_bitmap_max_detectors() {
        let bitmap = DetectorBitmap::new(MAX_DETECTORS);
        assert_eq!(bitmap.detector_count(), MAX_DETECTORS);
        assert_eq!(bitmap.fired_count(), 0);
    }
    // Requesting one detector more than the maximum must panic.
    #[test]
    #[should_panic(expected = "count exceeds maximum")]
    fn test_bitmap_overflow_panics() {
        DetectorBitmap::new(MAX_DETECTORS + 1);
    }
    // set/get across word boundaries (bits 63/64 straddle two u64 words).
    #[test]
    fn test_bitmap_set_get() {
        let mut bitmap = DetectorBitmap::new(128);
        bitmap.set(0, true);
        bitmap.set(63, true);
        bitmap.set(64, true);
        bitmap.set(127, true);
        assert!(bitmap.get(0));
        assert!(bitmap.get(63));
        assert!(bitmap.get(64));
        assert!(bitmap.get(127));
        assert!(!bitmap.get(1));
        assert!(!bitmap.get(100));
    }
    #[test]
    fn test_bitmap_set_clear() {
        let mut bitmap = DetectorBitmap::new(64);
        bitmap.set(10, true);
        assert!(bitmap.get(10));
        bitmap.set(10, false);
        assert!(!bitmap.get(10));
    }
    #[test]
    fn test_bitmap_fired_count_popcount() {
        let mut bitmap = DetectorBitmap::new(256);
        // Set every 10th detector
        for i in (0..256).step_by(10) {
            bitmap.set(i, true);
        }
        assert_eq!(bitmap.fired_count(), 26); // 0, 10, 20, ..., 250
    }
    #[test]
    fn test_bitmap_fired_count_all() {
        let mut bitmap = DetectorBitmap::new(64);
        for i in 0..64 {
            bitmap.set(i, true);
        }
        assert_eq!(bitmap.fired_count(), 64);
    }
    // iter_fired yields set indices in ascending order.
    #[test]
    fn test_bitmap_iter_fired() {
        let mut bitmap = DetectorBitmap::new(128);
        bitmap.set(5, true);
        bitmap.set(64, true);
        bitmap.set(100, true);
        let fired: Vec<usize> = bitmap.iter_fired().collect();
        assert_eq!(fired, vec![5, 64, 100]);
    }
    #[test]
    fn test_bitmap_iter_fired_empty() {
        let bitmap = DetectorBitmap::new(64);
        let fired: Vec<usize> = bitmap.iter_fired().collect();
        assert!(fired.is_empty());
    }
    #[test]
    fn test_bitmap_iter_fired_all() {
        let mut bitmap = DetectorBitmap::new(64);
        for i in 0..64 {
            bitmap.set(i, true);
        }
        let fired: Vec<usize> = bitmap.iter_fired().collect();
        assert_eq!(fired.len(), 64);
        for (i, &val) in fired.iter().enumerate() {
            assert_eq!(val, i);
        }
    }
    // xor = symmetric difference: bits set in exactly one operand.
    #[test]
    fn test_bitmap_xor() {
        let mut a = DetectorBitmap::new(64);
        a.set(0, true);
        a.set(5, true);
        a.set(10, true);
        let mut b = DetectorBitmap::new(64);
        b.set(5, true);
        b.set(10, true);
        b.set(20, true);
        let result = a.xor(&b);
        assert!(result.get(0)); // Only in a
        assert!(!result.get(5)); // In both
        assert!(!result.get(10)); // In both
        assert!(result.get(20)); // Only in b
        assert_eq!(result.fired_count(), 2);
    }
    // and = intersection.
    #[test]
    fn test_bitmap_and() {
        let mut a = DetectorBitmap::new(64);
        a.set(0, true);
        a.set(5, true);
        let mut b = DetectorBitmap::new(64);
        b.set(5, true);
        b.set(10, true);
        let result = a.and(&b);
        assert!(!result.get(0));
        assert!(result.get(5));
        assert!(!result.get(10));
        assert_eq!(result.fired_count(), 1);
    }
    // or = union.
    #[test]
    fn test_bitmap_or() {
        let mut a = DetectorBitmap::new(64);
        a.set(0, true);
        a.set(5, true);
        let mut b = DetectorBitmap::new(64);
        b.set(5, true);
        b.set(10, true);
        let result = a.or(&b);
        assert!(result.get(0));
        assert!(result.get(5));
        assert!(result.get(10));
        assert_eq!(result.fired_count(), 3);
    }
    #[test]
    fn test_bitmap_clear() {
        let mut bitmap = DetectorBitmap::new(64);
        bitmap.set(0, true);
        bitmap.set(10, true);
        assert_eq!(bitmap.fired_count(), 2);
        bitmap.clear();
        assert_eq!(bitmap.fired_count(), 0);
        assert!(bitmap.is_empty());
    }
    // from_raw adopts pre-built u64 words directly.
    #[test]
    fn test_bitmap_from_raw() {
        let bits = [0x0101_0101_0101_0101u64; 16];
        let bitmap = DetectorBitmap::from_raw(bits, 1024);
        // Each word has 8 bits set (every 8th bit)
        assert_eq!(bitmap.fired_count(), 128); // 8 * 16
    }
    // raw_bits exposes the underlying word array; detector 64 is bit 0 of
    // word 1.
    #[test]
    fn test_bitmap_raw_bits() {
        let mut bitmap = DetectorBitmap::new(128);
        bitmap.set(0, true);
        bitmap.set(64, true);
        let bits = bitmap.raw_bits();
        assert_eq!(bits[0], 1); // Bit 0 set
        assert_eq!(bits[1], 1); // Bit 0 of word 1 (detector 64)
    }
    // Performance-oriented tests for SIMD-like behavior
    #[test]
    fn test_bitmap_bulk_operations_performance() {
        let mut a = DetectorBitmap::new(1024);
        let mut b = DetectorBitmap::new(1024);
        // Set alternating bits
        for i in (0..1024).step_by(2) {
            a.set(i, true);
        }
        for i in (1..1024).step_by(2) {
            b.set(i, true);
        }
        // These operations should be efficient (operating on 64 bits at a time)
        let xor_result = a.xor(&b);
        assert_eq!(xor_result.fired_count(), 1024); // All bits differ
        let and_result = a.and(&b);
        assert_eq!(and_result.fired_count(), 0); // No overlap
        let or_result = a.or(&b);
        assert_eq!(or_result.fired_count(), 1024); // All bits set
    }
    #[test]
    fn test_bitmap_popcount_performance() {
        let mut bitmap = DetectorBitmap::new(1024);
        // Set all bits
        for i in 0..1024 {
            bitmap.set(i, true);
        }
        // Popcount should use hardware instructions
        assert_eq!(bitmap.popcount(), 1024);
    }
}
// ============================================================================
// SyndromeRound Tests
// ============================================================================
mod syndrome_round_tests {
    use super::*;

    /// `new` stores all header fields and starts with no fired detectors.
    #[test]
    fn test_round_creation() {
        let round = SyndromeRound::new(1, 100, 1_000_000, DetectorBitmap::new(64), 5);
        assert_eq!(round.round_id, 1);
        assert_eq!(round.cycle, 100);
        assert_eq!(round.timestamp, 1_000_000);
        assert_eq!(round.source_tile, 5);
        assert_eq!(round.fired_count(), 0);
    }

    /// Rounds can also be built with struct-literal syntax.
    #[test]
    fn test_round_struct_syntax() {
        let mut detectors = DetectorBitmap::new(64);
        detectors.set(10, true);
        let round = SyndromeRound {
            round_id: 42,
            cycle: 200,
            timestamp: 2_000_000,
            detectors,
            source_tile: 0,
        };
        assert_eq!(round.round_id, 42);
        assert_eq!(round.fired_count(), 1);
    }

    /// `fired_count` reflects the number of set detector bits.
    #[test]
    fn test_round_fired_count() {
        let mut detectors = DetectorBitmap::new(64);
        for &bit in &[0, 10, 63] {
            detectors.set(bit, true);
        }
        let round = SyndromeRound::new(1, 100, 1_000_000, detectors, 0);
        assert_eq!(round.fired_count(), 3);
    }

    /// `iter_fired` yields fired detector indices in ascending order.
    #[test]
    fn test_round_iter_fired() {
        let mut detectors = DetectorBitmap::new(64);
        detectors.set(5, true);
        detectors.set(10, true);
        let round = SyndromeRound::new(1, 100, 1_000_000, detectors, 0);
        assert_eq!(round.iter_fired().collect::<Vec<usize>>(), vec![5, 10]);
    }

    /// `delta_to` records both endpoints and counts the detectors whose
    /// state changed between the two rounds.
    #[test]
    fn test_round_delta_to() {
        let mut before = DetectorBitmap::new(64);
        before.set(0, true);
        before.set(5, true);
        let mut after = DetectorBitmap::new(64);
        after.set(5, true);
        after.set(10, true);

        let earlier = SyndromeRound::new(1, 100, 1_000, before, 0);
        let later = SyndromeRound::new(2, 101, 2_000, after, 0);
        let delta = earlier.delta_to(&later);

        assert_eq!(delta.from_round, 1);
        assert_eq!(delta.to_round, 2);
        // Detectors 0 and 10 flipped; detector 5 stayed on.
        assert_eq!(delta.flip_count(), 2);
    }
}
// ============================================================================
// SyndromeBuffer Ring Behavior Tests
// ============================================================================
mod syndrome_buffer_tests {
use super::*;
#[test]
fn test_buffer_creation() {
let buffer = SyndromeBuffer::new(100);
assert_eq!(buffer.capacity(), 100);
assert_eq!(buffer.len(), 0);
assert!(buffer.is_empty());
assert!(!buffer.is_full());
}
#[test]
#[should_panic(expected = "capacity must be positive")]
fn test_buffer_zero_capacity() {
SyndromeBuffer::new(0);
}
#[test]
fn test_buffer_push_single() {
let mut buffer = SyndromeBuffer::new(10);
let round = SyndromeRound::new(1, 100, 1_000, DetectorBitmap::new(64), 0);
buffer.push(round);
assert_eq!(buffer.len(), 1);
assert!(!buffer.is_empty());
}
#[test]
fn test_buffer_push_to_capacity() {
let mut buffer = SyndromeBuffer::new(10);
for i in 0..10 {
let round = SyndromeRound::new(i, i, i * 1_000, DetectorBitmap::new(64), 0);
buffer.push(round);
}
assert_eq!(buffer.len(), 10);
assert!(buffer.is_full());
}
#[test]
fn test_buffer_ring_overflow() {
let mut buffer = SyndromeBuffer::new(5);
// Push 10 rounds into buffer of capacity 5
for i in 0..10 {
let round = SyndromeRound::new(i, i, i * 1_000, DetectorBitmap::new(64), 0);
buffer.push(round);
}
// Should still have capacity 5
assert_eq!(buffer.len(), 5);
// Oldest should be round 5 (rounds 0-4 evicted)
assert!(buffer.get(4).is_none());
assert!(buffer.get(5).is_some());
}
#[test]
fn test_buffer_watermark_updates() {
let mut buffer = SyndromeBuffer::new(5);
// Fill buffer
for i in 0..5 {
let round = SyndromeRound::new(i, i, i * 1_000, DetectorBitmap::new(64), 0);
buffer.push(round);
}
let initial_watermark = buffer.watermark();
// Overflow
for i in 5..10 {
let round = SyndromeRound::new(i, i, i * 1_000, DetectorBitmap::new(64), 0);
buffer.push(round);
}
// Watermark should have advanced
assert!(buffer.watermark() > initial_watermark);
}
#[test]
fn test_buffer_window_basic() {
let mut buffer = SyndromeBuffer::new(100);
for i in 0..50 {
let round = SyndromeRound::new(i, i, i * 1_000, DetectorBitmap::new(64), 0);
buffer.push(round);
}
let window = buffer.window(10);
assert_eq!(window.len(), 10);
assert_eq!(window[0].round_id, 40); // Oldest in window
assert_eq!(window[9].round_id, 49); // Newest in window
}
#[test]
fn test_buffer_window_larger_than_available() {
let mut buffer = SyndromeBuffer::new(100);
for i in 0..5 {
let round = SyndromeRound::new(i, i, i * 1_000, DetectorBitmap::new(64), 0);
buffer.push(round);
}
let window = buffer.window(100);
assert_eq!(window.len(), 5); // Only 5 available
}
#[test]
fn test_buffer_window_empty() {
let buffer = SyndromeBuffer::new(100);
let window = buffer.window(10);
assert!(window.is_empty());
}
#[test]
fn test_buffer_get_by_round_id() {
let mut buffer = SyndromeBuffer::new(100);
for i in 0..50 {
let mut detectors = DetectorBitmap::new(64);
detectors.set(i as usize % 64, true);
let round = SyndromeRound::new(i, i, i * 1_000, detectors, 0);
buffer.push(round);
}
let round = buffer.get(25);
assert!(round.is_some());
assert_eq!(round.unwrap().round_id, 25);
let nonexistent = buffer.get(999);
assert!(nonexistent.is_none());
}
#[test]
fn test_buffer_get_evicted_round() {
let mut buffer = SyndromeBuffer::new(5);
for i in 0..10 {
let round = SyndromeRound::new(i, i, i * 1_000, DetectorBitmap::new(64), 0);
buffer.push(round);
}
// Rounds 0-4 should be evicted
for i in 0..5 {
assert!(buffer.get(i).is_none());
}
// Rounds 5-9 should exist
for i in 5..10 {
assert!(buffer.get(i).is_some());
}
}
#[test]
fn test_buffer_iter() {
let mut buffer = SyndromeBuffer::new(100);
for i in 0..10 {
let round = SyndromeRound::new(i, i, i * 1_000, DetectorBitmap::new(64), 0);
buffer.push(round);
}
let ids: Vec<u64> = buffer.iter().map(|r| r.round_id).collect();
assert_eq!(ids, vec![0, 1, 2, 3, 4, 5, 6, 7, 8, 9]);
}
#[test]
fn test_buffer_iter_after_overflow() {
let mut buffer = SyndromeBuffer::new(5);
for i in 0..10 {
let round = SyndromeRound::new(i, i, i * 1_000, DetectorBitmap::new(64), 0);
buffer.push(round);
}
let ids: Vec<u64> = buffer.iter().map(|r| r.round_id).collect();
assert_eq!(ids, vec![5, 6, 7, 8, 9]);
}
#[test]
fn test_buffer_clear() {
let mut buffer = SyndromeBuffer::new(100);
for i in 0..50 {
let round = SyndromeRound::new(i, i, i * 1_000, DetectorBitmap::new(64), 0);
buffer.push(round);
}
buffer.clear();
assert_eq!(buffer.len(), 0);
assert!(buffer.is_empty());
}
#[test]
fn test_buffer_statistics() {
let mut buffer = SyndromeBuffer::new(10);
for i in 0..20 {
let mut detectors = DetectorBitmap::new(64);
for j in 0..(i % 5) as usize {
detectors.set(j, true);
}
let round = SyndromeRound::new(i, i, i * 1_000, detectors, 0);
buffer.push(round);
}
let stats = buffer.statistics();
assert_eq!(stats.total_rounds, 20);
assert_eq!(stats.current_size, 10);
assert_eq!(stats.capacity, 10);
assert_eq!(stats.evicted_rounds, 10);
assert!(stats.avg_firing_rate >= 0.0);
}
}
// ============================================================================
// SyndromeDelta Computation Tests
// ============================================================================
mod syndrome_delta_tests {
    use super::*;

    #[test]
    fn test_delta_compute_basic() {
        // Detector 5 is set in both rounds; 0 and 10 differ between them.
        let mut before = DetectorBitmap::new(64);
        before.set(0, true);
        before.set(5, true);
        let mut after = DetectorBitmap::new(64);
        after.set(5, true);
        after.set(10, true);
        let delta = SyndromeDelta::compute(
            &SyndromeRound::new(1, 100, 1_000, before, 0),
            &SyndromeRound::new(2, 101, 2_000, after, 0),
        );
        assert_eq!(delta.from_round, 1);
        assert_eq!(delta.to_round, 2);
        assert_eq!(delta.flip_count(), 2);
    }

    #[test]
    fn test_delta_quiet() {
        // Identical detector patterns produce a quiet delta.
        let mut fired = DetectorBitmap::new(64);
        fired.set(5, true);
        let earlier = SyndromeRound::new(1, 100, 1_000, fired.clone(), 0);
        let later = SyndromeRound::new(2, 101, 2_000, fired, 0);
        let delta = SyndromeDelta::compute(&earlier, &later);
        assert!(delta.is_quiet());
        assert_eq!(delta.flip_count(), 0);
    }

    #[test]
    fn test_delta_not_quiet() {
        // A single newly-fired detector is enough to break quiet.
        let mut after = DetectorBitmap::new(64);
        after.set(0, true);
        let delta = SyndromeDelta::compute(
            &SyndromeRound::new(1, 100, 1_000, DetectorBitmap::new(64), 0),
            &SyndromeRound::new(2, 101, 2_000, after, 0),
        );
        assert!(!delta.is_quiet());
    }

    #[test]
    fn test_delta_activity_level() {
        // 10 flips across 100 detectors should report activity 0.1.
        let mut after = DetectorBitmap::new(100);
        (0..10).for_each(|det| after.set(det, true));
        let delta = SyndromeDelta::compute(
            &SyndromeRound::new(1, 100, 1_000, DetectorBitmap::new(100), 0),
            &SyndromeRound::new(2, 101, 2_000, after, 0),
        );
        assert!((delta.activity_level() - 0.1).abs() < 0.001);
    }

    #[test]
    fn test_delta_activity_level_zero() {
        // With zero detectors the activity level is reported as 0.0.
        let delta = SyndromeDelta::compute(
            &SyndromeRound::new(1, 100, 1_000, DetectorBitmap::new(0), 0),
            &SyndromeRound::new(2, 101, 2_000, DetectorBitmap::new(0), 0),
        );
        assert_eq!(delta.activity_level(), 0.0);
    }

    #[test]
    fn test_delta_span() {
        // Span is the distance between the two round ids.
        let delta = SyndromeDelta::compute(
            &SyndromeRound::new(100, 100, 1_000, DetectorBitmap::new(64), 0),
            &SyndromeRound::new(110, 110, 2_000, DetectorBitmap::new(64), 0),
        );
        assert_eq!(delta.span(), 10);
    }

    #[test]
    fn test_delta_iter_flipped() {
        // Flipped indices are yielded in ascending order.
        let mut before = DetectorBitmap::new(64);
        before.set(0, true);
        let mut after = DetectorBitmap::new(64);
        after.set(10, true);
        after.set(20, true);
        let delta = SyndromeDelta::compute(
            &SyndromeRound::new(1, 100, 1_000, before, 0),
            &SyndromeRound::new(2, 101, 2_000, after, 0),
        );
        assert_eq!(delta.iter_flipped().collect::<Vec<usize>>(), vec![0, 10, 20]);
    }

    #[test]
    fn test_delta_new_constructor() {
        let delta = SyndromeDelta::new(1, 5, DetectorBitmap::new(64));
        assert_eq!(delta.from_round, 1);
        assert_eq!(delta.to_round, 5);
        assert_eq!(delta.span(), 4);
    }

    #[test]
    fn test_delta_accuracy_all_bits_flip() {
        // Every detector clears between rounds: full flip count, activity 1.0.
        let mut before = DetectorBitmap::new(64);
        (0..64).for_each(|det| before.set(det, true));
        let delta = SyndromeDelta::compute(
            &SyndromeRound::new(1, 100, 1_000, before, 0),
            &SyndromeRound::new(2, 101, 2_000, DetectorBitmap::new(64), 0),
        );
        assert_eq!(delta.flip_count(), 64);
        assert_eq!(delta.activity_level(), 1.0);
    }
}
// ============================================================================
// Buffer Overflow Handling Tests
// ============================================================================
mod buffer_overflow_tests {
    use super::*;

    #[test]
    fn test_buffer_graceful_overflow() {
        let mut buf = SyndromeBuffer::new(100);
        // Overflow the ring 10x over; it must stay consistent.
        for id in 0..1000u64 {
            buf.push(SyndromeRound::new(id, id, id * 1_000, DetectorBitmap::new(64), 0));
        }
        assert_eq!(buf.len(), 100);
        assert!(buf.is_full());
        // Only the most recent 100 rounds remain addressable.
        for id in 900..1000 {
            assert!(buf.get(id).is_some());
        }
    }

    #[test]
    fn test_buffer_statistics_after_overflow() {
        let mut buf = SyndromeBuffer::new(10);
        for id in 0..100u64 {
            buf.push(SyndromeRound::new(id, id, id * 1_000, DetectorBitmap::new(64), 0));
        }
        // Lifetime counters keep counting past eviction.
        let stats = buf.statistics();
        assert_eq!(stats.total_rounds, 100);
        assert_eq!(stats.evicted_rounds, 90);
        assert_eq!(stats.current_size, 10);
    }

    #[test]
    fn test_buffer_continuous_operation() {
        let mut buf = SyndromeBuffer::new(50);
        // Simulate a long-running session with occasional detector activity.
        for id in 0..10_000u64 {
            let mut fired = DetectorBitmap::new(64);
            if id % 100 == 0 {
                fired.set(0, true); // sporadic syndrome event
            }
            buf.push(SyndromeRound::new(id, id, id * 1_000, fired, 0));
            if id % 1000 == 0 {
                // Interleave window reads to exercise the ring while it fills.
                assert_eq!(buf.window(10).len(), std::cmp::min(10, buf.len()));
            }
        }
        assert_eq!(buf.len(), 50);
    }

    #[test]
    fn test_buffer_window_wrap_around() {
        let mut buf = SyndromeBuffer::new(10);
        // 15 pushes into capacity 10 force the ring indices to wrap.
        for id in 0..15u64 {
            buf.push(SyndromeRound::new(id, id, id * 1_000, DetectorBitmap::new(64), 0));
        }
        let recent = buf.window(10);
        assert_eq!(recent.len(), 10);
        assert_eq!(recent[0].round_id, 5); // oldest survivor
        assert_eq!(recent[9].round_id, 14); // newest round
    }
}
// ============================================================================
// Proptest Property-Based Tests
// ============================================================================
// Property-based tests (via the `proptest` crate) for bitmap, buffer, and
// delta invariants. Each property runs 100 randomized cases.
#[cfg(test)]
mod proptest_syndrome {
    use super::*;
    use proptest::prelude::*;
    proptest! {
        #![proptest_config(ProptestConfig::with_cases(100))]
        // fired_count() must equal the number of DISTINCT indices set,
        // regardless of how many duplicate set() calls were made.
        #[test]
        fn prop_bitmap_popcount_equals_set_count(
            detector_indices in prop::collection::vec(0usize..1024, 0..100)
        ) {
            let mut bitmap = DetectorBitmap::new(1024);
            let mut unique_indices: std::collections::HashSet<usize> = std::collections::HashSet::new();
            for idx in detector_indices {
                bitmap.set(idx, true);
                unique_indices.insert(idx);
            }
            // Setting a bit twice is idempotent, so compare to the unique count.
            prop_assert_eq!(bitmap.fired_count(), unique_indices.len());
        }
        // a.xor(b) and b.xor(a) must agree bit-for-bit (XOR is commutative).
        #[test]
        fn prop_xor_commutative(
            indices_a in prop::collection::vec(0usize..64, 0..10),
            indices_b in prop::collection::vec(0usize..64, 0..10)
        ) {
            let mut a = DetectorBitmap::new(64);
            let mut b = DetectorBitmap::new(64);
            for idx in indices_a {
                a.set(idx, true);
            }
            for idx in indices_b {
                b.set(idx, true);
            }
            let ab = a.xor(&b);
            let ba = b.xor(&a);
            // XOR should be commutative
            prop_assert_eq!(ab.fired_count(), ba.fired_count());
            for i in 0..64 {
                prop_assert_eq!(ab.get(i), ba.get(i));
            }
        }
        // window(k) never exceeds either the requested size or what is stored.
        #[test]
        fn prop_buffer_window_size_bounded(
            capacity in 10usize..100,
            push_count in 0usize..200,
            window_size in 1usize..50
        ) {
            let mut buffer = SyndromeBuffer::new(capacity);
            for i in 0..push_count as u64 {
                let round = SyndromeRound::new(i, i, i * 1000, DetectorBitmap::new(64), 0);
                buffer.push(round);
            }
            let window = buffer.window(window_size);
            // Window size should be min(requested, available)
            let expected_size = window_size.min(push_count).min(capacity);
            prop_assert_eq!(window.len(), expected_size);
        }
        // A delta between two 64-detector rounds can never flip more than 64 bits.
        #[test]
        fn prop_delta_flip_count_bounded(
            set_a in prop::collection::vec(0usize..64, 0..64),
            set_b in prop::collection::vec(0usize..64, 0..64)
        ) {
            let mut d1 = DetectorBitmap::new(64);
            let mut d2 = DetectorBitmap::new(64);
            for idx in set_a {
                d1.set(idx, true);
            }
            for idx in set_b {
                d2.set(idx, true);
            }
            let round1 = SyndromeRound::new(0, 0, 0, d1, 0);
            let round2 = SyndromeRound::new(1, 1, 1, d2, 0);
            let delta = SyndromeDelta::compute(&round1, &round2);
            // Flip count should be bounded by detector count
            prop_assert!(delta.flip_count() <= 64);
        }
    }
}
// ============================================================================
// Edge Case Tests
// ============================================================================
mod edge_cases {
    use super::*;

    #[test]
    fn test_bitmap_single_detector() {
        // Smallest non-empty bitmap.
        assert_eq!(DetectorBitmap::new(1).detector_count(), 1);
    }

    #[test]
    fn test_bitmap_boundary_word_crossing() {
        let mut bitmap = DetectorBitmap::new(128);
        // Straddle the word boundary at bit 64.
        for bit in [63, 64, 65] {
            bitmap.set(bit, true);
        }
        for bit in [63, 64, 65] {
            assert!(bitmap.get(bit));
        }
        assert_eq!(bitmap.fired_count(), 3);
    }

    #[test]
    fn test_buffer_single_capacity() {
        let mut buf = SyndromeBuffer::new(1);
        buf.push(SyndromeRound::new(0, 0, 0, DetectorBitmap::new(64), 0));
        assert_eq!(buf.len(), 1);
        // A second push into capacity 1 evicts the first round.
        buf.push(SyndromeRound::new(1, 1, 1, DetectorBitmap::new(64), 0));
        assert_eq!(buf.len(), 1);
        assert!(buf.get(0).is_none());
        assert!(buf.get(1).is_some());
    }

    #[test]
    fn test_delta_same_round() {
        // Comparing a round against itself yields a quiet, zero-span delta.
        let round = SyndromeRound::new(1, 100, 1_000, DetectorBitmap::new(64), 0);
        let delta = SyndromeDelta::compute(&round, &round);
        assert!(delta.is_quiet());
        assert_eq!(delta.span(), 0);
    }
}

File diff suppressed because it is too large Load Diff