Squashed 'vendor/ruvector/' content from commit b64c2172

git-subtree-dir: vendor/ruvector
git-subtree-split: b64c21726f2bb37286d9ee36a7869fef60cc6900
This commit is contained in:
ruv
2026-02-28 14:39:40 -05:00
commit d803bfe2b1
7854 changed files with 3522914 additions and 0 deletions

View File

@@ -0,0 +1,213 @@
//! # Interference Search
//!
//! Concepts interfere during retrieval. Each concept can exist in a
//! superposition of multiple meanings, each with a complex amplitude.
//! When a search context is applied, the amplitudes interfere --
//! meanings aligned with the context get constructively boosted,
//! while misaligned meanings destructively cancel.
//!
//! This replaces simple cosine reranking with a quantum-inspired
//! interference model where polysemous concepts naturally resolve
//! to context-appropriate meanings.
use ruqu_core::types::Complex;
use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};
// ---------------------------------------------------------------------------
// Public types
// ---------------------------------------------------------------------------
/// A single meaning within a superposition: a label, an embedding, and a
/// complex amplitude.
#[derive(Debug, Clone)]
pub struct Meaning {
    /// Human-readable name for this sense of the concept.
    pub label: String,
    /// Classical embedding vector for this meaning.
    pub embedding: Vec<f64>,
    /// Complex amplitude; |amplitude|^2 acts as this meaning's base weight.
    pub amplitude: Complex,
}
/// A concept in superposition of multiple meanings.
#[derive(Debug, Clone)]
pub struct ConceptSuperposition {
    /// Identifier for the concept (opaque to this module).
    pub concept_id: String,
    /// Candidate meanings. `uniform` produces normalised amplitudes;
    /// `with_amplitudes` stores whatever the caller supplies unchanged.
    pub meanings: Vec<Meaning>,
}
/// Score for a single meaning after interference with a context.
#[derive(Debug, Clone)]
pub struct InterferenceScore {
    /// Label of the meaning this score belongs to.
    pub label: String,
    /// |effective amplitude|^2 after context scaling.
    pub probability: f64,
    /// The context-scaled (effective) amplitude itself.
    pub amplitude: Complex,
}
/// A concept with its interference-computed relevance score.
#[derive(Debug, Clone)]
pub struct ConceptScore {
    /// Identifier copied from the scored `ConceptSuperposition`.
    pub concept_id: String,
    /// Sum of per-meaning interference probabilities for this concept.
    pub relevance: f64,
    /// Label of the highest-probability meaning under the query context
    /// (empty string when the concept has no meanings).
    pub dominant_meaning: String,
}
// ---------------------------------------------------------------------------
// Implementation
// ---------------------------------------------------------------------------
impl ConceptSuperposition {
    /// Create a uniform superposition: all meanings get equal amplitude
    /// with zero phase.
    ///
    /// With `n` meanings each amplitude is `1/sqrt(n)`, so the squared
    /// amplitudes sum to 1. An empty meaning list yields amplitude 0.
    pub fn uniform(concept_id: &str, meanings: Vec<(String, Vec<f64>)>) -> Self {
        let n = meanings.len();
        let amp = if n > 0 { 1.0 / (n as f64).sqrt() } else { 0.0 };
        let meanings = meanings
            .into_iter()
            .map(|(label, embedding)| Meaning {
                label,
                embedding,
                // Purely real amplitude: zero phase at creation.
                amplitude: Complex::new(amp, 0.0),
            })
            .collect();
        Self {
            concept_id: concept_id.to_string(),
            meanings,
        }
    }

    /// Create a superposition with explicit complex amplitudes.
    ///
    /// No normalisation is applied; amplitudes are stored exactly as given.
    pub fn with_amplitudes(concept_id: &str, meanings: Vec<(String, Vec<f64>, Complex)>) -> Self {
        let meanings = meanings
            .into_iter()
            .map(|(label, embedding, amplitude)| Meaning {
                label,
                embedding,
                amplitude,
            })
            .collect();
        Self {
            concept_id: concept_id.to_string(),
            meanings,
        }
    }

    /// Compute interference scores for each meaning given a context embedding.
    ///
    /// For each meaning, the context modifies the amplitude:
    ///     effective_amplitude = original_amplitude * (1 + similarity(meaning, context))
    ///
    /// Meanings aligned with the context get amplified; orthogonal meanings
    /// stay the same; opposing meanings get attenuated.
    ///
    /// Returns scores sorted by probability (descending).
    pub fn interfere(&self, context: &[f64]) -> Vec<InterferenceScore> {
        let mut scores: Vec<InterferenceScore> = self
            .meanings
            .iter()
            .map(|m| {
                let sim = cosine_similarity(&m.embedding, context);
                // Scale amplitude by (1 + sim). For sim in [-1, 1], this gives
                // a factor in [0, 2]. Negative similarity attenuates.
                let scale = (1.0 + sim).max(0.0);
                let effective = m.amplitude * scale;
                InterferenceScore {
                    label: m.label.clone(),
                    probability: effective.norm_sq(),
                    amplitude: effective,
                }
            })
            .collect();
        // Descending by probability; NaN comparisons fall back to Equal so
        // the sort never panics.
        scores.sort_by(|a, b| {
            b.probability
                .partial_cmp(&a.probability)
                .unwrap_or(std::cmp::Ordering::Equal)
        });
        scores
    }

    /// Collapse the superposition to a single meaning by sampling from
    /// the interference-weighted probability distribution.
    ///
    /// Deterministic for a fixed `context` and `seed`. Returns an empty
    /// string when the concept has no meanings.
    pub fn collapse(&self, context: &[f64], seed: u64) -> String {
        let scores = self.interfere(context);
        let total: f64 = scores.iter().map(|s| s.probability).sum();
        if total < 1e-15 {
            // Degenerate case: return first meaning if available
            return scores.first().map(|s| s.label.clone()).unwrap_or_default();
        }
        let mut rng = StdRng::seed_from_u64(seed);
        // Inverse-CDF sampling over the (unnormalised) probabilities.
        let r: f64 = rng.gen::<f64>() * total;
        let mut cumulative = 0.0;
        for score in &scores {
            cumulative += score.probability;
            if r <= cumulative {
                return score.label.clone();
            }
        }
        // Floating-point round-off can leave r just above the final
        // cumulative sum; fall back to the last entry.
        scores.last().map(|s| s.label.clone()).unwrap_or_default()
    }

    /// Return the dominant meaning: the one with the largest |amplitude|^2
    /// (before any context is applied).
    ///
    /// Returns `None` when the concept has no meanings.
    pub fn dominant(&self) -> Option<&Meaning> {
        self.meanings.iter().max_by(|a, b| {
            a.amplitude
                .norm_sq()
                .partial_cmp(&b.amplitude.norm_sq())
                .unwrap_or(std::cmp::Ordering::Equal)
        })
    }
}
/// Run an interference search across multiple concepts, ranking them by
/// relevance to the given query context.
///
/// Each concept's relevance is the sum of its per-meaning interference
/// probabilities; its dominant meaning is the top-scoring label (empty
/// string for a concept with no meanings).
///
/// Returns concepts sorted by relevance (descending).
pub fn interference_search(
    concepts: &[ConceptSuperposition],
    context: &[f64],
) -> Vec<ConceptScore> {
    let mut ranked = Vec::with_capacity(concepts.len());
    for concept in concepts {
        // `interfere` returns scores sorted by probability, so the first
        // entry (if any) is the dominant meaning under this context.
        let meaning_scores = concept.interfere(context);
        let total_relevance = meaning_scores
            .iter()
            .fold(0.0_f64, |acc, s| acc + s.probability);
        let top_label = match meaning_scores.first() {
            Some(best) => best.label.clone(),
            None => String::new(),
        };
        ranked.push(ConceptScore {
            concept_id: concept.concept_id.clone(),
            relevance: total_relevance,
            dominant_meaning: top_label,
        });
    }
    // Highest relevance first; NaN pairs compare as equal so the sort
    // cannot panic.
    ranked.sort_by(|lhs, rhs| {
        rhs.relevance
            .partial_cmp(&lhs.relevance)
            .unwrap_or(std::cmp::Ordering::Equal)
    });
    ranked
}
/// Cosine similarity between two vectors.
///
/// The dot product is taken over the overlapping prefix, but each norm
/// includes *all* of that vector's elements, so a length mismatch reduces
/// the similarity instead of being silently ignored. This matches the
/// behaviour of `QuantumCollapseSearch::similarity` elsewhere in this
/// crate; the previous version truncated both norms to the shorter length,
/// overstating similarity for mismatched inputs.
///
/// Returns 0.0 for empty or zero-norm inputs; the result is clamped to
/// [-1, 1] to absorb floating-point drift.
fn cosine_similarity(a: &[f64], b: &[f64]) -> f64 {
    if a.is_empty() || b.is_empty() {
        return 0.0;
    }
    // `zip` stops at the shorter slice, so the dot product covers exactly
    // the shared prefix.
    let dot: f64 = a.iter().zip(b.iter()).map(|(x, y)| x * y).sum();
    // Full-length norms: elements beyond the shared prefix still count.
    let norm_a: f64 = a.iter().map(|x| x * x).sum();
    let norm_b: f64 = b.iter().map(|y| y * y).sum();
    let denom = norm_a.sqrt() * norm_b.sqrt();
    if denom < 1e-15 {
        0.0
    } else {
        (dot / denom).clamp(-1.0, 1.0)
    }
}

View File

@@ -0,0 +1,28 @@
//! # ruqu-exotic — Exotic Quantum-Classical Hybrid Algorithms
//!
//! Novel algorithms that emerge from embedding a quantum simulation engine
//! inside a vector database stack. These were structurally impossible before
//! because the required primitives (amplitude space, interference, decoherence,
//! syndrome extraction) did not coexist with vector search infrastructure.
//!
//! ## Modules
//!
//! | Module | Concept | What it replaces |
//! |--------|---------|-----------------|
//! | [`quantum_decay`] | Embeddings decohere instead of being deleted | TTL-based eviction |
//! | [`interference_search`] | Concepts interfere during retrieval | Cosine reranking |
//! | [`quantum_collapse`] | Search collapses from superposition | Deterministic top-k |
//! | [`reasoning_qec`] | Surface-code correction on reasoning traces | Semantic checks |
//! | [`swarm_interference`] | Agents interfere instead of voting | Consensus protocols |
//! | [`syndrome_diagnosis`] | QEC syndrome extraction for system diagnosis | Log-based monitoring |
//! | [`reversible_memory`] | Time-reversible state for counterfactual debugging | Forward-only ML |
//! | [`reality_check`] | Browser-native quantum verification circuits | Trust-based claims |
// Public submodules — one per exotic algorithm family; the table in the
// module documentation above maps each to the classical mechanism it replaces.
pub mod interference_search;
pub mod quantum_collapse;
pub mod quantum_decay;
pub mod reality_check;
pub mod reasoning_qec;
pub mod reversible_memory;
pub mod swarm_interference;
pub mod syndrome_diagnosis;

View File

@@ -0,0 +1,400 @@
//! # Quantum Collapse Search
//!
//! Instead of deterministic top-k retrieval, encode search candidates as
//! quantum amplitudes. Apply Grover-like iterations biased by query similarity,
//! then "measure" to collapse to a single result. Nondeterministic but
//! statistically stable -- repeated shots converge to a reproducible
//! frequency distribution weighted by relevance.
//!
//! ## Algorithm
//!
//! 1. Initialise a uniform superposition over all candidate slots.
//! 2. **Oracle**: apply a phase rotation proportional to cosine similarity
//! between the query and each candidate embedding.
//! 3. **Diffusion**: inversion about the mean amplitude (Grover diffusion).
//! 4. Repeat for the requested number of iterations.
//! 5. **Collapse**: sample one index from the |amplitude|^2 distribution.
//!
//! The oracle biases the superposition toward high-similarity candidates.
//! Multiple collapses yield a frequency distribution that concentrates on
//! the most relevant candidates while still allowing serendipitous discovery.
use ruqu_core::types::Complex;
use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};
use std::f64::consts::PI;
// ---------------------------------------------------------------------------
// Public types
// ---------------------------------------------------------------------------
/// A quantum search engine that collapses from superposition rather than
/// ranking deterministically.
///
/// Derives `Debug`/`Clone` for parity with [`CollapseResult`] and so the
/// engine can be logged and duplicated cheaply in tests.
#[derive(Debug, Clone)]
pub struct QuantumCollapseSearch {
    /// Number of qubits (encodes up to 2^n candidate slots).
    num_qubits: u32,
    /// Candidate embeddings, padded with zero-vectors to length 2^num_qubits.
    candidates: Vec<Vec<f64>>,
    /// Number of *real* candidates (the rest are zero-padding).
    num_real: usize,
}
/// Result of a single collapse measurement.
#[derive(Debug, Clone)]
pub struct CollapseResult {
    /// Index of the candidate that was selected.
    pub index: usize,
    /// Amplitude magnitude before collapse (acts as confidence).
    /// NOTE(review): for a normalised amplitude vector this lies in [0, 1];
    /// confirm the oracle/diffusion steps preserve normalisation.
    pub amplitude: f64,
    /// `true` if the collapse landed on a padding slot (no real candidate).
    pub is_padding: bool,
}
// ---------------------------------------------------------------------------
// Implementation
// ---------------------------------------------------------------------------
impl QuantumCollapseSearch {
    /// Number of qubits used to encode the candidate space.
    pub fn num_qubits(&self) -> u32 {
        self.num_qubits
    }

    /// Number of real (non-padding) candidates.
    pub fn num_real(&self) -> usize {
        self.num_real
    }

    /// Create a search engine from candidate embeddings.
    ///
    /// The candidate list is padded with empty vectors to the next power of two
    /// so that the amplitude vector has length 2^n. Zero or one candidates
    /// still use one qubit (two slots), since an amplitude vector needs at
    /// least length 2.
    pub fn new(candidates: Vec<Vec<f64>>) -> Self {
        let num_real = candidates.len();
        // Determine the number of qubits needed. Integer bit arithmetic
        // (ceil(log2) via next_power_of_two + trailing_zeros) is exact,
        // unlike the previous `(n as f64).log2().ceil()`, which can misround
        // for counts near a power of two at the limits of f64 precision.
        let num_qubits = if num_real <= 1 {
            1
        } else {
            num_real.next_power_of_two().trailing_zeros()
        };
        let total = 1usize << num_qubits;
        let mut padded = candidates;
        padded.resize(total, Vec::new());
        Self {
            num_qubits,
            candidates: padded,
            num_real,
        }
    }

    /// Run a single quantum collapse search.
    ///
    /// 1. Initialise a uniform superposition over all candidate slots.
    /// 2. For each iteration, apply the similarity-biased oracle followed by
    ///    the Grover diffusion operator.
    /// 3. Sample one index from the resulting probability distribution.
    ///
    /// Deterministic for a fixed query, iteration count, and `seed`.
    pub fn search(&self, query: &[f64], iterations: usize, seed: u64) -> CollapseResult {
        let n = self.candidates.len();
        assert!(n > 0, "no candidates");
        // --- Uniform superposition ---
        let amp = 1.0 / (n as f64).sqrt();
        let mut amplitudes: Vec<Complex> = vec![Complex::new(amp, 0.0); n];
        // --- Grover-like iterations ---
        for _ in 0..iterations {
            // Oracle: phase rotation proportional to similarity
            self.apply_oracle(query, &mut amplitudes);
            // Diffusion: inversion about the mean
            Self::apply_diffusion(&mut amplitudes);
        }
        // --- Collapse (sample from |amplitude|^2 distribution) ---
        self.collapse(&amplitudes, seed)
    }

    /// Run `num_shots` independent collapses and return a frequency
    /// distribution: `Vec<(index, count)>` sorted by count descending.
    ///
    /// This demonstrates statistical stability: the same query produces a
    /// reproducible distribution over repeated shots.
    pub fn search_distribution(
        &self,
        query: &[f64],
        iterations: usize,
        num_shots: usize,
        seed: u64,
    ) -> Vec<(usize, usize)> {
        let n = self.candidates.len();
        let mut counts = vec![0usize; n];
        for shot in 0..num_shots {
            // Each shot gets a deterministic but distinct seed.
            let shot_seed = seed.wrapping_add(shot as u64);
            let result = self.search(query, iterations, shot_seed);
            counts[result.index] += 1;
        }
        // Collect non-zero counts, sorted descending.
        let mut distribution: Vec<(usize, usize)> = counts
            .into_iter()
            .enumerate()
            .filter(|&(_, c)| c > 0)
            .collect();
        distribution.sort_by(|a, b| b.1.cmp(&a.1));
        distribution
    }

    // -----------------------------------------------------------------------
    // Private helpers
    // -----------------------------------------------------------------------

    /// Similarity-biased oracle.
    ///
    /// For each candidate slot `i`, compute the cosine similarity between the
    /// query and the candidate embedding. Apply a phase rotation of
    /// `PI * similarity` to the amplitude, boosting candidates that align with
    /// the query. Padding slots have empty embeddings, so their similarity is
    /// 0 and their phase is untouched.
    fn apply_oracle(&self, query: &[f64], amplitudes: &mut [Complex]) {
        for (i, candidate) in self.candidates.iter().enumerate() {
            let sim = Self::similarity(query, candidate);
            // Phase rotation: amplitude[i] *= e^{i * PI * sim}
            let phase = Complex::from_polar(1.0, PI * sim);
            amplitudes[i] = amplitudes[i] * phase;
        }
    }

    /// Grover diffusion operator: inversion about the mean amplitude.
    ///
    ///     mean = (1/n) * sum(amplitudes)
    ///     amplitudes[i] = 2 * mean - amplitudes[i]
    fn apply_diffusion(amplitudes: &mut [Complex]) {
        let n = amplitudes.len();
        let inv_n = 1.0 / n as f64;
        let mut mean = Complex::ZERO;
        for a in amplitudes.iter() {
            mean += *a;
        }
        mean = mean * inv_n;
        let two_mean = mean * 2.0;
        for a in amplitudes.iter_mut() {
            *a = two_mean - *a;
        }
    }

    /// Collapse the amplitude vector: sample one index from the |a_i|^2
    /// probability distribution via inverse-CDF sampling.
    fn collapse(&self, amplitudes: &[Complex], seed: u64) -> CollapseResult {
        let mut rng = StdRng::seed_from_u64(seed);
        // Build the cumulative probability distribution.
        let probs: Vec<f64> = amplitudes.iter().map(|a| a.norm_sq()).collect();
        let total: f64 = probs.iter().sum();
        let r: f64 = rng.gen::<f64>() * total;
        let mut cumulative = 0.0;
        // Round-off can leave `r` above the final cumulative sum, so default
        // to the last slot.
        let mut chosen = amplitudes.len() - 1; // fallback to last
        for (i, &p) in probs.iter().enumerate() {
            cumulative += p;
            if r <= cumulative {
                chosen = i;
                break;
            }
        }
        CollapseResult {
            index: chosen,
            amplitude: amplitudes[chosen].norm(),
            is_padding: chosen >= self.num_real,
        }
    }

    /// Cosine similarity between two vectors.
    ///
    /// The dot product covers the shared prefix; each norm covers the full
    /// vector, so length mismatches reduce similarity. Returns 0.0 if either
    /// vector is empty or has zero norm.
    fn similarity(query: &[f64], candidate: &[f64]) -> f64 {
        if query.is_empty() || candidate.is_empty() {
            return 0.0;
        }
        let len = query.len().min(candidate.len());
        let mut dot = 0.0_f64;
        let mut norm_q = 0.0_f64;
        let mut norm_c = 0.0_f64;
        for i in 0..len {
            dot += query[i] * candidate[i];
            norm_q += query[i] * query[i];
            norm_c += candidate[i] * candidate[i];
        }
        // Account for any remaining elements in the longer vector.
        for i in len..query.len() {
            norm_q += query[i] * query[i];
        }
        for i in len..candidate.len() {
            norm_c += candidate[i] * candidate[i];
        }
        let denom = norm_q.sqrt() * norm_c.sqrt();
        if denom < 1e-15 {
            0.0
        } else {
            (dot / denom).clamp(-1.0, 1.0)
        }
    }
}
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
// Unit tests cover construction/padding, the cosine-similarity helper, and
// the statistical behaviour of repeated collapses.
#[cfg(test)]
mod tests {
    use super::*;

    /// Helper: create simple 2D embeddings (the four compass directions).
    fn sample_candidates() -> Vec<Vec<f64>> {
        vec![
            vec![1.0, 0.0],  // 0: east
            vec![0.0, 1.0],  // 1: north
            vec![-1.0, 0.0], // 2: west
            vec![0.0, -1.0], // 3: south
        ]
    }

    #[test]
    fn new_pads_to_power_of_two() {
        // 3 candidates should pad to 4 (2 qubits)
        let search = QuantumCollapseSearch::new(vec![vec![1.0], vec![2.0], vec![3.0]]);
        assert_eq!(search.num_qubits, 2);
        assert_eq!(search.candidates.len(), 4);
        assert_eq!(search.num_real, 3);
    }

    #[test]
    fn similarity_identical_vectors() {
        let a = vec![1.0, 2.0, 3.0];
        let sim = QuantumCollapseSearch::similarity(&a, &a);
        assert!((sim - 1.0).abs() < 1e-10);
    }

    #[test]
    fn similarity_orthogonal_vectors() {
        let a = vec![1.0, 0.0];
        let b = vec![0.0, 1.0];
        let sim = QuantumCollapseSearch::similarity(&a, &b);
        assert!(sim.abs() < 1e-10);
    }

    #[test]
    fn similarity_opposite_vectors() {
        let a = vec![1.0, 0.0];
        let b = vec![-1.0, 0.0];
        let sim = QuantumCollapseSearch::similarity(&a, &b);
        assert!((sim + 1.0).abs() < 1e-10);
    }

    #[test]
    fn similarity_empty_returns_zero() {
        assert_eq!(QuantumCollapseSearch::similarity(&[], &[1.0, 2.0]), 0.0);
        assert_eq!(QuantumCollapseSearch::similarity(&[1.0], &[]), 0.0);
    }

    #[test]
    fn single_candidate_always_collapses_to_it() {
        let search = QuantumCollapseSearch::new(vec![vec![1.0, 0.0]]);
        let query = [1.0, 0.0];
        for seed in 0..20 {
            let result = search.search(&query, 3, seed);
            // With 1 real candidate and 1 padding, we should almost always
            // get index 0 after iterations biased toward the real candidate.
            // At minimum check that the result is valid.
            assert!(result.index < 2);
        }
    }

    #[test]
    fn search_favors_similar_candidates() {
        // Use asymmetric candidates so only one is highly aligned with the query.
        let candidates = vec![
            vec![1.0, 0.0],  // 0: very aligned
            vec![0.3, 0.7],  // 1: partially aligned
            vec![0.0, 1.0],  // 2: orthogonal
            vec![-0.5, 0.5], // 3: partially opposed
        ];
        let search = QuantumCollapseSearch::new(candidates);
        let query = [1.0, 0.0]; // aligned with candidate 0
        // Run many shots to build a distribution.
        let dist = search.search_distribution(&query, 1, 500, 42);
        assert!(!dist.is_empty(), "distribution should not be empty");
        // The distribution should be non-uniform (oracle has an effect).
        // We just verify the distribution has variation.
        let max_count = dist.iter().map(|&(_, c)| c).max().unwrap_or(0);
        let min_count = dist.iter().map(|&(_, c)| c).min().unwrap_or(0);
        assert!(
            max_count > min_count,
            "distribution should be non-uniform: max {} vs min {}",
            max_count,
            min_count
        );
    }

    #[test]
    fn search_distribution_is_reproducible() {
        // Same base seed -> identical per-shot seeds -> identical counts.
        let search = QuantumCollapseSearch::new(sample_candidates());
        let query = [0.7, 0.7];
        let d1 = search.search_distribution(&query, 2, 100, 99);
        let d2 = search.search_distribution(&query, 2, 100, 99);
        assert_eq!(d1, d2, "same seed should produce identical distributions");
    }

    #[test]
    fn collapse_result_flags_padding() {
        // 3 real candidates -> padded to 4
        let search =
            QuantumCollapseSearch::new(vec![vec![0.0, 1.0], vec![1.0, 0.0], vec![0.5, 0.5]]);
        // Run many shots; any hit on index 3 should have is_padding = true.
        for seed in 0..50 {
            let result = search.search(&[0.0, 0.0], 0, seed);
            if result.index >= 3 {
                assert!(result.is_padding);
            } else {
                assert!(!result.is_padding);
            }
        }
    }

    #[test]
    fn zero_iterations_gives_uniform_distribution() {
        let search = QuantumCollapseSearch::new(sample_candidates());
        let query = [1.0, 0.0];
        // With 0 iterations, the superposition stays uniform.
        // Each of the 4 candidates should get roughly 25% of 1000 shots.
        let dist = search.search_distribution(&query, 0, 1000, 7);
        for &(_, count) in &dist {
            // Should be roughly 250 +/- some variance
            assert!(count > 100, "expected roughly uniform: got {count}");
        }
    }

    #[test]
    fn amplitude_is_positive() {
        // `norm()` of a complex number is non-negative by construction.
        let search = QuantumCollapseSearch::new(sample_candidates());
        let result = search.search(&[1.0, 0.0], 2, 0);
        assert!(result.amplitude >= 0.0);
    }
}

View File

@@ -0,0 +1,423 @@
//! Quantum Decay -- Embeddings decohere instead of being deleted
//!
//! Treats f64 embedding vectors as quantum state amplitudes. Applies quantum
//! noise channels (dephasing, amplitude damping) over time instead of TTL
//! deletion. Cold vectors lose phase fidelity before magnitude, and similarity
//! degrades smoothly rather than disappearing at a hard deadline.
//!
//! # Model
//!
//! Two physical noise channels are applied each time [`QuantumEmbedding::decohere`]
//! is called:
//!
//! 1. **Dephasing (T2)** -- random Rz-like phase kicks on every amplitude.
//! Magnitudes are preserved but phase coherence is scrambled.
//! 2. **Amplitude damping (T1)** -- amplitudes decay toward the |0> ground
//! state, modelling energy dissipation. Probability leaked from excited
//! states is transferred to the ground state.
//!
//! The `noise_rate` parameter controls how aggressively both channels act per
//! unit of abstract time `dt`.
use ruqu_core::state::QuantumState;
use ruqu_core::types::Complex;
use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};
use std::f64::consts::PI;
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/// Compute the minimum number of qubits needed to hold `len` amplitudes.
/// Always returns at least 1 (QuantumState requires num_qubits >= 1).
///
/// Equivalent to `max(1, ceil(log2(len)))`, computed with exact integer
/// bit operations instead of the previous hand-rolled shift loop.
fn required_qubits(len: usize) -> u32 {
    if len <= 2 {
        return 1;
    }
    // next_power_of_two rounds up to 2^k; trailing_zeros then yields k.
    len.next_power_of_two().trailing_zeros()
}
// ---------------------------------------------------------------------------
// QuantumEmbedding
// ---------------------------------------------------------------------------
/// A vector embedding treated as a quantum state that decoheres over time.
///
/// Classical f64 values are normalised and zero-padded to the next power of two,
/// then stored as complex amplitudes of a [`QuantumState`]. Decoherence is
/// modelled by applying stochastic phase and amplitude noise, causing the
/// fidelity with the original state to decay smoothly.
pub struct QuantumEmbedding {
    /// Embedding encoded as quantum amplitudes.
    state: QuantumState,
    /// Snapshot of amplitudes at creation for fidelity tracking.
    /// Never mutated after construction.
    original_state: Vec<Complex>,
    /// Dimensionality of the original embedding before power-of-2 padding
    /// (at least 1, even for an empty input embedding).
    original_dim: usize,
    /// Abstract time units elapsed since creation (sum of `decohere` dts).
    age: f64,
    /// Decoherence rate per time unit; 0.0 disables both noise channels.
    noise_rate: f64,
}
impl QuantumEmbedding {
    /// Create from a classical f64 embedding vector.
    ///
    /// The embedding is L2-normalised and encoded as purely-real quantum
    /// amplitudes. If the length is not a power of two, the vector is
    /// zero-padded. An empty or all-zero embedding is mapped to the |0>
    /// computational basis state.
    pub fn from_embedding(embedding: &[f64], noise_rate: f64) -> Self {
        let original_dim = embedding.len().max(1);
        let num_qubits = required_qubits(original_dim);
        let padded_len = 1usize << num_qubits;
        // L2 normalisation factor
        let norm_sq: f64 = embedding.iter().map(|x| x * x).sum();
        let inv_norm = if norm_sq > 0.0 {
            1.0 / norm_sq.sqrt()
        } else {
            0.0
        };
        // Build zero-padded amplitude vector
        let mut amps = vec![Complex::ZERO; padded_len];
        for (i, &val) in embedding.iter().enumerate() {
            amps[i] = Complex::new(val * inv_norm, 0.0);
        }
        // Degenerate case: put all probability in |0>
        if inv_norm == 0.0 {
            amps[0] = Complex::ONE;
        }
        let original_state = amps.clone();
        let state = QuantumState::from_amplitudes(amps, num_qubits)
            .expect("padded amplitude vector length must equal 2^num_qubits");
        Self {
            state,
            original_state,
            original_dim,
            age: 0.0,
            noise_rate,
        }
    }

    /// Apply decoherence for `dt` time units.
    ///
    /// Two noise channels act in sequence:
    ///
    /// 1. **Dephasing** -- every amplitude is multiplied by e^{i*theta} where
    ///    theta is drawn uniformly from `[-pi * noise_rate * dt, pi * noise_rate * dt]`.
    ///    This scrambles phase coherence while exactly preserving per-amplitude
    ///    probabilities.
    ///
    /// 2. **Amplitude damping** -- each non-ground-state amplitude is scaled by
    ///    sqrt(1 - gamma) where gamma = 1 - e^{-noise_rate * dt}. The probability
    ///    leaked from excited states is added to the |0> ground state, then the
    ///    whole vector is renormalised.
    ///
    /// The `seed` controls the pseudo-random number generator for
    /// reproducibility.
    pub fn decohere(&mut self, dt: f64, seed: u64) {
        let mut rng = StdRng::seed_from_u64(seed);
        // Damping parameter gamma in [0, 1), approaches 1 for large dt * rate
        let gamma = 1.0 - (-self.noise_rate * dt).exp();
        // Phase noise scale in [0, inf)
        let phase_scale = self.noise_rate * dt;
        let amps = self.state.amplitudes_mut();
        // ------ Phase noise (dephasing) ------
        // (Previously `iter_mut().take(n)` with n == amps.len(); the `take`
        // was a no-op and has been removed.)
        for amp in amps.iter_mut() {
            let angle = (rng.gen::<f64>() - 0.5) * 2.0 * PI * phase_scale;
            let phase_kick = Complex::from_polar(1.0, angle);
            *amp = *amp * phase_kick;
        }
        // ------ Amplitude damping toward |0> ------
        let decay_factor = (1.0 - gamma).sqrt();
        let mut leaked_probability = 0.0;
        for amp in amps.iter_mut().skip(1) {
            let prob_before = amp.norm_sq();
            *amp = *amp * decay_factor;
            leaked_probability += prob_before - amp.norm_sq();
        }
        // Transfer leaked probability into the ground state
        let p0 = amps[0].norm_sq();
        let new_p0 = p0 + leaked_probability;
        if new_p0 > 0.0 && p0 > 0.0 {
            // Scale the existing ground amplitude, preserving its phase.
            amps[0] = amps[0] * (new_p0 / p0).sqrt();
        } else if new_p0 > 0.0 {
            // Ground amplitude was exactly zero: create a real amplitude.
            amps[0] = Complex::new(new_p0.sqrt(), 0.0);
        }
        // Correct any accumulated numerical drift
        self.state.normalize();
        self.age += dt;
    }

    /// Fidelity with the original state: |<original|current>|^2 in [0, 1].
    ///
    /// Returns 1.0 for a freshly created embedding (perfect memory) and
    /// decays toward 0.0 as the state decoheres (completely forgotten).
    pub fn fidelity(&self) -> f64 {
        let current = self.state.state_vector();
        let mut inner = Complex::ZERO;
        for (orig, cur) in self.original_state.iter().zip(current.iter()) {
            inner = inner + orig.conj() * *cur;
        }
        inner.norm_sq()
    }

    /// Current age of this embedding in abstract time units.
    pub fn age(&self) -> f64 {
        self.age
    }

    /// Quantum-aware similarity: |<self|other>|^2 as a complex inner product.
    ///
    /// Unlike cosine similarity, this captures phase relationships. Two
    /// embeddings that have decohered along different random trajectories will
    /// show reduced similarity even if their probability distributions are
    /// similar, because their phases no longer align.
    pub fn quantum_similarity(&self, other: &QuantumEmbedding) -> f64 {
        let sv1 = self.state.state_vector();
        let sv2 = other.state.state_vector();
        let len = sv1.len().min(sv2.len());
        let mut inner = Complex::ZERO;
        for i in 0..len {
            inner = inner + sv1[i].conj() * sv2[i];
        }
        inner.norm_sq()
    }

    /// Extract back to a classical f64 vector.
    ///
    /// Returns the real part of each amplitude, truncated to the original
    /// embedding dimension. This is lossy when the state has decohered:
    /// dephasing moves energy into imaginary components that are discarded,
    /// and amplitude damping shifts probability toward |0>.
    pub fn to_embedding(&self) -> Vec<f64> {
        self.state
            .state_vector()
            .iter()
            .take(self.original_dim)
            .map(|c| c.re)
            .collect()
    }

    /// Check if the embedding has decohered below a fidelity threshold.
    ///
    /// Returns `true` when the state still retains at least `threshold`
    /// fidelity with its original value.
    pub fn is_coherent(&self, threshold: f64) -> bool {
        self.fidelity() >= threshold
    }
}
// ---------------------------------------------------------------------------
// Batch operations
// ---------------------------------------------------------------------------
/// Apply decoherence to a batch of embeddings, returning indices of those
/// still coherent.
///
/// Each embedding is decohered by `dt` time units using a unique seed derived
/// from the base `seed` and the embedding's index. Embeddings whose fidelity
/// drops below `threshold` are considered forgotten; the returned vector
/// contains the indices of embeddings that remain coherent.
pub fn decohere_batch(
    embeddings: &mut [QuantumEmbedding],
    dt: f64,
    threshold: f64,
    seed: u64,
) -> Vec<usize> {
    embeddings
        .iter_mut()
        .enumerate()
        .filter_map(|(idx, embedding)| {
            // Mix the index into the base seed so each embedding follows an
            // independent (uncorrelated) noise trajectory.
            let per_item_seed = seed
                .wrapping_add(idx as u64)
                .wrapping_mul(6_364_136_223_846_793_005);
            embedding.decohere(dt, per_item_seed);
            embedding.is_coherent(threshold).then_some(idx)
        })
        .collect()
}
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
// Unit tests cover encoding/normalisation, both noise channels, fidelity
// tracking, similarity, round-tripping, and batch filtering.
#[cfg(test)]
mod tests {
    use super::*;

    /// Helper: create a simple embedding of the given dimension
    /// (values 1.0, 2.0, ..., dim).
    fn sample_embedding(dim: usize) -> Vec<f64> {
        (0..dim).map(|i| (i as f64 + 1.0)).collect()
    }

    #[test]
    fn from_embedding_creates_normalised_state() {
        let emb = QuantumEmbedding::from_embedding(&[3.0, 4.0], 0.1);
        let sv = emb.state.state_vector();
        let norm_sq: f64 = sv.iter().map(|c| c.norm_sq()).sum();
        assert!((norm_sq - 1.0).abs() < 1e-10, "state should be normalised");
    }

    #[test]
    fn from_embedding_pads_to_power_of_two() {
        let emb = QuantumEmbedding::from_embedding(&[1.0, 2.0, 3.0], 0.1);
        // 3 elements -> 4 (2 qubits)
        assert_eq!(emb.state.state_vector().len(), 4);
        assert_eq!(emb.state.num_qubits(), 2);
    }

    #[test]
    fn fresh_embedding_has_unit_fidelity() {
        let emb = QuantumEmbedding::from_embedding(&[1.0, 0.0, 0.0, 0.0], 0.1);
        assert!((emb.fidelity() - 1.0).abs() < 1e-10);
    }

    #[test]
    fn decoherence_reduces_fidelity() {
        let mut emb = QuantumEmbedding::from_embedding(&sample_embedding(4), 0.5);
        let f_before = emb.fidelity();
        emb.decohere(1.0, 42);
        let f_after = emb.fidelity();
        assert!(
            f_after < f_before,
            "fidelity should decrease: before={f_before}, after={f_after}"
        );
    }

    #[test]
    fn decoherence_advances_age() {
        // Age accumulates the dt of every decohere call.
        let mut emb = QuantumEmbedding::from_embedding(&[1.0, 2.0], 0.1);
        assert!((emb.age() - 0.0).abs() < 1e-15);
        emb.decohere(0.5, 1);
        assert!((emb.age() - 0.5).abs() < 1e-15);
        emb.decohere(1.5, 2);
        assert!((emb.age() - 2.0).abs() < 1e-15);
    }

    #[test]
    fn heavy_decoherence_destroys_fidelity() {
        let mut emb = QuantumEmbedding::from_embedding(&sample_embedding(8), 2.0);
        for i in 0..20 {
            emb.decohere(1.0, 100 + i);
        }
        assert!(
            emb.fidelity() < 0.3,
            "heavy decoherence should destroy fidelity: {}",
            emb.fidelity()
        );
    }

    #[test]
    fn quantum_similarity_is_symmetric() {
        let a = QuantumEmbedding::from_embedding(&[1.0, 0.0, 0.0, 0.0], 0.1);
        let b = QuantumEmbedding::from_embedding(&[0.0, 1.0, 0.0, 0.0], 0.1);
        let sim_ab = a.quantum_similarity(&b);
        let sim_ba = b.quantum_similarity(&a);
        assert!(
            (sim_ab - sim_ba).abs() < 1e-10,
            "similarity should be symmetric"
        );
    }

    #[test]
    fn identical_embeddings_have_similarity_one() {
        let a = QuantumEmbedding::from_embedding(&[1.0, 2.0, 3.0, 4.0], 0.1);
        let b = QuantumEmbedding::from_embedding(&[1.0, 2.0, 3.0, 4.0], 0.1);
        assert!(
            (a.quantum_similarity(&b) - 1.0).abs() < 1e-10,
            "identical embeddings should have similarity 1.0"
        );
    }

    #[test]
    fn to_embedding_round_trips_without_decoherence() {
        let original = vec![3.0, 4.0];
        let emb = QuantumEmbedding::from_embedding(&original, 0.1);
        let recovered = emb.to_embedding();
        assert_eq!(recovered.len(), original.len());
        // Should be the normalised version of the original
        let norm = (3.0f64 * 3.0 + 4.0 * 4.0).sqrt();
        assert!((recovered[0] - 3.0 / norm).abs() < 1e-10);
        assert!((recovered[1] - 4.0 / norm).abs() < 1e-10);
    }

    #[test]
    fn is_coherent_respects_threshold() {
        let mut emb = QuantumEmbedding::from_embedding(&sample_embedding(4), 1.0);
        assert!(emb.is_coherent(0.9));
        // Decohere heavily
        for i in 0..10 {
            emb.decohere(1.0, 200 + i);
        }
        assert!(!emb.is_coherent(0.99));
    }

    #[test]
    fn decohere_batch_filters_correctly() {
        let mut batch: Vec<QuantumEmbedding> = (0..5)
            .map(|i| {
                QuantumEmbedding::from_embedding(
                    &sample_embedding(4),
                    // Higher noise rate for later embeddings
                    0.1 * (i as f64 + 1.0),
                )
            })
            .collect();
        let coherent = decohere_batch(&mut batch, 1.0, 0.3, 999);
        // Embeddings with lower noise rates should remain coherent longer
        // At least the lowest-noise-rate embedding should survive
        assert!(
            !coherent.is_empty(),
            "at least some embeddings should remain coherent with mild decoherence"
        );
        // The first embedding (lowest noise) should be the most likely to survive
        if !coherent.is_empty() {
            assert_eq!(coherent[0], 0, "lowest-noise embedding should survive");
        }
    }

    #[test]
    fn empty_embedding_handled() {
        // An empty input maps to the |0> basis state with unit fidelity.
        let emb = QuantumEmbedding::from_embedding(&[], 0.1);
        assert!((emb.fidelity() - 1.0).abs() < 1e-10);
        let recovered = emb.to_embedding();
        // original_dim is max(0, 1) = 1
        assert_eq!(recovered.len(), 1);
    }

    #[test]
    fn zero_noise_rate_preserves_fidelity() {
        let mut emb = QuantumEmbedding::from_embedding(&sample_embedding(4), 0.0);
        emb.decohere(10.0, 42);
        // With noise_rate=0, gamma=0 and phase_scale=0, so no change
        assert!(
            (emb.fidelity() - 1.0).abs() < 1e-10,
            "zero noise rate should preserve fidelity perfectly"
        );
    }
}

View File

@@ -0,0 +1,310 @@
//! # Browser-Native Quantum Reality Checks
//!
//! Verification circuits that let users test quantum claims locally.
//! If an AI says behavior is quantum-inspired, the user can verify it
//! against actual quantum mechanics in the browser.
//!
//! Collapses the gap between explanation and verification.
use ruqu_core::error::QuantumError;
use ruqu_core::gate::Gate;
use ruqu_core::state::QuantumState;
// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------
/// What property we expect to verify.
///
/// Each variant names a statistical property of the final state that a
/// verification circuit is expected to exhibit, plus the slack allowed
/// when comparing against the simulated probabilities.
#[derive(Debug, Clone)]
pub enum ExpectedProperty {
    /// P(qubit = 0) ≈ expected ± tolerance
    ProbabilityZero {
        /// Index of the qubit under test.
        qubit: u32,
        /// Expected probability of measuring |0> on that qubit.
        expected: f64,
        /// Maximum allowed |measured - expected|.
        tolerance: f64,
    },
    /// P(qubit = 1) ≈ expected ± tolerance
    ProbabilityOne {
        /// Index of the qubit under test.
        qubit: u32,
        /// Expected probability of measuring |1> on that qubit.
        expected: f64,
        /// Maximum allowed |measured - expected|.
        tolerance: f64,
    },
    /// Two qubits are entangled: P(same outcome) > min_correlation
    Entangled {
        qubit_a: u32,
        qubit_b: u32,
        /// Minimum required P(qubit_a outcome == qubit_b outcome).
        min_correlation: f64,
    },
    /// Qubit is in equal superposition: P(1) ≈ 0.5 ± tolerance
    EqualSuperposition { qubit: u32, tolerance: f64 },
    /// Full probability distribution matches ± tolerance
    InterferencePattern {
        /// Expected probability for every basis state, in index order.
        probabilities: Vec<f64>,
        /// Maximum allowed per-entry deviation.
        tolerance: f64,
    },
}
/// A quantum reality check: a named verification experiment.
///
/// Pairs a human-readable name/description with the number of qubits the
/// verification circuit needs and the property the final state must
/// satisfy. Derives added for consistency with [`ExpectedProperty`] and
/// [`CheckResult`], which already expose `Debug`.
#[derive(Debug, Clone)]
pub struct RealityCheck {
    /// Short display name (echoed into the result's `check_name`).
    pub name: String,
    /// One-line explanation of what the experiment demonstrates.
    pub description: String,
    /// Qubit count for the verification circuit.
    pub num_qubits: u32,
    /// Property the circuit's final state is expected to exhibit.
    pub expected: ExpectedProperty,
}
/// Result of running a reality check.
#[derive(Debug)]
pub struct CheckResult {
    /// Name copied from the [`RealityCheck`] that produced this result.
    pub check_name: String,
    /// Whether the measured value satisfied the expected property.
    pub passed: bool,
    /// The quantity actually measured (meaning depends on the property).
    pub measured_value: f64,
    /// The target value (or bound) the measurement was compared against.
    pub expected_value: f64,
    /// Human-readable summary of the comparison.
    pub detail: String,
}
// ---------------------------------------------------------------------------
// Verification engine
// ---------------------------------------------------------------------------
/// Run a verification circuit and check the expected property.
pub fn run_check<F>(check: &RealityCheck, circuit_fn: F) -> Result<CheckResult, QuantumError>
where
F: FnOnce(&mut QuantumState) -> Result<(), QuantumError>,
{
let mut state = QuantumState::new(check.num_qubits)?;
circuit_fn(&mut state)?;
let probs = state.probabilities();
match &check.expected {
ExpectedProperty::ProbabilityZero {
qubit,
expected,
tolerance,
} => {
let p0 = 1.0 - state.probability_of_qubit(*qubit);
let pass = (p0 - expected).abs() <= *tolerance;
Ok(CheckResult {
check_name: check.name.clone(),
passed: pass,
measured_value: p0,
expected_value: *expected,
detail: format!(
"P(q{}=0) = {:.6}, expected {:.6} +/- {:.6}",
qubit, p0, expected, tolerance
),
})
}
ExpectedProperty::ProbabilityOne {
qubit,
expected,
tolerance,
} => {
let p1 = state.probability_of_qubit(*qubit);
let pass = (p1 - expected).abs() <= *tolerance;
Ok(CheckResult {
check_name: check.name.clone(),
passed: pass,
measured_value: p1,
expected_value: *expected,
detail: format!(
"P(q{}=1) = {:.6}, expected {:.6} +/- {:.6}",
qubit, p1, expected, tolerance
),
})
}
ExpectedProperty::Entangled {
qubit_a,
qubit_b,
min_correlation,
} => {
// Correlation = P(same outcome) = P(00) + P(11)
let bit_a = 1usize << qubit_a;
let bit_b = 1usize << qubit_b;
let mut p_same = 0.0;
for (i, &p) in probs.iter().enumerate() {
let a = (i & bit_a) != 0;
let b = (i & bit_b) != 0;
if a == b {
p_same += p;
}
}
let pass = p_same >= *min_correlation;
Ok(CheckResult {
check_name: check.name.clone(),
passed: pass,
measured_value: p_same,
expected_value: *min_correlation,
detail: format!(
"P(q{}==q{}) = {:.6}, min {:.6}",
qubit_a, qubit_b, p_same, min_correlation
),
})
}
ExpectedProperty::EqualSuperposition { qubit, tolerance } => {
let p1 = state.probability_of_qubit(*qubit);
let pass = (p1 - 0.5).abs() <= *tolerance;
Ok(CheckResult {
check_name: check.name.clone(),
passed: pass,
measured_value: p1,
expected_value: 0.5,
detail: format!(
"P(q{}=1) = {:.6}, expected 0.5 +/- {:.6}",
qubit, p1, tolerance
),
})
}
ExpectedProperty::InterferencePattern {
probabilities: expected_probs,
tolerance,
} => {
let max_diff: f64 = probs
.iter()
.zip(expected_probs.iter())
.map(|(a, b)| (a - b).abs())
.fold(0.0_f64, f64::max);
let pass = max_diff <= *tolerance;
Ok(CheckResult {
check_name: check.name.clone(),
passed: pass,
measured_value: max_diff,
expected_value: 0.0,
detail: format!(
"max |p_measured - p_expected| = {:.6}, tolerance {:.6}",
max_diff, tolerance
),
})
}
}
}
// ---------------------------------------------------------------------------
// Built-in verification circuits
// ---------------------------------------------------------------------------
/// Verify superposition: H|0⟩ should give 50/50.
///
/// # Panics
///
/// Panics if the underlying 1-qubit simulation fails.
pub fn check_superposition() -> CheckResult {
    let check = RealityCheck {
        name: "Superposition".into(),
        description: "H|0> produces equal superposition".into(),
        num_qubits: 1,
        expected: ExpectedProperty::EqualSuperposition {
            qubit: 0,
            tolerance: 1e-10,
        },
    };
    run_check(&check, |qs| {
        qs.apply_gate(&Gate::H(0))?;
        Ok(())
    })
    .unwrap()
}
/// Verify entanglement: Bell state |00⟩ + |11⟩ has perfect correlation.
///
/// # Panics
///
/// Panics if the underlying 2-qubit simulation fails.
pub fn check_entanglement() -> CheckResult {
    let check = RealityCheck {
        name: "Entanglement".into(),
        description: "Bell state has perfectly correlated measurements".into(),
        num_qubits: 2,
        expected: ExpectedProperty::Entangled {
            qubit_a: 0,
            qubit_b: 1,
            min_correlation: 0.99,
        },
    };
    // H then CNOT prepares (|00> + |11>)/sqrt(2).
    run_check(&check, |qs| {
        for gate in [Gate::H(0), Gate::CNOT(0, 1)] {
            qs.apply_gate(&gate)?;
        }
        Ok(())
    })
    .unwrap()
}
/// Verify interference: H-Z-H = X, so |0⟩ → |1⟩.
/// Destructive interference on |0⟩, constructive on |1⟩.
///
/// # Panics
///
/// Panics if the underlying 1-qubit simulation fails.
pub fn check_interference() -> CheckResult {
    let check = RealityCheck {
        name: "Interference".into(),
        description: "H-Z-H = X: destructive interference eliminates |0>".into(),
        num_qubits: 1,
        expected: ExpectedProperty::ProbabilityOne {
            qubit: 0,
            expected: 1.0,
            tolerance: 1e-10,
        },
    };
    // The H-Z-H sandwich is algebraically identical to a single X gate.
    run_check(&check, |qs| {
        for gate in [Gate::H(0), Gate::Z(0), Gate::H(0)] {
            qs.apply_gate(&gate)?;
        }
        Ok(())
    })
    .unwrap()
}
/// Verify phase kickback: Deutsch's algorithm for balanced f(x)=x.
/// Query qubit should measure |1⟩ with certainty.
///
/// # Panics
///
/// Panics if the underlying 2-qubit simulation fails.
pub fn check_phase_kickback() -> CheckResult {
    let check = RealityCheck {
        name: "Phase Kickback".into(),
        description: "Deutsch oracle for f(x)=x: phase kickback produces |1> on query qubit".into(),
        num_qubits: 2,
        expected: ExpectedProperty::ProbabilityOne {
            qubit: 0,
            expected: 1.0,
            tolerance: 1e-10,
        },
    };
    // Deutsch circuit: prepare |01>, Hadamard both, apply the f(x)=x oracle
    // (a CNOT), then a final Hadamard on the query qubit.
    let circuit = [
        Gate::X(1),
        Gate::H(0),
        Gate::H(1),
        Gate::CNOT(0, 1),
        Gate::H(0),
    ];
    run_check(&check, |qs| {
        for gate in circuit {
            qs.apply_gate(&gate)?;
        }
        Ok(())
    })
    .unwrap()
}
/// Verify no-cloning: CNOT cannot copy a superposition.
/// If |ψ⟩ = H|0⟩ = |+⟩, then CNOT(0,1)|+,0⟩ = (|00⟩+|11⟩)/√2 (Bell state),
/// NOT |+,+⟩ = (|00⟩+|01⟩+|10⟩+|11⟩)/2.
///
/// We detect this by checking that qubit 1 is NOT in an equal superposition
/// independently — it is entangled with qubit 0, not an independent copy.
///
/// # Panics
///
/// Panics if the underlying 2-qubit simulation fails.
pub fn check_no_cloning() -> CheckResult {
    let check = RealityCheck {
        name: "No-Cloning".into(),
        description:
            "CNOT cannot independently copy a superposition (produces entanglement instead)".into(),
        num_qubits: 2,
        expected: ExpectedProperty::InterferencePattern {
            // Bell state: P(00) = 0.5, P(01) = 0, P(10) = 0, P(11) = 0.5
            // If cloning worked: P(00) = 0.25, P(01) = 0.25, P(10) = 0.25, P(11) = 0.25
            probabilities: vec![0.5, 0.0, 0.0, 0.5],
            tolerance: 1e-10,
        },
    };
    run_check(&check, |qs| {
        for gate in [Gate::H(0), Gate::CNOT(0, 1)] {
            qs.apply_gate(&gate)?;
        }
        Ok(())
    })
    .unwrap()
}
/// Run all built-in checks and return results.
///
/// # Panics
///
/// Panics if any individual check's simulation fails.
pub fn run_all_checks() -> Vec<CheckResult> {
    let mut results = Vec::with_capacity(5);
    results.push(check_superposition());
    results.push(check_entanglement());
    results.push(check_interference());
    results.push(check_phase_kickback());
    results.push(check_no_cloning());
    results
}

View File

@@ -0,0 +1,351 @@
//! # Reasoning QEC -- Quantum Error Correction for Reasoning Traces
//!
//! Treats reasoning steps like qubits. Each step is encoded as a quantum state
//! (high confidence = close to |0>, low confidence = rotated toward |1>).
//! Noise is injected to simulate reasoning errors, then a repetition-code-style
//! syndrome extraction detects when adjacent steps become incoherent.
//!
//! This provides **structural** reasoning integrity checks, not semantic ones.
//! The 1D repetition code uses:
//! - N data qubits (one per reasoning step)
//! - N-1 ancilla qubits (parity checks between adjacent steps)
//! - Total: 2N - 1 qubits (maximum N = 13 to stay within 25-qubit limit)
use ruqu_core::error::QuantumError;
use ruqu_core::gate::Gate;
use ruqu_core::state::QuantumState;
use ruqu_core::types::Complex;
use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};
/// A single reasoning step encoded as a quantum state.
/// The step is either "valid" (close to |0>) or "flawed" (close to |1>).
#[derive(Debug, Clone)]
pub struct ReasoningStep {
    /// Human-readable identifier for the step (labeling only).
    pub label: String,
    /// Encoding confidence: 1.0 leaves the qubit at |0>, 0.0 rotates it to
    /// an equal superposition via Ry(pi/2).
    pub confidence: f64, // 0.0 = completely uncertain, 1.0 = fully confident
}
/// Configuration for reasoning QEC
pub struct ReasoningQecConfig {
    /// Number of reasoning steps (data qubits)
    // NOTE(review): informational only — `ReasoningTrace::new` derives the
    // step count from `steps.len()` and never reads this field.
    pub num_steps: usize,
    /// Noise rate per step (probability of error per step)
    pub noise_rate: f64,
    /// Seed for reproducibility
    // `None` falls back to a fixed default seed (42) inside the trace.
    pub seed: Option<u64>,
}
/// Result of a reasoning QEC analysis
#[derive(Debug)]
pub struct ReasoningQecResult {
    /// Which steps had errors detected (indices)
    /// (decoder convention: the right-hand data qubit of each fired check).
    pub error_steps: Vec<usize>,
    /// Syndrome bits (one per stabilizer)
    pub syndrome: Vec<bool>,
    /// Whether the overall reasoning trace is decodable (correctable);
    /// true when no more than half of the steps were flagged.
    pub is_decodable: bool,
    /// Fidelity of the reasoning trace after correction
    pub corrected_fidelity: f64,
    /// Number of steps total
    pub num_steps: usize,
}
/// A reasoning trace with QEC-style error detection.
///
/// Maps reasoning steps to a 1D repetition code:
/// - Each step is a data qubit
/// - Stabilizers check parity between adjacent steps
/// - If adjacent steps disagree (one flipped, one not), syndrome fires
///
/// This is simpler than a full surface code but captures the key idea:
/// structural detection of reasoning incoherence.
pub struct ReasoningTrace {
    /// The reasoning steps, in order; step i lives on data qubit i.
    steps: Vec<ReasoningStep>,
    /// Simulator state holding data qubits [0, n) and ancillas [n, 2n-1).
    state: QuantumState,
    /// Noise/seed configuration read by `inject_noise`.
    config: ReasoningQecConfig,
}
impl ReasoningTrace {
    /// Create a new reasoning trace from steps.
    /// Each step's confidence maps to a rotation: high confidence = close to |0>.
    /// Total qubits = num_steps (data) + (num_steps - 1) (ancilla for parity checks)
    ///
    /// # Errors
    ///
    /// Returns a `CircuitError` for an empty step list, or
    /// `QubitLimitExceeded` when 2n - 1 would exceed the 25-qubit limit.
    pub fn new(
        steps: Vec<ReasoningStep>,
        config: ReasoningQecConfig,
    ) -> Result<Self, QuantumError> {
        let num_steps = steps.len();
        if num_steps == 0 {
            return Err(QuantumError::CircuitError(
                "reasoning trace requires at least one step".into(),
            ));
        }
        // Total qubits: data (0..num_steps) + ancillas (num_steps..2*num_steps-1)
        let total_qubits = (2 * num_steps - 1) as u32;
        // Check qubit limit early (MAX_QUBITS = 25)
        if total_qubits > 25 {
            return Err(QuantumError::QubitLimitExceeded {
                requested: total_qubits,
                maximum: 25,
            });
        }
        let seed = config.seed.unwrap_or(42);
        let mut state = QuantumState::new_with_seed(total_qubits, seed)?;
        // Encode each step: rotate by angle based on confidence.
        // confidence=1.0 -> |0> (no rotation), confidence=0.0 -> equal superposition (pi/2)
        for (i, step) in steps.iter().enumerate() {
            let angle = std::f64::consts::FRAC_PI_2 * (1.0 - step.confidence);
            if angle.abs() > 1e-15 {
                state.apply_gate(&Gate::Ry(i as u32, angle))?;
            }
        }
        Ok(Self {
            steps,
            state,
            config,
        })
    }
    /// Inject noise into the reasoning trace.
    /// Each step independently suffers a bit flip (X error) with probability noise_rate.
    ///
    /// The RNG is seeded from the configured seed (offset so it differs from
    /// the state's own seed), keeping the noise pattern reproducible.
    pub fn inject_noise(&mut self) -> Result<(), QuantumError> {
        let seed = self.config.seed.unwrap_or(42).wrapping_add(12345);
        let mut rng = StdRng::seed_from_u64(seed);
        for i in 0..self.steps.len() {
            if rng.gen::<f64>() < self.config.noise_rate {
                self.state.apply_gate(&Gate::X(i as u32))?;
            }
        }
        Ok(())
    }
    /// Extract syndrome by checking parity between adjacent reasoning steps.
    /// Uses ancilla qubits to perform non-destructive parity measurement.
    /// Syndrome bit i fires if steps i and i+1 disagree (ZZ stabilizer = -1).
    pub fn extract_syndrome(&mut self) -> Result<Vec<bool>, QuantumError> {
        let num_steps = self.steps.len();
        let mut syndrome = Vec::with_capacity(num_steps.saturating_sub(1));
        for i in 0..(num_steps - 1) {
            let data1 = i as u32;
            let data2 = (i + 1) as u32;
            let ancilla = (num_steps + i) as u32;
            // Reset ancilla to |0>
            self.state.reset_qubit(ancilla)?;
            // CNOT from data1 to ancilla, CNOT from data2 to ancilla.
            // Ancilla will be |1> if data1 != data2
            self.state.apply_gate(&Gate::CNOT(data1, ancilla))?;
            self.state.apply_gate(&Gate::CNOT(data2, ancilla))?;
            // Measure ancilla (collapses it; data qubits are untouched)
            let outcome = self.state.measure(ancilla)?;
            syndrome.push(outcome.result);
        }
        Ok(syndrome)
    }
    /// Decode syndrome and attempt correction.
    /// Simple decoder: if syndrome\[i\] fires, flip step i+1 (rightmost error assumption).
    pub fn decode_and_correct(&mut self, syndrome: &[bool]) -> Result<Vec<usize>, QuantumError> {
        let mut corrected = Vec::new();
        // Simple decoder: for each fired syndrome, the error is likely
        // between the two data qubits. Correct the right one.
        for (i, &fired) in syndrome.iter().enumerate() {
            if fired {
                let step_to_correct = i + 1;
                self.state.apply_gate(&Gate::X(step_to_correct as u32))?;
                corrected.push(step_to_correct);
            }
        }
        Ok(corrected)
    }
    /// Run the full QEC pipeline: inject noise, extract syndrome, decode, correct.
    ///
    /// `corrected_fidelity` compares the post-correction state against the
    /// pre-noise state; ancillas are reset to |0> first so the comparison
    /// reflects only the data qubits.
    pub fn run_qec(&mut self) -> Result<ReasoningQecResult, QuantumError> {
        // Save state before noise for fidelity comparison
        let clean_sv: Vec<Complex> = self.state.state_vector().to_vec();
        let clean_state = QuantumState::from_amplitudes(clean_sv, self.state.num_qubits())?;
        // Inject noise
        self.inject_noise()?;
        // Extract syndrome
        let syndrome = self.extract_syndrome()?;
        // Determine which steps have errors (decoder blames the right-hand
        // data qubit of each fired parity check)
        let mut error_steps = Vec::new();
        for (i, &s) in syndrome.iter().enumerate() {
            if s {
                error_steps.push(i + 1);
            }
        }
        let is_decodable = error_steps.len() <= self.steps.len() / 2;
        // Attempt correction
        if is_decodable {
            self.decode_and_correct(&syndrome)?;
        }
        // Return every ancilla to |0> before the fidelity comparison.
        // Syndrome measurement collapses a fired ancilla to |1>, which is
        // orthogonal to the clean state's |0> ancilla and would force the
        // fidelity to 0 even when every data qubit was repaired.
        let num_steps = self.steps.len();
        for i in 0..num_steps.saturating_sub(1) {
            self.state.reset_qubit((num_steps + i) as u32)?;
        }
        let corrected_fidelity = self.state.fidelity(&clean_state);
        Ok(ReasoningQecResult {
            error_steps,
            syndrome,
            is_decodable,
            corrected_fidelity,
            num_steps: self.steps.len(),
        })
    }
    /// Get the number of reasoning steps
    pub fn num_steps(&self) -> usize {
        self.steps.len()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Test helper: build `n` steps that all share the same confidence.
    fn make_steps(n: usize, confidence: f64) -> Vec<ReasoningStep> {
        (0..n)
            .map(|i| ReasoningStep {
                label: format!("step_{}", i),
                confidence,
            })
            .collect()
    }
    #[test]
    fn test_new_creates_trace() {
        let steps = make_steps(5, 1.0);
        let config = ReasoningQecConfig {
            num_steps: 5,
            noise_rate: 0.0,
            seed: Some(42),
        };
        let trace = ReasoningTrace::new(steps, config);
        assert!(trace.is_ok());
        assert_eq!(trace.unwrap().num_steps(), 5);
    }
    #[test]
    fn test_empty_steps_rejected() {
        let config = ReasoningQecConfig {
            num_steps: 0,
            noise_rate: 0.0,
            seed: Some(42),
        };
        let result = ReasoningTrace::new(vec![], config);
        assert!(result.is_err());
    }
    #[test]
    fn test_qubit_limit_exceeded() {
        // 14 steps -> 2*14-1 = 27 qubits > 25
        let steps = make_steps(14, 1.0);
        let config = ReasoningQecConfig {
            num_steps: 14,
            noise_rate: 0.0,
            seed: Some(42),
        };
        let result = ReasoningTrace::new(steps, config);
        assert!(result.is_err());
    }
    #[test]
    fn test_max_allowed_steps() {
        // 13 steps -> 2*13-1 = 25 qubits = MAX_QUBITS (should succeed)
        let steps = make_steps(13, 1.0);
        let config = ReasoningQecConfig {
            num_steps: 13,
            noise_rate: 0.0,
            seed: Some(42),
        };
        let result = ReasoningTrace::new(steps, config);
        assert!(result.is_ok());
    }
    #[test]
    fn test_no_noise_no_syndrome() {
        let steps = make_steps(5, 1.0);
        let config = ReasoningQecConfig {
            num_steps: 5,
            noise_rate: 0.0,
            seed: Some(42),
        };
        let mut trace = ReasoningTrace::new(steps, config).unwrap();
        let syndrome = trace.extract_syndrome().unwrap();
        // All steps fully confident (|0>) and no noise: parity checks should not fire
        assert!(syndrome.iter().all(|&s| !s));
    }
    #[test]
    fn test_run_qec_zero_noise() {
        let steps = make_steps(5, 1.0);
        let config = ReasoningQecConfig {
            num_steps: 5,
            noise_rate: 0.0,
            seed: Some(42),
        };
        let mut trace = ReasoningTrace::new(steps, config).unwrap();
        let result = trace.run_qec().unwrap();
        // With zero noise nothing flips, so no syndrome fires.
        assert!(result.error_steps.is_empty());
        assert!(result.is_decodable);
    }
    #[test]
    fn test_run_qec_with_noise() {
        let steps = make_steps(5, 1.0);
        let config = ReasoningQecConfig {
            num_steps: 5,
            noise_rate: 0.5,
            seed: Some(100),
        };
        let mut trace = ReasoningTrace::new(steps, config).unwrap();
        let result = trace.run_qec().unwrap();
        assert_eq!(result.num_steps, 5);
        // Syndrome length = num_steps - 1
        assert_eq!(result.syndrome.len(), 4);
    }
    #[test]
    fn test_single_step_trace() {
        let steps = make_steps(1, 0.8);
        let config = ReasoningQecConfig {
            num_steps: 1,
            noise_rate: 0.0,
            seed: Some(42),
        };
        let mut trace = ReasoningTrace::new(steps, config).unwrap();
        let syndrome = trace.extract_syndrome().unwrap();
        // Single step -> no parity checks -> empty syndrome
        assert!(syndrome.is_empty());
    }
    #[test]
    fn test_partial_confidence_encoding() {
        // Steps with 50% confidence should produce superposition states
        let steps = make_steps(3, 0.5);
        let config = ReasoningQecConfig {
            num_steps: 3,
            noise_rate: 0.0,
            seed: Some(42),
        };
        let trace = ReasoningTrace::new(steps, config).unwrap();
        // State should not be purely |000...0>
        let probs = trace.state.probabilities();
        assert!(probs[0] < 1.0);
    }
}

View File

@@ -0,0 +1,275 @@
//! # Time-Reversible Quantum Memory
//!
//! Because the simulator has full state access and all quantum gates are
//! unitary (and therefore invertible), we can **rewind** evolution.
//!
//! This enables counterfactual debugging: "What would this system have
//! believed if one observation was missing?"
//!
//! Most ML systems are forward-only. This is backward-capable.
use ruqu_core::error::QuantumError;
use ruqu_core::gate::Gate;
use ruqu_core::state::QuantumState;
use ruqu_core::types::Complex;
// ---------------------------------------------------------------------------
// Gate inversion
// ---------------------------------------------------------------------------
/// Compute the inverse of a unitary gate.
///
/// Self-inverse gates (X, Y, Z, H, CNOT, CZ, SWAP) return themselves.
/// Rotation gates negate their angle. S↔S†, T↔T†.
/// Non-unitary operations (Measure, Reset, Barrier) cannot be inverted.
pub fn inverse_gate(gate: &Gate) -> Result<Gate, QuantumError> {
match gate {
// Self-inverse
Gate::X(q) => Ok(Gate::X(*q)),
Gate::Y(q) => Ok(Gate::Y(*q)),
Gate::Z(q) => Ok(Gate::Z(*q)),
Gate::H(q) => Ok(Gate::H(*q)),
Gate::CNOT(a, b) => Ok(Gate::CNOT(*a, *b)),
Gate::CZ(a, b) => Ok(Gate::CZ(*a, *b)),
Gate::SWAP(a, b) => Ok(Gate::SWAP(*a, *b)),
// Rotation inverses: negate angle
Gate::Rx(q, t) => Ok(Gate::Rx(*q, -*t)),
Gate::Ry(q, t) => Ok(Gate::Ry(*q, -*t)),
Gate::Rz(q, t) => Ok(Gate::Rz(*q, -*t)),
Gate::Phase(q, t) => Ok(Gate::Phase(*q, -*t)),
Gate::Rzz(a, b, t) => Ok(Gate::Rzz(*a, *b, -*t)),
// Adjoint pairs
Gate::S(q) => Ok(Gate::Sdg(*q)),
Gate::Sdg(q) => Ok(Gate::S(*q)),
Gate::T(q) => Ok(Gate::Tdg(*q)),
Gate::Tdg(q) => Ok(Gate::T(*q)),
// Custom unitary: conjugate transpose
Gate::Unitary1Q(q, m) => {
let inv = [
[m[0][0].conj(), m[1][0].conj()],
[m[0][1].conj(), m[1][1].conj()],
];
Ok(Gate::Unitary1Q(*q, inv))
}
// Non-unitary: cannot invert
Gate::Measure(_) | Gate::Reset(_) | Gate::Barrier => Err(QuantumError::CircuitError(
"cannot invert non-unitary gate (Measure/Reset/Barrier)".into(),
)),
}
}
// ---------------------------------------------------------------------------
// Reversible memory
// ---------------------------------------------------------------------------
/// A recorded gate with its precomputed inverse.
#[derive(Clone)]
struct GateRecord {
    /// The gate as originally applied.
    gate: Gate,
    /// Its inverse, computed eagerly at apply time so rewinding can never
    /// fail on gate inversion.
    inverse: Gate,
}
/// Quantum memory that records all operations and can rewind them.
///
/// Every [`apply`] stores the gate and its inverse. [`rewind`] pops the
/// last n gates and applies their inverses, restoring an earlier state.
/// [`counterfactual`] replays history with one step omitted.
pub struct ReversibleMemory {
    /// Current simulator state after all recorded gates.
    state: QuantumState,
    /// Applied gates in order, each paired with its precomputed inverse.
    history: Vec<GateRecord>,
    /// Amplitudes of the initial state, kept for counterfactual replays.
    initial_amps: Vec<Complex>,
    /// Number of qubits in the register.
    num_qubits: u32,
}
/// Result of a counterfactual analysis.
///
/// Produced by [`ReversibleMemory::counterfactual`]: compares the recorded
/// final distribution against a replay with one gate omitted.
#[derive(Debug)]
pub struct CounterfactualResult {
    /// Probabilities without the removed step.
    pub counterfactual_probs: Vec<f64>,
    /// Probabilities with the step included (original).
    pub original_probs: Vec<f64>,
    /// L2 divergence between the two distributions.
    pub divergence: f64,
    /// Which step was removed.
    pub removed_step: usize,
}
/// Sensitivity of each step to perturbation.
#[derive(Debug)]
pub struct SensitivityResult {
    /// For each step: 1 - fidelity(perturbed, original).
    pub sensitivities: Vec<f64>,
    /// Index of the most sensitive step.
    pub most_sensitive: usize,
    /// Index of the least sensitive step.
    pub least_sensitive: usize,
}
impl ReversibleMemory {
    /// Create a new reversible memory with `num_qubits` qubits in |0…0⟩.
    pub fn new(num_qubits: u32) -> Result<Self, QuantumError> {
        let state = QuantumState::new(num_qubits)?;
        // Snapshot the initial amplitudes for counterfactual replays.
        let initial_amps = state.state_vector().to_vec();
        Ok(Self {
            state,
            history: Vec::new(),
            initial_amps,
            num_qubits,
        })
    }
    /// Create with a deterministic seed.
    pub fn new_with_seed(num_qubits: u32, seed: u64) -> Result<Self, QuantumError> {
        let state = QuantumState::new_with_seed(num_qubits, seed)?;
        let initial_amps = state.state_vector().to_vec();
        Ok(Self {
            state,
            history: Vec::new(),
            initial_amps,
            num_qubits,
        })
    }
    /// Apply a gate and record it. Non-unitary gates are rejected.
    ///
    /// The inverse is computed before the state is touched, so a rejected
    /// gate leaves both state and history unchanged.
    pub fn apply(&mut self, gate: Gate) -> Result<(), QuantumError> {
        let inv = inverse_gate(&gate)?;
        self.state.apply_gate(&gate)?;
        self.history.push(GateRecord { gate, inverse: inv });
        Ok(())
    }
    /// Rewind the last `steps` operations by applying their inverses.
    /// Returns how many were actually rewound.
    pub fn rewind(&mut self, steps: usize) -> Result<usize, QuantumError> {
        // Clamp to the available history rather than erroring.
        let actual = steps.min(self.history.len());
        for _ in 0..actual {
            let record = self.history.pop().unwrap();
            self.state.apply_gate(&record.inverse)?;
        }
        Ok(actual)
    }
    /// Counterfactual: what would the final state be if step `remove_index`
    /// never happened?
    ///
    /// Replays the full history from the initial state, skipping the
    /// specified step, then compares with the original outcome.
    ///
    /// # Errors
    ///
    /// Returns a `CircuitError` if `remove_index` is out of range.
    pub fn counterfactual(
        &self,
        remove_index: usize,
    ) -> Result<CounterfactualResult, QuantumError> {
        if remove_index >= self.history.len() {
            return Err(QuantumError::CircuitError(format!(
                "step {} out of range (history has {} steps)",
                remove_index,
                self.history.len()
            )));
        }
        // Replay without the removed step
        let mut cf_state =
            QuantumState::from_amplitudes(self.initial_amps.clone(), self.num_qubits)?;
        for (i, record) in self.history.iter().enumerate() {
            if i != remove_index {
                cf_state.apply_gate(&record.gate)?;
            }
        }
        let cf_probs = cf_state.probabilities();
        let orig_probs = self.state.probabilities();
        // L2 divergence between the two probability distributions
        let divergence: f64 = orig_probs
            .iter()
            .zip(cf_probs.iter())
            .map(|(a, b)| (a - b) * (a - b))
            .sum::<f64>()
            .sqrt();
        Ok(CounterfactualResult {
            counterfactual_probs: cf_probs,
            original_probs: orig_probs,
            divergence,
            removed_step: remove_index,
        })
    }
    /// Sensitivity analysis: for each step, insert a small Rz perturbation
    /// after it and measure how much the final state diverges.
    ///
    /// Sensitivity = 1 - fidelity(perturbed_final, original_final).
    pub fn sensitivity_analysis(
        &self,
        perturbation_angle: f64,
    ) -> Result<SensitivityResult, QuantumError> {
        if self.history.is_empty() {
            return Ok(SensitivityResult {
                sensitivities: vec![],
                most_sensitive: 0,
                least_sensitive: 0,
            });
        }
        let mut sensitivities = Vec::with_capacity(self.history.len());
        for perturb_idx in 0..self.history.len() {
            // Full replay with an Rz inserted right after step `perturb_idx`,
            // on the first qubit that step acted on.
            let mut perturbed =
                QuantumState::from_amplitudes(self.initial_amps.clone(), self.num_qubits)?;
            for (i, record) in self.history.iter().enumerate() {
                perturbed.apply_gate(&record.gate)?;
                if i == perturb_idx {
                    let q = record.gate.qubits().first().copied().unwrap_or(0);
                    perturbed.apply_gate(&Gate::Rz(q, perturbation_angle))?;
                }
            }
            let fid = self.state.fidelity(&perturbed);
            sensitivities.push(1.0 - fid);
        }
        let most_sensitive = sensitivities
            .iter()
            .enumerate()
            .max_by(|a, b| a.1.partial_cmp(b.1).unwrap())
            .map(|(i, _)| i)
            .unwrap_or(0);
        let least_sensitive = sensitivities
            .iter()
            .enumerate()
            .min_by(|a, b| a.1.partial_cmp(b.1).unwrap())
            .map(|(i, _)| i)
            .unwrap_or(0);
        Ok(SensitivityResult {
            sensitivities,
            most_sensitive,
            least_sensitive,
        })
    }
    /// Current state vector.
    pub fn state_vector(&self) -> &[Complex] {
        self.state.state_vector()
    }
    /// Current measurement probabilities.
    pub fn probabilities(&self) -> Vec<f64> {
        self.state.probabilities()
    }
    /// Number of recorded operations.
    pub fn history_len(&self) -> usize {
        self.history.len()
    }
    /// Number of qubits.
    pub fn num_qubits(&self) -> u32 {
        self.num_qubits
    }
}

View File

@@ -0,0 +1,499 @@
//! # Swarm Interference
//!
//! Agents don't vote -- they *interfere*. Each agent contributes a complex
//! amplitude toward one or more actions. Conflicting agents cancel
//! (destructive interference). Reinforcing agents amplify (constructive
//! interference). The decision emerges from the interference pattern, not
//! from a majority vote or consensus protocol.
//!
//! ## Model
//!
//! - **Action**: something the swarm can do, identified by an `id` string.
//! - **Agent contribution**: a complex amplitude per action. The *magnitude*
//! encodes confidence; the *phase* encodes stance (0 = support, pi = oppose).
//! - **Decision**: for each action, sum all contributing amplitudes. The
//! resulting probability |sum|^2 determines the action's strength.
//!
//! Destructive interference naturally resolves conflicts: an action backed
//! by 3 agents at phase 0 and opposed by 3 agents at phase pi has zero net
//! amplitude, so it is detected as a deadlock.
use ruqu_core::types::Complex;
use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};
use std::collections::HashMap;
use std::f64::consts::PI;
// ---------------------------------------------------------------------------
// Public types
// ---------------------------------------------------------------------------
/// An action that agents can support or oppose.
#[derive(Debug, Clone, PartialEq)]
pub struct Action {
    /// Unique identifier; contributions are aggregated by this id.
    pub id: String,
    /// Human-readable description (not used in aggregation).
    pub description: String,
}
/// An agent's complex-amplitude contribution to one or more actions.
///
/// The amplitude encodes both confidence (magnitude) and stance (phase).
/// Phase 0 = full support, phase pi = full opposition.
pub struct AgentContribution {
    /// Identifier of the contributing agent (informational only; the
    /// aggregation logic never reads it).
    pub agent_id: String,
    /// One (action, amplitude) pair per action this agent weighs in on.
    pub amplitudes: Vec<(Action, Complex)>,
}
impl AgentContribution {
    /// Create a contribution where the agent supports or opposes a single
    /// action with the given confidence.
    ///
    /// - `confidence` in `[0, 1]` sets the magnitude.
    /// - `support = true` => phase 0 (constructive with other supporters).
    /// - `support = false` => phase pi (destructive against supporters).
    pub fn new(agent_id: &str, action: Action, confidence: f64, support: bool) -> Self {
        let stance_phase = if support { 0.0 } else { PI };
        // Delegate to the general constructor with a one-element list.
        Self::multi(
            agent_id,
            vec![(action, Complex::from_polar(confidence, stance_phase))],
        )
    }
    /// Create a contribution spanning multiple actions with explicit complex
    /// amplitudes.
    pub fn multi(agent_id: &str, amplitudes: Vec<(Action, Complex)>) -> Self {
        Self {
            agent_id: agent_id.to_string(),
            amplitudes,
        }
    }
}
/// The swarm decision engine using quantum interference.
///
/// Collects [`AgentContribution`]s and aggregates them per action id by
/// complex addition; see `decide`.
pub struct SwarmInterference {
    /// All contributions received so far, in insertion order.
    contributions: Vec<AgentContribution>,
}
/// Result of swarm interference for a single action.
#[derive(Debug)]
pub struct SwarmDecision {
    /// The action evaluated.
    pub action: Action,
    /// |total_amplitude|^2 after interference.
    pub probability: f64,
    /// Number of agents whose phase reinforced the net amplitude
    /// (angular distance to the net phase at most pi/2).
    pub constructive_count: usize,
    /// Number of agents whose phase opposed the net amplitude
    /// (angular distance to the net phase greater than pi/2).
    pub destructive_count: usize,
}
// ---------------------------------------------------------------------------
// Implementation
// ---------------------------------------------------------------------------
impl SwarmInterference {
    /// Create an empty swarm interference engine.
    pub fn new() -> Self {
        Self {
            contributions: Vec::new(),
        }
    }
    /// Add an agent's contribution to the interference pattern.
    pub fn contribute(&mut self, contribution: AgentContribution) {
        self.contributions.push(contribution);
    }
    /// Compute the interference pattern across all agents for all actions.
    ///
    /// For each unique action (matched by `action.id`):
    /// 1. Sum all agent amplitudes (complex addition).
    /// 2. Compute probability = |sum|^2.
    /// 3. Classify each contributing agent as constructive or destructive
    ///    relative to the net amplitude's phase.
    ///
    /// Returns actions sorted by probability (descending). Equal
    /// probabilities tie-break on action id so the ordering is
    /// deterministic regardless of `HashMap` iteration order.
    pub fn decide(&self) -> Vec<SwarmDecision> {
        let (action_map, amplitude_map, agent_phases_map) = self.aggregate();
        let mut decisions: Vec<SwarmDecision> = amplitude_map
            .into_iter()
            .map(|(id, total)| {
                let probability = total.norm_sq();
                let net_phase = total.arg();
                // Count constructive vs destructive contributors: within a
                // quarter turn of the net phase counts as reinforcing.
                let phases = agent_phases_map.get(&id).unwrap();
                let mut constructive = 0usize;
                let mut destructive = 0usize;
                for &agent_phase in phases {
                    let delta = Self::phase_distance(agent_phase, net_phase);
                    if delta <= PI / 2.0 {
                        constructive += 1;
                    } else {
                        destructive += 1;
                    }
                }
                SwarmDecision {
                    action: action_map[&id].clone(),
                    probability,
                    constructive_count: constructive,
                    destructive_count: destructive,
                }
            })
            .collect();
        // Sort by probability descending, then by id. The id tie-break is
        // required for reproducibility: the input order comes from a
        // HashMap, whose iteration order is unspecified, so relying on
        // sort stability alone would make equal-probability orderings flaky.
        decisions.sort_by(|a, b| {
            b.probability
                .partial_cmp(&a.probability)
                .unwrap_or(std::cmp::Ordering::Equal)
                .then_with(|| a.action.id.cmp(&b.action.id))
        });
        decisions
    }
    /// Return the winning action (highest probability after interference).
    pub fn winner(&self) -> Option<SwarmDecision> {
        let decisions = self.decide();
        decisions.into_iter().next()
    }
    /// Run `num_trials` decisions with additive quantum noise and return
    /// win counts: `Vec<(Action, wins)>` sorted by wins descending.
    ///
    /// Each trial adds a small random complex perturbation to every agent
    /// amplitude before summing. This models environmental noise and shows
    /// the stability of the interference pattern. With a fixed `seed` the
    /// outcome is fully reproducible: per-trial winners and the returned
    /// ordering both tie-break on action id rather than on the unspecified
    /// `HashMap` iteration order.
    pub fn decide_with_noise(
        &self,
        noise_level: f64,
        num_trials: usize,
        seed: u64,
    ) -> Vec<(Action, usize)> {
        let mut rng = StdRng::seed_from_u64(seed);
        let mut win_counts: HashMap<String, (Action, usize)> = HashMap::new();
        for _ in 0..num_trials {
            // Aggregate with noise.
            let mut amplitude_map: HashMap<String, Complex> = HashMap::new();
            let mut action_map: HashMap<String, Action> = HashMap::new();
            for contrib in &self.contributions {
                for (action, amp) in &contrib.amplitudes {
                    action_map
                        .entry(action.id.clone())
                        .or_insert_with(|| action.clone());
                    // Add noise: random complex perturbation with magnitude up
                    // to `noise_level`.
                    let noise_r = rng.gen::<f64>() * noise_level;
                    let noise_theta = rng.gen::<f64>() * 2.0 * PI;
                    let noise = Complex::from_polar(noise_r, noise_theta);
                    let noisy_amp = *amp + noise;
                    let entry = amplitude_map
                        .entry(action.id.clone())
                        .or_insert(Complex::ZERO);
                    *entry = *entry + noisy_amp;
                }
            }
            // Find winner for this trial; tie-break on id so the winner does
            // not depend on HashMap iteration order.
            if let Some((winner_id, _)) = amplitude_map.iter().max_by(|a, b| {
                a.1.norm_sq()
                    .partial_cmp(&b.1.norm_sq())
                    .unwrap_or(std::cmp::Ordering::Equal)
                    .then_with(|| a.0.cmp(b.0))
            }) {
                let entry = win_counts
                    .entry(winner_id.clone())
                    .or_insert_with(|| (action_map[winner_id].clone(), 0));
                entry.1 += 1;
            }
        }
        let mut result: Vec<(Action, usize)> = win_counts.into_values().collect();
        // Descending by wins, then ascending by id for a reproducible order.
        result.sort_by(|a, b| b.1.cmp(&a.1).then_with(|| a.0.id.cmp(&b.0.id)));
        result
    }
    /// Check if the decision is deadlocked.
    ///
    /// A deadlock is detected when the top two actions have probabilities
    /// within `epsilon` of each other.
    pub fn is_deadlocked(&self, epsilon: f64) -> bool {
        let decisions = self.decide();
        if decisions.len() < 2 {
            return false;
        }
        (decisions[0].probability - decisions[1].probability).abs() <= epsilon
    }
    /// Clear all contributions, resetting the engine.
    pub fn reset(&mut self) {
        self.contributions.clear();
    }
    // -----------------------------------------------------------------------
    // Private helpers
    // -----------------------------------------------------------------------
    /// Aggregate all contributions by action id.
    ///
    /// Returns:
    /// - action_map: id -> canonical Action
    /// - amplitude_map: id -> summed complex amplitude
    /// - agent_phases_map: id -> list of each agent's contributing phase
    fn aggregate(
        &self,
    ) -> (
        HashMap<String, Action>,
        HashMap<String, Complex>,
        HashMap<String, Vec<f64>>,
    ) {
        let mut action_map: HashMap<String, Action> = HashMap::new();
        let mut amplitude_map: HashMap<String, Complex> = HashMap::new();
        let mut agent_phases_map: HashMap<String, Vec<f64>> = HashMap::new();
        for contrib in &self.contributions {
            for (action, amp) in &contrib.amplitudes {
                action_map
                    .entry(action.id.clone())
                    .or_insert_with(|| action.clone());
                let entry = amplitude_map
                    .entry(action.id.clone())
                    .or_insert(Complex::ZERO);
                *entry = *entry + *amp;
                agent_phases_map
                    .entry(action.id.clone())
                    .or_insert_with(Vec::new)
                    .push(amp.arg());
            }
        }
        (action_map, amplitude_map, agent_phases_map)
    }
    /// Absolute angular distance between two phases, in [0, pi].
    fn phase_distance(a: f64, b: f64) -> f64 {
        let mut d = (a - b).abs() % (2.0 * PI);
        if d > PI {
            d = 2.0 * PI - d;
        }
        d
    }
}
impl Default for SwarmInterference {
fn default() -> Self {
Self::new()
}
}
// ---------------------------------------------------------------------------
// Tests
// ---------------------------------------------------------------------------
#[cfg(test)]
mod tests {
    use super::*;
    // Convenience constructor: an Action whose id and description are both `id`.
    fn action(id: &str) -> Action {
        Action {
            id: id.to_string(),
            description: id.to_string(),
        }
    }
    // One supporter with confidence 0.8 -> probability |0.8|^2.
    #[test]
    fn single_agent_support() {
        let mut swarm = SwarmInterference::new();
        swarm.contribute(AgentContribution::new("alice", action("deploy"), 0.8, true));
        let decisions = swarm.decide();
        assert_eq!(decisions.len(), 1);
        assert_eq!(decisions[0].action.id, "deploy");
        // probability = |0.8|^2 = 0.64
        assert!((decisions[0].probability - 0.64).abs() < 1e-10);
        assert_eq!(decisions[0].constructive_count, 1);
        assert_eq!(decisions[0].destructive_count, 0);
    }
    // Aligned amplitudes add before squaring: support scales quadratically.
    #[test]
    fn constructive_interference() {
        let mut swarm = SwarmInterference::new();
        // 3 agents all support "deploy" with confidence 1.0
        swarm.contribute(AgentContribution::new("a", action("deploy"), 1.0, true));
        swarm.contribute(AgentContribution::new("b", action("deploy"), 1.0, true));
        swarm.contribute(AgentContribution::new("c", action("deploy"), 1.0, true));
        let decisions = swarm.decide();
        // Net amplitude = 3.0, probability = 9.0
        assert!((decisions[0].probability - 9.0).abs() < 1e-10);
        assert_eq!(decisions[0].constructive_count, 3);
        assert_eq!(decisions[0].destructive_count, 0);
    }
    // Equal support and opposition cancel to ~zero probability.
    #[test]
    fn destructive_interference_cancels() {
        let mut swarm = SwarmInterference::new();
        // 2 agents support, 2 oppose with equal confidence
        swarm.contribute(AgentContribution::new("a", action("deploy"), 1.0, true));
        swarm.contribute(AgentContribution::new("b", action("deploy"), 1.0, true));
        swarm.contribute(AgentContribution::new("c", action("deploy"), 1.0, false));
        swarm.contribute(AgentContribution::new("d", action("deploy"), 1.0, false));
        let decisions = swarm.decide();
        // Net amplitude ~ 0, probability ~ 0
        assert!(decisions[0].probability < 1e-10);
    }
    #[test]
    fn partial_cancellation() {
        let mut swarm = SwarmInterference::new();
        // 3 support, 1 opposes => net amplitude ~ 2.0
        swarm.contribute(AgentContribution::new("a", action("deploy"), 1.0, true));
        swarm.contribute(AgentContribution::new("b", action("deploy"), 1.0, true));
        swarm.contribute(AgentContribution::new("c", action("deploy"), 1.0, true));
        swarm.contribute(AgentContribution::new("d", action("deploy"), 1.0, false));
        let decisions = swarm.decide();
        // Net amplitude = 3 - 1 = 2, probability = 4.0
        assert!((decisions[0].probability - 4.0).abs() < 1e-10);
        assert_eq!(decisions[0].constructive_count, 3);
        assert_eq!(decisions[0].destructive_count, 1);
    }
    // decide() orders results by descending probability.
    #[test]
    fn multiple_actions_sorted_by_probability() {
        let mut swarm = SwarmInterference::new();
        // Action "deploy": 2 supporters
        swarm.contribute(AgentContribution::new("a", action("deploy"), 1.0, true));
        swarm.contribute(AgentContribution::new("b", action("deploy"), 1.0, true));
        // Action "rollback": 3 supporters
        swarm.contribute(AgentContribution::new("c", action("rollback"), 1.0, true));
        swarm.contribute(AgentContribution::new("d", action("rollback"), 1.0, true));
        swarm.contribute(AgentContribution::new("e", action("rollback"), 1.0, true));
        let decisions = swarm.decide();
        assert_eq!(decisions.len(), 2);
        assert_eq!(decisions[0].action.id, "rollback"); // P=9
        assert_eq!(decisions[1].action.id, "deploy"); // P=4
    }
    #[test]
    fn winner_returns_highest() {
        let mut swarm = SwarmInterference::new();
        swarm.contribute(AgentContribution::new("a", action("A"), 0.5, true));
        swarm.contribute(AgentContribution::new("b", action("B"), 1.0, true));
        let w = swarm.winner().unwrap();
        assert_eq!(w.action.id, "B");
    }
    #[test]
    fn winner_empty_swarm() {
        let swarm = SwarmInterference::new();
        assert!(swarm.winner().is_none());
    }
    // Two actions with identical probabilities are reported as deadlocked.
    #[test]
    fn deadlock_detection() {
        let mut swarm = SwarmInterference::new();
        // Two actions with exactly equal support
        swarm.contribute(AgentContribution::new("a", action("A"), 1.0, true));
        swarm.contribute(AgentContribution::new("b", action("B"), 1.0, true));
        assert!(swarm.is_deadlocked(1e-10));
    }
    #[test]
    fn no_deadlock_with_clear_winner() {
        let mut swarm = SwarmInterference::new();
        swarm.contribute(AgentContribution::new("a", action("A"), 1.0, true));
        swarm.contribute(AgentContribution::new("b", action("A"), 1.0, true));
        swarm.contribute(AgentContribution::new("c", action("B"), 0.1, true));
        assert!(!swarm.is_deadlocked(0.01));
    }
    #[test]
    fn reset_clears_contributions() {
        let mut swarm = SwarmInterference::new();
        swarm.contribute(AgentContribution::new("a", action("X"), 1.0, true));
        assert_eq!(swarm.decide().len(), 1);
        swarm.reset();
        assert!(swarm.decide().is_empty());
    }
    // A single agent may spread complex amplitude across several actions.
    #[test]
    fn multi_contribution() {
        let mut swarm = SwarmInterference::new();
        swarm.contribute(AgentContribution::multi(
            "alice",
            vec![
                (action("A"), Complex::new(0.5, 0.0)),
                (action("B"), Complex::new(0.0, 0.3)),
            ],
        ));
        let decisions = swarm.decide();
        assert_eq!(decisions.len(), 2);
    }
    // Seeded noise trials must be fully deterministic.
    #[test]
    fn noise_trials_are_reproducible() {
        let mut swarm = SwarmInterference::new();
        swarm.contribute(AgentContribution::new("a", action("X"), 1.0, true));
        swarm.contribute(AgentContribution::new("b", action("Y"), 0.5, true));
        let r1 = swarm.decide_with_noise(0.1, 100, 42);
        let r2 = swarm.decide_with_noise(0.1, 100, 42);
        // Same seed -> same results.
        assert_eq!(r1.len(), r2.len());
        for i in 0..r1.len() {
            assert_eq!(r1[i].0.id, r2[i].0.id);
            assert_eq!(r1[i].1, r2[i].1);
        }
    }
    // Small noise should not flip a decision with a large probability gap.
    #[test]
    fn noise_preserves_strong_winner() {
        let mut swarm = SwarmInterference::new();
        // Action "A" has overwhelming support.
        swarm.contribute(AgentContribution::new("a", action("A"), 1.0, true));
        swarm.contribute(AgentContribution::new("b", action("A"), 1.0, true));
        swarm.contribute(AgentContribution::new("c", action("A"), 1.0, true));
        // Action "B" has weak support.
        swarm.contribute(AgentContribution::new("d", action("B"), 0.1, true));
        let results = swarm.decide_with_noise(0.05, 200, 7);
        // "A" should win the vast majority of trials.
        assert_eq!(results[0].0.id, "A");
        assert!(results[0].1 > 150, "A should win most trials");
    }
    #[test]
    fn default_trait() {
        let swarm = SwarmInterference::default();
        assert!(swarm.decide().is_empty());
    }
    #[test]
    fn complete_cancellation_detects_deadlock() {
        let mut swarm = SwarmInterference::new();
        // Perfect cancellation on a single action.
        swarm.contribute(AgentContribution::new("a", action("X"), 1.0, true));
        swarm.contribute(AgentContribution::new("b", action("X"), 1.0, false));
        let decisions = swarm.decide();
        assert_eq!(decisions.len(), 1);
        assert!(decisions[0].probability < 1e-10);
    }
}

View File

@@ -0,0 +1,383 @@
//! # Syndrome Diagnosis -- QEC-Based System Fault Localization
//!
//! Treats AI system components as a graph. Injects artificial faults into
//! a quantum encoding of the system. Extracts syndrome patterns using QEC logic
//! (CNOT parity checks on ancilla qubits) to localize which component is fragile.
//!
//! This is **structural fault localization**, not log analysis. Multiple rounds
//! of fault injection build statistical fragility profiles and detect components
//! that propagate faults beyond their direct neighborhood.
use ruqu_core::error::QuantumError;
use ruqu_core::gate::Gate;
use ruqu_core::state::QuantumState;
use rand::rngs::StdRng;
use rand::{Rng, SeedableRng};
/// A system component (node in the diagnostic graph)
#[derive(Debug, Clone)]
pub struct Component {
    /// Identifier reported back in diagnosis results.
    pub id: String,
    /// Encoded as an Ry rotation away from |0> during a diagnostic round.
    pub health: f64, // 1.0 = healthy, 0.0 = failed
}
/// An edge between two components (dependency/connection)
#[derive(Debug, Clone)]
pub struct Connection {
    /// Index of the source component in the component list.
    pub from: usize,
    /// Index of the target component in the component list.
    pub to: usize,
    // NOTE(review): `strength` is carried but never read by the current
    // diagnosis logic — presumably reserved for weighted coupling; confirm.
    pub strength: f64, // coupling strength
}
/// Configuration for syndrome-based diagnosis.
///
/// `Debug` and `Clone` are derived so configurations can be logged and
/// reused across diagnosis runs.
#[derive(Debug, Clone)]
pub struct DiagnosisConfig {
    /// Per-component probability (0.0..=1.0) of an injected X fault per round.
    pub fault_injection_rate: f64,
    /// Number of independent fault-injection rounds to run.
    pub num_rounds: usize,
    /// Base RNG seed; each round derives its own seed from this.
    pub seed: u64,
}
/// Result of a single diagnostic round
#[derive(Debug)]
pub struct DiagnosticRound {
    /// One parity bit per connection; `true` means the stabilizer fired.
    pub syndrome: Vec<bool>,
    /// Indices of components that received an injected X fault this round.
    pub injected_faults: Vec<usize>,
}
/// Overall diagnosis result
#[derive(Debug)]
pub struct DiagnosisResult {
    /// Per-round syndromes and injected faults, in execution order.
    pub rounds: Vec<DiagnosticRound>,
    /// Fragility score per component: how often it shows up in syndrome patterns
    pub fragility_scores: Vec<(String, f64)>,
    /// Most fragile component
    pub weakest_component: Option<String>,
    /// Components that propagate faults (appear in syndromes without being directly faulted)
    pub fault_propagators: Vec<String>,
}
/// System graph for syndrome-based diagnosis.
///
/// Maps a system topology to a quantum circuit:
/// - Each component becomes a data qubit
/// - Each connection becomes an ancilla qubit for parity measurement
/// - Total qubits = num_components + num_connections (must be <= 25)
pub struct SystemDiagnostics {
    // Data qubits: one per component, indexed by position in this Vec.
    components: Vec<Component>,
    // Ancilla qubits: one per connection, placed after all data qubits.
    connections: Vec<Connection>,
}
impl SystemDiagnostics {
    /// Build a diagnostics engine over the given topology.
    ///
    /// Connection endpoints are not validated here; an out-of-range index
    /// will surface as a gate error (or panic) during diagnosis.
    pub fn new(components: Vec<Component>, connections: Vec<Connection>) -> Self {
        Self {
            components,
            connections,
        }
    }
    /// Run a single diagnostic round:
    /// 1. Encode components as qubits (healthy=|0>, degraded=Ry rotation)
    /// 2. Inject random faults (X gates on selected qubits)
    /// 3. Extract syndrome using ancilla qubits measuring parity of connected pairs
    ///
    /// # Errors
    /// Returns an error if the total qubit count exceeds the simulator
    /// limit (25) or if any gate application / measurement fails.
    pub fn run_round(
        &self,
        config: &DiagnosisConfig,
        round_seed: u64,
    ) -> Result<DiagnosticRound, QuantumError> {
        let num_components = self.components.len();
        let num_connections = self.connections.len();
        let total_qubits = (num_components + num_connections) as u32;
        // Data qubits plus ancilla qubits must fit in the simulator.
        if total_qubits > 25 {
            return Err(QuantumError::QubitLimitExceeded {
                requested: total_qubits,
                maximum: 25,
            });
        }
        let mut state = QuantumState::new_with_seed(total_qubits, round_seed)?;
        let mut rng = StdRng::seed_from_u64(round_seed);
        // 1. Encode component health as a rotation away from |0>: fully
        //    healthy stays at |0>, health 0.0 is rotated by pi/2.
        for (i, comp) in self.components.iter().enumerate() {
            let angle = std::f64::consts::FRAC_PI_2 * (1.0 - comp.health);
            if angle.abs() > 1e-15 {
                state.apply_gate(&Gate::Ry(i as u32, angle))?;
            }
        }
        // 2. Inject faults: each component is bit-flipped with probability
        //    `fault_injection_rate`.
        let mut injected = Vec::new();
        for i in 0..num_components {
            if rng.gen::<f64>() < config.fault_injection_rate {
                state.apply_gate(&Gate::X(i as u32))?;
                injected.push(i);
            }
        }
        // 3. For each connection, use an ancilla qubit to check the parity
        //    of its two endpoints (CNOT-based stabilizer measurement).
        let mut syndrome = Vec::with_capacity(num_connections);
        for (ci, conn) in self.connections.iter().enumerate() {
            let ancilla = (num_components + ci) as u32;
            state.reset_qubit(ancilla)?;
            state.apply_gate(&Gate::CNOT(conn.from as u32, ancilla))?;
            state.apply_gate(&Gate::CNOT(conn.to as u32, ancilla))?;
            let outcome = state.measure(ancilla)?;
            syndrome.push(outcome.result);
        }
        Ok(DiagnosticRound {
            syndrome,
            injected_faults: injected,
        })
    }
    /// Run full diagnosis: multiple rounds of fault injection + syndrome extraction.
    /// Accumulates statistics to identify fragile and fault-propagating components.
    ///
    /// With `config.num_rounds == 0` this returns an empty result whose
    /// fragility scores are all 0.0 (previously a 0/0 division yielded NaN).
    pub fn diagnose(&self, config: &DiagnosisConfig) -> Result<DiagnosisResult, QuantumError> {
        let mut rounds = Vec::new();
        let mut fault_counts = vec![0usize; self.components.len()];
        let mut syndrome_counts = vec![0usize; self.components.len()];
        for round in 0..config.num_rounds {
            // Derive a distinct, reproducible seed per round; fully wrapping
            // arithmetic so debug builds cannot overflow-panic.
            let round_seed = config
                .seed
                .wrapping_add((round as u64).wrapping_mul(1000));
            let result = self.run_round(config, round_seed)?;
            // Count which components were directly faulted
            for &idx in &result.injected_faults {
                fault_counts[idx] += 1;
            }
            // Count which components appear in triggered syndromes
            for (ci, &fired) in result.syndrome.iter().enumerate() {
                if fired {
                    let conn = &self.connections[ci];
                    syndrome_counts[conn.from] += 1;
                    syndrome_counts[conn.to] += 1;
                }
            }
            rounds.push(result);
        }
        // Fragility = fraction of rounds in which the component appeared in a
        // fired syndrome. Guard the denominator so num_rounds == 0 gives 0.0
        // instead of NaN (syndrome_counts are all 0 in that case anyway).
        let denom = config.num_rounds.max(1) as f64;
        let fragility_scores: Vec<(String, f64)> = self
            .components
            .iter()
            .enumerate()
            .map(|(i, c)| (c.id.clone(), syndrome_counts[i] as f64 / denom))
            .collect();
        let weakest = fragility_scores
            .iter()
            .max_by(|a, b| a.1.partial_cmp(&b.1).unwrap_or(std::cmp::Ordering::Equal))
            .map(|(name, _)| name.clone());
        // Fault propagators: components that appear in syndromes noticeably
        // more often than they were directly faulted.
        let fault_propagators: Vec<String> = self
            .components
            .iter()
            .enumerate()
            .filter(|(i, _)| syndrome_counts[*i] > fault_counts[*i] + config.num_rounds / 4)
            .map(|(_, c)| c.id.clone())
            .collect();
        Ok(DiagnosisResult {
            rounds,
            fragility_scores,
            weakest_component: weakest,
            fault_propagators,
        })
    }
    /// Get the number of components
    pub fn num_components(&self) -> usize {
        self.components.len()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Build a healthy chain: n components connected 0-1-2-...-(n-1).
    fn make_linear_system(n: usize) -> (Vec<Component>, Vec<Connection>) {
        let components: Vec<Component> = (0..n)
            .map(|i| Component {
                id: format!("comp_{}", i),
                health: 1.0,
            })
            .collect();
        let connections: Vec<Connection> = (0..n.saturating_sub(1))
            .map(|i| Connection {
                from: i,
                to: i + 1,
                strength: 1.0,
            })
            .collect();
        (components, connections)
    }
    #[test]
    fn test_new_system() {
        let (comps, conns) = make_linear_system(5);
        let diag = SystemDiagnostics::new(comps, conns);
        assert_eq!(diag.num_components(), 5);
    }
    // Topologies exceeding the 25-qubit simulator limit must be rejected.
    #[test]
    fn test_qubit_limit_check() {
        // 15 components + 14 connections = 29 > 25
        let (comps, conns) = make_linear_system(15);
        let diag = SystemDiagnostics::new(comps, conns);
        let config = DiagnosisConfig {
            fault_injection_rate: 0.0,
            num_rounds: 1,
            seed: 42,
        };
        let result = diag.run_round(&config, 42);
        assert!(result.is_err());
    }
    // A fault-free round over healthy components fires no syndrome bits.
    #[test]
    fn test_single_round_no_faults() {
        let (comps, conns) = make_linear_system(5);
        let diag = SystemDiagnostics::new(comps, conns);
        let config = DiagnosisConfig {
            fault_injection_rate: 0.0,
            num_rounds: 1,
            seed: 42,
        };
        let round = diag.run_round(&config, 42).unwrap();
        // No faults injected -> no syndrome should fire (all healthy components
        // are in |0>, parity checks should agree)
        assert!(round.injected_faults.is_empty());
        assert!(round.syndrome.iter().all(|&s| !s));
    }
    #[test]
    fn test_diagnose_no_faults() {
        let (comps, conns) = make_linear_system(5);
        let diag = SystemDiagnostics::new(comps, conns);
        let config = DiagnosisConfig {
            fault_injection_rate: 0.0,
            num_rounds: 10,
            seed: 42,
        };
        let result = diag.diagnose(&config).unwrap();
        assert_eq!(result.rounds.len(), 10);
        // No faults -> all fragility scores should be 0
        for (_, score) in &result.fragility_scores {
            assert!((*score - 0.0).abs() < 1e-10);
        }
    }
    // With a 50% injection rate, some stabilizers are expected to fire.
    #[test]
    fn test_diagnose_with_faults() {
        let (comps, conns) = make_linear_system(5);
        let diag = SystemDiagnostics::new(comps, conns);
        let config = DiagnosisConfig {
            fault_injection_rate: 0.5,
            num_rounds: 20,
            seed: 123,
        };
        let result = diag.diagnose(&config).unwrap();
        assert_eq!(result.rounds.len(), 20);
        assert_eq!(result.fragility_scores.len(), 5);
        // At least some syndromes should fire with 50% fault rate
        let total_fired: usize = result
            .rounds
            .iter()
            .map(|r| r.syndrome.iter().filter(|&&s| s).count())
            .sum();
        assert!(total_fired > 0);
    }
    #[test]
    fn test_weakest_component_identified() {
        let (comps, conns) = make_linear_system(5);
        let diag = SystemDiagnostics::new(comps, conns);
        let config = DiagnosisConfig {
            fault_injection_rate: 0.3,
            num_rounds: 50,
            seed: 999,
        };
        let result = diag.diagnose(&config).unwrap();
        // Should identify a weakest component
        assert!(result.weakest_component.is_some());
    }
    // Degraded health (Ry-rotated qubit) must still produce a valid round.
    #[test]
    fn test_degraded_components() {
        // One component is already degraded
        let mut comps: Vec<Component> = (0..5)
            .map(|i| Component {
                id: format!("comp_{}", i),
                health: 1.0,
            })
            .collect();
        comps[2].health = 0.3; // Component 2 is degraded
        let conns: Vec<Connection> = (0..4)
            .map(|i| Connection {
                from: i,
                to: i + 1,
                strength: 1.0,
            })
            .collect();
        let diag = SystemDiagnostics::new(comps, conns);
        let config = DiagnosisConfig {
            fault_injection_rate: 0.0,
            num_rounds: 1,
            seed: 42,
        };
        // Should run without error
        let round = diag.run_round(&config, 42).unwrap();
        assert_eq!(round.syndrome.len(), 4);
    }
    #[test]
    fn test_max_qubit_boundary() {
        // 13 components + 12 connections = 25 qubits (exactly at limit)
        let (comps, conns) = make_linear_system(13);
        let diag = SystemDiagnostics::new(comps, conns);
        let config = DiagnosisConfig {
            fault_injection_rate: 0.0,
            num_rounds: 1,
            seed: 42,
        };
        let result = diag.run_round(&config, 42);
        assert!(result.is_ok());
    }
    // An edgeless graph yields empty syndromes every round.
    #[test]
    fn test_empty_connections() {
        // Components with no connections
        let comps = vec![
            Component {
                id: "a".into(),
                health: 1.0,
            },
            Component {
                id: "b".into(),
                health: 1.0,
            },
        ];
        let diag = SystemDiagnostics::new(comps, vec![]);
        let config = DiagnosisConfig {
            fault_injection_rate: 0.0,
            num_rounds: 5,
            seed: 42,
        };
        let result = diag.diagnose(&config).unwrap();
        // No connections -> no syndrome bits
        for round in &result.rounds {
            assert!(round.syndrome.is_empty());
        }
    }
}