# SONA Component Integration

## Overview

This document details how SONA integrates with the ruvector ecosystem and exo-ai cognitive crates to create a unified self-improving architecture.

## Integration Architecture

```
┌──────────────────────────────────────────────────────────────────────┐
│                        SONA Integration Layer                        │
├──────────────────────────────────────────────────────────────────────┤
│  ┌─────────────┐  ┌─────────────┐  ┌─────────────┐  ┌─────────────┐  │
│  │  Learning   │  │   Router    │  │  Attention  │  │   Memory    │  │
│  │   Engine    │  │   Engine    │  │   Engine    │  │   Engine    │  │
│  └──────┬──────┘  └──────┬──────┘  └──────┬──────┘  └──────┬──────┘  │
│         │                │                │                │         │
├─────────┼────────────────┼────────────────┼────────────────┼─────────┤
│         │                │                │                │         │
│  ┌──────▼──────────────────────────────────────────────────▼──────┐  │
│  │                         ruvector Crates                        │  │
│  │  ┌─────────┐ ┌─────────┐ ┌─────────┐ ┌─────────┐ ┌─────────┐   │  │
│  │  │  core   │ │attention│ │   gnn   │ │postgres │ │ sparse  │   │  │
│  │  │ (HNSW)  │ │(39 mech)│ │  (GNN)  │ │(persist)│ │(vectors)│   │  │
│  │  └─────────┘ └─────────┘ └─────────┘ └─────────┘ └─────────┘   │  │
│  └────────────────────────────────────────────────────────────────┘  │
│                                                                      │
│  ┌────────────────────────────────────────────────────────────────┐  │
│  │                          exo-ai Crates                         │  │
│  │  ┌─────────┐ ┌─────────┐ ┌─────────┐ ┌─────────┐ ┌─────────┐   │  │
│  │  │exo-core │ │temporal │ │ exotic  │ │ memory  │ │attention│   │  │
│  │  │ (IIT/Φ) │ │(cycles) │ │(quantum)│ │(dreams) │ │  (39)   │   │  │
│  │  └─────────┘ └─────────┘ └─────────┘ └─────────┘ └─────────┘   │  │
│  └────────────────────────────────────────────────────────────────┘  │
└──────────────────────────────────────────────────────────────────────┘
```

## ruvector Crate Integration

### 1. ruvector-core (HNSW Index)

**Purpose**: High-performance approximate nearest neighbor search for pattern retrieval.

```rust
use ruvector_core::{HnswIndex, HnswConfig, SearchParams, IndexError};
use rayon::prelude::*;

/// Pattern index using HNSW for sub-millisecond retrieval
pub struct PatternIndex {
    index: HnswIndex<f32>,
    config: HnswConfig,
    metrics: IndexMetrics,
}

impl PatternIndex {
    pub fn new(dim: usize, max_patterns: usize) -> Self {
        // Build the config once and share it between the index and the
        // struct, so search-time `ef_search` matches what the index was
        // built with.
        let config = HnswConfig {
            m: 16,                // Connections per node
            ef_construction: 200, // Build quality
            ef_search: 50,        // Search quality
            max_elements: max_patterns,
            dimension: dim,
        };
        Self {
            index: HnswIndex::new(config.clone()),
            config,
            metrics: IndexMetrics::default(),
        }
    }

    /// Add pattern embedding to index
    pub fn add_pattern(&mut self, id: u64, embedding: &[f32]) -> Result<(), IndexError> {
        self.index.insert(id, embedding)?;
        self.metrics.total_patterns += 1;
        Ok(())
    }

    /// Find k nearest patterns
    pub fn find_similar(&self, query: &[f32], k: usize) -> Vec<(u64, f32)> {
        self.index.search(query, k, SearchParams {
            ef: self.config.ef_search,
        })
    }

    /// Batch search for multiple queries (parallelized with rayon)
    pub fn batch_search(&self, queries: &[Vec<f32>], k: usize) -> Vec<Vec<(u64, f32)>> {
        queries.par_iter()
            .map(|q| self.find_similar(q, k))
            .collect()
    }
}

#[derive(Default)]
pub struct IndexMetrics {
    pub total_patterns: usize,
    pub avg_search_time_us: f64,
    pub cache_hit_rate: f32,
}
```

**Integration Points**:
- ReasoningBank pattern storage
- Dream memory retrieval
- Router context lookup
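
For orientation, a minimal usage sketch of `PatternIndex`; the ID, dimensionality, and embedding values are illustrative placeholders, not part of the crate API:

```rust
// Minimal usage sketch for the PatternIndex above. The 256-dim embeddings
// and the pattern ID 42 are illustrative placeholders.
fn index_usage_sketch() -> Result<(), IndexError> {
    let mut index = PatternIndex::new(256, 100_000);

    // Insert a pattern embedding under ID 42
    let embedding = vec![0.1f32; 256];
    index.add_pattern(42, &embedding)?;

    // Retrieve the 10 nearest stored patterns for a query embedding;
    // results are (pattern_id, distance) pairs, closest first
    let query = vec![0.09f32; 256];
    for (id, dist) in index.find_similar(&query, 10) {
        println!("pattern {id} at distance {dist:.4}");
    }
    Ok(())
}
```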

### 2. ruvector-attention (39 Mechanisms)

**Purpose**: Diverse attention mechanisms for different reasoning patterns.

```rust
use ruvector_attention::{
    AttentionMechanism, MultiHeadAttention, LinearAttention,
    SparseAttention, FlashAttention, KernelizedAttention
};
use std::time::Instant;

/// Adaptive attention selector based on query characteristics
pub struct AdaptiveAttention {
    mechanisms: Vec<Box<dyn AttentionMechanism>>,
    router: AttentionRouter,
    performance_tracker: PerformanceTracker,
}

impl AdaptiveAttention {
    pub fn new(hidden_dim: usize, num_heads: usize) -> Self {
        Self {
            mechanisms: vec![
                Box::new(MultiHeadAttention::new(hidden_dim, num_heads)),
                Box::new(LinearAttention::new(hidden_dim)),
                Box::new(SparseAttention::new(hidden_dim, 0.1)), // 10% sparsity
                Box::new(FlashAttention::new(hidden_dim, num_heads)),
                Box::new(KernelizedAttention::new(hidden_dim, "elu")),
            ],
            router: AttentionRouter::new(5),
            performance_tracker: PerformanceTracker::new(),
        }
    }

    /// Select optimal attention mechanism based on context
    pub fn forward(&mut self, q: &Tensor, k: &Tensor, v: &Tensor) -> Tensor {
        // Analyze query characteristics
        let features = self.analyze_query(q);

        // Route to best mechanism
        let mechanism_idx = self.router.route(&features);

        // Execute attention
        let start = Instant::now();
        let output = self.mechanisms[mechanism_idx].forward(q, k, v);
        let elapsed = start.elapsed();

        // Track performance
        self.performance_tracker.record(mechanism_idx, elapsed);

        output
    }

    fn analyze_query(&self, q: &Tensor) -> AttentionFeatures {
        AttentionFeatures {
            sequence_length: q.shape()[1],
            sparsity: q.sparsity_ratio(),
            entropy: q.attention_entropy(),
            locality: q.attention_locality(),
        }
    }
}

/// Query characteristics used for routing decisions
pub struct AttentionFeatures {
    pub sequence_length: usize,
    pub sparsity: f32,
    pub entropy: f32,
    pub locality: f32,
}

/// Routes queries to optimal attention mechanism
pub struct AttentionRouter {
    weights: Vec<f32>,
    history: CircularBuffer<RoutingDecision>,
}

pub struct RoutingDecision {
    pub decision: usize,
    pub quality: f32,
}

impl AttentionRouter {
    pub fn route(&self, features: &AttentionFeatures) -> usize {
        // Decision logic based on features
        if features.sequence_length > 4096 {
            2 // SparseAttention for long sequences
        } else if features.sparsity > 0.5 {
            2 // SparseAttention for sparse patterns
        } else if features.locality > 0.8 {
            3 // FlashAttention for local patterns
        } else {
            0 // Default: MultiHeadAttention
        }
    }

    pub fn update_from_feedback(&mut self, decision: usize, quality: f32) {
        self.history.push(RoutingDecision { decision, quality });
        // Online learning of routing weights (EMA toward observed quality)
        self.weights[decision] += 0.01 * (quality - self.weights[decision]);
    }
}
```

**Integration Points**:
- Query processing pipeline
- Dream pattern recognition
- Cross-memory attention
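
To make the feedback path concrete, a sketch of how a downstream quality score would close the routing loop; `response_quality` is a hypothetical score in [0, 1], not a crate API:

```rust
// Sketch of the router feedback loop, assuming the AttentionRouter and
// AttentionFeatures types above. `response_quality` is a hypothetical
// downstream score in [0, 1].
fn routing_feedback_sketch(
    router: &mut AttentionRouter,
    features: &AttentionFeatures,
    response_quality: f32,
) {
    // 1. Route based on the static heuristics
    let mechanism_idx = router.route(features);

    // 2. ... run the chosen mechanism, produce a response ...

    // 3. Nudge the learned weight for that mechanism toward the observed
    //    quality (EMA with rate 0.01, matching update_from_feedback)
    router.update_from_feedback(mechanism_idx, response_quality);
}
```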

### 3. ruvector-gnn (Graph Neural Networks)

**Purpose**: Graph-based reasoning over knowledge structures.

```rust
use ruvector_gnn::{GraphConv, GraphAttention, MessagePassing};
use std::collections::HashMap;
use std::time::Instant;

/// Knowledge graph reasoning with GNN
pub struct KnowledgeGraph {
    nodes: HashMap<NodeId, NodeEmbedding>,
    edges: Vec<Edge>,
    gnn: GraphNeuralNetwork,
    graph_index: GraphIndex,
}

#[derive(Clone)]
pub struct NodeEmbedding {
    pub id: NodeId,
    pub embedding: Vec<f32>,
    pub node_type: NodeType,
    pub importance: f32,
    pub last_accessed: Instant,
}

#[derive(Clone, Copy)]
pub enum NodeType {
    Concept,
    Pattern,
    Episode,
    Procedure,
    Dream,
}

#[derive(Clone, Copy)]
pub enum EdgeType {
    Pattern,
    // ... other edge kinds elided
}

#[derive(Clone)]
pub struct Edge {
    pub from: NodeId,
    pub to: NodeId,
    pub edge_type: EdgeType,
    pub weight: f32,
}

impl KnowledgeGraph {
    pub fn new(embedding_dim: usize, hidden_dim: usize) -> Self {
        Self {
            nodes: HashMap::new(),
            edges: Vec::new(),
            gnn: GraphNeuralNetwork::new(embedding_dim, hidden_dim, 3), // 3 layers
            graph_index: GraphIndex::new(),
        }
    }

    /// Add node to knowledge graph
    pub fn add_node(&mut self, id: NodeId, embedding: Vec<f32>, node_type: NodeType) {
        let node = NodeEmbedding {
            id,
            embedding: embedding.clone(),
            node_type,
            importance: 1.0,
            last_accessed: Instant::now(),
        };
        self.nodes.insert(id, node);
        self.graph_index.add(id, &embedding);
    }

    /// Create edge between nodes
    pub fn add_edge(&mut self, from: NodeId, to: NodeId, edge_type: EdgeType, weight: f32) {
        self.edges.push(Edge { from, to, edge_type, weight });
    }

    /// Propagate information through graph
    pub fn propagate(&mut self, query: &[f32], hops: usize) -> Vec<(NodeId, f32)> {
        // Find seed nodes
        let seeds = self.graph_index.search(query, 10);

        // Message passing through GNN layers
        let mut activations = HashMap::new();
        for (node_id, score) in seeds {
            activations.insert(node_id, score);
        }

        for _hop in 0..hops {
            let new_activations = self.gnn.propagate(&self.nodes, &self.edges, &activations);
            activations = new_activations;
        }

        // Return top activated nodes
        let mut results: Vec<_> = activations.into_iter().collect();
        results.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap());
        results.truncate(20);
        results
    }

    /// Learn new edge from pattern
    pub fn learn_edge(&mut self, pattern: &LearnedPattern) {
        // Extract node relationships from the pattern
        // (find_related_nodes helper elided)
        let source_nodes = self.find_related_nodes(&pattern.centroid, pattern.cluster_size);

        for i in 0..source_nodes.len() {
            for j in i + 1..source_nodes.len() {
                let strength = cosine_similarity(
                    &self.nodes[&source_nodes[i]].embedding,
                    &self.nodes[&source_nodes[j]].embedding,
                );

                if strength > 0.7 {
                    self.add_edge(
                        source_nodes[i],
                        source_nodes[j],
                        EdgeType::Pattern,
                        strength,
                    );
                }
            }
        }
    }
}

/// Graph neural network with multiple layer types
pub struct GraphNeuralNetwork {
    layers: Vec<GnnLayer>,
    aggregator: Aggregator,
}

enum GnnLayer {
    GraphConv(GraphConv),
    GraphAttention(GraphAttention),
    MessagePassing(MessagePassing),
}
```

**Integration Points**:
- Knowledge representation
- Dream creative connections
- Pattern relationship discovery
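
A minimal usage sketch for `KnowledgeGraph`, assuming `NodeId` is a small integer type; the node IDs and embeddings are illustrative:

```rust
// Minimal usage sketch for KnowledgeGraph: two related nodes and a
// 3-hop propagation. NodeId values and embeddings are illustrative.
fn graph_usage_sketch(mut graph: KnowledgeGraph) {
    graph.add_node(1, vec![0.20f32; 256], NodeType::Concept);
    graph.add_node(2, vec![0.21f32; 256], NodeType::Pattern);
    graph.add_edge(1, 2, EdgeType::Pattern, 0.9);

    // Seed activation from a query embedding and spread it for 3 hops;
    // returns up to 20 (node_id, activation) pairs, strongest first
    let query = vec![0.2f32; 256];
    for (node, activation) in graph.propagate(&query, 3) {
        println!("node {:?}: activation {:.3}", node, activation);
    }
}
```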

### 4. ruvector-postgres (Persistence)

**Purpose**: Durable storage for learned knowledge.

```rust
use ruvector_postgres::{PgError, VectorIndex};
use sqlx::PgPool;
use lru::LruCache;
use std::num::NonZeroUsize;

/// Persistent pattern storage with PostgreSQL
/// (pgvector column type mapping assumed)
pub struct PatternStore {
    pool: PgPool,
    vector_index: VectorIndex,
    cache: LruCache<PatternId, LearnedPattern>,
}

impl PatternStore {
    pub async fn new(database_url: &str) -> Result<Self, PgError> {
        let pool = PgPool::connect(database_url).await?;

        // Initialize schema
        sqlx::query(r#"
            CREATE TABLE IF NOT EXISTS patterns (
                id            BIGSERIAL PRIMARY KEY,
                embedding     vector(256),
                centroid      vector(256),
                cluster_size  INTEGER,
                total_weight  FLOAT,
                avg_quality   FLOAT,
                created_at    TIMESTAMP DEFAULT NOW(),
                last_accessed TIMESTAMP DEFAULT NOW(),
                access_count  INTEGER DEFAULT 0,
                pattern_type  VARCHAR(50),
                metadata      JSONB
            );

            CREATE INDEX IF NOT EXISTS patterns_embedding_idx
                ON patterns USING ivfflat (embedding vector_cosine_ops);

            CREATE INDEX IF NOT EXISTS patterns_centroid_idx
                ON patterns USING hnsw (centroid vector_cosine_ops);
        "#).execute(&pool).await?;

        Ok(Self {
            pool,
            vector_index: VectorIndex::new(256),
            cache: LruCache::new(NonZeroUsize::new(10_000).unwrap()),
        })
    }

    /// Store pattern with vector embedding
    pub async fn store_pattern(&mut self, pattern: &LearnedPattern) -> Result<i64, PgError> {
        let embedding_vec: Vec<f32> = pattern.centroid.clone();

        let row = sqlx::query_scalar::<_, i64>(r#"
            INSERT INTO patterns (embedding, centroid, cluster_size, total_weight, avg_quality, pattern_type, metadata)
            VALUES ($1, $2, $3, $4, $5, $6, $7)
            RETURNING id
        "#)
        .bind(&embedding_vec)
        .bind(&embedding_vec)
        .bind(pattern.cluster_size as i32)
        .bind(pattern.total_weight)
        .bind(pattern.avg_quality)
        .bind("learned")
        .bind(serde_json::to_value(&pattern.metadata).unwrap())
        .fetch_one(&self.pool)
        .await?;

        // Update cache
        self.cache.put(row, pattern.clone());

        Ok(row)
    }

    /// Find similar patterns using vector similarity
    /// (PatternRow is the sqlx row-mapping struct; definition elided)
    pub async fn find_similar(&self, embedding: &[f32], k: usize) -> Result<Vec<LearnedPattern>, PgError> {
        let rows = sqlx::query_as::<_, PatternRow>(r#"
            SELECT id, embedding, centroid, cluster_size, total_weight, avg_quality, metadata
            FROM patterns
            ORDER BY embedding <=> $1
            LIMIT $2
        "#)
        .bind(embedding)
        .bind(k as i64)
        .fetch_all(&self.pool)
        .await?;

        Ok(rows.into_iter().map(|r| r.into()).collect())
    }

    /// Consolidate patterns (merge similar, prune weak)
    pub async fn consolidate(&mut self) -> Result<ConsolidationResult, PgError> {
        // Find patterns to merge (cosine similarity > 0.95);
        // the actual centroid-merge step is elided in this sketch
        let merge_candidates = sqlx::query(r#"
            SELECT p1.id AS id1, p2.id AS id2,
                   1 - (p1.centroid <=> p2.centroid) AS similarity
            FROM patterns p1
            JOIN patterns p2 ON p1.id < p2.id
            WHERE 1 - (p1.centroid <=> p2.centroid) > 0.95
            LIMIT 100
        "#).fetch_all(&self.pool).await?;

        // Prune weak patterns (low quality, rarely accessed, at least a week old)
        let pruned = sqlx::query(r#"
            DELETE FROM patterns
            WHERE avg_quality < 0.3
              AND access_count < 5
              AND created_at < NOW() - INTERVAL '7 days'
            RETURNING id
        "#).fetch_all(&self.pool).await?;

        Ok(ConsolidationResult {
            merged: merge_candidates.len(),
            pruned: pruned.len(),
        })
    }
}
```

**Integration Points**:
- Long-term pattern persistence
- Dream memory storage
- Knowledge graph persistence
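
A usage sketch for `PatternStore`, assuming a PostgreSQL instance with the pgvector extension; the connection string and the `LearnedPattern::default()` constructor are placeholders:

```rust
// Usage sketch for PatternStore. The DSN is a placeholder, and
// LearnedPattern::default() is a hypothetical constructor for illustration.
async fn store_usage_sketch() -> Result<(), PgError> {
    let mut store = PatternStore::new("postgres://sona:sona@localhost/sona").await?;

    // Persist a learned pattern and remember its row ID
    let pattern = LearnedPattern::default();
    let id = store.store_pattern(&pattern).await?;
    println!("stored pattern {id}");

    // Vector-similarity lookup via the `<=>` cosine-distance operator
    let neighbours = store.find_similar(&pattern.centroid, 5).await?;
    println!("{} similar patterns", neighbours.len());

    // Periodic maintenance: merge near-duplicates, prune weak rows
    let result = store.consolidate().await?;
    println!("merged {} / pruned {}", result.merged, result.pruned);
    Ok(())
}
```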

### 5. ruvector-sparse (Sparse Vectors)

**Purpose**: Efficient sparse vector operations for pattern matching.

```rust
use ruvector_sparse::{SparseVector, SparseIndex};
use rayon::prelude::*;

#[cfg(target_arch = "x86_64")]
use std::arch::x86_64::*;

/// Sparse pattern representation for efficient storage
pub struct SparsePatternStore {
    index: SparseIndex,
    patterns: Vec<SparsePattern>,
}

#[derive(Clone)]
pub struct SparsePattern {
    pub id: u64,
    pub indices: Vec<u32>,
    pub values: Vec<f32>,
    pub nnz: usize, // Non-zero count
    pub metadata: PatternMetadata,
}

impl SparsePatternStore {
    pub fn new(dim: usize) -> Self {
        Self {
            index: SparseIndex::new(dim),
            patterns: Vec::new(),
        }
    }

    /// Convert dense pattern to sparse representation
    pub fn add_pattern(&mut self, dense: &[f32], threshold: f32) -> u64 {
        let (indices, values): (Vec<u32>, Vec<f32>) = dense.iter()
            .enumerate()
            .filter(|(_, &v)| v.abs() > threshold)
            .map(|(i, &v)| (i as u32, v))
            .unzip();

        let id = self.patterns.len() as u64;
        let pattern = SparsePattern {
            id,
            nnz: indices.len(),
            indices,
            values,
            metadata: PatternMetadata::default(),
        };

        self.index.insert(id, &pattern.indices, &pattern.values);
        self.patterns.push(pattern);

        id
    }

    /// Fast sparse dot product search
    pub fn search(&self, query_indices: &[u32], query_values: &[f32], k: usize) -> Vec<(u64, f32)> {
        self.index.search_sparse(query_indices, query_values, k)
    }

    /// Batch sparse search with SIMD acceleration
    #[cfg(target_arch = "x86_64")]
    pub fn batch_search_simd(&self, queries: &[SparseVector], k: usize) -> Vec<Vec<(u64, f32)>> {
        queries.par_iter()
            .map(|q| {
                // SIMD-accelerated sparse dot products
                let mut scores = Vec::with_capacity(self.patterns.len());

                for pattern in &self.patterns {
                    let score = unsafe {
                        sparse_dot_simd(&q.indices, &q.values, &pattern.indices, &pattern.values)
                    };
                    scores.push((pattern.id, score));
                }

                // Top-k selection
                scores.sort_by(|a, b| b.1.partial_cmp(&a.1).unwrap());
                scores.truncate(k);
                scores
            })
            .collect()
    }
}

/// SIMD-accelerated sparse dot product over sorted index lists
#[cfg(target_arch = "x86_64")]
#[target_feature(enable = "avx2")]
unsafe fn sparse_dot_simd(
    idx1: &[u32], val1: &[f32],
    idx2: &[u32], val2: &[f32],
) -> f32 {
    let mut i = 0;
    let mut j = 0;
    let mut sum = _mm256_setzero_ps();

    // Merge-join with SIMD accumulation: compare 8 indices at a time and
    // accumulate the products of matching positions into `sum`
    while i + 8 <= idx1.len() && j + 8 <= idx2.len() {
        let idx1_vec = _mm256_loadu_si256(idx1[i..].as_ptr() as *const __m256i);
        let idx2_vec = _mm256_loadu_si256(idx2[j..].as_ptr() as *const __m256i);

        // ... SIMD comparison and accumulation logic elided;
        // see the scalar reference below for the exact semantics ...

        i += 8;
        j += 8;
    }

    // Reduce SIMD accumulator (a scalar tail loop over the remaining
    // elements is also required; elided here)
    let mut result = [0.0f32; 8];
    _mm256_storeu_ps(result.as_mut_ptr(), sum);
    result.iter().sum()
}
```

**Integration Points**:
- Pattern compression
- Fast similarity search
- Memory-efficient storage
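
Because the AVX2 body above elides the index-matching step, the scalar reference below spells out the exact merge-join semantics the SIMD path must reproduce:

```rust
/// Scalar reference for the sparse dot product: a merge-join over two
/// sorted index lists, accumulating products where indices match.
/// This is the behavior the elided AVX2 path above must reproduce.
fn sparse_dot_scalar(idx1: &[u32], val1: &[f32], idx2: &[u32], val2: &[f32]) -> f32 {
    let (mut i, mut j, mut sum) = (0usize, 0usize, 0.0f32);
    while i < idx1.len() && j < idx2.len() {
        match idx1[i].cmp(&idx2[j]) {
            std::cmp::Ordering::Less => i += 1,
            std::cmp::Ordering::Greater => j += 1,
            std::cmp::Ordering::Equal => {
                sum += val1[i] * val2[j];
                i += 1;
                j += 1;
            }
        }
    }
    sum
}
```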

---

## exo-ai Crate Integration

### 1. exo-core (IIT/Φ Measurement)

**Purpose**: Integrated Information Theory for consciousness metrics.

```rust
use exo_core::{PhiComputer, IntegratedInformation, Constellation, TransitionMatrix};
use std::time::Instant;

/// Φ-based quality measurement for reasoning traces
pub struct PhiEvaluator {
    phi_computer: PhiComputer,
    history: Vec<PhiMeasurement>,
    threshold: f64,
}

#[derive(Clone)]
pub struct PhiMeasurement {
    pub phi_value: f64,
    pub main_complex: Constellation,
    pub timestamp: Instant,
    pub context: String,
}

impl PhiEvaluator {
    pub fn new(threshold: f64) -> Self {
        Self {
            phi_computer: PhiComputer::new(),
            history: Vec::new(),
            threshold,
        }
    }

    /// Measure integrated information of a reasoning trace
    pub fn measure_phi(&mut self, trace: &ReasoningTrace) -> PhiMeasurement {
        // Build state transition matrix from the trace
        let tpm = self.build_tpm(trace);

        // Compute Φ using IIT 3.0
        let result = self.phi_computer.compute_phi(&tpm);

        let measurement = PhiMeasurement {
            phi_value: result.phi,
            main_complex: result.main_complex,
            timestamp: Instant::now(),
            context: trace.query.clone(),
        };

        self.history.push(measurement.clone());
        measurement
    }

    /// Check if reasoning meets the integration threshold
    pub fn is_integrated(&self, measurement: &PhiMeasurement) -> bool {
        measurement.phi_value >= self.threshold
    }

    fn build_tpm(&self, trace: &ReasoningTrace) -> TransitionMatrix {
        let n = trace.steps.len();
        let mut tpm = TransitionMatrix::zeros(n, n);

        // saturating_sub guards the empty-trace case
        for i in 0..n.saturating_sub(1) {
            let from_state = &trace.steps[i];
            let to_state = &trace.steps[i + 1];

            // Transition probability approximated by embedding similarity
            let similarity = cosine_similarity(&from_state.embedding, &to_state.embedding);
            tpm[(i, i + 1)] = similarity;
        }

        tpm
    }

    /// Evaluate dream quality using Φ
    pub fn evaluate_dream(&mut self, dream: &Dream) -> f64 {
        let trace = ReasoningTrace {
            query: "dream".to_string(),
            steps: dream.path.iter()
                .map(|node| ReasoningStep {
                    embedding: node.embedding.clone(),
                    ..Default::default()
                })
                .collect(),
            ..Default::default()
        };

        self.measure_phi(&trace).phi_value
    }
}

/// Reasoning trace for Φ analysis
#[derive(Default)]
pub struct ReasoningTrace {
    pub query: String,
    pub steps: Vec<ReasoningStep>,
    pub final_answer: Option<String>,
    pub quality_score: f32,
}

#[derive(Default)]
pub struct ReasoningStep {
    pub embedding: Vec<f32>,
    pub attention_pattern: Vec<f32>,
    pub activated_nodes: Vec<NodeId>,
}
```

**Integration Points**:
- Dream quality evaluation
- Reasoning coherence measurement
- Learning signal generation
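
A sketch of the Φ gate in isolation: only traces whose integrated information clears the threshold feed the learning loop. The 0.5 threshold is an illustrative value:

```rust
// Φ-gating sketch, assuming the PhiEvaluator above. The 0.5 threshold is
// illustrative; in the unified layer it comes from SonaConfig.phi_threshold.
fn phi_gate_sketch(trace: &ReasoningTrace) -> bool {
    let mut evaluator = PhiEvaluator::new(0.5);
    let measurement = evaluator.measure_phi(trace);

    // Integrated (coherent) reasoning becomes a learning signal;
    // fragmented reasoning is dropped rather than reinforced
    evaluator.is_integrated(&measurement)
}
```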

### 2. exo-temporal (Temporal Cycles)

**Purpose**: Temporal pattern recognition and prediction.

```rust
use exo_temporal::{TemporalEncoder, CycleDetector, Predictor, Cycle};
use std::time::{Duration, Instant};

/// Temporal pattern learning for usage prediction
pub struct TemporalLearner {
    encoder: TemporalEncoder,
    cycle_detector: CycleDetector,
    predictor: Predictor,
    patterns: Vec<TemporalPattern>,
}

#[derive(Clone)]
pub struct TemporalPattern {
    pub id: u64,
    pub period: Duration,
    pub phase: f32,
    pub amplitude: f32,
    pub pattern_type: TemporalPatternType,
}

#[derive(Clone, Copy)]
pub enum TemporalPatternType {
    Daily,
    Weekly,
    Bursty,
    Seasonal,
    Custom,
}

impl TemporalLearner {
    pub fn new(encoding_dim: usize) -> Self {
        Self {
            encoder: TemporalEncoder::new(encoding_dim),
            cycle_detector: CycleDetector::new(),
            predictor: Predictor::new(encoding_dim, 64),
            patterns: Vec::new(),
        }
    }

    /// Record event with timestamp
    pub fn record_event(&mut self, event: &Event, timestamp: Instant) {
        let encoding = self.encoder.encode(timestamp, event);
        self.cycle_detector.add_observation(encoding, timestamp);
    }

    /// Detect temporal patterns
    pub fn detect_patterns(&mut self) -> Vec<TemporalPattern> {
        let cycles = self.cycle_detector.find_cycles();

        self.patterns = cycles.into_iter()
            .enumerate()
            .map(|(i, cycle)| TemporalPattern {
                id: i as u64,
                period: cycle.period,
                phase: cycle.phase,
                amplitude: cycle.amplitude,
                pattern_type: self.classify_cycle(&cycle),
            })
            .collect();

        self.patterns.clone()
    }

    fn classify_cycle(&self, cycle: &Cycle) -> TemporalPatternType {
        let hours = cycle.period.as_secs_f64() / 3600.0;

        if (23.0..25.0).contains(&hours) {
            TemporalPatternType::Daily
        } else if (166.0..170.0).contains(&hours) {
            TemporalPatternType::Weekly // a week is 168 hours
        } else if hours < 1.0 {
            TemporalPatternType::Bursty
        } else {
            TemporalPatternType::Custom
        }
    }

    /// Predict optimal times for background learning
    pub fn predict_learning_windows(&self) -> Vec<TimeWindow> {
        let mut windows = Vec::new();

        for pattern in &self.patterns {
            if matches!(pattern.pattern_type, TemporalPatternType::Daily | TemporalPatternType::Weekly) {
                // Low-activity periods sit at the opposite phase of the cycle
                let low_activity_phase = pattern.phase + std::f32::consts::PI;
                windows.push(TimeWindow {
                    start: self.phase_to_time(low_activity_phase, pattern.period),
                    duration: Duration::from_secs(3600), // 1 hour window
                    priority: pattern.amplitude,
                });
            }
        }

        windows.sort_by(|a, b| b.priority.partial_cmp(&a.priority).unwrap());
        windows
    }

    fn phase_to_time(&self, phase: f32, period: Duration) -> Instant {
        let period_secs = period.as_secs_f32();
        let offset = (phase / (2.0 * std::f32::consts::PI)) * period_secs;
        Instant::now() + Duration::from_secs_f32(offset)
    }
}

pub struct TimeWindow {
    pub start: Instant,
    pub duration: Duration,
    pub priority: f32,
}
```

**Integration Points**:
- Learning schedule optimization
- Usage pattern prediction
- Adaptive resource allocation
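
A usage sketch tying the pieces together: record events as they arrive, re-fit cycles periodically, then schedule background work in the best predicted window. `Event::Query` matches the variant used later in `SonaIntegration::process`:

```rust
// Usage sketch for TemporalLearner. Event::Query follows the variant used
// in SonaIntegration::process; the payload string is illustrative.
use std::time::Instant;

fn temporal_usage_sketch(learner: &mut TemporalLearner) {
    // Feed query events into the learner as they arrive
    learner.record_event(&Event::Query("example".into()), Instant::now());

    // Periodically re-fit cycles (daily / weekly / bursty / custom)
    let patterns = learner.detect_patterns();
    println!("{} temporal patterns detected", patterns.len());

    // Pick the highest-priority upcoming quiet window for background learning
    if let Some(window) = learner.predict_learning_windows().first() {
        let eta = window.start.saturating_duration_since(Instant::now());
        println!("next learning window in {eta:?}");
    }
}
```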

### 3. exo-exotic (Quantum-Inspired)

**Purpose**: Quantum-inspired optimization for creative exploration.

```rust
use exo_exotic::{QuantumState, SuperpositionSampler, EntanglementGraph};
use num_complex::Complex64;
use rand::Rng;

/// Quantum-inspired creative exploration
pub struct QuantumExplorer {
    state: QuantumState,
    sampler: SuperpositionSampler,
    entanglement: EntanglementGraph,
}

impl QuantumExplorer {
    pub fn new(dim: usize) -> Self {
        Self {
            state: QuantumState::new(dim),
            sampler: SuperpositionSampler::new(),
            entanglement: EntanglementGraph::new(),
        }
    }

    /// Create superposition of pattern states
    pub fn create_superposition(&mut self, patterns: &[LearnedPattern]) -> Superposition {
        let amplitudes: Vec<Complex64> = patterns.iter()
            .map(|p| {
                let magnitude = (p.avg_quality as f64).sqrt();
                let phase = p.total_weight as f64 * 0.1;
                Complex64::from_polar(magnitude, phase)
            })
            .collect();

        self.state.set_amplitudes(&amplitudes);
        let entanglement_strength = self.measure_entanglement(&amplitudes);

        Superposition {
            patterns: patterns.to_vec(),
            amplitudes,
            entanglement_strength,
        }
    }

    /// Sample from superposition for creative exploration
    pub fn sample_creative(&self, superposition: &Superposition, n_samples: usize) -> Vec<CreativeSample> {
        self.sampler.sample(&superposition.amplitudes, n_samples)
            .into_iter()
            .map(|prob| {
                let pattern_idx = self.probability_to_index(prob, superposition.patterns.len());
                CreativeSample {
                    base_pattern: superposition.patterns[pattern_idx].clone(),
                    perturbation: self.quantum_perturbation(prob),
                    novelty_score: 1.0 - prob, // Lower probability = more novel
                }
            })
            .collect()
    }

    fn measure_entanglement(&self, amplitudes: &[Complex64]) -> f64 {
        // Compute von Neumann-style entropy as the entanglement measure
        let probs: Vec<f64> = amplitudes.iter()
            .map(|a| a.norm_sqr())
            .collect();

        -probs.iter()
            .filter(|&&p| p > 1e-10)
            .map(|&p| p * p.ln())
            .sum::<f64>()
    }

    fn quantum_perturbation(&self, prob: f64) -> Vec<f32> {
        // Generate quantum-inspired perturbation
        let dim = self.state.dimension();
        let mut rng = rand::thread_rng();

        (0..dim)
            .map(|_| {
                let phase = rng.gen::<f64>() * 2.0 * std::f64::consts::PI;
                let amplitude = (1.0 - prob).sqrt();
                (amplitude * phase.cos()) as f32 * 0.1
            })
            .collect()
    }

    fn probability_to_index(&self, prob: f64, n: usize) -> usize {
        ((prob * n as f64) as usize).min(n - 1)
    }
}

pub struct Superposition {
    pub patterns: Vec<LearnedPattern>,
    pub amplitudes: Vec<Complex64>,
    pub entanglement_strength: f64,
}

pub struct CreativeSample {
    pub base_pattern: LearnedPattern,
    pub perturbation: Vec<f32>,
    pub novelty_score: f64,
}
```

**Integration Points**:
- Dream creative jumps
- Novel pattern generation
- Exploration-exploitation balance
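
A worked check of the entropy used by `measure_entanglement`: a uniform superposition over four patterns has probabilities p_i = 0.25, so S = -Σ p_i ln p_i = ln 4 ≈ 1.386, while a fully collapsed state (one p_i = 1) scores 0. A minimal sketch:

```rust
// Worked sketch of the entropy used as the entanglement measure above:
// for amplitudes with probabilities p_i = |a_i|^2, S = -Σ p_i ln p_i.
use num_complex::Complex64;

fn entropy_sketch() {
    let amp = Complex64::from_polar(0.5, 0.0); // |amp|^2 = 0.25
    let amplitudes = vec![amp; 4];             // uniform over 4 patterns

    let entropy: f64 = -amplitudes
        .iter()
        .map(|a| a.norm_sqr())
        .filter(|&p| p > 1e-10)
        .map(|p| p * p.ln())
        .sum::<f64>();

    // Uniform superposition is maximally entangled: S = ln 4
    assert!((entropy - 4.0f64.ln()).abs() < 1e-9);
}
```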

---

## Unified Integration Layer

### SONA Integration Manager

```rust
use std::time::Instant;

/// Central integration manager for all SONA components
pub struct SonaIntegration {
    // ruvector components
    pub pattern_index: PatternIndex,
    pub attention: AdaptiveAttention,
    pub knowledge_graph: KnowledgeGraph,
    pub pattern_store: PatternStore,
    pub sparse_store: SparsePatternStore,

    // exo-ai components
    pub phi_evaluator: PhiEvaluator,
    pub temporal_learner: TemporalLearner,
    pub quantum_explorer: QuantumExplorer,

    // Core SONA components
    pub lora_engine: LoraEngine,
    pub reasoning_bank: ReasoningBank,
    pub dream_engine: DreamEngine,
    pub ewc: EwcPlusPlus,

    // Coordination
    pub loop_coordinator: LoopCoordinator,
    pub metrics: IntegrationMetrics,
}

impl SonaIntegration {
    pub async fn new(config: SonaConfig) -> Result<Self, SonaError> {
        Ok(Self {
            pattern_index: PatternIndex::new(config.embedding_dim, config.max_patterns),
            attention: AdaptiveAttention::new(config.hidden_dim, config.num_heads),
            knowledge_graph: KnowledgeGraph::new(config.embedding_dim, config.hidden_dim),
            pattern_store: PatternStore::new(&config.database_url).await?,
            sparse_store: SparsePatternStore::new(config.embedding_dim),
            phi_evaluator: PhiEvaluator::new(config.phi_threshold),
            temporal_learner: TemporalLearner::new(config.temporal_dim),
            quantum_explorer: QuantumExplorer::new(config.embedding_dim),
            lora_engine: LoraEngine::new(config.lora_config),
            reasoning_bank: ReasoningBank::new(config.pattern_config),
            dream_engine: DreamEngine::new(config.dream_config),
            ewc: EwcPlusPlus::new(config.ewc_config),
            loop_coordinator: LoopCoordinator::new(),
            metrics: IntegrationMetrics::default(),
        })
    }

    /// Process query through the unified pipeline
    pub async fn process(&mut self, query: &str, context: &Context) -> Result<Response, SonaError> {
        let start = Instant::now();

        // 1. Record temporal event
        self.temporal_learner.record_event(&Event::Query(query.to_string()), Instant::now());

        // 2. Embed query
        let query_embedding = self.embed_query(query);

        // 3. Gather context. These lookups are independent and could run in
        //    parallel once the stores sit behind shared, thread-safe handles;
        //    shown sequentially here because they borrow `self` mutably.
        let similar_patterns = self.pattern_index.find_similar(&query_embedding, 10);
        let _graph_context = self.knowledge_graph.propagate(&query_embedding, 3);
        let _sparse_matches = self.sparse_store.search(&[], &[], 5); // Sparse backup

        // 4. Apply adaptive attention
        //    (assembly of graph/sparse results into the context is elided)
        let attended = self.attention.forward(&query_embedding, context, &similar_patterns);

        // 5. Generate response with LoRA
        let response = self.lora_engine.forward(&attended);

        // 6. Record trajectory
        let trajectory = QueryTrajectory {
            query: query.to_string(),
            steps: vec![/* reasoning steps */],
            response: response.clone(),
            quality: self.evaluate_quality(&response),
        };

        // 7. Signal learning (async)
        let signal = LearningSignal::from_trajectory(&trajectory);
        self.loop_coordinator.signal_learning(signal);

        self.metrics.queries_processed += 1;
        self.metrics.avg_latency_ms =
            (self.metrics.avg_latency_ms * 0.99) + (start.elapsed().as_millis() as f64 * 0.01);

        Ok(Response {
            text: response.text,
            confidence: response.confidence,
            patterns_used: similar_patterns.len(),
        })
    }

    /// Run background learning cycle
    pub async fn background_learn(&mut self) -> Result<LearningResult, SonaError> {
        // Check whether we are inside a predicted low-activity window
        // (the scheduling gate itself is elided in this sketch)
        let _windows = self.temporal_learner.predict_learning_windows();

        // Extract patterns from reasoning bank
        let patterns = self.reasoning_bank.extract_patterns();

        // Evaluate patterns with Φ
        for pattern in &patterns {
            let trace = pattern.to_reasoning_trace();
            let phi = self.phi_evaluator.measure_phi(&trace);

            if self.phi_evaluator.is_integrated(&phi) {
                // High-quality pattern: persist and wire into the graph
                self.pattern_store.store_pattern(pattern).await?;
                self.knowledge_graph.learn_edge(pattern);
            }
        }

        // Update LoRA with EWC++
        let gradients = self.lora_engine.compute_gradients(&patterns);
        let safe_gradients = self.ewc.apply_constraints(&gradients);
        self.lora_engine.apply_update(&safe_gradients);

        // Consolidate storage
        self.pattern_store.consolidate().await?;

        Ok(LearningResult {
            patterns_learned: patterns.len(),
            patterns_persisted: patterns.iter().filter(|p| p.avg_quality > 0.7).count(),
        })
    }

    /// Run deep learning cycle (weekly)
    pub async fn deep_learn(&mut self) -> Result<DeepLearningResult, SonaError> {
        // Generate dreams
        let dreams = self.dream_engine.generate_dreams(50);

        // Explore dream patterns with quantum-inspired sampling
        let quantum_samples: Vec<_> = dreams.iter()
            .filter_map(|dream| {
                let patterns = dream.to_patterns();
                if patterns.len() >= 2 {
                    let superposition = self.quantum_explorer.create_superposition(&patterns);
                    Some(self.quantum_explorer.sample_creative(&superposition, 3))
                } else {
                    None
                }
            })
            .flatten()
            .collect();

        // Evaluate dreams with Φ
        let mut integrated_dreams = Vec::new();
        for dream in &dreams {
            let phi = self.phi_evaluator.evaluate_dream(dream);
            if phi > self.phi_evaluator.threshold {
                integrated_dreams.push((dream.clone(), phi));
            }
        }

        // Integrate high-quality dreams
        for (dream, _phi) in &integrated_dreams {
            self.dream_engine.integrate_dream(dream);
        }

        // Update temporal patterns
        self.temporal_learner.detect_patterns();

        // Full EWC++ consolidation
        self.ewc.consolidate_all_tasks();

        Ok(DeepLearningResult {
            dreams_generated: dreams.len(),
            dreams_integrated: integrated_dreams.len(),
            quantum_samples: quantum_samples.len(),
        })
    }

    fn embed_query(&self, query: &str) -> Vec<f32> {
        // Query embedding implementation (model call elided in this sketch)
        vec![0.0; 256] // Placeholder
    }

    fn evaluate_quality(&self, response: &ResponseData) -> f32 {
        response.confidence
    }
}

#[derive(Default)]
pub struct IntegrationMetrics {
    pub queries_processed: u64,
    pub patterns_learned: u64,
    pub dreams_integrated: u64,
    pub avg_latency_ms: f64,
    pub avg_phi: f64,
}
```
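
An end-to-end usage sketch of the manager, assuming a populated `SonaConfig`; `Context::default()` is a placeholder for real conversation context:

```rust
// End-to-end usage sketch. Context::default() and the query string are
// illustrative; the three calls mirror SONA's instant / background / deep loops.
async fn integration_sketch(config: SonaConfig) -> Result<(), SonaError> {
    let mut sona = SonaIntegration::new(config).await?;

    // Instant loop: serve a query
    let response = sona.process("summarize recent failures", &Context::default()).await?;
    println!("confidence {:.2}, {} patterns used", response.confidence, response.patterns_used);

    // Background loop: extract, Φ-gate, persist, and apply EWC-constrained updates
    let learned = sona.background_learn().await?;
    println!("learned {} patterns", learned.patterns_learned);

    // Deep loop: dream generation plus quantum-inspired exploration
    let deep = sona.deep_learn().await?;
    println!("integrated {} of {} dreams", deep.dreams_integrated, deep.dreams_generated);
    Ok(())
}
```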

---

## Component Communication Protocol

```rust
use std::collections::HashMap;
use tokio::sync::broadcast;

/// Inter-component message types
/// (Clone is required by the broadcast channel)
#[derive(Clone)]
pub enum SonaMessage {
    // Learning signals
    LearningSignal(LearningSignal),
    PatternDiscovered(LearnedPattern),
    DreamGenerated(Dream),

    // Coordination
    StartBackgroundLearning,
    StartDeepLearning,
    ConsolidateMemory,

    // Queries
    QueryPattern(Vec<f32>),
    QueryGraph(NodeId, usize),

    // Results
    PatternResult(Vec<LearnedPattern>),
    GraphResult(Vec<(NodeId, f32)>),
}

/// Message bus for component communication
pub struct SonaMessageBus {
    sender: broadcast::Sender<SonaMessage>,
    subscribers: HashMap<ComponentId, broadcast::Receiver<SonaMessage>>,
}

impl SonaMessageBus {
    pub fn subscribe(&mut self, component_id: ComponentId) -> broadcast::Receiver<SonaMessage> {
        // Track one receiver per component for bookkeeping (e.g. lag
        // inspection) and hand an independent receiver back to the caller
        self.subscribers.insert(component_id, self.sender.subscribe());
        self.sender.subscribe()
    }

    pub fn publish(&self, message: SonaMessage) {
        // Send errors only occur when no receiver is subscribed; safe to ignore
        let _ = self.sender.send(message);
    }
}
```
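
A sketch of a component wiring itself to the bus; `ComponentId::Learning` is an illustrative variant, and the handler bodies are elided:

```rust
// Usage sketch: a learning component subscribing to the bus and reacting to
// coordination messages. ComponentId::Learning is an illustrative variant.
async fn bus_usage_sketch(bus: &mut SonaMessageBus) {
    let mut rx = bus.subscribe(ComponentId::Learning);

    tokio::spawn(async move {
        while let Ok(message) = rx.recv().await {
            match message {
                SonaMessage::StartBackgroundLearning => {
                    // kick off a background_learn() cycle (elided)
                }
                SonaMessage::PatternDiscovered(pattern) => {
                    // index the newly discovered pattern (elided)
                    let _ = pattern;
                }
                _ => {}
            }
        }
    });

    // Any component can broadcast without knowing the subscribers
    bus.publish(SonaMessage::ConsolidateMemory);
}
```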

---

## Next Steps

1. **06-COMPONENTS.md** - This document (Complete)
2. **07-IMPLEMENTATION.md** - Implementation roadmap
3. **08-BENCHMARKS.md** - Performance targets
4. **09-API-REFERENCE.md** - Complete API documentation