Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'

This commit is contained in:
ruv
2026-02-28 14:39:40 -05:00
7854 changed files with 3522914 additions and 0 deletions

View File

@@ -0,0 +1,70 @@
/**
* Basic SONA Usage Example
* Demonstrates core functionality of the SONA engine
*/
const { SonaEngine } = require('../index.js');
/**
 * Entry point: walks through the core SONA workflow — engine creation,
 * trajectory recording, micro-LoRA application, pattern search, stats,
 * and a forced background-learning cycle.
 */
function main() {
  console.log('🧠 SONA - Self-Optimizing Neural Architecture\n');

  // Helper: vector of `n` uniform random values in [0, 1).
  const randomVector = (n) => Array.from({ length: n }, () => Math.random());

  // Create engine with hidden dimension
  console.log('Creating SONA engine with hidden_dim=256...');
  const engine = new SonaEngine(256);
  console.log('✓ Engine created\n');

  // Simulate some inference trajectories
  console.log('Recording inference trajectories...');
  for (let t = 0; t < 10; t++) {
    // Start a trajectory from a fresh query embedding
    const builder = engine.beginTrajectory(randomVector(256));

    // Simulate inference steps: activations, attention weights, and a
    // random reward between 0.7-1.0 per step
    for (let s = 0; s < 3; s++) {
      builder.addStep(randomVector(256), randomVector(64), 0.7 + Math.random() * 0.3);
    }

    // Set route and context
    builder.setRoute(`model_${t % 3}`);
    builder.addContext(`context_${t}`);

    // Complete trajectory with an overall quality between 0.75-1.0
    engine.endTrajectory(builder, 0.75 + Math.random() * 0.25);
  }
  console.log('✓ Recorded 10 trajectories\n');

  // Apply micro-LoRA transformation
  console.log('Applying micro-LoRA transformation...');
  const input = Array(256).fill(1.0);
  const output = engine.applyMicroLora(input);
  console.log(`✓ Transformed ${input.length} -> ${output.length} dimensions\n`);

  // Find similar patterns
  console.log('Finding similar patterns...');
  const patterns = engine.findPatterns(randomVector(256), 5);
  console.log(`✓ Found ${patterns.length} patterns\n`);

  // Get statistics
  console.log('Engine statistics:');
  console.log(engine.getStats());
  console.log();

  // Force learning cycle
  console.log('Running background learning cycle...');
  const result = engine.forceLearn();
  console.log(`${result}\n`);

  console.log('✓ Example completed successfully!');
}

main();

View File

@@ -0,0 +1,87 @@
/**
* Custom Configuration Example
* Demonstrates advanced configuration options
*/
const { SonaEngine } = require('../index.js');
/**
 * Entry point: demonstrates building a SONA engine from an explicit config
 * object, then exercises trajectories, both LoRA paths, pattern analysis,
 * and the enable/disable toggle.
 */
function main() {
  console.log('🔧 SONA - Custom Configuration Example\n');

  // Helper: vector of `n` uniform random values in [0, 1).
  const randomVector = (n) => Array.from({ length: n }, () => Math.random());

  // Create engine with custom configuration
  const config = {
    hiddenDim: 512,
    embeddingDim: 512,
    microLoraRank: 2,
    baseLoraRank: 16,
    microLoraLr: 0.002,
    baseLoraLr: 0.0002,
    ewcLambda: 500.0,
    patternClusters: 100,
    trajectoryCapacity: 5000,
    backgroundIntervalMs: 1800000, // 30 minutes
    qualityThreshold: 0.7,
    enableSimd: true,
  };
  console.log('Configuration:', JSON.stringify(config, null, 2));

  const engine = SonaEngine.withConfig(config);
  console.log('✓ Engine created with custom config\n');

  // Record high-quality trajectories
  console.log('Recording high-quality trajectories...');
  for (let t = 0; t < 20; t++) {
    const builder = engine.beginTrajectory(randomVector(512));
    // Multiple inference steps, rewards in [0.8, 1.0)
    for (let s = 0; s < 5; s++) {
      builder.addStep(randomVector(512), randomVector(128), 0.8 + Math.random() * 0.2);
    }
    builder.setRoute(`high_quality_model_${t % 4}`);
    engine.endTrajectory(builder, 0.85 + Math.random() * 0.15);
  }
  console.log('✓ Recorded 20 high-quality trajectories\n');

  // Apply both micro and base LoRA
  console.log('Applying LoRA transformations...');
  const input = Array(512).fill(1.0);
  const microOutput = engine.applyMicroLora(input);
  console.log(`✓ Micro-LoRA: ${input.length} -> ${microOutput.length}`);
  const baseOutput = engine.applyBaseLora(0, input);
  console.log(`✓ Base-LoRA (layer 0): ${input.length} -> ${baseOutput.length}\n`);

  // Pattern analysis
  console.log('Pattern analysis...');
  const topPatterns = engine.findPatterns(randomVector(512), 10);
  console.log(`Found ${topPatterns.length} patterns:`);
  for (const [rank, pattern] of topPatterns.slice(0, 3).entries()) {
    console.log(` ${rank + 1}. ID: ${pattern.id}`);
    console.log(` Quality: ${pattern.avgQuality.toFixed(3)}`);
    console.log(` Cluster size: ${pattern.clusterSize}`);
    console.log(` Type: ${pattern.patternType}`);
  }
  console.log();

  // Enable/disable engine
  console.log('Testing enable/disable...');
  console.log(`Engine enabled: ${engine.isEnabled()}`);
  engine.setEnabled(false);
  console.log(`Engine enabled: ${engine.isEnabled()}`);
  engine.setEnabled(true);
  console.log(`Engine enabled: ${engine.isEnabled()}\n`);

  console.log('✓ Custom configuration example completed!');
}

main();

View File

@@ -0,0 +1,222 @@
/**
* LLM Integration Example
* Demonstrates how to integrate SONA with an LLM inference pipeline
*/
const { SonaEngine } = require('../index.js');
/**
 * Adaptive LLM wrapper that routes every (simulated) inference through a
 * SONA engine: each generation records a trajectory, applies micro-LoRA
 * enhancement per layer, and feeds a quality score back for learning.
 */
class AdaptiveLLM {
  /**
   * @param {number} [hiddenDim=4096] - Hidden-state width of the simulated
   *   model; also used as the SONA embedding dimension.
   */
  constructor(hiddenDim = 4096) {
    // Remember the dimension so embedPrompt() always matches the engine's
    // embeddingDim. (It was previously hard-coded to 4096 inside
    // embedPrompt, which broke any non-default hiddenDim.)
    this.hiddenDim = hiddenDim;
    // Create SONA engine with LLM-appropriate configuration
    this.sona = SonaEngine.withConfig({
      hiddenDim: hiddenDim,
      embeddingDim: hiddenDim,
      microLoraRank: 2,
      baseLoraRank: 16,
      microLoraLr: 0.002,
      baseLoraLr: 0.0001,
      qualityThreshold: 0.7,
      backgroundIntervalMs: 1800000, // 30 minutes
    });
    this.layers = 32; // Simulated layer count
    console.log(`🤖 Initialized Adaptive LLM with SONA (hidden_dim=${hiddenDim})`);
  }
  /**
   * Simulate LLM inference with SONA enhancement.
   * @param {string} prompt - User prompt to respond to.
   * @returns {Promise<string>} The (simulated) generated text.
   */
  async generate(prompt) {
    console.log(`\n📝 Generating response for: "${prompt}"`);
    // 1. Embed the prompt (simulated)
    const embedding = this.embedPrompt(prompt);
    // 2. Start SONA trajectory
    const builder = this.sona.beginTrajectory(embedding);
    // 3. Run inference through layers
    let output = embedding;
    for (let layer = 0; layer < this.layers; layer++) {
      // Simulate layer forward pass
      const activations = this.forwardLayer(layer, output);
      // Apply SONA micro-LoRA enhancement
      const enhanced = this.sona.applyMicroLora(activations);
      // Record trajectory step
      const attention = this.getAttention(layer);
      const reward = this.calculateReward(enhanced, layer);
      builder.addStep(activations, attention, reward);
      output = enhanced;
      // Progress indicator every 8 layers
      if ((layer + 1) % 8 === 0) {
        console.log(` Layer ${layer + 1}/${this.layers} processed`);
      }
    }
    // 4. Decode output (simulated)
    const generatedText = this.decode(output);
    // 5. Calculate quality score
    const quality = this.assessQuality(generatedText, prompt);
    // 6. Complete trajectory
    builder.setRoute('main_model');
    builder.addContext(prompt);
    this.sona.endTrajectory(builder, quality);
    console.log(`✓ Generated (quality: ${quality.toFixed(3)}): "${generatedText}"`);
    // 7. Run periodic background learning
    const status = this.sona.tick();
    if (status) {
      console.log(`🔄 Background learning: ${status}`);
    }
    return generatedText;
  }
  /**
   * Simulate prompt embedding; deterministic for a given prompt.
   * @param {string} prompt
   * @returns {number[]} Vector of length `this.hiddenDim`.
   */
  embedPrompt(prompt) {
    // Use the configured dimension rather than a hard-coded 4096 so the
    // embedding length always matches the engine; fall back to the
    // constructor default if the field is somehow missing.
    const dim = this.hiddenDim ?? 4096;
    // Simple hash-based embedding (in real use, use actual embeddings)
    const seed = prompt.split('').reduce((acc, char) => acc + char.charCodeAt(0), 0);
    const embedding = Array(dim).fill(0).map((_, i) => {
      return Math.sin(seed * (i + 1) * 0.001) * Math.cos(i * 0.1);
    });
    return embedding;
  }
  /**
   * Simulate layer forward pass.
   * @param {number} layer - Layer index (mixed into the transform).
   * @param {number[]} input - Previous layer's output.
   * @returns {number[]} Transformed vector of the same length.
   */
  forwardLayer(layer, input) {
    // Simple transformation (in real use, actual neural network layer)
    return input.map((x, i) => {
      return Math.tanh(x + Math.sin(layer * i * 0.01));
    });
  }
  /**
   * Simulate attention weights, normalized to sum to 1.
   * NOTE(review): `layer` is currently unused — weights are random either way.
   * @returns {number[]} 64 normalized random weights.
   */
  getAttention(layer) {
    const seqLen = 64;
    const weights = Array(seqLen).fill(0).map(() => Math.random());
    const sum = weights.reduce((a, b) => a + b, 0);
    return weights.map(w => w / sum); // Normalize
  }
  /**
   * Calculate reward for a layer.
   * Higher reward for middle layers, lower for early/late; random base
   * in [0.7, 0.9) scaled down by up to 30% with distance from the middle.
   */
  calculateReward(activations, layer) {
    const midLayer = this.layers / 2;
    const distance = Math.abs(layer - midLayer) / midLayer;
    const base = 0.7 + Math.random() * 0.2;
    return base * (1 - distance * 0.3);
  }
  /**
   * Decode activations to text (simulated): picks a canned template from a
   * hash of the first 10 activation values.
   */
  decode(activations) {
    // Simple simulation - in real use, actual decoder
    const templates = [
      'This is a thoughtful response.',
      'Here is the information you requested.',
      'Based on the context, the answer is...',
      'Let me explain this concept.',
      'The solution involves several steps.',
    ];
    const hash = activations.slice(0, 10).reduce((a, b) => a + b, 0);
    const index = Math.floor(Math.abs(hash) * 100) % templates.length;
    return templates[index];
  }
  /**
   * Assess output quality (simulated): length-based score plus noise.
   * NOTE(review): `prompt` is currently unused in the metric.
   */
  assessQuality(output, prompt) {
    // Simple quality metric (in real use, actual quality assessment)
    const lengthScore = Math.min(output.length / 50, 1.0);
    const randomness = Math.random() * 0.2;
    return 0.6 + lengthScore * 0.2 + randomness;
  }
  /**
   * Find the k stored patterns most similar to the prompt's embedding.
   * @param {string} prompt
   * @param {number} [k=5] - Number of patterns to retrieve.
   * @returns {Array} Patterns reported by the SONA engine.
   */
  findSimilarPatterns(prompt, k = 5) {
    const embedding = this.embedPrompt(prompt);
    const patterns = this.sona.findPatterns(embedding, k);
    console.log(`\n🔍 Found ${patterns.length} similar patterns:`);
    patterns.forEach((pattern, i) => {
      console.log(` ${i + 1}. Quality: ${pattern.avgQuality.toFixed(3)}, ` +
        `Type: ${pattern.patternType}, Size: ${pattern.clusterSize}`);
    });
    return patterns;
  }
  /**
   * Get engine statistics (also logs them).
   */
  getStats() {
    const stats = this.sona.getStats();
    console.log('\n📊 SONA Engine Statistics:');
    console.log(stats);
    return stats;
  }
  /**
   * Force a background learning pass immediately.
   */
  forceLearn() {
    console.log('\n🎓 Forcing background learning...');
    const result = this.sona.forceLearn();
    console.log(result);
    return result;
  }
}
// Example usage
/**
 * Entry point: runs several prompts through the adaptive LLM, then shows
 * pattern retrieval, engine statistics, and a forced learning pass.
 */
async function main() {
  console.log('🚀 SONA LLM Integration Example\n');

  const llm = new AdaptiveLLM(4096);

  // Small delay to simulate async processing between requests
  const pause = (ms) => new Promise((resolve) => setTimeout(resolve, ms));

  // Generate responses for different prompts
  const questions = [
    'What is machine learning?',
    'Explain neural networks',
    'How does gradient descent work?',
    'What are transformers?',
  ];
  for (const question of questions) {
    await llm.generate(question);
    await pause(100);
  }

  // Pattern analysis
  llm.findSimilarPatterns('Tell me about AI');
  // Statistics
  llm.getStats();
  // Force learning
  llm.forceLearn();

  console.log('\n✓ LLM integration example completed!');
}
main().catch(console.error);