Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'
This commit is contained in:
817
vendor/ruvector/npm/packages/ruvllm/test/advanced-features.test.js
vendored
Normal file
817
vendor/ruvector/npm/packages/ruvllm/test/advanced-features.test.js
vendored
Normal file
@@ -0,0 +1,817 @@
|
||||
/**
|
||||
* Tests for advanced features: Federated Learning, LoRA, Export, Training Pipeline
|
||||
*/
|
||||
|
||||
const { test, describe } = require('node:test');
|
||||
const assert = require('node:assert');
|
||||
|
||||
const {
|
||||
// Federated Learning
|
||||
EphemeralAgent,
|
||||
FederatedCoordinator,
|
||||
// LoRA
|
||||
LoraAdapter,
|
||||
LoraManager,
|
||||
// Export
|
||||
SafeTensorsWriter,
|
||||
SafeTensorsReader,
|
||||
ModelExporter,
|
||||
ModelImporter,
|
||||
DatasetExporter,
|
||||
// Training
|
||||
TrainingPipeline,
|
||||
TrainingFactory,
|
||||
LRScheduler,
|
||||
MetricsTracker,
|
||||
} = require('../dist/cjs/index.js');
|
||||
|
||||
// ============================================
|
||||
// Federated Learning Tests
|
||||
// ============================================
|
||||
|
||||
// Ephemeral agents: short-lived workers that record per-task trajectories.
describe('EphemeralAgent', () => {
  test('should create agent with config', () => {
    const freshAgent = new EphemeralAgent('agent-1', { hiddenDim: 128 });

    // A brand-new agent knows its id but has no recorded work yet.
    assert.strictEqual(freshAgent.getAgentId(), 'agent-1');
    assert.strictEqual(freshAgent.trajectoryCount(), 0);
    assert.strictEqual(freshAgent.avgQuality(), 0);
  });

  test('should process tasks', () => {
    const worker = new EphemeralAgent('agent-1', { hiddenDim: 64 });

    worker.processTask([0.1, 0.2, 0.3], 0.85);
    worker.processTask([0.4, 0.5, 0.6], 0.92);

    // Two tasks recorded; mean quality (0.885) clears 0.8.
    assert.strictEqual(worker.trajectoryCount(), 2);
    assert.ok(worker.avgQuality() > 0.8);
  });

  test('should process tasks with route', () => {
    const worker = new EphemeralAgent('agent-1');

    worker.processTaskWithRoute([0.1, 0.2], 0.9, 'code-model');

    // The chosen route is carried through into the exported trajectory.
    const state = worker.exportState();
    assert.strictEqual(state.trajectories[0].route, 'code-model');
  });

  test('should apply micro-LoRA', () => {
    const worker = new EphemeralAgent('agent-1', { hiddenDim: 8, microLoraRank: 2 });

    // Warm up the micro-LoRA weights with a handful of identical tasks.
    for (let step = 0; step < 10; step++) {
      worker.processTask([1, 2, 3, 4, 5, 6, 7, 8], 0.9);
    }

    const input = [1, 2, 3, 4, 5, 6, 7, 8];
    const output = new Array(8).fill(0);
    worker.applyMicroLora(input, output);

    // After training, at least one output component should be non-zero.
    assert.ok(
      output.some((v) => v !== 0),
      'LoRA should produce non-zero output'
    );
  });

  test('should export state', () => {
    const worker = new EphemeralAgent('agent-1');

    worker.processTask([0.1, 0.2], 0.85);
    worker.processTask([0.3, 0.4], 0.75);

    const state = worker.exportState();

    assert.strictEqual(state.agentId, 'agent-1');
    assert.strictEqual(state.trajectories.length, 2);
    assert.ok(state.sessionDurationMs >= 0);
    assert.ok(state.stats.avgQuality > 0.7);
  });

  test('should serialize to JSON', () => {
    const worker = new EphemeralAgent('agent-1');
    worker.processTask([0.1, 0.2], 0.9);

    // toJSON() must produce parseable JSON that round-trips the state.
    const roundTripped = JSON.parse(worker.toJSON());

    assert.strictEqual(roundTripped.agentId, 'agent-1');
    assert.strictEqual(roundTripped.trajectories.length, 1);
  });
});
|
||||
|
||||
// Federated coordinator: aggregates agent exports, filters trajectories by
// quality, and warm-starts new agents from the accumulated patterns.
describe('FederatedCoordinator', () => {
  /**
   * Build a minimal agent-export payload for aggregation tests.
   * Stats are derived from the trajectories unless overridden, which keeps
   * the fixtures consistent and removes the duplicated literals the
   * original tests repeated four times.
   *
   * @param {string} agentId
   * @param {Array<object>} trajectories - entries with embedding/quality/context/timestamp
   * @param {object} [statsOverrides] - merged over the derived stats block
   * @returns {object} export payload accepted by coordinator.aggregate()
   */
  const makeExport = (agentId, trajectories, statsOverrides = {}) => ({
    agentId,
    trajectories,
    stats: {
      totalTrajectories: trajectories.length,
      avgQuality:
        trajectories.reduce((sum, t) => sum + t.quality, 0) / trajectories.length,
      patternsLearned: 0,
      ...statsOverrides,
    },
    sessionDurationMs: 1000,
    timestamp: Date.now(),
  });

  test('should create coordinator', () => {
    const coord = new FederatedCoordinator('coord-1', { hiddenDim: 128 });

    assert.strictEqual(coord.getCoordinatorId(), 'coord-1');
    assert.strictEqual(coord.agentCount(), 0);
    assert.strictEqual(coord.getTotalTrajectories(), 0);
  });

  test('should aggregate agent exports', () => {
    const coord = new FederatedCoordinator('coord-1');
    coord.setQualityThreshold(0.5);

    const exportData = makeExport('agent-1', [
      { embedding: [0.1, 0.2], quality: 0.8, context: [], timestamp: Date.now() },
      { embedding: [0.3, 0.4], quality: 0.3, context: [], timestamp: Date.now() }, // Below threshold
    ]);

    const result = coord.aggregate(exportData);

    // One trajectory clears the 0.5 threshold, the other is rejected.
    assert.strictEqual(result.agentId, 'agent-1');
    assert.strictEqual(result.trajectoriesAccepted, 1);
    assert.strictEqual(result.trajectoriesRejected, 1);
    assert.strictEqual(result.totalAgents, 1);
  });

  test('should aggregate multiple agents', () => {
    const coord = new FederatedCoordinator('coord-1');

    for (let i = 0; i < 3; i++) {
      coord.aggregate(
        makeExport(`agent-${i}`, [
          { embedding: [i * 0.1], quality: 0.8, context: [], timestamp: Date.now() },
        ])
      );
    }

    const stats = coord.stats();
    assert.strictEqual(stats.totalAgents, 3);
    assert.strictEqual(stats.totalTrajectories, 3);
  });

  test('should create agent with warm start', () => {
    const coord = new FederatedCoordinator('coord-1');

    // Seed the coordinator with one high-quality pattern first.
    coord.aggregate(
      makeExport(
        'agent-1',
        [{ embedding: [0.5, 0.5], quality: 0.9, context: [], timestamp: Date.now() }],
        { patternsLearned: 1 }
      )
    );

    const newAgent = coord.createAgent('agent-2');

    assert.strictEqual(newAgent.getAgentId(), 'agent-2');
    // Agent should have some warm-start trajectories
  });

  test('should apply coordinator LoRA', () => {
    const coord = new FederatedCoordinator('coord-1', { hiddenDim: 8 });

    const input = [1, 2, 3, 4, 5, 6, 7, 8];
    const output = coord.applyLora(input);

    // LoRA is a square transform: output dimension matches input.
    assert.strictEqual(output.length, input.length);
  });

  test('should get initial patterns', () => {
    const coord = new FederatedCoordinator('coord-1');

    coord.aggregate(
      makeExport('agent-1', [
        { embedding: [0.1, 0.2], quality: 0.9, context: [], timestamp: Date.now() },
        { embedding: [0.3, 0.4], quality: 0.8, context: [], timestamp: Date.now() },
      ])
    );

    const patterns = coord.getInitialPatterns(5);
    // FIX: the original `assert.ok(patterns.length >= 0)` was vacuously true
    // for any array and could never fail. Assert something falsifiable: the
    // result is an array and respects the requested cap.
    assert.ok(Array.isArray(patterns));
    assert.ok(patterns.length <= 5);
  });
});
|
||||
|
||||
// ============================================
|
||||
// LoRA Tests
|
||||
// ============================================
|
||||
|
||||
// Low-rank adapters: forward/backward passes, freezing, and (de)serialization.
describe('LoraAdapter', () => {
  test('should create adapter with config', () => {
    const adapter = new LoraAdapter({ rank: 8, alpha: 16 }, 64, 64);

    const { rank, alpha } = adapter.getConfig();
    assert.strictEqual(rank, 8);
    assert.strictEqual(alpha, 16);
  });

  test('should forward pass', () => {
    const adapter = new LoraAdapter({ rank: 4 }, 16, 16);

    // Ramp input: 0.0, 0.1, ..., 1.5
    const ramp = Array.from({ length: 16 }, (_, i) => i * 0.1);
    const result = adapter.forward(ramp);

    assert.strictEqual(result.length, 16);
    // Output should differ from input due to LoRA delta
  });

  test('should forward batch', () => {
    const adapter = new LoraAdapter({ rank: 4 }, 8, 8);

    const batch = [
      [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8],
      [0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8, 0.9],
    ];

    const results = adapter.forwardBatch(batch);

    assert.strictEqual(results.length, 2);
    assert.strictEqual(results[0].length, 8);
  });

  test('should backward and update weights', () => {
    const adapter = new LoraAdapter({ rank: 4 }, 8, 8);
    adapter.startTraining(0.01);

    const activations = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8];
    const upstreamGrad = [0.01, 0.02, 0.03, 0.04, 0.05, 0.06, 0.07, 0.08];

    const gradNorm = adapter.backward(activations, upstreamGrad, 0.01);
    assert.ok(gradNorm >= 0);

    // Ending training yields optimizer state after exactly one step.
    const trainingState = adapter.endTraining();
    assert.ok(trainingState);
    assert.strictEqual(trainingState.step, 1);
  });

  test('should freeze and unfreeze', () => {
    const adapter = new LoraAdapter();

    assert.strictEqual(adapter.isFrozen(), false);

    adapter.freeze();
    assert.strictEqual(adapter.isFrozen(), true);

    adapter.unfreeze();
    assert.strictEqual(adapter.isFrozen(), false);
  });

  test('should serialize and deserialize', () => {
    const original = new LoraAdapter({ rank: 4, alpha: 8 }, 16, 16);

    // Round-trip through JSON must preserve the configuration.
    const restored = LoraAdapter.fromJSON(original.toJSON());

    const { rank, alpha } = restored.getConfig();
    assert.strictEqual(rank, 4);
    assert.strictEqual(alpha, 8);
  });

  test('should merge weights', () => {
    const adapter = new LoraAdapter({ rank: 4 }, 8, 8);

    const delta = adapter.merge();

    // The merged delta is a full inputDim x outputDim matrix.
    assert.strictEqual(delta.length, 8);
    assert.strictEqual(delta[0].length, 8);
  });

  test('should report number of parameters', () => {
    const adapter = new LoraAdapter({ rank: 8 }, 64, 64);

    // A: 64x8 plus B: 8x64 => 1024 trainable parameters.
    assert.strictEqual(adapter.numParameters(), 1024);
  });
});
|
||||
|
||||
// Named-adapter registry: creation, activation, merging, and stats.
describe('LoraManager', () => {
  test('should manage multiple adapters', () => {
    const registry = new LoraManager();

    registry.create('task-1', { rank: 4 }, 32, 32);
    registry.create('task-2', { rank: 8 }, 32, 32);

    assert.strictEqual(registry.count(), 2);
    assert.deepStrictEqual(registry.list(), ['task-1', 'task-2']);
  });

  test('should activate adapters', () => {
    const registry = new LoraManager();
    registry.create('task-1');
    registry.create('task-2');

    // Nothing is active until explicitly selected.
    assert.strictEqual(registry.getActiveId(), null);

    registry.activate('task-1');
    assert.strictEqual(registry.getActiveId(), 'task-1');

    registry.deactivate();
    assert.strictEqual(registry.getActiveId(), null);
  });

  test('should forward through active adapter', () => {
    const registry = new LoraManager();
    registry.create('task-1', { rank: 4 }, 8, 8);
    registry.activate('task-1');

    const activations = [0.1, 0.2, 0.3, 0.4, 0.5, 0.6, 0.7, 0.8];
    const transformed = registry.forward(activations);

    assert.strictEqual(transformed.length, 8);
  });

  test('should merge adapters', () => {
    const registry = new LoraManager();
    registry.create('task-1', { rank: 4 }, 8, 8);
    registry.create('task-2', { rank: 4 }, 8, 8);

    const combined = registry.mergeAdapters(['task-1', 'task-2'], 'merged');

    assert.ok(combined);
    // The merged adapter is registered alongside the two sources.
    assert.strictEqual(registry.count(), 3);
  });

  test('should provide stats', () => {
    const registry = new LoraManager();
    registry.create('task-1', { rank: 4 }, 16, 16);
    registry.create('task-2', { rank: 8 }, 16, 16);
    registry.get('task-1').freeze();

    const summary = registry.stats();

    assert.strictEqual(summary.totalAdapters, 2);
    assert.strictEqual(summary.frozenCount, 1);
    assert.ok(summary.totalParameters > 0);
  });
});
|
||||
|
||||
// ============================================
|
||||
// Export Tests
|
||||
// ============================================
|
||||
|
||||
// SafeTensors serialization: building binary buffers from tensors + metadata.
describe('SafeTensorsWriter', () => {
  test('should add tensors', () => {
    const writer = new SafeTensorsWriter();

    writer.add1D('bias', [0.1, 0.2, 0.3]);
    writer.add2D('weight', [[0.1, 0.2], [0.3, 0.4]]);

    const bytes = writer.build();

    // The result is a non-empty binary payload.
    assert.ok(bytes instanceof Uint8Array);
    assert.ok(bytes.length > 0);
  });

  test('should add metadata', () => {
    const writer = new SafeTensorsWriter();

    writer.addMetadata('name', 'test-model');
    writer.addMetadata('version', '1.0.0');
    writer.add1D('data', [1, 2, 3]);

    assert.ok(writer.build().length > 0);
  });
});
|
||||
|
||||
// Round-trip check: write a buffer, then read tensors and metadata back.
describe('SafeTensorsReader', () => {
  test('should read tensors', () => {
    const writer = new SafeTensorsWriter();
    writer.add1D('bias', [0.1, 0.2, 0.3]);
    writer.add2D('weight', [[1, 2], [3, 4]]);
    writer.addMetadata('name', 'test');

    const reader = new SafeTensorsReader(writer.build());

    // Both tensor names survive the round trip.
    const tensorNames = reader.getTensorNames();
    assert.ok(tensorNames.includes('bias'));
    assert.ok(tensorNames.includes('weight'));

    const bias = reader.getTensor1D('bias');
    assert.ok(bias);
    assert.strictEqual(bias.length, 3);

    const weight = reader.getTensor2D('weight');
    assert.ok(weight);
    assert.strictEqual(weight.length, 2);
    assert.strictEqual(weight[0].length, 2);

    assert.strictEqual(reader.getMetadata().name, 'test');
  });
});
|
||||
|
||||
// Export paths: SafeTensors binary, plain JSON, and HuggingFace bundles.
describe('ModelExporter', () => {
  test('should export to SafeTensors', () => {
    const exporter = new ModelExporter();

    const bytes = exporter.toSafeTensors({
      metadata: {
        name: 'test-model',
        version: '1.0.0',
        architecture: 'sona-lora',
      },
      loraWeights: {
        loraA: [[0.1, 0.2], [0.3, 0.4]],
        loraB: [[0.5, 0.6], [0.7, 0.8]],
        scaling: 2.0,
      },
    });

    assert.ok(bytes instanceof Uint8Array);
    assert.ok(bytes.length > 0);
  });

  test('should export to JSON', () => {
    const exporter = new ModelExporter();

    const serialized = exporter.toJSON({
      metadata: { name: 'test', version: '1.0', architecture: 'lora' },
      loraConfig: { rank: 8, alpha: 16, dropout: 0.1, targetModules: ['q', 'v'] },
    });

    // The output must be valid JSON preserving the model fields.
    const decoded = JSON.parse(serialized);
    assert.strictEqual(decoded.metadata.name, 'test');
    assert.strictEqual(decoded.loraConfig.rank, 8);
  });

  test('should export for HuggingFace', () => {
    const exporter = new ModelExporter();

    const bundle = exporter.toHuggingFace({
      metadata: {
        name: 'my-lora',
        version: '1.0.0',
        architecture: 'sona-lora',
        training: { steps: 1000, loss: 0.01, learningRate: 0.001 },
      },
      loraWeights: {
        loraA: [[0.1, 0.2]],
        loraB: [[0.3, 0.4]],
        scaling: 2.0,
      },
    });

    // A HF bundle carries binary weights plus config and model-card text.
    assert.ok(bundle.safetensors instanceof Uint8Array);
    assert.ok(bundle.config.includes('sona-lora'));
    assert.ok(bundle.readme.includes('my-lora'));
  });
});
|
||||
|
||||
// Import paths: SafeTensors round-trip and JSON decoding.
describe('ModelImporter', () => {
  test('should import from SafeTensors', () => {
    const original = {
      metadata: { name: 'test', version: '1.0', architecture: 'lora' },
      loraWeights: {
        loraA: [[0.1, 0.2], [0.3, 0.4]],
        loraB: [[0.5, 0.6], [0.7, 0.8]],
        scaling: 2.0,
      },
    };

    // Export then re-import; the weight matrices must survive.
    const bytes = new ModelExporter().toSafeTensors(original);
    const roundTripped = new ModelImporter().fromSafeTensors(bytes);

    assert.ok(roundTripped.loraWeights);
    assert.strictEqual(roundTripped.loraWeights.loraA.length, 2);
  });

  test('should import from JSON', () => {
    const payload = JSON.stringify({
      metadata: { name: 'test', version: '1.0', architecture: 'lora' },
      loraConfig: { rank: 8 },
    });

    const decoded = new ModelImporter().fromJSON(payload);

    assert.strictEqual(decoded.metadata.name, 'test');
    assert.strictEqual(decoded.loraConfig.rank, 8);
  });
});
|
||||
|
||||
// Dataset serialization to line-delimited JSON and CSV.
describe('DatasetExporter', () => {
  test('should export to JSONL', () => {
    const exporter = new DatasetExporter();

    const samples = [
      { input: [0.1, 0.2], output: [0.3, 0.4], quality: 0.9 },
      { input: [0.5, 0.6], output: [0.7, 0.8], quality: 0.8 },
    ];

    const lines = exporter.toJSONL(samples).split('\n');

    // One JSON document per line; no trailing newline expected.
    assert.strictEqual(lines.length, 2);
    assert.deepStrictEqual(JSON.parse(lines[0]).input, [0.1, 0.2]);
  });

  test('should export to CSV', () => {
    const exporter = new DatasetExporter();

    const csv = exporter.toCSV([
      { input: [0.1], output: [0.2], quality: 0.9 },
    ]);

    // Header row first, then the sample's quality value somewhere below.
    assert.ok(csv.startsWith('quality,input,output'));
    assert.ok(csv.includes('0.9'));
  });
});
|
||||
|
||||
// ============================================
|
||||
// Training Pipeline Tests
|
||||
// ============================================
|
||||
|
||||
// Learning-rate schedules over a fixed number of total steps.
describe('LRScheduler', () => {
  // Full training config using the given schedule; every other knob fixed.
  // Factored out because both tests differed only in the `scheduler` field.
  const configWithSchedule = (scheduler) => ({
    learningRate: 0.01,
    batchSize: 32,
    epochs: 10,
    scheduler,
    warmupSteps: 0,
    weightDecay: 0,
    gradientClip: 1,
    earlyStoppingPatience: 3,
    checkpointInterval: 1,
    ewcLambda: 2000,
    validationSplit: 0.1,
  });

  test('should return constant LR', () => {
    const scheduler = new LRScheduler(configWithSchedule('constant'), 100);

    // Constant schedule: stepping never changes the rate.
    assert.strictEqual(scheduler.getLR(), 0.01);
    scheduler.step();
    assert.strictEqual(scheduler.getLR(), 0.01);
  });

  test('should decay with cosine schedule', () => {
    const scheduler = new LRScheduler(configWithSchedule('cosine'), 100);

    const before = scheduler.getLR();
    for (let step = 0; step < 50; step++) scheduler.step();
    const halfway = scheduler.getLR();

    assert.ok(halfway < before, 'LR should decay');
  });
});
|
||||
|
||||
// Rolling training metrics: losses, validation losses, and throughput.
describe('MetricsTracker', () => {
  test('should track losses', () => {
    const metrics = new MetricsTracker();

    for (const loss of [0.5, 0.4, 0.3]) {
      metrics.recordLoss(loss);
    }

    // Mean over the last three recorded losses is 0.4.
    assert.ok(Math.abs(metrics.avgLoss(3) - 0.4) < 0.01);
  });

  test('should track validation losses', () => {
    const metrics = new MetricsTracker();

    for (const valLoss of [0.6, 0.5, 0.4]) {
      metrics.recordValLoss(valLoss);
    }

    assert.strictEqual(metrics.bestValLoss(), 0.4);
  });

  test('should compute steps per second', () => {
    const metrics = new MetricsTracker();

    metrics.recordStepTime(100);
    metrics.recordStepTime(100);

    assert.ok(metrics.stepsPerSecond() > 0);
  });
});
|
||||
|
||||
// End-to-end LoRA training driver: data ingestion, training, metrics.
describe('TrainingPipeline', () => {
  test('should add training data', () => {
    const pipeline = new TrainingPipeline({ batchSize: 2 });

    pipeline.addData([
      { input: [0.1, 0.2], target: [0.3, 0.4], quality: 0.9 },
      { input: [0.5, 0.6], target: [0.7, 0.8], quality: 0.8 },
      { input: [0.9, 1.0], target: [1.1, 1.2], quality: 0.7 },
    ]);
    // Should have 2 batches (2 + 1)
  });

  test('should train model', () => {
    const pipeline = new TrainingPipeline({
      learningRate: 0.01,
      batchSize: 2,
      epochs: 2,
      validationSplit: 0,
    });

    // Ten random 8-dim samples, all at or above the 0.8 quality floor.
    const samples = Array.from({ length: 10 }, () => ({
      input: Array.from({ length: 8 }, () => Math.random()),
      target: Array.from({ length: 8 }, () => Math.random()),
      quality: 0.8 + Math.random() * 0.2,
    }));

    pipeline.addData(samples);
    const summary = pipeline.train();

    assert.strictEqual(summary.epochs, 2);
    assert.ok(summary.steps > 0);
    assert.ok(summary.lossHistory.length > 0);
  });

  test('should get metrics', () => {
    const pipeline = new TrainingPipeline();

    const metrics = pipeline.getMetrics();

    // An untrained pipeline sits at epoch 0, step 0.
    assert.strictEqual(metrics.epoch, 0);
    assert.strictEqual(metrics.step, 0);
  });

  test('should get adapter', () => {
    const pipeline = new TrainingPipeline();

    assert.ok(pipeline.getAdapter() instanceof LoraAdapter);
  });
});
|
||||
|
||||
// Preset pipeline builders for common training scenarios.
describe('TrainingFactory', () => {
  test('should create quick finetune pipeline', () => {
    const pipeline = TrainingFactory.quickFinetune();
    assert.ok(pipeline.getAdapter());
  });

  test('should create deep training pipeline', () => {
    const pipeline = TrainingFactory.deepTraining();
    assert.ok(pipeline.getAdapter());
  });

  test('should create continual learning pipeline', () => {
    const pipeline = TrainingFactory.continualLearning(5000);
    // Continual learning must come wired with an EWC regularizer.
    assert.ok(pipeline.getEwcManager());
  });

  test('should create federated aggregation pipeline', () => {
    const pipeline = TrainingFactory.federatedAggregation();
    assert.ok(pipeline.getAdapter());
  });
});
|
||||
|
||||
// ============================================
|
||||
// Integration Tests
|
||||
// ============================================
|
||||
|
||||
// Cross-component flows: agents -> coordinator -> export -> import.
describe('Integration: Federated + LoRA + Export', () => {
  test('should train agent, export, and import', () => {
    // Run a few random tasks through a fresh agent.
    const worker = new EphemeralAgent('agent-1', { hiddenDim: 8 });
    for (let i = 0; i < 5; i++) {
      worker.processTask(
        Array.from({ length: 8 }, () => Math.random()),
        0.7 + Math.random() * 0.3
      );
    }

    // Feed the agent's exported state into a coordinator.
    const coordinator = new FederatedCoordinator('coord-1', { hiddenDim: 8 });
    const aggregation = coordinator.aggregate(worker.exportState());
    assert.ok(aggregation.trajectoriesAccepted > 0);

    // Round-trip the coordinator's model through JSON export/import.
    const serialized = new ModelExporter().toJSON({
      metadata: {
        name: 'federated-model',
        version: '1.0.0',
        architecture: 'sona-federated',
      },
      patterns: coordinator.getAllPatterns(),
    });
    const restored = new ModelImporter().fromJSON(serialized);

    assert.strictEqual(restored.metadata.name, 'federated-model');
  });

  test('should train with pipeline and export LoRA', () => {
    const pipeline = new TrainingPipeline({
      learningRate: 0.01,
      epochs: 1,
      batchSize: 2,
      validationSplit: 0,
    });

    // Four single-sample batches of random 8-dim vectors.
    for (let batch = 0; batch < 4; batch++) {
      pipeline.addBatch(
        [Array.from({ length: 8 }, () => Math.random())],
        [Array.from({ length: 8 }, () => Math.random())],
        [0.8]
      );
    }

    const summary = pipeline.train();
    assert.ok(summary.steps > 0);

    // Export the trained adapter to SafeTensors...
    const adapter = pipeline.getAdapter();
    const bytes = new ModelExporter().toSafeTensors({
      metadata: {
        name: 'trained-lora',
        version: '1.0.0',
        architecture: 'lora',
        training: {
          steps: summary.steps,
          loss: summary.finalLoss,
          learningRate: 0.01,
        },
      },
      loraWeights: adapter.getWeights(),
      loraConfig: adapter.getConfig(),
    });
    assert.ok(bytes.length > 0);

    // ...and confirm the weights survive a re-import.
    const restored = new ModelImporter().fromSafeTensors(bytes);
    assert.ok(restored.loraWeights);
  });
});
|
||||
182
vendor/ruvector/npm/packages/ruvllm/test/basic.test.js
vendored
Normal file
182
vendor/ruvector/npm/packages/ruvllm/test/basic.test.js
vendored
Normal file
@@ -0,0 +1,182 @@
|
||||
/**
|
||||
* Basic tests for @ruvector/ruvllm
|
||||
*/
|
||||
|
||||
const { test, describe } = require('node:test');
|
||||
const assert = require('node:assert');
|
||||
|
||||
// We test against the source for now
|
||||
// In production, tests would run against dist/
|
||||
const { RuvLLM, SimdOps, version, hasSimdSupport } = require('../dist/cjs/index.js');
|
||||
|
||||
// Core engine surface: construction, querying, routing, memory, embeddings.
describe('RuvLLM', () => {
  test('should create instance', () => {
    assert.ok(new RuvLLM());
  });

  test('should create instance with config', () => {
    const engine = new RuvLLM({
      embeddingDim: 384,
      learningEnabled: false,
    });
    assert.ok(engine);
  });

  test('should query and get response', () => {
    const engine = new RuvLLM();
    const response = engine.query('test query');

    // Every response carries text, a confidence score, and identifiers.
    assert.ok(response.text);
    assert.ok(typeof response.confidence === 'number');
    assert.ok(response.model);
    assert.ok(response.requestId);
  });

  test('should generate text', () => {
    const engine = new RuvLLM();
    const generated = engine.generate('test prompt');

    assert.ok(typeof generated === 'string');
    assert.ok(generated.length > 0);
  });

  test('should route queries', () => {
    const engine = new RuvLLM();
    const routing = engine.route('test query');

    assert.ok(routing.model);
    assert.ok(typeof routing.contextSize === 'number');
    assert.ok(typeof routing.temperature === 'number');
    assert.ok(typeof routing.confidence === 'number');
  });

  test('should add and search memory', () => {
    const engine = new RuvLLM();

    const memoryId = engine.addMemory('test content', { type: 'test' });
    assert.ok(typeof memoryId === 'number');

    const hits = engine.searchMemory('test', 5);
    assert.ok(Array.isArray(hits));
  });

  test('should compute embeddings', () => {
    const engine = new RuvLLM({ embeddingDim: 768 });
    const vector = engine.embed('test text');

    // Embedding length follows the configured dimension.
    assert.ok(Array.isArray(vector));
    assert.strictEqual(vector.length, 768);
  });

  test('should compute similarity', () => {
    const engine = new RuvLLM();
    const score = engine.similarity('hello', 'hello');

    assert.ok(typeof score === 'number');
    assert.ok(score >= 0 && score <= 1);
  });

  test('should return stats', () => {
    const engine = new RuvLLM();
    const snapshot = engine.stats();

    assert.ok(typeof snapshot.totalQueries === 'number');
    assert.ok(typeof snapshot.memoryNodes === 'number');
    assert.ok(typeof snapshot.avgLatencyMs === 'number');
  });

  test('should handle batch queries', () => {
    const engine = new RuvLLM();
    const batch = engine.batchQuery({
      queries: ['query 1', 'query 2', 'query 3'],
    });

    // One response per submitted query.
    assert.strictEqual(batch.responses.length, 3);
    assert.ok(typeof batch.totalLatencyMs === 'number');
  });
});
|
||||
|
||||
// Vector math kernels exposed by the SIMD layer.
describe('SimdOps', () => {
  test('should create instance', () => {
    assert.ok(new SimdOps());
  });

  test('should compute dot product', () => {
    const ops = new SimdOps();

    // 1*4 + 2*5 + 3*6 = 32
    assert.strictEqual(ops.dotProduct([1, 2, 3], [4, 5, 6]), 32);
  });

  test('should compute cosine similarity', () => {
    const ops = new SimdOps();

    // Same vector should have similarity 1
    const same = ops.cosineSimilarity([1, 0, 0], [1, 0, 0]);
    assert.ok(Math.abs(same - 1) < 0.0001);

    // Orthogonal vectors should have similarity 0
    const ortho = ops.cosineSimilarity([1, 0, 0], [0, 1, 0]);
    assert.ok(Math.abs(ortho) < 0.0001);
  });

  test('should compute L2 distance', () => {
    const ops = new SimdOps();

    // 3-4-5 triangle: sqrt(9 + 16) = 5
    assert.strictEqual(ops.l2Distance([0, 0], [3, 4]), 5);
  });

  test('should compute softmax', () => {
    const ops = new SimdOps();
    const probs = ops.softmax([1, 2, 3]);

    // Probabilities sum to 1...
    const total = probs.reduce((acc, p) => acc + p, 0);
    assert.ok(Math.abs(total - 1) < 0.0001);

    // ...and preserve the ordering of the inputs.
    assert.ok(probs[0] < probs[1]);
    assert.ok(probs[1] < probs[2]);
  });

  test('should compute ReLU', () => {
    const ops = new SimdOps();

    // Negatives clamp to zero; non-negatives pass through.
    assert.deepStrictEqual(ops.relu([-1, 0, 1, 2]), [0, 0, 1, 2]);
  });

  test('should normalize vectors', () => {
    const ops = new SimdOps();
    const unit = ops.normalize([3, 4]);

    // Normalized vector has (approximately) unit length.
    const magnitude = Math.sqrt(unit[0] ** 2 + unit[1] ** 2);
    assert.ok(Math.abs(magnitude - 1) < 0.0001);
  });

  test('should report capabilities', () => {
    const ops = new SimdOps();
    const caps = ops.capabilities();

    assert.ok(Array.isArray(caps));
    assert.ok(caps.length > 0);
  });
});
|
||||
|
||||
// Top-level helper exports from the package entry point.
describe('Module exports', () => {
  test('should export version', () => {
    assert.ok(typeof version === 'function');
    assert.ok(typeof version() === 'string');
  });

  test('should export hasSimdSupport', () => {
    assert.ok(typeof hasSimdSupport === 'function');
    assert.ok(typeof hasSimdSupport() === 'boolean');
  });
});
|
||||
655
vendor/ruvector/npm/packages/ruvllm/test/benchmark.js
vendored
Normal file
655
vendor/ruvector/npm/packages/ruvllm/test/benchmark.js
vendored
Normal file
@@ -0,0 +1,655 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* Comprehensive Benchmark Suite for RuvLLM
|
||||
*
|
||||
* Tests performance of all major components:
|
||||
* - Core Engine (query, generate, embed)
|
||||
* - Memory operations (add, search)
|
||||
* - SIMD operations
|
||||
* - LoRA adapters
|
||||
* - Federated learning
|
||||
* - Training pipeline
|
||||
* - Export/Import
|
||||
*/
|
||||
|
||||
const {
|
||||
RuvLLM,
|
||||
SimdOps,
|
||||
SessionManager,
|
||||
StreamingGenerator,
|
||||
SonaCoordinator,
|
||||
TrajectoryBuilder,
|
||||
ReasoningBank,
|
||||
EwcManager,
|
||||
EphemeralAgent,
|
||||
FederatedCoordinator,
|
||||
LoraAdapter,
|
||||
LoraManager,
|
||||
SafeTensorsWriter,
|
||||
SafeTensorsReader,
|
||||
ModelExporter,
|
||||
TrainingPipeline,
|
||||
TrainingFactory,
|
||||
} = require('../dist/cjs/index.js');
|
||||
|
||||
// Benchmark configuration
const CONFIG = {
  // Iteration counts, chosen per expected cost of the operation under test.
  iterations: {
    fast: 100,
    medium: 1000,
    slow: 10000,
  },
  // Vector dimensions exercised by the SIMD suites.
  vectorDims: [64, 128, 256, 512, 768],
  // Batch sizes exercised by the LoRA forwardBatch suite.
  batchSizes: [1, 10, 100],
};

// Results storage
const results = {
  timestamp: new Date().toISOString(),
  platform: process.platform,
  arch: process.arch,
  nodeVersion: process.version,
  benchmarks: {}, // populated one key per suite by main()
};
|
||||
|
||||
// Utility functions
|
||||
/**
 * Render a duration given in nanoseconds as a human-readable string,
 * choosing the largest unit (ns / μs / ms / s) that keeps the value
 * below 1000, with two decimal places.
 */
function formatTime(ns) {
  if (ns >= 1000000000) return `${(ns / 1000000000).toFixed(2)}s`;
  if (ns >= 1000000) return `${(ns / 1000000).toFixed(2)}ms`;
  if (ns >= 1000) return `${(ns / 1000).toFixed(2)}μs`;
  return `${ns.toFixed(2)}ns`;
}
|
||||
|
||||
/**
 * Render a throughput figure as a human-readable ops/s string,
 * scaling to K or M for values of 1000 and above.
 */
function formatOps(ops) {
  if (ops >= 1000000) return `${(ops / 1000000).toFixed(2)}M ops/s`;
  if (ops >= 1000) return `${(ops / 1000).toFixed(2)}K ops/s`;
  return `${ops.toFixed(0)} ops/s`;
}
|
||||
|
||||
/** Build a vector of `dim` uniform-random values in [0, 1). */
function generateVector(dim) {
  const vec = new Array(dim);
  for (let i = 0; i < dim; i++) {
    vec[i] = Math.random();
  }
  return vec;
}
|
||||
|
||||
/** Build `count` independent random vectors of dimension `dim`. */
function generateVectors(count, dim) {
  const out = [];
  for (let i = 0; i < count; i++) {
    out.push(Array.from({ length: dim }, () => Math.random()));
  }
  return out;
}
|
||||
|
||||
/**
 * Time `fn` over `iterations` synchronous calls and return summary stats
 * (total ms, average ns, ops/s, plus pre-formatted strings).
 * A short warmup — a tenth of the run, capped at 10 calls — runs first
 * so first-call overhead does not skew the measurement.
 */
function benchmark(name, fn, iterations = CONFIG.iterations.medium) {
  const warmupRuns = Math.min(10, iterations / 10);
  for (let i = 0; i < warmupRuns; i++) {
    fn();
  }

  const startedAt = process.hrtime.bigint();
  for (let i = 0; i < iterations; i++) {
    fn();
  }
  const endedAt = process.hrtime.bigint();

  const totalNs = Number(endedAt - startedAt);
  const avgNs = totalNs / iterations;
  const opsPerSec = 1e9 / avgNs;

  return {
    name,
    iterations,
    totalMs: totalNs / 1e6,
    avgNs,
    opsPerSec,
    formatted: {
      avg: formatTime(avgNs),
      ops: formatOps(opsPerSec),
    },
  };
}
|
||||
|
||||
/**
 * Async variant of `benchmark`: awaits `fn` on every call and returns the
 * same stats shape. Uses a smaller warmup (capped at 5 calls) and a lower
 * default iteration count, since async paths are typically slower.
 */
async function benchmarkAsync(name, fn, iterations = CONFIG.iterations.fast) {
  const warmupRuns = Math.min(5, iterations / 10);
  for (let i = 0; i < warmupRuns; i++) {
    await fn();
  }

  const startedAt = process.hrtime.bigint();
  for (let i = 0; i < iterations; i++) {
    await fn();
  }
  const endedAt = process.hrtime.bigint();

  const totalNs = Number(endedAt - startedAt);
  const avgNs = totalNs / iterations;
  const opsPerSec = 1e9 / avgNs;

  return {
    name,
    iterations,
    totalMs: totalNs / 1e6,
    avgNs,
    opsPerSec,
    formatted: {
      avg: formatTime(avgNs),
      ops: formatOps(opsPerSec),
    },
  };
}
|
||||
|
||||
// ============================================
|
||||
// Benchmark Suites
|
||||
// ============================================
|
||||
|
||||
// Benchmarks the core engine entry points (query, generate, embed,
// similarity, route) and prints one line per measurement.
async function benchmarkCoreEngine() {
  console.log('\n📊 Core Engine Benchmarks');
  console.log('─'.repeat(60));

  const llm = new RuvLLM({ embeddingDim: 256 });
  const benchmarks = [];

  // Query benchmark
  benchmarks.push(benchmark('query (short)', () => {
    llm.query('Hello world');
  }, CONFIG.iterations.medium));

  benchmarks.push(benchmark('query (long)', () => {
    llm.query('This is a longer query that contains more text and should require more processing time to handle properly.');
  }, CONFIG.iterations.medium));

  // Generate benchmark
  benchmarks.push(benchmark('generate', () => {
    llm.generate('Write a story');
  }, CONFIG.iterations.medium));

  // Embed benchmark — a fresh engine per dimension so embeddingDim matches.
  for (const dim of [256, 768]) {
    const llmDim = new RuvLLM({ embeddingDim: dim });
    benchmarks.push(benchmark(`embed (${dim}d)`, () => {
      llmDim.embed('Test embedding text');
    }, CONFIG.iterations.medium));
  }

  // Similarity benchmark
  benchmarks.push(benchmark('similarity', () => {
    llm.similarity('hello world', 'hello there');
  }, CONFIG.iterations.medium));

  // Route benchmark
  benchmarks.push(benchmark('route', () => {
    llm.route('What is machine learning?');
  }, CONFIG.iterations.medium));

  for (const b of benchmarks) {
    console.log(`  ${b.name.padEnd(25)} ${b.formatted.avg.padStart(12)} | ${b.formatted.ops.padStart(15)}`);
  }

  return benchmarks;
}
|
||||
|
||||
// Benchmarks memory writes (addMemory) and reads (searchMemory) at
// several top-k values against a 100-item pre-populated store.
async function benchmarkMemory() {
  console.log('\n📊 Memory Operations Benchmarks');
  console.log('─'.repeat(60));

  const llm = new RuvLLM({ embeddingDim: 256 });
  const benchmarks = [];

  // Add memory benchmark (random suffix keeps each stored entry distinct)
  benchmarks.push(benchmark('addMemory', () => {
    llm.addMemory('Test content ' + Math.random(), { type: 'test' });
  }, CONFIG.iterations.medium));

  // Pre-populate memory for search
  for (let i = 0; i < 100; i++) {
    llm.addMemory(`Memory item ${i}`, { index: i });
  }

  // Search memory benchmark
  for (const k of [5, 10, 20]) {
    benchmarks.push(benchmark(`searchMemory (k=${k})`, () => {
      llm.searchMemory('Test search query', k);
    }, CONFIG.iterations.fast));
  }

  for (const b of benchmarks) {
    console.log(`  ${b.name.padEnd(25)} ${b.formatted.avg.padStart(12)} | ${b.formatted.ops.padStart(15)}`);
  }

  return benchmarks;
}
|
||||
|
||||
// Benchmarks the SIMD vector kernels (dot product, cosine similarity,
// L2 distance) across every configured dimension, plus softmax and
// normalize at two representative sizes.
async function benchmarkSimd() {
  console.log('\n📊 SIMD Operations Benchmarks');
  console.log('─'.repeat(60));

  const simd = new SimdOps();
  const benchmarks = [];

  // Pairwise kernels at every configured dimension.
  for (const dim of CONFIG.vectorDims) {
    const a = generateVector(dim);
    const b = generateVector(dim);

    benchmarks.push(benchmark(`dotProduct (${dim}d)`, () => {
      simd.dotProduct(a, b);
    }, CONFIG.iterations.slow));

    benchmarks.push(benchmark(`cosineSimilarity (${dim}d)`, () => {
      simd.cosineSimilarity(a, b);
    }, CONFIG.iterations.slow));

    benchmarks.push(benchmark(`l2Distance (${dim}d)`, () => {
      simd.l2Distance(a, b);
    }, CONFIG.iterations.slow));
  }

  // Softmax benchmark
  for (const dim of [64, 256]) {
    const vec = generateVector(dim);
    benchmarks.push(benchmark(`softmax (${dim}d)`, () => {
      simd.softmax(vec);
    }, CONFIG.iterations.medium));
  }

  // Normalize benchmark
  for (const dim of [64, 256]) {
    const vec = generateVector(dim);
    benchmarks.push(benchmark(`normalize (${dim}d)`, () => {
      simd.normalize(vec);
    }, CONFIG.iterations.medium));
  }

  for (const b of benchmarks) {
    console.log(`  ${b.name.padEnd(25)} ${b.formatted.avg.padStart(12)} | ${b.formatted.ops.padStart(15)}`);
  }

  return benchmarks;
}
|
||||
|
||||
// Benchmarks LoRA adapters: forward across a (dimension × rank) grid,
// backward pass, weight merge, and batched forward passes.
async function benchmarkLoRA() {
  console.log('\n📊 LoRA Adapter Benchmarks');
  console.log('─'.repeat(60));

  const benchmarks = [];

  // Forward pass across the dimension/rank grid.
  for (const dim of [64, 128, 256]) {
    for (const rank of [4, 8, 16]) {
      const adapter = new LoraAdapter({ rank }, dim, dim);
      const input = generateVector(dim);

      benchmarks.push(benchmark(`forward (${dim}d, r=${rank})`, () => {
        adapter.forward(input);
      }, CONFIG.iterations.medium));
    }
  }

  // Backward pass benchmark — startTraining() is called first, so the
  // adapter is in training mode before backward() runs.
  const adapter = new LoraAdapter({ rank: 8 }, 128, 128);
  adapter.startTraining(0.001);
  const input = generateVector(128);
  const grad = generateVector(128);

  benchmarks.push(benchmark('backward (128d, r=8)', () => {
    adapter.backward(input, grad, 0.001);
  }, CONFIG.iterations.medium));

  // Merge benchmark
  benchmarks.push(benchmark('merge (128d, r=8)', () => {
    adapter.merge();
  }, CONFIG.iterations.fast));

  // Batch forward benchmark
  for (const batchSize of CONFIG.batchSizes) {
    const batchAdapter = new LoraAdapter({ rank: 8 }, 128, 128);
    const batch = generateVectors(batchSize, 128);

    benchmarks.push(benchmark(`forwardBatch (bs=${batchSize})`, () => {
      batchAdapter.forwardBatch(batch);
    }, CONFIG.iterations.fast));
  }

  for (const b of benchmarks) {
    console.log(`  ${b.name.padEnd(25)} ${b.formatted.avg.padStart(12)} | ${b.formatted.ops.padStart(15)}`);
  }

  return benchmarks;
}
|
||||
|
||||
// Benchmarks federated learning: agent creation, task processing,
// state export, coordinator aggregation, and LoRA application.
async function benchmarkFederated() {
  console.log('\n📊 Federated Learning Benchmarks');
  console.log('─'.repeat(60));

  const benchmarks = [];

  // Agent creation
  benchmarks.push(benchmark('agent create', () => {
    new EphemeralAgent('agent-' + Math.random(), { hiddenDim: 128 });
  }, CONFIG.iterations.medium));

  // Process task
  const agent = new EphemeralAgent('bench-agent', { hiddenDim: 128 });
  const embedding = generateVector(128);

  benchmarks.push(benchmark('processTask', () => {
    agent.processTask(embedding, 0.9);
  }, CONFIG.iterations.medium));

  // Export state — seed 50 more tasks first so the export is non-trivial.
  for (let i = 0; i < 50; i++) {
    agent.processTask(generateVector(128), 0.8 + Math.random() * 0.2);
  }

  benchmarks.push(benchmark('exportState', () => {
    agent.exportState();
  }, CONFIG.iterations.fast));

  // Coordinator aggregation
  const coord = new FederatedCoordinator('coord', { hiddenDim: 128 });
  const exportData = agent.exportState();

  benchmarks.push(benchmark('aggregate', () => {
    coord.aggregate(exportData);
  }, CONFIG.iterations.fast));

  // Apply LoRA
  const input = generateVector(128);
  benchmarks.push(benchmark('applyLora', () => {
    coord.applyLora(input);
  }, CONFIG.iterations.medium));

  for (const b of benchmarks) {
    console.log(`  ${b.name.padEnd(25)} ${b.formatted.avg.padStart(12)} | ${b.formatted.ops.padStart(15)}`);
  }

  return benchmarks;
}
|
||||
|
||||
// Benchmarks the training pipeline: construction, data loading, and one
// short end-to-end training run recorded as a single-iteration result.
async function benchmarkTraining() {
  console.log('\n📊 Training Pipeline Benchmarks');
  console.log('─'.repeat(60));

  const benchmarks = [];

  // Data preparation: 100 synthetic (input, target, quality) samples.
  const data = [];
  for (let i = 0; i < 100; i++) {
    data.push({
      input: generateVector(64),
      target: generateVector(64),
      quality: 0.7 + Math.random() * 0.3,
    });
  }

  // Pipeline creation
  benchmarks.push(benchmark('pipeline create', () => {
    new TrainingPipeline({ batchSize: 16, epochs: 1 });
  }, CONFIG.iterations.medium));

  // Add data — a fresh pipeline per iteration so its data set does not
  // grow across iterations. (The previous version also built an unused
  // `pipeline` instance here; removed.)
  benchmarks.push(benchmark('addData (100 samples)', () => {
    const p = new TrainingPipeline({ batchSize: 16 });
    p.addData(data);
  }, CONFIG.iterations.fast));

  // Training step (mini benchmark): too slow for the benchmark() harness,
  // so it is timed once and recorded as a single-iteration result.
  const trainPipeline = TrainingFactory.quickFinetune();
  trainPipeline.addData(data.slice(0, 32));

  const start = process.hrtime.bigint();
  trainPipeline.train();
  const end = process.hrtime.bigint();

  const trainNs = Number(end - start);
  benchmarks.push({
    name: 'train (32 samples, 3 epochs)',
    iterations: 1,
    totalMs: trainNs / 1e6,
    avgNs: trainNs,
    opsPerSec: 1e9 / trainNs,
    formatted: {
      avg: formatTime(trainNs),
      ops: formatOps(1e9 / trainNs),
    },
  });

  for (const b of benchmarks) {
    console.log(`  ${b.name.padEnd(30)} ${b.formatted.avg.padStart(12)} | ${b.formatted.ops.padStart(15)}`);
  }

  return benchmarks;
}
|
||||
|
||||
// Benchmarks export/import paths: SafeTensors write/read, model export
// to JSON and SafeTensors, and LoRA adapter (de)serialization.
async function benchmarkExport() {
  console.log('\n📊 Export/Import Benchmarks');
  console.log('─'.repeat(60));

  const benchmarks = [];

  // SafeTensors write — a fresh writer per iteration; the outer `writer`
  // is reserved for building the buffer used by the read benchmark below.
  const writer = new SafeTensorsWriter();
  const weights2D = Array.from({ length: 64 }, () => generateVector(64));
  const weights1D = generateVector(64);

  benchmarks.push(benchmark('safetensors write', () => {
    const w = new SafeTensorsWriter();
    w.add2D('weights', weights2D);
    w.add1D('bias', weights1D);
    w.build();
  }, CONFIG.iterations.medium));

  // SafeTensors read — build one buffer up front, decode it repeatedly.
  writer.add2D('weights', weights2D);
  writer.add1D('bias', weights1D);
  const buffer = writer.build();

  benchmarks.push(benchmark('safetensors read', () => {
    const r = new SafeTensorsReader(buffer);
    r.getTensor2D('weights');
    r.getTensor1D('bias');
  }, CONFIG.iterations.medium));

  // Model export JSON
  const exporter = new ModelExporter();
  const model = {
    metadata: { name: 'bench', version: '1.0', architecture: 'lora' },
    loraWeights: {
      loraA: weights2D,
      loraB: weights2D,
      scaling: 2.0,
    },
  };

  benchmarks.push(benchmark('export JSON', () => {
    exporter.toJSON(model);
  }, CONFIG.iterations.medium));

  benchmarks.push(benchmark('export SafeTensors', () => {
    exporter.toSafeTensors(model);
  }, CONFIG.iterations.medium));

  // LoRA serialization
  const adapter = new LoraAdapter({ rank: 8 }, 64, 64);
  benchmarks.push(benchmark('LoRA toJSON', () => {
    adapter.toJSON();
  }, CONFIG.iterations.medium));

  const json = adapter.toJSON();
  benchmarks.push(benchmark('LoRA fromJSON', () => {
    LoraAdapter.fromJSON(json);
  }, CONFIG.iterations.medium));

  for (const b of benchmarks) {
    console.log(`  ${b.name.padEnd(25)} ${b.formatted.avg.padStart(12)} | ${b.formatted.ops.padStart(15)}`);
  }

  return benchmarks;
}
|
||||
|
||||
// Benchmarks SONA learning components: ReasoningBank store/search, EWC
// task registration and penalty, trajectory building, and the
// SonaCoordinator trajectory-recording path.
async function benchmarkSona() {
  console.log('\n📊 SONA Learning Benchmarks');
  console.log('─'.repeat(60));

  const benchmarks = [];

  // ReasoningBank
  const bank = new ReasoningBank(0.7);
  const embedding = generateVector(64);

  benchmarks.push(benchmark('bank store', () => {
    bank.store('query_response', generateVector(64));
  }, CONFIG.iterations.medium));

  // Pre-populate so findSimilar searches a non-trivial bank.
  for (let i = 0; i < 100; i++) {
    bank.store('query_response', generateVector(64));
  }

  benchmarks.push(benchmark('bank findSimilar (k=5)', () => {
    bank.findSimilar(embedding, 5);
  }, CONFIG.iterations.fast));

  // EWC
  const ewc = new EwcManager(2000);
  const weights = generateVector(256);

  benchmarks.push(benchmark('ewc registerTask', () => {
    ewc.registerTask('task-' + Math.random(), weights);
  }, CONFIG.iterations.medium));

  // Seed a handful of tasks so computePenalty has state to work against.
  for (let i = 0; i < 5; i++) {
    ewc.registerTask(`task-${i}`, generateVector(256));
  }

  benchmarks.push(benchmark('ewc computePenalty', () => {
    ewc.computePenalty(weights);
  }, CONFIG.iterations.medium));

  // Trajectory
  benchmarks.push(benchmark('trajectory build', () => {
    const builder = new TrajectoryBuilder();
    builder.startStep('query', 'test');
    builder.endStep('response', 0.9);
    builder.complete('success');
  }, CONFIG.iterations.medium));

  // SonaCoordinator
  const sona = new SonaCoordinator();
  const trajectory = new TrajectoryBuilder()
    .startStep('query', 'test')
    .endStep('response', 0.9)
    .complete('success');

  benchmarks.push(benchmark('sona recordTrajectory', () => {
    sona.recordTrajectory(trajectory);
  }, CONFIG.iterations.medium));

  for (const b of benchmarks) {
    console.log(`  ${b.name.padEnd(25)} ${b.formatted.avg.padStart(12)} | ${b.formatted.ops.padStart(15)}`);
  }

  return benchmarks;
}
|
||||
|
||||
// Benchmarks session management (create / chat / export / import) and
// the async streaming generator.
async function benchmarkSession() {
  console.log('\n📊 Session & Streaming Benchmarks');
  console.log('─'.repeat(60));

  const llm = new RuvLLM();
  const benchmarks = [];

  // Session creation
  const sessions = new SessionManager(llm);
  benchmarks.push(benchmark('session create', () => {
    sessions.create({ userId: 'bench' });
  }, CONFIG.iterations.medium));

  // Session chat
  const session = sessions.create();
  benchmarks.push(benchmark('session chat', () => {
    sessions.chat(session.id, 'Hello');
  }, CONFIG.iterations.medium));

  // Session export/import — seed two messages so exports are non-empty.
  sessions.chat(session.id, 'Message 1');
  sessions.chat(session.id, 'Message 2');
  const exported = sessions.export(session.id);

  benchmarks.push(benchmark('session export', () => {
    sessions.export(session.id);
  }, CONFIG.iterations.medium));

  benchmarks.push(benchmark('session import', () => {
    sessions.import(exported);
  }, CONFIG.iterations.medium));

  // Streaming (async) — measured with the async harness, few iterations.
  const streamer = new StreamingGenerator(llm);
  const streamResult = await benchmarkAsync('stream collect', async () => {
    await streamer.collect('Test');
  }, 10);
  benchmarks.push(streamResult);

  for (const b of benchmarks) {
    console.log(`  ${b.name.padEnd(25)} ${b.formatted.avg.padStart(12)} | ${b.formatted.ops.padStart(15)}`);
  }

  return benchmarks;
}
|
||||
|
||||
// ============================================
|
||||
// Main
|
||||
// ============================================
|
||||
|
||||
// Runs every benchmark suite in sequence, prints slowest/fastest
// summaries, persists the JSON results, and returns the results object.
async function main() {
  console.log('╔════════════════════════════════════════════════════════════╗');
  console.log('║           RuvLLM Comprehensive Benchmark Suite             ║');
  console.log('╠════════════════════════════════════════════════════════════╣');
  console.log(`║ Platform: ${process.platform.padEnd(10)} Arch: ${process.arch.padEnd(10)} Node: ${process.version.padEnd(10)} ║`);
  console.log('╚════════════════════════════════════════════════════════════╝');

  const startTime = Date.now();

  // Each suite returns an array of measurement records.
  results.benchmarks.core = await benchmarkCoreEngine();
  results.benchmarks.memory = await benchmarkMemory();
  results.benchmarks.simd = await benchmarkSimd();
  results.benchmarks.lora = await benchmarkLoRA();
  results.benchmarks.federated = await benchmarkFederated();
  results.benchmarks.training = await benchmarkTraining();
  results.benchmarks.export = await benchmarkExport();
  results.benchmarks.sona = await benchmarkSona();
  results.benchmarks.session = await benchmarkSession();

  const totalTime = Date.now() - startTime;

  console.log('\n╔════════════════════════════════════════════════════════════╗');
  console.log('║                          Summary                           ║');
  console.log('╚════════════════════════════════════════════════════════════╝');

  // Rank every measurement by average latency to surface hot spots.
  const allBenchmarks = Object.values(results.benchmarks).flat();
  const sorted = [...allBenchmarks].sort((a, b) => b.avgNs - a.avgNs);

  console.log('\n🐢 Slowest Operations (optimization candidates):');
  for (const b of sorted.slice(0, 10)) {
    console.log(`  ${b.name.padEnd(30)} ${b.formatted.avg.padStart(12)}`);
  }

  console.log('\n🚀 Fastest Operations:');
  for (const b of sorted.slice(-5).reverse()) {
    console.log(`  ${b.name.padEnd(30)} ${b.formatted.avg.padStart(12)}`);
  }

  console.log(`\n✅ Total benchmark time: ${(totalTime / 1000).toFixed(2)}s`);

  // BUG FIX: the log below previously claimed the results were saved,
  // but nothing was ever written. Actually persist the JSON.
  const fs = require('node:fs');
  fs.writeFileSync('benchmark-results.json', JSON.stringify(results, null, 2));
  console.log('\n📄 Full results saved to benchmark-results.json');

  return results;
}
|
||||
|
||||
// Run only when executed directly (`node benchmark.js`) — previously the
// full suite also kicked off whenever this file was require()d, despite
// the "Run if main" comment.
if (require.main === module) {
  main().then(results => {
    // Print JSON between markers so wrapper scripts can capture it.
    console.log('\n--- JSON_RESULTS_START ---');
    console.log(JSON.stringify(results, null, 2));
    console.log('--- JSON_RESULTS_END ---');
  }).catch(err => {
    console.error('Benchmark failed:', err);
    process.exit(1);
  });
}
|
||||
294
vendor/ruvector/npm/packages/ruvllm/test/features.test.js
vendored
Normal file
294
vendor/ruvector/npm/packages/ruvllm/test/features.test.js
vendored
Normal file
@@ -0,0 +1,294 @@
|
||||
/**
|
||||
* Tests for new features: Sessions, Streaming, SONA
|
||||
*/
|
||||
|
||||
const { test, describe } = require('node:test');
|
||||
const assert = require('node:assert');
|
||||
|
||||
const {
|
||||
RuvLLM,
|
||||
SessionManager,
|
||||
StreamingGenerator,
|
||||
SonaCoordinator,
|
||||
TrajectoryBuilder,
|
||||
ReasoningBank,
|
||||
EwcManager,
|
||||
} = require('../dist/cjs/index.js');
|
||||
|
||||
// Covers the SessionManager lifecycle: create, chat with history
// accumulation, history retrieval, export/import round-trips, and end.
describe('SessionManager', () => {
  test('should create session', () => {
    const llm = new RuvLLM();
    const sessions = new SessionManager(llm);

    const session = sessions.create({ userId: 'test' });

    // New sessions get a 'session-' prefixed id, start with no messages,
    // and keep the metadata object passed to create().
    assert.ok(session.id.startsWith('session-'));
    assert.strictEqual(session.messageCount, 0);
    assert.deepStrictEqual(session.metadata, { userId: 'test' });
  });

  test('should chat with context', () => {
    const llm = new RuvLLM();
    const sessions = new SessionManager(llm);

    const session = sessions.create();
    const response1 = sessions.chat(session.id, 'Hello');
    const response2 = sessions.chat(session.id, 'How are you?');

    // Each chat() appends a user and an assistant message.
    assert.strictEqual(session.messages.length, 4); // 2 user + 2 assistant
    assert.ok(response1.text);
    assert.ok(response2.text);
  });

  test('should get history', () => {
    const llm = new RuvLLM();
    const sessions = new SessionManager(llm);

    const session = sessions.create();
    sessions.chat(session.id, 'Message 1');
    sessions.chat(session.id, 'Message 2');

    const history = sessions.getHistory(session.id);
    assert.strictEqual(history.length, 4);

    // The second argument limits how many messages are returned.
    const limited = sessions.getHistory(session.id, 2);
    assert.strictEqual(limited.length, 2);
  });

  test('should export and import session', () => {
    const llm = new RuvLLM();
    const sessions = new SessionManager(llm);

    const session = sessions.create({ key: 'value' });
    sessions.chat(session.id, 'Test message');

    const exported = sessions.export(session.id);
    assert.ok(exported);

    // Import must restore the same id and the full message list.
    const imported = sessions.import(exported);
    assert.strictEqual(imported.id, session.id);
    assert.strictEqual(imported.messages.length, 2);
  });

  test('should end session', () => {
    const llm = new RuvLLM();
    const sessions = new SessionManager(llm);

    const session = sessions.create();
    assert.ok(sessions.get(session.id));

    // Ended sessions are no longer retrievable via get().
    sessions.end(session.id);
    assert.strictEqual(sessions.get(session.id), undefined);
  });
});
|
||||
|
||||
// Covers the StreamingGenerator: async-iterator streaming, collect(),
// and the callback-based API.
describe('StreamingGenerator', () => {
  test('should stream response', async () => {
    const llm = new RuvLLM();
    const generator = new StreamingGenerator(llm);

    // Drain the async iterator, keeping every chunk.
    const received = [];
    for await (const piece of generator.stream('Test prompt')) {
      received.push(piece);
    }

    // At least one chunk arrives, and the final one is flagged done.
    assert.ok(received.length > 0);
    assert.ok(received[received.length - 1].done);
  });

  test('should collect stream', async () => {
    const llm = new RuvLLM();
    const generator = new StreamingGenerator(llm);

    // collect() folds the whole stream into a single string.
    const text = await generator.collect('Test prompt');
    assert.strictEqual(typeof text, 'string');
  });

  test('should use callbacks', async () => {
    const llm = new RuvLLM();
    const generator = new StreamingGenerator(llm);

    let chunksSeen = 0;
    let finished = false;

    await generator.streamWithCallbacks('Test', {
      onChunk: () => { chunksSeen += 1; },
      onComplete: () => { finished = true; },
    });

    assert.ok(chunksSeen > 0);
    assert.ok(finished);
  });
});
|
||||
|
||||
// Covers TrajectoryBuilder's fluent step API and duration tracking.
describe('TrajectoryBuilder', () => {
  test('should build trajectory', () => {
    // Build a two-step trajectory via the fluent interface.
    const trajectory = new TrajectoryBuilder()
      .startStep('query', 'What is AI?')
      .endStep('AI is...', 0.95)
      .startStep('memory', 'searching')
      .endStep('found 3 results', 0.88)
      .complete('success');

    assert.ok(trajectory.id.startsWith('traj-'));
    assert.strictEqual(trajectory.steps.length, 2);
    assert.strictEqual(trajectory.outcome, 'success');
    assert.ok(trajectory.durationMs >= 0);
  });

  test('should track step durations', () => {
    const builder = new TrajectoryBuilder();

    builder.startStep('query', 'input');
    // Busy-wait ~5ms so the step has a measurable duration.
    const begin = Date.now();
    while (Date.now() - begin < 5) { /* wait */ }
    builder.endStep('output', 0.9);

    const trajectory = builder.complete('success');
    assert.ok(trajectory.steps[0].durationMs >= 0);
  });
});
|
||||
|
||||
// Covers ReasoningBank: pattern storage/retrieval, similarity search,
// usage tracking, and aggregate stats.
describe('ReasoningBank', () => {
  test('should store and retrieve patterns', () => {
    const bank = new ReasoningBank(0.5); // Lower threshold for testing

    const embedding = [0.1, 0.2, 0.3, 0.4, 0.5];
    const id = bank.store('query_response', embedding);

    assert.ok(id.startsWith('pat-'));

    // A freshly stored pattern starts with a perfect success rate.
    const pattern = bank.get(id);
    assert.ok(pattern);
    assert.strictEqual(pattern.type, 'query_response');
    assert.strictEqual(pattern.successRate, 1.0);
  });

  test('should find similar patterns', () => {
    const bank = new ReasoningBank(0.5);

    const emb1 = [1, 0, 0, 0, 0];
    const emb2 = [0.9, 0.1, 0, 0, 0]; // Similar to emb1

    bank.store('query_response', emb1);
    bank.store('routing', emb2);

    // Searching with emb1 itself must return at least one match.
    const similar = bank.findSimilar([1, 0, 0, 0, 0], 5);
    assert.ok(similar.length >= 1);
  });

  test('should track usage', () => {
    const bank = new ReasoningBank();

    const embedding = [0.1, 0.2, 0.3];
    const id = bank.store('query_response', embedding);

    // Two successes and one failure.
    bank.recordUsage(id, true);
    bank.recordUsage(id, true);
    bank.recordUsage(id, false);

    const pattern = bank.get(id);
    assert.strictEqual(pattern.useCount, 3);
    assert.ok(pattern.successRate < 1.0);
  });

  test('should provide stats', () => {
    const bank = new ReasoningBank();

    bank.store('query_response', [0.1, 0.2]);
    bank.store('routing', [0.3, 0.4]);

    // Stats break pattern counts down by type.
    const stats = bank.stats();
    assert.strictEqual(stats.totalPatterns, 2);
    assert.strictEqual(stats.byType['query_response'], 1);
    assert.strictEqual(stats.byType['routing'], 1);
  });
});
|
||||
|
||||
// Covers EwcManager: task registration stats and the elastic penalty.
describe('EwcManager', () => {
  test('should register tasks', () => {
    const manager = new EwcManager(1000);

    manager.registerTask('task1', [0.1, 0.2, 0.3]);
    manager.registerTask('task2', [0.4, 0.5, 0.6]);

    // Both tasks are counted and Fisher information is marked computed.
    const stats = manager.stats();
    assert.strictEqual(stats.tasksLearned, 2);
    assert.strictEqual(stats.fisherComputed, true);
  });

  test('should compute penalty', () => {
    const manager = new EwcManager(1000);

    manager.registerTask('task1', [0.5, 0.5, 0.5]);

    // Weights that differ from optimal should have higher penalty
    const atOptimum = manager.computePenalty([0.5, 0.5, 0.5]);
    const farFromOptimum = manager.computePenalty([1.0, 1.0, 1.0]);

    assert.ok(farFromOptimum > atOptimum);
  });
});
|
||||
|
||||
// Covers SonaCoordinator: construction with config, signal recording,
// trajectory buffering, and the background consolidation loop.
describe('SonaCoordinator', () => {
  test('should create with config', () => {
    const sona = new SonaCoordinator({
      instantLoopEnabled: true,
      ewcLambda: 5000,
    });

    assert.ok(sona);
    // stats() exposes both pattern-bank and EWC sub-stats.
    const stats = sona.stats();
    assert.ok(stats.patterns);
    assert.ok(stats.ewc);
  });

  test('should record signals', () => {
    const sona = new SonaCoordinator();

    sona.recordSignal({
      requestId: 'req-123',
      quality: 0.9,
      type: 'positive',
      timestamp: new Date(),
    });

    const stats = sona.stats();
    assert.strictEqual(stats.signalsReceived, 1);
  });

  test('should record trajectories', () => {
    const sona = new SonaCoordinator();

    const builder = new TrajectoryBuilder();
    const trajectory = builder
      .startStep('query', 'test')
      .endStep('response', 0.95)
      .complete('success');

    sona.recordTrajectory(trajectory);

    // Recorded trajectories sit in a buffer until the loop runs.
    const stats = sona.stats();
    assert.strictEqual(stats.trajectoriesBuffered, 1);
  });

  test('should run background loop', () => {
    const sona = new SonaCoordinator();

    // Add some trajectories
    for (let i = 0; i < 3; i++) {
      const builder = new TrajectoryBuilder();
      const trajectory = builder
        .startStep('query', `test ${i}`)
        .endStep(`response ${i}`, 0.95)
        .complete('success');
      sona.recordTrajectory(trajectory);
    }

    // The loop drains and processes every buffered trajectory.
    const result = sona.runBackgroundLoop();
    assert.strictEqual(result.trajectoriesProcessed, 3);
  });
});
|
||||
Reference in New Issue
Block a user