Squashed 'vendor/ruvector/' content from commit b64c2172
git-subtree-dir: vendor/ruvector git-subtree-split: b64c21726f2bb37286d9ee36a7869fef60cc6900
This commit is contained in:
373
npm/packages/ruvector/test/benchmark-gnn.js
Normal file
373
npm/packages/ruvector/test/benchmark-gnn.js
Normal file
@@ -0,0 +1,373 @@
|
||||
/**
|
||||
* GNN Performance Benchmark Suite
|
||||
*
|
||||
* Tests performance of GNN operations and identifies bottlenecks
|
||||
*/
|
||||
|
||||
const { performance } = require('perf_hooks');

// Try to load native GNN module directly
let gnnNative;
// NOTE(review): gnnWrapper is declared but never assigned anywhere in this
// file — confirm it is intentional (leftover from an earlier wrapper path?).
let gnnWrapper;

try {
  gnnNative = require('@ruvector/gnn');
  console.log('✅ @ruvector/gnn loaded');
} catch (e) {
  // Benchmarks that need the native module check `gnnNative` and skip
  // themselves when it is undefined, so a failed load is non-fatal.
  console.log('❌ @ruvector/gnn not available:', e.message);
}
|
||||
|
||||
// Benchmark utilities

/**
 * Build a plain JS array of `dim` uniform random values in [0, 1).
 * @param {number} dim - number of elements to generate
 * @returns {number[]} freshly allocated array of length `dim`
 */
function generateRandomVector(dim) {
  return Array.from({ length: dim }, () => Math.random());
}
|
||||
|
||||
/**
 * Build a Float32Array of `dim` uniform random values in [0, 1).
 * @param {number} dim - number of elements to generate
 * @returns {Float32Array} freshly allocated typed array of length `dim`
 */
function generateRandomFloat32(dim) {
  return Float32Array.from({ length: dim }, () => Math.random());
}
|
||||
|
||||
/**
 * Time `fn` over many iterations and report latency statistics.
 *
 * Runs 10 warmup calls first (to let the JIT settle), then measures each of
 * the requested iterations individually with performance.now().
 *
 * Fix: `iterations` below 1 (callers compute e.g. Math.floor(10000 / count),
 * which is 0 for count > 10000) previously produced an empty sample set and
 * NaN avg / undefined percentiles; it is now clamped to at least 1 run.
 *
 * @param {string} name - label for the benchmark
 * @param {Function} fn - zero-argument function to measure
 * @param {number} [iterations=1000] - requested measurement runs (clamped to >= 1)
 * @returns {{name: string, avg: number, p50: number, p95: number, p99: number, iterations: number}}
 *   stats in milliseconds; `iterations` is the number of runs actually taken
 */
function benchmark(name, fn, iterations = 1000) {
  // Clamp so the statistics below are always well-defined.
  const runs = Math.max(1, Math.floor(iterations));

  // Warmup
  for (let i = 0; i < 10; i++) fn();

  const times = [];
  for (let i = 0; i < runs; i++) {
    const start = performance.now();
    fn();
    times.push(performance.now() - start);
  }

  // Sort ascending so index-based percentiles are valid.
  times.sort((a, b) => a - b);
  const avg = times.reduce((a, b) => a + b, 0) / times.length;
  const p50 = times[Math.floor(times.length * 0.5)];
  const p95 = times[Math.floor(times.length * 0.95)];
  const p99 = times[Math.floor(times.length * 0.99)];

  return { name, avg, p50, p95, p99, iterations: runs };
}
|
||||
|
||||
/**
 * Render a millisecond duration with an adaptive unit (ms / µs / ns).
 * @param {number} ms - duration in milliseconds
 * @returns {string} human-readable duration, two decimal places
 */
function formatMs(ms) {
  if (ms >= 1) return `${ms.toFixed(2)}ms`;
  if (ms >= 0.001) return `${(ms * 1000).toFixed(2)}µs`;
  return `${(ms * 1000000).toFixed(2)}ns`;
}
|
||||
|
||||
/**
 * Print one benchmark result as an indented two-line summary.
 * @param {{name: string, avg: number, p50: number, p95: number, p99: number}} result
 */
function printResult(result) {
  const { name, avg, p50, p95, p99 } = result;
  console.log(` ${name}:`);
  console.log(` avg: ${formatMs(avg)} | p50: ${formatMs(p50)} | p95: ${formatMs(p95)} | p99: ${formatMs(p99)}`);
}
|
||||
|
||||
// Array conversion benchmarks

/**
 * Measures the cost of converting between plain JS arrays and Float32Arrays
 * across several embedding dimensions. The timed closures ARE the benchmark
 * subjects — do not restyle them, or the measurements change meaning.
 * Prints results via printResult; returns nothing.
 */
function benchmarkArrayConversion() {
  console.log('\n📊 Array Conversion Overhead Benchmarks');
  console.log('=========================================');

  const dims = [128, 256, 512, 768, 1024];

  for (const dim of dims) {
    console.log(`\n Dimension: ${dim}`);

    const regularArray = generateRandomVector(dim);
    const float32Array = generateRandomFloat32(dim);

    // Test Array.from on Float32Array
    printResult(benchmark(`Array.from(Float32Array)`, () => {
      return Array.from(float32Array);
    }));

    // Test spread operator
    printResult(benchmark(`[...Float32Array]`, () => {
      return [...float32Array];
    }));

    // Test slice (for regular arrays - noop baseline)
    printResult(benchmark(`Array.slice() (baseline)`, () => {
      return regularArray.slice();
    }));

    // Test Float32Array.from
    printResult(benchmark(`Float32Array.from(Array)`, () => {
      return Float32Array.from(regularArray);
    }));

    // Test new Float32Array
    printResult(benchmark(`new Float32Array(Array)`, () => {
      return new Float32Array(regularArray);
    }));
  }
}
|
||||
|
||||
// GNN operation benchmarks

/**
 * Benchmarks gnnNative.differentiableSearch across dimension/candidate-count
 * combinations, comparing plain-Array input (conversion path) against
 * Float32Array input (zero-copy path) and the direct native entry point.
 * Skips itself when the native module failed to load.
 *
 * Fix: `iters` was Math.min(100, Math.floor(10000 / count)), which evaluates
 * to 0 for any candidate count above 10000 and would hand benchmark() an
 * empty sample set; it is now clamped to at least 1. Behavior for the current
 * counts (100/1000/10000 → 100/10/1) is unchanged.
 */
function benchmarkGnnOperations() {
  if (!gnnNative) {
    console.log('\n⚠️ Skipping GNN benchmarks - module not available');
    return;
  }

  console.log('\n📊 GNN Operation Benchmarks');
  console.log('===========================');

  const dims = [128, 256, 512];
  const candidateCounts = [100, 1000, 10000];

  for (const dim of dims) {
    for (const count of candidateCounts) {
      console.log(`\n Dimension: ${dim}, Candidates: ${count}`);

      // Prepare data as regular arrays (user input)
      const queryArray = generateRandomVector(dim);
      const candidatesArray = Array.from({ length: count }, () => generateRandomVector(dim));

      // Prepare data as Float32Array (pre-converted for max performance)
      const queryFloat32 = new Float32Array(queryArray);
      const candidatesFloat32 = candidatesArray.map(arr => new Float32Array(arr));

      // Scale iterations down for large candidate sets, but never below 1.
      const iters = Math.max(1, Math.min(100, Math.floor(10000 / count)));

      // Measure Float32Array conversion overhead (Array -> Float32Array)
      const conversionOverheadResult = benchmark(`Array→Float32 conversion`, () => {
        const q = new Float32Array(queryArray);
        const c = candidatesArray.map(arr => new Float32Array(arr));
        return { q, c };
      }, iters);
      printResult(conversionOverheadResult);

      // Wrapped interface with regular arrays (tests full conversion + native)
      try {
        const wrappedArrayResult = benchmark(`Wrapped (from Array)`, () => {
          return gnnNative.differentiableSearch(queryArray, candidatesArray, 10, 1.0);
        }, iters);
        printResult(wrappedArrayResult);
      } catch (e) {
        console.log(` Wrapped (from Array): Error - ${e.message}`);
      }

      // Wrapped interface with Float32Array (tests zero-copy path)
      try {
        const wrappedFloat32Result = benchmark(`Wrapped (from Float32)`, () => {
          return gnnNative.differentiableSearch(queryFloat32, candidatesFloat32, 10, 1.0);
        }, iters);
        printResult(wrappedFloat32Result);
      } catch (e) {
        console.log(` Wrapped (from Float32): Error - ${e.message}`);
      }

      // Native direct with Float32Array (bypasses wrapper, max performance)
      try {
        const nativeResult = benchmark(`Native direct (Float32)`, () => {
          return gnnNative.nativeDifferentiableSearch(queryFloat32, candidatesFloat32, 10, 1.0);
        }, iters);
        printResult(nativeResult);
      } catch (e) {
        console.log(` Native direct (Float32): Error - ${e.message}`);
      }

      console.log('');
    }
  }
}
|
||||
|
||||
// Batch operation benchmarks

/**
 * Compares sequential per-query differentiableSearch calls against a
 * (not-yet-implemented) native batch path, for several batch sizes.
 * Skips itself when the native module failed to load.
 */
function benchmarkBatchOperations() {
  if (!gnnNative) return;

  console.log('\n📊 Batch vs Sequential Benchmarks');
  console.log('==================================');

  const dim = 256;
  const batchSizes = [10, 50, 100];
  const candidateCount = 1000;

  const candidates = Array.from({ length: candidateCount }, () => generateRandomVector(dim));

  for (const batchSize of batchSizes) {
    console.log(`\n Batch size: ${batchSize}, Candidates: ${candidateCount}`);

    const queries = Array.from({ length: batchSize }, () => generateRandomVector(dim));

    // Sequential search: one native call per query; only 10 timed iterations
    // because each iteration performs `batchSize` full searches.
    const sequentialResult = benchmark(`Sequential search`, () => {
      const results = [];
      for (const query of queries) {
        results.push(gnnNative.differentiableSearch(query, candidates, 10, 1.0));
      }
      return results;
    }, 10);
    printResult(sequentialResult);

    // Note: batch search would need to be implemented in native
    console.log(` Batch search: Not implemented (potential ${batchSize}x improvement)`);
  }
}
|
||||
|
||||
// RuvectorLayer benchmarks

/**
 * Benchmarks RuvectorLayer.forward over dimension x neighbor-count
 * combinations, comparing plain-Array inputs (conversion path) against
 * Float32Array inputs (zero-copy path). A fresh layer is constructed for
 * each combination. Skips itself when the native module failed to load.
 */
function benchmarkRuvectorLayer() {
  if (!gnnNative) return;

  console.log('\n📊 RuvectorLayer Benchmarks');
  console.log('===========================');

  const dims = [128, 256, 512];
  const neighborCounts = [5, 10, 20, 50];

  for (const dim of dims) {
    for (const neighborCount of neighborCounts) {
      console.log(`\n Dimension: ${dim}, Neighbors: ${neighborCount}`);

      // Constructor args look like (inDim, outDim, heads?, dropout?) —
      // NOTE(review): confirm against the @ruvector/gnn API.
      const layer = new gnnNative.RuvectorLayer(dim, dim, 4, 0.1);

      // Test with regular arrays (triggers conversion)
      const nodeArray = generateRandomVector(dim);
      const neighborsArray = Array.from({ length: neighborCount }, () => generateRandomVector(dim));
      const weightsArray = generateRandomVector(neighborCount);

      // Test with Float32Arrays (zero-copy)
      const nodeFloat32 = new Float32Array(nodeArray);
      const neighborsFloat32 = neighborsArray.map(arr => new Float32Array(arr));
      const weightsFloat32 = new Float32Array(weightsArray);

      try {
        const arrayResult = benchmark(`Layer forward (Array)`, () => {
          return layer.forward(nodeArray, neighborsArray, weightsArray);
        }, 1000);
        printResult(arrayResult);
      } catch (e) {
        console.log(` Layer forward (Array): Error - ${e.message}`);
      }

      try {
        const float32Result = benchmark(`Layer forward (Float32)`, () => {
          return layer.forward(nodeFloat32, neighborsFloat32, weightsFloat32);
        }, 1000);
        printResult(float32Result);
      } catch (e) {
        console.log(` Layer forward (Float32): Error - ${e.message}`);
      }
    }
  }
}
|
||||
|
||||
// TensorCompress benchmarks

/**
 * Benchmarks TensorCompress compress/decompress across embedding dimensions,
 * comparing plain-Array input against Float32Array input. A single compressor
 * instance is reused for all dimensions. Skips itself when the native module
 * failed to load.
 */
function benchmarkTensorCompress() {
  if (!gnnNative) return;

  console.log('\n📊 TensorCompress Benchmarks');
  console.log('============================');

  const dims = [128, 256, 512, 768, 1024];

  const compressor = new gnnNative.TensorCompress();

  for (const dim of dims) {
    console.log(`\n Dimension: ${dim}`);

    const embeddingArray = generateRandomVector(dim);
    const embeddingFloat32 = new Float32Array(embeddingArray);

    // Test with Array (triggers conversion)
    try {
      const arrayResult = benchmark(`Compress Array (freq=0.5)`, () => {
        return compressor.compress(embeddingArray, 0.5);
      }, 1000);
      printResult(arrayResult);
    } catch (e) {
      console.log(` Compress Array: Error - ${e.message}`);
    }

    // Test with Float32Array (zero-copy)
    try {
      const float32Result = benchmark(`Compress Float32 (freq=0.5)`, () => {
        return compressor.compress(embeddingFloat32, 0.5);
      }, 1000);
      printResult(float32Result);
    } catch (e) {
      console.log(` Compress Float32: Error - ${e.message}`);
    }

    // Decompress benchmark (compress once outside the timed loop)
    try {
      const compressed = compressor.compress(embeddingFloat32, 0.5);
      const decompressResult = benchmark(`Decompress`, () => {
        return compressor.decompress(compressed);
      }, 1000);
      printResult(decompressResult);
    } catch (e) {
      console.log(` Decompress: Error - ${e.message}`);
    }
  }
}
|
||||
|
||||
// Memory allocation benchmarks

/**
 * Compares three allocation strategies for 1000 vectors of 256 floats:
 * per-vector plain arrays, per-vector Float32Arrays, and one pre-allocated
 * flat Float32Array buffer. The timed closures ARE the benchmark subjects —
 * do not restyle them, or the measurements change meaning.
 */
function benchmarkMemoryAllocation() {
  console.log('\n📊 Memory Allocation Patterns');
  console.log('=============================');

  const dim = 256;
  const count = 1000;

  // Regular array creation (fill+map allocates an extra intermediate array
  // per vector — that overhead is part of what is being measured)
  printResult(benchmark(`Create ${count} regular arrays (${dim}d)`, () => {
    const arrays = [];
    for (let i = 0; i < count; i++) {
      arrays.push(new Array(dim).fill(0).map(() => Math.random()));
    }
    return arrays;
  }, 100));

  // Float32Array creation
  printResult(benchmark(`Create ${count} Float32Arrays (${dim}d)`, () => {
    const arrays = [];
    for (let i = 0; i < count; i++) {
      const arr = new Float32Array(dim);
      for (let j = 0; j < dim; j++) arr[j] = Math.random();
      arrays.push(arr);
    }
    return arrays;
  }, 100));

  // Pre-allocated buffer
  printResult(benchmark(`Pre-allocated buffer (${count * dim} floats)`, () => {
    const buffer = new Float32Array(count * dim);
    for (let i = 0; i < buffer.length; i++) {
      buffer[i] = Math.random();
    }
    return buffer;
  }, 100));
}
|
||||
|
||||
// Main

/**
 * Entry point: prints system info, runs every benchmark suite in order, then
 * prints optimization recommendations. `require('os')` is resolved once up
 * front (Node caches modules, so this is behavior-identical to the repeated
 * inline requires it replaces).
 */
async function main() {
  console.log('🚀 RuVector GNN Performance Benchmark Suite');
  console.log('============================================\n');

  const os = require('os');
  console.log('System Info:');
  console.log(` Platform: ${process.platform}`);
  console.log(` Node.js: ${process.version}`);
  console.log(` CPU: ${os.cpus()[0].model}`);
  console.log(` Memory: ${Math.round(os.totalmem() / 1024 / 1024 / 1024)}GB`);

  benchmarkArrayConversion();
  benchmarkMemoryAllocation();
  benchmarkGnnOperations();
  benchmarkRuvectorLayer();
  benchmarkTensorCompress();
  benchmarkBatchOperations();

  console.log('\n\n📋 Performance Optimization Recommendations');
  console.log('============================================');
  console.log('1. Avoid Array.from() conversion - use typed arrays directly');
  console.log('2. Cache converted arrays when possible');
  console.log('3. Use pre-allocated buffers for batch operations');
  console.log('4. Implement native batch search for multiple queries');
  console.log('5. Consider zero-copy operations with SharedArrayBuffer');
}

main().catch(console.error);
|
||||
149
npm/packages/ruvector/test/benchmark-hooks.js
Normal file
149
npm/packages/ruvector/test/benchmark-hooks.js
Normal file
@@ -0,0 +1,149 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* RuVector Hooks Performance Benchmark
|
||||
*
|
||||
* Measures performance of all hook operations to identify bottlenecks
|
||||
*/
|
||||
|
||||
const { execSync } = require('child_process');
const path = require('path');

// Path to the CLI under test; every benchmark shells out to `node <CLI> ...`.
const CLI = path.join(__dirname, '../bin/cli.js');

// Benchmark configuration
const ITERATIONS = 10;
const WARMUP = 2;

// Results storage: name -> { avg, min, max, p95, times }, filled by benchmark()
const results = {};
|
||||
|
||||
/**
 * Execute one CLI invocation and return its wall-clock time in milliseconds.
 * Failures and timeouts are deliberately swallowed: this harness measures
 * elapsed time only, not command success.
 *
 * @param {string} cmd - arguments appended after `node <CLI>`
 * @param {boolean} [silent=true] - pipe (discard) child output when true
 * @returns {number} elapsed milliseconds, whether or not the command succeeded
 */
function runCommand(cmd, silent = true) {
  const start = performance.now();
  try {
    execSync(`node ${CLI} ${cmd}`, {
      stdio: silent ? 'pipe' : 'inherit',
      timeout: 30000
    });
  } catch (e) {
    // Timed anyway — see contract above.
  }
  return performance.now() - start;
}
|
||||
|
||||
/**
 * Benchmark one CLI command: warm up, time `iterations` runs, record the
 * stats in the module-level `results` map, and print a one-line summary.
 *
 * @param {string} name - label used for logging and as the `results` key
 * @param {string} cmd - CLI arguments passed through to runCommand
 * @param {number} [iterations=ITERATIONS] - measured runs after warmup
 * @returns {number} average elapsed milliseconds
 */
function benchmark(name, cmd, iterations = ITERATIONS) {
  console.log(`\nBenchmarking: ${name}`);

  // Warmup runs: executed and discarded.
  for (let i = 0; i < WARMUP; i++) {
    runCommand(cmd);
  }

  // Actual benchmark
  const times = [];
  for (let i = 0; i < iterations; i++) {
    const elapsed = runCommand(cmd);
    times.push(elapsed);
    process.stdout.write(` Run ${i + 1}/${iterations}: ${elapsed.toFixed(1)}ms\r`);
  }

  const avg = times.reduce((a, b) => a + b, 0) / times.length;
  const min = Math.min(...times);
  const max = Math.max(...times);
  // In-place sort: the `times` stored in results ends up ascending.
  const p95 = times.sort((a, b) => a - b)[Math.floor(times.length * 0.95)];

  results[name] = { avg, min, max, p95, times };
  console.log(` Avg: ${avg.toFixed(1)}ms | Min: ${min.toFixed(1)}ms | Max: ${max.toFixed(1)}ms | P95: ${p95.toFixed(1)}ms`);

  return avg;
}
|
||||
|
||||
/**
 * Entry point: benchmarks every hook command group in a fixed order, then
 * prints slowest/fastest summaries, flags operations averaging over 100ms,
 * and dumps the raw results as JSON. The order matters: some hooks depend on
 * state created by earlier ones (e.g. trajectory-step after trajectory-begin).
 */
async function main() {
  console.log('='.repeat(60));
  console.log('RuVector Hooks Performance Benchmark');
  console.log('='.repeat(60));
  console.log(`Iterations: ${ITERATIONS} | Warmup: ${WARMUP}`);

  // Session operations
  console.log('\n--- Session Operations ---');
  benchmark('session-start', 'hooks session-start');
  benchmark('session-end', 'hooks session-end');

  // Memory operations (non-semantic)
  console.log('\n--- Memory Operations (Hash Embeddings) ---');
  benchmark('remember (hash)', 'hooks remember "Test memory content for benchmarking" -t benchmark');
  benchmark('recall (hash)', 'hooks recall "test benchmark" -k 5');

  // Stats and routing
  console.log('\n--- Stats & Routing ---');
  benchmark('stats', 'hooks stats');
  benchmark('route', 'hooks route "implement feature" --file src/test.ts');
  benchmark('suggest-context', 'hooks suggest-context');

  // Pre/Post hooks
  console.log('\n--- Pre/Post Edit Hooks ---');
  benchmark('pre-edit', 'hooks pre-edit /tmp/test.js');
  benchmark('post-edit (success)', 'hooks post-edit /tmp/test.js --success');
  benchmark('post-edit (failure)', 'hooks post-edit /tmp/test.js');

  // Pre/Post command hooks
  console.log('\n--- Pre/Post Command Hooks ---');
  benchmark('pre-command', 'hooks pre-command "npm test"');
  benchmark('post-command (success)', 'hooks post-command "npm test" --success');

  // Trajectory operations (using correct CLI args)
  console.log('\n--- Trajectory Operations ---');
  benchmark('trajectory-begin', 'hooks trajectory-begin -c "benchmark task" -a tester');
  benchmark('trajectory-step', 'hooks trajectory-step -a "step action" -r "step result"');
  benchmark('trajectory-end (success)', 'hooks trajectory-end --success --quality 1.0');

  // Co-edit operations (using correct CLI args)
  console.log('\n--- Co-Edit Operations ---');
  benchmark('coedit-record', 'hooks coedit-record -p file1.js -r file2.js');
  benchmark('coedit-suggest', 'hooks coedit-suggest -f src/index.ts');

  // Error operations (using correct CLI args)
  console.log('\n--- Error Operations ---');
  benchmark('error-record', 'hooks error-record -e "TypeError: undefined" -x "Add null check"');
  benchmark('error-suggest', 'hooks error-suggest -e "Cannot read property"');

  // Learning operations
  console.log('\n--- Learning Operations ---');
  benchmark('force-learn', 'hooks force-learn');

  // Summary
  console.log('\n' + '='.repeat(60));
  console.log('PERFORMANCE SUMMARY');
  console.log('='.repeat(60));

  // Sort descending by average time.
  const sorted = Object.entries(results)
    .sort((a, b) => b[1].avg - a[1].avg);

  console.log('\nSlowest operations:');
  sorted.slice(0, 5).forEach(([name, r], i) => {
    console.log(` ${i + 1}. ${name}: ${r.avg.toFixed(1)}ms avg`);
  });

  console.log('\nFastest operations:');
  sorted.slice(-5).reverse().forEach(([name, r], i) => {
    console.log(` ${i + 1}. ${name}: ${r.avg.toFixed(1)}ms avg`);
  });

  // Identify bottlenecks (>100ms)
  const bottlenecks = sorted.filter(([_, r]) => r.avg > 100);
  if (bottlenecks.length > 0) {
    console.log('\n⚠️ BOTTLENECKS (>100ms):');
    bottlenecks.forEach(([name, r]) => {
      console.log(` - ${name}: ${r.avg.toFixed(1)}ms`);
    });
  }

  // Total time for all operations
  const total = Object.values(results).reduce((sum, r) => sum + r.avg, 0);
  console.log(`\nTotal benchmark time: ${total.toFixed(1)}ms`);

  // Output JSON for further analysis
  console.log('\n--- JSON Results ---');
  console.log(JSON.stringify(results, null, 2));
}

main().catch(console.error);
|
||||
250
npm/packages/ruvector/test/benchmark-perf.js
Normal file
250
npm/packages/ruvector/test/benchmark-perf.js
Normal file
@@ -0,0 +1,250 @@
|
||||
const { LRUCache, Float32BufferPool, VectorOps, ParallelBatchProcessor, OptimizedMemoryStore } = require('../dist/core/neural-perf.js');
|
||||
|
||||
// Benchmark utilities

/**
 * Time `fn` in one aggregate pass: 100 untimed warmup calls, then
 * `iterations` calls inside a single performance.now() window.
 *
 * @param {string} name - label for the benchmark
 * @param {Function} fn - zero-argument function to measure
 * @param {number} [iterations=10000] - measured call count
 * @returns {{name: string, iterations: number, totalMs: number, perOpUs: number}}
 */
function benchmark(name, fn, iterations = 10000) {
  // Warmup: let the JIT settle before timing.
  let warmup = 100;
  while (warmup-- > 0) fn();

  const start = performance.now();
  let remaining = iterations;
  while (remaining-- > 0) fn();
  const totalMs = performance.now() - start;

  return { name, iterations, totalMs, perOpUs: (totalMs / iterations) * 1000 };
}
|
||||
|
||||
/**
 * Format one benchmark result as a fixed-width single line:
 * name (40 cols), µs/op (10 cols, 3 decimals), then ops and total ms.
 * @param {{name: string, perOpUs: number, iterations: number, totalMs: number}} result
 * @returns {string} aligned summary line
 */
function formatResult(result) {
  const label = result.name.padEnd(40);
  const micros = result.perOpUs.toFixed(3).padStart(10);
  return `${label} ${micros} us/op (${result.iterations} ops in ${result.totalMs.toFixed(1)}ms)`;
}
|
||||
|
||||
console.log('\n═══════════════════════════════════════════════════════════════════');
console.log(' RUVECTOR PERFORMANCE BENCHMARKS');
console.log('═══════════════════════════════════════════════════════════════════\n');

// ============================================================================
// 1. LRU Cache: O(1) vs Map (simulated O(n) eviction)
// ============================================================================
console.log('┌─────────────────────────────────────────────────────────────────┐');
console.log('│ LRU CACHE BENCHMARK │');
console.log('└─────────────────────────────────────────────────────────────────┘');

const lruCache = new LRUCache(1000);
const naiveCache = new Map();
const CACHE_SIZE = 1000;

// Pre-fill caches so gets hit and sets evict from the start.
for (let i = 0; i < CACHE_SIZE; i++) {
  lruCache.set('key' + i, { data: i });
  naiveCache.set('key' + i, { data: i });
}

// Benchmark LRU get (O(1))
const lruGet = benchmark('LRU Cache get (O(1))', () => {
  lruCache.get('key' + Math.floor(Math.random() * CACHE_SIZE));
}, 100000);

// Benchmark LRU set with eviction (O(1))
let lruSetCounter = CACHE_SIZE;
const lruSet = benchmark('LRU Cache set+evict (O(1))', () => {
  lruCache.set('newkey' + lruSetCounter++, { data: lruSetCounter });
}, 50000);

// Simulate naive O(n) eviction
// NOTE(review): the loop breaks after the first entry, so this only mimics
// the call overhead of an O(n) scan, not an actual full scan — the reported
// "speedup" understates the real O(n) cost.
const naiveEvict = benchmark('Naive Map eviction (O(n))', () => {
  // Simulate finding oldest entry (O(n) scan)
  let oldest = null;
  for (const [k, v] of naiveCache.entries()) {
    oldest = k;
    break; // Just get first (simulating finding oldest)
  }
  naiveCache.delete(oldest);
  naiveCache.set('key' + Math.random(), { data: 1 });
}, 50000);

console.log(formatResult(lruGet));
console.log(formatResult(lruSet));
console.log(formatResult(naiveEvict));
console.log(' → LRU Speedup: ' + (naiveEvict.perOpUs / lruSet.perOpUs).toFixed(1) + 'x faster\n');
|
||||
|
||||
// ============================================================================
// 2. Buffer Pool vs Fresh Allocation
// ============================================================================
console.log('┌─────────────────────────────────────────────────────────────────┐');
console.log('│ BUFFER POOL BENCHMARK │');
console.log('└─────────────────────────────────────────────────────────────────┘');

// Pool capacity 64, pre-warmed with 32 buffers of length 384.
const bufferPool = new Float32BufferPool(64);
bufferPool.prewarm([384], 32);

// Pooled allocation: acquire/release round-trip.
const pooledAlloc = benchmark('Buffer Pool acquire+release', () => {
  const buf = bufferPool.acquire(384);
  buf[0] = 1.0; // Use it
  bufferPool.release(buf);
}, 100000);

// Fresh allocation: baseline that leans on the GC instead of the pool.
const freshAlloc = benchmark('Fresh Float32Array allocation', () => {
  const buf = new Float32Array(384);
  buf[0] = 1.0; // Use it
  // Let GC handle it
}, 100000);

console.log(formatResult(pooledAlloc));
console.log(formatResult(freshAlloc));
console.log(' → Pool Speedup: ' + (freshAlloc.perOpUs / pooledAlloc.perOpUs).toFixed(1) + 'x faster');
const poolStats = bufferPool.getStats();
console.log(' → Pool Stats: reuse=' + (poolStats.reuseRate * 100).toFixed(1) + '%, pooled=' + poolStats.pooledBuffers + '\n');
|
||||
|
||||
// ============================================================================
// 3. Vector Ops: Unrolled vs Standard
// ============================================================================
console.log('┌─────────────────────────────────────────────────────────────────┐');
console.log('│ VECTOR OPERATIONS BENCHMARK (384-dim) │');
console.log('└─────────────────────────────────────────────────────────────────┘');

// Shared 384-dim random operands reused by every vector-op benchmark below.
const vecA = new Float32Array(384);
const vecB = new Float32Array(384);
for (let i = 0; i < 384; i++) {
  vecA[i] = Math.random();
  vecB[i] = Math.random();
}

// Unrolled cosine
const unrolledCosine = benchmark('VectorOps.cosine (8x unrolled)', () => {
  VectorOps.cosine(vecA, vecB);
}, 100000);
|
||||
|
||||
// Standard cosine

/**
 * Reference cosine similarity with a plain single-pass loop (no unrolling);
 * baseline against VectorOps.cosine.
 * @param {ArrayLike<number>} a
 * @param {ArrayLike<number>} b - assumed same length as `a` — TODO confirm callers
 * @returns {number} a·b / (|a||b|)
 */
function standardCosine(a, b) {
  let dot = 0;
  let normA = 0;
  let normB = 0;
  const len = a.length;
  for (let i = 0; i < len; i++) {
    const x = a[i];
    const y = b[i];
    dot += x * y;
    normA += x * x;
    normB += y * y;
  }
  return dot / Math.sqrt(normA * normB);
}
|
||||
|
||||
// Baseline cosine timing against the unrolled implementation above.
const stdCosine = benchmark('Standard cosine (no unroll)', () => {
  standardCosine(vecA, vecB);
}, 100000);

console.log(formatResult(unrolledCosine));
console.log(formatResult(stdCosine));
console.log(' → Unroll Speedup: ' + (stdCosine.perOpUs / unrolledCosine.perOpUs).toFixed(2) + 'x faster\n');

// Unrolled dot product
const unrolledDot = benchmark('VectorOps.dot (8x unrolled)', () => {
  VectorOps.dot(vecA, vecB);
}, 100000);
|
||||
|
||||
/**
 * Reference dot product with a plain loop; baseline against VectorOps.dot.
 * @param {ArrayLike<number>} a
 * @param {ArrayLike<number>} b - assumed same length as `a`
 * @returns {number} sum of a[i] * b[i]
 */
function standardDot(a, b) {
  let total = 0;
  const len = a.length;
  for (let i = 0; i < len; i++) {
    total += a[i] * b[i];
  }
  return total;
}
|
||||
|
||||
// Baseline dot-product timing against the unrolled implementation above.
const stdDot = benchmark('Standard dot product', () => {
  standardDot(vecA, vecB);
}, 100000);

console.log(formatResult(unrolledDot));
console.log(formatResult(stdDot));
console.log(' → Unroll Speedup: ' + (stdDot.perOpUs / unrolledDot.perOpUs).toFixed(2) + 'x faster\n');

// Unrolled distance
const unrolledDist = benchmark('VectorOps.distance (8x unrolled)', () => {
  VectorOps.distance(vecA, vecB);
}, 100000);
|
||||
|
||||
/**
 * Reference Euclidean distance with a plain loop; baseline against
 * VectorOps.distance.
 * @param {ArrayLike<number>} a
 * @param {ArrayLike<number>} b - assumed same length as `a`
 * @returns {number} sqrt(sum of (a[i] - b[i])^2)
 */
function standardDistance(a, b) {
  let sumSq = 0;
  const len = a.length;
  for (let i = 0; i < len; i++) {
    const diff = a[i] - b[i];
    sumSq += diff * diff;
  }
  return Math.sqrt(sumSq);
}
|
||||
|
||||
// Baseline distance timing against the unrolled implementation above.
const stdDist = benchmark('Standard distance', () => {
  standardDistance(vecA, vecB);
}, 100000);

console.log(formatResult(unrolledDist));
console.log(formatResult(stdDist));
console.log(' → Unroll Speedup: ' + (stdDist.perOpUs / unrolledDist.perOpUs).toFixed(2) + 'x faster\n');
|
||||
|
||||
// ============================================================================
// 4. Batch Processing
// ============================================================================
console.log('┌─────────────────────────────────────────────────────────────────┐');
console.log('│ BATCH SIMILARITY SEARCH │');
console.log('└─────────────────────────────────────────────────────────────────┘');

// 1000-vector corpus of 384-dim random embeddings.
const corpus = [];
for (let i = 0; i < 1000; i++) {
  const v = new Float32Array(384);
  for (let j = 0; j < 384; j++) v[j] = Math.random();
  corpus.push(v);
}

// 100 random query vectors (only the first 10 are used below).
const queries = [];
for (let i = 0; i < 100; i++) {
  const v = new Float32Array(384);
  for (let j = 0; j < 384; j++) v[j] = Math.random();
  queries.push(v);
}

const batchProcessor = new ParallelBatchProcessor({ batchSize: 32 });

// NOTE(review): the slice() calls inside the timed closure allocate two new
// arrays per iteration, so their cost is included in the measurement.
const batchSearch = benchmark('Batch similarity (10 queries x 100 corpus)', () => {
  batchProcessor.batchSimilarity(queries.slice(0, 10), corpus.slice(0, 100), 5);
}, 100);

console.log(formatResult(batchSearch));
|
||||
|
||||
// ============================================================================
// 5. OptimizedMemoryStore
// ============================================================================
console.log('\n┌─────────────────────────────────────────────────────────────────┐');
console.log('│ OPTIMIZED MEMORY STORE │');
console.log('└─────────────────────────────────────────────────────────────────┘');

const store = new OptimizedMemoryStore({ cacheSize: 1000, dimension: 384 });

// Pre-fill with 1000 random 384-dim embeddings keyed 'mem0'..'mem999'.
for (let i = 0; i < 1000; i++) {
  const emb = new Float32Array(384);
  for (let j = 0; j < 384; j++) emb[j] = Math.random();
  store.store('mem' + i, emb, 'content ' + i);
}

// Random-key point lookups.
const storeGet = benchmark('OptimizedMemoryStore.get (O(1))', () => {
  store.get('mem' + Math.floor(Math.random() * 1000));
}, 100000);

// Single fixed query vector for the k-NN search benchmark.
const queryEmb = new Float32Array(384);
for (let j = 0; j < 384; j++) queryEmb[j] = Math.random();

const storeSearch = benchmark('OptimizedMemoryStore.search (k=5)', () => {
  store.search(queryEmb, 5);
}, 1000);

console.log(formatResult(storeGet));
console.log(formatResult(storeSearch));
const storeStats = store.getStats();
console.log(' → Cache: hits=' + storeStats.cache.hits + ', hitRate=' + (storeStats.cache.hitRate * 100).toFixed(1) + '%\n');
|
||||
|
||||
// ============================================================================
// Summary
// ============================================================================
// Recomputes the headline ratios from the result objects captured above.
console.log('═══════════════════════════════════════════════════════════════════');
console.log(' SUMMARY');
console.log('═══════════════════════════════════════════════════════════════════');
console.log(' ✓ LRU Cache: O(1) operations, ' + (naiveEvict.perOpUs / lruSet.perOpUs).toFixed(0) + 'x faster than naive eviction');
console.log(' ✓ Buffer Pool: ' + (freshAlloc.perOpUs / pooledAlloc.perOpUs).toFixed(1) + 'x faster, ' + (poolStats.reuseRate * 100).toFixed(0) + '% reuse rate');
console.log(' ✓ Vector Ops: ' + (stdCosine.perOpUs / unrolledCosine.perOpUs).toFixed(1) + 'x faster with 8x unrolling');
console.log(' ✓ Memory Store: O(1) lookup at ' + storeGet.perOpUs.toFixed(3) + ' µs/op');
console.log('═══════════════════════════════════════════════════════════════════\n');
|
||||
155
npm/packages/ruvector/test/integration.js
Executable file
155
npm/packages/ruvector/test/integration.js
Executable file
@@ -0,0 +1,155 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Integration test for ruvector package
|
||||
* Tests the smart loader and basic functionality
|
||||
*/
|
||||
|
||||
const assert = require('assert');
const path = require('path');

console.log('ruvector Integration Test\n');
console.log('='.repeat(50));

// Test 1: Load ruvector module
// A load/shape failure here is fatal (process.exit(1)) — nothing later can
// run without the module.
console.log('\n1. Testing module loading...');
try {
  const ruvector = require('../dist/index.js');
  console.log(' ✓ Module loaded successfully');

  // Check exports
  assert(typeof ruvector.VectorDB === 'function', 'VectorDB should be a function');
  assert(typeof ruvector.getImplementationType === 'function', 'getImplementationType should be a function');
  assert(typeof ruvector.isNative === 'function', 'isNative should be a function');
  assert(typeof ruvector.isWasm === 'function', 'isWasm should be a function');
  assert(typeof ruvector.getVersion === 'function', 'getVersion should be a function');
  console.log(' ✓ All exports present');
} catch (error) {
  console.error(' ✗ Failed to load module:', error.message);
  process.exit(1);
}
|
||||
|
||||
// Test 2: Check implementation detection
|
||||
console.log('\n2. Testing implementation detection...');
|
||||
try {
|
||||
const { getImplementationType, isNative, isWasm, getVersion } = require('../dist/index.js');
|
||||
|
||||
const implType = getImplementationType();
|
||||
console.log(` Implementation type: ${implType}`);
|
||||
|
||||
assert(['native', 'wasm'].includes(implType), 'Implementation type should be native or wasm');
|
||||
console.log(' ✓ Valid implementation type');
|
||||
|
||||
const version = getVersion();
|
||||
console.log(` Version: ${version.version}`);
|
||||
console.log(` Using: ${version.implementation}`);
|
||||
assert(version.version === '0.1.1', 'Version should be 0.1.1');
|
||||
console.log(' ✓ Version info correct');
|
||||
|
||||
assert(isNative() !== isWasm(), 'Should be either native OR wasm, not both');
|
||||
console.log(' ✓ Implementation flags consistent');
|
||||
} catch (error) {
|
||||
console.error(' ✗ Implementation detection failed:', error.message);
|
||||
// This is expected to fail until we have the actual implementations
|
||||
console.log(' ⚠ This is expected until @ruvector/core and @ruvector/wasm are built');
|
||||
}
|
||||
|
||||
// Test 3: Type definitions
|
||||
console.log('\n3. Testing TypeScript type definitions...');
|
||||
try {
|
||||
const fs = require('fs');
|
||||
|
||||
const typeDefsExist = fs.existsSync(path.join(__dirname, '../dist/types.d.ts'));
|
||||
assert(typeDefsExist, 'Type definitions should exist');
|
||||
console.log(' ✓ Type definitions file exists');
|
||||
|
||||
const indexDefsExist = fs.existsSync(path.join(__dirname, '../dist/index.d.ts'));
|
||||
assert(indexDefsExist, 'Index type definitions should exist');
|
||||
console.log(' ✓ Index type definitions exist');
|
||||
|
||||
// Check type definitions content
|
||||
const typeDefs = fs.readFileSync(path.join(__dirname, '../dist/types.d.ts'), 'utf8');
|
||||
assert(typeDefs.includes('VectorEntry'), 'Should include VectorEntry interface');
|
||||
assert(typeDefs.includes('SearchQuery'), 'Should include SearchQuery interface');
|
||||
assert(typeDefs.includes('SearchResult'), 'Should include SearchResult interface');
|
||||
assert(typeDefs.includes('DbOptions'), 'Should include DbOptions interface');
|
||||
assert(typeDefs.includes('VectorDB'), 'Should include VectorDB interface');
|
||||
console.log(' ✓ All type definitions present');
|
||||
} catch (error) {
|
||||
console.error(' ✗ Type definitions test failed:', error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Test 4: Package structure
|
||||
console.log('\n4. Testing package structure...');
|
||||
try {
|
||||
const fs = require('fs');
|
||||
|
||||
const packageJson = require('../package.json');
|
||||
assert(packageJson.name === 'ruvector', 'Package name should be ruvector');
|
||||
assert(packageJson.version === '0.1.1', 'Version should be 0.1.1');
|
||||
assert(packageJson.main === 'dist/index.js', 'Main entry should be dist/index.js');
|
||||
assert(packageJson.types === 'dist/index.d.ts', 'Types entry should be dist/index.d.ts');
|
||||
assert(packageJson.bin.ruvector === './bin/cli.js', 'CLI bin should be ./bin/cli.js');
|
||||
console.log(' ✓ package.json structure correct');
|
||||
|
||||
const cliExists = fs.existsSync(path.join(__dirname, '../bin/cli.js'));
|
||||
assert(cliExists, 'CLI script should exist');
|
||||
console.log(' ✓ CLI script exists');
|
||||
|
||||
const cliContent = fs.readFileSync(path.join(__dirname, '../bin/cli.js'), 'utf8');
|
||||
assert(cliContent.startsWith('#!/usr/bin/env node'), 'CLI should have shebang');
|
||||
console.log(' ✓ CLI has proper shebang');
|
||||
} catch (error) {
|
||||
console.error(' ✗ Package structure test failed:', error.message);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
// Test 5: CLI functionality (basic)
|
||||
console.log('\n5. Testing CLI basic functionality...');
|
||||
try {
|
||||
const { execSync } = require('child_process');
|
||||
|
||||
// Test CLI help
|
||||
try {
|
||||
const output = execSync('node bin/cli.js --help', {
|
||||
cwd: path.join(__dirname, '..'),
|
||||
encoding: 'utf8'
|
||||
});
|
||||
assert(output.includes('ruvector'), 'Help should mention ruvector');
|
||||
assert(output.includes('create'), 'Help should include create command');
|
||||
assert(output.includes('search'), 'Help should include search command');
|
||||
console.log(' ✓ CLI help works');
|
||||
} catch (error) {
|
||||
// CLI might fail if dependencies aren't available
|
||||
console.log(' ⚠ CLI help test skipped (dependencies not available)');
|
||||
}
|
||||
|
||||
// Test info command
|
||||
try {
|
||||
const output = execSync('node bin/cli.js info', {
|
||||
cwd: path.join(__dirname, '..'),
|
||||
encoding: 'utf8'
|
||||
});
|
||||
assert(output.includes('0.1.1'), 'Info should show version');
|
||||
console.log(' ✓ CLI info command works');
|
||||
} catch (error) {
|
||||
console.log(' ⚠ CLI info test skipped (dependencies not available)');
|
||||
}
|
||||
} catch (error) {
|
||||
console.error(' ✗ CLI test failed:', error.message);
|
||||
}
|
||||
|
||||
// Summary
|
||||
console.log('\n' + '='.repeat(50));
|
||||
console.log('\n✓ Core package structure tests passed!');
|
||||
console.log('\nPackage ready for:');
|
||||
console.log(' - Platform detection and smart loading');
|
||||
console.log(' - TypeScript type definitions');
|
||||
console.log(' - CLI tools (create, insert, search, stats, benchmark)');
|
||||
console.log(' - Integration with @ruvector/core and @ruvector/wasm');
|
||||
console.log('\nNext steps:');
|
||||
console.log(' 1. Build @ruvector/core (native Rust bindings)');
|
||||
console.log(' 2. Build @ruvector/wasm (WebAssembly module)');
|
||||
console.log(' 3. Test full integration with real implementations');
|
||||
console.log('\nPackage location: /workspaces/ruvector/npm/packages/ruvector');
|
||||
151
npm/packages/ruvector/test/mock-implementation.js
Normal file
151
npm/packages/ruvector/test/mock-implementation.js
Normal file
@@ -0,0 +1,151 @@
|
||||
/**
 * Mock VectorDB implementation for testing
 * This simulates the interface that @ruvector/core and @ruvector/wasm will provide
 */

/**
 * In-memory, brute-force vector store mirroring the real VectorDB API:
 * insert/insertBatch, search, get, delete, updateMetadata, stats,
 * save/load, clear, buildIndex and optimize.
 */
class VectorDB {
  /**
   * @param {{dimension: number, metric?: 'cosine'|'euclidean'|'dot'}} options -
   *   vector dimensionality and similarity metric (defaults to 'cosine').
   */
  constructor(options) {
    this.options = options;
    this.dimension = options.dimension;
    this.metric = options.metric || 'cosine';
    this.vectors = new Map();
  }

  /**
   * Insert a single entry; replaces any existing entry with the same id.
   * @param {{id: string, vector: number[], metadata?: object}} entry
   * @throws {Error} if id/vector are missing or the dimension mismatches.
   */
  insert(entry) {
    if (!entry.id || !entry.vector) {
      throw new Error('Entry must have id and vector');
    }
    if (entry.vector.length !== this.dimension) {
      throw new Error(`Vector dimension must be ${this.dimension}`);
    }
    this.vectors.set(entry.id, {
      id: entry.id,
      vector: entry.vector,
      metadata: entry.metadata || {}
    });
  }

  /** Insert several entries in order; throws on the first invalid entry. */
  insertBatch(entries) {
    for (const entry of entries) {
      this.insert(entry);
    }
  }

  /**
   * Brute-force similarity search over all stored vectors.
   * @param {{vector: number[], k?: number, threshold?: number}} query -
   *   `k` defaults to 10 and `threshold` to 0.
   * @returns {Array<{id: string, score: number, vector: number[], metadata: object}>}
   *   top-k results sorted by descending score.
   */
  search(query) {
    const results = [];
    // `??` (not `||`) so explicit k: 0 / threshold: 0 are honored rather
    // than silently replaced by the defaults.
    const k = query.k ?? 10;
    const threshold = query.threshold ?? 0.0;

    for (const [id, entry] of this.vectors.entries()) {
      const score = this._computeSimilarity(query.vector, entry.vector);
      if (score >= threshold) {
        results.push({
          id: entry.id,
          score,
          vector: entry.vector,
          metadata: entry.metadata
        });
      }
    }

    // Sort by score descending
    results.sort((a, b) => b.score - a.score);

    return results.slice(0, k);
  }

  /** @returns {object|null} the stored entry, or null when absent. */
  get(id) {
    return this.vectors.get(id) ?? null;
  }

  /** @returns {boolean} true when an entry was actually removed. */
  delete(id) {
    return this.vectors.delete(id);
  }

  /** Shallow-merge new metadata into an existing entry; no-op for unknown ids. */
  updateMetadata(id, metadata) {
    const entry = this.vectors.get(id);
    if (entry) {
      entry.metadata = { ...entry.metadata, ...metadata };
    }
  }

  /** @returns {{count: number, dimension: number, metric: string, memoryUsage: number, indexType: string}} */
  stats() {
    return {
      count: this.vectors.size,
      dimension: this.dimension,
      metric: this.metric,
      memoryUsage: this.vectors.size * this.dimension * 8, // rough estimate
      indexType: 'flat'
    };
  }

  /** Mock save: returns the store serialized as a JSON string (path is ignored). */
  save(path) {
    const data = {
      dimension: this.dimension,
      metric: this.metric,
      vectors: Array.from(this.vectors.values())
    };
    return JSON.stringify(data);
  }

  /** Mock load - would read from file; here it just empties the store. */
  load(path) {
    this.vectors.clear();
  }

  /** Remove all stored vectors. */
  clear() {
    this.vectors.clear();
  }

  buildIndex() {
    // Mock index building
  }

  optimize() {
    // Mock optimization
  }

  /** Dispatch to the configured similarity metric. */
  _computeSimilarity(a, b) {
    if (this.metric === 'cosine') {
      return this._cosineSimilarity(a, b);
    } else if (this.metric === 'euclidean') {
      // Map distance into (0, 1] so larger always means "more similar".
      return 1 / (1 + this._euclideanDistance(a, b));
    } else {
      return this._dotProduct(a, b);
    }
  }

  _cosineSimilarity(a, b) {
    let dot = 0;
    let magA = 0;
    let magB = 0;

    for (let i = 0; i < a.length; i++) {
      dot += a[i] * b[i];
      magA += a[i] * a[i];
      magB += b[i] * b[i];
    }

    const denom = Math.sqrt(magA) * Math.sqrt(magB);
    // Cosine similarity is undefined for a zero vector; report 0 instead of NaN
    // so such entries sort last rather than corrupting the result ordering.
    return denom === 0 ? 0 : dot / denom;
  }

  _euclideanDistance(a, b) {
    let sum = 0;
    for (let i = 0; i < a.length; i++) {
      const diff = a[i] - b[i];
      sum += diff * diff;
    }
    return Math.sqrt(sum);
  }

  _dotProduct(a, b) {
    let sum = 0;
    for (let i = 0; i < a.length; i++) {
      sum += a[i] * b[i];
    }
    return sum;
  }
}
|
||||
module.exports = { VectorDB };
|
||||
214
npm/packages/ruvector/test/standalone-test.js
Executable file
214
npm/packages/ruvector/test/standalone-test.js
Executable file
@@ -0,0 +1,214 @@
|
||||
#!/usr/bin/env node

/**
 * Standalone test using mock implementation
 * This demonstrates the package structure and API without requiring native/WASM modules
 *
 * Structure: five sequential test sections, each in its own try/catch.
 * Every section except none here is fatal — any failure calls
 * process.exit(1), so the summary only prints when all checks passed.
 */

const assert = require('assert');
const path = require('path');
const fs = require('fs');

console.log('ruvector Standalone Test (with mock implementation)\n');
console.log('='.repeat(60));

// Test 1: Package structure
// Validates package.json metadata and that the compiled artifacts exist on disk.
console.log('\n1. Testing package structure...');
try {
  const packageJson = require('../package.json');
  assert(packageJson.name === 'ruvector', 'Package name should be ruvector');
  // NOTE(review): version is hard-coded here and in integration.js — keep in
  // sync with package.json when bumping.
  assert(packageJson.version === '0.1.1', 'Version should be 0.1.1');
  assert(packageJson.main === 'dist/index.js', 'Main entry correct');
  assert(packageJson.types === 'dist/index.d.ts', 'Types entry correct');
  console.log(' ✓ package.json structure valid');

  const distExists = fs.existsSync(path.join(__dirname, '../dist'));
  assert(distExists, 'dist directory should exist');
  console.log(' ✓ dist directory exists');

  const indexExists = fs.existsSync(path.join(__dirname, '../dist/index.js'));
  assert(indexExists, 'dist/index.js should exist');
  console.log(' ✓ dist/index.js compiled');

  const typesExist = fs.existsSync(path.join(__dirname, '../dist/types.d.ts'));
  assert(typesExist, 'Type definitions should exist');
  console.log(' ✓ TypeScript definitions compiled');

  const cliExists = fs.existsSync(path.join(__dirname, '../bin/cli.js'));
  assert(cliExists, 'CLI script should exist');
  console.log(' ✓ CLI script exists');
} catch (error) {
  console.error(' ✗ Package structure test failed:', error.message);
  process.exit(1);
}

// Test 2: Type definitions
// String-level checks against the emitted .d.ts files (no TypeScript compiler needed).
console.log('\n2. Testing TypeScript type definitions...');
try {
  const typeDefs = fs.readFileSync(path.join(__dirname, '../dist/types.d.ts'), 'utf8');

  // Every public interface the package promises to declare.
  const requiredTypes = [
    'VectorEntry',
    'SearchQuery',
    'SearchResult',
    'DbOptions',
    'DbStats',
    'VectorDB'
  ];

  for (const type of requiredTypes) {
    assert(typeDefs.includes(type), `Should include ${type}`);
    console.log(` ✓ ${type} interface defined`);
  }

  const indexDefs = fs.readFileSync(path.join(__dirname, '../dist/index.d.ts'), 'utf8');
  // Check for type re-exports (TypeScript may compile to different formats)
  const hasTypeExports = indexDefs.includes('VectorEntry') ||
    indexDefs.includes('from "./types"') ||
    indexDefs.includes('export *');
  assert(hasTypeExports, 'Should export types');
  assert(indexDefs.includes('getImplementationType'), 'Should export getImplementationType');
  assert(indexDefs.includes('VectorDB'), 'Should export VectorDB');
  console.log(' ✓ Index exports all types and functions');
} catch (error) {
  console.error(' ✗ Type definitions test failed:', error.message);
  process.exit(1);
}

// Test 3: Mock VectorDB functionality
// Exercises the full CRUD + search API against the in-memory mock so the
// expected contract is demonstrated without native/WASM backends.
console.log('\n3. Testing VectorDB API (with mock)...');
try {
  const { VectorDB } = require('./mock-implementation.js');

  // Create database
  const db = new VectorDB({
    dimension: 3,
    metric: 'cosine'
  });
  console.log(' ✓ Database created');

  // Insert vectors
  db.insert({
    id: 'vec1',
    vector: [1, 0, 0],
    metadata: { label: 'first' }
  });

  db.insertBatch([
    { id: 'vec2', vector: [0, 1, 0], metadata: { label: 'second' } },
    { id: 'vec3', vector: [0, 0, 1], metadata: { label: 'third' } },
    { id: 'vec4', vector: [0.7, 0.7, 0], metadata: { label: 'fourth' } }
  ]);
  console.log(' ✓ Vectors inserted');

  // Get stats
  const stats = db.stats();
  assert(stats.count === 4, 'Should have 4 vectors');
  assert(stats.dimension === 3, 'Dimension should be 3');
  console.log(` ✓ Stats: ${stats.count} vectors, dim=${stats.dimension}`);

  // Search — vec1 is identical to the query, so it must rank first.
  const results = db.search({
    vector: [1, 0, 0],
    k: 3
  });
  assert(results.length === 3, 'Should return 3 results');
  assert(results[0].id === 'vec1', 'First result should be vec1');
  console.log(` ✓ Search returned ${results.length} results`);
  console.log(`   Top result: ${results[0].id} (score: ${results[0].score.toFixed(4)})`);

  // Get by ID
  const vec = db.get('vec2');
  assert(vec !== null, 'Should find vector');
  assert(vec.id === 'vec2', 'Should have correct ID');
  console.log(' ✓ Get by ID works');

  // Update metadata (merges into existing metadata)
  db.updateMetadata('vec1', { updated: true });
  const updated = db.get('vec1');
  assert(updated.metadata.updated === true, 'Metadata should be updated');
  console.log(' ✓ Update metadata works');

  // Delete
  const deleted = db.delete('vec3');
  assert(deleted === true, 'Should delete successfully');
  assert(db.stats().count === 3, 'Should have 3 vectors after delete');
  console.log(' ✓ Delete works');

} catch (error) {
  console.error(' ✗ VectorDB API test failed:', error.message);
  process.exit(1);
}

// Test 4: CLI structure
// Static checks on the CLI source text: subcommands present and expected
// dependencies referenced (does not execute the CLI).
console.log('\n4. Testing CLI structure...');
try {
  const cliContent = fs.readFileSync(path.join(__dirname, '../bin/cli.js'), 'utf8');

  const cliFeatures = [
    'create',
    'insert',
    'search',
    'stats',
    'benchmark',
    'info'
  ];

  for (const feature of cliFeatures) {
    assert(cliContent.includes(feature), `CLI should include ${feature} command`);
    console.log(` ✓ ${feature} command present`);
  }

  assert(cliContent.includes('#!/usr/bin/env node'), 'Should have shebang');
  assert(cliContent.includes('commander'), 'Should use commander');
  assert(cliContent.includes('chalk'), 'Should use chalk');
  assert(cliContent.includes('ora'), 'Should use ora');
  console.log(' ✓ CLI dependencies correct');

} catch (error) {
  console.error(' ✗ CLI structure test failed:', error.message);
  process.exit(1);
}

// Test 5: Smart loader logic
// Static checks that the compiled loader tries the native backend first and
// falls back to WASM, and exposes the detection helpers.
console.log('\n5. Testing smart loader logic...');
try {
  const loaderContent = fs.readFileSync(path.join(__dirname, '../dist/index.js'), 'utf8');

  assert(loaderContent.includes('@ruvector/core'), 'Should try to load native');
  assert(loaderContent.includes('@ruvector/wasm'), 'Should fallback to WASM');
  assert(loaderContent.includes('getImplementationType'), 'Should export implementation type');
  assert(loaderContent.includes('isNative'), 'Should export isNative');
  assert(loaderContent.includes('isWasm'), 'Should export isWasm');
  console.log(' ✓ Smart loader has platform detection');
  console.log(' ✓ Exports implementation detection functions');

} catch (error) {
  console.error(' ✗ Smart loader test failed:', error.message);
  process.exit(1);
}

// Summary — only reached when every section above passed.
console.log('\n' + '='.repeat(60));
console.log('\n✓ All package structure tests passed!');
console.log('\nPackage features:');
console.log(' ✓ Smart native/WASM loader with automatic fallback');
console.log(' ✓ Complete TypeScript type definitions');
console.log(' ✓ VectorDB API (insert, search, delete, stats)');
console.log(' ✓ CLI tools (create, insert, search, stats, benchmark, info)');
console.log(' ✓ Platform detection (isNative, isWasm, getImplementationType)');
console.log('\nPackage structure:');
console.log(' 📦 /workspaces/ruvector/npm/packages/ruvector');
console.log(' ├── dist/ (compiled JavaScript and types)');
console.log(' ├── src/ (TypeScript source)');
console.log(' ├── bin/ (CLI script)');
console.log(' ├── test/ (integration tests)');
console.log(' └── package.json (npm package config)');
console.log('\nReady for integration with:');
console.log(' - @ruvector/core (native Rust bindings)');
console.log(' - @ruvector/wasm (WebAssembly module)');
console.log('\nNext steps:');
console.log(' 1. Create @ruvector/core package (native bindings)');
console.log(' 2. Create @ruvector/wasm package (WASM module)');
console.log(' 3. Update package.json to include them as dependencies');
console.log(' 4. Test full integration');
||||
Reference in New Issue
Block a user