Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'
This commit is contained in:

New vendored file (174 lines): vendor/ruvector/npm/packages/agentic-synth/tests/integration/midstreamer.test.js
/**
 * Integration tests for Midstreamer adapter
 */

import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { MidstreamerAdapter } from '../../src/adapters/midstreamer.js';
import { DataGenerator } from '../../src/generators/data-generator.js';

describe('Midstreamer Integration', () => {
  let adapter;
  let generator;

  beforeEach(async () => {
    adapter = new MidstreamerAdapter({
      endpoint: 'http://localhost:8080',
      apiKey: 'test-key'
    });

    generator = new DataGenerator({
      schema: {
        name: { type: 'string', length: 10 },
        value: { type: 'number', min: 0, max: 100 }
      }
    });
  });

  afterEach(async () => {
    // Ensure no connection leaks between tests.
    if (adapter.isConnected()) {
      await adapter.disconnect();
    }
  });

  describe('connection', () => {
    it('should connect to Midstreamer', async () => {
      const result = await adapter.connect();
      expect(result).toBe(true);
      expect(adapter.isConnected()).toBe(true);
    });

    it('should disconnect from Midstreamer', async () => {
      await adapter.connect();
      await adapter.disconnect();
      expect(adapter.isConnected()).toBe(false);
    });

    it('should handle reconnection', async () => {
      await adapter.connect();
      await adapter.disconnect();
      await adapter.connect();
      expect(adapter.isConnected()).toBe(true);
    });
  });

  describe('data streaming', () => {
    beforeEach(async () => {
      await adapter.connect();
    });

    it('should stream generated data', async () => {
      const data = generator.generate(5);
      const results = await adapter.stream(data);

      expect(results).toHaveLength(5);
      results.forEach(result => {
        expect(result).toHaveProperty('id');
        expect(result).toHaveProperty('status');
        expect(result.status).toBe('streamed');
      });
    });

    it('should handle empty data array', async () => {
      const results = await adapter.stream([]);
      expect(results).toHaveLength(0);
    });

    it('should throw error when not connected', async () => {
      await adapter.disconnect();
      await expect(adapter.stream([{ id: 1 }])).rejects.toThrow('Not connected to Midstreamer');
    });

    it('should throw error for invalid data', async () => {
      await expect(adapter.stream('not an array')).rejects.toThrow('Data must be an array');
    });
  });

  describe('end-to-end workflow', () => {
    it('should generate and stream data', async () => {
      // Generate synthetic data
      const data = generator.generate(10);
      expect(data).toHaveLength(10);

      // Connect to Midstreamer
      await adapter.connect();
      expect(adapter.isConnected()).toBe(true);

      // Stream data
      const results = await adapter.stream(data);
      expect(results).toHaveLength(10);

      // Verify all items processed
      results.forEach((result, index) => {
        expect(result.id).toBe(data[index].id);
        expect(result.status).toBe('streamed');
      });

      // Cleanup
      await adapter.disconnect();
    });

    it('should handle large batches', async () => {
      const largeData = generator.generate(1000);

      await adapter.connect();
      const results = await adapter.stream(largeData);

      expect(results).toHaveLength(1000);
    });
  });

  describe('error handling', () => {
    it('should handle connection failures', async () => {
      const failingAdapter = new MidstreamerAdapter({
        endpoint: 'http://invalid-endpoint:99999'
      });

      // Note: In real implementation, this would actually fail
      // For now, our mock always succeeds
      await expect(failingAdapter.connect()).resolves.toBe(true);
    });

    it('should recover from streaming errors', async () => {
      await adapter.connect();

      // First stream succeeds
      const data1 = generator.generate(5);
      await adapter.stream(data1);

      // Second stream should also succeed
      const data2 = generator.generate(5);
      const results = await adapter.stream(data2);

      expect(results).toHaveLength(5);
    });
  });

  describe('performance', () => {
    beforeEach(async () => {
      await adapter.connect();
    });

    it('should stream 100 items quickly', async () => {
      const data = generator.generate(100);

      const start = Date.now();
      await adapter.stream(data);
      const duration = Date.now() - start;

      expect(duration).toBeLessThan(500); // Less than 500ms
    });

    it('should handle multiple concurrent streams', async () => {
      const batches = Array.from({ length: 5 }, () => generator.generate(20));

      const start = Date.now();
      const results = await Promise.all(
        batches.map(batch => adapter.stream(batch))
      );
      const duration = Date.now() - start;

      expect(results).toHaveLength(5);
      expect(duration).toBeLessThan(1000);
    });
  });
});
New vendored file (216 lines): vendor/ruvector/npm/packages/agentic-synth/tests/integration/robotics.test.js
/**
 * Integration tests for Agentic Robotics adapter
 */

import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { RoboticsAdapter } from '../../src/adapters/robotics.js';
import { DataGenerator } from '../../src/generators/data-generator.js';

describe('Agentic Robotics Integration', () => {
  let adapter;
  let generator;

  beforeEach(async () => {
    adapter = new RoboticsAdapter({
      endpoint: 'http://localhost:9000',
      protocol: 'grpc'
    });

    generator = new DataGenerator({
      schema: {
        action: { type: 'string', length: 8 },
        value: { type: 'number', min: 0, max: 100 }
      }
    });

    await adapter.initialize();
  });

  afterEach(async () => {
    // Shut down only if still initialized (some tests shut down themselves).
    if (adapter.initialized) {
      await adapter.shutdown();
    }
  });

  describe('initialization', () => {
    it('should initialize adapter', async () => {
      const newAdapter = new RoboticsAdapter();
      await newAdapter.initialize();
      expect(newAdapter.initialized).toBe(true);
    });

    it('should handle re-initialization', async () => {
      await adapter.initialize();
      expect(adapter.initialized).toBe(true);
    });

    it('should shutdown adapter', async () => {
      await adapter.shutdown();
      expect(adapter.initialized).toBe(false);
    });
  });

  describe('command execution', () => {
    it('should send basic command', async () => {
      const command = {
        type: 'move',
        payload: { x: 10, y: 20 }
      };

      const result = await adapter.sendCommand(command);

      expect(result).toHaveProperty('commandId');
      expect(result.type).toBe('move');
      expect(result.status).toBe('executed');
      expect(result.result).toEqual({ x: 10, y: 20 });
    });

    it('should throw error when not initialized', async () => {
      await adapter.shutdown();

      await expect(adapter.sendCommand({ type: 'test' })).rejects.toThrow(
        'Robotics adapter not initialized'
      );
    });

    it('should validate command structure', async () => {
      await expect(adapter.sendCommand({})).rejects.toThrow('Invalid command: missing type');
      await expect(adapter.sendCommand(null)).rejects.toThrow('Invalid command: missing type');
    });

    it('should handle commands without payload', async () => {
      const command = { type: 'status' };
      const result = await adapter.sendCommand(command);

      expect(result.type).toBe('status');
      expect(result.status).toBe('executed');
    });
  });

  describe('status monitoring', () => {
    it('should get adapter status', async () => {
      const status = await adapter.getStatus();

      expect(status).toHaveProperty('initialized');
      expect(status).toHaveProperty('protocol');
      expect(status).toHaveProperty('endpoint');
      expect(status.initialized).toBe(true);
      expect(status.protocol).toBe('grpc');
    });

    it('should throw error when checking status while not initialized', async () => {
      await adapter.shutdown();

      await expect(adapter.getStatus()).rejects.toThrow(
        'Robotics adapter not initialized'
      );
    });
  });

  describe('end-to-end workflow', () => {
    it('should generate data and execute commands', async () => {
      // Generate synthetic command data
      const data = generator.generate(5);

      // Execute commands sequentially
      const results = [];
      for (const item of data) {
        const result = await adapter.sendCommand({
          type: 'execute',
          payload: item
        });
        results.push(result);
      }

      expect(results).toHaveLength(5);
      results.forEach(result => {
        expect(result.status).toBe('executed');
        expect(result).toHaveProperty('commandId');
      });
    });

    it('should handle batch command execution', async () => {
      const commands = [
        { type: 'init', payload: { config: 'test' } },
        { type: 'move', payload: { x: 1, y: 2 } },
        { type: 'rotate', payload: { angle: 90 } },
        { type: 'stop' }
      ];

      const results = await Promise.all(
        commands.map(cmd => adapter.sendCommand(cmd))
      );

      expect(results).toHaveLength(4);
      expect(results[0].type).toBe('init');
      expect(results[1].type).toBe('move');
      expect(results[2].type).toBe('rotate');
      expect(results[3].type).toBe('stop');
    });
  });

  describe('error handling', () => {
    it('should handle initialization failure gracefully', async () => {
      const failingAdapter = new RoboticsAdapter({
        endpoint: 'http://invalid:99999'
      });

      // Note: Mock implementation always succeeds
      await expect(failingAdapter.initialize()).resolves.toBe(true);
    });

    it('should handle command execution errors', async () => {
      await adapter.shutdown();

      await expect(adapter.sendCommand({ type: 'test' })).rejects.toThrow();
    });
  });

  describe('performance', () => {
    it('should execute 100 commands quickly', async () => {
      const commands = Array.from({ length: 100 }, (_, i) => ({
        type: 'test',
        payload: { index: i }
      }));

      const start = Date.now();
      await Promise.all(commands.map(cmd => adapter.sendCommand(cmd)));
      const duration = Date.now() - start;

      expect(duration).toBeLessThan(1000); // Less than 1 second
    });

    it('should handle concurrent command execution', async () => {
      const concurrentCommands = 50;
      const commands = Array.from({ length: concurrentCommands }, (_, i) => ({
        type: 'concurrent',
        payload: { id: i }
      }));

      const results = await Promise.all(
        commands.map(cmd => adapter.sendCommand(cmd))
      );

      expect(results).toHaveLength(concurrentCommands);
      results.forEach(result => {
        expect(result.status).toBe('executed');
      });
    });
  });

  describe('protocol support', () => {
    it('should support different protocols', async () => {
      const protocols = ['grpc', 'http', 'websocket'];

      for (const protocol of protocols) {
        const protocolAdapter = new RoboticsAdapter({ protocol });
        await protocolAdapter.initialize();

        const status = await protocolAdapter.getStatus();
        expect(status.protocol).toBe(protocol);

        await protocolAdapter.shutdown();
      }
    });
  });
});
New vendored file (325 lines): vendor/ruvector/npm/packages/agentic-synth/tests/integration/ruvector.test.js
/**
 * Integration tests for Ruvector adapter
 */

import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import { RuvectorAdapter } from '../../src/adapters/ruvector.js';
import { DataGenerator } from '../../src/generators/data-generator.js';

describe('Ruvector Integration', () => {
  let adapter;
  let generator;

  beforeEach(async () => {
    adapter = new RuvectorAdapter({
      dimensions: 128
    });

    generator = new DataGenerator({
      schema: {
        text: { type: 'string', length: 50 },
        embedding: { type: 'vector', dimensions: 128 }
      }
    });

    await adapter.initialize();
  });

  afterEach(() => {
    // Cleanup
  });

  describe('initialization', () => {
    it('should initialize with custom dimensions', async () => {
      const customAdapter = new RuvectorAdapter({ dimensions: 256 });
      await customAdapter.initialize();

      expect(customAdapter.dimensions).toBe(256);
      expect(customAdapter.initialized).toBe(true);
    });

    it('should use default dimensions', async () => {
      const defaultAdapter = new RuvectorAdapter();
      await defaultAdapter.initialize();

      expect(defaultAdapter.dimensions).toBe(128);
    });
  });

  describe('vector insertion', () => {
    it('should insert single vector', async () => {
      const vectors = [{
        id: 'vec1',
        vector: new Array(128).fill(0).map(() => Math.random())
      }];

      const results = await adapter.insert(vectors);

      expect(results).toHaveLength(1);
      expect(results[0].id).toBe('vec1');
      expect(results[0].status).toBe('inserted');
    });

    it('should insert multiple vectors', async () => {
      const vectors = Array.from({ length: 10 }, (_, i) => ({
        id: `vec${i}`,
        vector: new Array(128).fill(0).map(() => Math.random())
      }));

      const results = await adapter.insert(vectors);

      expect(results).toHaveLength(10);
    });

    it('should throw error when not initialized', async () => {
      const uninitializedAdapter = new RuvectorAdapter();

      await expect(uninitializedAdapter.insert([]))
        .rejects.toThrow('RuVector adapter not initialized');
    });

    it('should validate vector format', async () => {
      await expect(adapter.insert('not an array')).rejects.toThrow('Vectors must be an array');
    });

    it('should validate vector structure', async () => {
      const invalidVectors = [{ id: 'test' }]; // Missing vector field

      await expect(adapter.insert(invalidVectors))
        .rejects.toThrow('Each vector must have id and vector fields');
    });

    it('should validate vector dimensions', async () => {
      const wrongDimensions = [{
        id: 'test',
        vector: new Array(64).fill(0) // Wrong dimension
      }];

      await expect(adapter.insert(wrongDimensions))
        .rejects.toThrow('Vector dimension mismatch');
    });
  });

  describe('vector search', () => {
    beforeEach(async () => {
      // Insert some test vectors
      const vectors = Array.from({ length: 20 }, (_, i) => ({
        id: `vec${i}`,
        vector: new Array(128).fill(0).map(() => Math.random())
      }));
      await adapter.insert(vectors);
    });

    it('should search for similar vectors', async () => {
      const query = new Array(128).fill(0).map(() => Math.random());
      const results = await adapter.search(query, 5);

      expect(results).toHaveLength(5);
      results.forEach(result => {
        expect(result).toHaveProperty('id');
        expect(result).toHaveProperty('score');
      });
    });

    it('should return results sorted by score', async () => {
      const query = new Array(128).fill(0).map(() => Math.random());
      const results = await adapter.search(query, 10);

      // Check descending order
      for (let i = 1; i < results.length; i++) {
        expect(results[i - 1].score).toBeGreaterThanOrEqual(results[i].score);
      }
    });

    it('should respect k parameter', async () => {
      const query = new Array(128).fill(0).map(() => Math.random());

      const results3 = await adapter.search(query, 3);
      expect(results3).toHaveLength(3);

      const results10 = await adapter.search(query, 10);
      expect(results10).toHaveLength(10);
    });

    it('should validate query format', async () => {
      await expect(adapter.search('not an array', 5))
        .rejects.toThrow('Query must be an array');
    });

    it('should validate query dimensions', async () => {
      const wrongQuery = new Array(64).fill(0);

      await expect(adapter.search(wrongQuery, 5))
        .rejects.toThrow('Query dimension mismatch');
    });

    it('should throw error when not initialized', async () => {
      const uninitializedAdapter = new RuvectorAdapter();
      const query = new Array(128).fill(0);

      await expect(uninitializedAdapter.search(query, 5))
        .rejects.toThrow('RuVector adapter not initialized');
    });
  });

  describe('vector retrieval', () => {
    beforeEach(async () => {
      const testVector = {
        id: 'test-vec',
        vector: new Array(128).fill(0.5)
      };
      await adapter.insert([testVector]);
    });

    it('should get vector by ID', async () => {
      const result = await adapter.get('test-vec');

      expect(result).toBeDefined();
      expect(result.id).toBe('test-vec');
      expect(result.vector).toHaveLength(128);
    });

    it('should return null for non-existent ID', async () => {
      const result = await adapter.get('nonexistent');
      expect(result).toBeNull();
    });

    it('should throw error when not initialized', async () => {
      const uninitializedAdapter = new RuvectorAdapter();

      await expect(uninitializedAdapter.get('test'))
        .rejects.toThrow('RuVector adapter not initialized');
    });
  });

  describe('end-to-end workflow', () => {
    it('should generate embeddings and perform similarity search', async () => {
      // Generate synthetic data with embeddings
      const data = generator.generate(50);

      // Insert into Ruvector
      const vectors = data.map(item => ({
        id: `doc${item.id}`,
        vector: item.embedding
      }));
      await adapter.insert(vectors);

      // Search for similar vectors
      const queryVector = data[0].embedding;
      const results = await adapter.search(queryVector, 10);

      expect(results).toHaveLength(10);

      // First result should be the query itself (highest similarity)
      expect(results[0].id).toBe('doc0');
      expect(results[0].score).toBeGreaterThan(0.9);
    });

    it('should handle large-scale insertion and search', async () => {
      // Generate large dataset
      const largeData = generator.generate(1000);
      const vectors = largeData.map(item => ({
        id: `doc${item.id}`,
        vector: item.embedding
      }));

      // Insert in batches
      const batchSize = 100;
      for (let i = 0; i < vectors.length; i += batchSize) {
        const batch = vectors.slice(i, i + batchSize);
        await adapter.insert(batch);
      }

      // Perform searches
      const query = largeData[0].embedding;
      const results = await adapter.search(query, 20);

      expect(results).toHaveLength(20);
    });
  });

  describe('performance', () => {
    it('should insert 1000 vectors quickly', async () => {
      const vectors = Array.from({ length: 1000 }, (_, i) => ({
        id: `vec${i}`,
        vector: new Array(128).fill(0).map(() => Math.random())
      }));

      const start = Date.now();
      await adapter.insert(vectors);
      const duration = Date.now() - start;

      expect(duration).toBeLessThan(1000); // Less than 1 second
    });

    it('should perform search quickly', async () => {
      // Insert test data
      const vectors = Array.from({ length: 1000 }, (_, i) => ({
        id: `vec${i}`,
        vector: new Array(128).fill(0).map(() => Math.random())
      }));
      await adapter.insert(vectors);

      // Measure search time
      const query = new Array(128).fill(0).map(() => Math.random());

      const start = Date.now();
      await adapter.search(query, 10);
      const duration = Date.now() - start;

      expect(duration).toBeLessThan(100); // Less than 100ms
    });

    it('should handle concurrent searches', async () => {
      // Insert test data
      const vectors = Array.from({ length: 100 }, (_, i) => ({
        id: `vec${i}`,
        vector: new Array(128).fill(0).map(() => Math.random())
      }));
      await adapter.insert(vectors);

      // Perform concurrent searches
      const queries = Array.from({ length: 50 }, () =>
        new Array(128).fill(0).map(() => Math.random())
      );

      const start = Date.now();
      await Promise.all(queries.map(q => adapter.search(q, 5)));
      const duration = Date.now() - start;

      expect(duration).toBeLessThan(500);
    });
  });

  describe('accuracy', () => {
    it('should find exact match with highest score', async () => {
      const exactVector = new Array(128).fill(0.5);
      await adapter.insert([{ id: 'exact', vector: exactVector }]);

      const results = await adapter.search(exactVector, 1);

      expect(results[0].id).toBe('exact');
      expect(results[0].score).toBeCloseTo(1.0, 5);
    });

    it('should rank similar vectors correctly', async () => {
      const baseVector = new Array(128).fill(0.5);

      // Create slightly different vectors
      const similar = baseVector.map(v => v + 0.01);
      const different = new Array(128).fill(0).map(() => Math.random());

      await adapter.insert([
        { id: 'base', vector: baseVector },
        { id: 'similar', vector: similar },
        { id: 'different', vector: different }
      ]);

      const results = await adapter.search(baseVector, 3);

      // Base should be first, similar second
      expect(results[0].id).toBe('base');
      expect(results[1].id).toBe('similar');
    });
  });
});
Reference in New Issue
Block a user