Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'
This commit is contained in:
386
vendor/ruvector/crates/ruvector-node/tests/basic.test.mjs
vendored
Normal file
386
vendor/ruvector/crates/ruvector-node/tests/basic.test.mjs
vendored
Normal file
@@ -0,0 +1,386 @@
|
||||
import test from 'ava';
|
||||
import { VectorDB } from '../index.js';
|
||||
import { mkdtempSync, rmSync } from 'fs';
|
||||
import { tmpdir } from 'os';
|
||||
import { join } from 'path';
|
||||
|
||||
// Create a unique per-test temporary directory.
function createTempDir() {
  const prefix = join(tmpdir(), 'ruvector-test-');
  return mkdtempSync(prefix);
}
|
||||
|
||||
// Best-effort removal of a test's temporary directory; failures are
// logged but never allowed to fail the test run.
function cleanupTempDir(dir) {
  let failure = null;
  try {
    rmSync(dir, { force: true, recursive: true });
  } catch (e) {
    failure = e;
  }
  if (failure !== null) {
    console.warn('Failed to cleanup temp dir:', failure.message);
  }
}
|
||||
|
||||
test('VectorDB - version check', async (t) => {
  // BUG FIX: `require` is not defined inside an ES module (.mjs file),
  // so this test previously threw ReferenceError. Load the bindings
  // via dynamic import() instead.
  const { version } = await import('../index.js');
  t.is(typeof version, 'function');
  t.is(typeof version(), 'string');
  // Expect a semver-like "major.minor.patch" prefix.
  t.regex(version(), /^\d+\.\d+\.\d+/);
});
|
||||
|
||||
test('VectorDB - hello function', async (t) => {
  // BUG FIX: `require` is unavailable in an ES module (.mjs); use a
  // dynamic import() so the test can actually run.
  const { hello } = await import('../index.js');
  t.is(typeof hello, 'function');
  t.is(hello(), 'Hello from Ruvector Node.js bindings!');
});
|
||||
|
||||
// The constructor should accept an options object and expose the core API.
test('VectorDB - constructor with options', (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const options = {
    dimensions: 3,
    distanceMetric: 'Euclidean',
    storagePath: join(dir, 'test.db'),
  };
  const db = new VectorDB(options);

  t.truthy(db);
  t.is(typeof db.insert, 'function');
  t.is(typeof db.search, 'function');
});
|
||||
|
||||
// The static withDimensions() factory should return a usable instance.
test('VectorDB - withDimensions factory', (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = VectorDB.withDimensions(128);
  t.truthy(db);
});
|
||||
|
||||
test('VectorDB - insert single vector', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({ dimensions: 3, storagePath: join(dir, 'test.db') });

  const record = {
    vector: Float32Array.of(1.0, 2.0, 3.0),
    metadata: { text: 'test vector' },
  };
  const id = await db.insert(record);

  // insert() returns an auto-generated, non-empty string ID.
  t.is(typeof id, 'string');
  t.truthy(id.length > 0);
});
|
||||
|
||||
test('VectorDB - insert with custom ID', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({ dimensions: 3, storagePath: join(dir, 'test.db') });

  // A caller-supplied ID must be echoed back unchanged.
  const customId = 'custom-vector-123';
  const returned = await db.insert({
    id: customId,
    vector: Float32Array.of(1.0, 2.0, 3.0),
  });

  t.is(returned, customId);
});
|
||||
|
||||
test('VectorDB - insert batch', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({ dimensions: 3, storagePath: join(dir, 'test.db') });

  // Three orthogonal unit vectors.
  const batch = [
    [1.0, 0.0, 0.0],
    [0.0, 1.0, 0.0],
    [0.0, 0.0, 1.0],
  ].map((v) => ({ vector: new Float32Array(v) }));

  const ids = await db.insertBatch(batch);

  t.is(ids.length, 3);
  // Each inserted entry gets its own non-empty string ID.
  t.truthy(ids.every((id) => typeof id === 'string' && id.length > 0));
});
|
||||
|
||||
test('VectorDB - search exact match', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({
    dimensions: 3,
    distanceMetric: 'Euclidean',
    storagePath: join(dir, 'test.db'),
    hnswConfig: null, // Use flat index for testing
  });

  await db.insert({ id: 'v1', vector: Float32Array.of(1.0, 0.0, 0.0) });
  await db.insert({ id: 'v2', vector: Float32Array.of(0.0, 1.0, 0.0) });

  const results = await db.search({
    vector: Float32Array.of(1.0, 0.0, 0.0),
    k: 2,
  });

  t.truthy(Array.isArray(results));
  t.truthy(results.length >= 1);
  // The identical vector must rank first with ~zero distance.
  t.is(results[0].id, 'v1');
  t.true(results[0].score < 0.01);
});
|
||||
|
||||
test('VectorDB - search with metadata filter', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({ dimensions: 3, storagePath: join(dir, 'test.db') });

  await db.insert({
    vector: Float32Array.of(1.0, 0.0, 0.0),
    metadata: { category: 'A' },
  });
  await db.insert({
    vector: Float32Array.of(0.9, 0.1, 0.0),
    metadata: { category: 'B' },
  });

  // k exceeds the collection size, so only the filter limits results.
  const results = await db.search({
    vector: Float32Array.of(1.0, 0.0, 0.0),
    k: 10,
    filter: { category: 'A' },
  });

  t.truthy(results.length >= 1);
  // The top hit must satisfy the filter.
  t.is(results[0].metadata?.category, 'A');
});
|
||||
|
||||
test('VectorDB - get by ID', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({ dimensions: 3, storagePath: join(dir, 'test.db') });

  const id = await db.insert({
    vector: Float32Array.of(1.0, 2.0, 3.0),
    metadata: { text: 'test' },
  });

  const entry = await db.get(id);
  t.truthy(entry);
  // Both vector payload and metadata round-trip intact.
  t.deepEqual(Array.from(entry.vector), [1.0, 2.0, 3.0]);
  t.is(entry.metadata?.text, 'test');
});
|
||||
|
||||
test('VectorDB - get non-existent ID', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({ dimensions: 3, storagePath: join(dir, 'test.db') });

  // Looking up an unknown ID resolves to null rather than throwing.
  const entry = await db.get('non-existent-id');
  t.is(entry, null);
});
|
||||
|
||||
test('VectorDB - delete', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({ dimensions: 3, storagePath: join(dir, 'test.db') });

  const id = await db.insert({ vector: Float32Array.of(1.0, 2.0, 3.0) });

  // delete() reports success, after which the entry is gone.
  t.true(await db.delete(id));
  t.is(await db.get(id), null);
});
|
||||
|
||||
test('VectorDB - delete non-existent', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({ dimensions: 3, storagePath: join(dir, 'test.db') });

  // Deleting an unknown ID is a no-op that reports false.
  t.false(await db.delete('non-existent-id'));
});
|
||||
|
||||
test('VectorDB - len and isEmpty', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({ dimensions: 3, storagePath: join(dir, 'test.db') });

  // A fresh database is empty.
  t.true(await db.isEmpty());
  t.is(await db.len(), 0);

  // The counters track every insert.
  await db.insert({ vector: Float32Array.of(1, 2, 3) });
  t.false(await db.isEmpty());
  t.is(await db.len(), 1);

  await db.insert({ vector: Float32Array.of(4, 5, 6) });
  t.is(await db.len(), 2);
});
|
||||
|
||||
test('VectorDB - cosine similarity', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({
    dimensions: 3,
    distanceMetric: 'Cosine',
    storagePath: join(dir, 'test.db'),
  });

  await db.insert({ id: 'v1', vector: Float32Array.of(1.0, 0.0, 0.0) });
  await db.insert({ id: 'v2', vector: Float32Array.of(0.5, 0.5, 0.0) });

  const results = await db.search({
    vector: Float32Array.of(1.0, 0.0, 0.0),
    k: 2,
  });

  t.truthy(results.length >= 1);
  // v1 points exactly along the query direction, so it must rank first.
  t.is(results[0].id, 'v1');
});
|
||||
|
||||
test('VectorDB - HNSW index configuration', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({
    dimensions: 128,
    storagePath: join(dir, 'test.db'),
    hnswConfig: {
      m: 16,
      efConstruction: 100,
      efSearch: 50,
      maxElements: 10000,
    },
  });

  // Ten deterministic 128-D vectors.
  const vectors = Array.from({ length: 10 }, (_, i) =>
    Float32Array.from({ length: 128 }, (_, j) => (i + j) * 0.01)
  );

  const ids = await db.insertBatch(vectors.map((vector) => ({ vector })));
  t.is(ids.length, 10);

  const results = await db.search({ vector: vectors[0], k: 5 });
  t.truthy(results.length >= 1);
});
|
||||
|
||||
test('VectorDB - memory stress test', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({ dimensions: 128, storagePath: join(dir, 'test.db') });

  // Load 1000 random vectors, 100 at a time.
  const batchSize = 100;
  const totalVectors = 1000;

  for (let inserted = 0; inserted < totalVectors; inserted += batchSize) {
    const batch = Array.from({ length: batchSize }, () => ({
      vector: Float32Array.from({ length: 128 }, () => Math.random()),
    }));
    await db.insertBatch(batch);
  }

  t.is(await db.len(), totalVectors);

  // Search still returns a full result set afterwards.
  const results = await db.search({
    vector: Float32Array.from({ length: 128 }, () => Math.random()),
    k: 10,
  });
  t.is(results.length, 10);
});
|
||||
|
||||
test('VectorDB - concurrent operations', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({ dimensions: 3, storagePath: join(dir, 'test.db') });

  // Launch 50 inserts without awaiting in between.
  const inserts = [];
  for (let i = 0; i < 50; i++) {
    inserts.push(db.insert({ vector: Float32Array.of(i, i + 1, i + 2) }));
  }
  const ids = await Promise.all(inserts);
  t.is(ids.length, 50);
  // No two concurrent inserts may share an ID.
  t.is(new Set(ids).size, 50);

  // Ten simultaneous searches must all succeed.
  const searches = Array.from({ length: 10 }, () =>
    db.search({ vector: Float32Array.of(1, 2, 3), k: 5 })
  );
  const results = await Promise.all(searches);
  t.is(results.length, 10);
  for (const r of results) {
    t.truthy(r.length >= 1);
  }
});
|
||||
258
vendor/ruvector/crates/ruvector-node/tests/benchmark.test.mjs
vendored
Normal file
258
vendor/ruvector/crates/ruvector-node/tests/benchmark.test.mjs
vendored
Normal file
@@ -0,0 +1,258 @@
|
||||
import test from 'ava';
|
||||
import { VectorDB } from '../index.js';
|
||||
import { mkdtempSync, rmSync } from 'fs';
|
||||
import { tmpdir } from 'os';
|
||||
import { join } from 'path';
|
||||
|
||||
// Create a unique temporary directory for one benchmark run.
function createTempDir() {
  const prefix = join(tmpdir(), 'ruvector-bench-');
  return mkdtempSync(prefix);
}
|
||||
|
||||
// Best-effort removal of a benchmark's temporary directory; never throws.
function cleanupTempDir(dir) {
  try {
    rmSync(dir, { force: true, recursive: true });
  } catch (e) {
    // A leftover directory is harmless for benchmarks; just report it.
    console.warn('Failed to cleanup temp dir:', e.message);
  }
}
|
||||
|
||||
// Time a synchronous function: logs "<name>: <ms>ms" and returns the
// function's result together with the elapsed milliseconds.
function measure(name, fn) {
  const start = process.hrtime.bigint();
  const result = fn();
  const elapsedNs = process.hrtime.bigint() - start;
  const durationMs = Number(elapsedNs) / 1_000_000;
  console.log(`${name}: ${durationMs.toFixed(2)}ms`);
  return { result, durationMs };
}
|
||||
|
||||
// Time an async function: awaits it, logs "<name>: <ms>ms", and returns
// the awaited result together with the elapsed milliseconds.
async function measureAsync(name, fn) {
  const start = process.hrtime.bigint();
  const result = await fn();
  const elapsedNs = process.hrtime.bigint() - start;
  const durationMs = Number(elapsedNs) / 1_000_000;
  console.log(`${name}: ${durationMs.toFixed(2)}ms`);
  return { result, durationMs };
}
|
||||
|
||||
test('Benchmark - batch insert performance', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({ dimensions: 128, storagePath: join(dir, 'bench.db') });

  const vectors = Array.from({ length: 1000 }, () => ({
    vector: Float32Array.from({ length: 128 }, () => Math.random()),
  }));

  const { durationMs } = await measureAsync('Insert 1000 vectors (batch)', () =>
    db.insertBatch(vectors)
  );

  // 1000 vectors should land in well under a second.
  t.true(durationMs < 1000);
  t.is(await db.len(), 1000);

  const throughput = (1000 / durationMs) * 1000;
  console.log(`Throughput: ${throughput.toFixed(0)} vectors/sec`);
});
|
||||
|
||||
test('Benchmark - search performance', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({
    dimensions: 128,
    storagePath: join(dir, 'bench.db'),
    hnswConfig: {
      m: 32,
      efConstruction: 200,
      efSearch: 100,
    },
  });

  // Populate with 10k random vectors, 1000 per batch.
  const batchSize = 1000;
  const totalVectors = 10000;

  console.log(`Inserting ${totalVectors} vectors...`);
  for (let inserted = 0; inserted < totalVectors; inserted += batchSize) {
    await db.insertBatch(
      Array.from({ length: batchSize }, () => ({
        vector: Float32Array.from({ length: 128 }, () => Math.random()),
      }))
    );
  }

  t.is(await db.len(), totalVectors);

  // Single-query latency.
  const queryVector = Float32Array.from({ length: 128 }, () => Math.random());

  const { durationMs } = await measureAsync('Search 10k vectors (k=10)', () =>
    db.search({ vector: queryVector, k: 10 })
  );

  // A single kNN query over 10k vectors should finish in under 100ms.
  t.true(durationMs < 100);
  console.log(`Search latency: ${durationMs.toFixed(2)}ms`);

  // Throughput over many concurrent queries.
  const numQueries = 100;
  const { durationMs: totalDuration } = await measureAsync(
    `${numQueries} searches`,
    () =>
      Promise.all(
        Array.from({ length: numQueries }, () =>
          db.search({
            vector: Float32Array.from({ length: 128 }, () => Math.random()),
            k: 10,
          })
        )
      )
  );

  const avgLatency = totalDuration / numQueries;
  const qps = (numQueries / totalDuration) * 1000;
  console.log(`Average latency: ${avgLatency.toFixed(2)}ms`);
  console.log(`QPS: ${qps.toFixed(0)} queries/sec`);

  t.pass();
});
|
||||
|
||||
test('Benchmark - concurrent insert and search', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({ dimensions: 64, storagePath: join(dir, 'bench.db') });

  // Seed with 1000 vectors before mixing workloads.
  await db.insertBatch(
    Array.from({ length: 1000 }, () => ({
      vector: Float32Array.from({ length: 64 }, () => Math.random()),
    }))
  );

  // Kick off 50 inserts and 50 searches that are all in flight together.
  const operations = [];
  for (let i = 0; i < 50; i++) {
    operations.push(
      db.insert({
        vector: Float32Array.from({ length: 64 }, () => Math.random()),
      })
    );
  }
  for (let i = 0; i < 50; i++) {
    operations.push(
      db.search({
        vector: Float32Array.from({ length: 64 }, () => Math.random()),
        k: 10,
      })
    );
  }

  const { durationMs } = await measureAsync(
    '50 inserts + 50 searches (concurrent)',
    () => Promise.all(operations)
  );

  t.true(durationMs < 2000);
  console.log(`Mixed workload: ${durationMs.toFixed(2)}ms`);
});
|
||||
|
||||
test('Benchmark - memory efficiency', async (t) => {
  const dir = createTempDir();
  t.teardown(() => cleanupTempDir(dir));

  const db = new VectorDB({
    dimensions: 384,
    storagePath: join(dir, 'bench.db'),
    quantization: { type: 'scalar' },
  });

  const memBefore = process.memoryUsage();

  // Insert 5000 vectors, 500 per batch.
  const batchSize = 500;
  const totalVectors = 5000;

  for (let inserted = 0; inserted < totalVectors; inserted += batchSize) {
    await db.insertBatch(
      Array.from({ length: batchSize }, () => ({
        vector: Float32Array.from({ length: 384 }, () => Math.random()),
      }))
    );
  }

  const memAfter = process.memoryUsage();
  const heapUsed = (memAfter.heapUsed - memBefore.heapUsed) / 1024 / 1024;

  console.log(`Heap used for ${totalVectors} 384D vectors: ${heapUsed.toFixed(2)}MB`);
  console.log(`Per-vector memory: ${((heapUsed / totalVectors) * 1024).toFixed(2)}KB`);

  t.is(await db.len(), totalVectors);
  t.pass();
});
|
||||
|
||||
// Compare insert/search timings across common embedding dimensions.
test('Benchmark - different vector dimensions', async (t) => {
  const dimensions = [128, 384, 768, 1536];
  const numVectors = 1000;

  for (const dim of dimensions) {
    const tempDir = createTempDir();
    // FIX: cleanup now runs in a `finally` block, so the temp directory no
    // longer leaks when an insert/search failure throws out of the loop body
    // (every other test guards cleanup via t.teardown; this one did not).
    try {
      const db = new VectorDB({
        dimensions: dim,
        storagePath: join(tempDir, 'bench.db'),
      });

      const vectors = Array.from({ length: numVectors }, () => ({
        vector: new Float32Array(dim).fill(0).map(() => Math.random()),
      }));

      const { durationMs: insertTime } = await measureAsync(
        `Insert ${numVectors} ${dim}D vectors`,
        async () => {
          return await db.insertBatch(vectors);
        }
      );

      const { durationMs: searchTime } = await measureAsync(
        `Search ${dim}D vectors`,
        async () => {
          return await db.search({
            vector: new Float32Array(dim).fill(0).map(() => Math.random()),
            k: 10,
          });
        }
      );

      console.log(
        `${dim}D - Insert: ${insertTime.toFixed(2)}ms, Search: ${searchTime.toFixed(2)}ms`
      );
    } finally {
      cleanupTempDir(tempDir);
    }
  }

  t.pass();
});
|
||||
Reference in New Issue
Block a user