Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'

This commit is contained in:
ruv
2026-02-28 14:39:40 -05:00
7854 changed files with 3522914 additions and 0 deletions

View File

@@ -0,0 +1,17 @@
/**
* Attention Commands
* CLI commands for attention mechanism operations
*/
import type { RuVectorClient } from '../client.js';
/** Options parsed from the CLI for the `attention compute` command. */
export interface AttentionComputeOptions {
/** JSON-encoded query vector (parsed as number[] by the implementation). */
query: string;
/** JSON-encoded key vectors (parsed as number[][]). */
keys: string;
/** JSON-encoded value vectors (parsed as number[][]). */
values: string;
/** Attention mechanism to apply. */
type: 'scaled_dot' | 'multi_head' | 'flash';
}
/** Static CLI handlers for attention operations (generated declaration file). */
export declare class AttentionCommands {
static compute(client: RuVectorClient, options: AttentionComputeOptions): Promise<void>;
static listTypes(client: RuVectorClient): Promise<void>;
}
export default AttentionCommands;
//# sourceMappingURL=attention.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"attention.d.ts","sourceRoot":"","sources":["attention.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAKH,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAEnD,MAAM,WAAW,uBAAuB;IACtC,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;IACf,IAAI,EAAE,YAAY,GAAG,YAAY,GAAG,OAAO,CAAC;CAC7C;AAED,qBAAa,iBAAiB;WACf,OAAO,CAClB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,uBAAuB,GAC/B,OAAO,CAAC,IAAI,CAAC;WA0CH,SAAS,CAAC,MAAM,EAAE,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC;CAqD9D;AAED,eAAe,iBAAiB,CAAC"}

View File

@@ -0,0 +1,99 @@
"use strict";
/**
* Attention Commands
* CLI commands for attention mechanism operations
*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.AttentionCommands = void 0;
const chalk_1 = __importDefault(require("chalk"));
const ora_1 = __importDefault(require("ora"));
const cli_table3_1 = __importDefault(require("cli-table3"));
// Compiled CommonJS output (tsc); the source map points at attention.ts —
// prefer editing the TypeScript source and regenerating this file.
class AttentionCommands {
// Compute attention for the JSON-encoded query/keys/values options and print
// the output vector (plus the weight matrix when the server returns one).
// Connects the client on entry and always disconnects in the finally block.
static async compute(client, options) {
const spinner = (0, ora_1.default)('Computing attention...').start();
try {
await client.connect();
// NOTE(review): inputs are JSON.parse'd without validation — a non-array
// payload would reach computeAttention unchecked.
const query = JSON.parse(options.query);
const keys = JSON.parse(options.keys);
const values = JSON.parse(options.values);
const result = await client.computeAttention(query, keys, values, options.type);
spinner.succeed(chalk_1.default.green('Attention computed successfully'));
console.log(chalk_1.default.bold.blue('\nAttention Output:'));
console.log(chalk_1.default.gray('─'.repeat(40)));
// Display output vector (truncated to the first 8 components)
console.log(`${chalk_1.default.green('Output Vector:')} [${result.output.slice(0, 8).map(v => v.toFixed(4)).join(', ')}${result.output.length > 8 ? '...' : ''}]`);
console.log(`${chalk_1.default.gray('Dimensions:')} ${result.output.length}`);
// Display attention weights if available
if (result.weights) {
console.log(chalk_1.default.bold.blue('\nAttention Weights:'));
const table = new cli_table3_1.default({
head: keys.map((_, i) => chalk_1.default.cyan(`K${i}`)),
});
// Show at most 5 rows of the weight matrix.
for (let i = 0; i < Math.min(result.weights.length, 5); i++) {
table.push(result.weights[i].slice(0, keys.length).map(w => w.toFixed(4)));
}
console.log(table.toString());
}
}
catch (err) {
spinner.fail(chalk_1.default.red('Failed to compute attention'));
console.error(chalk_1.default.red(err.message));
}
finally {
await client.disconnect();
}
}
// Print the attention mechanisms reported by the server, grouped into the
// hard-coded categories below, with an "Other" bucket for unknown ones.
static async listTypes(client) {
const spinner = (0, ora_1.default)('Fetching attention types...').start();
try {
await client.connect();
const types = await client.listAttentionTypes();
spinner.stop();
console.log(chalk_1.default.bold.blue('\nAvailable Attention Mechanisms:'));
console.log(chalk_1.default.gray('─'.repeat(40)));
// Group by category
const categories = {
'Core': ['scaled_dot_product_attention', 'multi_head_attention', 'flash_attention'],
'Sparse': ['sparse_attention', 'local_attention', 'strided_attention', 'random_attention', 'longformer_attention'],
'Memory': ['memory_attention', 'compressive_attention', 'memory_compressed_attention'],
'Cross-Modal': ['cross_attention', 'cross_modal_attention', 'multimodal_attention'],
'Efficient': ['linear_attention', 'performer_attention', 'reformer_attention', 'synthesizer_attention'],
'Positional': ['relative_attention', 'rotary_attention', 'alibi_attention', 'rope_attention'],
'Graph': ['graph_attention', 'gat_attention', 'sparse_graph_attention'],
'Advanced': ['self_attention', 'causal_attention', 'bidirectional_attention', 'grouped_query_attention'],
};
// Only print a category heading when the server supports at least one
// of its entries.
for (const [category, items] of Object.entries(categories)) {
const available = items.filter(t => types.includes(t));
if (available.length > 0) {
console.log(`\n${chalk_1.default.yellow(category)}:`);
for (const item of available) {
console.log(` ${chalk_1.default.green('✓')} ${item}`);
}
}
}
// Show any types not in categories
const categorized = Object.values(categories).flat();
const uncategorized = types.filter(t => !categorized.includes(t));
if (uncategorized.length > 0) {
console.log(`\n${chalk_1.default.yellow('Other')}:`);
for (const item of uncategorized) {
console.log(` ${chalk_1.default.green('✓')} ${item}`);
}
}
console.log(`\n${chalk_1.default.gray(`Total: ${types.length} attention mechanisms`)}`);
}
catch (err) {
spinner.fail(chalk_1.default.red('Failed to list attention types'));
console.error(chalk_1.default.red(err.message));
}
finally {
await client.disconnect();
}
}
}
exports.AttentionCommands = AttentionCommands;
exports.default = AttentionCommands;
//# sourceMappingURL=attention.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"attention.js","sourceRoot":"","sources":["attention.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;;;;AAEH,kDAA0B;AAC1B,8CAAsB;AACtB,4DAA+B;AAU/B,MAAa,iBAAiB;IAC5B,MAAM,CAAC,KAAK,CAAC,OAAO,CAClB,MAAsB,EACtB,OAAgC;QAEhC,MAAM,OAAO,GAAG,IAAA,aAAG,EAAC,wBAAwB,CAAC,CAAC,KAAK,EAAE,CAAC;QAEtD,IAAI,CAAC;YACH,MAAM,MAAM,CAAC,OAAO,EAAE,CAAC;YAEvB,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,KAAK,CAAa,CAAC;YACpD,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAe,CAAC;YACpD,MAAM,MAAM,GAAG,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,CAAe,CAAC;YAExD,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,gBAAgB,CAAC,KAAK,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,CAAC,IAAI,CAAC,CAAC;YAEhF,OAAO,CAAC,OAAO,CAAC,eAAK,CAAC,KAAK,CAAC,iCAAiC,CAAC,CAAC,CAAC;YAEhE,OAAO,CAAC,GAAG,CAAC,eAAK,CAAC,IAAI,CAAC,IAAI,CAAC,qBAAqB,CAAC,CAAC,CAAC;YACpD,OAAO,CAAC,GAAG,CAAC,eAAK,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;YAExC,wBAAwB;YACxB,OAAO,CAAC,GAAG,CAAC,GAAG,eAAK,CAAC,KAAK,CAAC,gBAAgB,CAAC,KAAK,MAAM,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,MAAM,CAAC,MAAM,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC;YACzJ,OAAO,CAAC,GAAG,CAAC,GAAG,eAAK,CAAC,IAAI,CAAC,aAAa,CAAC,IAAI,MAAM,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC;YAEpE,yCAAyC;YACzC,IAAI,MAAM,CAAC,OAAO,EAAE,CAAC;gBACnB,OAAO,CAAC,GAAG,CAAC,eAAK,CAAC,IAAI,CAAC,IAAI,CAAC,sBAAsB,CAAC,CAAC,CAAC;gBACrD,MAAM,KAAK,GAAG,IAAI,oBAAK,CAAC;oBACtB,IAAI,EAAE,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,eAAK,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC;iBAC9C,CAAC,CAAC;gBAEH,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,OAAO,CAAC,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;oBAC5D,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;gBAC7E,CAAC;gBAED,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;YAChC
,CAAC;QACH,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,OAAO,CAAC,IAAI,CAAC,eAAK,CAAC,GAAG,CAAC,6BAA6B,CAAC,CAAC,CAAC;YACvD,OAAO,CAAC,KAAK,CAAC,eAAK,CAAC,GAAG,CAAE,GAAa,CAAC,OAAO,CAAC,CAAC,CAAC;QACnD,CAAC;gBAAS,CAAC;YACT,MAAM,MAAM,CAAC,UAAU,EAAE,CAAC;QAC5B,CAAC;IACH,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,SAAS,CAAC,MAAsB;QAC3C,MAAM,OAAO,GAAG,IAAA,aAAG,EAAC,6BAA6B,CAAC,CAAC,KAAK,EAAE,CAAC;QAE3D,IAAI,CAAC;YACH,MAAM,MAAM,CAAC,OAAO,EAAE,CAAC;YAEvB,MAAM,KAAK,GAAG,MAAM,MAAM,CAAC,kBAAkB,EAAE,CAAC;YAEhD,OAAO,CAAC,IAAI,EAAE,CAAC;YAEf,OAAO,CAAC,GAAG,CAAC,eAAK,CAAC,IAAI,CAAC,IAAI,CAAC,mCAAmC,CAAC,CAAC,CAAC;YAClE,OAAO,CAAC,GAAG,CAAC,eAAK,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;YAExC,oBAAoB;YACpB,MAAM,UAAU,GAAG;gBACjB,MAAM,EAAE,CAAC,8BAA8B,EAAE,sBAAsB,EAAE,iBAAiB,CAAC;gBACnF,QAAQ,EAAE,CAAC,kBAAkB,EAAE,iBAAiB,EAAE,mBAAmB,EAAE,kBAAkB,EAAE,sBAAsB,CAAC;gBAClH,QAAQ,EAAE,CAAC,kBAAkB,EAAE,uBAAuB,EAAE,6BAA6B,CAAC;gBACtF,aAAa,EAAE,CAAC,iBAAiB,EAAE,uBAAuB,EAAE,sBAAsB,CAAC;gBACnF,WAAW,EAAE,CAAC,kBAAkB,EAAE,qBAAqB,EAAE,oBAAoB,EAAE,uBAAuB,CAAC;gBACvG,YAAY,EAAE,CAAC,oBAAoB,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,gBAAgB,CAAC;gBAC7F,OAAO,EAAE,CAAC,iBAAiB,EAAE,eAAe,EAAE,wBAAwB,CAAC;gBACvE,UAAU,EAAE,CAAC,gBAAgB,EAAE,kBAAkB,EAAE,yBAAyB,EAAE,yBAAyB,CAAC;aACzG,CAAC;YAEF,KAAK,MAAM,CAAC,QAAQ,EAAE,KAAK,CAAC,IAAI,MAAM,CAAC,OAAO,CAAC,UAAU,CAAC,EAAE,CAAC;gBAC3D,MAAM,SAAS,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;gBACvD,IAAI,SAAS,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;oBACzB,OAAO,CAAC,GAAG,CAAC,KAAK,eAAK,CAAC,MAAM,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC;oBAC5C,KAAK,MAAM,IAAI,IAAI,SAAS,EAAE,CAAC;wBAC7B,OAAO,CAAC,GAAG,CAAC,KAAK,eAAK,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,IAAI,EAAE,CAAC,CAAC;oBAC/C,CAAC;gBACH,CAAC;YACH,CAAC;YAED,mCAAmC;YACnC,MAAM,WAAW,GAAG,MAAM,CAAC,MAAM,CAAC,UAAU,CAAC,CAAC,IAAI,EAAE,CAAC;YACrD,MAAM,aAAa,GAAG,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,WAAW,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC;YAClE,IAAI,aAAa,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBAC7B,OAAO,CAAC,GAAG,CAAC,KAAK,eAAK,CAAC,MAAM,CAAC
,OAAO,CAAC,GAAG,CAAC,CAAC;gBAC3C,KAAK,MAAM,IAAI,IAAI,aAAa,EAAE,CAAC;oBACjC,OAAO,CAAC,GAAG,CAAC,KAAK,eAAK,CAAC,KAAK,CAAC,GAAG,CAAC,IAAI,IAAI,EAAE,CAAC,CAAC;gBAC/C,CAAC;YACH,CAAC;YAED,OAAO,CAAC,GAAG,CAAC,KAAK,eAAK,CAAC,IAAI,CAAC,UAAU,KAAK,CAAC,MAAM,uBAAuB,CAAC,EAAE,CAAC,CAAC;QAChF,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,OAAO,CAAC,IAAI,CAAC,eAAK,CAAC,GAAG,CAAC,gCAAgC,CAAC,CAAC,CAAC;YAC1D,OAAO,CAAC,KAAK,CAAC,eAAK,CAAC,GAAG,CAAE,GAAa,CAAC,OAAO,CAAC,CAAC,CAAC;QACnD,CAAC;gBAAS,CAAC;YACT,MAAM,MAAM,CAAC,UAAU,EAAE,CAAC;QAC5B,CAAC;IACH,CAAC;CACF;AAnGD,8CAmGC;AAED,kBAAe,iBAAiB,CAAC"}

View File

@@ -0,0 +1,119 @@
/**
* Attention Commands
* CLI commands for attention mechanism operations
*/
import chalk from 'chalk';
import ora from 'ora';
import Table from 'cli-table3';
import type { RuVectorClient } from '../client.js';
/**
 * Options parsed from the CLI for the `attention compute` command.
 * All vector payloads arrive as JSON-encoded strings.
 */
export interface AttentionComputeOptions {
/** JSON-encoded query vector (parsed as number[]). */
query: string;
/** JSON-encoded key vectors (parsed as number[][]). */
keys: string;
/** JSON-encoded value vectors (parsed as number[][]). */
values: string;
/** Attention mechanism to apply. */
type: 'scaled_dot' | 'multi_head' | 'flash';
}
/**
 * CLI command handlers for attention mechanism operations.
 *
 * Each command manages its own client connection lifecycle: connect on entry,
 * report progress via an ora spinner, and always disconnect in `finally`.
 */
export class AttentionCommands {
  /**
   * Parse a JSON CLI option into a 1-D numeric vector.
   * Replaces the previous unchecked `as number[]` cast, which let malformed
   * JSON (e.g. a scalar or string array) reach the server.
   * @throws Error when the input is not a JSON array of numbers.
   */
  private static parseVector(raw: string, label: string): number[] {
    const parsed: unknown = JSON.parse(raw);
    if (!Array.isArray(parsed) || !parsed.every((v) => typeof v === 'number')) {
      throw new Error(`${label} must be a JSON array of numbers`);
    }
    return parsed;
  }

  /**
   * Parse a JSON CLI option into a 2-D numeric matrix.
   * @throws Error when the input is not a JSON array of number arrays.
   */
  private static parseMatrix(raw: string, label: string): number[][] {
    const parsed: unknown = JSON.parse(raw);
    if (
      !Array.isArray(parsed) ||
      !parsed.every((row) => Array.isArray(row) && row.every((v) => typeof v === 'number'))
    ) {
      throw new Error(`${label} must be a JSON array of number arrays`);
    }
    return parsed;
  }

  /** Extract a printable message from an unknown caught value (no blind cast). */
  private static errorMessage(err: unknown): string {
    return err instanceof Error ? err.message : String(err);
  }

  /**
   * Compute attention for the given query/keys/values and print the output
   * vector, plus the weight matrix when the server returns one.
   *
   * @param client - RuVector client; connected here and disconnected in finally.
   * @param options - Raw CLI options; vector payloads are JSON-encoded strings.
   */
  static async compute(
    client: RuVectorClient,
    options: AttentionComputeOptions
  ): Promise<void> {
    const spinner = ora('Computing attention...').start();
    try {
      await client.connect();
      // Validate shapes up front so malformed input fails with a clear
      // message instead of reaching the server via an unchecked cast.
      const query = AttentionCommands.parseVector(options.query, 'query');
      const keys = AttentionCommands.parseMatrix(options.keys, 'keys');
      const values = AttentionCommands.parseMatrix(options.values, 'values');
      const result = await client.computeAttention(query, keys, values, options.type);
      spinner.succeed(chalk.green('Attention computed successfully'));
      console.log(chalk.bold.blue('\nAttention Output:'));
      console.log(chalk.gray('─'.repeat(40)));
      // Display output vector (truncated to the first 8 components)
      console.log(`${chalk.green('Output Vector:')} [${result.output.slice(0, 8).map(v => v.toFixed(4)).join(', ')}${result.output.length > 8 ? '...' : ''}]`);
      console.log(`${chalk.gray('Dimensions:')} ${result.output.length}`);
      // Display attention weights if available (at most 5 rows)
      if (result.weights) {
        console.log(chalk.bold.blue('\nAttention Weights:'));
        const table = new Table({
          head: keys.map((_, i) => chalk.cyan(`K${i}`)),
        });
        for (let i = 0; i < Math.min(result.weights.length, 5); i++) {
          table.push(result.weights[i].slice(0, keys.length).map(w => w.toFixed(4)));
        }
        console.log(table.toString());
      }
    } catch (err) {
      spinner.fail(chalk.red('Failed to compute attention'));
      console.error(chalk.red(AttentionCommands.errorMessage(err)));
    } finally {
      await client.disconnect();
    }
  }

  /**
   * List the attention mechanisms supported by the server, grouped by
   * category, with an "Other" bucket for anything not in the known groups.
   */
  static async listTypes(client: RuVectorClient): Promise<void> {
    const spinner = ora('Fetching attention types...').start();
    try {
      await client.connect();
      const types = await client.listAttentionTypes();
      spinner.stop();
      console.log(chalk.bold.blue('\nAvailable Attention Mechanisms:'));
      console.log(chalk.gray('─'.repeat(40)));
      // Group by category
      const categories = {
        'Core': ['scaled_dot_product_attention', 'multi_head_attention', 'flash_attention'],
        'Sparse': ['sparse_attention', 'local_attention', 'strided_attention', 'random_attention', 'longformer_attention'],
        'Memory': ['memory_attention', 'compressive_attention', 'memory_compressed_attention'],
        'Cross-Modal': ['cross_attention', 'cross_modal_attention', 'multimodal_attention'],
        'Efficient': ['linear_attention', 'performer_attention', 'reformer_attention', 'synthesizer_attention'],
        'Positional': ['relative_attention', 'rotary_attention', 'alibi_attention', 'rope_attention'],
        'Graph': ['graph_attention', 'gat_attention', 'sparse_graph_attention'],
        'Advanced': ['self_attention', 'causal_attention', 'bidirectional_attention', 'grouped_query_attention'],
      };
      for (const [category, items] of Object.entries(categories)) {
        const available = items.filter(t => types.includes(t));
        if (available.length > 0) {
          console.log(`\n${chalk.yellow(category)}:`);
          for (const item of available) {
            console.log(` ${chalk.green('✓')} ${item}`);
          }
        }
      }
      // Show any types not in categories. A Set gives O(1) membership tests
      // instead of Array#includes inside the filter (O(n·m)).
      const categorized = new Set(Object.values(categories).flat());
      const uncategorized = types.filter(t => !categorized.has(t));
      if (uncategorized.length > 0) {
        console.log(`\n${chalk.yellow('Other')}:`);
        for (const item of uncategorized) {
          console.log(` ${chalk.green('✓')} ${item}`);
        }
      }
      console.log(`\n${chalk.gray(`Total: ${types.length} attention mechanisms`)}`);
    } catch (err) {
      spinner.fail(chalk.red('Failed to list attention types'));
      console.error(chalk.red(AttentionCommands.errorMessage(err)));
    } finally {
      await client.disconnect();
    }
  }
}
export default AttentionCommands;

View File

@@ -0,0 +1,20 @@
/**
* Benchmark Commands
* CLI commands for performance benchmarking
*/
import type { RuVectorClient } from '../client.js';
/** CLI options for `bench run`; numeric values arrive as raw strings. */
export interface BenchmarkRunOptions {
/** Which benchmark suite to run. */
type: 'vector' | 'attention' | 'gnn' | 'all';
/** Dataset size as a decimal string. */
size: string;
/** Vector dimensionality as a decimal string. */
dim: string;
}
/** Output format for `bench report`. */
export interface BenchmarkReportOptions {
format: 'json' | 'table' | 'markdown';
}
/** Static handlers for the benchmark CLI subcommands (generated declaration file). */
export declare class BenchmarkCommands {
static run(client: RuVectorClient, options: BenchmarkRunOptions): Promise<void>;
static report(client: RuVectorClient, options: BenchmarkReportOptions): Promise<void>;
static showInfo(): void;
}
export default BenchmarkCommands;
//# sourceMappingURL=benchmark.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"benchmark.d.ts","sourceRoot":"","sources":["benchmark.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAKH,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAEnD,MAAM,WAAW,mBAAmB;IAClC,IAAI,EAAE,QAAQ,GAAG,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC;IAC7C,IAAI,EAAE,MAAM,CAAC;IACb,GAAG,EAAE,MAAM,CAAC;CACb;AAED,MAAM,WAAW,sBAAsB;IACrC,MAAM,EAAE,MAAM,GAAG,OAAO,GAAG,UAAU,CAAC;CACvC;AAaD,qBAAa,iBAAiB;WACf,GAAG,CACd,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,mBAAmB,GAC3B,OAAO,CAAC,IAAI,CAAC;WA0GH,MAAM,CACjB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,sBAAsB,GAC9B,OAAO,CAAC,IAAI,CAAC;IAgFhB,MAAM,CAAC,QAAQ,IAAI,IAAI;CAmCxB;AAED,eAAe,iBAAiB,CAAC"}

View File

@@ -0,0 +1,205 @@
"use strict";
/**
* Benchmark Commands
* CLI commands for performance benchmarking
*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.BenchmarkCommands = void 0;
const chalk_1 = __importDefault(require("chalk"));
const ora_1 = __importDefault(require("ora"));
const cli_table3_1 = __importDefault(require("cli-table3"));
// Compiled CommonJS output (tsc); the source map points at benchmark.ts —
// prefer editing the TypeScript source and regenerating this file.
class BenchmarkCommands {
// Run the selected benchmark suite(s) against the server and print a
// latency/throughput table. Connects on entry; disconnects in finally.
static async run(client, options) {
const spinner = (0, ora_1.default)('Running benchmarks...').start();
try {
await client.connect();
// NOTE(review): parseInt with no radix and no NaN check — a non-numeric
// size/dim flows into runBenchmark as NaN.
const size = parseInt(options.size);
const dim = parseInt(options.dim);
const results = [];
// Vector benchmarks
if (options.type === 'vector' || options.type === 'all') {
spinner.text = 'Running vector benchmarks...';
const vectorResult = await client.runBenchmark('vector', size, dim);
results.push({
name: 'Vector Search',
operations: size,
totalTime: vectorResult.total_time,
avgTime: vectorResult.avg_time,
opsPerSec: vectorResult.ops_per_sec,
p50: vectorResult.p50,
p95: vectorResult.p95,
p99: vectorResult.p99,
});
}
// Attention benchmarks
if (options.type === 'attention' || options.type === 'all') {
spinner.text = 'Running attention benchmarks...';
const attentionResult = await client.runBenchmark('attention', size, dim);
results.push({
name: 'Attention',
operations: size,
totalTime: attentionResult.total_time,
avgTime: attentionResult.avg_time,
opsPerSec: attentionResult.ops_per_sec,
p50: attentionResult.p50,
p95: attentionResult.p95,
p99: attentionResult.p99,
});
}
// GNN benchmarks
if (options.type === 'gnn' || options.type === 'all') {
spinner.text = 'Running GNN benchmarks...';
const gnnResult = await client.runBenchmark('gnn', size, dim);
results.push({
name: 'GNN Forward',
operations: size,
totalTime: gnnResult.total_time,
avgTime: gnnResult.avg_time,
opsPerSec: gnnResult.ops_per_sec,
p50: gnnResult.p50,
p95: gnnResult.p95,
p99: gnnResult.p99,
});
}
spinner.succeed(chalk_1.default.green('Benchmarks completed'));
// Display results
console.log(chalk_1.default.bold.blue('\nBenchmark Results:'));
console.log(chalk_1.default.gray('─'.repeat(70)));
console.log(` ${chalk_1.default.gray('Dataset Size:')} ${size.toLocaleString()}`);
console.log(` ${chalk_1.default.gray('Dimensions:')} ${dim}`);
const table = new cli_table3_1.default({
head: [
chalk_1.default.cyan('Benchmark'),
chalk_1.default.cyan('Ops/sec'),
chalk_1.default.cyan('Avg (ms)'),
chalk_1.default.cyan('P50 (ms)'),
chalk_1.default.cyan('P95 (ms)'),
chalk_1.default.cyan('P99 (ms)')
],
colWidths: [18, 12, 12, 12, 12, 12]
});
for (const result of results) {
table.push([
result.name,
result.opsPerSec.toFixed(0),
result.avgTime.toFixed(3),
result.p50.toFixed(3),
result.p95.toFixed(3),
result.p99.toFixed(3)
]);
}
console.log(table.toString());
// Summary
const totalOps = results.reduce((sum, r) => sum + r.opsPerSec, 0);
console.log(`\n ${chalk_1.default.green('Total Throughput:')} ${totalOps.toFixed(0)} ops/sec`);
}
catch (err) {
spinner.fail(chalk_1.default.red('Benchmark failed'));
console.error(chalk_1.default.red(err.message));
}
finally {
await client.disconnect();
}
}
// Render the 10 most recent stored benchmark results as JSON, markdown,
// or (default) a table.
static async report(client, options) {
const spinner = (0, ora_1.default)('Generating benchmark report...').start();
try {
await client.connect();
// Get historical benchmark results
const results = await client.query('SELECT * FROM benchmark_results ORDER BY created_at DESC LIMIT 10');
spinner.stop();
if (results.length === 0) {
console.log(chalk_1.default.yellow('No benchmark results found'));
console.log(chalk_1.default.gray('Run benchmarks first: ruvector-pg bench run'));
return;
}
if (options.format === 'json') {
console.log(JSON.stringify(results, null, 2));
return;
}
if (options.format === 'markdown') {
console.log('# Benchmark Report\n');
console.log('| Type | Date | Ops/sec | Avg Time |');
console.log('|------|------|---------|----------|');
for (const result of results) {
const metrics = result.metrics;
console.log(`| ${result.benchmark_type} | ${result.created_at} | ` +
`${metrics.ops_per_sec?.toFixed(0) || 'N/A'} | ` +
`${metrics.avg_time?.toFixed(3) || 'N/A'}ms |`);
}
return;
}
// Default: table format
console.log(chalk_1.default.bold.blue('\nBenchmark History:'));
console.log(chalk_1.default.gray('─'.repeat(70)));
const table = new cli_table3_1.default({
head: [
chalk_1.default.cyan('ID'),
chalk_1.default.cyan('Type'),
chalk_1.default.cyan('Date'),
chalk_1.default.cyan('Ops/sec'),
chalk_1.default.cyan('Avg (ms)')
],
colWidths: [8, 15, 25, 12, 12]
});
for (const result of results) {
const metrics = result.metrics;
table.push([
String(result.id),
result.benchmark_type,
result.created_at,
metrics.ops_per_sec?.toFixed(0) || 'N/A',
metrics.avg_time?.toFixed(3) || 'N/A'
]);
}
console.log(table.toString());
}
catch (err) {
spinner.fail(chalk_1.default.red('Failed to generate report'));
console.error(chalk_1.default.red(err.message));
}
finally {
await client.disconnect();
}
}
// Print static help text describing the benchmark suites and options.
// No server connection is made here.
static showInfo() {
console.log(chalk_1.default.bold.blue('\nBenchmark System:'));
console.log(chalk_1.default.gray('─'.repeat(50)));
console.log(`
${chalk_1.default.yellow('Available Benchmarks:')}
${chalk_1.default.green('vector')} - Vector similarity search performance
HNSW index operations, cosine/L2/IP distances
${chalk_1.default.green('attention')} - Attention mechanism throughput
Scaled dot-product, multi-head, flash attention
${chalk_1.default.green('gnn')} - Graph Neural Network performance
GCN, GraphSAGE, GAT, GIN forward passes
${chalk_1.default.green('all')} - Run all benchmarks sequentially
${chalk_1.default.yellow('Options:')}
${chalk_1.default.gray('-s, --size')} Dataset size (default: 10000)
${chalk_1.default.gray('-d, --dim')} Vector dimensions (default: 384)
${chalk_1.default.yellow('Examples:')}
${chalk_1.default.gray('# Run all benchmarks with 100k vectors')}
ruvector-pg bench run -t all -s 100000
${chalk_1.default.gray('# Run vector benchmark with 768 dimensions')}
ruvector-pg bench run -t vector -d 768
${chalk_1.default.gray('# Generate markdown report')}
ruvector-pg bench report -f markdown
`);
}
}
exports.BenchmarkCommands = BenchmarkCommands;
exports.default = BenchmarkCommands;
//# sourceMappingURL=benchmark.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,262 @@
/**
* Benchmark Commands
* CLI commands for performance benchmarking
*/
import chalk from 'chalk';
import ora from 'ora';
import Table from 'cli-table3';
import type { RuVectorClient } from '../client.js';
/** CLI options for `bench run`; numeric values arrive as raw strings. */
export interface BenchmarkRunOptions {
/** Which benchmark suite to run. */
type: 'vector' | 'attention' | 'gnn' | 'all';
/** Dataset size as a decimal string. */
size: string;
/** Vector dimensionality as a decimal string. */
dim: string;
}
/** Output format for `bench report`. */
export interface BenchmarkReportOptions {
format: 'json' | 'table' | 'markdown';
}
/** One row of the printed benchmark results table (times in ms, per the column headers). */
interface BenchmarkResult {
name: string;
operations: number;
totalTime: number;
avgTime: number;
opsPerSec: number;
p50: number;
p95: number;
p99: number;
}
/**
 * CLI command handlers for performance benchmarking.
 *
 * Each command manages its own client connection lifecycle: connect on entry,
 * report progress via an ora spinner, and always disconnect in `finally`.
 */
export class BenchmarkCommands {
  /**
   * Parse a CLI numeric option into a positive integer.
   * Replaces bare `parseInt(x)` (no radix, no NaN check), which silently let
   * NaN or partial parses like `'10abc'` flow into the benchmark.
   * @throws Error with a descriptive message on invalid input.
   */
  private static parsePositiveInt(raw: string, label: string): number {
    const value = Number.parseInt(raw, 10);
    if (!Number.isInteger(value) || value <= 0) {
      throw new Error(`${label} must be a positive integer, got '${raw}'`);
    }
    return value;
  }

  /** Extract a printable message from an unknown caught value (no blind cast). */
  private static errorMessage(err: unknown): string {
    return err instanceof Error ? err.message : String(err);
  }

  /** Map a raw benchmark record from the server onto the display row shape. */
  private static toResult(
    name: string,
    operations: number,
    raw: Record<string, unknown>
  ): BenchmarkResult {
    return {
      name,
      operations,
      totalTime: raw.total_time as number,
      avgTime: raw.avg_time as number,
      opsPerSec: raw.ops_per_sec as number,
      p50: raw.p50 as number,
      p95: raw.p95 as number,
      p99: raw.p99 as number,
    };
  }

  /**
   * Run the selected benchmark suite(s) and print a latency/throughput table.
   *
   * @param client - RuVector client; connected here and disconnected in finally.
   * @param options - CLI options; size/dim arrive as decimal strings.
   */
  static async run(
    client: RuVectorClient,
    options: BenchmarkRunOptions
  ): Promise<void> {
    const spinner = ora('Running benchmarks...').start();
    try {
      await client.connect();
      const size = BenchmarkCommands.parsePositiveInt(options.size, 'size');
      const dim = BenchmarkCommands.parsePositiveInt(options.dim, 'dim');
      const results: BenchmarkResult[] = [];
      // One entry per suite; replaces three near-identical copy/paste blocks.
      const suites = [
        { kind: 'vector', label: 'Running vector benchmarks...', name: 'Vector Search' },
        { kind: 'attention', label: 'Running attention benchmarks...', name: 'Attention' },
        { kind: 'gnn', label: 'Running GNN benchmarks...', name: 'GNN Forward' },
      ] as const;
      for (const suite of suites) {
        if (options.type !== suite.kind && options.type !== 'all') continue;
        spinner.text = suite.label;
        const raw = await client.runBenchmark(suite.kind, size, dim);
        results.push(BenchmarkCommands.toResult(suite.name, size, raw as Record<string, unknown>));
      }
      spinner.succeed(chalk.green('Benchmarks completed'));
      // Display results
      console.log(chalk.bold.blue('\nBenchmark Results:'));
      console.log(chalk.gray('─'.repeat(70)));
      console.log(` ${chalk.gray('Dataset Size:')} ${size.toLocaleString()}`);
      console.log(` ${chalk.gray('Dimensions:')} ${dim}`);
      const table = new Table({
        head: [
          chalk.cyan('Benchmark'),
          chalk.cyan('Ops/sec'),
          chalk.cyan('Avg (ms)'),
          chalk.cyan('P50 (ms)'),
          chalk.cyan('P95 (ms)'),
          chalk.cyan('P99 (ms)')
        ],
        colWidths: [18, 12, 12, 12, 12, 12]
      });
      for (const result of results) {
        table.push([
          result.name,
          result.opsPerSec.toFixed(0),
          result.avgTime.toFixed(3),
          result.p50.toFixed(3),
          result.p95.toFixed(3),
          result.p99.toFixed(3)
        ]);
      }
      console.log(table.toString());
      // Summary
      const totalOps = results.reduce((sum, r) => sum + r.opsPerSec, 0);
      console.log(`\n ${chalk.green('Total Throughput:')} ${totalOps.toFixed(0)} ops/sec`);
    } catch (err) {
      spinner.fail(chalk.red('Benchmark failed'));
      console.error(chalk.red(BenchmarkCommands.errorMessage(err)));
    } finally {
      await client.disconnect();
    }
  }

  /**
   * Render the 10 most recent stored benchmark results as JSON, markdown,
   * or (default) a table.
   */
  static async report(
    client: RuVectorClient,
    options: BenchmarkReportOptions
  ): Promise<void> {
    const spinner = ora('Generating benchmark report...').start();
    try {
      await client.connect();
      // Get historical benchmark results
      const results = await client.query<{
        id: number;
        benchmark_type: string;
        created_at: string;
        metrics: Record<string, unknown>;
      }>(
        'SELECT * FROM benchmark_results ORDER BY created_at DESC LIMIT 10'
      );
      spinner.stop();
      if (results.length === 0) {
        console.log(chalk.yellow('No benchmark results found'));
        console.log(chalk.gray('Run benchmarks first: ruvector-pg bench run'));
        return;
      }
      if (options.format === 'json') {
        console.log(JSON.stringify(results, null, 2));
        return;
      }
      if (options.format === 'markdown') {
        console.log('# Benchmark Report\n');
        console.log('| Type | Date | Ops/sec | Avg Time |');
        console.log('|------|------|---------|----------|');
        for (const result of results) {
          const metrics = result.metrics as { ops_per_sec?: number; avg_time?: number };
          // `??` instead of `||`: only fall back when the metric is absent
          // (toFixed never returns a falsy string, so behavior is identical).
          console.log(
            `| ${result.benchmark_type} | ${result.created_at} | ` +
            `${metrics.ops_per_sec?.toFixed(0) ?? 'N/A'} | ` +
            `${metrics.avg_time?.toFixed(3) ?? 'N/A'}ms |`
          );
        }
        return;
      }
      // Default: table format
      console.log(chalk.bold.blue('\nBenchmark History:'));
      console.log(chalk.gray('─'.repeat(70)));
      const table = new Table({
        head: [
          chalk.cyan('ID'),
          chalk.cyan('Type'),
          chalk.cyan('Date'),
          chalk.cyan('Ops/sec'),
          chalk.cyan('Avg (ms)')
        ],
        colWidths: [8, 15, 25, 12, 12]
      });
      for (const result of results) {
        const metrics = result.metrics as { ops_per_sec?: number; avg_time?: number };
        table.push([
          String(result.id),
          result.benchmark_type,
          result.created_at,
          metrics.ops_per_sec?.toFixed(0) ?? 'N/A',
          metrics.avg_time?.toFixed(3) ?? 'N/A'
        ]);
      }
      console.log(table.toString());
    } catch (err) {
      spinner.fail(chalk.red('Failed to generate report'));
      console.error(chalk.red(BenchmarkCommands.errorMessage(err)));
    } finally {
      await client.disconnect();
    }
  }

  /** Print static help text for the benchmark suites; no server connection. */
  static showInfo(): void {
    console.log(chalk.bold.blue('\nBenchmark System:'));
    console.log(chalk.gray('─'.repeat(50)));
    console.log(`
${chalk.yellow('Available Benchmarks:')}
${chalk.green('vector')} - Vector similarity search performance
HNSW index operations, cosine/L2/IP distances
${chalk.green('attention')} - Attention mechanism throughput
Scaled dot-product, multi-head, flash attention
${chalk.green('gnn')} - Graph Neural Network performance
GCN, GraphSAGE, GAT, GIN forward passes
${chalk.green('all')} - Run all benchmarks sequentially
${chalk.yellow('Options:')}
${chalk.gray('-s, --size')} Dataset size (default: 10000)
${chalk.gray('-d, --dim')} Vector dimensions (default: 384)
${chalk.yellow('Examples:')}
${chalk.gray('# Run all benchmarks with 100k vectors')}
ruvector-pg bench run -t all -s 100000
${chalk.gray('# Run vector benchmark with 768 dimensions')}
ruvector-pg bench run -t vector -d 768
${chalk.gray('# Generate markdown report')}
ruvector-pg bench report -f markdown
`);
  }
}
export default BenchmarkCommands;

View File

@@ -0,0 +1,21 @@
/**
* GNN Commands
* CLI commands for Graph Neural Network operations
*/
import type { RuVectorClient } from '../client.js';
/** Options for creating a GNN layer; dimensions arrive as raw strings. */
export interface GnnCreateOptions {
/** Layer architecture to instantiate. */
type: 'gcn' | 'graphsage' | 'gat' | 'gin';
/** Input feature dimensionality as a decimal string. */
inputDim: string;
/** Output embedding dimensionality as a decimal string. */
outputDim: string;
}
/** Options for a forward pass; both fields are paths to JSON files on disk. */
export interface GnnForwardOptions {
features: string;
edges: string;
}
/** Static handlers for the GNN CLI subcommands (generated declaration file). */
export declare class GnnCommands {
static create(client: RuVectorClient, name: string, options: GnnCreateOptions): Promise<void>;
static forward(client: RuVectorClient, layer: string, options: GnnForwardOptions): Promise<void>;
static listTypes(): Promise<void>;
}
export default GnnCommands;
//# sourceMappingURL=gnn.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"gnn.d.ts","sourceRoot":"","sources":["gnn.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAMH,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAEnD,MAAM,WAAW,gBAAgB;IAC/B,IAAI,EAAE,KAAK,GAAG,WAAW,GAAG,KAAK,GAAG,KAAK,CAAC;IAC1C,QAAQ,EAAE,MAAM,CAAC;IACjB,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,iBAAiB;IAChC,QAAQ,EAAE,MAAM,CAAC;IACjB,KAAK,EAAE,MAAM,CAAC;CACf;AAED,qBAAa,WAAW;WACT,MAAM,CACjB,MAAM,EAAE,cAAc,EACtB,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,gBAAgB,GACxB,OAAO,CAAC,IAAI,CAAC;WAsCH,OAAO,CAClB,MAAM,EAAE,cAAc,EACtB,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,iBAAiB,GACzB,OAAO,CAAC,IAAI,CAAC;WA2DH,SAAS,IAAI,OAAO,CAAC,IAAI,CAAC;CAkCxC;AAED,eAAe,WAAW,CAAC"}

View File

@@ -0,0 +1,126 @@
"use strict";
/**
* GNN Commands
* CLI commands for Graph Neural Network operations
*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.GnnCommands = void 0;
const chalk_1 = __importDefault(require("chalk"));
const ora_1 = __importDefault(require("ora"));
const cli_table3_1 = __importDefault(require("cli-table3"));
const fs_1 = require("fs");
class GnnCommands {
static async create(client, name, options) {
const spinner = (0, ora_1.default)(`Creating GNN layer '${name}'...`).start();
try {
await client.connect();
await client.createGnnLayer(name, options.type, parseInt(options.inputDim), parseInt(options.outputDim));
spinner.succeed(chalk_1.default.green(`GNN layer '${name}' created successfully`));
console.log(chalk_1.default.bold.blue('\nLayer Configuration:'));
console.log(chalk_1.default.gray('─'.repeat(40)));
console.log(` ${chalk_1.default.green('Type:')} ${options.type.toUpperCase()}`);
console.log(` ${chalk_1.default.green('Input Dimensions:')} ${options.inputDim}`);
console.log(` ${chalk_1.default.green('Output Dimensions:')} ${options.outputDim}`);
// Type-specific info
const typeInfo = {
gcn: 'Graph Convolutional Network - Spectral graph convolutions',
graphsage: 'GraphSAGE - Inductive learning with neighborhood sampling',
gat: 'Graph Attention Network - Attention-based message passing',
gin: 'Graph Isomorphism Network - WL-test expressive power'
};
console.log(`\n ${chalk_1.default.gray(typeInfo[options.type])}`);
}
catch (err) {
spinner.fail(chalk_1.default.red('Failed to create GNN layer'));
console.error(chalk_1.default.red(err.message));
}
finally {
await client.disconnect();
}
}
    /**
     * Run a forward pass through a previously created GNN layer and print
     * the resulting node embeddings (first 5 nodes, first 8 dims each).
     *
     * Compiled output of gnn.ts — keep in sync with the TypeScript source.
     *
     * @param client  RuVector client; connected here, always disconnected in finally.
     * @param layer   Name of the layer to run.
     * @param options Paths to JSON files: features (number[][]) and edges ([src, dst][]).
     */
    static async forward(client, layer, options) {
        const spinner = (0, ora_1.default)(`Running forward pass through '${layer}'...`).start();
        try {
            await client.connect();
            // Load features and edges from files
            const featuresContent = (0, fs_1.readFileSync)(options.features, 'utf-8');
            const edgesContent = (0, fs_1.readFileSync)(options.edges, 'utf-8');
            const features = JSON.parse(featuresContent);
            const edges = JSON.parse(edgesContent);
            // Extract src and dst from edges
            const src = edges.map(e => e[0]);
            const dst = edges.map(e => e[1]);
            // Output dim mirrors the input feature width; falls back to 64 when
            // the feature matrix is empty or its first row has length 0.
            const outDim = features[0]?.length || 64;
            const result = await client.gnnForward(layer, features, src, dst, outDim);
            spinner.succeed(chalk_1.default.green('Forward pass completed successfully'));
            console.log(chalk_1.default.bold.blue('\nGNN Output:'));
            console.log(chalk_1.default.gray('─'.repeat(40)));
            console.log(` ${chalk_1.default.green('Nodes:')} ${result.length}`);
            console.log(` ${chalk_1.default.green('Embedding Dim:')} ${result[0]?.length || 0}`);
            // Show sample embeddings
            console.log(chalk_1.default.bold.blue('\nSample Node Embeddings:'));
            const table = new cli_table3_1.default({
                head: [
                    chalk_1.default.cyan('Node'),
                    chalk_1.default.cyan('Embedding (first 8 dims)')
                ],
                colWidths: [8, 60]
            });
            for (let i = 0; i < Math.min(5, result.length); i++) {
                const emb = result[i];
                table.push([
                    `${i}`,
                    `[${emb.slice(0, 8).map((v) => v.toFixed(4)).join(', ')}${emb.length > 8 ? '...' : ''}]`
                ]);
            }
            console.log(table.toString());
            if (result.length > 5) {
                console.log(chalk_1.default.gray(` ... and ${result.length - 5} more nodes`));
            }
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Forward pass failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    /**
     * Print a static catalog of the supported GNN layer types (GCN,
     * GraphSAGE, GAT, GIN). Purely informational — no server calls.
     *
     * Compiled output of gnn.ts — keep in sync with the TypeScript source.
     */
    static async listTypes() {
        console.log(chalk_1.default.bold.blue('\nAvailable GNN Layer Types:'));
        console.log(chalk_1.default.gray('─'.repeat(50)));
        // Hard-coded catalog; must match the 'type' union accepted by create().
        const types = [
            {
                name: 'GCN',
                desc: 'Graph Convolutional Network',
                details: 'Spectral graph convolutions using Chebyshev polynomials'
            },
            {
                name: 'GraphSAGE',
                desc: 'Sample and Aggregate',
                details: 'Inductive learning with neighborhood sampling and aggregation'
            },
            {
                name: 'GAT',
                desc: 'Graph Attention Network',
                details: 'Attention-weighted message passing between nodes'
            },
            {
                name: 'GIN',
                desc: 'Graph Isomorphism Network',
                details: 'Provably as powerful as WL-test for graph isomorphism'
            }
        ];
        for (const type of types) {
            console.log(`\n ${chalk_1.default.yellow(type.name)} - ${type.desc}`);
            console.log(` ${chalk_1.default.gray(type.details)}`);
        }
        console.log();
    }
}
exports.GnnCommands = GnnCommands;
exports.default = GnnCommands;
//# sourceMappingURL=gnn.js.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"gnn.js","sourceRoot":"","sources":["gnn.ts"],"names":[],"mappings":";AAAA;;;GAGG;;;;;;AAEH,kDAA0B;AAC1B,8CAAsB;AACtB,4DAA+B;AAC/B,2BAAkC;AAclC,MAAa,WAAW;IACtB,MAAM,CAAC,KAAK,CAAC,MAAM,CACjB,MAAsB,EACtB,IAAY,EACZ,OAAyB;QAEzB,MAAM,OAAO,GAAG,IAAA,aAAG,EAAC,uBAAuB,IAAI,MAAM,CAAC,CAAC,KAAK,EAAE,CAAC;QAE/D,IAAI,CAAC;YACH,MAAM,MAAM,CAAC,OAAO,EAAE,CAAC;YAEvB,MAAM,MAAM,CAAC,cAAc,CACzB,IAAI,EACJ,OAAO,CAAC,IAAI,EACZ,QAAQ,CAAC,OAAO,CAAC,QAAQ,CAAC,EAC1B,QAAQ,CAAC,OAAO,CAAC,SAAS,CAAC,CAC5B,CAAC;YAEF,OAAO,CAAC,OAAO,CAAC,eAAK,CAAC,KAAK,CAAC,cAAc,IAAI,wBAAwB,CAAC,CAAC,CAAC;YAEzE,OAAO,CAAC,GAAG,CAAC,eAAK,CAAC,IAAI,CAAC,IAAI,CAAC,wBAAwB,CAAC,CAAC,CAAC;YACvD,OAAO,CAAC,GAAG,CAAC,eAAK,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;YACxC,OAAO,CAAC,GAAG,CAAC,KAAK,eAAK,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,OAAO,CAAC,IAAI,CAAC,WAAW,EAAE,EAAE,CAAC,CAAC;YACvE,OAAO,CAAC,GAAG,CAAC,KAAK,eAAK,CAAC,KAAK,CAAC,mBAAmB,CAAC,IAAI,OAAO,CAAC,QAAQ,EAAE,CAAC,CAAC;YACzE,OAAO,CAAC,GAAG,CAAC,KAAK,eAAK,CAAC,KAAK,CAAC,oBAAoB,CAAC,IAAI,OAAO,CAAC,SAAS,EAAE,CAAC,CAAC;YAE3E,qBAAqB;YACrB,MAAM,QAAQ,GAA2B;gBACvC,GAAG,EAAE,2DAA2D;gBAChE,SAAS,EAAE,2DAA2D;gBACtE,GAAG,EAAE,2DAA2D;gBAChE,GAAG,EAAE,sDAAsD;aAC5D,CAAC;YAEF,OAAO,CAAC,GAAG,CAAC,OAAO,eAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC;QAC3D,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,OAAO,CAAC,IAAI,CAAC,eAAK,CAAC,GAAG,CAAC,4BAA4B,CAAC,CAAC,CAAC;YACtD,OAAO,CAAC,KAAK,CAAC,eAAK,CAAC,GAAG,CAAE,GAAa,CAAC,OAAO,CAAC,CAAC,CAAC;QACnD,CAAC;gBAAS,CAAC;YACT,MAAM,MAAM,CAAC,UAAU,EAAE,CAAC;QAC5B,CAAC;IACH,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,OAAO,CAClB,MAAsB,EACtB,KAAa,EACb,OAA0B;QAE1B,MAAM,OAAO,GAAG,IAAA,aAAG,EAAC,iCAAiC,KAAK,MAAM,CAAC,CAAC,KAAK,EAAE,CAAC;QAE1E,IAAI,CAAC;YACH,MAAM,MAAM,CAAC,OAAO,EAAE,CAAC;YAEvB,qCAAqC;YACrC,MAAM,eAAe,GAAG,IAAA,iBAAY,EAAC,OAAO,CAAC,QAAQ,EAAE,OAAO,CAAC,CAAC;YAChE,MAAM,YAAY,GAAG,IAAA,iBAAY,EAAC,OAAO,CAAC,KAAK,EAAE,OAAO,CAAC,CAAC;YAE1D,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,eAAe,CAAe,CAAC;YAC3D,MAAM,KAAK,GAAG,IAAI,CAAC,KAAK
,CAAC,YAAY,CAAuB,CAAC;YAE7D,iCAAiC;YACjC,MAAM,GAAG,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;YACjC,MAAM,GAAG,GAAG,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC;YACjC,MAAM,MAAM,GAAG,QAAQ,CAAC,CAAC,CAAC,EAAE,MAAM,IAAI,EAAE,CAAC;YAEzC,MAAM,MAAM,GAAG,MAAM,MAAM,CAAC,UAAU,CAAC,KAAuB,EAAE,QAAQ,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,CAAC,CAAC;YAE5F,OAAO,CAAC,OAAO,CAAC,eAAK,CAAC,KAAK,CAAC,qCAAqC,CAAC,CAAC,CAAC;YAEpE,OAAO,CAAC,GAAG,CAAC,eAAK,CAAC,IAAI,CAAC,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC;YAC9C,OAAO,CAAC,GAAG,CAAC,eAAK,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;YACxC,OAAO,CAAC,GAAG,CAAC,KAAK,eAAK,CAAC,KAAK,CAAC,QAAQ,CAAC,IAAI,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC;YAC3D,OAAO,CAAC,GAAG,CAAC,KAAK,eAAK,CAAC,KAAK,CAAC,gBAAgB,CAAC,IAAI,MAAM,CAAC,CAAC,CAAC,EAAE,MAAM,IAAI,CAAC,EAAE,CAAC,CAAC;YAE5E,yBAAyB;YACzB,OAAO,CAAC,GAAG,CAAC,eAAK,CAAC,IAAI,CAAC,IAAI,CAAC,2BAA2B,CAAC,CAAC,CAAC;YAE1D,MAAM,KAAK,GAAG,IAAI,oBAAK,CAAC;gBACtB,IAAI,EAAE;oBACJ,eAAK,CAAC,IAAI,CAAC,MAAM,CAAC;oBAClB,eAAK,CAAC,IAAI,CAAC,0BAA0B,CAAC;iBACvC;gBACD,SAAS,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC;aACnB,CAAC,CAAC;YAEH,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,MAAM,CAAC,MAAM,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC;gBACpD,MAAM,GAAG,GAAG,MAAM,CAAC,CAAC,CAAC,CAAC;gBACtB,KAAK,CAAC,IAAI,CAAC;oBACT,GAAG,CAAC,EAAE;oBACN,IAAI,GAAG,CAAC,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAS,EAAE,EAAE,CAAC,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,GAAG,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,GAAG;iBACjG,CAAC,CAAC;YACL,CAAC;YAED,OAAO,CAAC,GAAG,CAAC,KAAK,CAAC,QAAQ,EAAE,CAAC,CAAC;YAE9B,IAAI,MAAM,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;gBACtB,OAAO,CAAC,GAAG,CAAC,eAAK,CAAC,IAAI,CAAC,aAAa,MAAM,CAAC,MAAM,GAAG,CAAC,aAAa,CAAC,CAAC,CAAC;YACvE,CAAC;QACH,CAAC;QAAC,OAAO,GAAG,EAAE,CAAC;YACb,OAAO,CAAC,IAAI,CAAC,eAAK,CAAC,GAAG,CAAC,qBAAqB,CAAC,CAAC,CAAC;YAC/C,OAAO,CAAC,KAAK,CAAC,eAAK,CAAC,GAAG,CAAE,GAAa,CAAC,OAAO,CAAC,CAAC,CAAC;QACnD,CAAC;gBAAS,CAAC;
YACT,MAAM,MAAM,CAAC,UAAU,EAAE,CAAC;QAC5B,CAAC;IACH,CAAC;IAED,MAAM,CAAC,KAAK,CAAC,SAAS;QACpB,OAAO,CAAC,GAAG,CAAC,eAAK,CAAC,IAAI,CAAC,IAAI,CAAC,8BAA8B,CAAC,CAAC,CAAC;QAC7D,OAAO,CAAC,GAAG,CAAC,eAAK,CAAC,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC;QAExC,MAAM,KAAK,GAAG;YACZ;gBACE,IAAI,EAAE,KAAK;gBACX,IAAI,EAAE,6BAA6B;gBACnC,OAAO,EAAE,yDAAyD;aACnE;YACD;gBACE,IAAI,EAAE,WAAW;gBACjB,IAAI,EAAE,sBAAsB;gBAC5B,OAAO,EAAE,+DAA+D;aACzE;YACD;gBACE,IAAI,EAAE,KAAK;gBACX,IAAI,EAAE,yBAAyB;gBAC/B,OAAO,EAAE,kDAAkD;aAC5D;YACD;gBACE,IAAI,EAAE,KAAK;gBACX,IAAI,EAAE,2BAA2B;gBACjC,OAAO,EAAE,uDAAuD;aACjE;SACF,CAAC;QAEF,KAAK,MAAM,IAAI,IAAI,KAAK,EAAE,CAAC;YACzB,OAAO,CAAC,GAAG,CAAC,OAAO,eAAK,CAAC,MAAM,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;YAC7D,OAAO,CAAC,GAAG,CAAC,OAAO,eAAK,CAAC,IAAI,CAAC,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC;QACjD,CAAC;QAED,OAAO,CAAC,GAAG,EAAE,CAAC;IAChB,CAAC;CACF;AA5ID,kCA4IC;AAED,kBAAe,WAAW,CAAC"}

View File

@@ -0,0 +1,165 @@
/**
* GNN Commands
* CLI commands for Graph Neural Network operations
*/
import chalk from 'chalk';
import ora from 'ora';
import Table from 'cli-table3';
import { readFileSync } from 'fs';
import type { RuVectorClient } from '../client.js';
/** CLI options for `gnn create`; dimension flags arrive as raw strings. */
export interface GnnCreateOptions {
  type: 'gcn' | 'graphsage' | 'gat' | 'gin';
  inputDim: string;
  outputDim: string;
}
/** CLI options for `gnn forward`: paths to JSON feature/edge files. */
export interface GnnForwardOptions {
  features: string;
  edges: string;
}
/**
 * CLI command handlers for Graph Neural Network (GNN) layer operations:
 * creating layers, running forward passes, and listing supported types.
 */
export class GnnCommands {
  /**
   * Create a named GNN layer on the server.
   *
   * Dimensions arrive as CLI strings; they are parsed with an explicit
   * radix and rejected when non-numeric, instead of silently forwarding
   * NaN to the server (the previous behavior).
   *
   * @param client - RuVector client; connected here, disconnected in finally.
   * @param name - Name under which the layer is registered.
   * @param options - Layer type plus input/output dimension strings.
   */
  static async create(
    client: RuVectorClient,
    name: string,
    options: GnnCreateOptions
  ): Promise<void> {
    const spinner = ora(`Creating GNN layer '${name}'...`).start();
    try {
      await client.connect();
      // Fail fast on non-numeric dimension flags; the thrown Error is
      // surfaced through the catch branch below (spinner.fail + stderr).
      const inputDim = Number.parseInt(options.inputDim, 10);
      const outputDim = Number.parseInt(options.outputDim, 10);
      if (Number.isNaN(inputDim) || Number.isNaN(outputDim)) {
        throw new Error(
          `Invalid dimensions: input='${options.inputDim}', output='${options.outputDim}' (integers expected)`
        );
      }
      await client.createGnnLayer(name, options.type, inputDim, outputDim);
      spinner.succeed(chalk.green(`GNN layer '${name}' created successfully`));
      console.log(chalk.bold.blue('\nLayer Configuration:'));
      console.log(chalk.gray('─'.repeat(40)));
      console.log(` ${chalk.green('Type:')} ${options.type.toUpperCase()}`);
      console.log(` ${chalk.green('Input Dimensions:')} ${inputDim}`);
      console.log(` ${chalk.green('Output Dimensions:')} ${outputDim}`);
      // Short human-readable description per layer type.
      const typeInfo: Record<string, string> = {
        gcn: 'Graph Convolutional Network - Spectral graph convolutions',
        graphsage: 'GraphSAGE - Inductive learning with neighborhood sampling',
        gat: 'Graph Attention Network - Attention-based message passing',
        gin: 'Graph Isomorphism Network - WL-test expressive power'
      };
      console.log(`\n ${chalk.gray(typeInfo[options.type])}`);
    } catch (err) {
      spinner.fail(chalk.red('Failed to create GNN layer'));
      console.error(chalk.red((err as Error).message));
    } finally {
      await client.disconnect();
    }
  }
  /**
   * Run a forward pass through layer `layer` and print the resulting node
   * embeddings (first 5 nodes, first 8 dimensions each).
   *
   * @param client - RuVector client; connected here, disconnected in finally.
   * @param layer - Name of the layer to run.
   * @param options - Paths to JSON files: features is a number[][] matrix
   *   (one vector per node), edges is a [src, dst][] pair list.
   */
  static async forward(
    client: RuVectorClient,
    layer: string,
    options: GnnForwardOptions
  ): Promise<void> {
    const spinner = ora(`Running forward pass through '${layer}'...`).start();
    try {
      await client.connect();
      // Load features and edges from files
      const featuresContent = readFileSync(options.features, 'utf-8');
      const edgesContent = readFileSync(options.edges, 'utf-8');
      const features = JSON.parse(featuresContent) as number[][];
      const edges = JSON.parse(edgesContent) as [number, number][];
      // Extract src and dst from edges
      const src = edges.map(e => e[0]);
      const dst = edges.map(e => e[1]);
      // Output dim mirrors input feature width; 64 when the matrix is empty.
      const outDim = features[0]?.length || 64;
      // NOTE(review): `layer` is a user-supplied layer *name*; the cast to
      // 'gcn' | 'sage' only satisfies the client signature — confirm that
      // gnnForward really keys on layer name, not layer type.
      const result = await client.gnnForward(layer as 'gcn' | 'sage', features, src, dst, outDim);
      spinner.succeed(chalk.green('Forward pass completed successfully'));
      console.log(chalk.bold.blue('\nGNN Output:'));
      console.log(chalk.gray('─'.repeat(40)));
      console.log(` ${chalk.green('Nodes:')} ${result.length}`);
      console.log(` ${chalk.green('Embedding Dim:')} ${result[0]?.length || 0}`);
      // Show sample embeddings
      console.log(chalk.bold.blue('\nSample Node Embeddings:'));
      const table = new Table({
        head: [
          chalk.cyan('Node'),
          chalk.cyan('Embedding (first 8 dims)')
        ],
        colWidths: [8, 60]
      });
      for (let i = 0; i < Math.min(5, result.length); i++) {
        const emb = result[i];
        table.push([
          `${i}`,
          `[${emb.slice(0, 8).map((v: number) => v.toFixed(4)).join(', ')}${emb.length > 8 ? '...' : ''}]`
        ]);
      }
      console.log(table.toString());
      if (result.length > 5) {
        console.log(chalk.gray(` ... and ${result.length - 5} more nodes`));
      }
    } catch (err) {
      spinner.fail(chalk.red('Forward pass failed'));
      console.error(chalk.red((err as Error).message));
    } finally {
      await client.disconnect();
    }
  }
  /**
   * Print a static catalog of the supported GNN layer types.
   * Purely informational — makes no server calls.
   */
  static async listTypes(): Promise<void> {
    console.log(chalk.bold.blue('\nAvailable GNN Layer Types:'));
    console.log(chalk.gray('─'.repeat(50)));
    // Hard-coded catalog; must stay in sync with GnnCreateOptions['type'].
    const types = [
      {
        name: 'GCN',
        desc: 'Graph Convolutional Network',
        details: 'Spectral graph convolutions using Chebyshev polynomials'
      },
      {
        name: 'GraphSAGE',
        desc: 'Sample and Aggregate',
        details: 'Inductive learning with neighborhood sampling and aggregation'
      },
      {
        name: 'GAT',
        desc: 'Graph Attention Network',
        details: 'Attention-weighted message passing between nodes'
      },
      {
        name: 'GIN',
        desc: 'Graph Isomorphism Network',
        details: 'Provably as powerful as WL-test for graph isomorphism'
      }
    ];
    for (const type of types) {
      console.log(`\n ${chalk.yellow(type.name)} - ${type.desc}`);
      console.log(` ${chalk.gray(type.details)}`);
    }
    console.log();
  }
}
export default GnnCommands;

View File

@@ -0,0 +1,22 @@
/**
 * Graph Commands
 * CLI commands for graph operations and Cypher queries
 */
import type { RuVectorClient } from '../client.js';
/** Options for `graph create-node`. */
export interface CreateNodeOptions {
    /** Comma-separated node labels, e.g. "Person,Employee". */
    labels: string;
    /** JSON-encoded property map for the new node. */
    properties: string;
}
/** Options for `graph traverse`. */
export interface TraverseOptions {
    /** Id of the start node (interpolated into the generated Cypher). */
    start: string;
    /** Maximum traversal depth (upper bound of the variable-length match). */
    depth: string;
    /** Algorithm label echoed in the output. */
    type: 'bfs' | 'dfs';
}
/** Generated declarations for graph.ts — keep in sync with the source. */
export declare class GraphCommands {
    static query(client: RuVectorClient, cypher: string): Promise<void>;
    static createNode(client: RuVectorClient, options: CreateNodeOptions): Promise<void>;
    static traverse(client: RuVectorClient, options: TraverseOptions): Promise<void>;
    static showSyntax(): void;
}
export default GraphCommands;
//# sourceMappingURL=graph.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"graph.d.ts","sourceRoot":"","sources":["graph.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAKH,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAEnD,MAAM,WAAW,iBAAiB;IAChC,MAAM,EAAE,MAAM,CAAC;IACf,UAAU,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,eAAe;IAC9B,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,KAAK,GAAG,KAAK,CAAC;CACrB;AAED,qBAAa,aAAa;WACX,KAAK,CAChB,MAAM,EAAE,cAAc,EACtB,MAAM,EAAE,MAAM,GACb,OAAO,CAAC,IAAI,CAAC;WAmDH,UAAU,CACrB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,iBAAiB,GACzB,OAAO,CAAC,IAAI,CAAC;WA8BH,QAAQ,CACnB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,eAAe,GACvB,OAAO,CAAC,IAAI,CAAC;IA+ChB,MAAM,CAAC,UAAU,IAAI,IAAI;CAoB1B;AAED,eAAe,aAAa,CAAC"}

View File

@@ -0,0 +1,141 @@
"use strict";
/**
* Graph Commands
* CLI commands for graph operations and Cypher queries
*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.GraphCommands = void 0;
const chalk_1 = __importDefault(require("chalk"));
const ora_1 = __importDefault(require("ora"));
const cli_table3_1 = __importDefault(require("cli-table3"));
/**
 * CLI command handlers for graph operations and Cypher queries.
 * Compiled output of graph.ts — keep in sync with the TypeScript source.
 */
class GraphCommands {
    /**
     * Execute a raw Cypher query against the 'default' graph and print up
     * to 20 result rows as a table (columns inferred from the first row).
     */
    static async query(client, cypher) {
        const spinner = (0, ora_1.default)('Executing Cypher query...').start();
        try {
            await client.connect();
            const results = await client.cypherQuery('default', cypher);
            spinner.stop();
            if (!results || results.length === 0) {
                console.log(chalk_1.default.yellow('Query executed successfully, no results returned'));
                return;
            }
            console.log(chalk_1.default.bold.blue(`\nQuery Results (${results.length} rows):`));
            console.log(chalk_1.default.gray('─'.repeat(60)));
            // Auto-detect columns from first result
            const firstRow = results[0];
            const columns = Object.keys(firstRow);
            const table = new cli_table3_1.default({
                head: columns.map(c => chalk_1.default.cyan(c)),
                colWidths: columns.map(() => Math.floor(60 / columns.length))
            });
            for (const row of results.slice(0, 20)) {
                const r = row;
                table.push(columns.map(c => {
                    const val = r[c];
                    // Objects are JSON-stringified; everything is clipped to 20 chars.
                    if (typeof val === 'object') {
                        return JSON.stringify(val).slice(0, 20) + '...';
                    }
                    return String(val).slice(0, 20);
                }));
            }
            console.log(table.toString());
            if (results.length > 20) {
                console.log(chalk_1.default.gray(`... and ${results.length - 20} more rows`));
            }
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Query failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    /**
     * Create a node from comma-separated labels and a JSON property map,
     * then print the new node's id, labels, and properties.
     */
    static async createNode(client, options) {
        const spinner = (0, ora_1.default)('Creating graph node...').start();
        try {
            await client.connect();
            const labels = options.labels.split(',').map(l => l.trim());
            const properties = JSON.parse(options.properties);
            const nodeId = await client.addNode('default', labels, properties);
            spinner.succeed(chalk_1.default.green('Node created successfully'));
            console.log(chalk_1.default.bold.blue('\nNode Details:'));
            console.log(chalk_1.default.gray('─'.repeat(40)));
            console.log(` ${chalk_1.default.green('ID:')} ${nodeId}`);
            console.log(` ${chalk_1.default.green('Labels:')} ${labels.join(', ')}`);
            console.log(` ${chalk_1.default.green('Properties:')}`);
            for (const [key, value] of Object.entries(properties)) {
                console.log(` ${chalk_1.default.gray(key + ':')} ${JSON.stringify(value)}`);
            }
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Failed to create node'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    /**
     * Traverse outward from a start node via a variable-length Cypher MATCH
     * and print up to 10 found nodes.
     *
     * NOTE(review): options.start/options.depth are interpolated into the
     * Cypher string unvalidated, and options.type is display-only (the
     * query shape does not change between bfs/dfs) — fixed in graph.ts.
     */
    static async traverse(client, options) {
        const spinner = (0, ora_1.default)(`Traversing graph from node ${options.start}...`).start();
        try {
            await client.connect();
            // Use Cypher query to find neighbors
            const cypherQuery = `MATCH (n)-[*1..${options.depth}]-(m) WHERE id(n) = ${options.start} RETURN m`;
            const results = await client.cypherQuery('default', cypherQuery);
            spinner.succeed(chalk_1.default.green('Traversal completed'));
            console.log(chalk_1.default.bold.blue('\nTraversal Results:'));
            console.log(chalk_1.default.gray('─'.repeat(50)));
            console.log(` ${chalk_1.default.green('Algorithm:')} ${options.type.toUpperCase()}`);
            console.log(` ${chalk_1.default.green('Max Depth:')} ${options.depth}`);
            console.log(` ${chalk_1.default.green('Nodes Found:')} ${results.length}`);
            // Show nodes found
            if (results.length > 0) {
                console.log(chalk_1.default.bold.blue('\nFound Nodes:'));
                const nodeTable = new cli_table3_1.default({
                    head: [chalk_1.default.cyan('Node')],
                    colWidths: [60]
                });
                for (const row of results.slice(0, 10)) {
                    nodeTable.push([
                        JSON.stringify(row).slice(0, 55) + '...'
                    ]);
                }
                console.log(nodeTable.toString());
                if (results.length > 10) {
                    console.log(chalk_1.default.gray(`... and ${results.length - 10} more nodes`));
                }
            }
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Traversal failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    /**
     * Print Cypher syntax examples. Purely informational — no server calls.
     */
    static showSyntax() {
        console.log(chalk_1.default.bold.blue('\nCypher Query Syntax:'));
        console.log(chalk_1.default.gray('─'.repeat(60)));
        const examples = [
            { query: 'MATCH (n) RETURN n LIMIT 10', desc: 'Return first 10 nodes' },
            { query: 'MATCH (n:Person) RETURN n', desc: 'Find all Person nodes' },
            { query: 'MATCH (a)-[r]->(b) RETURN a,r,b', desc: 'Find relationships' },
            { query: "MATCH (n {name: 'Alice'}) RETURN n", desc: 'Find by property' },
            { query: 'MATCH p=(a)-[*1..3]->(b) RETURN p', desc: 'Variable-length path' },
            { query: "CREATE (n:Person {name: 'Bob'}) RETURN n", desc: 'Create a node' },
        ];
        for (const ex of examples) {
            console.log(`\n ${chalk_1.default.yellow(ex.desc)}`);
            console.log(` ${chalk_1.default.green('>')} ${ex.query}`);
        }
        console.log();
    }
}
exports.GraphCommands = GraphCommands;
exports.default = GraphCommands;
//# sourceMappingURL=graph.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,182 @@
/**
* Graph Commands
* CLI commands for graph operations and Cypher queries
*/
import chalk from 'chalk';
import ora from 'ora';
import Table from 'cli-table3';
import type { RuVectorClient } from '../client.js';
/** Options for `graph create-node`: comma-separated labels + JSON properties. */
export interface CreateNodeOptions {
  labels: string;
  properties: string;
}
/** Options for `graph traverse`; start/depth are CLI strings, type is display-only. */
export interface TraverseOptions {
  start: string;
  depth: string;
  type: 'bfs' | 'dfs';
}
/**
 * CLI command handlers for graph operations and Cypher queries.
 */
export class GraphCommands {
  /**
   * Execute a raw Cypher query against the 'default' graph and print up to
   * 20 result rows as a table (columns inferred from the first row).
   *
   * @param client - RuVector client; connected here, disconnected in finally.
   * @param cypher - Cypher query text, passed through verbatim.
   */
  static async query(
    client: RuVectorClient,
    cypher: string
  ): Promise<void> {
    const spinner = ora('Executing Cypher query...').start();
    try {
      await client.connect();
      const results = await client.cypherQuery('default', cypher);
      spinner.stop();
      if (!results || results.length === 0) {
        console.log(chalk.yellow('Query executed successfully, no results returned'));
        return;
      }
      console.log(chalk.bold.blue(`\nQuery Results (${results.length} rows):`));
      console.log(chalk.gray('─'.repeat(60)));
      // Auto-detect columns from first result
      const firstRow = results[0] as Record<string, unknown>;
      const columns = Object.keys(firstRow);
      const table = new Table({
        head: columns.map(c => chalk.cyan(c)),
        colWidths: columns.map(() => Math.floor(60 / columns.length))
      });
      for (const row of results.slice(0, 20)) {
        const r = row as Record<string, unknown>;
        table.push(columns.map(c => GraphCommands.formatCell(r[c])));
      }
      console.log(table.toString());
      if (results.length > 20) {
        console.log(chalk.gray(`... and ${results.length - 20} more rows`));
      }
    } catch (err) {
      spinner.fail(chalk.red('Query failed'));
      console.error(chalk.red((err as Error).message));
    } finally {
      await client.disconnect();
    }
  }
  /**
   * Render one table cell, appending '...' only when the value was
   * actually truncated (the old code always appended it for objects and
   * never for strings). null is rendered as plain 'null', not via
   * JSON.stringify, since `typeof null === 'object'`.
   */
  private static formatCell(val: unknown): string {
    const text = val !== null && typeof val === 'object' ? JSON.stringify(val) : String(val);
    return text.length > 20 ? text.slice(0, 20) + '...' : text;
  }
  /**
   * Create a node from comma-separated labels and a JSON property map,
   * then print the new node's id, labels, and properties.
   *
   * @param client - RuVector client; connected here, disconnected in finally.
   * @param options - labels: "A,B,..."; properties: JSON object string.
   */
  static async createNode(
    client: RuVectorClient,
    options: CreateNodeOptions
  ): Promise<void> {
    const spinner = ora('Creating graph node...').start();
    try {
      await client.connect();
      // Drop empty entries produced by inputs like "a,,b" or trailing commas.
      const labels = options.labels.split(',').map(l => l.trim()).filter(l => l.length > 0);
      const properties = JSON.parse(options.properties) as Record<string, unknown>;
      const nodeId = await client.addNode('default', labels, properties);
      spinner.succeed(chalk.green('Node created successfully'));
      console.log(chalk.bold.blue('\nNode Details:'));
      console.log(chalk.gray('─'.repeat(40)));
      console.log(` ${chalk.green('ID:')} ${nodeId}`);
      console.log(` ${chalk.green('Labels:')} ${labels.join(', ')}`);
      console.log(` ${chalk.green('Properties:')}`);
      for (const [key, value] of Object.entries(properties)) {
        console.log(` ${chalk.gray(key + ':')} ${JSON.stringify(value)}`);
      }
    } catch (err) {
      spinner.fail(chalk.red('Failed to create node'));
      console.error(chalk.red((err as Error).message));
    } finally {
      await client.disconnect();
    }
  }
  /**
   * Traverse outward from a start node via a variable-length Cypher MATCH
   * and print up to 10 found nodes.
   *
   * start/depth are validated as integers before being interpolated into
   * the Cypher string, so arbitrary Cypher cannot be injected through
   * these CLI options (previously they were interpolated unvalidated).
   *
   * @param client - RuVector client; connected here, disconnected in finally.
   * @param options - start node id, max depth, and display-only algorithm label.
   */
  static async traverse(
    client: RuVectorClient,
    options: TraverseOptions
  ): Promise<void> {
    const spinner = ora(`Traversing graph from node ${options.start}...`).start();
    try {
      await client.connect();
      // Injection guard: only plain integers may reach the query string.
      const startId = Number.parseInt(options.start, 10);
      const depth = Number.parseInt(options.depth, 10);
      if (!Number.isInteger(startId) || !Number.isInteger(depth) || depth < 1) {
        throw new Error(
          `Invalid start/depth: start='${options.start}', depth='${options.depth}' (integers expected, depth >= 1)`
        );
      }
      // Use Cypher query to find neighbors
      const cypherQuery = `MATCH (n)-[*1..${depth}]-(m) WHERE id(n) = ${startId} RETURN m`;
      const results = await client.cypherQuery('default', cypherQuery);
      spinner.succeed(chalk.green('Traversal completed'));
      console.log(chalk.bold.blue('\nTraversal Results:'));
      console.log(chalk.gray('─'.repeat(50)));
      // NOTE(review): the query shape is identical for bfs and dfs; the
      // label below is informational only — confirm whether that is intended.
      console.log(` ${chalk.green('Algorithm:')} ${options.type.toUpperCase()}`);
      console.log(` ${chalk.green('Max Depth:')} ${depth}`);
      console.log(` ${chalk.green('Nodes Found:')} ${results.length}`);
      // Show nodes found
      if (results.length > 0) {
        console.log(chalk.bold.blue('\nFound Nodes:'));
        const nodeTable = new Table({
          head: [chalk.cyan('Node')],
          colWidths: [60]
        });
        for (const row of results.slice(0, 10)) {
          // Append '...' only when the JSON was actually truncated.
          const json = JSON.stringify(row);
          nodeTable.push([json.length > 55 ? json.slice(0, 55) + '...' : json]);
        }
        console.log(nodeTable.toString());
        if (results.length > 10) {
          console.log(chalk.gray(`... and ${results.length - 10} more nodes`));
        }
      }
    } catch (err) {
      spinner.fail(chalk.red('Traversal failed'));
      console.error(chalk.red((err as Error).message));
    } finally {
      await client.disconnect();
    }
  }
  /**
   * Print Cypher syntax examples. Purely informational — no server calls.
   */
  static showSyntax(): void {
    console.log(chalk.bold.blue('\nCypher Query Syntax:'));
    console.log(chalk.gray('─'.repeat(60)));
    const examples = [
      { query: 'MATCH (n) RETURN n LIMIT 10', desc: 'Return first 10 nodes' },
      { query: 'MATCH (n:Person) RETURN n', desc: 'Find all Person nodes' },
      { query: 'MATCH (a)-[r]->(b) RETURN a,r,b', desc: 'Find relationships' },
      { query: "MATCH (n {name: 'Alice'}) RETURN n", desc: 'Find by property' },
      { query: 'MATCH p=(a)-[*1..3]->(b) RETURN p', desc: 'Variable-length path' },
      { query: "CREATE (n:Person {name: 'Bob'}) RETURN n", desc: 'Create a node' },
    ];
    for (const ex of examples) {
      console.log(`\n ${chalk.yellow(ex.desc)}`);
      console.log(` ${chalk.green('>')} ${ex.query}`);
    }
    console.log();
  }
}
export default GraphCommands;

View File

@@ -0,0 +1,50 @@
/**
* Hyperbolic Geometry Commands
* CLI commands for hyperbolic embedding operations (Poincare ball, Lorentz model)
*
* NOTE: These functions require the hyperbolic geometry module to be enabled
* in the RuVector PostgreSQL extension. Currently in development.
*/
import type { RuVectorClient } from '../client.js';
/** Two Poincare-ball points (JSON array strings) + optional curvature string. */
export interface PoincareDistanceOptions {
    a: string;
    b: string;
    curvature?: string;
}
/** Two Lorentz-model points (JSON array strings) + optional curvature string. */
export interface LorentzDistanceOptions {
    a: string;
    b: string;
    curvature?: string;
}
/** Operands for Mobius addition in the Poincare ball. */
export interface MobiusAddOptions {
    a: string;
    b: string;
    curvature?: string;
}
/** Base point and tangent vector for the exponential map. */
export interface ExpMapOptions {
    base: string;
    tangent: string;
    curvature?: string;
}
/** Base and target points for the logarithmic map. */
export interface LogMapOptions {
    base: string;
    target: string;
    curvature?: string;
}
/** A single vector to convert between Poincare and Lorentz coordinates. */
export interface ConvertOptions {
    vector: string;
    curvature?: string;
}
/** Generated declarations for hyperbolic.ts — keep in sync with the source. */
export declare class HyperbolicCommands {
    static poincareDistance(client: RuVectorClient, options: PoincareDistanceOptions): Promise<void>;
    static lorentzDistance(client: RuVectorClient, options: LorentzDistanceOptions): Promise<void>;
    static mobiusAdd(client: RuVectorClient, options: MobiusAddOptions): Promise<void>;
    static expMap(client: RuVectorClient, options: ExpMapOptions): Promise<void>;
    static logMap(client: RuVectorClient, options: LogMapOptions): Promise<void>;
    static poincareToLorentz(client: RuVectorClient, options: ConvertOptions): Promise<void>;
    static lorentzToPoincare(client: RuVectorClient, options: ConvertOptions): Promise<void>;
    static minkowskiDot(client: RuVectorClient, a: string, b: string): Promise<void>;
    static showHelp(): void;
}
export default HyperbolicCommands;
//# sourceMappingURL=hyperbolic.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"hyperbolic.d.ts","sourceRoot":"","sources":["hyperbolic.ts"],"names":[],"mappings":"AAAA;;;;;;GAMG;AAKH,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AA6BnD,MAAM,WAAW,uBAAuB;IACtC,CAAC,EAAE,MAAM,CAAC;IACV,CAAC,EAAE,MAAM,CAAC;IACV,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,sBAAsB;IACrC,CAAC,EAAE,MAAM,CAAC;IACV,CAAC,EAAE,MAAM,CAAC;IACV,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,gBAAgB;IAC/B,CAAC,EAAE,MAAM,CAAC;IACV,CAAC,EAAE,MAAM,CAAC;IACV,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAChB,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,aAAa;IAC5B,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,MAAM,WAAW,cAAc;IAC7B,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,qBAAa,kBAAkB;WAChB,gBAAgB,CAC3B,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,uBAAuB,GAC/B,OAAO,CAAC,IAAI,CAAC;WAgCH,eAAe,CAC1B,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,sBAAsB,GAC9B,OAAO,CAAC,IAAI,CAAC;WAgCH,SAAS,CACpB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,gBAAgB,GACxB,OAAO,CAAC,IAAI,CAAC;WAmCH,MAAM,CACjB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,aAAa,GACrB,OAAO,CAAC,IAAI,CAAC;WAgCH,MAAM,CACjB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,aAAa,GACrB,OAAO,CAAC,IAAI,CAAC;WAgCH,iBAAiB,CAC5B,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,cAAc,GACtB,OAAO,CAAC,IAAI,CAAC;WA+BH,iBAAiB,CAC5B,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,cAAc,GACtB,OAAO,CAAC,IAAI,CAAC;WA+BH,YAAY,CACvB,MAAM,EAAE,cAAc,EACtB,CAAC,EAAE,MAAM,EACT,CAAC,EAAE,MAAM,GACR,OAAO,CAAC,IAAI,CAAC;IA8BhB,MAAM,CAAC,QAAQ,IAAI,IAAI;CAkCxB;AAED,eAAe,kBAAkB,CAAC"}

View File

@@ -0,0 +1,292 @@
"use strict";
/**
* Hyperbolic Geometry Commands
* CLI commands for hyperbolic embedding operations (Poincare ball, Lorentz model)
*
* NOTE: These functions require the hyperbolic geometry module to be enabled
* in the RuVector PostgreSQL extension. Currently in development.
*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.HyperbolicCommands = void 0;
const chalk_1 = __importDefault(require("chalk"));
const ora_1 = __importDefault(require("ora"));
// Guidance printed when hyperbolic support is unavailable; lists the SQL
// functions exposed by the RuVector PostgreSQL extension.
const HYPERBOLIC_REQUIRES_EXTENSION_MSG = `
${chalk_1.default.yellow('Hyperbolic geometry requires the RuVector PostgreSQL extension.')}
Ensure you have:
 1. Built the ruvector-postgres Docker image
 2. Started a container with the extension installed
 3. Run: CREATE EXTENSION ruvector;
Available functions:
 - ruvector_poincare_distance(a, b, curvature)
 - ruvector_lorentz_distance(a, b, curvature)
 - ruvector_mobius_add(a, b, curvature)
 - ruvector_exp_map(base, tangent, curvature)
 - ruvector_log_map(base, target, curvature)
 - ruvector_poincare_to_lorentz(poincare, curvature)
 - ruvector_lorentz_to_poincare(lorentz, curvature)
 - ruvector_minkowski_dot(a, b)
${chalk_1.default.gray('See: https://github.com/ruvnet/ruvector for setup instructions.')}
`;
// Feature gate for all hyperbolic commands. Always true now that the
// extension ships the functions; kept so the message path can be revived.
function checkHyperbolicAvailable() {
    // Hyperbolic geometry functions are now implemented in the PostgreSQL extension
    // The functions are available in ruvector--0.1.0.sql
    return true;
}
class HyperbolicCommands {
    /**
     * Compute and print the distance between two points in the Poincare
     * ball. Points are JSON-encoded arrays; curvature defaults to -1.0.
     * Compiled output of hyperbolic.ts — keep in sync with the source.
     */
    static async poincareDistance(client, options) {
        if (!checkHyperbolicAvailable()) {
            console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
            return;
        }
        const spinner = (0, ora_1.default)('Computing Poincare distance...').start();
        try {
            await client.connect();
            const a = JSON.parse(options.a);
            const b = JSON.parse(options.b);
            // Curvature is optional on the CLI; -1.0 is the standard default.
            const curvature = options.curvature ? parseFloat(options.curvature) : -1.0;
            const distance = await client.poincareDistance(a, b, curvature);
            spinner.succeed(chalk_1.default.green('Poincare distance computed'));
            console.log(chalk_1.default.bold.blue('\nPoincare Distance:'));
            console.log(chalk_1.default.gray('-'.repeat(40)));
            console.log(` ${chalk_1.default.green('Distance:')} ${distance.toFixed(6)}`);
            console.log(` ${chalk_1.default.green('Curvature:')} ${curvature}`);
            console.log(` ${chalk_1.default.green('Dimension:')} ${a.length}`);
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Distance computation failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    /**
     * Compute and print the distance between two points on the Lorentz
     * hyperboloid. Points are JSON-encoded arrays; curvature defaults to -1.0.
     * Compiled output of hyperbolic.ts — keep in sync with the source.
     */
    static async lorentzDistance(client, options) {
        if (!checkHyperbolicAvailable()) {
            console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
            return;
        }
        const spinner = (0, ora_1.default)('Computing Lorentz distance...').start();
        try {
            await client.connect();
            const a = JSON.parse(options.a);
            const b = JSON.parse(options.b);
            // Curvature is optional on the CLI; -1.0 is the standard default.
            const curvature = options.curvature ? parseFloat(options.curvature) : -1.0;
            const distance = await client.lorentzDistance(a, b, curvature);
            spinner.succeed(chalk_1.default.green('Lorentz distance computed'));
            console.log(chalk_1.default.bold.blue('\nLorentz Distance:'));
            console.log(chalk_1.default.gray('-'.repeat(40)));
            console.log(` ${chalk_1.default.green('Distance:')} ${distance.toFixed(6)}`);
            console.log(` ${chalk_1.default.green('Curvature:')} ${curvature}`);
            console.log(` ${chalk_1.default.green('Dimension:')} ${a.length}`);
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Distance computation failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    /**
     * Compute Mobius addition of two Poincare-ball points and print the
     * result plus a norm check (valid ball points have Euclidean norm < 1).
     * Compiled output of hyperbolic.ts — keep in sync with the source.
     */
    static async mobiusAdd(client, options) {
        if (!checkHyperbolicAvailable()) {
            console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
            return;
        }
        const spinner = (0, ora_1.default)('Computing Mobius addition...').start();
        try {
            await client.connect();
            const a = JSON.parse(options.a);
            const b = JSON.parse(options.b);
            const curvature = options.curvature ? parseFloat(options.curvature) : -1.0;
            const result = await client.mobiusAdd(a, b, curvature);
            spinner.succeed(chalk_1.default.green('Mobius addition computed'));
            console.log(chalk_1.default.bold.blue('\nMobius Addition Result:'));
            console.log(chalk_1.default.gray('-'.repeat(40)));
            console.log(` ${chalk_1.default.green('Curvature:')} ${curvature}`);
            console.log(` ${chalk_1.default.green('Result:')} [${result.map((v) => v.toFixed(4)).join(', ')}]`);
            // Verify result is in ball
            const norm = Math.sqrt(result.reduce((sum, v) => sum + v * v, 0));
            console.log(` ${chalk_1.default.green('Result Norm:')} ${norm.toFixed(6)} ${norm < 1 ? chalk_1.default.green('(valid)') : chalk_1.default.red('(invalid)')}`);
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Mobius addition failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    /**
     * Apply the exponential map: project a tangent vector at a base point
     * onto the manifold. Inputs are JSON-encoded arrays.
     * Compiled output of hyperbolic.ts — keep in sync with the source.
     */
    static async expMap(client, options) {
        if (!checkHyperbolicAvailable()) {
            console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
            return;
        }
        const spinner = (0, ora_1.default)('Computing exponential map...').start();
        try {
            await client.connect();
            const base = JSON.parse(options.base);
            const tangent = JSON.parse(options.tangent);
            const curvature = options.curvature ? parseFloat(options.curvature) : -1.0;
            const result = await client.expMap(base, tangent, curvature);
            spinner.succeed(chalk_1.default.green('Exponential map computed'));
            console.log(chalk_1.default.bold.blue('\nExponential Map Result:'));
            console.log(chalk_1.default.gray('-'.repeat(40)));
            console.log(` ${chalk_1.default.green('Base Point:')} [${base.map((v) => v.toFixed(4)).join(', ')}]`);
            console.log(` ${chalk_1.default.green('Tangent Vector:')} [${tangent.map((v) => v.toFixed(4)).join(', ')}]`);
            console.log(` ${chalk_1.default.green('Result (on manifold):')} [${result.map((v) => v.toFixed(4)).join(', ')}]`);
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Exponential map failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    /**
     * Apply the logarithmic map: express a target point as a tangent
     * vector at a base point (inverse of expMap). Inputs are JSON arrays.
     * Compiled output of hyperbolic.ts — keep in sync with the source.
     */
    static async logMap(client, options) {
        if (!checkHyperbolicAvailable()) {
            console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
            return;
        }
        const spinner = (0, ora_1.default)('Computing logarithmic map...').start();
        try {
            await client.connect();
            const base = JSON.parse(options.base);
            const target = JSON.parse(options.target);
            const curvature = options.curvature ? parseFloat(options.curvature) : -1.0;
            const result = await client.logMap(base, target, curvature);
            spinner.succeed(chalk_1.default.green('Logarithmic map computed'));
            console.log(chalk_1.default.bold.blue('\nLogarithmic Map Result:'));
            console.log(chalk_1.default.gray('-'.repeat(40)));
            console.log(` ${chalk_1.default.green('Base Point:')} [${base.map((v) => v.toFixed(4)).join(', ')}]`);
            console.log(` ${chalk_1.default.green('Target Point:')} [${target.map((v) => v.toFixed(4)).join(', ')}]`);
            console.log(` ${chalk_1.default.green('Tangent (at base):')} [${result.map((v) => v.toFixed(4)).join(', ')}]`);
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Logarithmic map failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    /**
     * CLI handler: convert a Poincare-ball point to Lorentz (hyperboloid)
     * coordinates. The output gains one dimension (printed at the end).
     * `options.vector` is a JSON-encoded number array.
     */
    static async poincareToLorentz(client, options) {
        if (!checkHyperbolicAvailable()) {
            console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
            return;
        }
        const spinner = (0, ora_1.default)('Converting Poincare to Lorentz...').start();
        try {
            await client.connect();
            const poincare = JSON.parse(options.vector);
            const curvature = options.curvature ? parseFloat(options.curvature) : -1.0;
            const lorentz = await client.poincareToLorentz(poincare, curvature);
            spinner.succeed(chalk_1.default.green('Conversion completed'));
            console.log(chalk_1.default.bold.blue('\nCoordinate Conversion:'));
            console.log(chalk_1.default.gray('-'.repeat(40)));
            console.log(` ${chalk_1.default.green('Poincare (ball):')} [${poincare.map((v) => v.toFixed(4)).join(', ')}]`);
            console.log(` ${chalk_1.default.green('Lorentz (hyperboloid):')} [${lorentz.map((v) => v.toFixed(4)).join(', ')}]`);
            console.log(` ${chalk_1.default.green('Dimension change:')} ${poincare.length} -> ${lorentz.length}`);
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Conversion failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            // Always release the connection, even after a failure.
            await client.disconnect();
        }
    }
    /**
     * CLI handler: convert a Lorentz (hyperboloid) point back to the
     * Poincare ball. Inverse of poincareToLorentz; output loses one dimension.
     * `options.vector` is a JSON-encoded number array.
     */
    static async lorentzToPoincare(client, options) {
        if (!checkHyperbolicAvailable()) {
            console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
            return;
        }
        const spinner = (0, ora_1.default)('Converting Lorentz to Poincare...').start();
        try {
            await client.connect();
            const lorentz = JSON.parse(options.vector);
            const curvature = options.curvature ? parseFloat(options.curvature) : -1.0;
            const poincare = await client.lorentzToPoincare(lorentz, curvature);
            spinner.succeed(chalk_1.default.green('Conversion completed'));
            console.log(chalk_1.default.bold.blue('\nCoordinate Conversion:'));
            console.log(chalk_1.default.gray('-'.repeat(40)));
            console.log(` ${chalk_1.default.green('Lorentz (hyperboloid):')} [${lorentz.map((v) => v.toFixed(4)).join(', ')}]`);
            console.log(` ${chalk_1.default.green('Poincare (ball):')} [${poincare.map((v) => v.toFixed(4)).join(', ')}]`);
            console.log(` ${chalk_1.default.green('Dimension change:')} ${lorentz.length} -> ${poincare.length}`);
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Conversion failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            // Always release the connection, even after a failure.
            await client.disconnect();
        }
    }
    /**
     * CLI handler: Minkowski inner product of two vectors (raw JSON strings,
     * not an options object — note the different signature from the other
     * commands). No curvature parameter; the product uses signature (-,+,+,...,+).
     */
    static async minkowskiDot(client, a, b) {
        if (!checkHyperbolicAvailable()) {
            console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
            return;
        }
        const spinner = (0, ora_1.default)('Computing Minkowski inner product...').start();
        try {
            await client.connect();
            const vecA = JSON.parse(a);
            const vecB = JSON.parse(b);
            const result = await client.minkowskiDot(vecA, vecB);
            spinner.succeed(chalk_1.default.green('Minkowski inner product computed'));
            console.log(chalk_1.default.bold.blue('\nMinkowski Inner Product:'));
            console.log(chalk_1.default.gray('-'.repeat(40)));
            console.log(` ${chalk_1.default.green('Result:')} ${result.toFixed(6)}`);
            console.log(` ${chalk_1.default.gray('Note:')} Uses signature (-,+,+,...,+)`);
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Computation failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            // Always release the connection, even after a failure.
            await client.disconnect();
        }
    }
    /**
     * Print the static help text for the `hyperbolic` command group.
     * Purely informational; performs no client I/O.
     */
    static showHelp() {
        console.log(chalk_1.default.bold.blue('\nHyperbolic Geometry Operations:'));
        console.log(chalk_1.default.gray('-'.repeat(60)));
        console.log(`
${chalk_1.default.yellow('Overview:')}
  Hyperbolic space is ideal for embedding hierarchical data like
  taxonomies, organizational charts, and knowledge graphs.
${chalk_1.default.yellow('Models:')}
  ${chalk_1.default.green('Poincare Ball')} - Unit ball model, good for visualization
  ${chalk_1.default.green('Lorentz/Hyperboloid')} - Numerically stable, good for training
${chalk_1.default.yellow('Curvature:')}
  Default curvature is -1.0. More negative = more "curved" space.
  Must always be negative for hyperbolic geometry.
${chalk_1.default.yellow('Commands:')}
  ${chalk_1.default.green('hyperbolic poincare-distance')} - Distance in Poincare ball
  ${chalk_1.default.green('hyperbolic lorentz-distance')} - Distance on hyperboloid
  ${chalk_1.default.green('hyperbolic mobius-add')} - Hyperbolic addition
  ${chalk_1.default.green('hyperbolic exp-map')} - Tangent to manifold
  ${chalk_1.default.green('hyperbolic log-map')} - Manifold to tangent
  ${chalk_1.default.green('hyperbolic poincare-to-lorentz')} - Convert coordinates
  ${chalk_1.default.green('hyperbolic lorentz-to-poincare')} - Convert coordinates
  ${chalk_1.default.green('hyperbolic minkowski-dot')} - Minkowski inner product
${chalk_1.default.yellow('Use Cases:')}
  - Hierarchical clustering
  - Knowledge graph embeddings
  - Taxonomy representation
  - Social network analysis
`);
    }
}
exports.HyperbolicCommands = HyperbolicCommands;
exports.default = HyperbolicCommands;
//# sourceMappingURL=hyperbolic.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,393 @@
/**
* Hyperbolic Geometry Commands
* CLI commands for hyperbolic embedding operations (Poincare ball, Lorentz model)
*
* NOTE: These functions require the hyperbolic geometry module to be enabled
* in the RuVector PostgreSQL extension. Currently in development.
*/
import chalk from 'chalk';
import ora from 'ora';
import Table from 'cli-table3';
import type { RuVectorClient } from '../client.js';
const HYPERBOLIC_REQUIRES_EXTENSION_MSG = `
${chalk.yellow('Hyperbolic geometry requires the RuVector PostgreSQL extension.')}
Ensure you have:
1. Built the ruvector-postgres Docker image
2. Started a container with the extension installed
3. Run: CREATE EXTENSION ruvector;
Available functions:
- ruvector_poincare_distance(a, b, curvature)
- ruvector_lorentz_distance(a, b, curvature)
- ruvector_mobius_add(a, b, curvature)
- ruvector_exp_map(base, tangent, curvature)
- ruvector_log_map(base, target, curvature)
- ruvector_poincare_to_lorentz(poincare, curvature)
- ruvector_lorentz_to_poincare(lorentz, curvature)
- ruvector_minkowski_dot(a, b)
${chalk.gray('See: https://github.com/ruvnet/ruvector for setup instructions.')}
`;
function checkHyperbolicAvailable(): boolean {
  // The hyperbolic functions now ship with the RuVector PostgreSQL
  // extension (ruvector--0.1.0.sql), so availability is unconditional.
  // Kept as a function so a real probe can be reinstated later.
  const available = true;
  return available;
}
/** Options for `hyperbolic poincare-distance`. Vectors are JSON-encoded number arrays. */
export interface PoincareDistanceOptions {
  a: string;
  b: string;
  curvature?: string; // negative float as string; defaults to "-1.0" when omitted
}
/** Options for `hyperbolic lorentz-distance`. Vectors are JSON-encoded number arrays. */
export interface LorentzDistanceOptions {
  a: string;
  b: string;
  curvature?: string; // negative float as string; defaults to "-1.0" when omitted
}
/** Options for `hyperbolic mobius-add`. Vectors are JSON-encoded number arrays. */
export interface MobiusAddOptions {
  a: string;
  b: string;
  curvature?: string; // negative float as string; defaults to "-1.0" when omitted
}
/** Options for `hyperbolic exp-map` (tangent vector at `base` -> manifold point). */
export interface ExpMapOptions {
  base: string;
  tangent: string;
  curvature?: string; // negative float as string; defaults to "-1.0" when omitted
}
/** Options for `hyperbolic log-map` (manifold `target` -> tangent vector at `base`). */
export interface LogMapOptions {
  base: string;
  target: string;
  curvature?: string; // negative float as string; defaults to "-1.0" when omitted
}
/** Options for the Poincare <-> Lorentz coordinate conversion commands. */
export interface ConvertOptions {
  vector: string;
  curvature?: string; // negative float as string; defaults to "-1.0" when omitted
}
export class HyperbolicCommands {
  /**
   * Derive a printable message from an unknown thrown value.
   * Replaces the unsafe `(err as Error).message` assertion used previously:
   * non-Error throws (strings, plain objects) are stringified instead of
   * printing `undefined`. Compatible with strict `useUnknownInCatchVariables`.
   */
  private static errorMessage(err: unknown): string {
    return err instanceof Error ? err.message : String(err);
  }
  /**
   * Print the geodesic distance between two points in the Poincare ball.
   * @param client - RuVector client; connected here and disconnected in `finally`.
   * @param options - JSON-encoded vectors `a`/`b`; curvature defaults to -1.0.
   */
  static async poincareDistance(
    client: RuVectorClient,
    options: PoincareDistanceOptions
  ): Promise<void> {
    if (!checkHyperbolicAvailable()) {
      console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
      return;
    }
    const spinner = ora('Computing Poincare distance...').start();
    try {
      await client.connect();
      const a = JSON.parse(options.a);
      const b = JSON.parse(options.b);
      // Hyperbolic curvature must be negative; -1.0 is the conventional default.
      const curvature = options.curvature ? parseFloat(options.curvature) : -1.0;
      const distance = await client.poincareDistance(a, b, curvature);
      spinner.succeed(chalk.green('Poincare distance computed'));
      console.log(chalk.bold.blue('\nPoincare Distance:'));
      console.log(chalk.gray('-'.repeat(40)));
      console.log(` ${chalk.green('Distance:')} ${distance.toFixed(6)}`);
      console.log(` ${chalk.green('Curvature:')} ${curvature}`);
      console.log(` ${chalk.green('Dimension:')} ${a.length}`);
    } catch (err: unknown) {
      spinner.fail(chalk.red('Distance computation failed'));
      console.error(chalk.red(HyperbolicCommands.errorMessage(err)));
    } finally {
      await client.disconnect();
    }
  }
  /**
   * Print the geodesic distance between two points on the Lorentz hyperboloid.
   * @param client - RuVector client; connected here and disconnected in `finally`.
   * @param options - JSON-encoded vectors `a`/`b`; curvature defaults to -1.0.
   */
  static async lorentzDistance(
    client: RuVectorClient,
    options: LorentzDistanceOptions
  ): Promise<void> {
    if (!checkHyperbolicAvailable()) {
      console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
      return;
    }
    const spinner = ora('Computing Lorentz distance...').start();
    try {
      await client.connect();
      const a = JSON.parse(options.a);
      const b = JSON.parse(options.b);
      const curvature = options.curvature ? parseFloat(options.curvature) : -1.0;
      const distance = await client.lorentzDistance(a, b, curvature);
      spinner.succeed(chalk.green('Lorentz distance computed'));
      console.log(chalk.bold.blue('\nLorentz Distance:'));
      console.log(chalk.gray('-'.repeat(40)));
      console.log(` ${chalk.green('Distance:')} ${distance.toFixed(6)}`);
      console.log(` ${chalk.green('Curvature:')} ${curvature}`);
      console.log(` ${chalk.green('Dimension:')} ${a.length}`);
    } catch (err: unknown) {
      spinner.fail(chalk.red('Distance computation failed'));
      console.error(chalk.red(HyperbolicCommands.errorMessage(err)));
    } finally {
      await client.disconnect();
    }
  }
  /**
   * Perform Mobius addition of two Poincare-ball points and print the result,
   * including a norm check (the result must stay strictly inside the unit ball).
   * @param client - RuVector client; connected here and disconnected in `finally`.
   * @param options - JSON-encoded vectors `a`/`b`; curvature defaults to -1.0.
   */
  static async mobiusAdd(
    client: RuVectorClient,
    options: MobiusAddOptions
  ): Promise<void> {
    if (!checkHyperbolicAvailable()) {
      console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
      return;
    }
    const spinner = ora('Computing Mobius addition...').start();
    try {
      await client.connect();
      const a = JSON.parse(options.a);
      const b = JSON.parse(options.b);
      const curvature = options.curvature ? parseFloat(options.curvature) : -1.0;
      const result = await client.mobiusAdd(a, b, curvature);
      spinner.succeed(chalk.green('Mobius addition computed'));
      console.log(chalk.bold.blue('\nMobius Addition Result:'));
      console.log(chalk.gray('-'.repeat(40)));
      console.log(` ${chalk.green('Curvature:')} ${curvature}`);
      console.log(` ${chalk.green('Result:')} [${result.map((v: number) => v.toFixed(4)).join(', ')}]`);
      // Verify result is in ball
      const norm = Math.sqrt(result.reduce((sum: number, v: number) => sum + v * v, 0));
      console.log(` ${chalk.green('Result Norm:')} ${norm.toFixed(6)} ${norm < 1 ? chalk.green('(valid)') : chalk.red('(invalid)')}`);
    } catch (err: unknown) {
      spinner.fail(chalk.red('Mobius addition failed'));
      console.error(chalk.red(HyperbolicCommands.errorMessage(err)));
    } finally {
      await client.disconnect();
    }
  }
  /**
   * Exponential map: project a tangent vector at `base` onto the manifold.
   * @param client - RuVector client; connected here and disconnected in `finally`.
   * @param options - JSON-encoded `base`/`tangent`; curvature defaults to -1.0.
   */
  static async expMap(
    client: RuVectorClient,
    options: ExpMapOptions
  ): Promise<void> {
    if (!checkHyperbolicAvailable()) {
      console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
      return;
    }
    const spinner = ora('Computing exponential map...').start();
    try {
      await client.connect();
      const base = JSON.parse(options.base);
      const tangent = JSON.parse(options.tangent);
      const curvature = options.curvature ? parseFloat(options.curvature) : -1.0;
      const result = await client.expMap(base, tangent, curvature);
      spinner.succeed(chalk.green('Exponential map computed'));
      console.log(chalk.bold.blue('\nExponential Map Result:'));
      console.log(chalk.gray('-'.repeat(40)));
      console.log(` ${chalk.green('Base Point:')} [${base.map((v: number) => v.toFixed(4)).join(', ')}]`);
      console.log(` ${chalk.green('Tangent Vector:')} [${tangent.map((v: number) => v.toFixed(4)).join(', ')}]`);
      console.log(` ${chalk.green('Result (on manifold):')} [${result.map((v: number) => v.toFixed(4)).join(', ')}]`);
    } catch (err: unknown) {
      spinner.fail(chalk.red('Exponential map failed'));
      console.error(chalk.red(HyperbolicCommands.errorMessage(err)));
    } finally {
      await client.disconnect();
    }
  }
  /**
   * Logarithmic map: inverse of expMap — map manifold point `target` to a
   * tangent vector at `base`.
   * @param client - RuVector client; connected here and disconnected in `finally`.
   * @param options - JSON-encoded `base`/`target`; curvature defaults to -1.0.
   */
  static async logMap(
    client: RuVectorClient,
    options: LogMapOptions
  ): Promise<void> {
    if (!checkHyperbolicAvailable()) {
      console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
      return;
    }
    const spinner = ora('Computing logarithmic map...').start();
    try {
      await client.connect();
      const base = JSON.parse(options.base);
      const target = JSON.parse(options.target);
      const curvature = options.curvature ? parseFloat(options.curvature) : -1.0;
      const result = await client.logMap(base, target, curvature);
      spinner.succeed(chalk.green('Logarithmic map computed'));
      console.log(chalk.bold.blue('\nLogarithmic Map Result:'));
      console.log(chalk.gray('-'.repeat(40)));
      console.log(` ${chalk.green('Base Point:')} [${base.map((v: number) => v.toFixed(4)).join(', ')}]`);
      console.log(` ${chalk.green('Target Point:')} [${target.map((v: number) => v.toFixed(4)).join(', ')}]`);
      console.log(` ${chalk.green('Tangent (at base):')} [${result.map((v: number) => v.toFixed(4)).join(', ')}]`);
    } catch (err: unknown) {
      spinner.fail(chalk.red('Logarithmic map failed'));
      console.error(chalk.red(HyperbolicCommands.errorMessage(err)));
    } finally {
      await client.disconnect();
    }
  }
  /**
   * Convert a Poincare-ball point to Lorentz (hyperboloid) coordinates.
   * The Lorentz representation gains one dimension.
   * @param client - RuVector client; connected here and disconnected in `finally`.
   * @param options - JSON-encoded `vector`; curvature defaults to -1.0.
   */
  static async poincareToLorentz(
    client: RuVectorClient,
    options: ConvertOptions
  ): Promise<void> {
    if (!checkHyperbolicAvailable()) {
      console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
      return;
    }
    const spinner = ora('Converting Poincare to Lorentz...').start();
    try {
      await client.connect();
      const poincare = JSON.parse(options.vector);
      const curvature = options.curvature ? parseFloat(options.curvature) : -1.0;
      const lorentz = await client.poincareToLorentz(poincare, curvature);
      spinner.succeed(chalk.green('Conversion completed'));
      console.log(chalk.bold.blue('\nCoordinate Conversion:'));
      console.log(chalk.gray('-'.repeat(40)));
      console.log(` ${chalk.green('Poincare (ball):')} [${poincare.map((v: number) => v.toFixed(4)).join(', ')}]`);
      console.log(` ${chalk.green('Lorentz (hyperboloid):')} [${lorentz.map((v: number) => v.toFixed(4)).join(', ')}]`);
      console.log(` ${chalk.green('Dimension change:')} ${poincare.length} -> ${lorentz.length}`);
    } catch (err: unknown) {
      spinner.fail(chalk.red('Conversion failed'));
      console.error(chalk.red(HyperbolicCommands.errorMessage(err)));
    } finally {
      await client.disconnect();
    }
  }
  /**
   * Convert a Lorentz (hyperboloid) point back to the Poincare ball.
   * Inverse of poincareToLorentz; the output loses one dimension.
   * @param client - RuVector client; connected here and disconnected in `finally`.
   * @param options - JSON-encoded `vector`; curvature defaults to -1.0.
   */
  static async lorentzToPoincare(
    client: RuVectorClient,
    options: ConvertOptions
  ): Promise<void> {
    if (!checkHyperbolicAvailable()) {
      console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
      return;
    }
    const spinner = ora('Converting Lorentz to Poincare...').start();
    try {
      await client.connect();
      const lorentz = JSON.parse(options.vector);
      const curvature = options.curvature ? parseFloat(options.curvature) : -1.0;
      const poincare = await client.lorentzToPoincare(lorentz, curvature);
      spinner.succeed(chalk.green('Conversion completed'));
      console.log(chalk.bold.blue('\nCoordinate Conversion:'));
      console.log(chalk.gray('-'.repeat(40)));
      console.log(` ${chalk.green('Lorentz (hyperboloid):')} [${lorentz.map((v: number) => v.toFixed(4)).join(', ')}]`);
      console.log(` ${chalk.green('Poincare (ball):')} [${poincare.map((v: number) => v.toFixed(4)).join(', ')}]`);
      console.log(` ${chalk.green('Dimension change:')} ${lorentz.length} -> ${poincare.length}`);
    } catch (err: unknown) {
      spinner.fail(chalk.red('Conversion failed'));
      console.error(chalk.red(HyperbolicCommands.errorMessage(err)));
    } finally {
      await client.disconnect();
    }
  }
  /**
   * Minkowski inner product of two vectors (signature (-,+,+,...,+)).
   * Unlike the other commands, `a` and `b` are raw JSON strings, not an
   * options object, and there is no curvature parameter.
   */
  static async minkowskiDot(
    client: RuVectorClient,
    a: string,
    b: string
  ): Promise<void> {
    if (!checkHyperbolicAvailable()) {
      console.log(HYPERBOLIC_REQUIRES_EXTENSION_MSG);
      return;
    }
    const spinner = ora('Computing Minkowski inner product...').start();
    try {
      await client.connect();
      const vecA = JSON.parse(a);
      const vecB = JSON.parse(b);
      const result = await client.minkowskiDot(vecA, vecB);
      spinner.succeed(chalk.green('Minkowski inner product computed'));
      console.log(chalk.bold.blue('\nMinkowski Inner Product:'));
      console.log(chalk.gray('-'.repeat(40)));
      console.log(` ${chalk.green('Result:')} ${result.toFixed(6)}`);
      console.log(` ${chalk.gray('Note:')} Uses signature (-,+,+,...,+)`);
    } catch (err: unknown) {
      spinner.fail(chalk.red('Computation failed'));
      console.error(chalk.red(HyperbolicCommands.errorMessage(err)));
    } finally {
      await client.disconnect();
    }
  }
  /** Print static help for the `hyperbolic` command group. No client I/O. */
  static showHelp(): void {
    console.log(chalk.bold.blue('\nHyperbolic Geometry Operations:'));
    console.log(chalk.gray('-'.repeat(60)));
    console.log(`
${chalk.yellow('Overview:')}
  Hyperbolic space is ideal for embedding hierarchical data like
  taxonomies, organizational charts, and knowledge graphs.
${chalk.yellow('Models:')}
  ${chalk.green('Poincare Ball')} - Unit ball model, good for visualization
  ${chalk.green('Lorentz/Hyperboloid')} - Numerically stable, good for training
${chalk.yellow('Curvature:')}
  Default curvature is -1.0. More negative = more "curved" space.
  Must always be negative for hyperbolic geometry.
${chalk.yellow('Commands:')}
  ${chalk.green('hyperbolic poincare-distance')} - Distance in Poincare ball
  ${chalk.green('hyperbolic lorentz-distance')} - Distance on hyperboloid
  ${chalk.green('hyperbolic mobius-add')} - Hyperbolic addition
  ${chalk.green('hyperbolic exp-map')} - Tangent to manifold
  ${chalk.green('hyperbolic log-map')} - Manifold to tangent
  ${chalk.green('hyperbolic poincare-to-lorentz')} - Convert coordinates
  ${chalk.green('hyperbolic lorentz-to-poincare')} - Convert coordinates
  ${chalk.green('hyperbolic minkowski-dot')} - Minkowski inner product
${chalk.yellow('Use Cases:')}
  - Hierarchical clustering
  - Knowledge graph embeddings
  - Taxonomy representation
  - Social network analysis
`);
  }
}
export default HyperbolicCommands;

View File

@@ -0,0 +1,154 @@
/**
* RuVector PostgreSQL Installation Commands
*
* Provides complete installation of RuVector PostgreSQL extension:
* - Full native installation (PostgreSQL + Rust + pgrx + extension)
* - Docker-based installation (recommended for quick start)
* - Extension management (enable, disable, upgrade)
*/
/** Options accepted by the install/uninstall commands. All fields optional; callers pass a subset. */
interface InstallOptions {
    method?: 'docker' | 'native' | 'auto';
    port?: number;
    user?: string;
    password?: string;
    database?: string;
    dataDir?: string;
    version?: string;
    pgVersion?: string;
    detach?: boolean;
    name?: string;
    skipPostgres?: boolean;
    skipRust?: boolean;
}
/** Snapshot of the current installation state (Docker container or native install). */
interface StatusInfo {
    installed: boolean;
    running: boolean;
    method: 'docker' | 'native' | 'none';
    version?: string;
    containerId?: string;
    port?: number;
    connectionString?: string;
}
/** Result of probing the host for the tooling needed to build and run the extension. */
interface SystemInfo {
    platform: NodeJS.Platform;
    arch: string;
    docker: boolean;
    postgres: boolean;
    pgVersion: string | null;
    pgConfig: string | null;
    rust: boolean;
    rustVersion: string | null;
    cargo: boolean;
    pgrx: boolean;
    pgrxVersion: string | null;
    sudo: boolean;
    packageManager: 'apt' | 'yum' | 'dnf' | 'brew' | 'pacman' | 'unknown';
}
/**
 * Static command surface for installing, managing, and inspecting the
 * RuVector PostgreSQL extension (Docker or native). Generated declaration —
 * keep in sync with the implementation in install.ts.
 */
export declare class InstallCommands {
    /**
     * Comprehensive system check
     */
    static checkSystem(): Promise<SystemInfo>;
    /**
     * Check system requirements (backward compatible)
     */
    static checkRequirements(): Promise<{
        docker: boolean;
        postgres: boolean;
        pgConfig: string | null;
    }>;
    /**
     * Run command with sudo if needed
     */
    static sudoExec(command: string, options?: {
        silent?: boolean;
    }): string;
    /**
     * Install PostgreSQL
     */
    static installPostgreSQL(pgVersion: string, sys: SystemInfo): Promise<boolean>;
    /**
     * Install Rust
     */
    static installRust(): Promise<boolean>;
    /**
     * Install required build dependencies
     */
    static installBuildDeps(sys: SystemInfo, pgVersion?: string): Promise<boolean>;
    /**
     * Install cargo-pgrx
     */
    static installPgrx(pgVersion: string): Promise<boolean>;
    /**
     * Build and install ruvector-postgres extension
     */
    static buildAndInstallExtension(pgVersion: string): Promise<boolean>;
    /**
     * Configure PostgreSQL for the extension
     */
    static configurePostgreSQL(options: InstallOptions): Promise<boolean>;
    /**
     * Full native installation
     */
    static installNativeFull(options?: InstallOptions): Promise<void>;
    /**
     * Install RuVector PostgreSQL (auto-detect best method)
     */
    static install(options?: InstallOptions): Promise<void>;
    /**
     * Install via Docker
     */
    static installDocker(options?: InstallOptions): Promise<void>;
    /**
     * Install native extension (download pre-built binaries) - Legacy method
     */
    static installNative(options?: InstallOptions): Promise<void>;
    /**
     * Uninstall RuVector PostgreSQL
     */
    static uninstall(options?: {
        name?: string;
        removeData?: boolean;
    }): Promise<void>;
    /**
     * Get installation status
     */
    static status(options?: {
        name?: string;
    }): Promise<StatusInfo>;
    /**
     * Print status information
     */
    static printStatus(options?: {
        name?: string;
    }): Promise<void>;
    /**
     * Start the database
     */
    static start(options?: {
        name?: string;
    }): Promise<void>;
    /**
     * Stop the database
     */
    static stop(options?: {
        name?: string;
    }): Promise<void>;
    /**
     * Show logs
     */
    static logs(options?: {
        name?: string;
        follow?: boolean;
        tail?: number;
    }): Promise<void>;
    /**
     * Execute psql command
     */
    static psql(options?: {
        name?: string;
        command?: string;
    }): Promise<void>;
}
export {};
//# sourceMappingURL=install.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"install.d.ts","sourceRoot":"","sources":["install.ts"],"names":[],"mappings":"AAAA;;;;;;;GAOG;AAsBH,UAAU,cAAc;IACtB,MAAM,CAAC,EAAE,QAAQ,GAAG,QAAQ,GAAG,MAAM,CAAC;IACtC,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,SAAS,CAAC,EAAE,MAAM,CAAC;IACnB,MAAM,CAAC,EAAE,OAAO,CAAC;IACjB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,YAAY,CAAC,EAAE,OAAO,CAAC;IACvB,QAAQ,CAAC,EAAE,OAAO,CAAC;CACpB;AAED,UAAU,UAAU;IAClB,SAAS,EAAE,OAAO,CAAC;IACnB,OAAO,EAAE,OAAO,CAAC;IACjB,MAAM,EAAE,QAAQ,GAAG,QAAQ,GAAG,MAAM,CAAC;IACrC,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,gBAAgB,CAAC,EAAE,MAAM,CAAC;CAC3B;AAED,UAAU,UAAU;IAClB,QAAQ,EAAE,MAAM,CAAC,QAAQ,CAAC;IAC1B,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,OAAO,CAAC;IAChB,QAAQ,EAAE,OAAO,CAAC;IAClB,SAAS,EAAE,MAAM,GAAG,IAAI,CAAC;IACzB,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAC;IACxB,IAAI,EAAE,OAAO,CAAC;IACd,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;IAC3B,KAAK,EAAE,OAAO,CAAC;IACf,IAAI,EAAE,OAAO,CAAC;IACd,WAAW,EAAE,MAAM,GAAG,IAAI,CAAC;IAC3B,IAAI,EAAE,OAAO,CAAC;IACd,cAAc,EAAE,KAAK,GAAG,KAAK,GAAG,KAAK,GAAG,MAAM,GAAG,QAAQ,GAAG,SAAS,CAAC;CACvE;AAED,qBAAa,eAAe;IAE1B;;OAEG;WACU,WAAW,IAAI,OAAO,CAAC,UAAU,CAAC;IAqF/C;;OAEG;WACU,iBAAiB,IAAI,OAAO,CAAC;QAAE,MAAM,EAAE,OAAO,CAAC;QAAC,QAAQ,EAAE,OAAO,CAAC;QAAC,QAAQ,EAAE,MAAM,GAAG,IAAI,CAAA;KAAE,CAAC;IAS1G;;OAEG;IACH,MAAM,CAAC,QAAQ,CAAC,OAAO,EAAE,MAAM,EAAE,OAAO,GAAE;QAAE,MAAM,CAAC,EAAE,OAAO,CAAA;KAAO,GAAG,MAAM;IAU5E;;OAEG;WACU,iBAAiB,CAAC,SAAS,EAAE,MAAM,EAAE,GAAG,EAAE,UAAU,GAAG,OAAO,CAAC,OAAO,CAAC;IAgFpF;;OAEG;WACU,WAAW,IAAI,OAAO,CAAC,OAAO,CAAC;IAyB5C;;OAEG;WACU,gBAAgB,CAAC,GAAG,EAAE,UAAU,EAAE,SAAS,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAoCpF;;OAEG;WACU,WAAW,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAoC7D;;OAEG;WACU,wBAAwB,CAAC,SAAS,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IA8C1E;;OAEG;WACU,mBAAmB,CAAC,OAAO,EAAE,cAAc,GAAG,OAAO,CAAC,OAAO,CAAC;IAiC3E;;OAEG;WACU,iBAAiB,C
AAC,OAAO,GAAE,cAAmB,GAAG,OAAO,CAAC,IAAI,CAAC;IAgG3E;;OAEG;WACU,OAAO,CAAC,OAAO,GAAE,cAAmB,GAAG,OAAO,CAAC,IAAI,CAAC;IAqCjE;;OAEG;WACU,aAAa,CAAC,OAAO,GAAE,cAAmB,GAAG,OAAO,CAAC,IAAI,CAAC;IAwIvE;;OAEG;WACU,aAAa,CAAC,OAAO,GAAE,cAAmB,GAAG,OAAO,CAAC,IAAI,CAAC;IAKvE;;OAEG;WACU,SAAS,CAAC,OAAO,GAAE;QAAE,IAAI,CAAC,EAAE,MAAM,CAAC;QAAC,UAAU,CAAC,EAAE,OAAO,CAAA;KAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAmC5F;;OAEG;WACU,MAAM,CAAC,OAAO,GAAE;QAAE,IAAI,CAAC,EAAE,MAAM,CAAA;KAAO,GAAG,OAAO,CAAC,UAAU,CAAC;IAiDzE;;OAEG;WACU,WAAW,CAAC,OAAO,GAAE;QAAE,IAAI,CAAC,EAAE,MAAM,CAAA;KAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAoCxE;;OAEG;WACU,KAAK,CAAC,OAAO,GAAE;QAAE,IAAI,CAAC,EAAE,MAAM,CAAA;KAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAyBlE;;OAEG;WACU,IAAI,CAAC,OAAO,GAAE;QAAE,IAAI,CAAC,EAAE,MAAM,CAAA;KAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAajE;;OAEG;WACU,IAAI,CAAC,OAAO,GAAE;QAAE,IAAI,CAAC,EAAE,MAAM,CAAC;QAAC,MAAM,CAAC,EAAE,OAAO,CAAC;QAAC,IAAI,CAAC,EAAE,MAAM,CAAA;KAAO,GAAG,OAAO,CAAC,IAAI,CAAC;IAsBlG;;OAEG;WACU,IAAI,CAAC,OAAO,GAAE;QAAE,IAAI,CAAC,EAAE,MAAM,CAAC;QAAC,OAAO,CAAC,EAAE,MAAM,CAAA;KAAO,GAAG,OAAO,CAAC,IAAI,CAAC;CAwBpF"}

View File

@@ -0,0 +1,881 @@
"use strict";
/**
* RuVector PostgreSQL Installation Commands
*
* Provides complete installation of RuVector PostgreSQL extension:
* - Full native installation (PostgreSQL + Rust + pgrx + extension)
* - Docker-based installation (recommended for quick start)
* - Extension management (enable, disable, upgrade)
*/
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
var desc = Object.getOwnPropertyDescriptor(m, k);
if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
desc = { enumerable: true, get: function() { return m[k]; } };
}
Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || (function () {
var ownKeys = function(o) {
ownKeys = Object.getOwnPropertyNames || function (o) {
var ar = [];
for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
return ar;
};
return ownKeys(o);
};
return function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
__setModuleDefault(result, mod);
return result;
};
})();
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.InstallCommands = void 0;
const child_process_1 = require("child_process");
const fs = __importStar(require("fs"));
const path = __importStar(require("path"));
const os = __importStar(require("os"));
const chalk_1 = __importDefault(require("chalk"));
const ora_1 = __importDefault(require("ora"));
// Constants
const DOCKER_IMAGE = 'ruvnet/ruvector-postgres'; // Docker Hub image name
const DOCKER_IMAGE_VERSION = '0.2.5'; // image tag pulled by installDocker
const RUVECTOR_CRATE_VERSION = '0.2.5'; // crates.io version of ruvector-postgres
const PGRX_VERSION = '0.12.6'; // cargo-pgrx version used for native builds
const DEFAULT_PG_VERSION = '16';
const SUPPORTED_PG_VERSIONS = ['14', '15', '16', '17'];
const DEFAULT_PORT = 5432; // standard PostgreSQL port
const DEFAULT_USER = 'ruvector';
const DEFAULT_PASSWORD = 'ruvector';
const DEFAULT_DB = 'ruvector';
class InstallCommands {
    /**
     * Comprehensive system check.
     * Probes the host for Docker, PostgreSQL (psql/pg_config), Rust toolchain
     * (rustc/cargo/cargo-pgrx), passwordless sudo, and the platform's package
     * manager. Each probe runs a version command and treats any failure as
     * "not available" — no probe throws.
     * Returns a fully-populated SystemInfo object.
     */
    static async checkSystem() {
        const info = {
            platform: os.platform(),
            arch: os.arch(),
            docker: false,
            postgres: false,
            pgVersion: null,
            pgConfig: null,
            rust: false,
            rustVersion: null,
            cargo: false,
            pgrx: false,
            pgrxVersion: null,
            sudo: false,
            packageManager: 'unknown',
        };
        // Check Docker
        try {
            (0, child_process_1.execSync)('docker --version', { stdio: 'pipe' });
            info.docker = true;
        }
        catch { /* not available */ }
        // Check PostgreSQL
        try {
            const pgVersion = (0, child_process_1.execSync)('psql --version', { stdio: 'pipe', encoding: 'utf-8' });
            info.postgres = true;
            // First number in the banner is the major version.
            const match = pgVersion.match(/(\d+)/);
            if (match)
                info.pgVersion = match[1];
        }
        catch { /* not available */ }
        // Check pg_config
        try {
            info.pgConfig = (0, child_process_1.execSync)('pg_config --libdir', { stdio: 'pipe', encoding: 'utf-8' }).trim();
        }
        catch { /* not available */ }
        // Check Rust
        try {
            const rustVersion = (0, child_process_1.execSync)('rustc --version', { stdio: 'pipe', encoding: 'utf-8' });
            info.rust = true;
            const match = rustVersion.match(/rustc (\d+\.\d+\.\d+)/);
            if (match)
                info.rustVersion = match[1];
        }
        catch { /* not available */ }
        // Check Cargo
        try {
            (0, child_process_1.execSync)('cargo --version', { stdio: 'pipe' });
            info.cargo = true;
        }
        catch { /* not available */ }
        // Check pgrx
        try {
            const pgrxVersion = (0, child_process_1.execSync)('cargo pgrx --version', { stdio: 'pipe', encoding: 'utf-8' });
            info.pgrx = true;
            const match = pgrxVersion.match(/cargo-pgrx (\d+\.\d+\.\d+)/);
            if (match)
                info.pgrxVersion = match[1];
        }
        catch { /* not available */ }
        // Check sudo (-n: non-interactive, so this only detects passwordless sudo)
        try {
            (0, child_process_1.execSync)('sudo -n true', { stdio: 'pipe' });
            info.sudo = true;
        }
        catch { /* not available or needs password */ }
        // Detect package manager
        if (info.platform === 'darwin') {
            try {
                (0, child_process_1.execSync)('brew --version', { stdio: 'pipe' });
                info.packageManager = 'brew';
            }
            catch { /* not available */ }
        }
        else if (info.platform === 'linux') {
            // Detection order matters: prefer apt, then dnf over legacy yum.
            if (fs.existsSync('/usr/bin/apt-get')) {
                info.packageManager = 'apt';
            }
            else if (fs.existsSync('/usr/bin/dnf')) {
                info.packageManager = 'dnf';
            }
            else if (fs.existsSync('/usr/bin/yum')) {
                info.packageManager = 'yum';
            }
            else if (fs.existsSync('/usr/bin/pacman')) {
                info.packageManager = 'pacman';
            }
        }
        return info;
    }
/**
* Check system requirements (backward compatible)
*/
static async checkRequirements() {
const sys = await this.checkSystem();
return {
docker: sys.docker,
postgres: sys.postgres,
pgConfig: sys.pgConfig,
};
}
/**
* Run command with sudo if needed
*/
static sudoExec(command, options = {}) {
const needsSudo = process.getuid?.() !== 0;
const fullCommand = needsSudo ? `sudo ${command}` : command;
return (0, child_process_1.execSync)(fullCommand, {
stdio: options.silent ? 'pipe' : 'inherit',
encoding: 'utf-8',
});
}
    /**
     * Install PostgreSQL for the given major version using the platform's
     * package manager (brew on macOS; apt/dnf/yum/pacman on Linux), then
     * start and enable the service.
     * Returns true on success, false on any failure (error printed, not thrown).
     */
    static async installPostgreSQL(pgVersion, sys) {
        const spinner = (0, ora_1.default)(`Installing PostgreSQL ${pgVersion}...`).start();
        try {
            if (sys.platform === 'darwin') {
                if (sys.packageManager !== 'brew') {
                    spinner.fail('Homebrew not found. Please install it first: https://brew.sh');
                    return false;
                }
                (0, child_process_1.execSync)(`brew install postgresql@${pgVersion}`, { stdio: 'inherit' });
                (0, child_process_1.execSync)(`brew services start postgresql@${pgVersion}`, { stdio: 'inherit' });
                // Add to PATH (keg-only formula; binaries are not linked by default)
                const brewPrefix = (0, child_process_1.execSync)('brew --prefix', { encoding: 'utf-8' }).trim();
                process.env.PATH = `${brewPrefix}/opt/postgresql@${pgVersion}/bin:${process.env.PATH}`;
                spinner.succeed(`PostgreSQL ${pgVersion} installed via Homebrew`);
                return true;
            }
            if (sys.platform === 'linux') {
                switch (sys.packageManager) {
                    case 'apt':
                        // Add PostgreSQL APT repository
                        spinner.text = 'Adding PostgreSQL APT repository...';
                        this.sudoExec('apt-get update');
                        this.sudoExec('apt-get install -y wget gnupg2 lsb-release');
                        this.sudoExec('sh -c \'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list\'');
                        this.sudoExec('wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -');
                        this.sudoExec('apt-get update');
                        // Install PostgreSQL and dev files
                        spinner.text = `Installing PostgreSQL ${pgVersion} and development files...`;
                        this.sudoExec(`apt-get install -y postgresql-${pgVersion} postgresql-server-dev-${pgVersion}`);
                        // Start service
                        this.sudoExec(`systemctl start postgresql`);
                        this.sudoExec(`systemctl enable postgresql`);
                        spinner.succeed(`PostgreSQL ${pgVersion} installed via APT`);
                        return true;
                    case 'dnf':
                    case 'yum':
                        const pkg = sys.packageManager;
                        spinner.text = 'Adding PostgreSQL repository...';
                        this.sudoExec(`${pkg} install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-$(rpm -E %{rhel})-x86_64/pgdg-redhat-repo-latest.noarch.rpm`);
                        this.sudoExec(`${pkg} install -y postgresql${pgVersion}-server postgresql${pgVersion}-devel`);
                        this.sudoExec(`/usr/pgsql-${pgVersion}/bin/postgresql-${pgVersion}-setup initdb`);
                        this.sudoExec(`systemctl start postgresql-${pgVersion}`);
                        this.sudoExec(`systemctl enable postgresql-${pgVersion}`);
                        spinner.succeed(`PostgreSQL ${pgVersion} installed via ${pkg.toUpperCase()}`);
                        return true;
                    case 'pacman':
                        // Arch ships a single unversioned postgresql package.
                        this.sudoExec(`pacman -S --noconfirm postgresql`);
                        this.sudoExec(`su - postgres -c "initdb -D /var/lib/postgres/data"`);
                        this.sudoExec(`systemctl start postgresql`);
                        this.sudoExec(`systemctl enable postgresql`);
                        spinner.succeed('PostgreSQL installed via Pacman');
                        return true;
                    default:
                        spinner.fail('Unknown package manager. Please install PostgreSQL manually.');
                        return false;
                }
            }
            spinner.fail(`Unsupported platform: ${sys.platform}`);
            return false;
        }
        catch (error) {
            spinner.fail('Failed to install PostgreSQL');
            console.error(chalk_1.default.red(error.message));
            return false;
        }
    }
/**
* Install Rust
*/
    /**
     * Install the Rust toolchain via the official rustup bootstrap script.
     * Side effects: runs a curl|sh pipeline with inherited stdio and prepends
     * ~/.cargo/bin to this process's PATH so later steps (cargo, pgrx) can
     * find the freshly installed toolchain without a shell restart.
     * @returns {Promise<boolean>} true on success, false on failure (error printed).
     */
    static async installRust() {
        const spinner = (0, ora_1.default)('Installing Rust...').start();
        try {
            // Use rustup to install Rust (-y accepts defaults, non-interactive)
            (0, child_process_1.execSync)('curl --proto \'=https\' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y', {
                stdio: 'inherit',
                shell: '/bin/bash',
            });
            // Source cargo env: presence of ~/.cargo/env indicates rustup laid
            // out its standard directory structure, so expose ~/.cargo/bin
            const cargoEnv = path.join(os.homedir(), '.cargo', 'env');
            if (fs.existsSync(cargoEnv)) {
                process.env.PATH = `${path.join(os.homedir(), '.cargo', 'bin')}:${process.env.PATH}`;
            }
            spinner.succeed('Rust installed via rustup');
            return true;
        }
        catch (error) {
            spinner.fail('Failed to install Rust');
            console.error(chalk_1.default.red(error.message));
            return false;
        }
    }
/**
* Install required build dependencies
*/
    /**
     * Install compiler/toolchain packages required to build the extension
     * (clang/libclang for bindgen, pkg-config, openssl, cmake) plus the
     * PostgreSQL server development headers that pgrx compiles against.
     * @param sys       System info from checkSystem() (platform, package manager).
     * @param pgVersion Target PostgreSQL major version; falls back to the
     *                  detected version, then DEFAULT_PG_VERSION.
     * @returns {Promise<boolean>} true on success (also true for an unknown
     *          package manager, which only warns), false on a failed install.
     */
    static async installBuildDeps(sys, pgVersion) {
        const spinner = (0, ora_1.default)('Installing build dependencies...').start();
        const pg = pgVersion || sys.pgVersion || DEFAULT_PG_VERSION;
        try {
            if (sys.platform === 'darwin') {
                (0, child_process_1.execSync)('brew install llvm pkg-config openssl cmake', { stdio: 'inherit' });
            }
            else if (sys.platform === 'linux') {
                switch (sys.packageManager) {
                    case 'apt':
                        // Update package lists first, then install PostgreSQL server dev headers for pgrx
                        this.sudoExec('apt-get update');
                        this.sudoExec(`apt-get install -y build-essential libclang-dev clang pkg-config libssl-dev cmake postgresql-server-dev-${pg}`);
                        break;
                    case 'dnf':
                    case 'yum':
                        this.sudoExec(`${sys.packageManager} install -y gcc gcc-c++ clang clang-devel openssl-devel cmake make postgresql${pg}-devel`);
                        break;
                    case 'pacman':
                        this.sudoExec('pacman -S --noconfirm base-devel clang openssl cmake postgresql-libs');
                        break;
                    default:
                        // Unknown manager: tell the user what to install, but do not
                        // fail the overall pipeline — deps may already be present.
                        spinner.warn('Please install: gcc, clang, libclang-dev, pkg-config, libssl-dev, cmake, postgresql-server-dev');
                        return true;
                }
            }
            spinner.succeed('Build dependencies installed');
            return true;
        }
        catch (error) {
            spinner.fail('Failed to install build dependencies');
            console.error(chalk_1.default.red(error.message));
            return false;
        }
    }
/**
* Install cargo-pgrx
*/
    /**
     * Install the pinned cargo-pgrx version and initialize it against the
     * target PostgreSQL's pg_config. The pgrx version must match what the
     * extension crate was built with, hence the PGRX_VERSION pin.
     * @param pgVersion PostgreSQL major version to initialize pgrx for.
     * @returns {Promise<boolean>} true on success, false on failure (error printed).
     */
    static async installPgrx(pgVersion) {
        const spinner = (0, ora_1.default)(`Installing cargo-pgrx ${PGRX_VERSION}...`).start();
        try {
            // --locked keeps the dependency tree reproducible for the pinned version
            (0, child_process_1.execSync)(`cargo install cargo-pgrx --version ${PGRX_VERSION} --locked`, { stdio: 'inherit' });
            spinner.succeed(`cargo-pgrx ${PGRX_VERSION} installed`);
            // Initialize pgrx
            spinner.start(`Initializing pgrx for PostgreSQL ${pgVersion}...`);
            // Find pg_config
            let pgConfigPath;
            try {
                // NOTE(review): `which` is POSIX-only; acceptable because the
                // install paths above only target linux/darwin.
                pgConfigPath = (0, child_process_1.execSync)(`which pg_config`, { encoding: 'utf-8' }).trim();
            }
            catch {
                // Try common paths (apt, PGDG rpm, Homebrew ARM, Homebrew Intel)
                const commonPaths = [
                    `/usr/lib/postgresql/${pgVersion}/bin/pg_config`,
                    `/usr/pgsql-${pgVersion}/bin/pg_config`,
                    `/opt/homebrew/opt/postgresql@${pgVersion}/bin/pg_config`,
                    `/usr/local/opt/postgresql@${pgVersion}/bin/pg_config`,
                ];
                // Last resort: bare name, letting pgrx resolve it from PATH
                pgConfigPath = commonPaths.find(p => fs.existsSync(p)) || 'pg_config';
            }
            (0, child_process_1.execSync)(`cargo pgrx init --pg${pgVersion}=${pgConfigPath}`, { stdio: 'inherit' });
            spinner.succeed(`pgrx initialized for PostgreSQL ${pgVersion}`);
            return true;
        }
        catch (error) {
            spinner.fail('Failed to install/initialize pgrx');
            console.error(chalk_1.default.red(error.message));
            return false;
        }
    }
/**
* Build and install ruvector-postgres extension
*/
static async buildAndInstallExtension(pgVersion) {
const spinner = (0, ora_1.default)('Building ruvector-postgres extension...').start();
try {
// Create temporary directory
const tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ruvector-'));
spinner.text = 'Cloning ruvector repository...';
// Clone the actual repository (pgrx needs .control file and proper structure)
(0, child_process_1.execSync)(`git clone --depth 1 https://github.com/ruvnet/ruvector.git ${tmpDir}/ruvector`, {
stdio: 'pipe',
});
const projectDir = path.join(tmpDir, 'ruvector', 'crates', 'ruvector-postgres');
// Verify the extension directory exists
if (!fs.existsSync(projectDir)) {
throw new Error('ruvector-postgres crate not found in repository');
}
spinner.text = 'Building extension (this may take 5-10 minutes)...';
// Build and install using pgrx
(0, child_process_1.execSync)(`cargo pgrx install --features pg${pgVersion} --release`, {
cwd: projectDir,
stdio: 'inherit',
env: {
...process.env,
CARGO_NET_GIT_FETCH_WITH_CLI: 'true',
},
});
// Cleanup
spinner.text = 'Cleaning up...';
fs.rmSync(tmpDir, { recursive: true, force: true });
spinner.succeed('ruvector-postgres extension installed');
return true;
}
catch (error) {
spinner.fail('Failed to build extension');
console.error(chalk_1.default.red(error.message));
return false;
}
}
/**
* Configure PostgreSQL for the extension
*/
static async configurePostgreSQL(options) {
const spinner = (0, ora_1.default)('Configuring PostgreSQL...').start();
const user = options.user || DEFAULT_USER;
const password = options.password || DEFAULT_PASSWORD;
const database = options.database || DEFAULT_DB;
try {
// Create user and database
const commands = [
`CREATE USER ${user} WITH PASSWORD '${password}' SUPERUSER;`,
`CREATE DATABASE ${database} OWNER ${user};`,
`\\c ${database}`,
`CREATE EXTENSION IF NOT EXISTS ruvector;`,
];
for (const cmd of commands) {
try {
(0, child_process_1.execSync)(`sudo -u postgres psql -c "${cmd}"`, { stdio: 'pipe' });
}
catch {
// User/DB might already exist, that's OK
}
}
spinner.succeed('PostgreSQL configured');
return true;
}
catch (error) {
spinner.fail('Failed to configure PostgreSQL');
console.error(chalk_1.default.red(error.message));
return false;
}
}
/**
* Full native installation
*/
    /**
     * Full native installation pipeline:
     * PostgreSQL -> build deps -> Rust -> cargo-pgrx -> extension -> configure.
     * Steps are skipped when the tool is already present or explicitly skipped
     * via options.skipPostgres / options.skipRust.
     * @param {InstallOptions} options pgVersion/user/password/database/port/skip flags.
     * @throws {Error} when any mandatory step fails.
     */
    static async installNativeFull(options = {}) {
        const pgVersion = options.pgVersion || DEFAULT_PG_VERSION;
        console.log(chalk_1.default.bold.blue('\n🚀 RuVector PostgreSQL Native Installation\n'));
        console.log(chalk_1.default.gray('This will install PostgreSQL, Rust, and the RuVector extension.\n'));
        // Check system
        let sys = await this.checkSystem();
        console.log(chalk_1.default.bold('📋 System Check:'));
        console.log(` Platform: ${chalk_1.default.cyan(sys.platform)} ${chalk_1.default.cyan(sys.arch)}`);
        console.log(` PostgreSQL: ${sys.postgres ? chalk_1.default.green(`${sys.pgVersion}`) : chalk_1.default.yellow('✗ Not installed')}`);
        console.log(` Rust: ${sys.rust ? chalk_1.default.green(`${sys.rustVersion}`) : chalk_1.default.yellow('✗ Not installed')}`);
        console.log(` cargo-pgrx: ${sys.pgrx ? chalk_1.default.green(`${sys.pgrxVersion}`) : chalk_1.default.yellow('✗ Not installed')}`);
        console.log(` Pkg Manager: ${chalk_1.default.cyan(sys.packageManager)}`);
        console.log();
        // Install PostgreSQL if needed
        if (!sys.postgres && !options.skipPostgres) {
            console.log(chalk_1.default.bold(`\n📦 Step 1: Installing PostgreSQL ${pgVersion}`));
            const installed = await this.installPostgreSQL(pgVersion, sys);
            if (!installed) {
                throw new Error('Failed to install PostgreSQL');
            }
            sys = await this.checkSystem(); // Refresh
        }
        else if (sys.postgres) {
            console.log(chalk_1.default.green(`✓ PostgreSQL ${sys.pgVersion} already installed`));
        }
        // Install build dependencies (including PostgreSQL dev headers)
        const targetPgVersion = options.pgVersion || sys.pgVersion || DEFAULT_PG_VERSION;
        console.log(chalk_1.default.bold('\n🔧 Step 2: Installing build dependencies'));
        await this.installBuildDeps(sys, targetPgVersion);
        // Install Rust if needed
        if (!sys.rust && !options.skipRust) {
            console.log(chalk_1.default.bold('\n🦀 Step 3: Installing Rust'));
            const installed = await this.installRust();
            if (!installed) {
                throw new Error('Failed to install Rust');
            }
            sys = await this.checkSystem(); // Refresh
        }
        else if (sys.rust) {
            console.log(chalk_1.default.green(`✓ Rust ${sys.rustVersion} already installed`));
        }
        // Install pgrx if needed (also reinstalls when the version differs from the pin)
        if (!sys.pgrx || sys.pgrxVersion !== PGRX_VERSION) {
            console.log(chalk_1.default.bold('\n🔌 Step 4: Installing cargo-pgrx'));
            const installed = await this.installPgrx(targetPgVersion);
            if (!installed) {
                throw new Error('Failed to install pgrx');
            }
        }
        else {
            console.log(chalk_1.default.green(`✓ cargo-pgrx ${sys.pgrxVersion} already installed`));
        }
        // Build and install extension
        console.log(chalk_1.default.bold('\n🏗 Step 5: Building RuVector extension'));
        const built = await this.buildAndInstallExtension(targetPgVersion);
        if (!built) {
            throw new Error('Failed to build extension');
        }
        // Configure PostgreSQL
        console.log(chalk_1.default.bold('\n⚙ Step 6: Configuring PostgreSQL'));
        await this.configurePostgreSQL(options);
        // Success!
        // NOTE(review): options.port is only echoed here; the native install does
        // not reconfigure PostgreSQL's listen port — confirm before relying on it.
        const port = options.port || DEFAULT_PORT;
        const user = options.user || DEFAULT_USER;
        const password = options.password || DEFAULT_PASSWORD;
        const database = options.database || DEFAULT_DB;
        const connString = `postgresql://${user}:${password}@localhost:${port}/${database}`;
        console.log(chalk_1.default.green.bold('\n✅ RuVector PostgreSQL installed successfully!\n'));
        console.log(chalk_1.default.bold('Connection Details:'));
        console.log(` Host: ${chalk_1.default.cyan('localhost')}`);
        console.log(` Port: ${chalk_1.default.cyan(port.toString())}`);
        console.log(` User: ${chalk_1.default.cyan(user)}`);
        console.log(` Password: ${chalk_1.default.cyan(password)}`);
        console.log(` Database: ${chalk_1.default.cyan(database)}`);
        console.log(chalk_1.default.bold('\nConnection String:'));
        console.log(` ${chalk_1.default.cyan(connString)}`);
        console.log(chalk_1.default.bold('\nQuick Test:'));
        console.log(chalk_1.default.gray(` psql "${connString}" -c "SELECT ruvector_version();"`));
        console.log(chalk_1.default.bold('\nExample Usage:'));
        console.log(chalk_1.default.gray(' CREATE TABLE embeddings (id serial, vec real[384]);'));
        console.log(chalk_1.default.gray(' CREATE INDEX ON embeddings USING hnsw (vec);'));
        console.log(chalk_1.default.gray(' INSERT INTO embeddings (vec) VALUES (ARRAY[0.1, 0.2, ...]);'));
    }
/**
* Install RuVector PostgreSQL (auto-detect best method)
*/
    /**
     * Install RuVector PostgreSQL, auto-detecting the best method.
     * 'auto' prefers Docker when available (fastest), falling back to the
     * full native pipeline; 'docker'/'native' force a specific path.
     * @param {InstallOptions} options options.method selects the strategy.
     * @throws {Error} when the chosen method is unavailable or its install fails.
     */
    static async install(options = {}) {
        const spinner = (0, ora_1.default)('Checking system requirements...').start();
        try {
            const sys = await this.checkSystem();
            spinner.succeed('System check complete');
            console.log(chalk_1.default.bold('\n📋 System Status:'));
            console.log(` Docker: ${sys.docker ? chalk_1.default.green('✓ Available') : chalk_1.default.yellow('✗ Not found')}`);
            console.log(` PostgreSQL: ${sys.postgres ? chalk_1.default.green(`${sys.pgVersion}`) : chalk_1.default.yellow('✗ Not found')}`);
            console.log(` Rust: ${sys.rust ? chalk_1.default.green(`${sys.rustVersion}`) : chalk_1.default.yellow('✗ Not found')}`);
            const method = options.method || 'auto';
            if (method === 'auto') {
                // Prefer Docker for simplicity, fall back to native
                if (sys.docker) {
                    console.log(chalk_1.default.cyan('\n→ Using Docker installation (fastest)\n'));
                    await this.installDocker(options);
                }
                else {
                    console.log(chalk_1.default.cyan('\n→ Using native installation (will install all dependencies)\n'));
                    await this.installNativeFull(options);
                }
            }
            else if (method === 'docker') {
                if (!sys.docker) {
                    throw new Error('Docker not found. Please install Docker first: https://docs.docker.com/get-docker/');
                }
                await this.installDocker(options);
            }
            else if (method === 'native') {
                await this.installNativeFull(options);
            }
        }
        catch (error) {
            spinner.fail('Installation failed');
            throw error;
        }
    }
/**
* Install via Docker
*/
    /**
     * Install via Docker: ensure the image exists (local or pulled), run a
     * container with the requested credentials/port, wait for readiness, and
     * verify the ruvector extension is active.
     * NOTE(review): the password is interpolated into the `docker run` command
     * line and is therefore briefly visible in the host process list.
     * @param {InstallOptions} options port/user/password/database/version/name/dataDir.
     * @throws {Error} when the image is unavailable or the container fails to start.
     */
    static async installDocker(options = {}) {
        const port = options.port || DEFAULT_PORT;
        const user = options.user || DEFAULT_USER;
        const password = options.password || DEFAULT_PASSWORD;
        const database = options.database || DEFAULT_DB;
        const version = options.version || DOCKER_IMAGE_VERSION;
        const containerName = options.name || 'ruvector-postgres';
        const dataDir = options.dataDir;
        // Check if container already exists (anchored filter = exact name match)
        const existingSpinner = (0, ora_1.default)('Checking for existing installation...').start();
        try {
            const existing = (0, child_process_1.execSync)(`docker ps -a --filter name=^${containerName}$ --format "{{.ID}}"`, { encoding: 'utf-8' }).trim();
            if (existing) {
                existingSpinner.warn(`Container '${containerName}' already exists`);
                console.log(chalk_1.default.yellow(` Run 'ruvector-pg uninstall' first or use a different --name`));
                return;
            }
            existingSpinner.succeed('No existing installation found');
        }
        catch {
            existingSpinner.succeed('No existing installation found');
        }
        // Check for local image first, then try to pull from Docker Hub
        const pullSpinner = (0, ora_1.default)(`Checking for ${DOCKER_IMAGE}:${version}...`).start();
        try {
            // Check if image exists locally
            (0, child_process_1.execSync)(`docker image inspect ${DOCKER_IMAGE}:${version}`, { stdio: 'pipe' });
            pullSpinner.succeed(`Found local image ${DOCKER_IMAGE}:${version}`);
        }
        catch {
            // Try pulling from Docker Hub (ruvnet/ruvector-postgres)
            pullSpinner.text = `Pulling ${DOCKER_IMAGE}:${version} from Docker Hub...`;
            try {
                (0, child_process_1.execSync)(`docker pull ${DOCKER_IMAGE}:${version}`, { stdio: 'pipe' });
                pullSpinner.succeed(`Pulled ${DOCKER_IMAGE}:${version}`);
            }
            catch {
                // No image anywhere: print build-it-yourself instructions and abort
                pullSpinner.fail('Image not found locally or on Docker Hub');
                console.log(chalk_1.default.yellow('\n📦 To build the image locally, run:'));
                console.log(chalk_1.default.gray(' git clone https://github.com/ruvnet/ruvector.git'));
                console.log(chalk_1.default.gray(' cd ruvector'));
                console.log(chalk_1.default.gray(` docker build -f crates/ruvector-postgres/docker/Dockerfile -t ${DOCKER_IMAGE}:${version} .`));
                console.log(chalk_1.default.yellow('\n Then run this install command again.'));
                console.log(chalk_1.default.yellow('\n💡 Or use native installation:'));
                console.log(chalk_1.default.gray(' npx @ruvector/postgres-cli install --method native\n'));
                throw new Error(`RuVector Docker image not available. Build it first or use native installation.`);
            }
        }
        // Build run command
        let runCmd = `docker run -d --name ${containerName}`;
        runCmd += ` -p ${port}:5432`;
        runCmd += ` -e POSTGRES_USER=${user}`;
        runCmd += ` -e POSTGRES_PASSWORD=${password}`;
        runCmd += ` -e POSTGRES_DB=${database}`;
        if (dataDir) {
            // Persist data to a host directory, creating it if necessary
            const absDataDir = path.resolve(dataDir);
            if (!fs.existsSync(absDataDir)) {
                fs.mkdirSync(absDataDir, { recursive: true });
            }
            runCmd += ` -v ${absDataDir}:/var/lib/postgresql/data`;
        }
        runCmd += ` ${DOCKER_IMAGE}:${version}`;
        // Run container
        const runSpinner = (0, ora_1.default)('Starting RuVector PostgreSQL...').start();
        try {
            (0, child_process_1.execSync)(runCmd, { encoding: 'utf-8' });
            runSpinner.succeed('Container started');
            // Wait for PostgreSQL to be ready (poll pg_isready, up to ~30s)
            const readySpinner = (0, ora_1.default)('Waiting for PostgreSQL to be ready...').start();
            let ready = false;
            for (let i = 0; i < 30; i++) {
                try {
                    (0, child_process_1.execSync)(`docker exec ${containerName} pg_isready -U ${user}`, { stdio: 'pipe' });
                    ready = true;
                    break;
                }
                catch {
                    await new Promise(resolve => setTimeout(resolve, 1000));
                }
            }
            if (ready) {
                readySpinner.succeed('PostgreSQL is ready');
            }
            else {
                readySpinner.warn('PostgreSQL may still be starting...');
            }
            // Verify extension is present in pg_extension (best-effort)
            const verifySpinner = (0, ora_1.default)('Verifying RuVector extension...').start();
            try {
                const extCheck = (0, child_process_1.execSync)(`docker exec ${containerName} psql -U ${user} -d ${database} -c "SELECT extname, extversion FROM pg_extension WHERE extname = 'ruvector';"`, { encoding: 'utf-8' });
                if (extCheck.includes('ruvector')) {
                    verifySpinner.succeed('RuVector extension verified');
                }
                else {
                    verifySpinner.warn('Extension may need manual activation');
                }
            }
            catch {
                verifySpinner.warn('Could not verify extension (database may still be initializing)');
            }
            // Print success message
            console.log(chalk_1.default.green.bold('\n✅ RuVector PostgreSQL installed successfully!\n'));
            console.log(chalk_1.default.bold('Connection Details:'));
            console.log(` Host: ${chalk_1.default.cyan('localhost')}`);
            console.log(` Port: ${chalk_1.default.cyan(port.toString())}`);
            console.log(` User: ${chalk_1.default.cyan(user)}`);
            console.log(` Password: ${chalk_1.default.cyan(password)}`);
            console.log(` Database: ${chalk_1.default.cyan(database)}`);
            console.log(` Container: ${chalk_1.default.cyan(containerName)}`);
            const connString = `postgresql://${user}:${password}@localhost:${port}/${database}`;
            console.log(chalk_1.default.bold('\nConnection String:'));
            console.log(` ${chalk_1.default.cyan(connString)}`);
            console.log(chalk_1.default.bold('\nQuick Start:'));
            console.log(` ${chalk_1.default.gray('# Connect with psql')}`);
            console.log(` psql "${connString}"`);
            console.log(` ${chalk_1.default.gray('# Or use docker')}`);
            console.log(` docker exec -it ${containerName} psql -U ${user} -d ${database}`);
            console.log(chalk_1.default.bold('\nTest HNSW Index:'));
            console.log(chalk_1.default.gray(` CREATE TABLE items (id serial, embedding real[]);`));
            console.log(chalk_1.default.gray(` CREATE INDEX ON items USING hnsw (embedding);`));
        }
        catch (error) {
            runSpinner.fail('Failed to start container');
            throw error;
        }
    }
/**
* Install native extension (download pre-built binaries) - Legacy method
*/
    /**
     * Legacy entry point kept for backward compatibility; simply delegates
     * to the full native installation pipeline.
     * @param {InstallOptions} options forwarded unchanged to installNativeFull().
     */
    static async installNative(options = {}) {
        // Redirect to full native installation
        await this.installNativeFull(options);
    }
/**
* Uninstall RuVector PostgreSQL
*/
    /**
     * Uninstall the Docker-based installation: stop and remove the container.
     * Both steps are idempotent (already-stopped / already-removed is fine).
     * NOTE(review): options.removeData only prints a warning — data volumes
     * are never actually deleted here; confirm whether that is intended.
     * @param {{name?: string, removeData?: boolean}} options container name override.
     * @throws rethrows unexpected docker errors.
     */
    static async uninstall(options = {}) {
        const containerName = options.name || 'ruvector-postgres';
        const spinner = (0, ora_1.default)(`Stopping container '${containerName}'...`).start();
        try {
            // Stop container
            try {
                (0, child_process_1.execSync)(`docker stop ${containerName}`, { stdio: 'pipe' });
                spinner.succeed('Container stopped');
            }
            catch {
                spinner.info('Container was not running');
            }
            // Remove container
            const removeSpinner = (0, ora_1.default)('Removing container...').start();
            try {
                (0, child_process_1.execSync)(`docker rm ${containerName}`, { stdio: 'pipe' });
                removeSpinner.succeed('Container removed');
            }
            catch {
                removeSpinner.info('Container already removed');
            }
            if (options.removeData) {
                console.log(chalk_1.default.yellow('\n⚠ Data volumes were not removed (manual cleanup required)'));
            }
            console.log(chalk_1.default.green.bold('\n✅ RuVector PostgreSQL uninstalled\n'));
        }
        catch (error) {
            spinner.fail('Uninstall failed');
            throw error;
        }
    }
/**
* Get installation status
*/
    /**
     * Detect whether RuVector PostgreSQL is installed and running.
     * Checks for the Docker container first; if none is found, probes a
     * local (native) PostgreSQL for the ruvector extension.
     * @param {{name?: string}} options container name override.
     * @returns {Promise<StatusInfo>} installed/running/method plus connection info.
     */
    static async status(options = {}) {
        const containerName = options.name || 'ruvector-postgres';
        const info = {
            installed: false,
            running: false,
            method: 'none',
        };
        // Check Docker installation
        try {
            const containerInfo = (0, child_process_1.execSync)(`docker inspect ${containerName} --format '{{.State.Running}} {{.Config.Image}} {{.NetworkSettings.Ports}}'`, { encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] }).trim();
            // Only the first two space-separated fields are used (running, image)
            const [running, image] = containerInfo.split(' ');
            info.installed = true;
            info.running = running === 'true';
            info.method = 'docker';
            info.version = image.split(':')[1] || 'latest';
            info.containerId = (0, child_process_1.execSync)(`docker inspect ${containerName} --format '{{.Id}}'`, { encoding: 'utf-8' }).trim().substring(0, 12);
            // Get port mapping
            try {
                const portMapping = (0, child_process_1.execSync)(`docker port ${containerName} 5432`, { encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] }).trim();
                const portMatch = portMapping.match(/:(\d+)$/);
                if (portMatch) {
                    info.port = parseInt(portMatch[1]);
                    // NOTE(review): assumes the default ruvector/ruvector credentials;
                    // a container created with custom --user/--password gets a wrong
                    // connection string here — confirm against installDocker().
                    info.connectionString = `postgresql://ruvector:ruvector@localhost:${info.port}/ruvector`;
                }
            }
            catch { /* port not mapped */ }
        }
        catch {
            // No Docker installation found, check native
            try {
                (0, child_process_1.execSync)('psql -c "SELECT 1 FROM pg_extension WHERE extname = \'ruvector\'" 2>/dev/null', { stdio: 'pipe' });
                info.installed = true;
                info.running = true;
                info.method = 'native';
            }
            catch { /* not installed */ }
        }
        return info;
    }
/**
* Print status information
*/
    /**
     * Pretty-print the result of status() to the console, including hints
     * for the next command to run (install/start) when appropriate.
     * @param {{name?: string}} options forwarded to status().
     */
    static async printStatus(options = {}) {
        const spinner = (0, ora_1.default)('Checking installation status...').start();
        const status = await this.status(options);
        spinner.stop();
        console.log(chalk_1.default.bold('\n📊 RuVector PostgreSQL Status\n'));
        if (!status.installed) {
            console.log(` Status: ${chalk_1.default.yellow('Not installed')}`);
            console.log(chalk_1.default.gray('\n Run `ruvector-pg install` to install'));
            return;
        }
        console.log(` Installed: ${chalk_1.default.green('Yes')}`);
        console.log(` Method: ${chalk_1.default.cyan(status.method)}`);
        console.log(` Version: ${chalk_1.default.cyan(status.version || 'unknown')}`);
        console.log(` Running: ${status.running ? chalk_1.default.green('Yes') : chalk_1.default.red('No')}`);
        if (status.method === 'docker') {
            console.log(` Container: ${chalk_1.default.cyan(status.containerId)}`);
        }
        if (status.port) {
            console.log(` Port: ${chalk_1.default.cyan(status.port.toString())}`);
        }
        if (status.connectionString) {
            console.log(`\n Connection: ${chalk_1.default.cyan(status.connectionString)}`);
        }
        if (!status.running) {
            console.log(chalk_1.default.gray('\n Run `ruvector-pg start` to start the database'));
        }
    }
/**
* Start the database
*/
static async start(options = {}) {
const containerName = options.name || 'ruvector-postgres';
const spinner = (0, ora_1.default)('Starting RuVector PostgreSQL...').start();
try {
(0, child_process_1.execSync)(`docker start ${containerName}`, { stdio: 'pipe' });
// Wait for ready
for (let i = 0; i < 30; i++) {
try {
(0, child_process_1.execSync)(`docker exec ${containerName} pg_isready`, { stdio: 'pipe' });
spinner.succeed('RuVector PostgreSQL started');
return;
}
catch {
await new Promise(resolve => setTimeout(resolve, 1000));
}
}
spinner.warn('Started but may not be ready yet');
}
catch (error) {
spinner.fail('Failed to start');
throw error;
}
}
/**
* Stop the database
*/
static async stop(options = {}) {
const containerName = options.name || 'ruvector-postgres';
const spinner = (0, ora_1.default)('Stopping RuVector PostgreSQL...').start();
try {
(0, child_process_1.execSync)(`docker stop ${containerName}`, { stdio: 'pipe' });
spinner.succeed('RuVector PostgreSQL stopped');
}
catch (error) {
spinner.fail('Failed to stop');
throw error;
}
}
/**
* Show logs
*/
static async logs(options = {}) {
const containerName = options.name || 'ruvector-postgres';
const tail = options.tail || 100;
try {
if (options.follow) {
const child = (0, child_process_1.spawn)('docker', ['logs', containerName, '--tail', tail.toString(), '-f'], {
stdio: 'inherit'
});
child.on('error', (err) => {
console.error(chalk_1.default.red(`Error: ${err.message}`));
});
}
else {
const output = (0, child_process_1.execSync)(`docker logs ${containerName} --tail ${tail}`, { encoding: 'utf-8' });
console.log(output);
}
}
catch (error) {
console.error(chalk_1.default.red('Failed to get logs'));
throw error;
}
}
/**
* Execute psql command
*/
    /**
     * Run psql inside the container: one-shot when options.command is given,
     * otherwise an interactive session attached to the current terminal.
     * NOTE(review): user and database are hardcoded to 'ruvector' here,
     * unlike install-time options — confirm whether custom credentials
     * should be supported.
     * @param {{name?: string, command?: string}} options
     * @throws rethrows when the one-shot command fails.
     */
    static async psql(options = {}) {
        const containerName = options.name || 'ruvector-postgres';
        if (options.command) {
            try {
                const output = (0, child_process_1.execSync)(`docker exec ${containerName} psql -U ruvector -d ruvector -c "${options.command}"`, { encoding: 'utf-8' });
                console.log(output);
            }
            catch (error) {
                console.error(chalk_1.default.red('Failed to execute command'));
                throw error;
            }
        }
        else {
            // Interactive mode
            const child = (0, child_process_1.spawn)('docker', ['exec', '-it', containerName, 'psql', '-U', 'ruvector', '-d', 'ruvector'], {
                stdio: 'inherit'
            });
            child.on('error', (err) => {
                console.error(chalk_1.default.red(`Error: ${err.message}`));
            });
        }
    }
}
exports.InstallCommands = InstallCommands;
//# sourceMappingURL=install.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,971 @@
/**
* RuVector PostgreSQL Installation Commands
*
* Provides complete installation of RuVector PostgreSQL extension:
* - Full native installation (PostgreSQL + Rust + pgrx + extension)
* - Docker-based installation (recommended for quick start)
* - Extension management (enable, disable, upgrade)
*/
import { execSync, spawn, spawnSync } from 'child_process';
import { promisify } from 'util';
import * as fs from 'fs';
import * as path from 'path';
import * as os from 'os';
import chalk from 'chalk';
import ora from 'ora';
// Constants
// Docker Hub image used by the Docker-based install path.
const DOCKER_IMAGE = 'ruvnet/ruvector-postgres';
const DOCKER_IMAGE_VERSION = '0.2.5';
// Version of the ruvector-postgres crate targeted by the native build path.
const RUVECTOR_CRATE_VERSION = '0.2.5';
// cargo-pgrx is version-pinned: the extension must be built with a matching pgrx.
const PGRX_VERSION = '0.12.6';
const DEFAULT_PG_VERSION = '16';
const SUPPORTED_PG_VERSIONS = ['14', '15', '16', '17'];
// Default connection parameters for a fresh install.
const DEFAULT_PORT = 5432;
const DEFAULT_USER = 'ruvector';
const DEFAULT_PASSWORD = 'ruvector';
const DEFAULT_DB = 'ruvector';
/** Options accepted by the install/uninstall/lifecycle commands. */
interface InstallOptions {
  /** Installation strategy; 'auto' prefers Docker when available. */
  method?: 'docker' | 'native' | 'auto';
  /** Host port to expose PostgreSQL on (default 5432). */
  port?: number;
  user?: string;
  password?: string;
  database?: string;
  /** Host directory mounted as the Docker data volume. */
  dataDir?: string;
  /** Docker image tag to install. */
  version?: string;
  /** Target PostgreSQL major version for native installs (e.g. '16'). */
  pgVersion?: string;
  detach?: boolean;
  /** Docker container name (default 'ruvector-postgres'). */
  name?: string;
  /** Skip installing PostgreSQL in the native path. */
  skipPostgres?: boolean;
  /** Skip installing Rust in the native path. */
  skipRust?: boolean;
}
/** Installation status reported by InstallCommands.status(). */
interface StatusInfo {
  installed: boolean;
  running: boolean;
  /** How RuVector was installed; 'none' when not found. */
  method: 'docker' | 'native' | 'none';
  version?: string;
  /** Short (12-char) Docker container id, when method === 'docker'. */
  containerId?: string;
  /** Host port mapped to the container's 5432, when published. */
  port?: number;
  connectionString?: string;
}
/** Result of InstallCommands.checkSystem(): detected platform and toolchain. */
interface SystemInfo {
  platform: NodeJS.Platform;
  arch: string;
  /** true when `docker --version` succeeds */
  docker: boolean;
  /** true when `psql --version` succeeds */
  postgres: boolean;
  /** PostgreSQL major version (e.g. '16'), or null when not detected */
  pgVersion: string | null;
  /** Output of `pg_config --libdir`, or null when pg_config is missing */
  pgConfig: string | null;
  rust: boolean;
  rustVersion: string | null;
  cargo: boolean;
  /** true when `cargo pgrx --version` succeeds */
  pgrx: boolean;
  pgrxVersion: string | null;
  /** true when passwordless sudo (`sudo -n true`) is available */
  sudo: boolean;
  packageManager: 'apt' | 'yum' | 'dnf' | 'brew' | 'pacman' | 'unknown';
}
export class InstallCommands {
/**
* Comprehensive system check
*/
static async checkSystem(): Promise<SystemInfo> {
const info: SystemInfo = {
platform: os.platform(),
arch: os.arch(),
docker: false,
postgres: false,
pgVersion: null,
pgConfig: null,
rust: false,
rustVersion: null,
cargo: false,
pgrx: false,
pgrxVersion: null,
sudo: false,
packageManager: 'unknown',
};
// Check Docker
try {
execSync('docker --version', { stdio: 'pipe' });
info.docker = true;
} catch { /* not available */ }
// Check PostgreSQL
try {
const pgVersion = execSync('psql --version', { stdio: 'pipe', encoding: 'utf-8' });
info.postgres = true;
const match = pgVersion.match(/(\d+)/);
if (match) info.pgVersion = match[1];
} catch { /* not available */ }
// Check pg_config
try {
info.pgConfig = execSync('pg_config --libdir', { stdio: 'pipe', encoding: 'utf-8' }).trim();
} catch { /* not available */ }
// Check Rust
try {
const rustVersion = execSync('rustc --version', { stdio: 'pipe', encoding: 'utf-8' });
info.rust = true;
const match = rustVersion.match(/rustc (\d+\.\d+\.\d+)/);
if (match) info.rustVersion = match[1];
} catch { /* not available */ }
// Check Cargo
try {
execSync('cargo --version', { stdio: 'pipe' });
info.cargo = true;
} catch { /* not available */ }
// Check pgrx
try {
const pgrxVersion = execSync('cargo pgrx --version', { stdio: 'pipe', encoding: 'utf-8' });
info.pgrx = true;
const match = pgrxVersion.match(/cargo-pgrx (\d+\.\d+\.\d+)/);
if (match) info.pgrxVersion = match[1];
} catch { /* not available */ }
// Check sudo
try {
execSync('sudo -n true', { stdio: 'pipe' });
info.sudo = true;
} catch { /* not available or needs password */ }
// Detect package manager
if (info.platform === 'darwin') {
try {
execSync('brew --version', { stdio: 'pipe' });
info.packageManager = 'brew';
} catch { /* not available */ }
} else if (info.platform === 'linux') {
if (fs.existsSync('/usr/bin/apt-get')) {
info.packageManager = 'apt';
} else if (fs.existsSync('/usr/bin/dnf')) {
info.packageManager = 'dnf';
} else if (fs.existsSync('/usr/bin/yum')) {
info.packageManager = 'yum';
} else if (fs.existsSync('/usr/bin/pacman')) {
info.packageManager = 'pacman';
}
}
return info;
}
/**
* Check system requirements (backward compatible)
*/
static async checkRequirements(): Promise<{ docker: boolean; postgres: boolean; pgConfig: string | null }> {
const sys = await this.checkSystem();
return {
docker: sys.docker,
postgres: sys.postgres,
pgConfig: sys.pgConfig,
};
}
/**
* Run command with sudo if needed
*/
static sudoExec(command: string, options: { silent?: boolean } = {}): string {
const needsSudo = process.getuid?.() !== 0;
const fullCommand = needsSudo ? `sudo ${command}` : command;
return execSync(fullCommand, {
stdio: options.silent ? 'pipe' : 'inherit',
encoding: 'utf-8',
});
}
/**
* Install PostgreSQL
*/
static async installPostgreSQL(pgVersion: string, sys: SystemInfo): Promise<boolean> {
const spinner = ora(`Installing PostgreSQL ${pgVersion}...`).start();
try {
if (sys.platform === 'darwin') {
if (sys.packageManager !== 'brew') {
spinner.fail('Homebrew not found. Please install it first: https://brew.sh');
return false;
}
execSync(`brew install postgresql@${pgVersion}`, { stdio: 'inherit' });
execSync(`brew services start postgresql@${pgVersion}`, { stdio: 'inherit' });
// Add to PATH
const brewPrefix = execSync('brew --prefix', { encoding: 'utf-8' }).trim();
process.env.PATH = `${brewPrefix}/opt/postgresql@${pgVersion}/bin:${process.env.PATH}`;
spinner.succeed(`PostgreSQL ${pgVersion} installed via Homebrew`);
return true;
}
if (sys.platform === 'linux') {
switch (sys.packageManager) {
case 'apt':
// Add PostgreSQL APT repository
spinner.text = 'Adding PostgreSQL APT repository...';
this.sudoExec('apt-get update');
this.sudoExec('apt-get install -y wget gnupg2 lsb-release');
this.sudoExec('sh -c \'echo "deb http://apt.postgresql.org/pub/repos/apt $(lsb_release -cs)-pgdg main" > /etc/apt/sources.list.d/pgdg.list\'');
this.sudoExec('wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -');
this.sudoExec('apt-get update');
// Install PostgreSQL and dev files
spinner.text = `Installing PostgreSQL ${pgVersion} and development files...`;
this.sudoExec(`apt-get install -y postgresql-${pgVersion} postgresql-server-dev-${pgVersion}`);
// Start service
this.sudoExec(`systemctl start postgresql`);
this.sudoExec(`systemctl enable postgresql`);
spinner.succeed(`PostgreSQL ${pgVersion} installed via APT`);
return true;
case 'dnf':
case 'yum':
const pkg = sys.packageManager;
spinner.text = 'Adding PostgreSQL repository...';
this.sudoExec(`${pkg} install -y https://download.postgresql.org/pub/repos/yum/reporpms/EL-$(rpm -E %{rhel})-x86_64/pgdg-redhat-repo-latest.noarch.rpm`);
this.sudoExec(`${pkg} install -y postgresql${pgVersion}-server postgresql${pgVersion}-devel`);
this.sudoExec(`/usr/pgsql-${pgVersion}/bin/postgresql-${pgVersion}-setup initdb`);
this.sudoExec(`systemctl start postgresql-${pgVersion}`);
this.sudoExec(`systemctl enable postgresql-${pgVersion}`);
spinner.succeed(`PostgreSQL ${pgVersion} installed via ${pkg.toUpperCase()}`);
return true;
case 'pacman':
this.sudoExec(`pacman -S --noconfirm postgresql`);
this.sudoExec(`su - postgres -c "initdb -D /var/lib/postgres/data"`);
this.sudoExec(`systemctl start postgresql`);
this.sudoExec(`systemctl enable postgresql`);
spinner.succeed('PostgreSQL installed via Pacman');
return true;
default:
spinner.fail('Unknown package manager. Please install PostgreSQL manually.');
return false;
}
}
spinner.fail(`Unsupported platform: ${sys.platform}`);
return false;
} catch (error) {
spinner.fail('Failed to install PostgreSQL');
console.error(chalk.red((error as Error).message));
return false;
}
}
/**
* Install Rust
*/
static async installRust(): Promise<boolean> {
const spinner = ora('Installing Rust...').start();
try {
// Use rustup to install Rust
execSync('curl --proto \'=https\' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y', {
stdio: 'inherit',
shell: '/bin/bash',
});
// Source cargo env
const cargoEnv = path.join(os.homedir(), '.cargo', 'env');
if (fs.existsSync(cargoEnv)) {
process.env.PATH = `${path.join(os.homedir(), '.cargo', 'bin')}:${process.env.PATH}`;
}
spinner.succeed('Rust installed via rustup');
return true;
} catch (error) {
spinner.fail('Failed to install Rust');
console.error(chalk.red((error as Error).message));
return false;
}
}
/**
* Install required build dependencies
*/
  /**
   * Install compiler/toolchain packages needed to build the extension
   * (clang/libclang for bindgen, pkg-config, openssl, cmake) plus the
   * PostgreSQL server development headers that pgrx compiles against.
   * @param sys       System info from checkSystem() (platform, package manager).
   * @param pgVersion Target PostgreSQL major version; falls back to the
   *                  detected version, then DEFAULT_PG_VERSION.
   * @returns true on success (also for an unknown package manager, which only
   *          warns), false when an install command fails.
   */
  static async installBuildDeps(sys: SystemInfo, pgVersion?: string): Promise<boolean> {
    const spinner = ora('Installing build dependencies...').start();
    const pg = pgVersion || sys.pgVersion || DEFAULT_PG_VERSION;
    try {
      if (sys.platform === 'darwin') {
        execSync('brew install llvm pkg-config openssl cmake', { stdio: 'inherit' });
      } else if (sys.platform === 'linux') {
        switch (sys.packageManager) {
          case 'apt':
            // Update package lists first, then install PostgreSQL server dev headers for pgrx
            this.sudoExec('apt-get update');
            this.sudoExec(`apt-get install -y build-essential libclang-dev clang pkg-config libssl-dev cmake postgresql-server-dev-${pg}`);
            break;
          case 'dnf':
          case 'yum':
            this.sudoExec(`${sys.packageManager} install -y gcc gcc-c++ clang clang-devel openssl-devel cmake make postgresql${pg}-devel`);
            break;
          case 'pacman':
            this.sudoExec('pacman -S --noconfirm base-devel clang openssl cmake postgresql-libs');
            break;
          default:
            // Unknown manager: list what's needed but don't fail the pipeline —
            // the dependencies may already be present.
            spinner.warn('Please install: gcc, clang, libclang-dev, pkg-config, libssl-dev, cmake, postgresql-server-dev');
            return true;
        }
      }
      spinner.succeed('Build dependencies installed');
      return true;
    } catch (error) {
      spinner.fail('Failed to install build dependencies');
      console.error(chalk.red((error as Error).message));
      return false;
    }
  }
/**
 * Install cargo-pgrx at the pinned PGRX_VERSION, then initialize it for the
 * given PostgreSQL major version.
 * @param pgVersion PostgreSQL major version to initialize pgrx against.
 * @returns true on success, false if installation or initialization failed.
 */
static async installPgrx(pgVersion: string): Promise<boolean> {
  const progress = ora(`Installing cargo-pgrx ${PGRX_VERSION}...`).start();
  // Resolve pg_config: prefer whatever is on PATH, otherwise probe
  // well-known per-platform install locations.
  const locatePgConfig = (): string => {
    try {
      return execSync(`which pg_config`, { encoding: 'utf-8' }).trim();
    } catch {
      // Try common paths
      const candidates = [
        `/usr/lib/postgresql/${pgVersion}/bin/pg_config`,
        `/usr/pgsql-${pgVersion}/bin/pg_config`,
        `/opt/homebrew/opt/postgresql@${pgVersion}/bin/pg_config`,
        `/usr/local/opt/postgresql@${pgVersion}/bin/pg_config`,
      ];
      return candidates.find(p => fs.existsSync(p)) || 'pg_config';
    }
  };
  try {
    execSync(`cargo install cargo-pgrx --version ${PGRX_VERSION} --locked`, { stdio: 'inherit' });
    progress.succeed(`cargo-pgrx ${PGRX_VERSION} installed`);
    // Initialize pgrx
    progress.start(`Initializing pgrx for PostgreSQL ${pgVersion}...`);
    const pgConfigPath = locatePgConfig();
    execSync(`cargo pgrx init --pg${pgVersion}=${pgConfigPath}`, { stdio: 'inherit' });
    progress.succeed(`pgrx initialized for PostgreSQL ${pgVersion}`);
    return true;
  } catch (error) {
    progress.fail('Failed to install/initialize pgrx');
    console.error(chalk.red((error as Error).message));
    return false;
  }
}
/**
 * Clone the ruvector repository into a temporary directory, build the
 * ruvector-postgres extension with cargo-pgrx, and install it into the
 * local PostgreSQL installation.
 *
 * Fix: the temporary clone directory is now removed in a finally block,
 * so it no longer leaks when the clone or the build throws (previously
 * cleanup only happened on the success path).
 *
 * @param pgVersion PostgreSQL major version to build the extension for.
 * @returns true on success, false on failure.
 */
static async buildAndInstallExtension(pgVersion: string): Promise<boolean> {
  const spinner = ora('Building ruvector-postgres extension...').start();
  let tmpDir: string | undefined;
  try {
    // Create temporary directory
    tmpDir = fs.mkdtempSync(path.join(os.tmpdir(), 'ruvector-'));
    spinner.text = 'Cloning ruvector repository...';
    // Clone the actual repository (pgrx needs .control file and proper structure)
    execSync(`git clone --depth 1 https://github.com/ruvnet/ruvector.git ${tmpDir}/ruvector`, {
      stdio: 'pipe',
    });
    const projectDir = path.join(tmpDir, 'ruvector', 'crates', 'ruvector-postgres');
    // Verify the extension directory exists
    if (!fs.existsSync(projectDir)) {
      throw new Error('ruvector-postgres crate not found in repository');
    }
    spinner.text = 'Building extension (this may take 5-10 minutes)...';
    // Build and install using pgrx
    execSync(`cargo pgrx install --features pg${pgVersion} --release`, {
      cwd: projectDir,
      stdio: 'inherit',
      env: {
        ...process.env,
        CARGO_NET_GIT_FETCH_WITH_CLI: 'true',
      },
    });
    spinner.succeed('ruvector-postgres extension installed');
    return true;
  } catch (error) {
    spinner.fail('Failed to build extension');
    console.error(chalk.red((error as Error).message));
    return false;
  } finally {
    // Always remove the temporary clone, on success or failure.
    if (tmpDir) {
      fs.rmSync(tmpDir, { recursive: true, force: true });
    }
  }
}
/**
* Configure PostgreSQL for the extension
*/
static async configurePostgreSQL(options: InstallOptions): Promise<boolean> {
const spinner = ora('Configuring PostgreSQL...').start();
const user = options.user || DEFAULT_USER;
const password = options.password || DEFAULT_PASSWORD;
const database = options.database || DEFAULT_DB;
try {
// Create user and database
const commands = [
`CREATE USER ${user} WITH PASSWORD '${password}' SUPERUSER;`,
`CREATE DATABASE ${database} OWNER ${user};`,
`\\c ${database}`,
`CREATE EXTENSION IF NOT EXISTS ruvector;`,
];
for (const cmd of commands) {
try {
execSync(`sudo -u postgres psql -c "${cmd}"`, { stdio: 'pipe' });
} catch {
// User/DB might already exist, that's OK
}
}
spinner.succeed('PostgreSQL configured');
return true;
} catch (error) {
spinner.fail('Failed to configure PostgreSQL');
console.error(chalk.red((error as Error).message));
return false;
}
}
/**
 * Full native installation
 *
 * Orchestrates the complete from-source install: system check, then (as
 * needed) PostgreSQL, build dependencies, Rust, and cargo-pgrx, followed by
 * the extension build and database configuration. Prints connection details
 * on success. Throws on the first failed critical step; steps that already
 * completed are NOT rolled back.
 */
static async installNativeFull(options: InstallOptions = {}): Promise<void> {
  const pgVersion = options.pgVersion || DEFAULT_PG_VERSION;
  console.log(chalk.bold.blue('\n🚀 RuVector PostgreSQL Native Installation\n'));
  console.log(chalk.gray('This will install PostgreSQL, Rust, and the RuVector extension.\n'));
  // Check system
  let sys = await this.checkSystem();
  console.log(chalk.bold('📋 System Check:'));
  console.log(` Platform: ${chalk.cyan(sys.platform)} ${chalk.cyan(sys.arch)}`);
  console.log(` PostgreSQL: ${sys.postgres ? chalk.green(`${sys.pgVersion}`) : chalk.yellow('✗ Not installed')}`);
  console.log(` Rust: ${sys.rust ? chalk.green(`${sys.rustVersion}`) : chalk.yellow('✗ Not installed')}`);
  console.log(` cargo-pgrx: ${sys.pgrx ? chalk.green(`${sys.pgrxVersion}`) : chalk.yellow('✗ Not installed')}`);
  console.log(` Pkg Manager: ${chalk.cyan(sys.packageManager)}`);
  console.log();
  // Install PostgreSQL if needed
  if (!sys.postgres && !options.skipPostgres) {
    console.log(chalk.bold(`\n📦 Step 1: Installing PostgreSQL ${pgVersion}`));
    const installed = await this.installPostgreSQL(pgVersion, sys);
    if (!installed) {
      throw new Error('Failed to install PostgreSQL');
    }
    sys = await this.checkSystem(); // Refresh
  } else if (sys.postgres) {
    console.log(chalk.green(`✓ PostgreSQL ${sys.pgVersion} already installed`));
  }
  // Install build dependencies (including PostgreSQL dev headers)
  const targetPgVersion = options.pgVersion || sys.pgVersion || DEFAULT_PG_VERSION;
  console.log(chalk.bold('\n🔧 Step 2: Installing build dependencies'));
  // NOTE(review): the boolean result is ignored here — a dependency failure
  // only surfaces later in the build step. Confirm this is intentional.
  await this.installBuildDeps(sys, targetPgVersion);
  // Install Rust if needed
  if (!sys.rust && !options.skipRust) {
    console.log(chalk.bold('\n🦀 Step 3: Installing Rust'));
    const installed = await this.installRust();
    if (!installed) {
      throw new Error('Failed to install Rust');
    }
    sys = await this.checkSystem(); // Refresh
  } else if (sys.rust) {
    console.log(chalk.green(`✓ Rust ${sys.rustVersion} already installed`));
  }
  // Install pgrx if needed (also reinstalls on any version mismatch)
  if (!sys.pgrx || sys.pgrxVersion !== PGRX_VERSION) {
    console.log(chalk.bold('\n🔌 Step 4: Installing cargo-pgrx'));
    const installed = await this.installPgrx(targetPgVersion);
    if (!installed) {
      throw new Error('Failed to install pgrx');
    }
  } else {
    console.log(chalk.green(`✓ cargo-pgrx ${sys.pgrxVersion} already installed`));
  }
  // Build and install extension
  console.log(chalk.bold('\n🏗 Step 5: Building RuVector extension'));
  const built = await this.buildAndInstallExtension(targetPgVersion);
  if (!built) {
    throw new Error('Failed to build extension');
  }
  // Configure PostgreSQL
  console.log(chalk.bold('\n⚙ Step 6: Configuring PostgreSQL'));
  // NOTE(review): configuration failures do not abort the install — confirm.
  await this.configurePostgreSQL(options);
  // Success!
  const port = options.port || DEFAULT_PORT;
  const user = options.user || DEFAULT_USER;
  const password = options.password || DEFAULT_PASSWORD;
  const database = options.database || DEFAULT_DB;
  const connString = `postgresql://${user}:${password}@localhost:${port}/${database}`;
  console.log(chalk.green.bold('\n✅ RuVector PostgreSQL installed successfully!\n'));
  console.log(chalk.bold('Connection Details:'));
  console.log(` Host: ${chalk.cyan('localhost')}`);
  console.log(` Port: ${chalk.cyan(port.toString())}`);
  console.log(` User: ${chalk.cyan(user)}`);
  console.log(` Password: ${chalk.cyan(password)}`);
  console.log(` Database: ${chalk.cyan(database)}`);
  console.log(chalk.bold('\nConnection String:'));
  console.log(` ${chalk.cyan(connString)}`);
  console.log(chalk.bold('\nQuick Test:'));
  console.log(chalk.gray(` psql "${connString}" -c "SELECT ruvector_version();"`));
  console.log(chalk.bold('\nExample Usage:'));
  console.log(chalk.gray(' CREATE TABLE embeddings (id serial, vec real[384]);'));
  console.log(chalk.gray(' CREATE INDEX ON embeddings USING hnsw (vec);'));
  console.log(chalk.gray(' INSERT INTO embeddings (vec) VALUES (ARRAY[0.1, 0.2, ...]);'));
}
/**
 * Install RuVector PostgreSQL (auto-detect best method)
 *
 * 'auto' prefers Docker when available and falls back to the full native
 * build; 'docker' and 'native' force the respective method. Rethrows any
 * installation error after marking the spinner failed.
 */
static async install(options: InstallOptions = {}): Promise<void> {
  const spinner = ora('Checking system requirements...').start();
  try {
    const sys = await this.checkSystem();
    spinner.succeed('System check complete');
    console.log(chalk.bold('\n📋 System Status:'));
    console.log(` Docker: ${sys.docker ? chalk.green('✓ Available') : chalk.yellow('✗ Not found')}`);
    console.log(` PostgreSQL: ${sys.postgres ? chalk.green(`${sys.pgVersion}`) : chalk.yellow('✗ Not found')}`);
    console.log(` Rust: ${sys.rust ? chalk.green(`${sys.rustVersion}`) : chalk.yellow('✗ Not found')}`);
    const method = options.method || 'auto';
    switch (method) {
      case 'auto':
        // Prefer Docker for simplicity, fall back to native
        if (sys.docker) {
          console.log(chalk.cyan('\n→ Using Docker installation (fastest)\n'));
          await this.installDocker(options);
        } else {
          console.log(chalk.cyan('\n→ Using native installation (will install all dependencies)\n'));
          await this.installNativeFull(options);
        }
        break;
      case 'docker':
        if (!sys.docker) {
          throw new Error('Docker not found. Please install Docker first: https://docs.docker.com/get-docker/');
        }
        await this.installDocker(options);
        break;
      case 'native':
        await this.installNativeFull(options);
        break;
    }
  } catch (error) {
    spinner.fail('Installation failed');
    throw error;
  }
}
/**
 * Install via Docker
 *
 * Creates and starts a ruvector-postgres container: refuses to proceed when
 * a container with the same name exists, resolves the image (local first,
 * then Docker Hub), runs it with the requested credentials/port/volume,
 * waits up to 30s for pg_isready, verifies the extension, and prints
 * connection details.
 *
 * NOTE(review): the password is passed on the `docker run` command line and
 * is therefore briefly visible in the host's process list — confirm this is
 * acceptable for the threat model.
 */
static async installDocker(options: InstallOptions = {}): Promise<void> {
  const port = options.port || DEFAULT_PORT;
  const user = options.user || DEFAULT_USER;
  const password = options.password || DEFAULT_PASSWORD;
  const database = options.database || DEFAULT_DB;
  const version = options.version || DOCKER_IMAGE_VERSION;
  const containerName = options.name || 'ruvector-postgres';
  const dataDir = options.dataDir;
  // Check if container already exists
  const existingSpinner = ora('Checking for existing installation...').start();
  try {
    const existing = execSync(`docker ps -a --filter name=^${containerName}$ --format "{{.ID}}"`, { encoding: 'utf-8' }).trim();
    if (existing) {
      existingSpinner.warn(`Container '${containerName}' already exists`);
      console.log(chalk.yellow(` Run 'ruvector-pg uninstall' first or use a different --name`));
      return;
    }
    existingSpinner.succeed('No existing installation found');
  } catch {
    // A failing `docker ps` (e.g. daemon not reachable) is treated as
    // "nothing installed"; the later `docker run` will surface real errors.
    existingSpinner.succeed('No existing installation found');
  }
  // Check for local image first, then try to pull from Docker Hub
  const pullSpinner = ora(`Checking for ${DOCKER_IMAGE}:${version}...`).start();
  try {
    // Check if image exists locally
    execSync(`docker image inspect ${DOCKER_IMAGE}:${version}`, { stdio: 'pipe' });
    pullSpinner.succeed(`Found local image ${DOCKER_IMAGE}:${version}`);
  } catch {
    // Try pulling from Docker Hub (ruvnet/ruvector-postgres)
    pullSpinner.text = `Pulling ${DOCKER_IMAGE}:${version} from Docker Hub...`;
    try {
      execSync(`docker pull ${DOCKER_IMAGE}:${version}`, { stdio: 'pipe' });
      pullSpinner.succeed(`Pulled ${DOCKER_IMAGE}:${version}`);
    } catch {
      // Neither local nor remote image: print build instructions and abort.
      pullSpinner.fail('Image not found locally or on Docker Hub');
      console.log(chalk.yellow('\n📦 To build the image locally, run:'));
      console.log(chalk.gray(' git clone https://github.com/ruvnet/ruvector.git'));
      console.log(chalk.gray(' cd ruvector'));
      console.log(chalk.gray(` docker build -f crates/ruvector-postgres/docker/Dockerfile -t ${DOCKER_IMAGE}:${version} .`));
      console.log(chalk.yellow('\n Then run this install command again.'));
      console.log(chalk.yellow('\n💡 Or use native installation:'));
      console.log(chalk.gray(' npx @ruvector/postgres-cli install --method native\n'));
      throw new Error(`RuVector Docker image not available. Build it first or use native installation.`);
    }
  }
  // Build run command
  let runCmd = `docker run -d --name ${containerName}`;
  runCmd += ` -p ${port}:5432`;
  runCmd += ` -e POSTGRES_USER=${user}`;
  runCmd += ` -e POSTGRES_PASSWORD=${password}`;
  runCmd += ` -e POSTGRES_DB=${database}`;
  if (dataDir) {
    // Bind-mount the data directory (created on demand) so data survives
    // container removal.
    const absDataDir = path.resolve(dataDir);
    if (!fs.existsSync(absDataDir)) {
      fs.mkdirSync(absDataDir, { recursive: true });
    }
    runCmd += ` -v ${absDataDir}:/var/lib/postgresql/data`;
  }
  runCmd += ` ${DOCKER_IMAGE}:${version}`;
  // Run container
  const runSpinner = ora('Starting RuVector PostgreSQL...').start();
  try {
    execSync(runCmd, { encoding: 'utf-8' });
    runSpinner.succeed('Container started');
    // Wait for PostgreSQL to be ready (poll pg_isready, 1s apart, max 30 tries)
    const readySpinner = ora('Waiting for PostgreSQL to be ready...').start();
    let ready = false;
    for (let i = 0; i < 30; i++) {
      try {
        execSync(`docker exec ${containerName} pg_isready -U ${user}`, { stdio: 'pipe' });
        ready = true;
        break;
      } catch {
        await new Promise(resolve => setTimeout(resolve, 1000));
      }
    }
    if (ready) {
      readySpinner.succeed('PostgreSQL is ready');
    } else {
      readySpinner.warn('PostgreSQL may still be starting...');
    }
    // Verify extension
    const verifySpinner = ora('Verifying RuVector extension...').start();
    try {
      const extCheck = execSync(
        `docker exec ${containerName} psql -U ${user} -d ${database} -c "SELECT extname, extversion FROM pg_extension WHERE extname = 'ruvector';"`,
        { encoding: 'utf-8' }
      );
      if (extCheck.includes('ruvector')) {
        verifySpinner.succeed('RuVector extension verified');
      } else {
        verifySpinner.warn('Extension may need manual activation');
      }
    } catch {
      verifySpinner.warn('Could not verify extension (database may still be initializing)');
    }
    // Print success message
    console.log(chalk.green.bold('\n✅ RuVector PostgreSQL installed successfully!\n'));
    console.log(chalk.bold('Connection Details:'));
    console.log(` Host: ${chalk.cyan('localhost')}`);
    console.log(` Port: ${chalk.cyan(port.toString())}`);
    console.log(` User: ${chalk.cyan(user)}`);
    console.log(` Password: ${chalk.cyan(password)}`);
    console.log(` Database: ${chalk.cyan(database)}`);
    console.log(` Container: ${chalk.cyan(containerName)}`);
    const connString = `postgresql://${user}:${password}@localhost:${port}/${database}`;
    console.log(chalk.bold('\nConnection String:'));
    console.log(` ${chalk.cyan(connString)}`);
    console.log(chalk.bold('\nQuick Start:'));
    console.log(` ${chalk.gray('# Connect with psql')}`);
    console.log(` psql "${connString}"`);
    console.log(` ${chalk.gray('# Or use docker')}`);
    console.log(` docker exec -it ${containerName} psql -U ${user} -d ${database}`);
    console.log(chalk.bold('\nTest HNSW Index:'));
    console.log(chalk.gray(` CREATE TABLE items (id serial, embedding real[]);`));
    console.log(chalk.gray(` CREATE INDEX ON items USING hnsw (embedding);`));
  } catch (error) {
    runSpinner.fail('Failed to start container');
    throw error;
  }
}
/**
 * Install native extension (download pre-built binaries) - Legacy method
 *
 * Retained for backward compatibility with older CLI invocations; it simply
 * delegates to installNativeFull, which builds the extension from source
 * rather than downloading binaries.
 */
static async installNative(options: InstallOptions = {}): Promise<void> {
  // Redirect to full native installation
  await this.installNativeFull(options);
}
/**
 * Uninstall RuVector PostgreSQL
 *
 * Stops and removes the Docker container. When `removeData` is set, the
 * container's anonymous volumes are removed as well via `docker rm -v`.
 * (Fix: previously the `removeData` flag was accepted but had no effect.)
 * Bind-mounted host directories are never deleted automatically.
 */
static async uninstall(options: { name?: string; removeData?: boolean } = {}): Promise<void> {
  const containerName = options.name || 'ruvector-postgres';
  const spinner = ora(`Stopping container '${containerName}'...`).start();
  try {
    // Stop container
    try {
      execSync(`docker stop ${containerName}`, { stdio: 'pipe' });
      spinner.succeed('Container stopped');
    } catch {
      spinner.info('Container was not running');
    }
    // Remove container (and its anonymous volumes when requested)
    const removeSpinner = ora('Removing container...').start();
    try {
      const volumeFlag = options.removeData ? '-v ' : '';
      execSync(`docker rm ${volumeFlag}${containerName}`, { stdio: 'pipe' });
      removeSpinner.succeed('Container removed');
    } catch {
      removeSpinner.info('Container already removed');
    }
    if (options.removeData) {
      console.log(chalk.yellow('\n⚠ Anonymous volumes removed; bind-mounted data directories require manual cleanup'));
    }
    console.log(chalk.green.bold('\n✅ RuVector PostgreSQL uninstalled\n'));
  } catch (error) {
    spinner.fail('Uninstall failed');
    throw error;
  }
}
/**
 * Get installation status
 *
 * Probes for a Docker container first; when none is found, falls back to
 * checking a native PostgreSQL installation for the ruvector extension.
 * Never throws — absence of either is reported as "not installed".
 */
static async status(options: { name?: string } = {}): Promise<StatusInfo> {
  const containerName = options.name || 'ruvector-postgres';
  const info: StatusInfo = { installed: false, running: false, method: 'none' };
  // Check Docker installation
  try {
    const raw = execSync(
      `docker inspect ${containerName} --format '{{.State.Running}} {{.Config.Image}} {{.NetworkSettings.Ports}}'`,
      { encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] }
    ).trim();
    const fields = raw.split(' ');
    const runningField = fields[0];
    const image = fields[1];
    info.installed = true;
    info.running = runningField === 'true';
    info.method = 'docker';
    info.version = image.split(':')[1] || 'latest';
    info.containerId = execSync(`docker inspect ${containerName} --format '{{.Id}}'`, { encoding: 'utf-8' }).trim().substring(0, 12);
    // Get port mapping
    try {
      const mapping = execSync(
        `docker port ${containerName} 5432`,
        { encoding: 'utf-8', stdio: ['pipe', 'pipe', 'pipe'] }
      ).trim();
      const portMatch = mapping.match(/:(\d+)$/);
      if (portMatch) {
        info.port = parseInt(portMatch[1]);
        info.connectionString = `postgresql://ruvector:ruvector@localhost:${info.port}/ruvector`;
      }
    } catch { /* port not mapped */ }
  } catch {
    // No Docker installation found, check native
    try {
      execSync('psql -c "SELECT 1 FROM pg_extension WHERE extname = \'ruvector\'" 2>/dev/null', { stdio: 'pipe' });
      info.installed = true;
      info.running = true;
      info.method = 'native';
    } catch { /* not installed */ }
  }
  return info;
}
/**
 * Print status information
 *
 * Human-readable wrapper around status(): reports install method, version,
 * run state, container id (Docker only), port, and connection string, with
 * a hint for the next command when appropriate.
 */
static async printStatus(options: { name?: string } = {}): Promise<void> {
  const spinner = ora('Checking installation status...').start();
  const info = await this.status(options);
  spinner.stop();
  console.log(chalk.bold('\n📊 RuVector PostgreSQL Status\n'));
  if (!info.installed) {
    console.log(` Status: ${chalk.yellow('Not installed')}`);
    console.log(chalk.gray('\n Run `ruvector-pg install` to install'));
    return;
  }
  console.log(` Installed: ${chalk.green('Yes')}`);
  console.log(` Method: ${chalk.cyan(info.method)}`);
  console.log(` Version: ${chalk.cyan(info.version || 'unknown')}`);
  console.log(` Running: ${info.running ? chalk.green('Yes') : chalk.red('No')}`);
  if (info.method === 'docker') {
    console.log(` Container: ${chalk.cyan(info.containerId)}`);
  }
  if (info.port) {
    console.log(` Port: ${chalk.cyan(info.port.toString())}`);
  }
  if (info.connectionString) {
    console.log(`\n Connection: ${chalk.cyan(info.connectionString)}`);
  }
  if (!info.running) {
    console.log(chalk.gray('\n Run `ruvector-pg start` to start the database'));
  }
}
/**
 * Start the database
 *
 * Starts the Docker container and polls pg_isready (1s apart, up to 30
 * attempts) before declaring success. Rethrows if `docker start` fails.
 */
static async start(options: { name?: string } = {}): Promise<void> {
  const containerName = options.name || 'ruvector-postgres';
  const spinner = ora('Starting RuVector PostgreSQL...').start();
  try {
    execSync(`docker start ${containerName}`, { stdio: 'pipe' });
    // Wait for ready
    let attempts = 0;
    while (attempts < 30) {
      try {
        execSync(`docker exec ${containerName} pg_isready`, { stdio: 'pipe' });
        spinner.succeed('RuVector PostgreSQL started');
        return;
      } catch {
        await new Promise(resolve => setTimeout(resolve, 1000));
      }
      attempts += 1;
    }
    spinner.warn('Started but may not be ready yet');
  } catch (error) {
    spinner.fail('Failed to start');
    throw error;
  }
}
/**
 * Stop the database
 *
 * Stops the Docker container; rethrows on failure after marking the
 * spinner failed.
 */
static async stop(options: { name?: string } = {}): Promise<void> {
  const target = options.name || 'ruvector-postgres';
  const spinner = ora('Stopping RuVector PostgreSQL...').start();
  try {
    execSync(`docker stop ${target}`, { stdio: 'pipe' });
    spinner.succeed('RuVector PostgreSQL stopped');
  } catch (error) {
    spinner.fail('Failed to stop');
    throw error;
  }
}
/**
* Show logs
*/
static async logs(options: { name?: string; follow?: boolean; tail?: number } = {}): Promise<void> {
const containerName = options.name || 'ruvector-postgres';
const tail = options.tail || 100;
try {
if (options.follow) {
const child = spawn('docker', ['logs', containerName, '--tail', tail.toString(), '-f'], {
stdio: 'inherit'
});
child.on('error', (err) => {
console.error(chalk.red(`Error: ${err.message}`));
});
} else {
const output = execSync(`docker logs ${containerName} --tail ${tail}`, { encoding: 'utf-8' });
console.log(output);
}
} catch (error) {
console.error(chalk.red('Failed to get logs'));
throw error;
}
}
/**
 * Execute psql command
 *
 * With `command`, runs it once via `docker exec ... psql -c` and prints the
 * output; without it, opens an interactive psql session (requires a TTY).
 *
 * Fix: the SQL command is now escaped for the double-quoted shell string,
 * so commands containing `"`, `$`, backticks, or backslashes no longer
 * break the invocation (or inject into the host shell).
 */
static async psql(options: { name?: string; command?: string } = {}): Promise<void> {
  const containerName = options.name || 'ruvector-postgres';
  if (options.command) {
    // Escape characters that are special inside a double-quoted shell string.
    const safeCommand = options.command.replace(/[\\"`$]/g, ch => `\\${ch}`);
    try {
      const output = execSync(
        `docker exec ${containerName} psql -U ruvector -d ruvector -c "${safeCommand}"`,
        { encoding: 'utf-8' }
      );
      console.log(output);
    } catch (error) {
      console.error(chalk.red('Failed to execute command'));
      throw error;
    }
  } else {
    // Interactive mode
    const child = spawn('docker', ['exec', '-it', containerName, 'psql', '-U', 'ruvector', '-d', 'ruvector'], {
      stdio: 'inherit'
    });
    child.on('error', (err) => {
      console.error(chalk.red(`Error: ${err.message}`));
    });
  }
}
}

View File

@@ -0,0 +1,20 @@
/**
 * Learning Commands
 * CLI commands for self-learning and ReasoningBank operations
 *
 * NOTE(review): generated declaration file (compiled from learning.ts) —
 * regenerate via the TypeScript build rather than editing by hand.
 */
import type { RuVectorClient } from '../client.js';
export interface TrainOptions {
    file: string;
    epochs: string;
}
export interface PredictOptions {
    input: string;
}
export declare class LearningCommands {
    static compute?: never;
    static train(client: RuVectorClient, options: TrainOptions): Promise<void>;
    static predict(client: RuVectorClient, options: PredictOptions): Promise<void>;
    static status(client: RuVectorClient): Promise<void>;
    static showInfo(): void;
}
export default LearningCommands;
//# sourceMappingURL=learning.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"learning.d.ts","sourceRoot":"","sources":["learning.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAMH,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAEnD,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,cAAc;IAC7B,KAAK,EAAE,MAAM,CAAC;CACf;AAED,qBAAa,gBAAgB;WACd,KAAK,CAChB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,YAAY,GACpB,OAAO,CAAC,IAAI,CAAC;WAsCH,OAAO,CAClB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,cAAc,GACtB,OAAO,CAAC,IAAI,CAAC;WAuCH,MAAM,CAAC,MAAM,EAAE,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC;IAwC1D,MAAM,CAAC,QAAQ,IAAI,IAAI;CAmCxB;AAED,eAAe,gBAAgB,CAAC"}

View File

@@ -0,0 +1,144 @@
"use strict";
/**
* Learning Commands
* CLI commands for self-learning and ReasoningBank operations
*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.LearningCommands = void 0;
const chalk_1 = __importDefault(require("chalk"));
const ora_1 = __importDefault(require("ora"));
const fs_1 = require("fs");
// NOTE(review): compiled CommonJS output of learning.ts — keep in sync by
// rebuilding the TypeScript sources; do not hand-edit logic here.
class LearningCommands {
    // Train the learning system from a JSON file of trajectories.
    static async train(client, options) {
        const spinner = (0, ora_1.default)('Training from trajectories...').start();
        try {
            await client.connect();
            // Load trajectory data from file
            const content = (0, fs_1.readFileSync)(options.file, 'utf-8');
            const data = JSON.parse(content);
            // NOTE(review): epochs is parsed without a radix or NaN check — a
            // non-numeric --epochs flows through as NaN; fix belongs in learning.ts.
            const epochs = parseInt(options.epochs);
            spinner.text = `Training for ${epochs} epochs...`;
            const result = await client.trainFromTrajectories(data, epochs);
            spinner.succeed(chalk_1.default.green('Training completed successfully'));
            console.log(chalk_1.default.bold.blue('\nTraining Results:'));
            console.log(chalk_1.default.gray('─'.repeat(40)));
            console.log(` ${chalk_1.default.green('Epochs:')} ${epochs}`);
            console.log(` ${chalk_1.default.green('Trajectories:')} ${data.length}`);
            console.log(` ${chalk_1.default.green('Final Loss:')} ${result.loss.toFixed(6)}`);
            console.log(` ${chalk_1.default.green('Accuracy:')} ${(result.accuracy * 100).toFixed(2)}%`);
            // Show training progress visualization
            console.log(chalk_1.default.bold.blue('\nLearning Progress:'));
            const progressBar = '█'.repeat(Math.floor(result.accuracy * 20)) +
                '░'.repeat(20 - Math.floor(result.accuracy * 20));
            console.log(` [${chalk_1.default.green(progressBar)}] ${(result.accuracy * 100).toFixed(1)}%`);
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Training failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    // Run a single prediction on a JSON-encoded input vector and print stats.
    static async predict(client, options) {
        const spinner = (0, ora_1.default)('Making prediction...').start();
        try {
            await client.connect();
            const input = JSON.parse(options.input);
            const prediction = await client.predict(input);
            spinner.succeed(chalk_1.default.green('Prediction completed'));
            console.log(chalk_1.default.bold.blue('\nPrediction Result:'));
            console.log(chalk_1.default.gray('─'.repeat(40)));
            console.log(` ${chalk_1.default.green('Input Dimensions:')} ${input.length}`);
            console.log(` ${chalk_1.default.green('Output Dimensions:')} ${prediction.length}`);
            console.log(` ${chalk_1.default.green('Output Vector:')}`);
            // Format output nicely
            const formatted = prediction.slice(0, 10).map(v => v.toFixed(4)).join(', ');
            console.log(` [${formatted}${prediction.length > 10 ? ', ...' : ''}]`);
            // Show stats
            const sum = prediction.reduce((a, b) => a + b, 0);
            const max = Math.max(...prediction);
            const maxIdx = prediction.indexOf(max);
            console.log(chalk_1.default.bold.blue('\nStatistics:'));
            console.log(` ${chalk_1.default.gray('Sum:')} ${sum.toFixed(4)}`);
            console.log(` ${chalk_1.default.gray('Max:')} ${max.toFixed(4)} (index ${maxIdx})`);
            console.log(` ${chalk_1.default.gray('Mean:')} ${(sum / prediction.length).toFixed(4)}`);
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Prediction failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    // Report the learning subsystem's state via the learning_status() SQL function.
    static async status(client) {
        const spinner = (0, ora_1.default)('Fetching learning status...').start();
        try {
            await client.connect();
            // Get learning system status
            const result = await client.query('SELECT * FROM learning_status()');
            spinner.stop();
            const status = result[0];
            console.log(chalk_1.default.bold.blue('\nLearning System Status:'));
            console.log(chalk_1.default.gray('─'.repeat(40)));
            if (status) {
                console.log(` ${chalk_1.default.green('Models:')} ${status.model_count}`);
                console.log(` ${chalk_1.default.green('Trajectories:')} ${status.trajectory_count}`);
                console.log(` ${chalk_1.default.green('Last Training:')} ${status.last_training}`);
                console.log(` ${chalk_1.default.green('Current Accuracy:')} ${(status.accuracy * 100).toFixed(2)}%`);
            }
            else {
                console.log(chalk_1.default.yellow(' No learning models found'));
                console.log(chalk_1.default.gray(' Train with: ruvector-pg learning train -f <trajectories.json>'));
            }
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Failed to get status'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    // Print static help text describing the learning subsystem.
    static showInfo() {
        console.log(chalk_1.default.bold.blue('\nSelf-Learning / ReasoningBank System:'));
        console.log(chalk_1.default.gray('─'.repeat(50)));
        console.log(`
 ${chalk_1.default.yellow('Overview:')}
 The self-learning system enables the database to learn from
 past query trajectories and improve over time. Based on the
 ReasoningBank architecture.
 ${chalk_1.default.yellow('Trajectory Format:')}
 A trajectory is a sequence of (state, action, outcome) tuples
 that represent decision points during query execution.
 Example trajectory file (trajectories.json):
 ${chalk_1.default.gray(`[
 {
 "state": [0.1, 0.2, ...], // Current context vector
 "action": "expand_hnsw", // Action taken
 "outcome": "success", // Result
 "reward": 0.95 // Performance score
 },
 ...
 ]`)}
 ${chalk_1.default.yellow('Commands:')}
 ${chalk_1.default.green('ruvector-pg learning train')} - Train from trajectory data
 ${chalk_1.default.green('ruvector-pg learning predict')} - Make predictions
 ${chalk_1.default.green('ruvector-pg learning status')} - Check system status
 ${chalk_1.default.yellow('Algorithm:')}
 Uses Decision Transformer architecture to learn optimal
 action sequences from reward-conditioned trajectory data.
 `);
    }
}
exports.LearningCommands = LearningCommands;
exports.default = LearningCommands;
//# sourceMappingURL=learning.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,182 @@
/**
* Learning Commands
* CLI commands for self-learning and ReasoningBank operations
*/
import chalk from 'chalk';
import ora from 'ora';
import Table from 'cli-table3';
import { readFileSync } from 'fs';
import type { RuVectorClient } from '../client.js';
/** Options for the `learning train` CLI command. */
export interface TrainOptions {
  /** Path to a JSON file containing an array of trajectories. */
  file: string;
  /** Number of training epochs (raw CLI flag value, parsed with parseInt). */
  epochs: string;
}
/** Options for the `learning predict` CLI command. */
export interface PredictOptions {
  /** JSON-encoded input vector, e.g. "[0.1, 0.2, 0.3]". */
  input: string;
}
export class LearningCommands {
static async train(
client: RuVectorClient,
options: TrainOptions
): Promise<void> {
const spinner = ora('Training from trajectories...').start();
try {
await client.connect();
// Load trajectory data from file
const content = readFileSync(options.file, 'utf-8');
const data = JSON.parse(content) as Record<string, unknown>[];
const epochs = parseInt(options.epochs);
spinner.text = `Training for ${epochs} epochs...`;
const result = await client.trainFromTrajectories(data, epochs);
spinner.succeed(chalk.green('Training completed successfully'));
console.log(chalk.bold.blue('\nTraining Results:'));
console.log(chalk.gray('─'.repeat(40)));
console.log(` ${chalk.green('Epochs:')} ${epochs}`);
console.log(` ${chalk.green('Trajectories:')} ${data.length}`);
console.log(` ${chalk.green('Final Loss:')} ${result.loss.toFixed(6)}`);
console.log(` ${chalk.green('Accuracy:')} ${(result.accuracy * 100).toFixed(2)}%`);
// Show training progress visualization
console.log(chalk.bold.blue('\nLearning Progress:'));
const progressBar = '█'.repeat(Math.floor(result.accuracy * 20)) +
'░'.repeat(20 - Math.floor(result.accuracy * 20));
console.log(` [${chalk.green(progressBar)}] ${(result.accuracy * 100).toFixed(1)}%`);
} catch (err) {
spinner.fail(chalk.red('Training failed'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
static async predict(
client: RuVectorClient,
options: PredictOptions
): Promise<void> {
const spinner = ora('Making prediction...').start();
try {
await client.connect();
const input = JSON.parse(options.input) as number[];
const prediction = await client.predict(input);
spinner.succeed(chalk.green('Prediction completed'));
console.log(chalk.bold.blue('\nPrediction Result:'));
console.log(chalk.gray('─'.repeat(40)));
console.log(` ${chalk.green('Input Dimensions:')} ${input.length}`);
console.log(` ${chalk.green('Output Dimensions:')} ${prediction.length}`);
console.log(` ${chalk.green('Output Vector:')}`);
// Format output nicely
const formatted = prediction.slice(0, 10).map(v => v.toFixed(4)).join(', ');
console.log(` [${formatted}${prediction.length > 10 ? ', ...' : ''}]`);
// Show stats
const sum = prediction.reduce((a, b) => a + b, 0);
const max = Math.max(...prediction);
const maxIdx = prediction.indexOf(max);
console.log(chalk.bold.blue('\nStatistics:'));
console.log(` ${chalk.gray('Sum:')} ${sum.toFixed(4)}`);
console.log(` ${chalk.gray('Max:')} ${max.toFixed(4)} (index ${maxIdx})`);
console.log(` ${chalk.gray('Mean:')} ${(sum / prediction.length).toFixed(4)}`);
} catch (err) {
spinner.fail(chalk.red('Prediction failed'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
static async status(client: RuVectorClient): Promise<void> {
const spinner = ora('Fetching learning status...').start();
try {
await client.connect();
// Get learning system status
const result = await client.query<{
model_count: number;
trajectory_count: number;
last_training: string;
accuracy: number;
}>(
'SELECT * FROM learning_status()'
);
spinner.stop();
const status = result[0];
console.log(chalk.bold.blue('\nLearning System Status:'));
console.log(chalk.gray('─'.repeat(40)));
if (status) {
console.log(` ${chalk.green('Models:')} ${status.model_count}`);
console.log(` ${chalk.green('Trajectories:')} ${status.trajectory_count}`);
console.log(` ${chalk.green('Last Training:')} ${status.last_training}`);
console.log(` ${chalk.green('Current Accuracy:')} ${(status.accuracy * 100).toFixed(2)}%`);
} else {
console.log(chalk.yellow(' No learning models found'));
console.log(chalk.gray(' Train with: ruvector-pg learning train -f <trajectories.json>'));
}
} catch (err) {
spinner.fail(chalk.red('Failed to get status'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
/**
 * Print static reference documentation for the self-learning /
 * ReasoningBank subsystem. Pure console output; no database connection.
 */
static showInfo(): void {
  console.log(chalk.bold.blue('\nSelf-Learning / ReasoningBank System:'));
  console.log(chalk.gray('─'.repeat(50)));
  // NOTE(review): user-facing help text — keep the command names below in
  // sync with the actual CLI (train / predict / status).
  console.log(`
${chalk.yellow('Overview:')}
The self-learning system enables the database to learn from
past query trajectories and improve over time. Based on the
ReasoningBank architecture.
${chalk.yellow('Trajectory Format:')}
A trajectory is a sequence of (state, action, outcome) tuples
that represent decision points during query execution.
Example trajectory file (trajectories.json):
${chalk.gray(`[
{
"state": [0.1, 0.2, ...],     // Current context vector
"action": "expand_hnsw",       // Action taken
"outcome": "success",          // Result
"reward": 0.95                 // Performance score
},
...
]`)}
${chalk.yellow('Commands:')}
${chalk.green('ruvector-pg learning train')}    - Train from trajectory data
${chalk.green('ruvector-pg learning predict')}  - Make predictions
${chalk.green('ruvector-pg learning status')}   - Check system status
${chalk.yellow('Algorithm:')}
Uses Decision Transformer architecture to learn optimal
action sequences from reward-conditioned trajectory data.
`);
}
}
export default LearningCommands;

View File

@@ -0,0 +1,26 @@
/**
 * Quantization Commands
 * CLI commands for vector quantization operations (binary, scalar, product)
 */
// NOTE(review): generated declaration file (tsc output, see quantization.d.ts.map)
// — edit quantization.ts instead; manual changes here are overwritten by the build.
import type { RuVectorClient } from '../client.js';
/** Options for `quantization binary`: a JSON-encoded numeric vector. */
export interface BinaryQuantizeOptions {
  vector: string;
}
/** Options for `quantization scalar` (SQ8): a JSON-encoded numeric vector. */
export interface ScalarQuantizeOptions {
  vector: string;
}
/** Options for quantized similarity search. */
export interface QuantizedSearchOptions {
  table: string;
  query: string;
  /** Optional top-K result count (CLI flag value, kept as a string). */
  topK?: string;
  quantType?: 'binary' | 'scalar';
}
/** CLI entry points for vector quantization operations. */
export declare class QuantizationCommands {
  static binaryQuantize(client: RuVectorClient, options: BinaryQuantizeOptions): Promise<void>;
  static scalarQuantize(client: RuVectorClient, options: ScalarQuantizeOptions): Promise<void>;
  static stats(client: RuVectorClient): Promise<void>;
  static compare(client: RuVectorClient, vector: string): Promise<void>;
  static showHelp(): void;
}
export default QuantizationCommands;
//# sourceMappingURL=quantization.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"quantization.d.ts","sourceRoot":"","sources":["quantization.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAKH,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAEnD,MAAM,WAAW,qBAAqB;IACpC,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,qBAAqB;IACpC,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,sBAAsB;IACrC,KAAK,EAAE,MAAM,CAAC;IACd,KAAK,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,SAAS,CAAC,EAAE,QAAQ,GAAG,QAAQ,CAAC;CACjC;AAED,qBAAa,oBAAoB;WAClB,cAAc,CACzB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,qBAAqB,GAC7B,OAAO,CAAC,IAAI,CAAC;WA6BH,cAAc,CACzB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,qBAAqB,GAC7B,OAAO,CAAC,IAAI,CAAC;WAmCH,KAAK,CAAC,MAAM,EAAE,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC;WA0C5C,OAAO,CAClB,MAAM,EAAE,cAAc,EACtB,MAAM,EAAE,MAAM,GACb,OAAO,CAAC,IAAI,CAAC;IAiDhB,MAAM,CAAC,QAAQ,IAAI,IAAI;CA6CxB;AAED,eAAe,oBAAoB,CAAC"}

View File

@@ -0,0 +1,185 @@
"use strict";
/**
* Quantization Commands
* CLI commands for vector quantization operations (binary, scalar, product)
*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.QuantizationCommands = void 0;
const chalk_1 = __importDefault(require("chalk"));
const ora_1 = __importDefault(require("ora"));
const cli_table3_1 = __importDefault(require("cli-table3"));
// NOTE(review): compiled artifact of quantization.ts (see quantization.js.map).
// Edit the TypeScript source instead; manual changes here are overwritten by the
// build and would desync this file from its source map.
class QuantizationCommands {
    // Binary-quantize a vector (1 bit per dimension) and print size stats.
    static async binaryQuantize(client, options) {
        const spinner = (0, ora_1.default)('Binary quantizing vector...').start();
        try {
            await client.connect();
            const vector = JSON.parse(options.vector);
            const result = await client.binaryQuantize(vector);
            spinner.succeed(chalk_1.default.green('Binary quantization completed'));
            console.log(chalk_1.default.bold.blue('\nBinary Quantization Result:'));
            console.log(chalk_1.default.gray('-'.repeat(50)));
            console.log(` ${chalk_1.default.green('Original Dimension:')} ${vector.length}`);
            console.log(` ${chalk_1.default.green('Quantized Bytes:')} ${result.length}`);
            // f32 input occupies vector.length * 4 bytes.
            console.log(` ${chalk_1.default.green('Compression Ratio:')} ${(vector.length * 4 / result.length).toFixed(1)}x`);
            console.log(` ${chalk_1.default.green('Memory Savings:')} ${((1 - result.length / (vector.length * 4)) * 100).toFixed(1)}%`);
            // Show first few bytes as hex
            const hexPreview = result.slice(0, 16).map((b) => b.toString(16).padStart(2, '0')).join(' ');
            console.log(` ${chalk_1.default.green('Preview (hex):')} ${hexPreview}${result.length > 16 ? '...' : ''}`);
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Binary quantization failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    // Scalar-quantize (SQ8) and print scale/offset plus a data preview.
    static async scalarQuantize(client, options) {
        const spinner = (0, ora_1.default)('Scalar quantizing vector (SQ8)...').start();
        try {
            await client.connect();
            const vector = JSON.parse(options.vector);
            const result = await client.scalarQuantize(vector);
            spinner.succeed(chalk_1.default.green('Scalar quantization completed'));
            console.log(chalk_1.default.bold.blue('\nScalar Quantization (SQ8) Result:'));
            console.log(chalk_1.default.gray('-'.repeat(50)));
            console.log(` ${chalk_1.default.green('Original Dimension:')} ${vector.length}`);
            console.log(` ${chalk_1.default.green('Quantized Elements:')} ${result.data.length}`);
            console.log(` ${chalk_1.default.green('Scale Factor:')} ${result.scale.toFixed(6)}`);
            console.log(` ${chalk_1.default.green('Offset:')} ${result.offset.toFixed(6)}`);
            console.log(` ${chalk_1.default.green('Compression Ratio:')} 4x (32-bit to 8-bit)`);
            console.log(` ${chalk_1.default.green('Memory Savings:')} 75%`);
            // Show reconstruction formula
            console.log(chalk_1.default.bold.blue('\nReconstruction:'));
            console.log(` ${chalk_1.default.gray('original[i] = quantized[i] * scale + offset')}`);
            // Show preview
            const preview = result.data.slice(0, 10).join(', ');
            console.log(` ${chalk_1.default.green('Quantized Preview:')} [${preview}${result.data.length > 10 ? ', ...' : ''}]`);
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Scalar quantization failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    // Print a static comparison table plus live quantization memory stats.
    static async stats(client) {
        const spinner = (0, ora_1.default)('Fetching quantization statistics...').start();
        try {
            await client.connect();
            const stats = await client.quantizationStats();
            spinner.stop();
            console.log(chalk_1.default.bold.blue('\nQuantization Statistics:'));
            console.log(chalk_1.default.gray('-'.repeat(50)));
            const table = new cli_table3_1.default({
                head: [
                    chalk_1.default.cyan('Type'),
                    chalk_1.default.cyan('Bits/Dim'),
                    chalk_1.default.cyan('Compression'),
                    chalk_1.default.cyan('Accuracy Loss'),
                    chalk_1.default.cyan('Speed Boost'),
                ],
                colWidths: [15, 12, 14, 15, 14],
            });
            table.push(['Binary (BQ)', '1', '32x', '~20-30%', '~10-20x'], ['Scalar (SQ8)', '8', '4x', '~1-5%', '~2-4x'], ['Product (PQ)', 'Variable', '8-32x', '~5-15%', '~5-10x']);
            console.log(table.toString());
            console.log(chalk_1.default.bold.blue('\nMemory Usage:'));
            console.log(` ${chalk_1.default.green('Quantization Tables:')} ${stats.quantization_tables_mb.toFixed(2)} MB`);
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Failed to get stats'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    // Quantize one vector with each available method and tabulate the sizes.
    static async compare(client, vector) {
        const spinner = (0, ora_1.default)('Comparing quantization methods...').start();
        try {
            await client.connect();
            const vec = JSON.parse(vector);
            const dim = vec.length;
            // Get all quantization results
            const binary = await client.binaryQuantize(vec);
            const scalar = await client.scalarQuantize(vec);
            spinner.stop();
            console.log(chalk_1.default.bold.blue('\nQuantization Comparison:'));
            console.log(chalk_1.default.gray('-'.repeat(60)));
            console.log(` ${chalk_1.default.green('Original Vector:')} ${dim} dimensions, ${dim * 4} bytes`);
            const table = new cli_table3_1.default({
                head: [
                    chalk_1.default.cyan('Method'),
                    chalk_1.default.cyan('Size'),
                    chalk_1.default.cyan('Compression'),
                    chalk_1.default.cyan('Type'),
                ],
                colWidths: [18, 15, 15, 20],
            });
            // The +8 accounts for SQ8 metadata (scale + offset), presumably two
            // 4-byte floats — confirm against the extension's storage layout.
            table.push(['Original (f32)', `${dim * 4} bytes`, '1x', '32-bit float'], ['Binary (BQ)', `${binary.length} bytes`, `${(dim * 4 / binary.length).toFixed(1)}x`, '1-bit per dim'], ['Scalar (SQ8)', `${scalar.data.length + 8} bytes`, `${(dim * 4 / (scalar.data.length + 8)).toFixed(1)}x`, '8-bit + metadata']);
            console.log(table.toString());
            console.log(chalk_1.default.bold.blue('\nTrade-offs:'));
            console.log(` ${chalk_1.default.yellow('Binary:')} Best compression, lowest accuracy, fastest`);
            console.log(` ${chalk_1.default.yellow('Scalar:')} Good balance of compression and accuracy`);
            console.log(` ${chalk_1.default.yellow('Product:')} Variable, best for specific use cases`);
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Comparison failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    // Static help text; makes no database connection.
    static showHelp() {
        console.log(chalk_1.default.bold.blue('\nVector Quantization:'));
        console.log(chalk_1.default.gray('-'.repeat(60)));
        console.log(`
${chalk_1.default.yellow('Overview:')}
Quantization reduces vector storage size and speeds up search
by representing vectors with fewer bits per dimension.
${chalk_1.default.yellow('Quantization Types:')}
${chalk_1.default.green('Binary Quantization (BQ)')}
- Converts each dimension to 1 bit (sign)
- 32x memory reduction
- 10-20x search speedup
- ~20-30% accuracy loss
- Best for: Large-scale approximate search
${chalk_1.default.green('Scalar Quantization (SQ8)')}
- Converts 32-bit floats to 8-bit integers
- 4x memory reduction
- 2-4x search speedup
- ~1-5% accuracy loss
- Best for: Balanced accuracy/efficiency
${chalk_1.default.green('Product Quantization (PQ)')}
- Splits vector into subvectors, each quantized separately
- 8-32x memory reduction
- 5-10x search speedup
- ~5-15% accuracy loss
- Best for: Medium-scale with accuracy needs
${chalk_1.default.yellow('Commands:')}
${chalk_1.default.green('quantization binary')}   - Binary quantize a vector
${chalk_1.default.green('quantization scalar')}   - Scalar quantize (SQ8)
${chalk_1.default.green('quantization compare')}  - Compare all methods
${chalk_1.default.green('quantization stats')}    - View quantization statistics
${chalk_1.default.yellow('When to Use:')}
- Dataset > 1M vectors: Consider BQ or PQ
- Need < 5% accuracy loss: Use SQ8
- Filtering important: Use BQ with re-ranking
- Memory constrained: Use BQ or PQ
`);
    }
}
exports.QuantizationCommands = QuantizationCommands;
exports.default = QuantizationCommands;
//# sourceMappingURL=quantization.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,238 @@
/**
* Quantization Commands
* CLI commands for vector quantization operations (binary, scalar, product)
*/
import chalk from 'chalk';
import ora from 'ora';
import Table from 'cli-table3';
import type { RuVectorClient } from '../client.js';
/** Options for `quantization binary`: a JSON-encoded numeric vector. */
export interface BinaryQuantizeOptions {
  vector: string;
}
/** Options for `quantization scalar` (SQ8): a JSON-encoded numeric vector. */
export interface ScalarQuantizeOptions {
  vector: string;
}
/** Options for quantized similarity search commands. */
export interface QuantizedSearchOptions {
  table: string;
  query: string;
  /** Optional top-K result count (CLI flag value, kept as a string). */
  topK?: string;
  quantType?: 'binary' | 'scalar';
}
/**
 * CLI entry points for vector quantization operations (binary, scalar,
 * product). Each command manages the client connection lifetime itself:
 * connect, run, print, and always disconnect.
 */
export class QuantizationCommands {
  /**
   * Parse a JSON-encoded vector option into a number[].
   *
   * `JSON.parse` alone accepts any JSON value ("5", "{}", "[]"), which
   * previously flowed into the client call and surfaced as a confusing
   * downstream error (e.g. `vector.length` being undefined). Validate up
   * front and fail with an actionable message instead.
   *
   * @param json - Text expected to encode a non-empty array of finite numbers.
   * @returns The parsed numeric vector.
   * @throws Error when the text is invalid JSON or not a numeric array.
   */
  private static parseVector(json: string): number[] {
    let parsed: unknown;
    try {
      parsed = JSON.parse(json);
    } catch {
      throw new Error('Vector is not valid JSON; expected e.g. "[0.1, 0.2, 0.3]"');
    }
    const isNumericArray =
      Array.isArray(parsed) &&
      parsed.length > 0 &&
      parsed.every((v) => typeof v === 'number' && Number.isFinite(v));
    if (!isNumericArray) {
      throw new Error('Vector must be a non-empty JSON array of finite numbers');
    }
    return parsed as number[];
  }

  /**
   * Render an unknown thrown value as a printable message.
   * Avoids printing "undefined" when a non-Error value is thrown.
   */
  private static errorText(err: unknown): string {
    return err instanceof Error ? err.message : String(err);
  }

  /**
   * Binary-quantize a vector (1 bit per dimension) and print size statistics.
   *
   * @param client - ruvector client; connected and disconnected here.
   * @param options - Holds the JSON-encoded input vector.
   */
  static async binaryQuantize(
    client: RuVectorClient,
    options: BinaryQuantizeOptions
  ): Promise<void> {
    const spinner = ora('Binary quantizing vector...').start();
    try {
      await client.connect();
      const vector = QuantizationCommands.parseVector(options.vector);
      const result = await client.binaryQuantize(vector);
      spinner.succeed(chalk.green('Binary quantization completed'));
      console.log(chalk.bold.blue('\nBinary Quantization Result:'));
      console.log(chalk.gray('-'.repeat(50)));
      console.log(` ${chalk.green('Original Dimension:')} ${vector.length}`);
      console.log(` ${chalk.green('Quantized Bytes:')} ${result.length}`);
      // f32 input occupies vector.length * 4 bytes.
      console.log(` ${chalk.green('Compression Ratio:')} ${(vector.length * 4 / result.length).toFixed(1)}x`);
      console.log(` ${chalk.green('Memory Savings:')} ${((1 - result.length / (vector.length * 4)) * 100).toFixed(1)}%`);
      // Show first few bytes as hex
      const hexPreview = result.slice(0, 16).map((b: number) => b.toString(16).padStart(2, '0')).join(' ');
      console.log(` ${chalk.green('Preview (hex):')} ${hexPreview}${result.length > 16 ? '...' : ''}`);
    } catch (err) {
      spinner.fail(chalk.red('Binary quantization failed'));
      console.error(chalk.red(QuantizationCommands.errorText(err)));
    } finally {
      await client.disconnect();
    }
  }

  /**
   * Scalar-quantize (SQ8: 32-bit float to 8-bit int) and print the scale,
   * offset, and a preview of the quantized data.
   *
   * @param client - ruvector client; connected and disconnected here.
   * @param options - Holds the JSON-encoded input vector.
   */
  static async scalarQuantize(
    client: RuVectorClient,
    options: ScalarQuantizeOptions
  ): Promise<void> {
    const spinner = ora('Scalar quantizing vector (SQ8)...').start();
    try {
      await client.connect();
      const vector = QuantizationCommands.parseVector(options.vector);
      const result = await client.scalarQuantize(vector);
      spinner.succeed(chalk.green('Scalar quantization completed'));
      console.log(chalk.bold.blue('\nScalar Quantization (SQ8) Result:'));
      console.log(chalk.gray('-'.repeat(50)));
      console.log(` ${chalk.green('Original Dimension:')} ${vector.length}`);
      console.log(` ${chalk.green('Quantized Elements:')} ${result.data.length}`);
      console.log(` ${chalk.green('Scale Factor:')} ${result.scale.toFixed(6)}`);
      console.log(` ${chalk.green('Offset:')} ${result.offset.toFixed(6)}`);
      console.log(` ${chalk.green('Compression Ratio:')} 4x (32-bit to 8-bit)`);
      console.log(` ${chalk.green('Memory Savings:')} 75%`);
      // Show reconstruction formula
      console.log(chalk.bold.blue('\nReconstruction:'));
      console.log(` ${chalk.gray('original[i] = quantized[i] * scale + offset')}`);
      // Show preview
      const preview = result.data.slice(0, 10).join(', ');
      console.log(` ${chalk.green('Quantized Preview:')} [${preview}${result.data.length > 10 ? ', ...' : ''}]`);
    } catch (err) {
      spinner.fail(chalk.red('Scalar quantization failed'));
      console.error(chalk.red(QuantizationCommands.errorText(err)));
    } finally {
      await client.disconnect();
    }
  }

  /**
   * Print a static comparison table of quantization methods plus the
   * server-reported quantization table memory usage.
   *
   * @param client - ruvector client; connected and disconnected here.
   */
  static async stats(client: RuVectorClient): Promise<void> {
    const spinner = ora('Fetching quantization statistics...').start();
    try {
      await client.connect();
      const stats = await client.quantizationStats();
      spinner.stop();
      console.log(chalk.bold.blue('\nQuantization Statistics:'));
      console.log(chalk.gray('-'.repeat(50)));
      const table = new Table({
        head: [
          chalk.cyan('Type'),
          chalk.cyan('Bits/Dim'),
          chalk.cyan('Compression'),
          chalk.cyan('Accuracy Loss'),
          chalk.cyan('Speed Boost'),
        ],
        colWidths: [15, 12, 14, 15, 14],
      });
      table.push(
        ['Binary (BQ)', '1', '32x', '~20-30%', '~10-20x'],
        ['Scalar (SQ8)', '8', '4x', '~1-5%', '~2-4x'],
        ['Product (PQ)', 'Variable', '8-32x', '~5-15%', '~5-10x'],
      );
      console.log(table.toString());
      console.log(chalk.bold.blue('\nMemory Usage:'));
      console.log(` ${chalk.green('Quantization Tables:')} ${stats.quantization_tables_mb.toFixed(2)} MB`);
    } catch (err) {
      spinner.fail(chalk.red('Failed to get stats'));
      console.error(chalk.red(QuantizationCommands.errorText(err)));
    } finally {
      await client.disconnect();
    }
  }

  /**
   * Quantize one vector with each available method and tabulate the sizes
   * and compression ratios side by side.
   *
   * @param client - ruvector client; connected and disconnected here.
   * @param vector - JSON-encoded numeric vector.
   */
  static async compare(
    client: RuVectorClient,
    vector: string
  ): Promise<void> {
    const spinner = ora('Comparing quantization methods...').start();
    try {
      await client.connect();
      const vec = QuantizationCommands.parseVector(vector);
      const dim = vec.length;
      // Get all quantization results
      const binary = await client.binaryQuantize(vec);
      const scalar = await client.scalarQuantize(vec);
      spinner.stop();
      console.log(chalk.bold.blue('\nQuantization Comparison:'));
      console.log(chalk.gray('-'.repeat(60)));
      console.log(` ${chalk.green('Original Vector:')} ${dim} dimensions, ${dim * 4} bytes`);
      const table = new Table({
        head: [
          chalk.cyan('Method'),
          chalk.cyan('Size'),
          chalk.cyan('Compression'),
          chalk.cyan('Type'),
        ],
        colWidths: [18, 15, 15, 20],
      });
      // The +8 accounts for SQ8 metadata (scale + offset), presumably two
      // 4-byte floats — confirm against the extension's storage layout.
      table.push(
        ['Original (f32)', `${dim * 4} bytes`, '1x', '32-bit float'],
        ['Binary (BQ)', `${binary.length} bytes`, `${(dim * 4 / binary.length).toFixed(1)}x`, '1-bit per dim'],
        ['Scalar (SQ8)', `${scalar.data.length + 8} bytes`, `${(dim * 4 / (scalar.data.length + 8)).toFixed(1)}x`, '8-bit + metadata'],
      );
      console.log(table.toString());
      console.log(chalk.bold.blue('\nTrade-offs:'));
      console.log(` ${chalk.yellow('Binary:')} Best compression, lowest accuracy, fastest`);
      console.log(` ${chalk.yellow('Scalar:')} Good balance of compression and accuracy`);
      console.log(` ${chalk.yellow('Product:')} Variable, best for specific use cases`);
    } catch (err) {
      spinner.fail(chalk.red('Comparison failed'));
      console.error(chalk.red(QuantizationCommands.errorText(err)));
    } finally {
      await client.disconnect();
    }
  }

  /** Print static help text; makes no database connection. */
  static showHelp(): void {
    console.log(chalk.bold.blue('\nVector Quantization:'));
    console.log(chalk.gray('-'.repeat(60)));
    console.log(`
${chalk.yellow('Overview:')}
Quantization reduces vector storage size and speeds up search
by representing vectors with fewer bits per dimension.
${chalk.yellow('Quantization Types:')}
${chalk.green('Binary Quantization (BQ)')}
- Converts each dimension to 1 bit (sign)
- 32x memory reduction
- 10-20x search speedup
- ~20-30% accuracy loss
- Best for: Large-scale approximate search
${chalk.green('Scalar Quantization (SQ8)')}
- Converts 32-bit floats to 8-bit integers
- 4x memory reduction
- 2-4x search speedup
- ~1-5% accuracy loss
- Best for: Balanced accuracy/efficiency
${chalk.green('Product Quantization (PQ)')}
- Splits vector into subvectors, each quantized separately
- 8-32x memory reduction
- 5-10x search speedup
- ~5-15% accuracy loss
- Best for: Medium-scale with accuracy needs
${chalk.yellow('Commands:')}
${chalk.green('quantization binary')}   - Binary quantize a vector
${chalk.green('quantization scalar')}   - Scalar quantize (SQ8)
${chalk.green('quantization compare')}  - Compare all methods
${chalk.green('quantization stats')}    - View quantization statistics
${chalk.yellow('When to Use:')}
- Dataset > 1M vectors: Consider BQ or PQ
- Need < 5% accuracy loss: Use SQ8
- Filtering important: Use BQ with re-ranking
- Memory constrained: Use BQ or PQ
`);
  }
}
export default QuantizationCommands;

View File

@@ -0,0 +1,47 @@
/**
 * Routing/Agent Commands
 * CLI commands for Tiny Dancer agent routing and management
 */
// NOTE(review): generated declaration file (tsc output, see routing.d.ts.map)
// — edit routing.ts instead; manual changes here are overwritten by the build.
import type { RuVectorClient } from '../client.js';
/** Options for registering an agent with scalar metrics (all CLI strings). */
export interface RegisterAgentOptions {
  name: string;
  type: string;
  /** Comma-separated capability list, e.g. "code,translation". */
  capabilities: string;
  cost: string;
  latency: string;
  quality: string;
}
/** Options for registering an agent from a full JSON config blob. */
export interface RegisterAgentFullOptions {
  config: string;
}
/** Options for recording one observed request outcome for an agent. */
export interface UpdateMetricsOptions {
  name: string;
  latency: string;
  success: boolean;
  quality?: string;
}
/** Options for routing a request to the best agent. */
export interface RouteOptions {
  /** JSON-encoded embedding vector. */
  embedding: string;
  optimizeFor?: string;
  /** Optional JSON-encoded constraint object. */
  constraints?: string;
}
/** Options for finding agents by capability. */
export interface FindAgentsOptions {
  capability: string;
  limit?: string;
}
/** CLI entry points for the Tiny Dancer agent routing system. */
export declare class RoutingCommands {
  static registerAgent(client: RuVectorClient, options: RegisterAgentOptions): Promise<void>;
  static registerAgentFull(client: RuVectorClient, options: RegisterAgentFullOptions): Promise<void>;
  static updateMetrics(client: RuVectorClient, options: UpdateMetricsOptions): Promise<void>;
  static removeAgent(client: RuVectorClient, name: string): Promise<void>;
  static setActive(client: RuVectorClient, name: string, active: boolean): Promise<void>;
  static route(client: RuVectorClient, options: RouteOptions): Promise<void>;
  static listAgents(client: RuVectorClient): Promise<void>;
  static getAgent(client: RuVectorClient, name: string): Promise<void>;
  static findByCapability(client: RuVectorClient, options: FindAgentsOptions): Promise<void>;
  static stats(client: RuVectorClient): Promise<void>;
  static clearAgents(client: RuVectorClient): Promise<void>;
  static showHelp(): void;
}
export default RoutingCommands;
//# sourceMappingURL=routing.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"routing.d.ts","sourceRoot":"","sources":["routing.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAKH,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAEnD,MAAM,WAAW,oBAAoB;IACnC,IAAI,EAAE,MAAM,CAAC;IACb,IAAI,EAAE,MAAM,CAAC;IACb,YAAY,EAAE,MAAM,CAAC;IACrB,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,MAAM,CAAC;CACjB;AAED,MAAM,WAAW,wBAAwB;IACvC,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,MAAM,WAAW,oBAAoB;IACnC,IAAI,EAAE,MAAM,CAAC;IACb,OAAO,EAAE,MAAM,CAAC;IAChB,OAAO,EAAE,OAAO,CAAC;IACjB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,MAAM,WAAW,YAAY;IAC3B,SAAS,EAAE,MAAM,CAAC;IAClB,WAAW,CAAC,EAAE,MAAM,CAAC;IACrB,WAAW,CAAC,EAAE,MAAM,CAAC;CACtB;AAED,MAAM,WAAW,iBAAiB;IAChC,UAAU,EAAE,MAAM,CAAC;IACnB,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,qBAAa,eAAe;WACb,aAAa,CACxB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,oBAAoB,GAC5B,OAAO,CAAC,IAAI,CAAC;WAmCH,iBAAiB,CAC5B,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,wBAAwB,GAChC,OAAO,CAAC,IAAI,CAAC;WAkBH,aAAa,CACxB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,oBAAoB,GAC5B,OAAO,CAAC,IAAI,CAAC;WA4BH,WAAW,CACtB,MAAM,EAAE,cAAc,EACtB,IAAI,EAAE,MAAM,GACX,OAAO,CAAC,IAAI,CAAC;WAeH,SAAS,CACpB,MAAM,EAAE,cAAc,EACtB,IAAI,EAAE,MAAM,EACZ,MAAM,EAAE,OAAO,GACd,OAAO,CAAC,IAAI,CAAC;WAeH,KAAK,CAChB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,YAAY,GACpB,OAAO,CAAC,IAAI,CAAC;WAyCH,UAAU,CAAC,MAAM,EAAE,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC;WAmDjD,QAAQ,CAAC,MAAM,EAAE,cAAc,EAAE,IAAI,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;WAmC7D,gBAAgB,CAC3B,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,iBAAiB,GACzB,OAAO,CAAC,IAAI,CAAC;WA8CH,KAAK,CAAC,MAAM,EAAE,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC;WAwB5C,WAAW,CAAC,MAAM,EAAE,cAAc,GAAG,OAAO,CAAC,IAAI,CAAC;IAe/D,MAAM,CAAC,QAAQ,IAAI,IAAI;CAmDxB;AAED,eAAe,eAAe,CAAC"}

View File

@@ -0,0 +1,342 @@
"use strict";
/**
* Routing/Agent Commands
* CLI commands for Tiny Dancer agent routing and management
*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.RoutingCommands = void 0;
const chalk_1 = __importDefault(require("chalk"));
const ora_1 = __importDefault(require("ora"));
const cli_table3_1 = __importDefault(require("cli-table3"));
class RoutingCommands {
// Register a new agent with scalar cost/latency/quality metrics, then echo it.
// NOTE(review): compiled artifact of routing.ts — edit the source, not this file.
static async registerAgent(client, options) {
    const spinner = (0, ora_1.default)(`Registering agent '${options.name}'...`).start();
    try {
        await client.connect();
        // Capabilities arrive as a comma-separated CLI string.
        const capabilities = options.capabilities.split(',').map(c => c.trim());
        await client.registerAgent(options.name, options.type, capabilities, parseFloat(options.cost), parseFloat(options.latency), parseFloat(options.quality));
        spinner.succeed(chalk_1.default.green(`Agent '${options.name}' registered successfully`));
        console.log(chalk_1.default.bold.blue('\nAgent Details:'));
        console.log(chalk_1.default.gray('-'.repeat(40)));
        console.log(` ${chalk_1.default.green('Name:')} ${options.name}`);
        console.log(` ${chalk_1.default.green('Type:')} ${options.type}`);
        console.log(` ${chalk_1.default.green('Capabilities:')} ${capabilities.join(', ')}`);
        console.log(` ${chalk_1.default.green('Cost/Request:')} $${options.cost}`);
        console.log(` ${chalk_1.default.green('Avg Latency:')} ${options.latency}ms`);
        console.log(` ${chalk_1.default.green('Quality Score:')} ${options.quality}`);
    }
    catch (err) {
        spinner.fail(chalk_1.default.red('Failed to register agent'));
        console.error(chalk_1.default.red(err.message));
    }
    finally {
        await client.disconnect();
    }
}
// Register an agent from a complete JSON config blob (advanced path).
static async registerAgentFull(client, options) {
    const spinner = (0, ora_1.default)('Registering agent with full config...').start();
    try {
        await client.connect();
        // Parsed result is passed through as-is; schema is validated server-side
        // (presumably — confirm in the client implementation).
        const config = JSON.parse(options.config);
        await client.registerAgentFull(config);
        spinner.succeed(chalk_1.default.green(`Agent '${config.name}' registered successfully`));
    }
    catch (err) {
        spinner.fail(chalk_1.default.red('Failed to register agent'));
        console.error(chalk_1.default.red(err.message));
    }
    finally {
        await client.disconnect();
    }
}
// Record a single observed request outcome (latency/success/quality) for an agent.
static async updateMetrics(client, options) {
    const spinner = (0, ora_1.default)(`Updating metrics for '${options.name}'...`).start();
    try {
        await client.connect();
        // quality is optional; undefined presumably leaves the stored score
        // untouched — confirm in the client implementation.
        await client.updateAgentMetrics(options.name, parseFloat(options.latency), options.success, options.quality ? parseFloat(options.quality) : undefined);
        spinner.succeed(chalk_1.default.green('Metrics updated'));
        console.log(` ${chalk_1.default.green('Latency:')} ${options.latency}ms`);
        console.log(` ${chalk_1.default.green('Success:')} ${options.success}`);
        if (options.quality) {
            console.log(` ${chalk_1.default.green('Quality:')} ${options.quality}`);
        }
    }
    catch (err) {
        spinner.fail(chalk_1.default.red('Failed to update metrics'));
        console.error(chalk_1.default.red(err.message));
    }
    finally {
        await client.disconnect();
    }
}
// Remove an agent from the routing registry by name.
static async removeAgent(client, name) {
    const spinner = (0, ora_1.default)(`Removing agent '${name}'...`).start();
    try {
        await client.connect();
        await client.removeAgent(name);
        spinner.succeed(chalk_1.default.green(`Agent '${name}' removed`));
    }
    catch (err) {
        spinner.fail(chalk_1.default.red('Failed to remove agent'));
        console.error(chalk_1.default.red(err.message));
    }
    finally {
        await client.disconnect();
    }
}
// Toggle an agent's participation in routing without removing it.
static async setActive(client, name, active) {
    const spinner = (0, ora_1.default)(`Setting agent '${name}' ${active ? 'active' : 'inactive'}...`).start();
    try {
        await client.connect();
        await client.setAgentActive(name, active);
        spinner.succeed(chalk_1.default.green(`Agent '${name}' is now ${active ? 'active' : 'inactive'}`));
    }
    catch (err) {
        spinner.fail(chalk_1.default.red('Failed to update agent status'));
        console.error(chalk_1.default.red(err.message));
    }
    finally {
        await client.disconnect();
    }
}
// Route a request (by embedding) to the best agent and print the decision,
// including estimated cost/latency/quality and up to three alternatives.
static async route(client, options) {
    const spinner = (0, ora_1.default)('Routing request to best agent...').start();
    try {
        await client.connect();
        const embedding = JSON.parse(options.embedding);
        // Optimization target defaults to 'balanced' when not given.
        const optimizeFor = options.optimizeFor || 'balanced';
        const constraints = options.constraints ? JSON.parse(options.constraints) : undefined;
        const decision = await client.route(embedding, optimizeFor, constraints);
        spinner.succeed(chalk_1.default.green('Routing decision made'));
        console.log(chalk_1.default.bold.blue('\nRouting Decision:'));
        console.log(chalk_1.default.gray('-'.repeat(50)));
        console.log(` ${chalk_1.default.green('Selected Agent:')} ${chalk_1.default.bold(decision.agent_name)}`);
        console.log(` ${chalk_1.default.green('Confidence:')} ${(decision.confidence * 100).toFixed(1)}%`);
        console.log(` ${chalk_1.default.green('Estimated Cost:')} $${decision.estimated_cost.toFixed(4)}`);
        console.log(` ${chalk_1.default.green('Estimated Latency:')} ${decision.estimated_latency_ms.toFixed(0)}ms`);
        console.log(` ${chalk_1.default.green('Expected Quality:')} ${(decision.expected_quality * 100).toFixed(1)}%`);
        console.log(` ${chalk_1.default.green('Similarity Score:')} ${decision.similarity_score.toFixed(4)}`);
        if (decision.reasoning) {
            console.log(` ${chalk_1.default.green('Reasoning:')} ${decision.reasoning}`);
        }
        if (decision.alternatives && decision.alternatives.length > 0) {
            console.log(chalk_1.default.bold.blue('\nAlternatives:'));
            for (const alt of decision.alternatives.slice(0, 3)) {
                console.log(` ${chalk_1.default.yellow('-')} ${alt.name} (score: ${alt.score?.toFixed(3) || 'N/A'})`);
            }
        }
    }
    catch (err) {
        spinner.fail(chalk_1.default.red('Routing failed'));
        console.error(chalk_1.default.red(err.message));
    }
    finally {
        await client.disconnect();
    }
}
// List all registered agents in a table (name, type, cost, latency, quality,
// request count, active flag).
static async listAgents(client) {
    const spinner = (0, ora_1.default)('Fetching agents...').start();
    try {
        await client.connect();
        const agents = await client.listAgents();
        spinner.stop();
        if (agents.length === 0) {
            console.log(chalk_1.default.yellow('No agents registered'));
            return;
        }
        console.log(chalk_1.default.bold.blue(`\nRegistered Agents (${agents.length}):`));
        const table = new cli_table3_1.default({
            head: [
                chalk_1.default.cyan('Name'),
                chalk_1.default.cyan('Type'),
                chalk_1.default.cyan('Cost'),
                chalk_1.default.cyan('Latency'),
                chalk_1.default.cyan('Quality'),
                chalk_1.default.cyan('Requests'),
                chalk_1.default.cyan('Active'),
            ],
            colWidths: [15, 12, 10, 10, 10, 10, 8],
        });
        for (const agent of agents) {
            table.push([
                agent.name,
                agent.agent_type,
                `$${agent.cost_per_request.toFixed(3)}`,
                `${agent.avg_latency_ms.toFixed(0)}ms`,
                `${(agent.quality_score * 100).toFixed(0)}%`,
                agent.total_requests.toString(),
                agent.is_active ? chalk_1.default.green('Yes') : chalk_1.default.red('No'),
            ]);
        }
        console.log(table.toString());
    }
    catch (err) {
        spinner.fail(chalk_1.default.red('Failed to list agents'));
        console.error(chalk_1.default.red(err.message));
    }
    finally {
        await client.disconnect();
    }
}
// Print the full profile of one agent: capabilities, cost model, performance.
static async getAgent(client, name) {
    const spinner = (0, ora_1.default)(`Fetching agent '${name}'...`).start();
    try {
        await client.connect();
        const agent = await client.getAgent(name);
        spinner.stop();
        console.log(chalk_1.default.bold.blue(`\nAgent: ${agent.name}`));
        console.log(chalk_1.default.gray('-'.repeat(50)));
        console.log(` ${chalk_1.default.green('Type:')} ${agent.agent_type}`);
        console.log(` ${chalk_1.default.green('Capabilities:')} ${agent.capabilities.join(', ')}`);
        console.log(` ${chalk_1.default.green('Active:')} ${agent.is_active ? chalk_1.default.green('Yes') : chalk_1.default.red('No')}`);
        console.log(chalk_1.default.bold.blue('\nCost Model:'));
        console.log(` ${chalk_1.default.green('Per Request:')} $${agent.cost_model.per_request}`);
        // per_token pricing is optional on the cost model.
        if (agent.cost_model.per_token) {
            console.log(` ${chalk_1.default.green('Per Token:')} $${agent.cost_model.per_token}`);
        }
        console.log(chalk_1.default.bold.blue('\nPerformance:'));
        console.log(` ${chalk_1.default.green('Avg Latency:')} ${agent.performance.avg_latency_ms}ms`);
        console.log(` ${chalk_1.default.green('Quality Score:')} ${(agent.performance.quality_score * 100).toFixed(1)}%`);
        console.log(` ${chalk_1.default.green('Success Rate:')} ${(agent.performance.success_rate * 100).toFixed(1)}%`);
        console.log(` ${chalk_1.default.green('Total Requests:')} ${agent.performance.total_requests}`);
    }
    catch (err) {
        spinner.fail(chalk_1.default.red('Failed to get agent'));
        console.error(chalk_1.default.red(err.message));
    }
    finally {
        await client.disconnect();
    }
}
// Find agents advertising a given capability and print them as a table.
static async findByCapability(client, options) {
    const spinner = (0, ora_1.default)(`Finding agents with '${options.capability}'...`).start();
    try {
        await client.connect();
        // Default limit is 10 when not supplied on the CLI.
        const limit = options.limit ? parseInt(options.limit) : 10;
        const agents = await client.findAgentsByCapability(options.capability, limit);
        spinner.stop();
        if (agents.length === 0) {
            console.log(chalk_1.default.yellow(`No agents found with capability '${options.capability}'`));
            return;
        }
        console.log(chalk_1.default.bold.blue(`\nAgents with '${options.capability}' (${agents.length}):`));
        const table = new cli_table3_1.default({
            head: [
                chalk_1.default.cyan('Name'),
                chalk_1.default.cyan('Quality'),
                chalk_1.default.cyan('Latency'),
                chalk_1.default.cyan('Cost'),
            ],
            colWidths: [20, 12, 12, 12],
        });
        for (const agent of agents) {
            table.push([
                agent.name,
                `${(agent.quality_score * 100).toFixed(0)}%`,
                `${agent.avg_latency_ms.toFixed(0)}ms`,
                `$${agent.cost_per_request.toFixed(3)}`,
            ]);
        }
        console.log(table.toString());
    }
    catch (err) {
        spinner.fail(chalk_1.default.red('Failed to find agents'));
        console.error(chalk_1.default.red(err.message));
    }
    finally {
        await client.disconnect();
    }
}
// Print aggregate routing statistics (agent counts, requests, avg quality).
static async stats(client) {
    const spinner = (0, ora_1.default)('Fetching routing statistics...').start();
    try {
        await client.connect();
        const stats = await client.routingStats();
        spinner.stop();
        console.log(chalk_1.default.bold.blue('\nRouting Statistics:'));
        console.log(chalk_1.default.gray('-'.repeat(40)));
        console.log(` ${chalk_1.default.green('Total Agents:')} ${stats.total_agents}`);
        console.log(` ${chalk_1.default.green('Active Agents:')} ${stats.active_agents}`);
        console.log(` ${chalk_1.default.green('Total Requests:')} ${stats.total_requests}`);
        console.log(` ${chalk_1.default.green('Avg Quality:')} ${(stats.average_quality * 100).toFixed(1)}%`);
    }
    catch (err) {
        spinner.fail(chalk_1.default.red('Failed to get stats'));
        console.error(chalk_1.default.red(err.message));
    }
    finally {
        await client.disconnect();
    }
}
// Remove every registered agent. Destructive; no confirmation prompt here.
static async clearAgents(client) {
    const spinner = (0, ora_1.default)('Clearing all agents...').start();
    try {
        await client.connect();
        await client.clearAgents();
        spinner.succeed(chalk_1.default.green('All agents cleared'));
    }
    catch (err) {
        spinner.fail(chalk_1.default.red('Failed to clear agents'));
        console.error(chalk_1.default.red(err.message));
    }
    finally {
        await client.disconnect();
    }
}
// Print static usage help for the routing subcommands. Purely local:
// no client connection is made. The help text is a single template
// literal, so its contents are runtime output and must not be edited
// for style.
static showHelp() {
console.log(chalk_1.default.bold.blue('\nTiny Dancer Routing System:'));
console.log(chalk_1.default.gray('-'.repeat(60)));
console.log(`
${chalk_1.default.yellow('Overview:')}
Intelligent routing of AI requests to the most suitable agent
based on cost, latency, quality, and capabilities.
${chalk_1.default.yellow('Agent Types:')}
${chalk_1.default.green('llm')} - Large Language Models (GPT-4, Claude, etc.)
${chalk_1.default.green('embedding')} - Embedding models
${chalk_1.default.green('specialized')} - Domain-specific models
${chalk_1.default.green('multimodal')} - Vision/audio models
${chalk_1.default.yellow('Optimization Targets:')}
${chalk_1.default.green('cost')} - Minimize cost
${chalk_1.default.green('latency')} - Minimize response time
${chalk_1.default.green('quality')} - Maximize output quality
${chalk_1.default.green('balanced')} - Balance all factors (default)
${chalk_1.default.yellow('Commands:')}
${chalk_1.default.green('routing register')} - Register a new agent
${chalk_1.default.green('routing register-full')} - Register with full JSON config
${chalk_1.default.green('routing update')} - Update agent metrics
${chalk_1.default.green('routing remove')} - Remove an agent
${chalk_1.default.green('routing set-active')} - Enable/disable agent
${chalk_1.default.green('routing route')} - Route a request
${chalk_1.default.green('routing list')} - List all agents
${chalk_1.default.green('routing get')} - Get agent details
${chalk_1.default.green('routing find')} - Find agents by capability
${chalk_1.default.green('routing stats')} - Get routing statistics
${chalk_1.default.green('routing clear')} - Clear all agents
${chalk_1.default.yellow('Example:')}
${chalk_1.default.gray('# Register an agent')}
ruvector-pg routing register \\
--name gpt-4 \\
--type llm \\
--capabilities "code,translation,analysis" \\
--cost 0.03 \\
--latency 500 \\
--quality 0.95
${chalk_1.default.gray('# Route a request')}
ruvector-pg routing route \\
--embedding "[0.1, 0.2, ...]" \\
--optimize-for balanced \\
--constraints '{"max_cost": 0.1}'
`);
}
}
// CommonJS interop: expose the class both as a named export and as the
// module's default export (mirrors `export default` in the TS source).
exports.RoutingCommands = RoutingCommands;
exports.default = RoutingCommands;
//# sourceMappingURL=routing.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,441 @@
/**
* Routing/Agent Commands
* CLI commands for Tiny Dancer agent routing and management
*/
import chalk from 'chalk';
import ora from 'ora';
import Table from 'cli-table3';
import type { RuVectorClient } from '../client.js';
/** CLI flags for `routing register`; numeric values arrive as strings. */
export interface RegisterAgentOptions {
name: string;
type: string;
/** Comma-separated capability list, e.g. "code,translation,analysis". */
capabilities: string;
/** Cost per request in dollars; parsed with parseFloat. */
cost: string;
/** Average latency in milliseconds; parsed with parseFloat. */
latency: string;
/** Quality score; parsed with parseFloat. */
quality: string;
}
/** CLI flags for `routing register-full`: a raw JSON agent config string. */
export interface RegisterAgentFullOptions {
config: string;
}
/** CLI flags for `routing update` (per-request metric observation). */
export interface UpdateMetricsOptions {
name: string;
latency: string;
success: boolean;
/** Optional observed quality; forwarded only when provided. */
quality?: string;
}
/** CLI flags for `routing route`. */
export interface RouteOptions {
/** JSON array of numbers, e.g. "[0.1, 0.2]". */
embedding: string;
/** Optimization target; defaults to "balanced" when omitted. */
optimizeFor?: string;
/** Optional JSON constraints object, e.g. '{"max_cost": 0.1}'. */
constraints?: string;
}
/** CLI flags for `routing find`. */
export interface FindAgentsOptions {
capability: string;
/** Max results; parsed with parseInt, defaults to 10 when omitted. */
limit?: string;
}
/**
 * CLI command implementations for the Tiny Dancer agent-routing system.
 *
 * Every network-facing command follows the same lifecycle: start an `ora`
 * spinner, `connect()` the client, perform the operation, print results
 * with `chalk`, and always `disconnect()` in `finally`. Errors are printed
 * to the console and swallowed, so these methods never reject.
 */
export class RoutingCommands {
/** Register a new agent from simple CLI flags (see RegisterAgentOptions). */
static async registerAgent(
client: RuVectorClient,
options: RegisterAgentOptions
): Promise<void> {
const spinner = ora(`Registering agent '${options.name}'...`).start();
try {
await client.connect();
// Capabilities arrive as one comma-separated flag; trim each entry.
const capabilities = options.capabilities.split(',').map(c => c.trim());
await client.registerAgent(
options.name,
options.type,
capabilities,
parseFloat(options.cost),
parseFloat(options.latency),
parseFloat(options.quality)
);
spinner.succeed(chalk.green(`Agent '${options.name}' registered successfully`));
console.log(chalk.bold.blue('\nAgent Details:'));
console.log(chalk.gray('-'.repeat(40)));
console.log(` ${chalk.green('Name:')} ${options.name}`);
console.log(` ${chalk.green('Type:')} ${options.type}`);
console.log(` ${chalk.green('Capabilities:')} ${capabilities.join(', ')}`);
console.log(` ${chalk.green('Cost/Request:')} $${options.cost}`);
console.log(` ${chalk.green('Avg Latency:')} ${options.latency}ms`);
console.log(` ${chalk.green('Quality Score:')} ${options.quality}`);
} catch (err) {
spinner.fail(chalk.red('Failed to register agent'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
/** Register an agent from a full JSON config blob (`--config`). */
static async registerAgentFull(
client: RuVectorClient,
options: RegisterAgentFullOptions
): Promise<void> {
const spinner = ora('Registering agent with full config...').start();
try {
await client.connect();
// NOTE(review): JSON.parse result is used unvalidated; a schema check
// before sending would give clearer CLI errors — confirm server contract.
const config = JSON.parse(options.config);
await client.registerAgentFull(config);
spinner.succeed(chalk.green(`Agent '${config.name}' registered successfully`));
} catch (err) {
spinner.fail(chalk.red('Failed to register agent'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
/** Report one request's observed latency/success (and optional quality). */
static async updateMetrics(
client: RuVectorClient,
options: UpdateMetricsOptions
): Promise<void> {
const spinner = ora(`Updating metrics for '${options.name}'...`).start();
try {
await client.connect();
await client.updateAgentMetrics(
options.name,
parseFloat(options.latency),
options.success,
// Truthiness check: an empty-string flag is treated as "not provided".
options.quality ? parseFloat(options.quality) : undefined
);
spinner.succeed(chalk.green('Metrics updated'));
console.log(` ${chalk.green('Latency:')} ${options.latency}ms`);
console.log(` ${chalk.green('Success:')} ${options.success}`);
if (options.quality) {
console.log(` ${chalk.green('Quality:')} ${options.quality}`);
}
} catch (err) {
spinner.fail(chalk.red('Failed to update metrics'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
/** Unregister the named agent. */
static async removeAgent(
client: RuVectorClient,
name: string
): Promise<void> {
const spinner = ora(`Removing agent '${name}'...`).start();
try {
await client.connect();
await client.removeAgent(name);
spinner.succeed(chalk.green(`Agent '${name}' removed`));
} catch (err) {
spinner.fail(chalk.red('Failed to remove agent'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
/** Enable or disable an agent without removing it. */
static async setActive(
client: RuVectorClient,
name: string,
active: boolean
): Promise<void> {
const spinner = ora(`Setting agent '${name}' ${active ? 'active' : 'inactive'}...`).start();
try {
await client.connect();
await client.setAgentActive(name, active);
spinner.succeed(chalk.green(`Agent '${name}' is now ${active ? 'active' : 'inactive'}`));
} catch (err) {
spinner.fail(chalk.red('Failed to update agent status'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
/**
 * Route a request embedding to the best agent and print the decision
 * (confidence, cost/latency/quality estimates, up to 3 alternatives).
 */
static async route(
client: RuVectorClient,
options: RouteOptions
): Promise<void> {
const spinner = ora('Routing request to best agent...').start();
try {
await client.connect();
// Embedding and constraints are JSON-encoded CLI strings.
const embedding = JSON.parse(options.embedding);
const optimizeFor = options.optimizeFor || 'balanced';
const constraints = options.constraints ? JSON.parse(options.constraints) : undefined;
const decision = await client.route(embedding, optimizeFor, constraints);
spinner.succeed(chalk.green('Routing decision made'));
console.log(chalk.bold.blue('\nRouting Decision:'));
console.log(chalk.gray('-'.repeat(50)));
console.log(` ${chalk.green('Selected Agent:')} ${chalk.bold(decision.agent_name)}`);
console.log(` ${chalk.green('Confidence:')} ${(decision.confidence * 100).toFixed(1)}%`);
console.log(` ${chalk.green('Estimated Cost:')} $${decision.estimated_cost.toFixed(4)}`);
console.log(` ${chalk.green('Estimated Latency:')} ${decision.estimated_latency_ms.toFixed(0)}ms`);
console.log(` ${chalk.green('Expected Quality:')} ${(decision.expected_quality * 100).toFixed(1)}%`);
console.log(` ${chalk.green('Similarity Score:')} ${decision.similarity_score.toFixed(4)}`);
if (decision.reasoning) {
console.log(` ${chalk.green('Reasoning:')} ${decision.reasoning}`);
}
if (decision.alternatives && decision.alternatives.length > 0) {
console.log(chalk.bold.blue('\nAlternatives:'));
// Cap the printout at the top three alternatives.
for (const alt of decision.alternatives.slice(0, 3)) {
console.log(` ${chalk.yellow('-')} ${alt.name} (score: ${alt.score?.toFixed(3) || 'N/A'})`);
}
}
} catch (err) {
spinner.fail(chalk.red('Routing failed'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
/** Print all registered agents as a table (empty registry is not an error). */
static async listAgents(client: RuVectorClient): Promise<void> {
const spinner = ora('Fetching agents...').start();
try {
await client.connect();
const agents = await client.listAgents();
spinner.stop();
if (agents.length === 0) {
console.log(chalk.yellow('No agents registered'));
return;
}
console.log(chalk.bold.blue(`\nRegistered Agents (${agents.length}):`));
const table = new Table({
head: [
chalk.cyan('Name'),
chalk.cyan('Type'),
chalk.cyan('Cost'),
chalk.cyan('Latency'),
chalk.cyan('Quality'),
chalk.cyan('Requests'),
chalk.cyan('Active'),
],
colWidths: [15, 12, 10, 10, 10, 10, 8],
});
for (const agent of agents) {
table.push([
agent.name,
agent.agent_type,
`$${agent.cost_per_request.toFixed(3)}`,
`${agent.avg_latency_ms.toFixed(0)}ms`,
`${(agent.quality_score * 100).toFixed(0)}%`,
agent.total_requests.toString(),
agent.is_active ? chalk.green('Yes') : chalk.red('No'),
]);
}
console.log(table.toString());
} catch (err) {
spinner.fail(chalk.red('Failed to list agents'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
/** Print one agent's full record: capabilities, cost model, performance. */
static async getAgent(client: RuVectorClient, name: string): Promise<void> {
const spinner = ora(`Fetching agent '${name}'...`).start();
try {
await client.connect();
const agent = await client.getAgent(name);
spinner.stop();
console.log(chalk.bold.blue(`\nAgent: ${agent.name}`));
console.log(chalk.gray('-'.repeat(50)));
console.log(` ${chalk.green('Type:')} ${agent.agent_type}`);
console.log(` ${chalk.green('Capabilities:')} ${agent.capabilities.join(', ')}`);
console.log(` ${chalk.green('Active:')} ${agent.is_active ? chalk.green('Yes') : chalk.red('No')}`);
console.log(chalk.bold.blue('\nCost Model:'));
console.log(` ${chalk.green('Per Request:')} $${agent.cost_model.per_request}`);
if (agent.cost_model.per_token) {
console.log(` ${chalk.green('Per Token:')} $${agent.cost_model.per_token}`);
}
console.log(chalk.bold.blue('\nPerformance:'));
console.log(` ${chalk.green('Avg Latency:')} ${agent.performance.avg_latency_ms}ms`);
console.log(` ${chalk.green('Quality Score:')} ${(agent.performance.quality_score * 100).toFixed(1)}%`);
console.log(` ${chalk.green('Success Rate:')} ${(agent.performance.success_rate * 100).toFixed(1)}%`);
console.log(` ${chalk.green('Total Requests:')} ${agent.performance.total_requests}`);
} catch (err) {
spinner.fail(chalk.red('Failed to get agent'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
/** List agents advertising a capability, up to `limit` (default 10). */
static async findByCapability(
client: RuVectorClient,
options: FindAgentsOptions
): Promise<void> {
const spinner = ora(`Finding agents with '${options.capability}'...`).start();
try {
await client.connect();
// NOTE(review): parseInt without an explicit radix — prefer
// parseInt(options.limit, 10) to rule out prefix surprises.
const limit = options.limit ? parseInt(options.limit) : 10;
const agents = await client.findAgentsByCapability(options.capability, limit);
spinner.stop();
if (agents.length === 0) {
console.log(chalk.yellow(`No agents found with capability '${options.capability}'`));
return;
}
console.log(chalk.bold.blue(`\nAgents with '${options.capability}' (${agents.length}):`));
const table = new Table({
head: [
chalk.cyan('Name'),
chalk.cyan('Quality'),
chalk.cyan('Latency'),
chalk.cyan('Cost'),
],
colWidths: [20, 12, 12, 12],
});
for (const agent of agents) {
table.push([
agent.name,
`${(agent.quality_score * 100).toFixed(0)}%`,
`${agent.avg_latency_ms.toFixed(0)}ms`,
`$${agent.cost_per_request.toFixed(3)}`,
]);
}
console.log(table.toString());
} catch (err) {
spinner.fail(chalk.red('Failed to find agents'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
/** Print aggregate routing statistics. */
static async stats(client: RuVectorClient): Promise<void> {
const spinner = ora('Fetching routing statistics...').start();
try {
await client.connect();
const stats = await client.routingStats();
spinner.stop();
console.log(chalk.bold.blue('\nRouting Statistics:'));
console.log(chalk.gray('-'.repeat(40)));
console.log(` ${chalk.green('Total Agents:')} ${stats.total_agents}`);
console.log(` ${chalk.green('Active Agents:')} ${stats.active_agents}`);
console.log(` ${chalk.green('Total Requests:')} ${stats.total_requests}`);
console.log(` ${chalk.green('Avg Quality:')} ${(stats.average_quality * 100).toFixed(1)}%`);
} catch (err) {
spinner.fail(chalk.red('Failed to get stats'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
/** Remove every registered agent. Destructive; no confirmation prompt here. */
static async clearAgents(client: RuVectorClient): Promise<void> {
const spinner = ora('Clearing all agents...').start();
try {
await client.connect();
await client.clearAgents();
spinner.succeed(chalk.green('All agents cleared'));
} catch (err) {
spinner.fail(chalk.red('Failed to clear agents'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
/** Print static usage help; purely local, no client connection. */
static showHelp(): void {
console.log(chalk.bold.blue('\nTiny Dancer Routing System:'));
console.log(chalk.gray('-'.repeat(60)));
console.log(`
${chalk.yellow('Overview:')}
Intelligent routing of AI requests to the most suitable agent
based on cost, latency, quality, and capabilities.
${chalk.yellow('Agent Types:')}
${chalk.green('llm')} - Large Language Models (GPT-4, Claude, etc.)
${chalk.green('embedding')} - Embedding models
${chalk.green('specialized')} - Domain-specific models
${chalk.green('multimodal')} - Vision/audio models
${chalk.yellow('Optimization Targets:')}
${chalk.green('cost')} - Minimize cost
${chalk.green('latency')} - Minimize response time
${chalk.green('quality')} - Maximize output quality
${chalk.green('balanced')} - Balance all factors (default)
${chalk.yellow('Commands:')}
${chalk.green('routing register')} - Register a new agent
${chalk.green('routing register-full')} - Register with full JSON config
${chalk.green('routing update')} - Update agent metrics
${chalk.green('routing remove')} - Remove an agent
${chalk.green('routing set-active')} - Enable/disable agent
${chalk.green('routing route')} - Route a request
${chalk.green('routing list')} - List all agents
${chalk.green('routing get')} - Get agent details
${chalk.green('routing find')} - Find agents by capability
${chalk.green('routing stats')} - Get routing statistics
${chalk.green('routing clear')} - Clear all agents
${chalk.yellow('Example:')}
${chalk.gray('# Register an agent')}
ruvector-pg routing register \\
--name gpt-4 \\
--type llm \\
--capabilities "code,translation,analysis" \\
--cost 0.03 \\
--latency 500 \\
--quality 0.95
${chalk.gray('# Route a request')}
ruvector-pg routing route \\
--embedding "[0.1, 0.2, ...]" \\
--optimize-for balanced \\
--constraints '{"max_cost": 0.1}'
`);
}
}
export default RoutingCommands;

View File

@@ -0,0 +1,47 @@
/**
* Sparse Vector Commands
* CLI commands for sparse vector operations including BM25, sparsification, and distance calculations
*/
import type { RuVectorClient } from '../client.js';
/** CLI flags for `sparse create`; all values arrive as strings. */
export interface SparseCreateOptions {
/** JSON array of non-zero element indices. */
indices: string;
/** JSON array of values, parallel to `indices`. */
values: string;
/** Total vector dimension; parsed as an integer. */
dim: string;
}
/** CLI flags for `sparse distance`. */
export interface SparseDistanceOptions {
/** First sparse vector, in '{index:value, ...}' form. */
a: string;
/** Second sparse vector, in '{index:value, ...}' form. */
b: string;
metric: 'dot' | 'cosine' | 'euclidean' | 'manhattan';
}
/** CLI flags for `sparse bm25`; k1/b fall back to 1.2/0.75 when omitted. */
export interface SparseBM25Options {
query: string;
doc: string;
docLen: string;
avgDocLen: string;
k1?: string;
b?: string;
}
/** CLI flags for `sparse top-k`. */
export interface SparseTopKOptions {
sparse: string;
k: string;
}
/** CLI flags for `sparse prune`. */
export interface SparsePruneOptions {
sparse: string;
threshold: string;
}
/** CLI flags for `sparse dense-to-sparse`: a JSON number array. */
export interface DenseToSparseOptions {
dense: string;
}
/**
 * Sparse-vector CLI commands (create/distance/bm25/top-k/prune,
 * dense<->sparse conversion, and info). Generated declaration file —
 * implementations live in sparse.ts.
 */
export declare class SparseCommands {
static create(client: RuVectorClient, options: SparseCreateOptions): Promise<void>;
static distance(client: RuVectorClient, options: SparseDistanceOptions): Promise<void>;
static bm25(client: RuVectorClient, options: SparseBM25Options): Promise<void>;
static topK(client: RuVectorClient, options: SparseTopKOptions): Promise<void>;
static prune(client: RuVectorClient, options: SparsePruneOptions): Promise<void>;
static denseToSparse(client: RuVectorClient, options: DenseToSparseOptions): Promise<void>;
static sparseToDense(client: RuVectorClient, sparse: string): Promise<void>;
static info(client: RuVectorClient, sparse: string): Promise<void>;
static showHelp(): void;
}
export default SparseCommands;
//# sourceMappingURL=sparse.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"sparse.d.ts","sourceRoot":"","sources":["sparse.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAMH,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAEnD,MAAM,WAAW,mBAAmB;IAClC,OAAO,EAAE,MAAM,CAAC;IAChB,MAAM,EAAE,MAAM,CAAC;IACf,GAAG,EAAE,MAAM,CAAC;CACb;AAED,MAAM,WAAW,qBAAqB;IACpC,CAAC,EAAE,MAAM,CAAC;IACV,CAAC,EAAE,MAAM,CAAC;IACV,MAAM,EAAE,KAAK,GAAG,QAAQ,GAAG,WAAW,GAAG,WAAW,CAAC;CACtD;AAED,MAAM,WAAW,iBAAiB;IAChC,KAAK,EAAE,MAAM,CAAC;IACd,GAAG,EAAE,MAAM,CAAC;IACZ,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,EAAE,MAAM,CAAC;IAClB,EAAE,CAAC,EAAE,MAAM,CAAC;IACZ,CAAC,CAAC,EAAE,MAAM,CAAC;CACZ;AAED,MAAM,WAAW,iBAAiB;IAChC,MAAM,EAAE,MAAM,CAAC;IACf,CAAC,EAAE,MAAM,CAAC;CACX;AAED,MAAM,WAAW,kBAAkB;IACjC,MAAM,EAAE,MAAM,CAAC;IACf,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,oBAAoB;IACnC,KAAK,EAAE,MAAM,CAAC;CACf;AAED,qBAAa,cAAc;WACZ,MAAM,CACjB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,mBAAmB,GAC3B,OAAO,CAAC,IAAI,CAAC;WA4BH,QAAQ,CACnB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,qBAAqB,GAC7B,OAAO,CAAC,IAAI,CAAC;WAsBH,IAAI,CACf,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,iBAAiB,GACzB,OAAO,CAAC,IAAI,CAAC;WAmCH,IAAI,CACf,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,iBAAiB,GACzB,OAAO,CAAC,IAAI,CAAC;WAuBH,KAAK,CAChB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,kBAAkB,GAC1B,OAAO,CAAC,IAAI,CAAC;WA2BH,aAAa,CACxB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,oBAAoB,GAC5B,OAAO,CAAC,IAAI,CAAC;WAyBH,aAAa,CACxB,MAAM,EAAE,cAAc,EACtB,MAAM,EAAE,MAAM,GACb,OAAO,CAAC,IAAI,CAAC;WA0BH,IAAI,CAAC,MAAM,EAAE,cAAc,EAAE,MAAM,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAwBxE,MAAM,CAAC,QAAQ,IAAI,IAAI;CAgCxB;AAED,eAAe,cAAc,CAAC"}

View File

@@ -0,0 +1,221 @@
"use strict";
/**
* Sparse Vector Commands
* CLI commands for sparse vector operations including BM25, sparsification, and distance calculations
*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.SparseCommands = void 0;
const chalk_1 = __importDefault(require("chalk"));
const ora_1 = __importDefault(require("ora"));
// Compiled CommonJS output for the sparse-vector CLI commands. Every
// network-facing method: start an ora spinner, connect, act, print with
// chalk, and always disconnect in `finally`; errors are printed, not
// rethrown.
class SparseCommands {
// Create a sparse vector from parallel JSON index/value arrays.
static async create(client, options) {
const spinner = (0, ora_1.default)('Creating sparse vector...').start();
try {
await client.connect();
const indices = JSON.parse(options.indices);
const values = JSON.parse(options.values);
const dim = parseInt(options.dim);
const result = await client.createSparseVector(indices, values, dim);
spinner.succeed(chalk_1.default.green('Sparse vector created successfully'));
console.log(chalk_1.default.bold.blue('\nSparse Vector Details:'));
console.log(chalk_1.default.gray('-'.repeat(40)));
console.log(` ${chalk_1.default.green('Indices:')} ${indices.length}`);
console.log(` ${chalk_1.default.green('Non-zero elements:')} ${values.length}`);
console.log(` ${chalk_1.default.green('Dimension:')} ${dim}`);
console.log(` ${chalk_1.default.green('Sparsity:')} ${((1 - values.length / dim) * 100).toFixed(2)}%`);
}
catch (err) {
spinner.fail(chalk_1.default.red('Failed to create sparse vector'));
console.error(chalk_1.default.red(err.message));
}
finally {
await client.disconnect();
}
}
// Compute the distance between two sparse vectors under options.metric.
static async distance(client, options) {
const spinner = (0, ora_1.default)(`Computing sparse ${options.metric} distance...`).start();
try {
await client.connect();
const result = await client.sparseDistance(options.a, options.b, options.metric);
spinner.succeed(chalk_1.default.green(`Sparse ${options.metric} distance computed`));
console.log(chalk_1.default.bold.blue('\nDistance Result:'));
console.log(chalk_1.default.gray('-'.repeat(40)));
console.log(` ${chalk_1.default.green('Metric:')} ${options.metric}`);
console.log(` ${chalk_1.default.green('Distance:')} ${result.toFixed(6)}`);
}
catch (err) {
spinner.fail(chalk_1.default.red('Distance computation failed'));
console.error(chalk_1.default.red(err.message));
}
finally {
await client.disconnect();
}
}
// BM25 relevance score; k1 defaults to 1.2 and b to 0.75 when omitted.
static async bm25(client, options) {
const spinner = (0, ora_1.default)('Computing BM25 score...').start();
try {
await client.connect();
const k1 = options.k1 ? parseFloat(options.k1) : 1.2;
const b = options.b ? parseFloat(options.b) : 0.75;
const score = await client.sparseBM25(options.query, options.doc, parseFloat(options.docLen), parseFloat(options.avgDocLen), k1, b);
spinner.succeed(chalk_1.default.green('BM25 score computed'));
console.log(chalk_1.default.bold.blue('\nBM25 Result:'));
console.log(chalk_1.default.gray('-'.repeat(40)));
console.log(` ${chalk_1.default.green('Score:')} ${score.toFixed(6)}`);
console.log(` ${chalk_1.default.green('k1:')} ${k1}`);
console.log(` ${chalk_1.default.green('b:')} ${b}`);
console.log(` ${chalk_1.default.green('Document Length:')} ${options.docLen}`);
console.log(` ${chalk_1.default.green('Avg Doc Length:')} ${options.avgDocLen}`);
}
catch (err) {
spinner.fail(chalk_1.default.red('BM25 computation failed'));
console.error(chalk_1.default.red(err.message));
}
finally {
await client.disconnect();
}
}
// Keep only the top-k elements of a sparse vector.
static async topK(client, options) {
const spinner = (0, ora_1.default)('Computing top-k sparse elements...').start();
try {
await client.connect();
const result = await client.sparseTopK(options.sparse, parseInt(options.k));
spinner.succeed(chalk_1.default.green('Top-k elements computed'));
console.log(chalk_1.default.bold.blue('\nTop-K Result:'));
console.log(chalk_1.default.gray('-'.repeat(40)));
console.log(` ${chalk_1.default.green('Original NNZ:')} ${result.originalNnz}`);
console.log(` ${chalk_1.default.green('After Top-K:')} ${result.newNnz}`);
console.log(` ${chalk_1.default.green('Sparse Vector:')} ${result.vector}`);
}
catch (err) {
spinner.fail(chalk_1.default.red('Top-k computation failed'));
console.error(chalk_1.default.red(err.message));
}
finally {
await client.disconnect();
}
}
// Drop elements below a threshold; NNZ fields may be absent, hence ??.
static async prune(client, options) {
const spinner = (0, ora_1.default)('Pruning sparse vector...').start();
try {
await client.connect();
const result = await client.sparsePrune(options.sparse, parseFloat(options.threshold));
spinner.succeed(chalk_1.default.green('Sparse vector pruned'));
console.log(chalk_1.default.bold.blue('\nPrune Result:'));
console.log(chalk_1.default.gray('-'.repeat(40)));
console.log(` ${chalk_1.default.green('Threshold:')} ${options.threshold}`);
console.log(` ${chalk_1.default.green('Original NNZ:')} ${result.originalNnz ?? 'N/A'}`);
console.log(` ${chalk_1.default.green('After Pruning:')} ${result.newNnz ?? 'N/A'}`);
console.log(` ${chalk_1.default.green('Elements Removed:')} ${(result.originalNnz ?? 0) - (result.newNnz ?? 0)}`);
}
catch (err) {
spinner.fail(chalk_1.default.red('Pruning failed'));
console.error(chalk_1.default.red(err.message));
}
finally {
await client.disconnect();
}
}
// Convert a dense JSON number array into the sparse representation.
static async denseToSparse(client, options) {
const spinner = (0, ora_1.default)('Converting dense to sparse...').start();
try {
await client.connect();
const dense = JSON.parse(options.dense);
const result = await client.denseToSparse(dense);
spinner.succeed(chalk_1.default.green('Conversion completed'));
console.log(chalk_1.default.bold.blue('\nConversion Result:'));
console.log(chalk_1.default.gray('-'.repeat(40)));
console.log(` ${chalk_1.default.green('Dense Dimension:')} ${dense.length}`);
console.log(` ${chalk_1.default.green('Non-zero Elements:')} ${result.nnz}`);
console.log(` ${chalk_1.default.green('Sparsity:')} ${((1 - result.nnz / dense.length) * 100).toFixed(2)}%`);
console.log(` ${chalk_1.default.green('Sparse Vector:')} ${result.vector}`);
}
catch (err) {
spinner.fail(chalk_1.default.red('Conversion failed'));
console.error(chalk_1.default.red(err.message));
}
finally {
await client.disconnect();
}
}
// Expand a sparse vector to its dense form and preview the first 10 values.
static async sparseToDense(client, sparse) {
const spinner = (0, ora_1.default)('Converting sparse to dense...').start();
try {
await client.connect();
const result = await client.sparseToDense(sparse);
spinner.succeed(chalk_1.default.green('Conversion completed'));
console.log(chalk_1.default.bold.blue('\nConversion Result:'));
console.log(chalk_1.default.gray('-'.repeat(40)));
console.log(` ${chalk_1.default.green('Dense Dimension:')} ${result.length}`);
console.log(` ${chalk_1.default.green('Non-zero Elements:')} ${result.filter((v) => v !== 0).length}`);
// Show first 10 elements
const preview = result.slice(0, 10).map((v) => v.toFixed(4)).join(', ');
console.log(` ${chalk_1.default.green('Preview:')} [${preview}${result.length > 10 ? ', ...' : ''}]`);
}
catch (err) {
spinner.fail(chalk_1.default.red('Conversion failed'));
console.error(chalk_1.default.red(err.message));
}
finally {
await client.disconnect();
}
}
// Print dimension, NNZ, sparsity and L2 norm for a sparse vector.
static async info(client, sparse) {
const spinner = (0, ora_1.default)('Getting sparse vector info...').start();
try {
await client.connect();
const info = await client.sparseInfo(sparse);
spinner.stop();
console.log(chalk_1.default.bold.blue('\nSparse Vector Info:'));
console.log(chalk_1.default.gray('-'.repeat(40)));
console.log(` ${chalk_1.default.green('Dimension:')} ${info.dim}`);
console.log(` ${chalk_1.default.green('Non-zero Elements (NNZ):')} ${info.nnz}`);
console.log(` ${chalk_1.default.green('Sparsity:')} ${info.sparsity.toFixed(2)}%`);
console.log(` ${chalk_1.default.green('L2 Norm:')} ${info.norm.toFixed(6)}`);
}
catch (err) {
spinner.fail(chalk_1.default.red('Failed to get info'));
console.error(chalk_1.default.red(err.message));
}
finally {
await client.disconnect();
}
}
// Static usage help; purely local, no connection. The help text is one
// template literal (runtime output) and is left untouched.
static showHelp() {
console.log(chalk_1.default.bold.blue('\nSparse Vector Operations:'));
console.log(chalk_1.default.gray('-'.repeat(60)));
console.log(`
${chalk_1.default.yellow('Format:')}
Sparse vectors use the format: '{index:value, index:value, ...}'
Example: '{0:0.5, 10:0.3, 100:0.8}'
${chalk_1.default.yellow('Distance Metrics:')}
${chalk_1.default.green('dot')} - Dot product (inner product)
${chalk_1.default.green('cosine')} - Cosine similarity
${chalk_1.default.green('euclidean')} - L2 distance
${chalk_1.default.green('manhattan')} - L1 distance
${chalk_1.default.yellow('BM25 Scoring:')}
Used for text search relevance ranking.
Parameters:
${chalk_1.default.green('k1')} - Term frequency saturation (default: 1.2)
${chalk_1.default.green('b')} - Length normalization (default: 0.75)
${chalk_1.default.yellow('Commands:')}
${chalk_1.default.green('sparse create')} - Create sparse vector from indices/values
${chalk_1.default.green('sparse distance')} - Compute distance between sparse vectors
${chalk_1.default.green('sparse bm25')} - Compute BM25 relevance score
${chalk_1.default.green('sparse top-k')} - Keep only top-k elements by value
${chalk_1.default.green('sparse prune')} - Remove elements below threshold
${chalk_1.default.green('sparse dense-to-sparse')} - Convert dense to sparse
${chalk_1.default.green('sparse sparse-to-dense')} - Convert sparse to dense
${chalk_1.default.green('sparse info')} - Get sparse vector statistics
`);
}
}
// CommonJS interop: named export plus default export (mirrors the TS source).
exports.SparseCommands = SparseCommands;
exports.default = SparseCommands;
//# sourceMappingURL=sparse.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,313 @@
/**
* Sparse Vector Commands
* CLI commands for sparse vector operations including BM25, sparsification, and distance calculations
*/
import chalk from 'chalk';
import ora from 'ora';
import Table from 'cli-table3';
import { readFileSync } from 'fs';
import type { RuVectorClient } from '../client.js';
/** CLI flags for `sparse create`; all values arrive as strings. */
export interface SparseCreateOptions {
/** JSON array of non-zero element indices. */
indices: string;
/** JSON array of values, parallel to `indices`. */
values: string;
/** Total vector dimension; parsed as an integer. */
dim: string;
}
/** CLI flags for `sparse distance`. */
export interface SparseDistanceOptions {
/** First sparse vector, in '{index:value, ...}' form. */
a: string;
/** Second sparse vector, in '{index:value, ...}' form. */
b: string;
metric: 'dot' | 'cosine' | 'euclidean' | 'manhattan';
}
/** CLI flags for `sparse bm25`; k1/b fall back to 1.2/0.75 when omitted. */
export interface SparseBM25Options {
query: string;
doc: string;
/** Document length; parsed with parseFloat. */
docLen: string;
/** Corpus average document length; parsed with parseFloat. */
avgDocLen: string;
k1?: string;
b?: string;
}
/** CLI flags for `sparse top-k`. */
export interface SparseTopKOptions {
sparse: string;
/** Number of elements to keep; parsed as an integer. */
k: string;
}
/** CLI flags for `sparse prune`. */
export interface SparsePruneOptions {
sparse: string;
/** Value cutoff; parsed with parseFloat. */
threshold: string;
}
/** CLI flags for `sparse dense-to-sparse`: a JSON number array. */
export interface DenseToSparseOptions {
dense: string;
}
export class SparseCommands {
static async create(
client: RuVectorClient,
options: SparseCreateOptions
): Promise<void> {
const spinner = ora('Creating sparse vector...').start();
try {
await client.connect();
const indices = JSON.parse(options.indices);
const values = JSON.parse(options.values);
const dim = parseInt(options.dim);
const result = await client.createSparseVector(indices, values, dim);
spinner.succeed(chalk.green('Sparse vector created successfully'));
console.log(chalk.bold.blue('\nSparse Vector Details:'));
console.log(chalk.gray('-'.repeat(40)));
console.log(` ${chalk.green('Indices:')} ${indices.length}`);
console.log(` ${chalk.green('Non-zero elements:')} ${values.length}`);
console.log(` ${chalk.green('Dimension:')} ${dim}`);
console.log(` ${chalk.green('Sparsity:')} ${((1 - values.length / dim) * 100).toFixed(2)}%`);
} catch (err) {
spinner.fail(chalk.red('Failed to create sparse vector'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
/**
 * Compute and print the distance between two sparse vectors under the
 * chosen metric (dot | cosine | euclidean | manhattan). Errors are
 * reported to the console; the client is always disconnected.
 */
static async distance(
  client: RuVectorClient,
  options: SparseDistanceOptions
): Promise<void> {
  const { metric } = options;
  const spin = ora(`Computing sparse ${metric} distance...`).start();
  try {
    await client.connect();
    const dist = await client.sparseDistance(options.a, options.b, metric);
    spin.succeed(chalk.green(`Sparse ${metric} distance computed`));
    const divider = '-'.repeat(40);
    console.log(chalk.bold.blue('\nDistance Result:'));
    console.log(chalk.gray(divider));
    console.log(` ${chalk.green('Metric:')} ${metric}`);
    console.log(` ${chalk.green('Distance:')} ${dist.toFixed(6)}`);
  } catch (err) {
    spin.fail(chalk.red('Distance computation failed'));
    console.error(chalk.red((err as Error).message));
  } finally {
    await client.disconnect();
  }
}
/**
 * Compute and print a BM25 relevance score for a query/document pair.
 * k1 (term-frequency saturation) defaults to 1.2 and b (length
 * normalization) to 0.75 when the corresponding flags are absent.
 */
static async bm25(
  client: RuVectorClient,
  options: SparseBM25Options
): Promise<void> {
  const spin = ora('Computing BM25 score...').start();
  try {
    await client.connect();
    // Truthiness checks deliberately treat an empty flag as "use default".
    const k1 = options.k1 ? parseFloat(options.k1) : 1.2;
    const b = options.b ? parseFloat(options.b) : 0.75;
    const docLen = parseFloat(options.docLen);
    const avgDocLen = parseFloat(options.avgDocLen);
    const score = await client.sparseBM25(options.query, options.doc, docLen, avgDocLen, k1, b);
    spin.succeed(chalk.green('BM25 score computed'));
    console.log(chalk.bold.blue('\nBM25 Result:'));
    console.log(chalk.gray('-'.repeat(40)));
    console.log(` ${chalk.green('Score:')} ${score.toFixed(6)}`);
    console.log(` ${chalk.green('k1:')} ${k1}`);
    console.log(` ${chalk.green('b:')} ${b}`);
    console.log(` ${chalk.green('Document Length:')} ${options.docLen}`);
    console.log(` ${chalk.green('Avg Doc Length:')} ${options.avgDocLen}`);
  } catch (err) {
    spin.fail(chalk.red('BM25 computation failed'));
    console.error(chalk.red((err as Error).message));
  } finally {
    await client.disconnect();
  }
}
static async topK(
client: RuVectorClient,
options: SparseTopKOptions
): Promise<void> {
const spinner = ora('Computing top-k sparse elements...').start();
try {
await client.connect();
const result = await client.sparseTopK(options.sparse, parseInt(options.k));
spinner.succeed(chalk.green('Top-k elements computed'));
console.log(chalk.bold.blue('\nTop-K Result:'));
console.log(chalk.gray('-'.repeat(40)));
console.log(` ${chalk.green('Original NNZ:')} ${result.originalNnz}`);
console.log(` ${chalk.green('After Top-K:')} ${result.newNnz}`);
console.log(` ${chalk.green('Sparse Vector:')} ${result.vector}`);
} catch (err) {
spinner.fail(chalk.red('Top-k computation failed'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
/**
 * Drop sparse-vector elements below `threshold` and report how many were
 * removed. The NNZ fields in the server reply may be absent, hence the
 * nullish fallbacks in the printout.
 */
static async prune(
  client: RuVectorClient,
  options: SparsePruneOptions
): Promise<void> {
  const spin = ora('Pruning sparse vector...').start();
  try {
    await client.connect();
    const cutoff = parseFloat(options.threshold);
    const pruned = await client.sparsePrune(options.sparse, cutoff);
    spin.succeed(chalk.green('Sparse vector pruned'));
    const before = pruned.originalNnz;
    const after = pruned.newNnz;
    console.log(chalk.bold.blue('\nPrune Result:'));
    console.log(chalk.gray('-'.repeat(40)));
    console.log(` ${chalk.green('Threshold:')} ${options.threshold}`);
    console.log(` ${chalk.green('Original NNZ:')} ${before ?? 'N/A'}`);
    console.log(` ${chalk.green('After Pruning:')} ${after ?? 'N/A'}`);
    console.log(` ${chalk.green('Elements Removed:')} ${(before ?? 0) - (after ?? 0)}`);
  } catch (err) {
    spin.fail(chalk.red('Pruning failed'));
    console.error(chalk.red((err as Error).message));
  } finally {
    await client.disconnect();
  }
}
static async denseToSparse(
client: RuVectorClient,
options: DenseToSparseOptions
): Promise<void> {
const spinner = ora('Converting dense to sparse...').start();
try {
await client.connect();
const dense = JSON.parse(options.dense);
const result = await client.denseToSparse(dense);
spinner.succeed(chalk.green('Conversion completed'));
console.log(chalk.bold.blue('\nConversion Result:'));
console.log(chalk.gray('-'.repeat(40)));
console.log(` ${chalk.green('Dense Dimension:')} ${dense.length}`);
console.log(` ${chalk.green('Non-zero Elements:')} ${result.nnz}`);
console.log(` ${chalk.green('Sparsity:')} ${((1 - result.nnz / dense.length) * 100).toFixed(2)}%`);
console.log(` ${chalk.green('Sparse Vector:')} ${result.vector}`);
} catch (err) {
spinner.fail(chalk.red('Conversion failed'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
static async sparseToDense(
client: RuVectorClient,
sparse: string
): Promise<void> {
const spinner = ora('Converting sparse to dense...').start();
try {
await client.connect();
const result = await client.sparseToDense(sparse);
spinner.succeed(chalk.green('Conversion completed'));
console.log(chalk.bold.blue('\nConversion Result:'));
console.log(chalk.gray('-'.repeat(40)));
console.log(` ${chalk.green('Dense Dimension:')} ${result.length}`);
console.log(` ${chalk.green('Non-zero Elements:')} ${result.filter((v: number) => v !== 0).length}`);
// Show first 10 elements
const preview = result.slice(0, 10).map((v: number) => v.toFixed(4)).join(', ');
console.log(` ${chalk.green('Preview:')} [${preview}${result.length > 10 ? ', ...' : ''}]`);
} catch (err) {
spinner.fail(chalk.red('Conversion failed'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
static async info(client: RuVectorClient, sparse: string): Promise<void> {
const spinner = ora('Getting sparse vector info...').start();
try {
await client.connect();
const info = await client.sparseInfo(sparse);
spinner.stop();
console.log(chalk.bold.blue('\nSparse Vector Info:'));
console.log(chalk.gray('-'.repeat(40)));
console.log(` ${chalk.green('Dimension:')} ${info.dim}`);
console.log(` ${chalk.green('Non-zero Elements (NNZ):')} ${info.nnz}`);
console.log(` ${chalk.green('Sparsity:')} ${info.sparsity.toFixed(2)}%`);
console.log(` ${chalk.green('L2 Norm:')} ${info.norm.toFixed(6)}`);
} catch (err) {
spinner.fail(chalk.red('Failed to get info'));
console.error(chalk.red((err as Error).message));
} finally {
await client.disconnect();
}
}
  /**
   * Print static usage help for the sparse-vector CLI commands.
   * Writes directly to stdout; no client connection is required.
   */
  static showHelp(): void {
    console.log(chalk.bold.blue('\nSparse Vector Operations:'));
    console.log(chalk.gray('-'.repeat(60)));
    // NOTE: the template literal below is user-facing output; its exact
    // wording and spacing are part of the CLI's behavior.
    console.log(`
 ${chalk.yellow('Format:')}
 Sparse vectors use the format: '{index:value, index:value, ...}'
 Example: '{0:0.5, 10:0.3, 100:0.8}'
 ${chalk.yellow('Distance Metrics:')}
 ${chalk.green('dot')} - Dot product (inner product)
 ${chalk.green('cosine')} - Cosine similarity
 ${chalk.green('euclidean')} - L2 distance
 ${chalk.green('manhattan')} - L1 distance
 ${chalk.yellow('BM25 Scoring:')}
 Used for text search relevance ranking.
 Parameters:
 ${chalk.green('k1')} - Term frequency saturation (default: 1.2)
 ${chalk.green('b')} - Length normalization (default: 0.75)
 ${chalk.yellow('Commands:')}
 ${chalk.green('sparse create')} - Create sparse vector from indices/values
 ${chalk.green('sparse distance')} - Compute distance between sparse vectors
 ${chalk.green('sparse bm25')} - Compute BM25 relevance score
 ${chalk.green('sparse top-k')} - Keep only top-k elements by value
 ${chalk.green('sparse prune')} - Remove elements below threshold
 ${chalk.green('sparse dense-to-sparse')} - Convert dense to sparse
 ${chalk.green('sparse sparse-to-dense')} - Convert sparse to dense
 ${chalk.green('sparse info')} - Get sparse vector statistics
 `);
  }
}
// Default export mirrors the named class so both import styles work.
export default SparseCommands;

View File

@@ -0,0 +1,36 @@
/**
* Vector Commands
* CLI commands for vector operations
*/
import type { RuVectorClient } from '../client.js';
/**
 * Options for `vector create` (generated declaration — edit vector.ts).
 */
export interface VectorCreateOptions {
    /** Vector dimensionality as a decimal string (parsed at the call site). */
    dim: string;
    /** Index backing the new table. */
    index: 'hnsw' | 'ivfflat';
}
/**
 * Options for `vector insert` (generated declaration — edit vector.ts).
 * Exactly one of `file` or `text` is expected.
 */
export interface VectorInsertOptions {
    /** Path to a JSON file containing one vector record or an array of them. */
    file?: string;
    /** Raw text; embedded with a placeholder random vector in this build. */
    text?: string;
}
/**
 * Options for `vector search` (generated declaration — edit vector.ts).
 * Exactly one of `query` or `text` is expected.
 */
export interface VectorSearchOptions {
    /** Query vector as a JSON array string. */
    query?: string;
    /** Raw text; embedded with a placeholder random vector in this build. */
    text?: string;
    /** Number of nearest neighbors to return, as a decimal string. */
    topK: string;
    /** Distance metric used by the search. */
    metric: 'cosine' | 'l2' | 'ip';
}
/**
 * Options for `vector distance` (generated declaration — edit vector.ts).
 */
export interface VectorDistanceOptions {
    /** First vector as a JSON array string. */
    a: string;
    /** Second vector as a JSON array string. */
    b: string;
    /** Distance metric to compute. */
    metric: 'cosine' | 'l2' | 'ip';
}
/**
 * Options for `vector normalize` (generated declaration — edit vector.ts).
 */
export interface VectorNormalizeOptions {
    /** Vector as a JSON array string. */
    vector: string;
}
/**
 * CLI command handlers for vector operations (generated declaration —
 * edit vector.ts, not this file). Each method connects the client,
 * performs one operation, prints the result, and disconnects.
 */
export declare class VectorCommands {
    static distance(client: RuVectorClient, options: VectorDistanceOptions): Promise<void>;
    static normalize(client: RuVectorClient, options: VectorNormalizeOptions): Promise<void>;
    static create(client: RuVectorClient, name: string, options: VectorCreateOptions): Promise<void>;
    static insert(client: RuVectorClient, table: string, options: VectorInsertOptions): Promise<void>;
    static search(client: RuVectorClient, table: string, options: VectorSearchOptions): Promise<void>;
}
export default VectorCommands;
//# sourceMappingURL=vector.d.ts.map

View File

@@ -0,0 +1 @@
{"version":3,"file":"vector.d.ts","sourceRoot":"","sources":["vector.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAMH,OAAO,KAAK,EAAE,cAAc,EAAE,MAAM,cAAc,CAAC;AAEnD,MAAM,WAAW,mBAAmB;IAClC,GAAG,EAAE,MAAM,CAAC;IACZ,KAAK,EAAE,MAAM,GAAG,SAAS,CAAC;CAC3B;AAED,MAAM,WAAW,mBAAmB;IAClC,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,IAAI,CAAC,EAAE,MAAM,CAAC;CACf;AAED,MAAM,WAAW,mBAAmB;IAClC,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,IAAI,EAAE,MAAM,CAAC;IACb,MAAM,EAAE,QAAQ,GAAG,IAAI,GAAG,IAAI,CAAC;CAChC;AAED,MAAM,WAAW,qBAAqB;IACpC,CAAC,EAAE,MAAM,CAAC;IACV,CAAC,EAAE,MAAM,CAAC;IACV,MAAM,EAAE,QAAQ,GAAG,IAAI,GAAG,IAAI,CAAC;CAChC;AAED,MAAM,WAAW,sBAAsB;IACrC,MAAM,EAAE,MAAM,CAAC;CAChB;AAED,qBAAa,cAAc;WACZ,QAAQ,CACnB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,qBAAqB,GAC7B,OAAO,CAAC,IAAI,CAAC;WAiDH,SAAS,CACpB,MAAM,EAAE,cAAc,EACtB,OAAO,EAAE,sBAAsB,GAC9B,OAAO,CAAC,IAAI,CAAC;WAuCH,MAAM,CACjB,MAAM,EAAE,cAAc,EACtB,IAAI,EAAE,MAAM,EACZ,OAAO,EAAE,mBAAmB,GAC3B,OAAO,CAAC,IAAI,CAAC;WAsBH,MAAM,CACjB,MAAM,EAAE,cAAc,EACtB,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,mBAAmB,GAC3B,OAAO,CAAC,IAAI,CAAC;WAsCH,MAAM,CACjB,MAAM,EAAE,cAAc,EACtB,KAAK,EAAE,MAAM,EACb,OAAO,EAAE,mBAAmB,GAC3B,OAAO,CAAC,IAAI,CAAC;CA0DjB;AAED,eAAe,cAAc,CAAC"}

View File

@@ -0,0 +1,196 @@
"use strict";
/**
* Vector Commands
* CLI commands for vector operations
*/
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.VectorCommands = void 0;
const chalk_1 = __importDefault(require("chalk"));
const ora_1 = __importDefault(require("ora"));
const cli_table3_1 = __importDefault(require("cli-table3"));
const fs_1 = require("fs");
// NOTE(review): this is tsc-generated CommonJS output for vector.ts.
// Prefer fixing the TypeScript source and recompiling over patching here;
// comments below are for readers navigating the compiled artifact.
class VectorCommands {
    // Compute the distance between two JSON-array vectors with the chosen metric.
    static async distance(client, options) {
        const spinner = (0, ora_1.default)('Computing vector distance...').start();
        try {
            await client.connect();
            const a = JSON.parse(options.a);
            const b = JSON.parse(options.b);
            let distance;
            let metricName;
            // Metric selection; cosine is the default for unknown values.
            switch (options.metric) {
                case 'l2':
                    distance = await client.l2DistanceArr(a, b);
                    metricName = 'L2 (Euclidean)';
                    break;
                case 'ip':
                    distance = await client.innerProductArr(a, b);
                    metricName = 'Inner Product';
                    break;
                case 'cosine':
                default:
                    distance = await client.cosineDistanceArr(a, b);
                    metricName = 'Cosine';
                    break;
            }
            spinner.succeed(chalk_1.default.green('Distance computed'));
            console.log(chalk_1.default.bold.blue('\nVector Distance:'));
            console.log(chalk_1.default.gray('-'.repeat(40)));
            console.log(` ${chalk_1.default.green('Metric:')} ${metricName}`);
            console.log(` ${chalk_1.default.green('Distance:')} ${distance.toFixed(6)}`);
            console.log(` ${chalk_1.default.green('Dimension:')} ${a.length}`);
            // Additional context for cosine distance
            if (options.metric === 'cosine') {
                const similarity = 1 - distance;
                console.log(` ${chalk_1.default.green('Similarity:')} ${similarity.toFixed(6)} (1 - distance)`);
            }
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Distance computation failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    // Normalize a JSON-array vector to unit length and report before/after norms.
    static async normalize(client, options) {
        const spinner = (0, ora_1.default)('Normalizing vector...').start();
        try {
            await client.connect();
            const vector = JSON.parse(options.vector);
            const normalized = await client.vectorNormalize(vector);
            spinner.succeed(chalk_1.default.green('Vector normalized'));
            console.log(chalk_1.default.bold.blue('\nNormalized Vector:'));
            console.log(chalk_1.default.gray('-'.repeat(40)));
            console.log(` ${chalk_1.default.green('Original Dimension:')} ${vector.length}`);
            // Compute original norm for reference
            const originalNorm = Math.sqrt(vector.reduce((sum, v) => sum + v * v, 0));
            console.log(` ${chalk_1.default.green('Original Norm:')} ${originalNorm.toFixed(6)}`);
            // Verify normalized norm is ~1
            const normalizedNorm = Math.sqrt(normalized.reduce((sum, v) => sum + v * v, 0));
            console.log(` ${chalk_1.default.green('Normalized Norm:')} ${normalizedNorm.toFixed(6)}`);
            // Display vector (truncated if too long)
            if (normalized.length <= 10) {
                console.log(` ${chalk_1.default.green('Result:')} [${normalized.map((v) => v.toFixed(4)).join(', ')}]`);
            }
            else {
                const first5 = normalized.slice(0, 5).map((v) => v.toFixed(4)).join(', ');
                const last3 = normalized.slice(-3).map((v) => v.toFixed(4)).join(', ');
                console.log(` ${chalk_1.default.green('Result:')} [${first5}, ..., ${last3}]`);
            }
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Normalization failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    // Create a vector table with the given dimension and index type.
    static async create(client, name, options) {
        const spinner = (0, ora_1.default)(`Creating vector table '${name}'...`).start();
        try {
            await client.connect();
            await client.createVectorTable(name, parseInt(options.dim), options.index);
            spinner.succeed(chalk_1.default.green(`Vector table '${name}' created successfully`));
            console.log(` ${chalk_1.default.gray('Dimensions:')} ${options.dim}`);
            console.log(` ${chalk_1.default.gray('Index Type:')} ${options.index.toUpperCase()}`);
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Failed to create vector table'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    // Insert vectors from a JSON file, or a placeholder embedding for raw text.
    static async insert(client, table, options) {
        const spinner = (0, ora_1.default)(`Inserting vectors into '${table}'...`).start();
        try {
            await client.connect();
            let vectors = [];
            if (options.file) {
                const content = (0, fs_1.readFileSync)(options.file, 'utf-8');
                const data = JSON.parse(content);
                vectors = Array.isArray(data) ? data : [data];
            }
            else if (options.text) {
                // For text, we'd need an embedding model
                // For now, just show a placeholder
                console.log(chalk_1.default.yellow('Note: Text embedding requires an embedding model'));
                console.log(chalk_1.default.gray('Using placeholder embedding...'));
                vectors = [{
                        vector: Array(384).fill(0).map(() => Math.random()),
                        metadata: { text: options.text }
                    }];
            }
            let inserted = 0;
            for (const item of vectors) {
                await client.insertVector(table, item.vector, item.metadata);
                inserted++;
            }
            spinner.succeed(chalk_1.default.green(`Inserted ${inserted} vector(s) into '${table}'`));
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Failed to insert vectors'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
    // Search a table for nearest neighbors and render results as a table.
    static async search(client, table, options) {
        const spinner = (0, ora_1.default)(`Searching vectors in '${table}'...`).start();
        try {
            await client.connect();
            let queryVector;
            if (options.query) {
                queryVector = JSON.parse(options.query);
            }
            else if (options.text) {
                console.log(chalk_1.default.yellow('Note: Text embedding requires an embedding model'));
                console.log(chalk_1.default.gray('Using placeholder embedding...'));
                queryVector = Array(384).fill(0).map(() => Math.random());
            }
            else {
                throw new Error('Either --query or --text is required');
            }
            const results = await client.searchVectors(table, queryVector, parseInt(options.topK), options.metric);
            spinner.stop();
            if (results.length === 0) {
                console.log(chalk_1.default.yellow('No results found'));
                return;
            }
            const resultTable = new cli_table3_1.default({
                head: [
                    chalk_1.default.cyan('ID'),
                    chalk_1.default.cyan('Distance'),
                    chalk_1.default.cyan('Metadata')
                ],
                colWidths: [10, 15, 50]
            });
            for (const result of results) {
                resultTable.push([
                    String(result.id),
                    result.distance.toFixed(6),
                    result.metadata ? JSON.stringify(result.metadata).slice(0, 45) + '...' : '-'
                ]);
            }
            console.log(chalk_1.default.bold.blue(`\nSearch Results (${results.length} matches)`));
            console.log(resultTable.toString());
        }
        catch (err) {
            spinner.fail(chalk_1.default.red('Search failed'));
            console.error(chalk_1.default.red(err.message));
        }
        finally {
            await client.disconnect();
        }
    }
}
exports.VectorCommands = VectorCommands;
exports.default = VectorCommands;
//# sourceMappingURL=vector.js.map

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,266 @@
/**
* Vector Commands
* CLI commands for vector operations
*/
import chalk from 'chalk';
import ora from 'ora';
import Table from 'cli-table3';
import { readFileSync } from 'fs';
import type { RuVectorClient } from '../client.js';
/** Options for the `vector create` command. */
export interface VectorCreateOptions {
  /** Vector dimensionality as a decimal string (parsed with parseInt). */
  dim: string;
  /** Index backing the new table. */
  index: 'hnsw' | 'ivfflat';
}
/**
 * Options for the `vector insert` command.
 * Exactly one of `file` or `text` should be supplied.
 */
export interface VectorInsertOptions {
  /** Path to a JSON file containing one vector record or an array of them. */
  file?: string;
  /** Raw text; currently embedded with a placeholder random vector. */
  text?: string;
}
/**
 * Options for the `vector search` command.
 * Exactly one of `query` or `text` should be supplied.
 */
export interface VectorSearchOptions {
  /** Query vector as a JSON array string. */
  query?: string;
  /** Raw text; currently embedded with a placeholder random vector. */
  text?: string;
  /** Number of nearest neighbors to return, as a decimal string. */
  topK: string;
  /** Distance metric used by the search. */
  metric: 'cosine' | 'l2' | 'ip';
}
/** Options for the `vector distance` command. */
export interface VectorDistanceOptions {
  /** First vector as a JSON array string. */
  a: string;
  /** Second vector as a JSON array string. */
  b: string;
  /** Distance metric to compute. */
  metric: 'cosine' | 'l2' | 'ip';
}
/** Options for the `vector normalize` command. */
export interface VectorNormalizeOptions {
  /** Vector as a JSON array string. */
  vector: string;
}
/**
 * CLI command handlers for vector operations. Each method owns the full
 * lifecycle of one command: connect the client, perform the operation,
 * print a human-readable result (spinner + colored output), and always
 * disconnect. Errors are reported to stderr, never rethrown.
 */
export class VectorCommands {
  /** L2 norm of a numeric vector; used for before/after normalization reporting. */
  private static l2Norm(v: number[]): number {
    return Math.sqrt(v.reduce((sum, x) => sum + x * x, 0));
  }

  /**
   * Compute the distance between two vectors supplied as JSON array strings.
   *
   * @param client - RuVector client; connected here and always disconnected.
   * @param options - Vectors `a`/`b` (JSON array strings) and the metric.
   */
  static async distance(
    client: RuVectorClient,
    options: VectorDistanceOptions
  ): Promise<void> {
    const spinner = ora('Computing vector distance...').start();
    try {
      await client.connect();
      const a: number[] = JSON.parse(options.a);
      const b: number[] = JSON.parse(options.b);
      // Fail fast with a clear message instead of surfacing a server error.
      if (!Array.isArray(a) || !Array.isArray(b)) {
        throw new Error('Both vectors must be JSON arrays of numbers');
      }
      if (a.length !== b.length) {
        throw new Error(`Dimension mismatch: ${a.length} vs ${b.length}`);
      }
      let distance: number;
      let metricName: string;
      // Cosine is the default for any unrecognized metric value.
      switch (options.metric) {
        case 'l2':
          distance = await client.l2DistanceArr(a, b);
          metricName = 'L2 (Euclidean)';
          break;
        case 'ip':
          distance = await client.innerProductArr(a, b);
          metricName = 'Inner Product';
          break;
        case 'cosine':
        default:
          distance = await client.cosineDistanceArr(a, b);
          metricName = 'Cosine';
          break;
      }
      spinner.succeed(chalk.green('Distance computed'));
      console.log(chalk.bold.blue('\nVector Distance:'));
      console.log(chalk.gray('-'.repeat(40)));
      console.log(` ${chalk.green('Metric:')} ${metricName}`);
      console.log(` ${chalk.green('Distance:')} ${distance.toFixed(6)}`);
      console.log(` ${chalk.green('Dimension:')} ${a.length}`);
      // Cosine distance = 1 - similarity; report similarity for convenience.
      if (options.metric === 'cosine') {
        const similarity = 1 - distance;
        console.log(` ${chalk.green('Similarity:')} ${similarity.toFixed(6)} (1 - distance)`);
      }
    } catch (err) {
      spinner.fail(chalk.red('Distance computation failed'));
      console.error(chalk.red((err as Error).message));
    } finally {
      await client.disconnect();
    }
  }

  /**
   * Normalize a vector (JSON array string) to unit length and report the
   * norms before and after as a sanity check.
   *
   * @param client - RuVector client; connected here and always disconnected.
   * @param options - `vector` is a JSON array string.
   */
  static async normalize(
    client: RuVectorClient,
    options: VectorNormalizeOptions
  ): Promise<void> {
    const spinner = ora('Normalizing vector...').start();
    try {
      await client.connect();
      const vector: number[] = JSON.parse(options.vector);
      if (!Array.isArray(vector) || vector.length === 0) {
        throw new Error('vector must be a non-empty JSON array of numbers');
      }
      const normalized = await client.vectorNormalize(vector);
      spinner.succeed(chalk.green('Vector normalized'));
      console.log(chalk.bold.blue('\nNormalized Vector:'));
      console.log(chalk.gray('-'.repeat(40)));
      console.log(` ${chalk.green('Original Dimension:')} ${vector.length}`);
      console.log(` ${chalk.green('Original Norm:')} ${VectorCommands.l2Norm(vector).toFixed(6)}`);
      // A correctly normalized vector should report a norm of ~1.
      console.log(` ${chalk.green('Normalized Norm:')} ${VectorCommands.l2Norm(normalized).toFixed(6)}`);
      // Truncate display of long vectors to keep output readable.
      if (normalized.length <= 10) {
        console.log(` ${chalk.green('Result:')} [${normalized.map((v: number) => v.toFixed(4)).join(', ')}]`);
      } else {
        const first5 = normalized.slice(0, 5).map((v: number) => v.toFixed(4)).join(', ');
        const last3 = normalized.slice(-3).map((v: number) => v.toFixed(4)).join(', ');
        console.log(` ${chalk.green('Result:')} [${first5}, ..., ${last3}]`);
      }
    } catch (err) {
      spinner.fail(chalk.red('Normalization failed'));
      console.error(chalk.red((err as Error).message));
    } finally {
      await client.disconnect();
    }
  }

  /**
   * Create a vector table with the given name, dimension and index type.
   *
   * @param client - RuVector client; connected here and always disconnected.
   * @param name - Table name.
   * @param options - Dimension (decimal string) and index type.
   */
  static async create(
    client: RuVectorClient,
    name: string,
    options: VectorCreateOptions
  ): Promise<void> {
    const spinner = ora(`Creating vector table '${name}'...`).start();
    try {
      await client.connect();
      // Explicit radix plus validation: parseInt('abc') would otherwise
      // silently create a table with dimension NaN.
      const dim = Number.parseInt(options.dim, 10);
      if (!Number.isInteger(dim) || dim <= 0) {
        throw new Error('dim must be a positive integer');
      }
      await client.createVectorTable(name, dim, options.index);
      spinner.succeed(chalk.green(`Vector table '${name}' created successfully`));
      console.log(` ${chalk.gray('Dimensions:')} ${options.dim}`);
      console.log(` ${chalk.gray('Index Type:')} ${options.index.toUpperCase()}`);
    } catch (err) {
      spinner.fail(chalk.red('Failed to create vector table'));
      console.error(chalk.red((err as Error).message));
    } finally {
      await client.disconnect();
    }
  }

  /**
   * Insert vectors into a table, either from a JSON file or — as a
   * placeholder until an embedding model is wired in — from raw text.
   *
   * @param client - RuVector client; connected here and always disconnected.
   * @param table - Target table name.
   * @param options - Exactly one of `file` or `text`.
   */
  static async insert(
    client: RuVectorClient,
    table: string,
    options: VectorInsertOptions
  ): Promise<void> {
    const spinner = ora(`Inserting vectors into '${table}'...`).start();
    try {
      await client.connect();
      let vectors: { vector: number[]; metadata?: Record<string, unknown> }[] = [];
      if (options.file) {
        const content = readFileSync(options.file, 'utf-8');
        const data = JSON.parse(content);
        vectors = Array.isArray(data) ? data : [data];
      } else if (options.text) {
        // For text, we'd need an embedding model
        // For now, just show a placeholder
        console.log(chalk.yellow('Note: Text embedding requires an embedding model'));
        console.log(chalk.gray('Using placeholder embedding...'));
        vectors = [{
          vector: Array(384).fill(0).map(() => Math.random()),
          metadata: { text: options.text }
        }];
      } else {
        // Consistent with search(): reject a no-op call instead of silently
        // reporting "Inserted 0 vector(s)".
        throw new Error('Either --file or --text is required');
      }
      let inserted = 0;
      for (const item of vectors) {
        await client.insertVector(table, item.vector, item.metadata);
        inserted++;
      }
      spinner.succeed(chalk.green(`Inserted ${inserted} vector(s) into '${table}'`));
    } catch (err) {
      spinner.fail(chalk.red('Failed to insert vectors'));
      console.error(chalk.red((err as Error).message));
    } finally {
      await client.disconnect();
    }
  }

  /**
   * Search a table for the nearest neighbors of a query vector (JSON array
   * string) or — placeholder — raw text, and render results as a table.
   *
   * @param client - RuVector client; connected here and always disconnected.
   * @param table - Table to search.
   * @param options - Exactly one of `query` or `text`, plus topK and metric.
   */
  static async search(
    client: RuVectorClient,
    table: string,
    options: VectorSearchOptions
  ): Promise<void> {
    const spinner = ora(`Searching vectors in '${table}'...`).start();
    try {
      await client.connect();
      let queryVector: number[];
      if (options.query) {
        queryVector = JSON.parse(options.query);
      } else if (options.text) {
        console.log(chalk.yellow('Note: Text embedding requires an embedding model'));
        console.log(chalk.gray('Using placeholder embedding...'));
        queryVector = Array(384).fill(0).map(() => Math.random());
      } else {
        throw new Error('Either --query or --text is required');
      }
      const topK = Number.parseInt(options.topK, 10);
      if (!Number.isInteger(topK) || topK <= 0) {
        throw new Error('topK must be a positive integer');
      }
      const results = await client.searchVectors(
        table,
        queryVector,
        topK,
        options.metric
      );
      spinner.stop();
      if (results.length === 0) {
        console.log(chalk.yellow('No results found'));
        return;
      }
      const resultTable = new Table({
        head: [
          chalk.cyan('ID'),
          chalk.cyan('Distance'),
          chalk.cyan('Metadata')
        ],
        colWidths: [10, 15, 50]
      });
      for (const result of results) {
        // Only append an ellipsis when the metadata was actually truncated.
        const meta = result.metadata ? JSON.stringify(result.metadata) : '-';
        resultTable.push([
          String(result.id),
          result.distance.toFixed(6),
          meta.length > 45 ? meta.slice(0, 45) + '...' : meta
        ]);
      }
      console.log(chalk.bold.blue(`\nSearch Results (${results.length} matches)`));
      console.log(resultTable.toString());
    } catch (err) {
      spinner.fail(chalk.red('Search failed'));
      console.error(chalk.red((err as Error).message));
    } finally {
      await client.disconnect();
    }
  }
}
export default VectorCommands;