Squashed 'vendor/ruvector/' content from commit b64c2172
git-subtree-dir: vendor/ruvector git-subtree-split: b64c21726f2bb37286d9ee36a7869fef60cc6900
This commit is contained in:
12
npm/packages/ruvector-extensions/examples/complete-integration.d.ts
vendored
Normal file
12
npm/packages/ruvector-extensions/examples/complete-integration.d.ts
vendored
Normal file
@@ -0,0 +1,12 @@
|
||||
/**
|
||||
* Complete Integration Example for RuVector Extensions
|
||||
*
|
||||
* This example demonstrates all 5 major features:
|
||||
* 1. Real Embeddings (OpenAI/Cohere/Anthropic/HuggingFace)
|
||||
* 2. Database Persistence (save/load/snapshots)
|
||||
* 3. Graph Exports (GraphML, GEXF, Neo4j, D3.js, NetworkX)
|
||||
* 4. Temporal Tracking (version control, time-travel)
|
||||
* 5. Interactive Web UI (D3.js visualization)
|
||||
*/
|
||||
export {};
|
||||
//# sourceMappingURL=complete-integration.d.ts.map
|
||||
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"complete-integration.d.ts","sourceRoot":"","sources":["complete-integration.ts"],"names":[],"mappings":"AAAA;;;;;;;;;GASG"}
|
||||
@@ -0,0 +1,189 @@
|
||||
"use strict";
/**
 * Complete Integration Example for RuVector Extensions
 *
 * This example demonstrates all 5 major features:
 * 1. Real Embeddings (OpenAI/Cohere/Anthropic/HuggingFace)
 * 2. Database Persistence (save/load/snapshots)
 * 3. Graph Exports (GraphML, GEXF, Neo4j, D3.js, NetworkX)
 * 4. Temporal Tracking (version control, time-travel)
 * 5. Interactive Web UI (D3.js visualization)
 */
Object.defineProperty(exports, "__esModule", { value: true });
const ruvector_1 = require("ruvector");
const index_js_1 = require("../dist/index.js");
/**
 * Runs the end-to-end demo: creates a VectorDB, embeds and inserts five
 * documents, saves/snapshots the database, records two temporal versions,
 * builds and exports a similarity graph, and finally starts the web UI on
 * port 3000 (kept alive until SIGINT).
 *
 * Requires OPENAI_API_KEY in the environment for real embeddings; falls back
 * to the placeholder 'demo-key' otherwise.
 */
async function main() {
    console.log('🚀 RuVector Extensions - Complete Integration Example\n');
    console.log('='.repeat(60));
    // ========== 1. Initialize Database ==========
    console.log('\n📊 Step 1: Initialize VectorDB');
    const db = new ruvector_1.VectorDB({
        dimensions: 1536,
        distanceMetric: 'Cosine',
        storagePath: './data/example.db'
    });
    console.log('✅ Database initialized (1536 dimensions, Cosine similarity)');
    // ========== 2. Real Embeddings Integration ==========
    console.log('\n🔤 Step 2: Generate Real Embeddings with OpenAI');
    const openai = new index_js_1.OpenAIEmbeddings({
        apiKey: process.env.OPENAI_API_KEY || 'demo-key',
        model: 'text-embedding-3-small'
    });
    const documents = [
        { id: '1', text: 'Machine learning is a subset of artificial intelligence', category: 'AI' },
        { id: '2', text: 'Deep learning uses neural networks with multiple layers', category: 'AI' },
        { id: '3', text: 'Natural language processing enables computers to understand text', category: 'NLP' },
        { id: '4', text: 'Computer vision allows machines to interpret visual information', category: 'CV' },
        { id: '5', text: 'Reinforcement learning trains agents through rewards and penalties', category: 'RL' }
    ];
    console.log(`Embedding ${documents.length} documents...`);
    await (0, index_js_1.embedAndInsert)(db, openai, documents.map(d => ({
        id: d.id,
        text: d.text,
        metadata: { category: d.category }
    })), {
        onProgress: (progress) => {
            console.log(` Progress: ${progress.percentage}% - ${progress.message}`);
        }
    });
    console.log('✅ Documents embedded and inserted');
    // ========== 3. Database Persistence ==========
    console.log('\n💾 Step 3: Database Persistence');
    const persistence = new index_js_1.DatabasePersistence(db, {
        baseDir: './data/backups',
        format: 'json',
        compression: 'gzip',
        autoSaveInterval: 60000 // Auto-save every minute
    });
    // Save database
    console.log('Saving database...');
    await persistence.save({
        onProgress: (p) => console.log(` ${p.percentage}% - ${p.message}`)
    });
    console.log('✅ Database saved');
    // Create snapshot
    console.log('Creating snapshot...');
    const snapshot = await persistence.createSnapshot('initial-state', {
        description: 'Initial state with 5 documents',
        tags: ['demo', 'v1.0']
    });
    console.log(`✅ Snapshot created: ${snapshot.id}`);
    // ========== 4. Temporal Tracking ==========
    console.log('\n⏰ Step 4: Temporal Tracking & Version Control');
    const temporal = new index_js_1.TemporalTracker();
    // Track initial state
    temporal.trackChange({
        type: index_js_1.ChangeType.ADDITION,
        path: 'documents',
        before: null,
        after: { count: 5, categories: ['AI', 'NLP', 'CV', 'RL'] },
        timestamp: Date.now(),
        metadata: { operation: 'initial_load' }
    });
    // Create version
    const v1 = await temporal.createVersion({
        description: 'Initial dataset with 5 AI/ML documents',
        tags: ['v1.0', 'baseline'],
        author: 'demo-user'
    });
    console.log(`✅ Version created: ${v1.id}`);
    // Simulate a change
    temporal.trackChange({
        type: index_js_1.ChangeType.ADDITION,
        path: 'documents.6',
        before: null,
        after: { id: '6', text: 'Transformer models revolutionized NLP', category: 'NLP' },
        timestamp: Date.now()
    });
    const v2 = await temporal.createVersion({
        description: 'Added transformer document',
        tags: ['v1.1']
    });
    console.log(`✅ Version updated: ${v2.id}`);
    // Compare versions
    const diff = await temporal.compareVersions(v1.id, v2.id);
    console.log(`📊 Changes: ${diff.changes.length} modifications`);
    console.log(` Added: ${diff.summary.added}, Modified: ${diff.summary.modified}`);
    // ========== 5. Graph Exports ==========
    console.log('\n📈 Step 5: Export Similarity Graphs');
    // Build graph from vectors
    console.log('Building similarity graph...');
    // db.get already returns a promise per id; no async wrapper needed.
    const entries = await Promise.all(documents.map((d) => db.get(d.id)));
    // BUGFIX: use loose `!= null` so entries that come back as `undefined`
    // (id missing from the DB) are dropped as well as `null` ones; the old
    // strict `!== null` let `undefined` through to the graph builder.
    const graph = await (0, index_js_1.buildGraphFromEntries)(entries.filter(e => e != null), {
        threshold: 0.7, // Only edges with >70% similarity
        maxNeighbors: 3
    });
    console.log(`✅ Graph built: ${graph.nodes.length} nodes, ${graph.edges.length} edges`);
    // Export to multiple formats
    console.log('Exporting to formats...');
    // GraphML (for Gephi, yEd) — results are produced for demonstration only.
    const graphml = (0, index_js_1.exportToGraphML)(graph, {
        graphName: 'AI Concepts Network',
        includeVectors: false
    });
    console.log(' ✅ GraphML export ready (for Gephi/yEd)');
    // GEXF (for Gephi)
    const gexf = (0, index_js_1.exportToGEXF)(graph, {
        graphName: 'AI Knowledge Graph',
        graphDescription: 'Vector similarity network of AI concepts'
    });
    console.log(' ✅ GEXF export ready (for Gephi)');
    // Neo4j (for graph database)
    const neo4j = (0, index_js_1.exportToNeo4j)(graph, {
        includeMetadata: true
    });
    console.log(' ✅ Neo4j Cypher queries ready');
    // D3.js (for web visualization)
    const d3Data = (0, index_js_1.exportToD3)(graph);
    console.log(' ✅ D3.js JSON ready (for web viz)');
    // ========== 6. Interactive Web UI ==========
    console.log('\n🌐 Step 6: Launch Interactive Web UI');
    console.log('Starting web server...');
    const uiServer = await (0, index_js_1.startUIServer)(db, 3000);
    console.log('✅ Web UI started at http://localhost:3000');
    console.log('\n📱 Features:');
    console.log(' • Force-directed graph visualization');
    console.log(' • Interactive node dragging & zoom');
    console.log(' • Real-time similarity search');
    console.log(' • Metadata inspection');
    console.log(' • Export as PNG/SVG');
    console.log(' • WebSocket live updates');
    // ========== Summary ==========
    console.log('\n' + '='.repeat(60));
    console.log('🎉 Complete Integration Successful!\n');
    console.log('Summary:');
    console.log(` 📊 Database: ${await db.len()} vectors (1536-dim)`);
    console.log(` 💾 Persistence: 1 snapshot, auto-save enabled`);
    console.log(` ⏰ Versions: 2 versions tracked`);
    console.log(` 📈 Graph: ${graph.nodes.length} nodes, ${graph.edges.length} edges`);
    console.log(` 📦 Exports: GraphML, GEXF, Neo4j, D3.js ready`);
    console.log(` 🌐 UI Server: Running on port 3000`);
    console.log('\n📖 Next Steps:');
    console.log(' 1. Open http://localhost:3000 to explore the graph');
    console.log(' 2. Import GraphML into Gephi for advanced visualization');
    console.log(' 3. Run Neo4j queries to analyze relationships');
    console.log(' 4. Use temporal tracking to monitor changes over time');
    console.log(' 5. Set up auto-save for production deployments');
    console.log('\n💡 Pro Tips:');
    console.log(' • Use OpenAI embeddings for best semantic understanding');
    console.log(' • Create snapshots before major updates');
    console.log(' • Enable auto-save for production (already enabled in this demo)');
    console.log(' • Export to Neo4j for complex graph queries');
    console.log(' • Monitor versions to track ontology evolution');
    console.log('\n🛑 Press Ctrl+C to stop the UI server');
    console.log('='.repeat(60) + '\n');
    // Keep server running until Ctrl+C, then release server + auto-save timer.
    process.on('SIGINT', async () => {
        console.log('\n\n🛑 Shutting down...');
        await uiServer.stop();
        await persistence.shutdown();
        console.log('✅ Cleanup complete. Goodbye!');
        process.exit(0);
    });
}
// Run example
main().catch(console.error);
//# sourceMappingURL=complete-integration.js.map
|
||||
File diff suppressed because one or more lines are too long
@@ -0,0 +1,243 @@
|
||||
/**
|
||||
* Complete Integration Example for RuVector Extensions
|
||||
*
|
||||
* This example demonstrates all 5 major features:
|
||||
* 1. Real Embeddings (OpenAI/Cohere/Anthropic/HuggingFace)
|
||||
* 2. Database Persistence (save/load/snapshots)
|
||||
* 3. Graph Exports (GraphML, GEXF, Neo4j, D3.js, NetworkX)
|
||||
* 4. Temporal Tracking (version control, time-travel)
|
||||
* 5. Interactive Web UI (D3.js visualization)
|
||||
*/
|
||||
|
||||
import { VectorDB } from 'ruvector';
|
||||
import {
|
||||
// Embeddings
|
||||
OpenAIEmbeddings,
|
||||
embedAndInsert,
|
||||
|
||||
// Persistence
|
||||
DatabasePersistence,
|
||||
|
||||
// Exports
|
||||
buildGraphFromEntries,
|
||||
exportToGraphML,
|
||||
exportToGEXF,
|
||||
exportToNeo4j,
|
||||
exportToD3,
|
||||
|
||||
// Temporal
|
||||
TemporalTracker,
|
||||
ChangeType,
|
||||
|
||||
// UI
|
||||
startUIServer
|
||||
} from '../dist/index.js';
|
||||
|
||||
async function main() {
|
||||
console.log('🚀 RuVector Extensions - Complete Integration Example\n');
|
||||
console.log('=' .repeat(60));
|
||||
|
||||
// ========== 1. Initialize Database ==========
|
||||
console.log('\n📊 Step 1: Initialize VectorDB');
|
||||
const db = new VectorDB({
|
||||
dimensions: 1536,
|
||||
distanceMetric: 'Cosine',
|
||||
storagePath: './data/example.db'
|
||||
});
|
||||
console.log('✅ Database initialized (1536 dimensions, Cosine similarity)');
|
||||
|
||||
// ========== 2. Real Embeddings Integration ==========
|
||||
console.log('\n🔤 Step 2: Generate Real Embeddings with OpenAI');
|
||||
|
||||
const openai = new OpenAIEmbeddings({
|
||||
apiKey: process.env.OPENAI_API_KEY || 'demo-key',
|
||||
model: 'text-embedding-3-small'
|
||||
});
|
||||
|
||||
const documents = [
|
||||
{ id: '1', text: 'Machine learning is a subset of artificial intelligence', category: 'AI' },
|
||||
{ id: '2', text: 'Deep learning uses neural networks with multiple layers', category: 'AI' },
|
||||
{ id: '3', text: 'Natural language processing enables computers to understand text', category: 'NLP' },
|
||||
{ id: '4', text: 'Computer vision allows machines to interpret visual information', category: 'CV' },
|
||||
{ id: '5', text: 'Reinforcement learning trains agents through rewards and penalties', category: 'RL' }
|
||||
];
|
||||
|
||||
console.log(`Embedding ${documents.length} documents...`);
|
||||
await embedAndInsert(db, openai, documents.map(d => ({
|
||||
id: d.id,
|
||||
text: d.text,
|
||||
metadata: { category: d.category }
|
||||
})), {
|
||||
onProgress: (progress) => {
|
||||
console.log(` Progress: ${progress.percentage}% - ${progress.message}`);
|
||||
}
|
||||
});
|
||||
console.log('✅ Documents embedded and inserted');
|
||||
|
||||
// ========== 3. Database Persistence ==========
|
||||
console.log('\n💾 Step 3: Database Persistence');
|
||||
|
||||
const persistence = new DatabasePersistence(db, {
|
||||
baseDir: './data/backups',
|
||||
format: 'json',
|
||||
compression: 'gzip',
|
||||
autoSaveInterval: 60000 // Auto-save every minute
|
||||
});
|
||||
|
||||
// Save database
|
||||
console.log('Saving database...');
|
||||
await persistence.save({
|
||||
onProgress: (p) => console.log(` ${p.percentage}% - ${p.message}`)
|
||||
});
|
||||
console.log('✅ Database saved');
|
||||
|
||||
// Create snapshot
|
||||
console.log('Creating snapshot...');
|
||||
const snapshot = await persistence.createSnapshot('initial-state', {
|
||||
description: 'Initial state with 5 documents',
|
||||
tags: ['demo', 'v1.0']
|
||||
});
|
||||
console.log(`✅ Snapshot created: ${snapshot.id}`);
|
||||
|
||||
// ========== 4. Temporal Tracking ==========
|
||||
console.log('\n⏰ Step 4: Temporal Tracking & Version Control');
|
||||
|
||||
const temporal = new TemporalTracker();
|
||||
|
||||
// Track initial state
|
||||
temporal.trackChange({
|
||||
type: ChangeType.ADDITION,
|
||||
path: 'documents',
|
||||
before: null,
|
||||
after: { count: 5, categories: ['AI', 'NLP', 'CV', 'RL'] },
|
||||
timestamp: Date.now(),
|
||||
metadata: { operation: 'initial_load' }
|
||||
});
|
||||
|
||||
// Create version
|
||||
const v1 = await temporal.createVersion({
|
||||
description: 'Initial dataset with 5 AI/ML documents',
|
||||
tags: ['v1.0', 'baseline'],
|
||||
author: 'demo-user'
|
||||
});
|
||||
console.log(`✅ Version created: ${v1.id}`);
|
||||
|
||||
// Simulate a change
|
||||
temporal.trackChange({
|
||||
type: ChangeType.ADDITION,
|
||||
path: 'documents.6',
|
||||
before: null,
|
||||
after: { id: '6', text: 'Transformer models revolutionized NLP', category: 'NLP' },
|
||||
timestamp: Date.now()
|
||||
});
|
||||
|
||||
const v2 = await temporal.createVersion({
|
||||
description: 'Added transformer document',
|
||||
tags: ['v1.1']
|
||||
});
|
||||
console.log(`✅ Version updated: ${v2.id}`);
|
||||
|
||||
// Compare versions
|
||||
const diff = await temporal.compareVersions(v1.id, v2.id);
|
||||
console.log(`📊 Changes: ${diff.changes.length} modifications`);
|
||||
console.log(` Added: ${diff.summary.added}, Modified: ${diff.summary.modified}`);
|
||||
|
||||
// ========== 5. Graph Exports ==========
|
||||
console.log('\n📈 Step 5: Export Similarity Graphs');
|
||||
|
||||
// Build graph from vectors
|
||||
console.log('Building similarity graph...');
|
||||
const entries = await Promise.all(
|
||||
documents.map(async (d) => {
|
||||
const vector = await db.get(d.id);
|
||||
return vector;
|
||||
})
|
||||
);
|
||||
|
||||
const graph = await buildGraphFromEntries(entries.filter(e => e !== null), {
|
||||
threshold: 0.7, // Only edges with >70% similarity
|
||||
maxNeighbors: 3
|
||||
});
|
||||
console.log(`✅ Graph built: ${graph.nodes.length} nodes, ${graph.edges.length} edges`);
|
||||
|
||||
// Export to multiple formats
|
||||
console.log('Exporting to formats...');
|
||||
|
||||
// GraphML (for Gephi, yEd)
|
||||
const graphml = exportToGraphML(graph, {
|
||||
graphName: 'AI Concepts Network',
|
||||
includeVectors: false
|
||||
});
|
||||
console.log(' ✅ GraphML export ready (for Gephi/yEd)');
|
||||
|
||||
// GEXF (for Gephi)
|
||||
const gexf = exportToGEXF(graph, {
|
||||
graphName: 'AI Knowledge Graph',
|
||||
graphDescription: 'Vector similarity network of AI concepts'
|
||||
});
|
||||
console.log(' ✅ GEXF export ready (for Gephi)');
|
||||
|
||||
// Neo4j (for graph database)
|
||||
const neo4j = exportToNeo4j(graph, {
|
||||
includeMetadata: true
|
||||
});
|
||||
console.log(' ✅ Neo4j Cypher queries ready');
|
||||
|
||||
// D3.js (for web visualization)
|
||||
const d3Data = exportToD3(graph);
|
||||
console.log(' ✅ D3.js JSON ready (for web viz)');
|
||||
|
||||
// ========== 6. Interactive Web UI ==========
|
||||
console.log('\n🌐 Step 6: Launch Interactive Web UI');
|
||||
|
||||
console.log('Starting web server...');
|
||||
const uiServer = await startUIServer(db, 3000);
|
||||
|
||||
console.log('✅ Web UI started at http://localhost:3000');
|
||||
console.log('\n📱 Features:');
|
||||
console.log(' • Force-directed graph visualization');
|
||||
console.log(' • Interactive node dragging & zoom');
|
||||
console.log(' • Real-time similarity search');
|
||||
console.log(' • Metadata inspection');
|
||||
console.log(' • Export as PNG/SVG');
|
||||
console.log(' • WebSocket live updates');
|
||||
|
||||
// ========== Summary ==========
|
||||
console.log('\n' + '='.repeat(60));
|
||||
console.log('🎉 Complete Integration Successful!\n');
|
||||
console.log('Summary:');
|
||||
console.log(` 📊 Database: ${await db.len()} vectors (1536-dim)`);
|
||||
console.log(` 💾 Persistence: 1 snapshot, auto-save enabled`);
|
||||
console.log(` ⏰ Versions: 2 versions tracked`);
|
||||
console.log(` 📈 Graph: ${graph.nodes.length} nodes, ${graph.edges.length} edges`);
|
||||
console.log(` 📦 Exports: GraphML, GEXF, Neo4j, D3.js ready`);
|
||||
console.log(` 🌐 UI Server: Running on port 3000`);
|
||||
console.log('\n📖 Next Steps:');
|
||||
console.log(' 1. Open http://localhost:3000 to explore the graph');
|
||||
console.log(' 2. Import GraphML into Gephi for advanced visualization');
|
||||
console.log(' 3. Run Neo4j queries to analyze relationships');
|
||||
console.log(' 4. Use temporal tracking to monitor changes over time');
|
||||
console.log(' 5. Set up auto-save for production deployments');
|
||||
|
||||
console.log('\n💡 Pro Tips:');
|
||||
console.log(' • Use OpenAI embeddings for best semantic understanding');
|
||||
console.log(' • Create snapshots before major updates');
|
||||
console.log(' • Enable auto-save for production (already enabled in this demo)');
|
||||
console.log(' • Export to Neo4j for complex graph queries');
|
||||
console.log(' • Monitor versions to track ontology evolution');
|
||||
|
||||
console.log('\n🛑 Press Ctrl+C to stop the UI server');
|
||||
console.log('=' .repeat(60) + '\n');
|
||||
|
||||
// Keep server running
|
||||
process.on('SIGINT', async () => {
|
||||
console.log('\n\n🛑 Shutting down...');
|
||||
await uiServer.stop();
|
||||
await persistence.shutdown();
|
||||
console.log('✅ Cleanup complete. Goodbye!');
|
||||
process.exit(0);
|
||||
});
|
||||
}
|
||||
|
||||
// Run example
|
||||
main().catch(console.error);
|
||||
16
npm/packages/ruvector-extensions/examples/graph-export-examples.d.ts
vendored
Normal file
16
npm/packages/ruvector-extensions/examples/graph-export-examples.d.ts
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
/**
 * Graph Export Examples
 *
 * Demonstrates how to use the graph export module with various formats
 * and configurations.
 */
// Each declaration mirrors an example implemented in graph-export-examples.ts;
// all are async demos that log output (and most write files under examples/output/).
export declare function example1_basicExport(): Promise<void>;
export declare function example2_graphMLExport(): Promise<void>;
export declare function example3_gephiExport(): Promise<void>;
export declare function example4_neo4jExport(): Promise<void>;
export declare function example5_d3Export(): Promise<void>;
export declare function example6_networkXExport(): Promise<void>;
export declare function example7_streamingExport(): Promise<void>;
export declare function example8_customGraph(): Promise<void>;
// Convenience runner; presumably invokes the examples above in order — TODO confirm against the .ts source.
export declare function runAllExamples(): Promise<void>;
//# sourceMappingURL=graph-export-examples.d.ts.map
|
||||
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"graph-export-examples.d.ts","sourceRoot":"","sources":["graph-export-examples.ts"],"names":[],"mappings":"AAAA;;;;;GAKG;AAyBH,wBAAsB,oBAAoB,kBA0DzC;AAMD,wBAAsB,sBAAsB,kBAuC3C;AAMD,wBAAsB,oBAAoB,kBA+BzC;AAMD,wBAAsB,oBAAoB,kBAwCzC;AAMD,wBAAsB,iBAAiB,kBAqItC;AAMD,wBAAsB,uBAAuB,kBAsE5C;AAMD,wBAAsB,wBAAwB,kBAqD7C;AAMD,wBAAsB,oBAAoB,kBAsCzC;AAMD,wBAAsB,cAAc,kBAsCnC"}
|
||||
@@ -0,0 +1,546 @@
|
||||
"use strict";
/**
 * Graph Export Examples
 *
 * Demonstrates how to use the graph export module with various formats
 * and configurations.
 */
// ---- TypeScript compiler-generated CommonJS interop helpers (do not edit) ----
// Re-exports property `k` of module `m` onto `o` (as `k2`), preserving live
// getter bindings when property descriptors are available.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    var desc = Object.getOwnPropertyDescriptor(m, k);
    if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
        desc = { enumerable: true, get: function() { return m[k]; } };
    }
    Object.defineProperty(o, k2, desc);
}) : (function(o, m, k, k2) {
    if (k2 === undefined) k2 = k;
    o[k2] = m[k];
}));
// Attaches the whole CommonJS module object as the `default` export of `o`.
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
    Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
    o["default"] = v;
});
// Emulates `import * as ns from '...'` for CommonJS modules: copies every
// own non-default key via __createBinding and adds a `default` binding.
// (Generated unconditionally; may not be referenced in this chunk.)
var __importStar = (this && this.__importStar) || (function () {
    var ownKeys = function(o) {
        ownKeys = Object.getOwnPropertyNames || function (o) {
            var ar = [];
            for (var k in o) if (Object.prototype.hasOwnProperty.call(o, k)) ar[ar.length] = k;
            return ar;
        };
        return ownKeys(o);
    };
    return function (mod) {
        if (mod && mod.__esModule) return mod;
        var result = {};
        if (mod != null) for (var k = ownKeys(mod), i = 0; i < k.length; i++) if (k[i] !== "default") __createBinding(result, mod, k[i]);
        __setModuleDefault(result, mod);
        return result;
    };
})();
Object.defineProperty(exports, "__esModule", { value: true });
// Public example entry points; implementations appear below in this file.
exports.example1_basicExport = example1_basicExport;
exports.example2_graphMLExport = example2_graphMLExport;
exports.example3_gephiExport = example3_gephiExport;
exports.example4_neo4jExport = example4_neo4jExport;
exports.example5_d3Export = example5_d3Export;
exports.example6_networkXExport = example6_networkXExport;
exports.example7_streamingExport = example7_streamingExport;
exports.example8_customGraph = example8_customGraph;
exports.runAllExamples = runAllExamples;
const exporters_js_1 = require("../src/exporters.js"); // graph build + export functions
const fs_1 = require("fs"); // NOTE(review): not referenced in the visible examples — possibly used later in the file
const promises_1 = require("fs/promises"); // writeFile for saving export artifacts
|
||||
// ============================================================================
|
||||
// Example 1: Basic Graph Export to Multiple Formats
|
||||
// ============================================================================
|
||||
/**
 * Example 1: builds one similarity graph from four document embeddings and
 * renders it in every supported export format, logging a short preview of each.
 */
async function example1_basicExport() {
    console.log('\n=== Example 1: Basic Graph Export ===\n');
    // Sample vector entries (embeddings from a document collection)
    const docEntries = [
        { id: 'doc1', vector: [0.1, 0.2, 0.3, 0.4], metadata: { title: 'Introduction to AI', category: 'AI', year: 2023 } },
        { id: 'doc2', vector: [0.15, 0.25, 0.35, 0.42], metadata: { title: 'Machine Learning Basics', category: 'ML', year: 2023 } },
        { id: 'doc3', vector: [0.8, 0.1, 0.05, 0.05], metadata: { title: 'History of Rome', category: 'History', year: 2022 } },
        { id: 'doc4', vector: [0.12, 0.22, 0.32, 0.38], metadata: { title: 'Neural Networks', category: 'AI', year: 2024 } }
    ];
    // Build the graph once; all exports below reuse it.
    const graph = (0, exporters_js_1.buildGraphFromEntries)(docEntries, {
        maxNeighbors: 2,
        threshold: 0.5,
        includeVectors: false,
        includeMetadata: true
    });
    console.log(`Graph built: ${graph.nodes.length} nodes, ${graph.edges.length} edges\n`);
    // Render the same graph in each supported format and show a preview.
    for (const fmt of ['graphml', 'gexf', 'neo4j', 'd3', 'networkx']) {
        const exported = (0, exporters_js_1.exportGraph)(graph, fmt, {
            graphName: 'Document Similarity Network',
            graphDescription: 'Similarity network of document embeddings',
            includeMetadata: true
        });
        console.log(`${fmt.toUpperCase()}:`);
        console.log(` Nodes: ${exported.nodeCount}, Edges: ${exported.edgeCount}`);
        const isText = typeof exported.data === 'string';
        if (isText) {
            // Text-based formats (XML, Cypher, ...) report raw size.
            console.log(` Size: ${exported.data.length} characters`);
            console.log(` Preview: ${exported.data.substring(0, 100)}...\n`);
        }
        else {
            // Structured formats come back as plain objects.
            console.log(` Type: JSON object`);
            console.log(` Preview: ${JSON.stringify(exported.data).substring(0, 100)}...\n`);
        }
    }
}
|
||||
// ============================================================================
|
||||
// Example 2: Export to GraphML with Full Configuration
|
||||
// ============================================================================
|
||||
/**
 * Example 2: exports a tiny three-vector graph to GraphML (with raw vectors
 * included), prints it, and saves it to examples/output/graph.graphml.
 */
async function example2_graphMLExport() {
    console.log('\n=== Example 2: GraphML Export ===\n');
    // Two "test" vectors close together plus one orthogonal "control" vector.
    const testVectors = [
        { id: 'vec1', vector: [1.0, 0.0, 0.0], metadata: { label: 'Vector 1', type: 'test', score: 0.95 } },
        { id: 'vec2', vector: [0.9, 0.1, 0.0], metadata: { label: 'Vector 2', type: 'test', score: 0.87 } },
        { id: 'vec3', vector: [0.0, 1.0, 0.0], metadata: { label: 'Vector 3', type: 'control', score: 0.92 } }
    ];
    const similarityGraph = (0, exporters_js_1.buildGraphFromEntries)(testVectors, {
        maxNeighbors: 2,
        threshold: 0.0,
        includeVectors: true, // Include vectors in export
        includeMetadata: true
    });
    const graphml = (0, exporters_js_1.exportToGraphML)(similarityGraph, {
        graphName: 'Test Vectors',
        includeVectors: true
    });
    console.log('GraphML Export:');
    console.log(graphml);
    // Save to file
    await (0, promises_1.writeFile)('examples/output/graph.graphml', graphml);
    console.log('\nSaved to: examples/output/graph.graphml');
}
|
||||
// ============================================================================
|
||||
// Example 3: Export to GEXF for Gephi Visualization
|
||||
// ============================================================================
|
||||
/**
 * Example 3: generates a 20-node random network (128-dim vectors, four
 * clusters of five) and writes it as GEXF for import into Gephi.
 */
async function example3_gephiExport() {
    console.log('\n=== Example 3: GEXF Export for Gephi ===\n');
    // Simulate a larger network of random embeddings.
    const randomEntries = Array.from({ length: 20 }, (_, i) => ({
        id: `node${i}`,
        vector: Array.from({ length: 128 }, () => Math.random()),
        metadata: {
            label: `Node ${i}`,
            cluster: Math.floor(i / 5), // 4 clusters of 5 nodes each
            importance: Math.random()
        }
    }));
    const clusterGraph = (0, exporters_js_1.buildGraphFromEntries)(randomEntries, {
        maxNeighbors: 3,
        threshold: 0.7,
        includeMetadata: true
    });
    const gexf = (0, exporters_js_1.exportToGEXF)(clusterGraph, {
        graphName: 'Large Network',
        graphDescription: 'Network with 20 nodes and cluster information'
    });
    await (0, promises_1.writeFile)('examples/output/network.gexf', gexf);
    console.log('GEXF file created: examples/output/network.gexf');
    console.log('Import this file into Gephi for visualization!');
}
|
||||
// ============================================================================
|
||||
// Example 4: Export to Neo4j and Execute Queries
|
||||
// ============================================================================
|
||||
/**
 * Example 4: converts a small "people" similarity graph into Neo4j Cypher
 * statements, prints them, and saves them to examples/output/import.cypher.
 */
async function example4_neo4jExport() {
    console.log('\n=== Example 4: Neo4j Export ===\n');
    // Two engineers with nearby embeddings plus one distant manager.
    const people = [
        { id: 'person1', vector: [0.5, 0.5], metadata: { name: 'Alice', role: 'Engineer', experience: 5 } },
        { id: 'person2', vector: [0.52, 0.48], metadata: { name: 'Bob', role: 'Engineer', experience: 3 } },
        { id: 'person3', vector: [0.1, 0.9], metadata: { name: 'Charlie', role: 'Manager', experience: 10 } }
    ];
    const peopleGraph = (0, exporters_js_1.buildGraphFromEntries)(people, {
        maxNeighbors: 2,
        threshold: 0.5,
        includeMetadata: true
    });
    const cypher = (0, exporters_js_1.exportToNeo4j)(peopleGraph, {
        includeMetadata: true
    });
    console.log('Neo4j Cypher Queries:');
    console.log(cypher);
    await (0, promises_1.writeFile)('examples/output/import.cypher', cypher);
    console.log('\nSaved to: examples/output/import.cypher');
    console.log('\nTo import into Neo4j:');
    console.log(' 1. Open Neo4j Browser');
    console.log(' 2. Copy and paste the Cypher queries');
    console.log(' 3. Execute to create the graph');
}
|
||||
// ============================================================================
|
||||
// Example 5: Export to D3.js for Web Visualization
|
||||
// ============================================================================
|
||||
/**
 * Example 5: exports a hub-and-spoke graph to D3.js JSON, saves it, and also
 * generates a standalone HTML page with an interactive force-directed layout.
 *
 * Writes: examples/output/d3-graph.json and examples/output/d3-visualization.html.
 */
async function example5_d3Export() {
    console.log('\n=== Example 5: D3.js Export ===\n');
    // One central node plus three satellites; size/color metadata is consumed
    // by the generated D3 page below (d.size / d.color).
    const entries = [
        { id: 'central', vector: [0.5, 0.5], metadata: { name: 'Central Node', size: 20, color: '#ff0000' } },
        { id: 'node1', vector: [0.6, 0.5], metadata: { name: 'Node 1', size: 10, color: '#00ff00' } },
        { id: 'node2', vector: [0.4, 0.5], metadata: { name: 'Node 2', size: 10, color: '#0000ff' } },
        { id: 'node3', vector: [0.5, 0.6], metadata: { name: 'Node 3', size: 10, color: '#ffff00' } }
    ];
    // threshold 0.0 keeps every candidate edge (fully connected up to maxNeighbors).
    const graph = (0, exporters_js_1.buildGraphFromEntries)(entries, {
        maxNeighbors: 3,
        threshold: 0.0,
        includeMetadata: true
    });
    const d3Data = (0, exporters_js_1.exportToD3)(graph, {
        includeMetadata: true
    });
    console.log('D3.js Data:');
    console.log(JSON.stringify(d3Data, null, 2));
    await (0, promises_1.writeFile)('examples/output/d3-graph.json', JSON.stringify(d3Data, null, 2));
    console.log('\nSaved to: examples/output/d3-graph.json');
    // Generate simple HTML visualization
    // (self-contained page: the graph data is inlined into the template below)
    const html = `
<!DOCTYPE html>
<html>
<head>
<title>D3.js Force Graph</title>
<script src="https://d3js.org/d3.v7.min.js"></script>
<style>
body { margin: 0; font-family: Arial, sans-serif; }
svg { border: 1px solid #ccc; }
.links line { stroke: #999; stroke-opacity: 0.6; }
.nodes circle { stroke: #fff; stroke-width: 1.5px; }
.labels { font-size: 10px; pointer-events: none; }
</style>
</head>
<body>
<svg width="800" height="600"></svg>
<script>
const graphData = ${JSON.stringify(d3Data)};

const svg = d3.select("svg"),
width = +svg.attr("width"),
height = +svg.attr("height");

const simulation = d3.forceSimulation(graphData.nodes)
.force("link", d3.forceLink(graphData.links).id(d => d.id).distance(100))
.force("charge", d3.forceManyBody().strength(-300))
.force("center", d3.forceCenter(width / 2, height / 2));

const link = svg.append("g")
.attr("class", "links")
.selectAll("line")
.data(graphData.links)
.enter().append("line")
.attr("stroke-width", d => Math.sqrt(d.value) * 2);

const node = svg.append("g")
.attr("class", "nodes")
.selectAll("circle")
.data(graphData.nodes)
.enter().append("circle")
.attr("r", d => d.size || 5)
.attr("fill", d => d.color || "#69b3a2")
.call(d3.drag()
.on("start", dragstarted)
.on("drag", dragged)
.on("end", dragended));

const label = svg.append("g")
.attr("class", "labels")
.selectAll("text")
.data(graphData.nodes)
.enter().append("text")
.text(d => d.name)
.attr("dx", 12)
.attr("dy", 4);

simulation.on("tick", () => {
link.attr("x1", d => d.source.x)
.attr("y1", d => d.source.y)
.attr("x2", d => d.target.x)
.attr("y2", d => d.target.y);
node.attr("cx", d => d.x)
.attr("cy", d => d.y);
label.attr("x", d => d.x)
.attr("y", d => d.y);
});

function dragstarted(event, d) {
if (!event.active) simulation.alphaTarget(0.3).restart();
d.fx = d.x;
d.fy = d.y;
}

function dragged(event, d) {
d.fx = event.x;
d.fy = event.y;
}

function dragended(event, d) {
if (!event.active) simulation.alphaTarget(0);
d.fx = null;
d.fy = null;
}
</script>
</body>
</html>`;
    await (0, promises_1.writeFile)('examples/output/d3-visualization.html', html);
    console.log('Created HTML visualization: examples/output/d3-visualization.html');
    console.log('Open this file in a web browser to see the interactive graph!');
}
|
||||
// ============================================================================
|
||||
// Example 6: Export to NetworkX for Python Analysis
|
||||
// ============================================================================
|
||||
/**
 * Example 6: export a similarity graph as NetworkX node-link JSON and
 * emit a companion Python script that computes centrality measures and
 * renders a matplotlib plot.
 */
async function example6_networkXExport() {
    console.log('\n=== Example 6: NetworkX Export ===\n');
    // Ten random 64-dimensional vectors with per-node metadata.
    const entries = Array.from({ length: 10 }, (_, idx) => ({
        id: `node_${idx}`,
        vector: Array(64).fill(0).map(() => Math.random()),
        metadata: { degree: idx, centrality: Math.random() }
    }));
    const graph = (0, exporters_js_1.buildGraphFromEntries)(entries, {
        maxNeighbors: 3,
        threshold: 0.6
    });
    const nxData = (0, exporters_js_1.exportToNetworkX)(graph, {
        includeMetadata: true
    });
    await (0, promises_1.writeFile)('examples/output/networkx-graph.json', JSON.stringify(nxData, null, 2));
    console.log('NetworkX JSON saved to: examples/output/networkx-graph.json');
    // Companion Python script; expects to be run from examples/output/.
    const pythonScript = `
import json
import networkx as nx
import matplotlib.pyplot as plt

# Load the graph
with open('networkx-graph.json', 'r') as f:
    data = json.load(f)

G = nx.node_link_graph(data)

# Calculate centrality measures
degree_centrality = nx.degree_centrality(G)
betweenness_centrality = nx.betweenness_centrality(G)

print(f"Graph has {G.number_of_nodes()} nodes and {G.number_of_edges()} edges")
print(f"\\nTop 5 nodes by degree centrality:")
sorted_nodes = sorted(degree_centrality.items(), key=lambda x: x[1], reverse=True)[:5]
for node, centrality in sorted_nodes:
    print(f"  {node}: {centrality:.4f}")

# Visualize
plt.figure(figsize=(12, 8))
pos = nx.spring_layout(G, k=0.5, iterations=50)
nx.draw(G, pos,
        node_color=[degree_centrality[node] for node in G.nodes()],
        node_size=[v * 1000 for v in degree_centrality.values()],
        cmap=plt.cm.plasma,
        with_labels=True,
        font_size=8,
        font_weight='bold',
        edge_color='gray',
        alpha=0.7)
plt.title('Network Graph Visualization')
plt.colorbar(plt.cm.ScalarMappable(cmap=plt.cm.plasma), label='Degree Centrality')
plt.savefig('network-visualization.png', dpi=300, bbox_inches='tight')
print("\\nVisualization saved to: network-visualization.png")
`;
    await (0, promises_1.writeFile)('examples/output/analyze_network.py', pythonScript);
    console.log('Python analysis script saved to: examples/output/analyze_network.py');
    console.log('\nTo analyze in Python:');
    console.log(' cd examples/output');
    console.log(' pip install networkx matplotlib');
    console.log(' python analyze_network.py');
}
|
||||
// ============================================================================
|
||||
// Example 7: Streaming Export for Large Graphs
|
||||
// ============================================================================
|
||||
/**
 * Example 7: stream a large GraphML export node-by-node so the whole
 * graph never has to be materialized in memory at once.
 */
async function example7_streamingExport() {
    console.log('\n=== Example 7: Streaming Export ===\n');
    // Simulate a large graph that doesn't fit in memory
    console.log('Creating streaming GraphML export...');
    const outStream = (0, fs_1.createWriteStream)('examples/output/large-graph.graphml');
    const exporter = new exporters_js_1.GraphMLStreamExporter(outStream, {
        graphName: 'Large Streaming Graph'
    });
    await exporter.start();
    // Write 1000 nodes, reporting progress once per batch of 100.
    for (let i = 0; i < 1000; i++) {
        await exporter.addNode({
            id: `node${i}`,
            label: `Node ${i}`,
            attributes: {
                batch: Math.floor(i / 100),
                value: Math.random()
            }
        });
        if (i % 100 === 0) {
            console.log(` Added ${i} nodes...`);
        }
    }
    console.log(' Added 1000 nodes');
    // Each node links forward to up to four successors.
    let edgeCount = 0;
    for (let i = 0; i < 1000; i++) {
        for (let j = i + 1; j < Math.min(i + 5, 1000); j++) {
            await exporter.addEdge({
                source: `node${i}`,
                target: `node${j}`,
                weight: Math.random()
            });
            edgeCount++;
        }
    }
    console.log(` Added ${edgeCount} edges`);
    await exporter.end();
    outStream.close();
    console.log('\nStreaming export completed: examples/output/large-graph.graphml');
    console.log('This approach works for graphs with millions of nodes!');
}
|
||||
// ============================================================================
|
||||
// Example 8: Custom Graph Construction
|
||||
// ============================================================================
|
||||
/**
 * Example 8: construct a graph by hand (no vector-similarity step) and
 * export it to GraphML, D3 JSON and Neo4j Cypher.
 */
async function example8_customGraph() {
    console.log('\n=== Example 8: Custom Graph Construction ===\n');
    // Five-node tree: root A -> {B, C}, B -> D, C -> E, plus one B-C sibling edge.
    const graph = {
        nodes: [
            { id: 'A', label: 'Root', attributes: { level: 0, type: 'root' } },
            { id: 'B', label: 'Child 1', attributes: { level: 1, type: 'child' } },
            { id: 'C', label: 'Child 2', attributes: { level: 1, type: 'child' } },
            { id: 'D', label: 'Leaf 1', attributes: { level: 2, type: 'leaf' } },
            { id: 'E', label: 'Leaf 2', attributes: { level: 2, type: 'leaf' } }
        ],
        edges: [
            { source: 'A', target: 'B', weight: 1.0, type: 'parent-child' },
            { source: 'A', target: 'C', weight: 1.0, type: 'parent-child' },
            { source: 'B', target: 'D', weight: 0.8, type: 'parent-child' },
            { source: 'C', target: 'E', weight: 0.9, type: 'parent-child' },
            { source: 'B', target: 'C', weight: 0.5, type: 'sibling' }
        ],
        metadata: {
            description: 'Hierarchical tree structure',
            created: new Date().toISOString()
        }
    };
    // Fan the same structure out to three formats, writing each in turn.
    const graphML = (0, exporters_js_1.exportToGraphML)(graph);
    const d3Data = (0, exporters_js_1.exportToD3)(graph);
    const neo4j = (0, exporters_js_1.exportToNeo4j)(graph);
    const outputs = [
        ['examples/output/custom-graph.graphml', graphML],
        ['examples/output/custom-graph-d3.json', JSON.stringify(d3Data, null, 2)],
        ['examples/output/custom-graph.cypher', neo4j]
    ];
    for (const [path, contents] of outputs) {
        await (0, promises_1.writeFile)(path, contents);
    }
    console.log('Custom graph exported to:');
    console.log(' - examples/output/custom-graph.graphml');
    console.log(' - examples/output/custom-graph-d3.json');
    console.log(' - examples/output/custom-graph.cypher');
}
|
||||
// ============================================================================
|
||||
// Run All Examples
|
||||
// ============================================================================
|
||||
/**
 * Run all eight examples in sequence, creating examples/output/ first.
 * Logs and rethrows the first failure so callers can observe it.
 */
async function runAllExamples() {
    console.log('╔═══════════════════════════════════════════════════════╗');
    console.log('║ ruvector Graph Export Examples ║');
    console.log('╚═══════════════════════════════════════════════════════╝');
    // Create output directory
    const fs = await Promise.resolve().then(() => __importStar(require('fs/promises')));
    try {
        await fs.mkdir('examples/output', { recursive: true });
    }
    catch (e) {
        // Directory already exists
    }
    try {
        const examples = [
            example1_basicExport,
            example2_graphMLExport,
            example3_gephiExport,
            example4_neo4jExport,
            example5_d3Export,
            example6_networkXExport,
            example7_streamingExport,
            example8_customGraph
        ];
        // Run strictly in order; later examples reuse the output directory.
        for (const runExample of examples) {
            await runExample();
        }
        console.log('\n✅ All examples completed successfully!');
        console.log('\nGenerated files in examples/output/:');
        console.log(' - graph.graphml (GraphML format)');
        console.log(' - network.gexf (Gephi format)');
        console.log(' - import.cypher (Neo4j queries)');
        console.log(' - d3-graph.json (D3.js data)');
        console.log(' - d3-visualization.html (Interactive visualization)');
        console.log(' - networkx-graph.json (NetworkX format)');
        console.log(' - analyze_network.py (Python analysis script)');
        console.log(' - large-graph.graphml (Streaming export demo)');
        console.log(' - custom-graph.* (Custom graph exports)');
    }
    catch (error) {
        console.error('\n❌ Error running examples:', error);
        throw error;
    }
}
|
||||
// Run if executed directly.
// Bug fix: the original used `import.meta.url`, which is a SyntaxError in a
// CommonJS module (this file is compiled CJS: "use strict" + exports +
// require). `require.main === module` is the CJS-correct direct-run check.
if (require.main === module) {
    runAllExamples().catch(console.error);
}
|
||||
//# sourceMappingURL=graph-export-examples.js.map
|
||||
File diff suppressed because one or more lines are too long
@@ -0,0 +1,584 @@
|
||||
/**
|
||||
* Graph Export Examples
|
||||
*
|
||||
* Demonstrates how to use the graph export module with various formats
|
||||
* and configurations.
|
||||
*/
|
||||
|
||||
import {
|
||||
buildGraphFromEntries,
|
||||
exportGraph,
|
||||
exportToGraphML,
|
||||
exportToGEXF,
|
||||
exportToNeo4j,
|
||||
exportToD3,
|
||||
exportToNetworkX,
|
||||
GraphMLStreamExporter,
|
||||
D3StreamExporter,
|
||||
type Graph,
|
||||
type GraphNode,
|
||||
type GraphEdge,
|
||||
type VectorEntry,
|
||||
type ExportOptions
|
||||
} from '../src/exporters.js';
|
||||
import { createWriteStream } from 'fs';
|
||||
import { writeFile } from 'fs/promises';
|
||||
|
||||
// ============================================================================
|
||||
// Example 1: Basic Graph Export to Multiple Formats
|
||||
// ============================================================================
|
||||
|
||||
export async function example1_basicExport() {
|
||||
console.log('\n=== Example 1: Basic Graph Export ===\n');
|
||||
|
||||
// Sample vector entries (embeddings from a document collection)
|
||||
const entries: VectorEntry[] = [
|
||||
{
|
||||
id: 'doc1',
|
||||
vector: [0.1, 0.2, 0.3, 0.4],
|
||||
metadata: { title: 'Introduction to AI', category: 'AI', year: 2023 }
|
||||
},
|
||||
{
|
||||
id: 'doc2',
|
||||
vector: [0.15, 0.25, 0.35, 0.42],
|
||||
metadata: { title: 'Machine Learning Basics', category: 'ML', year: 2023 }
|
||||
},
|
||||
{
|
||||
id: 'doc3',
|
||||
vector: [0.8, 0.1, 0.05, 0.05],
|
||||
metadata: { title: 'History of Rome', category: 'History', year: 2022 }
|
||||
},
|
||||
{
|
||||
id: 'doc4',
|
||||
vector: [0.12, 0.22, 0.32, 0.38],
|
||||
metadata: { title: 'Neural Networks', category: 'AI', year: 2024 }
|
||||
}
|
||||
];
|
||||
|
||||
// Build graph from vector entries
|
||||
const graph = buildGraphFromEntries(entries, {
|
||||
maxNeighbors: 2,
|
||||
threshold: 0.5,
|
||||
includeVectors: false,
|
||||
includeMetadata: true
|
||||
});
|
||||
|
||||
console.log(`Graph built: ${graph.nodes.length} nodes, ${graph.edges.length} edges\n`);
|
||||
|
||||
// Export to different formats
|
||||
const formats = ['graphml', 'gexf', 'neo4j', 'd3', 'networkx'] as const;
|
||||
|
||||
for (const format of formats) {
|
||||
const result = exportGraph(graph, format, {
|
||||
graphName: 'Document Similarity Network',
|
||||
graphDescription: 'Similarity network of document embeddings',
|
||||
includeMetadata: true
|
||||
});
|
||||
|
||||
console.log(`${format.toUpperCase()}:`);
|
||||
console.log(` Nodes: ${result.nodeCount}, Edges: ${result.edgeCount}`);
|
||||
|
||||
if (typeof result.data === 'string') {
|
||||
console.log(` Size: ${result.data.length} characters`);
|
||||
console.log(` Preview: ${result.data.substring(0, 100)}...\n`);
|
||||
} else {
|
||||
console.log(` Type: JSON object`);
|
||||
console.log(` Preview: ${JSON.stringify(result.data).substring(0, 100)}...\n`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Example 2: Export to GraphML with Full Configuration
|
||||
// ============================================================================
|
||||
|
||||
export async function example2_graphMLExport() {
|
||||
console.log('\n=== Example 2: GraphML Export ===\n');
|
||||
|
||||
const entries: VectorEntry[] = [
|
||||
{
|
||||
id: 'vec1',
|
||||
vector: [1.0, 0.0, 0.0],
|
||||
metadata: { label: 'Vector 1', type: 'test', score: 0.95 }
|
||||
},
|
||||
{
|
||||
id: 'vec2',
|
||||
vector: [0.9, 0.1, 0.0],
|
||||
metadata: { label: 'Vector 2', type: 'test', score: 0.87 }
|
||||
},
|
||||
{
|
||||
id: 'vec3',
|
||||
vector: [0.0, 1.0, 0.0],
|
||||
metadata: { label: 'Vector 3', type: 'control', score: 0.92 }
|
||||
}
|
||||
];
|
||||
|
||||
const graph = buildGraphFromEntries(entries, {
|
||||
maxNeighbors: 2,
|
||||
threshold: 0.0,
|
||||
includeVectors: true, // Include vectors in export
|
||||
includeMetadata: true
|
||||
});
|
||||
|
||||
const graphml = exportToGraphML(graph, {
|
||||
graphName: 'Test Vectors',
|
||||
includeVectors: true
|
||||
});
|
||||
|
||||
console.log('GraphML Export:');
|
||||
console.log(graphml);
|
||||
|
||||
// Save to file
|
||||
await writeFile('examples/output/graph.graphml', graphml);
|
||||
console.log('\nSaved to: examples/output/graph.graphml');
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Example 3: Export to GEXF for Gephi Visualization
|
||||
// ============================================================================
|
||||
|
||||
export async function example3_gephiExport() {
|
||||
console.log('\n=== Example 3: GEXF Export for Gephi ===\n');
|
||||
|
||||
// Simulate a larger network
|
||||
const entries: VectorEntry[] = [];
|
||||
for (let i = 0; i < 20; i++) {
|
||||
entries.push({
|
||||
id: `node${i}`,
|
||||
vector: Array(128).fill(0).map(() => Math.random()),
|
||||
metadata: {
|
||||
label: `Node ${i}`,
|
||||
cluster: Math.floor(i / 5),
|
||||
importance: Math.random()
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
const graph = buildGraphFromEntries(entries, {
|
||||
maxNeighbors: 3,
|
||||
threshold: 0.7,
|
||||
includeMetadata: true
|
||||
});
|
||||
|
||||
const gexf = exportToGEXF(graph, {
|
||||
graphName: 'Large Network',
|
||||
graphDescription: 'Network with 20 nodes and cluster information'
|
||||
});
|
||||
|
||||
await writeFile('examples/output/network.gexf', gexf);
|
||||
console.log('GEXF file created: examples/output/network.gexf');
|
||||
console.log('Import this file into Gephi for visualization!');
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Example 4: Export to Neo4j and Execute Queries
|
||||
// ============================================================================
|
||||
|
||||
export async function example4_neo4jExport() {
|
||||
console.log('\n=== Example 4: Neo4j Export ===\n');
|
||||
|
||||
const entries: VectorEntry[] = [
|
||||
{
|
||||
id: 'person1',
|
||||
vector: [0.5, 0.5],
|
||||
metadata: { name: 'Alice', role: 'Engineer', experience: 5 }
|
||||
},
|
||||
{
|
||||
id: 'person2',
|
||||
vector: [0.52, 0.48],
|
||||
metadata: { name: 'Bob', role: 'Engineer', experience: 3 }
|
||||
},
|
||||
{
|
||||
id: 'person3',
|
||||
vector: [0.1, 0.9],
|
||||
metadata: { name: 'Charlie', role: 'Manager', experience: 10 }
|
||||
}
|
||||
];
|
||||
|
||||
const graph = buildGraphFromEntries(entries, {
|
||||
maxNeighbors: 2,
|
||||
threshold: 0.5,
|
||||
includeMetadata: true
|
||||
});
|
||||
|
||||
const cypher = exportToNeo4j(graph, {
|
||||
includeMetadata: true
|
||||
});
|
||||
|
||||
console.log('Neo4j Cypher Queries:');
|
||||
console.log(cypher);
|
||||
|
||||
await writeFile('examples/output/import.cypher', cypher);
|
||||
console.log('\nSaved to: examples/output/import.cypher');
|
||||
console.log('\nTo import into Neo4j:');
|
||||
console.log(' 1. Open Neo4j Browser');
|
||||
console.log(' 2. Copy and paste the Cypher queries');
|
||||
console.log(' 3. Execute to create the graph');
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Example 5: Export to D3.js for Web Visualization
|
||||
// ============================================================================
|
||||
|
||||
export async function example5_d3Export() {
|
||||
console.log('\n=== Example 5: D3.js Export ===\n');
|
||||
|
||||
const entries: VectorEntry[] = [
|
||||
{
|
||||
id: 'central',
|
||||
vector: [0.5, 0.5],
|
||||
metadata: { name: 'Central Node', size: 20, color: '#ff0000' }
|
||||
},
|
||||
{
|
||||
id: 'node1',
|
||||
vector: [0.6, 0.5],
|
||||
metadata: { name: 'Node 1', size: 10, color: '#00ff00' }
|
||||
},
|
||||
{
|
||||
id: 'node2',
|
||||
vector: [0.4, 0.5],
|
||||
metadata: { name: 'Node 2', size: 10, color: '#0000ff' }
|
||||
},
|
||||
{
|
||||
id: 'node3',
|
||||
vector: [0.5, 0.6],
|
||||
metadata: { name: 'Node 3', size: 10, color: '#ffff00' }
|
||||
}
|
||||
];
|
||||
|
||||
const graph = buildGraphFromEntries(entries, {
|
||||
maxNeighbors: 3,
|
||||
threshold: 0.0,
|
||||
includeMetadata: true
|
||||
});
|
||||
|
||||
const d3Data = exportToD3(graph, {
|
||||
includeMetadata: true
|
||||
});
|
||||
|
||||
console.log('D3.js Data:');
|
||||
console.log(JSON.stringify(d3Data, null, 2));
|
||||
|
||||
await writeFile('examples/output/d3-graph.json', JSON.stringify(d3Data, null, 2));
|
||||
console.log('\nSaved to: examples/output/d3-graph.json');
|
||||
|
||||
// Generate simple HTML visualization
|
||||
const html = `
|
||||
<!DOCTYPE html>
|
||||
<html>
|
||||
<head>
|
||||
<title>D3.js Force Graph</title>
|
||||
<script src="https://d3js.org/d3.v7.min.js"></script>
|
||||
<style>
|
||||
body { margin: 0; font-family: Arial, sans-serif; }
|
||||
svg { border: 1px solid #ccc; }
|
||||
.links line { stroke: #999; stroke-opacity: 0.6; }
|
||||
.nodes circle { stroke: #fff; stroke-width: 1.5px; }
|
||||
.labels { font-size: 10px; pointer-events: none; }
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<svg width="800" height="600"></svg>
|
||||
<script>
|
||||
const graphData = ${JSON.stringify(d3Data)};
|
||||
|
||||
const svg = d3.select("svg"),
|
||||
width = +svg.attr("width"),
|
||||
height = +svg.attr("height");
|
||||
|
||||
const simulation = d3.forceSimulation(graphData.nodes)
|
||||
.force("link", d3.forceLink(graphData.links).id(d => d.id).distance(100))
|
||||
.force("charge", d3.forceManyBody().strength(-300))
|
||||
.force("center", d3.forceCenter(width / 2, height / 2));
|
||||
|
||||
const link = svg.append("g")
|
||||
.attr("class", "links")
|
||||
.selectAll("line")
|
||||
.data(graphData.links)
|
||||
.enter().append("line")
|
||||
.attr("stroke-width", d => Math.sqrt(d.value) * 2);
|
||||
|
||||
const node = svg.append("g")
|
||||
.attr("class", "nodes")
|
||||
.selectAll("circle")
|
||||
.data(graphData.nodes)
|
||||
.enter().append("circle")
|
||||
.attr("r", d => d.size || 5)
|
||||
.attr("fill", d => d.color || "#69b3a2")
|
||||
.call(d3.drag()
|
||||
.on("start", dragstarted)
|
||||
.on("drag", dragged)
|
||||
.on("end", dragended));
|
||||
|
||||
const label = svg.append("g")
|
||||
.attr("class", "labels")
|
||||
.selectAll("text")
|
||||
.data(graphData.nodes)
|
||||
.enter().append("text")
|
||||
.text(d => d.name)
|
||||
.attr("dx", 12)
|
||||
.attr("dy", 4);
|
||||
|
||||
simulation.on("tick", () => {
|
||||
link.attr("x1", d => d.source.x)
|
||||
.attr("y1", d => d.source.y)
|
||||
.attr("x2", d => d.target.x)
|
||||
.attr("y2", d => d.target.y);
|
||||
node.attr("cx", d => d.x)
|
||||
.attr("cy", d => d.y);
|
||||
label.attr("x", d => d.x)
|
||||
.attr("y", d => d.y);
|
||||
});
|
||||
|
||||
function dragstarted(event, d) {
|
||||
if (!event.active) simulation.alphaTarget(0.3).restart();
|
||||
d.fx = d.x;
|
||||
d.fy = d.y;
|
||||
}
|
||||
|
||||
function dragged(event, d) {
|
||||
d.fx = event.x;
|
||||
d.fy = event.y;
|
||||
}
|
||||
|
||||
function dragended(event, d) {
|
||||
if (!event.active) simulation.alphaTarget(0);
|
||||
d.fx = null;
|
||||
d.fy = null;
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
</html>`;
|
||||
|
||||
await writeFile('examples/output/d3-visualization.html', html);
|
||||
console.log('Created HTML visualization: examples/output/d3-visualization.html');
|
||||
console.log('Open this file in a web browser to see the interactive graph!');
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Example 6: Export to NetworkX for Python Analysis
|
||||
// ============================================================================
|
||||
|
||||
export async function example6_networkXExport() {
|
||||
console.log('\n=== Example 6: NetworkX Export ===\n');
|
||||
|
||||
const entries: VectorEntry[] = [];
|
||||
for (let i = 0; i < 10; i++) {
|
||||
entries.push({
|
||||
id: `node_${i}`,
|
||||
vector: Array(64).fill(0).map(() => Math.random()),
|
||||
metadata: { degree: i, centrality: Math.random() }
|
||||
});
|
||||
}
|
||||
|
||||
const graph = buildGraphFromEntries(entries, {
|
||||
maxNeighbors: 3,
|
||||
threshold: 0.6
|
||||
});
|
||||
|
||||
const nxData = exportToNetworkX(graph, {
|
||||
includeMetadata: true
|
||||
});
|
||||
|
||||
await writeFile('examples/output/networkx-graph.json', JSON.stringify(nxData, null, 2));
|
||||
console.log('NetworkX JSON saved to: examples/output/networkx-graph.json');
|
||||
|
||||
// Generate Python script
|
||||
const pythonScript = `
|
||||
import json
|
||||
import networkx as nx
|
||||
import matplotlib.pyplot as plt
|
||||
|
||||
# Load the graph
|
||||
with open('networkx-graph.json', 'r') as f:
|
||||
data = json.load(f)
|
||||
|
||||
G = nx.node_link_graph(data)
|
||||
|
||||
# Calculate centrality measures
|
||||
degree_centrality = nx.degree_centrality(G)
|
||||
betweenness_centrality = nx.betweenness_centrality(G)
|
||||
|
||||
print(f"Graph has {G.number_of_nodes()} nodes and {G.number_of_edges()} edges")
|
||||
print(f"\\nTop 5 nodes by degree centrality:")
|
||||
sorted_nodes = sorted(degree_centrality.items(), key=lambda x: x[1], reverse=True)[:5]
|
||||
for node, centrality in sorted_nodes:
|
||||
print(f" {node}: {centrality:.4f}")
|
||||
|
||||
# Visualize
|
||||
plt.figure(figsize=(12, 8))
|
||||
pos = nx.spring_layout(G, k=0.5, iterations=50)
|
||||
nx.draw(G, pos,
|
||||
node_color=[degree_centrality[node] for node in G.nodes()],
|
||||
node_size=[v * 1000 for v in degree_centrality.values()],
|
||||
cmap=plt.cm.plasma,
|
||||
with_labels=True,
|
||||
font_size=8,
|
||||
font_weight='bold',
|
||||
edge_color='gray',
|
||||
alpha=0.7)
|
||||
plt.title('Network Graph Visualization')
|
||||
plt.colorbar(plt.cm.ScalarMappable(cmap=plt.cm.plasma), label='Degree Centrality')
|
||||
plt.savefig('network-visualization.png', dpi=300, bbox_inches='tight')
|
||||
print("\\nVisualization saved to: network-visualization.png")
|
||||
`;
|
||||
|
||||
await writeFile('examples/output/analyze_network.py', pythonScript);
|
||||
console.log('Python analysis script saved to: examples/output/analyze_network.py');
|
||||
console.log('\nTo analyze in Python:');
|
||||
console.log(' cd examples/output');
|
||||
console.log(' pip install networkx matplotlib');
|
||||
console.log(' python analyze_network.py');
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Example 7: Streaming Export for Large Graphs
|
||||
// ============================================================================
|
||||
|
||||
export async function example7_streamingExport() {
|
||||
console.log('\n=== Example 7: Streaming Export ===\n');
|
||||
|
||||
// Simulate a large graph that doesn't fit in memory
|
||||
console.log('Creating streaming GraphML export...');
|
||||
|
||||
const stream = createWriteStream('examples/output/large-graph.graphml');
|
||||
const exporter = new GraphMLStreamExporter(stream, {
|
||||
graphName: 'Large Streaming Graph'
|
||||
});
|
||||
|
||||
await exporter.start();
|
||||
|
||||
// Add nodes in batches
|
||||
for (let i = 0; i < 1000; i++) {
|
||||
const node: GraphNode = {
|
||||
id: `node${i}`,
|
||||
label: `Node ${i}`,
|
||||
attributes: {
|
||||
batch: Math.floor(i / 100),
|
||||
value: Math.random()
|
||||
}
|
||||
};
|
||||
await exporter.addNode(node);
|
||||
|
||||
if (i % 100 === 0) {
|
||||
console.log(` Added ${i} nodes...`);
|
||||
}
|
||||
}
|
||||
|
||||
console.log(' Added 1000 nodes');
|
||||
|
||||
// Add edges
|
||||
let edgeCount = 0;
|
||||
for (let i = 0; i < 1000; i++) {
|
||||
for (let j = i + 1; j < Math.min(i + 5, 1000); j++) {
|
||||
const edge: GraphEdge = {
|
||||
source: `node${i}`,
|
||||
target: `node${j}`,
|
||||
weight: Math.random()
|
||||
};
|
||||
await exporter.addEdge(edge);
|
||||
edgeCount++;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(` Added ${edgeCount} edges`);
|
||||
|
||||
await exporter.end();
|
||||
stream.close();
|
||||
|
||||
console.log('\nStreaming export completed: examples/output/large-graph.graphml');
|
||||
console.log('This approach works for graphs with millions of nodes!');
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Example 8: Custom Graph Construction
|
||||
// ============================================================================
|
||||
|
||||
export async function example8_customGraph() {
|
||||
console.log('\n=== Example 8: Custom Graph Construction ===\n');
|
||||
|
||||
// Build a custom graph structure manually
|
||||
const graph: Graph = {
|
||||
nodes: [
|
||||
{ id: 'A', label: 'Root', attributes: { level: 0, type: 'root' } },
|
||||
{ id: 'B', label: 'Child 1', attributes: { level: 1, type: 'child' } },
|
||||
{ id: 'C', label: 'Child 2', attributes: { level: 1, type: 'child' } },
|
||||
{ id: 'D', label: 'Leaf 1', attributes: { level: 2, type: 'leaf' } },
|
||||
{ id: 'E', label: 'Leaf 2', attributes: { level: 2, type: 'leaf' } }
|
||||
],
|
||||
edges: [
|
||||
{ source: 'A', target: 'B', weight: 1.0, type: 'parent-child' },
|
||||
{ source: 'A', target: 'C', weight: 1.0, type: 'parent-child' },
|
||||
{ source: 'B', target: 'D', weight: 0.8, type: 'parent-child' },
|
||||
{ source: 'C', target: 'E', weight: 0.9, type: 'parent-child' },
|
||||
{ source: 'B', target: 'C', weight: 0.5, type: 'sibling' }
|
||||
],
|
||||
metadata: {
|
||||
description: 'Hierarchical tree structure',
|
||||
created: new Date().toISOString()
|
||||
}
|
||||
};
|
||||
|
||||
// Export to multiple formats
|
||||
const graphML = exportToGraphML(graph);
|
||||
const d3Data = exportToD3(graph);
|
||||
const neo4j = exportToNeo4j(graph);
|
||||
|
||||
await writeFile('examples/output/custom-graph.graphml', graphML);
|
||||
await writeFile('examples/output/custom-graph-d3.json', JSON.stringify(d3Data, null, 2));
|
||||
await writeFile('examples/output/custom-graph.cypher', neo4j);
|
||||
|
||||
console.log('Custom graph exported to:');
|
||||
console.log(' - examples/output/custom-graph.graphml');
|
||||
console.log(' - examples/output/custom-graph-d3.json');
|
||||
console.log(' - examples/output/custom-graph.cypher');
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Run All Examples
|
||||
// ============================================================================
|
||||
|
||||
export async function runAllExamples() {
|
||||
console.log('╔═══════════════════════════════════════════════════════╗');
|
||||
console.log('║ ruvector Graph Export Examples ║');
|
||||
console.log('╚═══════════════════════════════════════════════════════╝');
|
||||
|
||||
// Create output directory
|
||||
const fs = await import('fs/promises');
|
||||
try {
|
||||
await fs.mkdir('examples/output', { recursive: true });
|
||||
} catch (e) {
|
||||
// Directory already exists
|
||||
}
|
||||
|
||||
try {
|
||||
await example1_basicExport();
|
||||
await example2_graphMLExport();
|
||||
await example3_gephiExport();
|
||||
await example4_neo4jExport();
|
||||
await example5_d3Export();
|
||||
await example6_networkXExport();
|
||||
await example7_streamingExport();
|
||||
await example8_customGraph();
|
||||
|
||||
console.log('\n✅ All examples completed successfully!');
|
||||
console.log('\nGenerated files in examples/output/:');
|
||||
console.log(' - graph.graphml (GraphML format)');
|
||||
console.log(' - network.gexf (Gephi format)');
|
||||
console.log(' - import.cypher (Neo4j queries)');
|
||||
console.log(' - d3-graph.json (D3.js data)');
|
||||
console.log(' - d3-visualization.html (Interactive visualization)');
|
||||
console.log(' - networkx-graph.json (NetworkX format)');
|
||||
console.log(' - analyze_network.py (Python analysis script)');
|
||||
console.log(' - large-graph.graphml (Streaming export demo)');
|
||||
console.log(' - custom-graph.* (Custom graph exports)');
|
||||
} catch (error) {
|
||||
console.error('\n❌ Error running examples:', error);
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
|
||||
// Run if executed directly (ESM analogue of CJS `require.main === module`).
// NOTE(review): the string comparison assumes a POSIX path with no
// percent-encoded characters; on Windows, or for paths with spaces, it will
// never match. `url.pathToFileURL(process.argv[1]).href` would be the robust
// comparison — confirm before relying on this in non-POSIX environments.
if (import.meta.url === `file://${process.argv[1]}`) {
    runAllExamples().catch(console.error);
}
|
||||
Reference in New Issue
Block a user