Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'
This commit is contained in:
563
vendor/ruvector/crates/ruvector-graph-node/src/lib.rs
vendored
Normal file
563
vendor/ruvector/crates/ruvector-graph-node/src/lib.rs
vendored
Normal file
@@ -0,0 +1,563 @@
|
||||
//! Node.js bindings for RuVector Graph Database via NAPI-RS
|
||||
//!
|
||||
//! High-performance native graph database with Cypher-like query support,
|
||||
//! hypergraph capabilities, async/await support, and zero-copy buffer sharing.
|
||||
|
||||
#![deny(clippy::all)]
|
||||
#![warn(clippy::pedantic)]
|
||||
|
||||
use napi::bindgen_prelude::*;
|
||||
use napi_derive::napi;
|
||||
use ruvector_core::advanced::hypergraph::{
|
||||
CausalMemory as CoreCausalMemory, Hyperedge as CoreHyperedge,
|
||||
HypergraphIndex as CoreHypergraphIndex,
|
||||
};
|
||||
use ruvector_core::DistanceMetric;
|
||||
use ruvector_graph::cypher::{parse_cypher, Statement};
|
||||
use ruvector_graph::node::NodeBuilder;
|
||||
use ruvector_graph::storage::GraphStorage;
|
||||
use ruvector_graph::GraphDB;
|
||||
use std::sync::{Arc, RwLock};
|
||||
|
||||
mod streaming;
|
||||
mod transactions;
|
||||
mod types;
|
||||
|
||||
pub use streaming::*;
|
||||
pub use transactions::*;
|
||||
pub use types::*;
|
||||
|
||||
/// Graph database for complex relationship queries.
///
/// All backing stores are wrapped in `Arc<RwLock<…>>` so the async
/// `#[napi]` methods can clone the handles and move them into
/// `tokio::task::spawn_blocking` closures.
#[napi]
pub struct GraphDatabase {
    // Embedding-based hypergraph index (entities + hyperedges).
    hypergraph: Arc<RwLock<CoreHypergraphIndex>>,
    // Causal memory store; constructed here but not exposed by any method in this file.
    causal_memory: Arc<RwLock<CoreCausalMemory>>,
    // In-process transaction bookkeeping (see transactions.rs).
    transaction_manager: Arc<RwLock<transactions::TransactionManager>>,
    /// Property graph database with Cypher support
    graph_db: Arc<RwLock<GraphDB>>,
    /// Persistent storage backend (optional)
    storage: Option<Arc<RwLock<GraphStorage>>>,
    /// Path to storage file (if persisted)
    storage_path: Option<String>,
}
|
||||
|
||||
#[napi]
|
||||
impl GraphDatabase {
|
||||
/// Create a new graph database
|
||||
///
|
||||
/// # Example
|
||||
/// ```javascript
|
||||
/// const db = new GraphDatabase({
|
||||
/// distanceMetric: 'Cosine',
|
||||
/// dimensions: 384
|
||||
/// });
|
||||
/// ```
|
||||
#[napi(constructor)]
|
||||
pub fn new(options: Option<JsGraphOptions>) -> Result<Self> {
|
||||
let opts = options.unwrap_or_default();
|
||||
let metric = opts.distance_metric.unwrap_or(JsDistanceMetric::Cosine);
|
||||
let core_metric: DistanceMetric = metric.into();
|
||||
|
||||
// Check if storage path is provided for persistence
|
||||
let (storage, storage_path) = if let Some(ref path) = opts.storage_path {
|
||||
let gs = GraphStorage::new(path)
|
||||
.map_err(|e| Error::from_reason(format!("Failed to open storage: {}", e)))?;
|
||||
(Some(Arc::new(RwLock::new(gs))), Some(path.clone()))
|
||||
} else {
|
||||
(None, None)
|
||||
};
|
||||
|
||||
Ok(Self {
|
||||
hypergraph: Arc::new(RwLock::new(CoreHypergraphIndex::new(core_metric))),
|
||||
causal_memory: Arc::new(RwLock::new(CoreCausalMemory::new(core_metric))),
|
||||
transaction_manager: Arc::new(RwLock::new(transactions::TransactionManager::new())),
|
||||
graph_db: Arc::new(RwLock::new(GraphDB::new())),
|
||||
storage,
|
||||
storage_path,
|
||||
})
|
||||
}
|
||||
|
||||
/// Open an existing graph database from disk
|
||||
///
|
||||
/// # Example
|
||||
/// ```javascript
|
||||
/// const db = GraphDatabase.open('./my-graph.db');
|
||||
/// ```
|
||||
#[napi(factory)]
|
||||
pub fn open(path: String) -> Result<Self> {
|
||||
let storage = GraphStorage::new(&path)
|
||||
.map_err(|e| Error::from_reason(format!("Failed to open storage: {}", e)))?;
|
||||
|
||||
let metric = DistanceMetric::Cosine;
|
||||
|
||||
Ok(Self {
|
||||
hypergraph: Arc::new(RwLock::new(CoreHypergraphIndex::new(metric))),
|
||||
causal_memory: Arc::new(RwLock::new(CoreCausalMemory::new(metric))),
|
||||
transaction_manager: Arc::new(RwLock::new(transactions::TransactionManager::new())),
|
||||
graph_db: Arc::new(RwLock::new(GraphDB::new())),
|
||||
storage: Some(Arc::new(RwLock::new(storage))),
|
||||
storage_path: Some(path),
|
||||
})
|
||||
}
|
||||
|
||||
/// Check if persistence is enabled
|
||||
///
|
||||
/// # Example
|
||||
/// ```javascript
|
||||
/// if (db.isPersistent()) {
|
||||
/// console.log('Data is being saved to:', db.getStoragePath());
|
||||
/// }
|
||||
/// ```
|
||||
#[napi]
|
||||
pub fn is_persistent(&self) -> bool {
|
||||
self.storage.is_some()
|
||||
}
|
||||
|
||||
/// Get the storage path (if persisted)
|
||||
#[napi]
|
||||
pub fn get_storage_path(&self) -> Option<String> {
|
||||
self.storage_path.clone()
|
||||
}
|
||||
|
||||
/// Create a node in the graph
|
||||
///
|
||||
/// # Example
|
||||
/// ```javascript
|
||||
/// const nodeId = await db.createNode({
|
||||
/// id: 'node1',
|
||||
/// embedding: new Float32Array([1, 2, 3]),
|
||||
/// properties: { name: 'Alice', age: 30 }
|
||||
/// });
|
||||
/// ```
|
||||
#[napi]
|
||||
pub async fn create_node(&self, node: JsNode) -> Result<String> {
|
||||
let hypergraph = self.hypergraph.clone();
|
||||
let graph_db = self.graph_db.clone();
|
||||
let storage = self.storage.clone();
|
||||
let id = node.id.clone();
|
||||
let embedding = node.embedding.to_vec();
|
||||
let properties = node.properties.clone();
|
||||
let labels = node.labels.clone();
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
// Add to hypergraph index
|
||||
let mut hg = hypergraph.write().expect("RwLock poisoned");
|
||||
hg.add_entity(id.clone(), embedding);
|
||||
|
||||
// Add to property graph
|
||||
let mut gdb = graph_db.write().expect("RwLock poisoned");
|
||||
let mut builder = NodeBuilder::new().id(&id);
|
||||
|
||||
// Add labels if provided
|
||||
if let Some(node_labels) = labels {
|
||||
for label in node_labels {
|
||||
builder = builder.label(&label);
|
||||
}
|
||||
}
|
||||
|
||||
// Add properties if provided
|
||||
if let Some(props) = properties {
|
||||
for (key, value) in props {
|
||||
builder = builder.property(&key, value);
|
||||
}
|
||||
}
|
||||
|
||||
let graph_node = builder.build();
|
||||
|
||||
// Persist to storage if enabled
|
||||
if let Some(ref storage_arc) = storage {
|
||||
let storage_guard = storage_arc.write().expect("Storage RwLock poisoned");
|
||||
storage_guard
|
||||
.insert_node(&graph_node)
|
||||
.map_err(|e| Error::from_reason(format!("Failed to persist node: {}", e)))?;
|
||||
}
|
||||
|
||||
gdb.create_node(graph_node)
|
||||
.map_err(|e| Error::from_reason(format!("Failed to create node: {}", e)))?;
|
||||
|
||||
Ok::<String, Error>(id)
|
||||
})
|
||||
.await
|
||||
.map_err(|e| Error::from_reason(format!("Task failed: {}", e)))?
|
||||
}
|
||||
|
||||
/// Create an edge between two nodes
|
||||
///
|
||||
/// # Example
|
||||
/// ```javascript
|
||||
/// const edgeId = await db.createEdge({
|
||||
/// from: 'node1',
|
||||
/// to: 'node2',
|
||||
/// description: 'knows',
|
||||
/// embedding: new Float32Array([0.5, 0.5, 0.5]),
|
||||
/// confidence: 0.95
|
||||
/// });
|
||||
/// ```
|
||||
#[napi]
|
||||
pub async fn create_edge(&self, edge: JsEdge) -> Result<String> {
|
||||
let hypergraph = self.hypergraph.clone();
|
||||
let nodes = vec![edge.from.clone(), edge.to.clone()];
|
||||
let description = edge.description.clone();
|
||||
let embedding = edge.embedding.to_vec();
|
||||
let confidence = edge.confidence.unwrap_or(1.0) as f32;
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let core_edge = CoreHyperedge::new(nodes, description, embedding, confidence);
|
||||
let edge_id = core_edge.id.clone();
|
||||
let mut hg = hypergraph.write().expect("RwLock poisoned");
|
||||
hg.add_hyperedge(core_edge)
|
||||
.map_err(|e| Error::from_reason(format!("Failed to create edge: {}", e)))?;
|
||||
Ok(edge_id)
|
||||
})
|
||||
.await
|
||||
.map_err(|e| Error::from_reason(format!("Task failed: {}", e)))?
|
||||
}
|
||||
|
||||
/// Create a hyperedge connecting multiple nodes
|
||||
///
|
||||
/// # Example
|
||||
/// ```javascript
|
||||
/// const hyperedgeId = await db.createHyperedge({
|
||||
/// nodes: ['node1', 'node2', 'node3'],
|
||||
/// description: 'collaborated_on_project',
|
||||
/// embedding: new Float32Array([0.3, 0.6, 0.9]),
|
||||
/// confidence: 0.85,
|
||||
/// metadata: { project: 'AI Research' }
|
||||
/// });
|
||||
/// ```
|
||||
#[napi]
|
||||
pub async fn create_hyperedge(&self, hyperedge: JsHyperedge) -> Result<String> {
|
||||
let hypergraph = self.hypergraph.clone();
|
||||
let nodes = hyperedge.nodes.clone();
|
||||
let description = hyperedge.description.clone();
|
||||
let embedding = hyperedge.embedding.to_vec();
|
||||
let confidence = hyperedge.confidence.unwrap_or(1.0) as f32;
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let core_edge = CoreHyperedge::new(nodes, description, embedding, confidence);
|
||||
let edge_id = core_edge.id.clone();
|
||||
let mut hg = hypergraph.write().expect("RwLock poisoned");
|
||||
hg.add_hyperedge(core_edge)
|
||||
.map_err(|e| Error::from_reason(format!("Failed to create hyperedge: {}", e)))?;
|
||||
Ok(edge_id)
|
||||
})
|
||||
.await
|
||||
.map_err(|e| Error::from_reason(format!("Task failed: {}", e)))?
|
||||
}
|
||||
|
||||
/// Query the graph using Cypher-like syntax
|
||||
///
|
||||
/// # Example
|
||||
/// ```javascript
|
||||
/// const results = await db.query('MATCH (n) RETURN n LIMIT 10');
|
||||
/// ```
|
||||
#[napi]
|
||||
pub async fn query(&self, cypher: String) -> Result<JsQueryResult> {
|
||||
let graph_db = self.graph_db.clone();
|
||||
let hypergraph = self.hypergraph.clone();
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
// Parse the Cypher query
|
||||
let parsed = parse_cypher(&cypher)
|
||||
.map_err(|e| Error::from_reason(format!("Cypher parse error: {}", e)))?;
|
||||
|
||||
let gdb = graph_db.read().expect("RwLock poisoned");
|
||||
let hg = hypergraph.read().expect("RwLock poisoned");
|
||||
|
||||
let mut result_nodes: Vec<JsNodeResult> = Vec::new();
|
||||
let mut result_edges: Vec<JsEdgeResult> = Vec::new();
|
||||
|
||||
// Execute each statement
|
||||
for statement in &parsed.statements {
|
||||
match statement {
|
||||
Statement::Match(match_clause) => {
|
||||
// Extract label from match patterns for query
|
||||
for pattern in &match_clause.patterns {
|
||||
if let ruvector_graph::cypher::ast::Pattern::Node(node_pattern) =
|
||||
pattern
|
||||
{
|
||||
for label in &node_pattern.labels {
|
||||
let nodes = gdb.get_nodes_by_label(label);
|
||||
for node in nodes {
|
||||
result_nodes.push(JsNodeResult {
|
||||
id: node.id.clone(),
|
||||
labels: node
|
||||
.labels
|
||||
.iter()
|
||||
.map(|l| l.name.clone())
|
||||
.collect(),
|
||||
properties: node
|
||||
.properties
|
||||
.iter()
|
||||
.map(|(k, v)| (k.clone(), format!("{:?}", v)))
|
||||
.collect(),
|
||||
});
|
||||
}
|
||||
}
|
||||
// If no labels specified, return all nodes (simplified)
|
||||
if node_pattern.labels.is_empty() && node_pattern.variable.is_some()
|
||||
{
|
||||
// This would need iteration over all nodes - for now just stats
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Statement::Create(create_clause) => {
|
||||
// Handle CREATE - but we need mutable access, so skip in query
|
||||
}
|
||||
Statement::Return(_) => {
|
||||
// RETURN is handled implicitly
|
||||
}
|
||||
_ => {}
|
||||
}
|
||||
}
|
||||
|
||||
let stats = hg.stats();
|
||||
|
||||
Ok::<JsQueryResult, Error>(JsQueryResult {
|
||||
nodes: result_nodes,
|
||||
edges: result_edges,
|
||||
stats: Some(JsGraphStats {
|
||||
total_nodes: stats.total_entities as u32,
|
||||
total_edges: stats.total_hyperedges as u32,
|
||||
avg_degree: stats.avg_entity_degree as f64,
|
||||
}),
|
||||
})
|
||||
})
|
||||
.await
|
||||
.map_err(|e| Error::from_reason(format!("Task failed: {}", e)))?
|
||||
}
|
||||
|
||||
    /// Query the graph synchronously
    ///
    /// NOTE(review): this is currently a stub — the `cypher` text is never
    /// parsed or executed; the call always returns empty node/edge lists
    /// together with the current hypergraph statistics. Confirm callers
    /// expect this before relying on it.
    ///
    /// # Example
    /// ```javascript
    /// const results = db.querySync('MATCH (n) RETURN n LIMIT 10');
    /// ```
    #[napi]
    pub fn query_sync(&self, cypher: String) -> Result<JsQueryResult> {
        let hg = self.hypergraph.read().expect("RwLock poisoned");
        let stats = hg.stats();

        // Simplified query result for now
        Ok(JsQueryResult {
            nodes: vec![],
            edges: vec![],
            stats: Some(JsGraphStats {
                total_nodes: stats.total_entities as u32,
                total_edges: stats.total_hyperedges as u32,
                avg_degree: stats.avg_entity_degree as f64,
            }),
        })
    }
|
||||
|
||||
/// Search for similar hyperedges
|
||||
///
|
||||
/// # Example
|
||||
/// ```javascript
|
||||
/// const results = await db.searchHyperedges({
|
||||
/// embedding: new Float32Array([0.5, 0.5, 0.5]),
|
||||
/// k: 10
|
||||
/// });
|
||||
/// ```
|
||||
#[napi]
|
||||
pub async fn search_hyperedges(
|
||||
&self,
|
||||
query: JsHyperedgeQuery,
|
||||
) -> Result<Vec<JsHyperedgeResult>> {
|
||||
let hypergraph = self.hypergraph.clone();
|
||||
let embedding = query.embedding.to_vec();
|
||||
let k = query.k as usize;
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let hg = hypergraph.read().expect("RwLock poisoned");
|
||||
let results = hg.search_hyperedges(&embedding, k);
|
||||
|
||||
Ok::<Vec<JsHyperedgeResult>, Error>(
|
||||
results
|
||||
.into_iter()
|
||||
.map(|(id, score)| JsHyperedgeResult {
|
||||
id,
|
||||
score: f64::from(score),
|
||||
})
|
||||
.collect(),
|
||||
)
|
||||
})
|
||||
.await
|
||||
.map_err(|e| Error::from_reason(format!("Task failed: {}", e)))?
|
||||
}
|
||||
|
||||
/// Get k-hop neighbors from a starting node
|
||||
///
|
||||
/// # Example
|
||||
/// ```javascript
|
||||
/// const neighbors = await db.kHopNeighbors('node1', 2);
|
||||
/// ```
|
||||
#[napi]
|
||||
pub async fn k_hop_neighbors(&self, start_node: String, k: u32) -> Result<Vec<String>> {
|
||||
let hypergraph = self.hypergraph.clone();
|
||||
let hops = k as usize;
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let hg = hypergraph.read().expect("RwLock poisoned");
|
||||
let neighbors = hg.k_hop_neighbors(start_node, hops);
|
||||
Ok::<Vec<String>, Error>(neighbors.into_iter().collect())
|
||||
})
|
||||
.await
|
||||
.map_err(|e| Error::from_reason(format!("Task failed: {}", e)))?
|
||||
}
|
||||
|
||||
/// Begin a new transaction
|
||||
///
|
||||
/// # Example
|
||||
/// ```javascript
|
||||
/// const txId = await db.begin();
|
||||
/// ```
|
||||
#[napi]
|
||||
pub async fn begin(&self) -> Result<String> {
|
||||
let tm = self.transaction_manager.clone();
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let mut manager = tm.write().expect("RwLock poisoned");
|
||||
Ok::<String, Error>(manager.begin())
|
||||
})
|
||||
.await
|
||||
.map_err(|e| Error::from_reason(format!("Task failed: {}", e)))?
|
||||
}
|
||||
|
||||
/// Commit a transaction
|
||||
///
|
||||
/// # Example
|
||||
/// ```javascript
|
||||
/// await db.commit(txId);
|
||||
/// ```
|
||||
#[napi]
|
||||
pub async fn commit(&self, tx_id: String) -> Result<()> {
|
||||
let tm = self.transaction_manager.clone();
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let mut manager = tm.write().expect("RwLock poisoned");
|
||||
manager
|
||||
.commit(&tx_id)
|
||||
.map_err(|e| Error::from_reason(format!("Failed to commit: {}", e)))
|
||||
})
|
||||
.await
|
||||
.map_err(|e| Error::from_reason(format!("Task failed: {}", e)))?
|
||||
}
|
||||
|
||||
/// Rollback a transaction
|
||||
///
|
||||
/// # Example
|
||||
/// ```javascript
|
||||
/// await db.rollback(txId);
|
||||
/// ```
|
||||
#[napi]
|
||||
pub async fn rollback(&self, tx_id: String) -> Result<()> {
|
||||
let tm = self.transaction_manager.clone();
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let mut manager = tm.write().expect("RwLock poisoned");
|
||||
manager
|
||||
.rollback(&tx_id)
|
||||
.map_err(|e| Error::from_reason(format!("Failed to rollback: {}", e)))
|
||||
})
|
||||
.await
|
||||
.map_err(|e| Error::from_reason(format!("Task failed: {}", e)))?
|
||||
}
|
||||
|
||||
/// Batch insert nodes and edges
|
||||
///
|
||||
/// # Example
|
||||
/// ```javascript
|
||||
/// await db.batchInsert({
|
||||
/// nodes: [{ id: 'n1', embedding: new Float32Array([1, 2]) }],
|
||||
/// edges: [{ from: 'n1', to: 'n2', description: 'knows' }]
|
||||
/// });
|
||||
/// ```
|
||||
#[napi]
|
||||
pub async fn batch_insert(&self, batch: JsBatchInsert) -> Result<JsBatchResult> {
|
||||
let hypergraph = self.hypergraph.clone();
|
||||
let nodes = batch.nodes;
|
||||
let edges = batch.edges;
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let mut hg = hypergraph.write().expect("RwLock poisoned");
|
||||
let mut node_ids = Vec::new();
|
||||
let mut edge_ids = Vec::new();
|
||||
|
||||
// Insert nodes
|
||||
for node in nodes {
|
||||
hg.add_entity(node.id.clone(), node.embedding.to_vec());
|
||||
node_ids.push(node.id);
|
||||
}
|
||||
|
||||
// Insert edges
|
||||
for edge in edges {
|
||||
let nodes = vec![edge.from.clone(), edge.to.clone()];
|
||||
let embedding = edge.embedding.to_vec();
|
||||
let confidence = edge.confidence.unwrap_or(1.0) as f32;
|
||||
let core_edge = CoreHyperedge::new(nodes, edge.description, embedding, confidence);
|
||||
let edge_id = core_edge.id.clone();
|
||||
hg.add_hyperedge(core_edge)
|
||||
.map_err(|e| Error::from_reason(format!("Failed to insert edge: {}", e)))?;
|
||||
edge_ids.push(edge_id);
|
||||
}
|
||||
|
||||
Ok::<JsBatchResult, Error>(JsBatchResult { node_ids, edge_ids })
|
||||
})
|
||||
.await
|
||||
.map_err(|e| Error::from_reason(format!("Task failed: {}", e)))?
|
||||
}
|
||||
|
||||
    /// Subscribe to graph changes (returns a change stream)
    ///
    /// NOTE(review): placeholder — the callback is accepted but never
    /// invoked and no listener is registered. A real implementation would
    /// wire `callback` into an event-emitter on mutation paths.
    ///
    /// # Example
    /// ```javascript
    /// const unsubscribe = db.subscribe((change) => {
    ///   console.log('Graph changed:', change);
    /// });
    /// ```
    #[napi]
    pub fn subscribe(&self, callback: JsFunction) -> Result<()> {
        // Placeholder for event emitter pattern
        // In a real implementation, this would set up a change listener
        Ok(())
    }
|
||||
|
||||
/// Get graph statistics
|
||||
///
|
||||
/// # Example
|
||||
/// ```javascript
|
||||
/// const stats = await db.stats();
|
||||
/// console.log(`Nodes: ${stats.totalNodes}, Edges: ${stats.totalEdges}`);
|
||||
/// ```
|
||||
#[napi]
|
||||
pub async fn stats(&self) -> Result<JsGraphStats> {
|
||||
let hypergraph = self.hypergraph.clone();
|
||||
|
||||
tokio::task::spawn_blocking(move || {
|
||||
let hg = hypergraph.read().expect("RwLock poisoned");
|
||||
let stats = hg.stats();
|
||||
|
||||
Ok::<JsGraphStats, Error>(JsGraphStats {
|
||||
total_nodes: stats.total_entities as u32,
|
||||
total_edges: stats.total_hyperedges as u32,
|
||||
avg_degree: stats.avg_entity_degree as f64,
|
||||
})
|
||||
})
|
||||
.await
|
||||
.map_err(|e| Error::from_reason(format!("Task failed: {}", e)))?
|
||||
}
|
||||
}
|
||||
|
||||
/// Get the version of the library
|
||||
#[napi]
|
||||
pub fn version() -> String {
|
||||
env!("CARGO_PKG_VERSION").to_string()
|
||||
}
|
||||
|
||||
/// Test function to verify bindings
|
||||
#[napi]
|
||||
pub fn hello() -> String {
|
||||
"Hello from RuVector Graph Node.js bindings!".to_string()
|
||||
}
|
||||
123
vendor/ruvector/crates/ruvector-graph-node/src/streaming.rs
vendored
Normal file
123
vendor/ruvector/crates/ruvector-graph-node/src/streaming.rs
vendored
Normal file
@@ -0,0 +1,123 @@
|
||||
//! Streaming query results using AsyncIterator pattern
|
||||
|
||||
use crate::types::*;
|
||||
use futures::stream::Stream;
|
||||
use napi::bindgen_prelude::*;
|
||||
use napi_derive::napi;
|
||||
use std::pin::Pin;
|
||||
use std::task::{Context, Poll};
|
||||
|
||||
/// Streaming query result iterator
///
/// Wraps a boxed, pinned `Stream` of query results so it can be handed
/// across the NAPI boundary as an opaque object.
#[napi]
pub struct QueryResultStream {
    // The underlying async stream; `Send` so it can move between threads.
    inner: Pin<Box<dyn Stream<Item = JsQueryResult> + Send>>,
}
|
||||
|
||||
impl QueryResultStream {
|
||||
/// Create a new query result stream
|
||||
pub fn new(stream: Pin<Box<dyn Stream<Item = JsQueryResult> + Send>>) -> Self {
|
||||
Self { inner: stream }
|
||||
}
|
||||
}
|
||||
|
||||
#[napi]
impl QueryResultStream {
    /// Get the next result from the stream
    ///
    /// NOTE(review): placeholder — the inner stream is never polled and
    /// this always returns `None`, so JS consumers terminate immediately.
    ///
    /// # Example
    /// ```javascript
    /// const stream = await db.queryStream('MATCH (n) RETURN n');
    /// while (true) {
    ///   const result = await stream.next();
    ///   if (!result) break;
    ///   console.log(result);
    /// }
    /// ```
    #[napi]
    pub fn next(&mut self) -> Result<Option<JsQueryResult>> {
        // This would poll the stream in a real implementation
        Ok(None)
    }
}
|
||||
|
||||
/// Streaming hyperedge result iterator
///
/// A simple cursor over an already-materialized result vector; `index`
/// points at the next unread element.
#[napi]
pub struct HyperedgeStream {
    // Materialized results being iterated.
    results: Vec<JsHyperedgeResult>,
    // Position of the next element to hand out.
    index: usize,
}
|
||||
|
||||
impl HyperedgeStream {
|
||||
/// Create a new hyperedge stream
|
||||
pub fn new(results: Vec<JsHyperedgeResult>) -> Self {
|
||||
Self { results, index: 0 }
|
||||
}
|
||||
}
|
||||
|
||||
#[napi]
|
||||
impl HyperedgeStream {
|
||||
/// Get the next hyperedge result
|
||||
///
|
||||
/// # Example
|
||||
/// ```javascript
|
||||
/// const stream = await db.searchHyperedgesStream(query);
|
||||
/// for await (const result of stream) {
|
||||
/// console.log(result);
|
||||
/// }
|
||||
/// ```
|
||||
#[napi]
|
||||
pub fn next(&mut self) -> Result<Option<JsHyperedgeResult>> {
|
||||
if self.index < self.results.len() {
|
||||
let result = self.results[self.index].clone();
|
||||
self.index += 1;
|
||||
Ok(Some(result))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Collect all remaining results
|
||||
#[napi]
|
||||
pub fn collect(&mut self) -> Vec<JsHyperedgeResult> {
|
||||
let remaining = self.results[self.index..].to_vec();
|
||||
self.index = self.results.len();
|
||||
remaining
|
||||
}
|
||||
}
|
||||
|
||||
/// Node stream iterator
///
/// A simple cursor over an already-materialized node vector; `index`
/// points at the next unread element.
#[napi]
pub struct NodeStream {
    // Materialized nodes being iterated.
    nodes: Vec<JsNode>,
    // Position of the next element to hand out.
    index: usize,
}
|
||||
|
||||
impl NodeStream {
|
||||
/// Create a new node stream
|
||||
pub fn new(nodes: Vec<JsNode>) -> Self {
|
||||
Self { nodes, index: 0 }
|
||||
}
|
||||
}
|
||||
|
||||
#[napi]
|
||||
impl NodeStream {
|
||||
/// Get the next node
|
||||
#[napi]
|
||||
pub fn next(&mut self) -> Result<Option<JsNode>> {
|
||||
if self.index < self.nodes.len() {
|
||||
let node = self.nodes[self.index].clone();
|
||||
self.index += 1;
|
||||
Ok(Some(node))
|
||||
} else {
|
||||
Ok(None)
|
||||
}
|
||||
}
|
||||
|
||||
/// Collect all remaining nodes
|
||||
#[napi]
|
||||
pub fn collect(&mut self) -> Vec<JsNode> {
|
||||
let remaining = self.nodes[self.index..].to_vec();
|
||||
self.index = self.nodes.len();
|
||||
remaining
|
||||
}
|
||||
}
|
||||
161
vendor/ruvector/crates/ruvector-graph-node/src/transactions.rs
vendored
Normal file
161
vendor/ruvector/crates/ruvector-graph-node/src/transactions.rs
vendored
Normal file
@@ -0,0 +1,161 @@
|
||||
//! Transaction support for graph database operations
|
||||
|
||||
use std::collections::HashMap;
|
||||
use uuid::Uuid;
|
||||
|
||||
/// Transaction state
///
/// Lifecycle: `Active` → (`Committed` | `RolledBack`). The two terminal
/// states are permanent; the manager rejects any further transition.
#[derive(Debug, Clone)]
pub enum TransactionState {
    // Accepting operations; may still be committed or rolled back.
    Active,
    // Terminal: successfully committed.
    Committed,
    // Terminal: aborted.
    RolledBack,
}
|
||||
|
||||
/// Transaction metadata
///
/// Bookkeeping record only — operations are stored as opaque strings and
/// are not replayed or undone by this module.
#[derive(Debug, Clone)]
pub struct Transaction {
    // UUID string assigned by `TransactionManager::begin`.
    pub id: String,
    // Current lifecycle state.
    pub state: TransactionState,
    // Operation log (free-form strings appended via `add_operation`).
    pub operations: Vec<String>,
}
|
||||
|
||||
/// Transaction manager
///
/// In-memory registry of transactions keyed by id. Not thread-safe on its
/// own; callers in lib.rs wrap it in `Arc<RwLock<…>>`.
pub struct TransactionManager {
    // All known transactions, including finished ones until `cleanup()` runs.
    transactions: HashMap<String, Transaction>,
}
|
||||
|
||||
impl TransactionManager {
|
||||
/// Create a new transaction manager
|
||||
pub fn new() -> Self {
|
||||
Self {
|
||||
transactions: HashMap::new(),
|
||||
}
|
||||
}
|
||||
|
||||
/// Begin a new transaction
|
||||
pub fn begin(&mut self) -> String {
|
||||
let tx_id = Uuid::new_v4().to_string();
|
||||
let tx = Transaction {
|
||||
id: tx_id.clone(),
|
||||
state: TransactionState::Active,
|
||||
operations: Vec::new(),
|
||||
};
|
||||
self.transactions.insert(tx_id.clone(), tx);
|
||||
tx_id
|
||||
}
|
||||
|
||||
/// Commit a transaction
|
||||
pub fn commit(&mut self, tx_id: &str) -> Result<(), String> {
|
||||
let tx = self
|
||||
.transactions
|
||||
.get_mut(tx_id)
|
||||
.ok_or_else(|| format!("Transaction not found: {}", tx_id))?;
|
||||
|
||||
match tx.state {
|
||||
TransactionState::Active => {
|
||||
tx.state = TransactionState::Committed;
|
||||
Ok(())
|
||||
}
|
||||
TransactionState::Committed => Err("Transaction already committed".to_string()),
|
||||
TransactionState::RolledBack => Err("Transaction already rolled back".to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Rollback a transaction
|
||||
pub fn rollback(&mut self, tx_id: &str) -> Result<(), String> {
|
||||
let tx = self
|
||||
.transactions
|
||||
.get_mut(tx_id)
|
||||
.ok_or_else(|| format!("Transaction not found: {}", tx_id))?;
|
||||
|
||||
match tx.state {
|
||||
TransactionState::Active => {
|
||||
tx.state = TransactionState::RolledBack;
|
||||
Ok(())
|
||||
}
|
||||
TransactionState::Committed => Err("Cannot rollback committed transaction".to_string()),
|
||||
TransactionState::RolledBack => Err("Transaction already rolled back".to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Add an operation to a transaction
|
||||
pub fn add_operation(&mut self, tx_id: &str, operation: String) -> Result<(), String> {
|
||||
let tx = self
|
||||
.transactions
|
||||
.get_mut(tx_id)
|
||||
.ok_or_else(|| format!("Transaction not found: {}", tx_id))?;
|
||||
|
||||
match tx.state {
|
||||
TransactionState::Active => {
|
||||
tx.operations.push(operation);
|
||||
Ok(())
|
||||
}
|
||||
_ => Err("Transaction is not active".to_string()),
|
||||
}
|
||||
}
|
||||
|
||||
/// Get transaction state
|
||||
pub fn get_state(&self, tx_id: &str) -> Option<TransactionState> {
|
||||
self.transactions.get(tx_id).map(|tx| tx.state.clone())
|
||||
}
|
||||
|
||||
/// Clean up old transactions
|
||||
pub fn cleanup(&mut self) {
|
||||
self.transactions
|
||||
.retain(|_, tx| matches!(tx.state, TransactionState::Active));
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for TransactionManager {
|
||||
fn default() -> Self {
|
||||
Self::new()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // Happy path: begin -> add operation -> commit, then verify that a
    // second commit on the same id is rejected.
    #[test]
    fn test_transaction_lifecycle() {
        let mut tm = TransactionManager::new();

        // Begin transaction
        let tx_id = tm.begin();
        assert!(matches!(
            tm.get_state(&tx_id),
            Some(TransactionState::Active)
        ));

        // Add operation
        tm.add_operation(&tx_id, "CREATE NODE".to_string()).unwrap();

        // Commit
        tm.commit(&tx_id).unwrap();
        assert!(matches!(
            tm.get_state(&tx_id),
            Some(TransactionState::Committed)
        ));

        // Cannot commit again
        assert!(tm.commit(&tx_id).is_err());
    }

    // Rollback path: a rolled-back transaction is terminal and cannot be
    // rolled back a second time.
    #[test]
    fn test_transaction_rollback() {
        let mut tm = TransactionManager::new();

        let tx_id = tm.begin();
        tm.add_operation(&tx_id, "CREATE NODE".to_string()).unwrap();

        // Rollback
        tm.rollback(&tx_id).unwrap();
        assert!(matches!(
            tm.get_state(&tx_id),
            Some(TransactionState::RolledBack)
        ));

        // Cannot rollback again
        assert!(tm.rollback(&tx_id).is_err());
    }
}
|
||||
213
vendor/ruvector/crates/ruvector-graph-node/src/types.rs
vendored
Normal file
213
vendor/ruvector/crates/ruvector-graph-node/src/types.rs
vendored
Normal file
@@ -0,0 +1,213 @@
|
||||
//! Type conversions between Rust and JavaScript
|
||||
|
||||
use napi::bindgen_prelude::*;
|
||||
use napi_derive::napi;
|
||||
use ruvector_core::DistanceMetric;
|
||||
use std::collections::HashMap;
|
||||
|
||||
/// Distance metric for similarity calculation
///
/// Exposed to JS as string values ('Euclidean', 'Cosine', …); converted to
/// the core `DistanceMetric` via the `From` impl below.
#[napi(string_enum)]
#[derive(Debug)]
pub enum JsDistanceMetric {
    Euclidean,
    Cosine,
    DotProduct,
    Manhattan,
}
|
||||
|
||||
impl From<JsDistanceMetric> for DistanceMetric {
|
||||
fn from(metric: JsDistanceMetric) -> Self {
|
||||
match metric {
|
||||
JsDistanceMetric::Euclidean => DistanceMetric::Euclidean,
|
||||
JsDistanceMetric::Cosine => DistanceMetric::Cosine,
|
||||
JsDistanceMetric::DotProduct => DistanceMetric::DotProduct,
|
||||
JsDistanceMetric::Manhattan => DistanceMetric::Manhattan,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Graph database configuration options
#[napi(object)]
#[derive(Debug, Clone)]
pub struct JsGraphOptions {
    /// Distance metric for embeddings (defaults to Cosine when omitted).
    pub distance_metric: Option<JsDistanceMetric>,
    /// Vector dimensions.
    /// NOTE(review): not read by `GraphDatabase::new` in this crate —
    /// confirm whether it is consumed elsewhere or dead.
    pub dimensions: Option<u32>,
    /// Storage path; when set, a persistent backend is opened.
    pub storage_path: Option<String>,
}
|
||||
|
||||
impl Default for JsGraphOptions {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
distance_metric: Some(JsDistanceMetric::Cosine),
|
||||
dimensions: Some(384),
|
||||
storage_path: None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Node in the graph
///
/// Input shape for `createNode` / `batchInsert`; the embedding is copied
/// out (`to_vec`) on the Rust side before use.
#[napi(object)]
#[derive(Clone)]
pub struct JsNode {
    /// Node ID
    pub id: String,
    /// Node embedding
    pub embedding: Float32Array,
    /// Node labels (e.g., ["Person", "Employee"]); optional.
    pub labels: Option<Vec<String>>,
    /// Optional string key/value properties.
    pub properties: Option<HashMap<String, String>>,
}
|
||||
|
||||
/// Edge between two nodes
///
/// Stored internally as a two-node hyperedge `[from, to]`.
#[napi(object)]
#[derive(Clone)]
pub struct JsEdge {
    /// Source node ID
    pub from: String,
    /// Target node ID
    pub to: String,
    /// Edge description/label
    pub description: String,
    /// Edge embedding
    pub embedding: Float32Array,
    /// Confidence score (0.0-1.0); defaults to 1.0 when omitted.
    pub confidence: Option<f64>,
    /// Optional metadata
    /// NOTE(review): not consumed by `create_edge`/`batch_insert` in this
    /// crate — confirm intended use.
    pub metadata: Option<HashMap<String, String>>,
}
|
||||
|
||||
/// Hyperedge connecting multiple nodes
#[napi(object)]
#[derive(Clone)]
pub struct JsHyperedge {
    /// Node IDs connected by this hyperedge
    pub nodes: Vec<String>,
    /// Natural language description of the relationship
    pub description: String,
    /// Embedding of the hyperedge description
    pub embedding: Float32Array,
    /// Confidence weight (0.0-1.0); defaults to 1.0 when omitted.
    pub confidence: Option<f64>,
    /// Optional metadata
    /// NOTE(review): not consumed by `create_hyperedge` in this crate —
    /// confirm intended use.
    pub metadata: Option<HashMap<String, String>>,
}
|
||||
|
||||
/// Query for searching hyperedges
#[napi(object)]
#[derive(Clone)]
pub struct JsHyperedgeQuery {
    /// Query embedding to compare against stored hyperedges.
    pub embedding: Float32Array,
    /// Number of results to return (k-nearest).
    pub k: u32,
}
|
||||
|
||||
/// Hyperedge search result
#[napi(object)]
#[derive(Debug, Clone)]
pub struct JsHyperedgeResult {
    /// Hyperedge ID
    pub id: String,
    /// Similarity score (widened from the core's f32).
    pub score: f64,
}
|
||||
|
||||
/// Node result from query (without embedding)
#[napi(object)]
#[derive(Debug, Clone)]
pub struct JsNodeResult {
    /// Node ID
    pub id: String,
    /// Node labels
    pub labels: Vec<String>,
    /// Node properties; values are Debug-formatted strings of the stored
    /// property values (see `GraphDatabase::query`).
    pub properties: HashMap<String, String>,
}
|
||||
|
||||
/// Edge result from query
///
/// NOTE(review): never populated by `GraphDatabase::query` in this crate
/// (result edge lists are always empty) — kept for the result shape.
#[napi(object)]
#[derive(Debug, Clone)]
pub struct JsEdgeResult {
    /// Edge ID
    pub id: String,
    /// Source node ID
    pub from: String,
    /// Target node ID
    pub to: String,
    /// Edge type/label
    pub edge_type: String,
    /// Edge properties
    pub properties: HashMap<String, String>,
}
|
||||
|
||||
/// Query result
#[napi(object)]
#[derive(Clone)]
pub struct JsQueryResult {
    /// Nodes returned by the query
    pub nodes: Vec<JsNodeResult>,
    /// Edges returned by the query (currently always empty; see JsEdgeResult).
    pub edges: Vec<JsEdgeResult>,
    /// Optional statistics snapshot of the hypergraph index.
    pub stats: Option<JsGraphStats>,
}
|
||||
|
||||
/// Graph statistics
///
/// Counters are truncated from the core's wider types via `as` casts in
/// lib.rs; counts above u32::MAX would wrap.
#[napi(object)]
#[derive(Debug, Clone)]
pub struct JsGraphStats {
    /// Total number of nodes
    pub total_nodes: u32,
    /// Total number of edges
    pub total_edges: u32,
    /// Average node degree
    pub avg_degree: f64,
}
|
||||
|
||||
/// Batch insert data
///
/// Nodes are inserted before edges (see `GraphDatabase::batch_insert`).
#[napi(object)]
#[derive(Clone)]
pub struct JsBatchInsert {
    /// Nodes to insert
    pub nodes: Vec<JsNode>,
    /// Edges to insert
    pub edges: Vec<JsEdge>,
}
|
||||
|
||||
/// Batch insert result
#[napi(object)]
#[derive(Debug, Clone)]
pub struct JsBatchResult {
    /// IDs of inserted nodes, in insertion order.
    pub node_ids: Vec<String>,
    /// IDs of inserted edges, in insertion order.
    pub edge_ids: Vec<String>,
}
|
||||
|
||||
/// Temporal granularity
///
/// Exposed to JS as string values ('Hourly', 'Daily', …).
#[napi(string_enum)]
#[derive(Debug)]
pub enum JsTemporalGranularity {
    Hourly,
    Daily,
    Monthly,
    Yearly,
}
|
||||
|
||||
/// Temporal hyperedge
///
/// NOTE(review): no method in this crate consumes this type yet — confirm
/// the intended API surface.
#[napi(object)]
#[derive(Clone)]
pub struct JsTemporalHyperedge {
    /// Base hyperedge
    pub hyperedge: JsHyperedge,
    /// Creation timestamp (Unix epoch seconds)
    pub timestamp: i64,
    /// Optional expiration timestamp (Unix epoch seconds).
    pub expires_at: Option<i64>,
    /// Temporal context / bucketing granularity.
    pub granularity: JsTemporalGranularity,
}
|
||||
Reference in New Issue
Block a user