Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'

This commit is contained in:
ruv
2026-02-28 14:39:40 -05:00
7854 changed files with 3522914 additions and 0 deletions

View File

@@ -0,0 +1,46 @@
//! MicroLoRA WASM - Ultra-fast Low-Rank Adaptation for Edge AI
//!
//! This crate provides rank-2 LoRA (Low-Rank Adaptation) matrices optimized for
//! WASM execution with <100us adaptation latency. Designed for real-time
//! per-operator-type learning in query optimization systems.
//!
//! ## Key Features
//!
//! - **Rank-2 LoRA**: Minimal parameter count (2d parameters per adapter)
//! - **Per-Operator Scoping**: Separate adapters for different operator types
//! - **<100us Adaptation**: Instant weight updates for real-time learning
//! - **WASM-Optimized**: no_std compatible, minimal allocations
//!
//! ## Architecture
//!
//! ```text
//! Input Embedding (d-dim)
//! |
//! v
//! +----------+
//! | A: d x 2 | Down projection
//! +----------+
//!      |
//!      v
//! +----------+
//! | B: 2 x d | Up projection
//! +----------+
//! |
//! v
//! Delta W = alpha * (A @ B)
//! |
//! v
//! Output = Input + Delta W
//! ```
// Internal sub-modules: core rank-2 LoRA math, per-operator-type adapter
// scoping, and trajectory recording.
mod lora;
mod operator_scope;
mod trajectory;
// Public Rust API re-exported at the crate root so downstream users can
// write `use micro_lora::MicroLoRAEngine` etc. without knowing the layout.
pub use lora::{LoRAConfig, LoRAPair, MicroLoRAEngine};
pub use operator_scope::{OperatorScope, ScopedLoRA};
pub use trajectory::{Trajectory, TrajectoryBuffer, TrajectoryStats};
// Re-export core types for JS
// (glob re-exports of each module's `wasm_exports` — presumably
// wasm_bindgen wrappers; confirm in the module definitions).
pub use lora::wasm_exports::*;
pub use operator_scope::wasm_exports::*;