[package]
name = "hyperbolic-attention"
version = "0.1.0"
authors = ["rUv Research"]
categories = ["science", "algorithms", "mathematics"]
documentation = "https://docs.rs/hyperbolic-attention"
edition = "2021"
homepage = "https://ruv.io/research"
keywords = ["hyperbolic", "attention", "geometry", "neural", "transformer"]
license = "MIT OR Apache-2.0"
readme = "README.md"
repository = "https://github.com/ruvnet/ruvector"
description = "Hyperbolic attention networks with O(log n) hierarchical reasoning capacity"

# Empty workspace table: this package is intentionally NOT part of the
# parent workspace (opts out of workspace inheritance).
[workspace]

[lib]
name = "hyperbolic_attention"
path = "src/lib.rs"

[dependencies]
# Core dependencies would go here in production.
# For the research prototype, keeping this minimal.

[dev-dependencies]
approx = "0.5"
criterion = "0.5"

[[bench]]
name = "hyperbolic_ops"
path = "benches/hyperbolic_ops.rs"
harness = false  # Criterion supplies its own benchmark harness
required-features = []

[profile.release]
opt-level = 3
lto = "fat"
codegen-units = 1

# Architecture-specific SIMD dependency slots (currently empty placeholders).
[target.'cfg(target_arch = "x86_64")'.dependencies]

[target.'cfg(target_arch = "aarch64")'.dependencies]

[features]
default = []
# SIMD optimizations (enabled by default on supported platforms)
simd = []
# Linear attention (O(n) complexity)
linear-attention = []
# Multi-curvature product spaces
multi-curvature = []
# Full feature set
full = ["simd", "linear-attention", "multi-curvature"]