# Cargo.toml — ruvector-attention-wasm
# (The lines "Files / 41 lines / 1.2 KiB / TOML" were web-page scrape
# residue, not TOML; preserved here as a comment so the file parses.)
[package]
# Cargo convention: `name` then `version` first, remaining keys
# alphabetical, `description` last.
name = "ruvector-attention-wasm"
version.workspace = true
authors.workspace = true
categories = ["wasm", "algorithms", "science"]
documentation = "https://docs.rs/ruvector-attention-wasm"
edition.workspace = true
homepage = "https://ruv.io/ruvector"
keywords = ["wasm", "attention", "transformer", "flash-attention", "llm"]
license.workspace = true
readme = "README.md"
repository.workspace = true
description = "High-performance WebAssembly attention mechanisms: Multi-Head, Flash, Hyperbolic, MoE, CGT Sheaf Attention with GPU acceleration for transformers and LLMs"

[lib]
# cdylib: the .wasm artifact consumed by wasm-bindgen/wasm-pack;
# rlib: lets other Rust crates (and tests) link this crate natively.
crate-type = ["cdylib", "rlib"]

[dependencies]
# Sorted alphabetically per Cargo convention.
console_error_panic_hook = { version = "0.1", optional = true }
# `js` feature is required for getrandom to work on wasm32-unknown-unknown.
getrandom = { version = "0.2", features = ["js"] }
js-sys = "0.3"
ruvector-attention = { version = "2.0", path = "../ruvector-attention", default-features = false, features = ["wasm"] }
serde = { version = "1.0", features = ["derive"] }
serde-wasm-bindgen = "0.6"
wasm-bindgen = "0.2"
web-sys = { version = "0.3", features = ["console"] }

[dev-dependencies]
wasm-bindgen-test = "0.3"

[features]
# Forward Rust panics to the browser console by default; disable with
# `default-features = false` to shave binary size.
default = ["console_error_panic_hook"]

# NOTE(review): Cargo only honors [profile.*] in the workspace-root
# manifest; this crate inherits workspace keys, so if it is a workspace
# member these settings are ignored with a warning — confirm and move
# them to the workspace root if so.
[profile.release]
opt-level = "s"    # optimize for size, the usual choice for .wasm output
lto = true
codegen-units = 1  # slower build, smaller/faster binary

[package.metadata.wasm-pack.profile.release]
# Skip the wasm-opt post-processing pass — presumably to avoid a
# binaryen incompatibility or to speed up release builds; verify before
# re-enabling.
wasm-opt = false