Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'
vendor/ruvector/crates/ruvector-attention-wasm/pkg/package.json (vendored, new file, 28 lines)
@@ -0,0 +1,28 @@
{
  "name": "ruvector-attention-wasm",
  "collaborators": [
    "Ruvector Team"
  ],
  "description": "High-performance WebAssembly attention mechanisms: Multi-Head, Flash, Hyperbolic, MoE, CGT Sheaf Attention with GPU acceleration for transformers and LLMs",
  "version": "2.0.5",
  "license": "MIT",
  "repository": {
    "type": "git",
    "url": "https://github.com/ruvnet/ruvector"
  },
  "files": [
    "ruvector_attention_wasm_bg.wasm",
    "ruvector_attention_wasm.js",
    "ruvector_attention_wasm.d.ts"
  ],
  "main": "ruvector_attention_wasm.js",
  "homepage": "https://ruv.io/ruvector",
  "types": "ruvector_attention_wasm.d.ts",
  "keywords": [
    "wasm",
    "attention",
    "transformer",
    "flash-attention",
    "llm"
  ]
}
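For context, a hedged sketch of how this vendored package resolves when consumed from a bundler: the "main" field points the module resolver at ruvector_attention_wasm.js (the wasm-bindgen glue, which in turn loads ruvector_attention_wasm_bg.wasm from the "files" list), and "types" gives TypeScript the matching declarations. The sketch deliberately names no specific exports, since the actual API surface lives in ruvector_attention_wasm.d.ts, which this diff does not include.

// sketch.ts -- minimal consumer, assuming a wasm-pack "bundler"-style package.
// No specific export names are assumed; inspect the module at runtime instead.
import * as attention from "ruvector-attention-wasm";

// Under a bundler, the JS glue referenced by "main" loads the .wasm binary
// itself, so no manual fetch/instantiate step is needed here.
console.log(Object.keys(attention)); // lists whatever ruvector_attention_wasm.d.ts declares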