feat: ADR-033 CRV signal-line integration + ruvector-crv 6-stage pipeline

Implement full CRV (Coordinate Remote Viewing) signal-line protocol
mapping to WiFi CSI sensing via ruvector-crv:

- Stage I: CsiGestaltClassifier (6 gestalt types from amplitude/phase)
- Stage II: CsiSensoryEncoder (texture/color/temperature/sound/luminosity/dimension)
- Stage III: Mesh topology encoding (AP nodes/links → GNN graph)
- Stage IV: Coherence gate → AOL detection (signal vs noise separation)
- Stage V: Pose interrogation via differentiable search
- Stage VI: Person partitioning via MinCut clustering
- Cross-session convergence for cross-room identity

New files:
- crv/mod.rs: 1,430 lines, 43 tests
- crv_bench.rs: 8 criterion benchmarks (gestalt, sensory, pipeline, convergence)
- ADR-033: 740-line architecture decision with 30+ acceptance criteria
- patches/ruvector-crv: Fix ruvector-gnn 2.0.5 API mismatch

Dependencies: ruvector-crv 0.1.1, ruvector-gnn 2.0.5

Co-Authored-By: claude-flow <ruv@ruv.net>
This commit is contained in:
ruv
2026-03-01 22:21:59 -05:00
parent 97f2a490eb
commit 60e0e6d3c4
21 changed files with 7461 additions and 4 deletions

View File

@@ -488,12 +488,24 @@ dependencies = [
"shlex",
]
[[package]]
name = "cesu8"
version = "1.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6d43a04d8753f35258c91f8ec639f792891f748a1edbd759cf1dcea3382ad83c"
[[package]]
name = "cfg-if"
version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
[[package]]
name = "cfg_aliases"
version = "0.2.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"
[[package]]
name = "chrono"
version = "0.4.44"
@@ -601,6 +613,16 @@ dependencies = [
"windows-sys 0.59.0",
]
[[package]]
name = "combine"
version = "4.6.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ba5a308b75df32fe02788e748662718f03fde005016435c444eea572398219fd"
dependencies = [
"bytes",
"memchr",
]
[[package]]
name = "console"
version = "0.15.11"
@@ -915,6 +937,18 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "fastbloom"
version = "0.14.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4e7f34442dbe69c60fe8eaf58a8cafff81a1f278816d8ab4db255b3bef4ac3c4"
dependencies = [
"getrandom 0.3.4",
"libm",
"rand 0.9.2",
"siphasher",
]
[[package]]
name = "fastrand"
version = "2.3.0"
@@ -1291,9 +1325,11 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "899def5c37c4fd7b2664648c28120ecec138e4d395b459e5ca34f9cce2dd77fd"
dependencies = [
"cfg-if",
"js-sys",
"libc",
"r-efi",
"wasip2",
"wasm-bindgen",
]
[[package]]
@@ -1634,6 +1670,28 @@ version = "1.0.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92ecc6618181def0457392ccd0ee51198e065e016d1d527a7ac1b6dc7c1f09d2"
[[package]]
name = "jni"
version = "0.21.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1a87aa2bb7d2af34197c04845522473242e1aa17c12f4935d5856491a7fb8c97"
dependencies = [
"cesu8",
"cfg-if",
"combine",
"jni-sys",
"log",
"thiserror 1.0.69",
"walkdir",
"windows-sys 0.45.0",
]
[[package]]
name = "jni-sys"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8eaf4bc02d17cbdd7ff4c7438cafcdf7fb9a4613313ad11b4f8fefe7d3fa0130"
[[package]]
name = "jobserver"
version = "0.1.34"
@@ -1699,6 +1757,21 @@ version = "0.4.29"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5e5032e24019045c762d3c0f28f5b6b8bbf38563a65908389bf7978758920897"
[[package]]
name = "lru"
version = "0.12.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "234cf4f4a04dc1f57e24b96cc0cd600cf2af460d4161ac5ecdd0af8e1f3b2a38"
dependencies = [
"hashbrown 0.15.5",
]
[[package]]
name = "lru-slab"
version = "0.1.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "112b39cec0b298b6c1999fee3e31427f74f676e4cb9879ed1a121b43661a4154"
[[package]]
name = "lzma-rust2"
version = "0.15.7"
@@ -1746,6 +1819,63 @@ dependencies = [
"stable_deref_trait",
]
[[package]]
name = "midstreamer-attractor"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ab86df06cf1705ca37692b4fc0027868f92e5170a7ebb1d706302f04b6044f70"
dependencies = [
"midstreamer-temporal-compare",
"nalgebra",
"ndarray 0.16.1",
"serde",
"thiserror 2.0.18",
]
[[package]]
name = "midstreamer-quic"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "35ad2099588e987cdbedb039fdf8a56163a2f3dc1ff6bf5a39c63b9ce4e2248c"
dependencies = [
"futures",
"js-sys",
"quinn",
"rcgen",
"rustls 0.22.4",
"serde",
"thiserror 2.0.18",
"tokio",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
]
[[package]]
name = "midstreamer-scheduler"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9296b3f0a2b04e5c1a378ee7926e9f892895bface2ccebcfa407450c3aca269"
dependencies = [
"crossbeam",
"parking_lot",
"serde",
"thiserror 2.0.18",
"tokio",
]
[[package]]
name = "midstreamer-temporal-compare"
version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e1f935ba86c1632a3b5bc5e1cb56a308d4c5d2ec87c84db551c65f3e1001a642"
dependencies = [
"dashmap",
"lru",
"serde",
"thiserror 2.0.18",
]
[[package]]
name = "mime"
version = "0.3.17"
@@ -1819,6 +1949,33 @@ dependencies = [
"syn 2.0.117",
]
[[package]]
name = "nalgebra"
version = "0.33.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26aecdf64b707efd1310e3544d709c5c0ac61c13756046aaaba41be5c4f66a3b"
dependencies = [
"approx",
"matrixmultiply",
"nalgebra-macros",
"num-complex",
"num-rational",
"num-traits",
"simba",
"typenum",
]
[[package]]
name = "nalgebra-macros"
version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "254a5372af8fc138e36684761d3c0cdb758a4410e938babcff1c860ce14ddbfc"
dependencies = [
"proc-macro2",
"quote",
"syn 2.0.117",
]
[[package]]
name = "native-tls"
version = "0.2.18"
@@ -1955,6 +2112,17 @@ dependencies = [
"num-traits",
]
[[package]]
name = "num-rational"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824"
dependencies = [
"num-bigint",
"num-integer",
"num-traits",
]
[[package]]
name = "num-traits"
version = "0.2.19"
@@ -2147,6 +2315,16 @@ version = "0.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "4ec91767ecc0a0bbe558ce8c9da33c068066c57ecc8bb8477ef8c1ad3ef77c27"
[[package]]
name = "pem"
version = "3.0.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d30c53c26bc5b31a98cd02d20f25a7c8567146caf63ed593a9d87b2775291be"
dependencies = [
"base64",
"serde_core",
]
[[package]]
name = "pem-rfc7468"
version = "0.7.0"
@@ -2443,6 +2621,63 @@ version = "1.2.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
[[package]]
name = "quinn"
version = "0.11.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9e20a958963c291dc322d98411f541009df2ced7b5a4f2bd52337638cfccf20"
dependencies = [
"bytes",
"cfg_aliases",
"pin-project-lite",
"quinn-proto",
"quinn-udp",
"rustc-hash",
"rustls 0.23.37",
"socket2",
"thiserror 2.0.18",
"tokio",
"tracing",
"web-time",
]
[[package]]
name = "quinn-proto"
version = "0.11.13"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f1906b49b0c3bc04b5fe5d86a77925ae6524a19b816ae38ce1e426255f1d8a31"
dependencies = [
"bytes",
"fastbloom",
"getrandom 0.3.4",
"lru-slab",
"rand 0.9.2",
"ring",
"rustc-hash",
"rustls 0.23.37",
"rustls-pki-types",
"rustls-platform-verifier",
"slab",
"thiserror 2.0.18",
"tinyvec",
"tracing",
"web-time",
]
[[package]]
name = "quinn-udp"
version = "0.5.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "addec6a0dcad8a8d96a771f815f0eaf55f9d1805756410b39f5fa81332574cbd"
dependencies = [
"cfg_aliases",
"libc",
"once_cell",
"socket2",
"tracing",
"windows-sys 0.60.2",
]
[[package]]
name = "quote"
version = "1.0.44"
@@ -2590,6 +2825,18 @@ dependencies = [
"crossbeam-utils",
]
[[package]]
name = "rcgen"
version = "0.12.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "48406db8ac1f3cbc7dcdb56ec355343817958a356ff430259bb07baf7607e1e1"
dependencies = [
"pem",
"ring",
"time",
"yasna",
]
[[package]]
name = "reborrow"
version = "0.5.5"
@@ -2643,6 +2890,20 @@ dependencies = [
"bytecheck",
]
[[package]]
name = "ring"
version = "0.17.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4689e6c2294d81e88dc6261c768b63bc4fcdb852be6d1352498b114f61383b7"
dependencies = [
"cc",
"cfg-if",
"getrandom 0.2.17",
"libc",
"untrusted",
"windows-sys 0.52.0",
]
[[package]]
name = "rkyv"
version = "0.8.15"
@@ -2750,6 +3011,12 @@ dependencies = [
"smallvec",
]
[[package]]
name = "rustc-hash"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "357703d41365b4b27c590e3ed91eabb1b663f07c4c084095e60cbed4362dff0d"
[[package]]
name = "rustc_version"
version = "0.4.1"
@@ -2786,15 +3053,105 @@ dependencies = [
"windows-sys 0.61.2",
]
[[package]]
name = "rustls"
version = "0.22.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf4ef73721ac7bcd79b2b315da7779d8fc09718c6b3d2d1b2d94850eb8c18432"
dependencies = [
"log",
"ring",
"rustls-pki-types",
"rustls-webpki 0.102.8",
"subtle",
"zeroize",
]
[[package]]
name = "rustls"
version = "0.23.37"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "758025cb5fccfd3bc2fd74708fd4682be41d99e5dff73c377c0646c6012c73a4"
dependencies = [
"once_cell",
"ring",
"rustls-pki-types",
"rustls-webpki 0.103.9",
"subtle",
"zeroize",
]
[[package]]
name = "rustls-native-certs"
version = "0.8.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "612460d5f7bea540c490b2b6395d8e34a953e52b491accd6c86c8164c5932a63"
dependencies = [
"openssl-probe",
"rustls-pki-types",
"schannel",
"security-framework",
]
[[package]]
name = "rustls-pki-types"
version = "1.14.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "be040f8b0a225e40375822a563fa9524378b9d63112f53e19ffff34df5d33fdd"
dependencies = [
"web-time",
"zeroize",
]
[[package]]
name = "rustls-platform-verifier"
version = "0.6.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1d99feebc72bae7ab76ba994bb5e121b8d83d910ca40b36e0921f53becc41784"
dependencies = [
"core-foundation",
"core-foundation-sys",
"jni",
"log",
"once_cell",
"rustls 0.23.37",
"rustls-native-certs",
"rustls-platform-verifier-android",
"rustls-webpki 0.103.9",
"security-framework",
"security-framework-sys",
"webpki-root-certs",
"windows-sys 0.61.2",
]
[[package]]
name = "rustls-platform-verifier-android"
version = "0.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f87165f0995f63a9fbeea62b64d10b4d9d8e78ec6d7d51fb2125fda7bb36788f"
[[package]]
name = "rustls-webpki"
version = "0.102.8"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "64ca1bc8749bd4cf37b5ce386cc146580777b4e8572c7b97baf22c83f444bee9"
dependencies = [
"ring",
"rustls-pki-types",
"untrusted",
]
[[package]]
name = "rustls-webpki"
version = "0.103.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d7df23109aa6c1567d1c575b9952556388da57401e4ace1d15f79eedad0d8f53"
dependencies = [
"ring",
"rustls-pki-types",
"untrusted",
]
[[package]]
name = "rustversion"
version = "1.0.22"
@@ -2813,6 +3170,18 @@ dependencies = [
"wait-timeout",
]
[[package]]
name = "ruvector-attention"
version = "0.1.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ef4c2b4ef9db0d5a038c5cb8e9e91ffc11c789db660132d50165d2ba6a71d23f"
dependencies = [
"rand 0.8.5",
"rayon",
"serde",
"thiserror 1.0.69",
]
[[package]]
name = "ruvector-attention"
version = "2.0.4"
@@ -2859,6 +3228,38 @@ dependencies = [
"uuid",
]
[[package]]
name = "ruvector-crv"
version = "0.1.1"
dependencies = [
"ruvector-attention 0.1.32",
"ruvector-gnn",
"ruvector-mincut",
"serde",
"serde_json",
"thiserror 1.0.69",
]
[[package]]
name = "ruvector-gnn"
version = "2.0.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e17c1cf1ff3380026b299ff3c1ba3a5685c3d8d54700e6ab0b585b6cec21d7b"
dependencies = [
"anyhow",
"dashmap",
"libc",
"ndarray 0.16.1",
"parking_lot",
"rand 0.8.5",
"rand_distr 0.4.3",
"rayon",
"ruvector-core",
"serde",
"serde_json",
"thiserror 2.0.18",
]
[[package]]
name = "ruvector-mincut"
version = "2.0.4"
@@ -2908,6 +3309,15 @@ version = "1.0.23"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9774ba4a74de5f7b1c1451ed6cd5285a32eddb5cccb8cc655a4e50009e06477f"
[[package]]
name = "safe_arch"
version = "0.7.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "96b02de82ddbe1b636e6170c21be622223aea188ef2e139be0a5b219ec215323"
dependencies = [
"bytemuck",
]
[[package]]
name = "safetensors"
version = "0.3.3"
@@ -3120,6 +3530,19 @@ dependencies = [
"libc",
]
[[package]]
name = "simba"
version = "0.9.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c99284beb21666094ba2b75bbceda012e610f5479dfcc2d6e2426f53197ffd95"
dependencies = [
"approx",
"num-complex",
"num-traits",
"paste",
"wide",
]
[[package]]
name = "simd-adler32"
version = "0.3.8"
@@ -3132,6 +3555,12 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e3a9fe34e3e7a50316060351f37187a3f546bce95496156754b601a5fa71b76e"
[[package]]
name = "siphasher"
version = "1.0.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b2aa850e253778c88a04c3d7323b043aeda9d3e30d5971937c1855769763678e"
[[package]]
name = "slab"
version = "0.4.12"
@@ -3750,6 +4179,12 @@ version = "0.2.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ebc1c04c71510c7f702b52b7c350734c9ff1295c464a03335b00bb84fc54f853"
[[package]]
name = "untrusted"
version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1"
[[package]]
name = "unty"
version = "0.0.4"
@@ -4070,6 +4505,16 @@ dependencies = [
"rustls-pki-types",
]
[[package]]
name = "wide"
version = "0.7.33"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ce5da8ecb62bcd8ec8b7ea19f69a51275e91299be594ea5cc6ef7819e16cd03"
dependencies = [
"bytemuck",
"safe_arch",
]
[[package]]
name = "wifi-densepose-api"
version = "0.2.0"
@@ -4131,9 +4576,13 @@ dependencies = [
"byteorder",
"chrono",
"clap",
"criterion",
"midstreamer-quic",
"midstreamer-scheduler",
"serde",
"serde_json",
"thiserror 1.0.69",
"tokio",
"tracing",
]
@@ -4195,11 +4644,17 @@ dependencies = [
name = "wifi-densepose-ruvector"
version = "0.2.0"
dependencies = [
"ruvector-attention",
"approx",
"criterion",
"ruvector-attention 2.0.4",
"ruvector-attn-mincut",
"ruvector-crv",
"ruvector-gnn",
"ruvector-mincut",
"ruvector-solver",
"ruvector-temporal-tensor",
"serde",
"serde_json",
"thiserror 1.0.69",
]
@@ -4227,12 +4682,14 @@ version = "0.2.0"
dependencies = [
"chrono",
"criterion",
"midstreamer-attractor",
"midstreamer-temporal-compare",
"ndarray 0.15.6",
"num-complex",
"num-traits",
"proptest",
"rustfft",
"ruvector-attention",
"ruvector-attention 2.0.4",
"ruvector-attn-mincut",
"ruvector-mincut",
"ruvector-solver",
@@ -4260,7 +4717,7 @@ dependencies = [
"num-traits",
"petgraph",
"proptest",
"ruvector-attention",
"ruvector-attention 2.0.4",
"ruvector-attn-mincut",
"ruvector-mincut",
"ruvector-solver",
@@ -4410,6 +4867,24 @@ dependencies = [
"windows-link",
]
[[package]]
name = "windows-sys"
version = "0.45.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0"
dependencies = [
"windows-targets 0.42.2",
]
[[package]]
name = "windows-sys"
version = "0.52.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "282be5f36a8ce781fad8c8ae18fa3f9beff57ec1b52cb3de0789201425d9a33d"
dependencies = [
"windows-targets 0.52.6",
]
[[package]]
name = "windows-sys"
version = "0.59.0"
@@ -4437,6 +4912,21 @@ dependencies = [
"windows-link",
]
[[package]]
name = "windows-targets"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071"
dependencies = [
"windows_aarch64_gnullvm 0.42.2",
"windows_aarch64_msvc 0.42.2",
"windows_i686_gnu 0.42.2",
"windows_i686_msvc 0.42.2",
"windows_x86_64_gnu 0.42.2",
"windows_x86_64_gnullvm 0.42.2",
"windows_x86_64_msvc 0.42.2",
]
[[package]]
name = "windows-targets"
version = "0.52.6"
@@ -4470,6 +4960,12 @@ dependencies = [
"windows_x86_64_msvc 0.53.1",
]
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8"
[[package]]
name = "windows_aarch64_gnullvm"
version = "0.52.6"
@@ -4482,6 +4978,12 @@ version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a9d8416fa8b42f5c947f8482c43e7d89e73a173cead56d044f6a56104a6d1b53"
[[package]]
name = "windows_aarch64_msvc"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43"
[[package]]
name = "windows_aarch64_msvc"
version = "0.52.6"
@@ -4494,6 +4996,12 @@ version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b9d782e804c2f632e395708e99a94275910eb9100b2114651e04744e9b125006"
[[package]]
name = "windows_i686_gnu"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f"
[[package]]
name = "windows_i686_gnu"
version = "0.52.6"
@@ -4518,6 +5026,12 @@ version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fa7359d10048f68ab8b09fa71c3daccfb0e9b559aed648a8f95469c27057180c"
[[package]]
name = "windows_i686_msvc"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060"
[[package]]
name = "windows_i686_msvc"
version = "0.52.6"
@@ -4530,6 +5044,12 @@ version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1e7ac75179f18232fe9c285163565a57ef8d3c89254a30685b57d83a38d326c2"
[[package]]
name = "windows_x86_64_gnu"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36"
[[package]]
name = "windows_x86_64_gnu"
version = "0.52.6"
@@ -4542,6 +5062,12 @@ version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9c3842cdd74a865a8066ab39c8a7a473c0778a3f29370b5fd6b4b9aa7df4a499"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3"
[[package]]
name = "windows_x86_64_gnullvm"
version = "0.52.6"
@@ -4554,6 +5080,12 @@ version = "0.53.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0ffa179e2d07eee8ad8f57493436566c7cc30ac536a3379fdf008f47f6bb7ae1"
[[package]]
name = "windows_x86_64_msvc"
version = "0.42.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0"
[[package]]
name = "windows_x86_64_msvc"
version = "0.52.6"
@@ -4663,6 +5195,15 @@ dependencies = [
"wasmparser",
]
[[package]]
name = "yasna"
version = "0.5.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e17bb3549cc1321ae1296b9cdc2698e2b6cb1992adfa19a8c72e5b7a738f44cd"
dependencies = [
"time",
]
[[package]]
name = "yoke"
version = "0.7.5"

View File

@@ -103,12 +103,20 @@ proptest = "1.4"
mockall = "0.12"
wiremock = "0.5"
# ruvector integration (all at v2.0.4 — published on crates.io)
# midstreamer integration (published on crates.io)
midstreamer-quic = "0.1.0"
midstreamer-scheduler = "0.1.0"
midstreamer-temporal-compare = "0.1.0"
midstreamer-attractor = "0.1.0"
# ruvector integration (published on crates.io)
ruvector-mincut = "2.0.4"
ruvector-attn-mincut = "2.0.4"
ruvector-temporal-tensor = "2.0.4"
ruvector-solver = "2.0.4"
ruvector-attention = "2.0.4"
ruvector-crv = "0.1.1"
ruvector-gnn = { version = "2.0.5", default-features = false }
# Internal crates
@@ -123,6 +131,11 @@ wifi-densepose-wasm = { version = "0.2.0", path = "crates/wifi-densepose-wasm" }
wifi-densepose-mat = { version = "0.2.0", path = "crates/wifi-densepose-mat" }
wifi-densepose-ruvector = { version = "0.2.0", path = "crates/wifi-densepose-ruvector" }
# Patch ruvector-crv to fix RuvectorLayer::new() Result API mismatch
# with ruvector-gnn 2.0.5 (upstream ruvector-crv 0.1.1 was built against 2.0.1).
[patch.crates-io]
ruvector-crv = { path = "patches/ruvector-crv" }
[profile.release]
lto = true
codegen-units = 1

View File

@@ -16,4 +16,16 @@ ruvector-attn-mincut = { workspace = true }
ruvector-temporal-tensor = { workspace = true }
ruvector-solver = { workspace = true }
ruvector-attention = { workspace = true }
ruvector-crv = { workspace = true }
ruvector-gnn = { workspace = true }
thiserror = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
[dev-dependencies]
approx = "0.5"
criterion = { workspace = true }
[[bench]]
name = "crv_bench"
harness = false

View File

@@ -0,0 +1,405 @@
//! Benchmarks for CRV (Coordinate Remote Viewing) integration.
//!
//! Measures throughput of gestalt classification, sensory encoding,
//! full session pipelines, cross-session convergence, and embedding
//! dimension scaling using the `ruvector-crv` crate directly.
use criterion::{black_box, criterion_group, criterion_main, BenchmarkId, Criterion};
use ruvector_crv::{
CrvConfig, CrvSessionManager, GestaltType, SensoryModality, StageIData, StageIIData,
StageIIIData, StageIVData,
};
use ruvector_crv::types::{
GeometricKind, SketchElement, SpatialRelationType, SpatialRelationship,
};
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/// Build a synthetic CSI-like ideogram stroke with `n` subcarrier points.
///
/// Points trace one full sine period over x in [0, 1); y values are
/// rescaled into [0, 1]. Returns an empty vec when `n == 0`.
fn make_stroke(n: usize) -> Vec<(f32, f32)> {
    let mut stroke = Vec::with_capacity(n);
    for i in 0..n {
        let x = i as f32 / n as f32;
        let y = (x * std::f32::consts::TAU).sin() * 0.5 + 0.5;
        stroke.push((x, y));
    }
    stroke
}
/// Build a Stage I data frame representing a single CSI gestalt sample.
///
/// Uses a fixed 64-point stroke and a fixed descriptor/confidence so the
/// only varying input across benchmark frames is the gestalt label.
fn make_stage_i(gestalt: GestaltType) -> StageIData {
    let stroke = make_stroke(64);
    StageIData {
        stroke,
        spontaneous_descriptor: String::from("angular rising"),
        classification: gestalt,
        confidence: 0.85,
    }
}
/// Build a Stage II sensory data frame.
///
/// Five fixed modality impressions; `feature_vector` is left `None` so the
/// encoder derives features itself during the benchmark.
fn make_stage_ii() -> StageIIData {
    let impressions = vec![
        (SensoryModality::Texture, String::from("rough metallic")),
        (SensoryModality::Temperature, String::from("warm")),
        (SensoryModality::Color, String::from("silver-gray")),
        (SensoryModality::Luminosity, String::from("reflective")),
        (SensoryModality::Sound, String::from("low hum")),
    ];
    StageIIData {
        impressions,
        feature_vector: None,
    }
}
/// Build a Stage III spatial sketch.
///
/// Three geometric elements (tower, base, antenna) plus two "above"
/// relationships, built incrementally; element order matches the original
/// literal since downstream encoding may be order-sensitive.
fn make_stage_iii() -> StageIIIData {
    let mut sketch_elements = Vec::with_capacity(3);
    sketch_elements.push(SketchElement {
        label: String::from("tower"),
        kind: GeometricKind::Rectangle,
        position: (0.5, 0.8),
        scale: Some(3.0),
    });
    sketch_elements.push(SketchElement {
        label: String::from("base"),
        kind: GeometricKind::Rectangle,
        position: (0.5, 0.2),
        scale: Some(5.0),
    });
    sketch_elements.push(SketchElement {
        label: String::from("antenna"),
        kind: GeometricKind::Line,
        position: (0.5, 0.95),
        scale: Some(1.0),
    });
    let mut relationships = Vec::with_capacity(2);
    relationships.push(SpatialRelationship {
        from: String::from("tower"),
        to: String::from("base"),
        relation: SpatialRelationType::Above,
        strength: 0.9,
    });
    relationships.push(SpatialRelationship {
        from: String::from("antenna"),
        to: String::from("tower"),
        relation: SpatialRelationType::Above,
        strength: 0.85,
    });
    StageIIIData {
        sketch_elements,
        relationships,
    }
}
/// Build a Stage IV emotional / AOL data frame.
///
/// Fixed emotional weights plus tangible/intangible descriptors; AOL
/// detections start empty (the pipeline fills them during Stage IV).
fn make_stage_iv() -> StageIVData {
    let emotional_impact = vec![
        (String::from("awe"), 0.7),
        (String::from("curiosity"), 0.6),
        (String::from("unease"), 0.3),
    ];
    StageIVData {
        emotional_impact,
        tangibles: vec![String::from("metal structure"), String::from("concrete")],
        intangibles: vec![String::from("transmission"), String::from("power")],
        aol_detections: Vec::new(),
    }
}
/// Create a manager with one session pre-loaded with 4 stages of data.
///
/// `dims` sets the embedding dimensionality; the returned session id is
/// always `"bench-sess"`. Stage data is added in protocol order (I → IV).
fn populated_manager(dims: usize) -> (CrvSessionManager, String) {
    let mut mgr = CrvSessionManager::new(CrvConfig {
        dimensions: dims,
        ..CrvConfig::default()
    });
    let sid = String::from("bench-sess");
    mgr.create_session(sid.clone(), String::from("coord-001"))
        .unwrap();
    mgr.add_stage_i(&sid, &make_stage_i(GestaltType::Manmade))
        .unwrap();
    mgr.add_stage_ii(&sid, &make_stage_ii()).unwrap();
    mgr.add_stage_iii(&sid, &make_stage_iii()).unwrap();
    mgr.add_stage_iv(&sid, &make_stage_iv()).unwrap();
    (mgr, sid)
}
// ---------------------------------------------------------------------------
// Benchmarks
// ---------------------------------------------------------------------------
/// Benchmark: classify a single CSI frame through Stage I (64 subcarriers).
///
/// The manager and frame are built once outside the timed closure; each
/// iteration appends the same frame to the long-lived session, so timings
/// include whatever per-append session growth `add_stage_i` incurs.
fn gestalt_classify_single(c: &mut Criterion) {
    let mut manager = CrvSessionManager::new(CrvConfig {
        dimensions: 64,
        ..CrvConfig::default()
    });
    manager
        .create_session("gc-single".to_string(), "coord-gc".to_string())
        .unwrap();
    let frame = make_stage_i(GestaltType::Manmade);
    c.bench_function("gestalt_classify_single", |b| {
        b.iter(|| {
            manager
                .add_stage_i("gc-single", black_box(&frame))
                .unwrap();
        })
    });
}
/// Benchmark: classify a batch of 100 CSI frames through Stage I.
///
/// A fresh manager/session is created inside the timed closure because
/// `add_stage_i` accumulates session state; reusing one session would make
/// later iterations measure ever-growing sessions instead of a fixed batch.
fn gestalt_classify_batch(c: &mut Criterion) {
    // Pre-build the 100 input frames outside the timed loop, cycling
    // through every gestalt type so the classifier sees mixed input.
    let gestalts = GestaltType::all();
    let frames: Vec<StageIData> = (0..100)
        .map(|i| make_stage_i(gestalts[i % gestalts.len()]))
        .collect();
    c.bench_function("gestalt_classify_batch_100", |b| {
        b.iter(|| {
            let mut manager = CrvSessionManager::new(CrvConfig {
                dimensions: 64,
                ..CrvConfig::default()
            });
            manager
                .create_session("gc-batch".to_string(), "coord-gcb".to_string())
                .unwrap();
            for frame in black_box(&frames) {
                manager.add_stage_i("gc-batch", frame).unwrap();
            }
        })
    });
}
/// Benchmark: extract sensory features from a single CSI frame (Stage II).
///
/// Mirrors `gestalt_classify_single`: one long-lived session, the same
/// Stage II frame appended on every iteration.
fn sensory_encode_single(c: &mut Criterion) {
    let mut manager = CrvSessionManager::new(CrvConfig {
        dimensions: 64,
        ..CrvConfig::default()
    });
    manager
        .create_session("se-single".to_string(), "coord-se".to_string())
        .unwrap();
    let frame = make_stage_ii();
    c.bench_function("sensory_encode_single", |b| {
        b.iter(|| {
            manager
                .add_stage_ii("se-single", black_box(&frame))
                .unwrap();
        })
    });
}
/// Benchmark: full session pipeline -- create session, add 10 mixed-stage
/// frames, run Stage V interrogation, and run Stage VI partitioning.
fn pipeline_full_session(c: &mut Criterion) {
    // Stage inputs are built once outside the timed loop so the benchmark
    // measures pipeline cost, not fixture construction.
    let stage_i_data = make_stage_i(GestaltType::Manmade);
    let stage_ii_data = make_stage_ii();
    let stage_iii_data = make_stage_iii();
    let stage_iv_data = make_stage_iv();
    c.bench_function("pipeline_full_session", |b| {
        // Monotonic counter gives each iteration a unique session id,
        // so repeated create_session calls never collide.
        let mut counter = 0u64;
        b.iter(|| {
            counter += 1;
            let config = CrvConfig {
                dimensions: 64,
                ..CrvConfig::default()
            };
            let mut manager = CrvSessionManager::new(config);
            let sid = format!("pfs-{}", counter);
            manager
                .create_session(sid.clone(), "coord-pfs".to_string())
                .unwrap();
            // 10 frames across stages I-IV (3 + 3 + 2 + 2), added in
            // protocol order.
            for _ in 0..3 {
                manager
                    .add_stage_i(&sid, black_box(&stage_i_data))
                    .unwrap();
            }
            for _ in 0..3 {
                manager
                    .add_stage_ii(&sid, black_box(&stage_ii_data))
                    .unwrap();
            }
            for _ in 0..2 {
                manager
                    .add_stage_iii(&sid, black_box(&stage_iii_data))
                    .unwrap();
            }
            for _ in 0..2 {
                manager
                    .add_stage_iv(&sid, black_box(&stage_iv_data))
                    .unwrap();
            }
            // Stage V: interrogate with a probe embedding.
            // Each probe is (text, u8, embedding); the embedding length
            // matches `dimensions` above. NOTE(review): the u8 appears to be
            // a probe-kind discriminant — confirm against ruvector-crv docs.
            let probe_emb = vec![0.1f32; 64];
            let probes: Vec<(&str, u8, Vec<f32>)> = vec![
                ("structure query", 1, probe_emb.clone()),
                ("texture query", 2, probe_emb.clone()),
            ];
            // Results are intentionally discarded: only throughput matters,
            // and Stage V/VI may legitimately return Err on sparse sessions.
            let _ = manager.run_stage_v(&sid, &probes, 3);
            // Stage VI: partition
            let _ = manager.run_stage_vi(&sid);
        })
    });
}
/// Benchmark: cross-session convergence analysis with 2 independent
/// sessions of 10 frames each, targeting the same coordinate.
fn convergence_two_sessions(c: &mut Criterion) {
    // Three gestalt labels are cycled to give each session varied Stage I
    // input; session B is phase-shifted by one so the two sessions are
    // similar but not identical.
    let gestalts = [GestaltType::Manmade, GestaltType::Natural, GestaltType::Energy];
    let stage_ii_data = make_stage_ii();
    c.bench_function("convergence_two_sessions", |b| {
        // Unique coordinate and session ids per iteration so repeated
        // create_session / find_convergence calls never collide.
        let mut counter = 0u64;
        b.iter(|| {
            counter += 1;
            let config = CrvConfig {
                dimensions: 64,
                convergence_threshold: 0.5,
                ..CrvConfig::default()
            };
            let mut manager = CrvSessionManager::new(config);
            let coord = format!("conv-coord-{}", counter);
            // Session A: 10 frames
            let sid_a = format!("viewer-a-{}", counter);
            manager
                .create_session(sid_a.clone(), coord.clone())
                .unwrap();
            for i in 0..5 {
                let data = make_stage_i(gestalts[i % gestalts.len()]);
                manager.add_stage_i(&sid_a, black_box(&data)).unwrap();
            }
            for _ in 0..5 {
                manager
                    .add_stage_ii(&sid_a, black_box(&stage_ii_data))
                    .unwrap();
            }
            // Session B: 10 frames (similar but not identical)
            let sid_b = format!("viewer-b-{}", counter);
            manager
                .create_session(sid_b.clone(), coord.clone())
                .unwrap();
            for i in 0..5 {
                let data = make_stage_i(gestalts[(i + 1) % gestalts.len()]);
                manager.add_stage_i(&sid_b, black_box(&data)).unwrap();
            }
            for _ in 0..5 {
                manager
                    .add_stage_ii(&sid_b, black_box(&stage_ii_data))
                    .unwrap();
            }
            // Convergence analysis across both sessions at threshold 0.5;
            // result discarded — only analysis throughput is measured.
            let _ = manager.find_convergence(&coord, black_box(0.5));
        })
    });
}
/// Benchmark: session creation overhead alone.
///
/// Builds a fresh 32-dimension manager and registers one session per
/// iteration; no stage data is added, isolating setup cost.
fn crv_session_create(c: &mut Criterion) {
    c.bench_function("crv_session_create", |b| {
        b.iter(|| {
            let cfg = CrvConfig {
                dimensions: 32,
                ..CrvConfig::default()
            };
            let mut mgr = CrvSessionManager::new(black_box(cfg));
            mgr.create_session(
                black_box("sess-1".to_string()),
                black_box("coord-1".to_string()),
            )
            .unwrap();
        })
    });
}
/// Benchmark: embedding dimension scaling (32, 128, 384).
///
/// Measures Stage I + Stage II encode time across different embedding
/// dimensions to characterize how cost grows with dimensionality.
fn crv_embedding_dimension_scaling(c: &mut Criterion) {
    let stage_i_data = make_stage_i(GestaltType::Manmade);
    let stage_ii_data = make_stage_ii();
    let mut group = c.benchmark_group("crv_embedding_dimension_scaling");
    for dims in [32, 128, 384] {
        group.bench_with_input(BenchmarkId::from_parameter(dims), &dims, |b, &dims| {
            // Unique session id per iteration avoids duplicate-id errors.
            let mut counter = 0u64;
            b.iter(|| {
                counter += 1;
                let config = CrvConfig {
                    dimensions: dims,
                    ..CrvConfig::default()
                };
                let mut manager = CrvSessionManager::new(config);
                let sid = format!("dim-{}-{}", dims, counter);
                manager
                    .create_session(sid.clone(), "coord-dim".to_string())
                    .unwrap();
                // Encode one Stage I + one Stage II at this dimensionality
                let emb_i = manager
                    .add_stage_i(&sid, black_box(&stage_i_data))
                    .unwrap();
                let emb_ii = manager
                    .add_stage_ii(&sid, black_box(&stage_ii_data))
                    .unwrap();
                // Sanity check inside the bench: the returned embeddings must
                // honor the requested dimensionality, otherwise the scaling
                // numbers would be comparing unequal work.
                assert_eq!(emb_i.len(), dims);
                assert_eq!(emb_ii.len(), dims);
            })
        });
    }
    group.finish();
}
/// Benchmark: Stage VI partitioning on a pre-populated session
/// (4 stages of accumulated data).
///
/// NOTE: per-iteration setup (rebuilding the populated manager) is inside
/// the timed closure because `run_stage_vi` mutates the session; the
/// reported time therefore includes fixture construction.
fn crv_stage_vi_partition(c: &mut Criterion) {
    c.bench_function("crv_stage_vi_partition", |b| {
        b.iter(|| {
            // Re-create the populated manager each iteration because
            // run_stage_vi mutates the session (appends an entry).
            let (mut mgr, sid) = populated_manager(64);
            // Result discarded: only partitioning throughput is measured.
            let _ = mgr.run_stage_vi(black_box(&sid));
        })
    });
}
// ---------------------------------------------------------------------------
// Criterion groups
// ---------------------------------------------------------------------------
// Registers every benchmark in this file under a single `benches` group and
// generates the benchmark harness entry point.
criterion_group!(
    benches,
    gestalt_classify_single,
    gestalt_classify_batch,
    sensory_encode_single,
    pipeline_full_session,
    convergence_two_sessions,
    crv_session_create,
    crv_embedding_dimension_scaling,
    crv_stage_vi_partition,
);
criterion_main!(benches);

File diff suppressed because it is too large Load Diff

View File

@@ -26,6 +26,7 @@
#![warn(missing_docs)]
/// ADR-033: CRV signal-line integration — six-stage pipeline mapping
/// WiFi CSI sensing onto the CRV methodology.
pub mod crv;
pub mod mat;
pub mod signal;
pub mod viewpoint;

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,23 @@
[package]
name = "ruvector-crv"
version = "0.1.1"
edition = "2021"
authors = ["ruvector contributors"]
description = "CRV (Coordinate Remote Viewing) protocol integration for ruvector - maps 6-stage signal line methodology to vector database subsystems"
license = "MIT OR Apache-2.0"
repository = "https://github.com/ruvnet/ruvector"

[lib]
name = "ruvector_crv"
path = "src/lib.rs"

[dependencies]
# Hyperbolic (Poincaré ball) and attention primitives used by Stages I-II.
ruvector-attention = "0.1.31"
# Graph encoding for Stage III; default features disabled to keep the
# dependency surface minimal.
ruvector-gnn = { version = "2.0", default-features = false }
# Stage VI partitioning; the "exact" feature selects the exact MinCut solver.
ruvector-mincut = { version = "2.0", default-features = false, features = ["exact"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
thiserror = "1.0"

[dev-dependencies]
approx = "0.5"

View File

@@ -0,0 +1,28 @@
[package]
name = "ruvector-crv"
version = "0.1.1"
edition = "2021"
authors = ["ruvector contributors"]
description = "CRV (Coordinate Remote Viewing) protocol integration for ruvector - maps 6-stage signal line methodology to vector database subsystems"
license = "MIT OR Apache-2.0"
repository = "https://github.com/ruvnet/ruvector"
readme = "README.md"
keywords = ["crv", "signal-line", "vector-search", "attention", "hyperbolic"]
categories = ["algorithms", "science"]

[lib]
# Plain Rust library only; no cdylib/staticlib artifacts are produced.
crate-type = ["rlib"]

[features]
default = []

[dependencies]
# Path dependencies resolve against sibling workspace crates during local
# builds; the version requirement still governs a published build.
ruvector-attention = { version = "0.1.31", path = "../ruvector-attention" }
ruvector-gnn = { version = "2.0.1", path = "../ruvector-gnn", default-features = false }
ruvector-mincut = { version = "2.0.1", path = "../ruvector-mincut", default-features = false, features = ["exact"] }
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
thiserror = "1.0"

[dev-dependencies]
approx = "0.5"

View File

@@ -0,0 +1,68 @@
# ruvector-crv
CRV (Coordinate Remote Viewing) protocol integration for ruvector.
Maps the 6-stage CRV signal line methodology to ruvector's subsystems:
| CRV Stage | Data Type | ruvector Component |
|-----------|-----------|-------------------|
| Stage I (Ideograms) | Gestalt primitives | Poincaré ball hyperbolic embeddings |
| Stage II (Sensory) | Textures, colors, temps | Multi-head attention vectors |
| Stage III (Dimensional) | Spatial sketches | GNN graph topology |
| Stage IV (Emotional) | AOL, intangibles | SNN temporal encoding |
| Stage V (Interrogation) | Signal line probing | Differentiable search |
| Stage VI (3D Model) | Composite model | MinCut partitioning |
## Quick Start
```rust
use ruvector_crv::{CrvConfig, CrvSessionManager, GestaltType, StageIData};
// Create session manager with default config (384 dimensions)
let config = CrvConfig::default();
let mut manager = CrvSessionManager::new(config);
// Create a session for a target coordinate
manager.create_session("session-001".to_string(), "1234-5678".to_string()).unwrap();
// Add Stage I ideogram data
let stage_i = StageIData {
stroke: vec![(0.0, 0.0), (1.0, 0.5), (2.0, 1.0), (3.0, 0.5)],
spontaneous_descriptor: "angular rising".to_string(),
classification: GestaltType::Manmade,
confidence: 0.85,
};
let embedding = manager.add_stage_i("session-001", &stage_i).unwrap();
assert_eq!(embedding.len(), 384);
```
## Architecture
The Poincaré ball embedding for Stage I gestalts encodes the hierarchical
gestalt taxonomy (root → manmade/natural/movement/energy/water/land) with
exponentially less distortion than Euclidean space.
For AOL (Analytical Overlay) separation, the spiking neural network temporal
encoding models signal-vs-noise discrimination: high-frequency spike bursts
correlate with AOL contamination, while sustained low-frequency patterns
indicate clean signal line data.
MinCut partitioning in Stage VI identifies natural cluster boundaries in the
accumulated session graph, separating distinct target aspects.
## Cross-Session Convergence
Multiple sessions targeting the same coordinate can be analyzed for
convergence — agreement between independent viewers strengthens the
signal validity:
```rust
// After adding data to multiple sessions for "1234-5678"...
let convergence = manager.find_convergence("1234-5678", 0.75).unwrap();
// convergence.scores contains similarity values for converging entries
```
## License
MIT OR Apache-2.0

View File

@@ -0,0 +1,38 @@
//! Error types for the CRV protocol integration.
use thiserror::Error;
/// CRV-specific errors.
///
/// Every fallible operation in this crate returns these via [`CrvResult`].
#[derive(Debug, Error)]
pub enum CrvError {
    /// Dimension mismatch between expected and actual vector sizes.
    #[error("Dimension mismatch: expected {expected}, got {actual}")]
    DimensionMismatch {
        /// Dimensionality the operation required.
        expected: usize,
        /// Dimensionality that was actually supplied.
        actual: usize,
    },
    /// Invalid CRV stage number.
    #[error("Invalid stage: {0} (must be 1-6)")]
    InvalidStage(u8),
    /// Empty input data (e.g. an empty stroke trace, or too few sessions
    /// for convergence analysis).
    #[error("Empty input: {0}")]
    EmptyInput(String),
    /// Session not found.
    #[error("Session not found: {0}")]
    SessionNotFound(String),
    /// Encoding failure (also raised for duplicate session ids).
    #[error("Encoding error: {0}")]
    EncodingError(String),
    /// Attention mechanism error, auto-converted from the attention crate.
    #[error("Attention error: {0}")]
    AttentionError(#[from] ruvector_attention::AttentionError),
    /// Serialization error, auto-converted from `serde_json`.
    #[error("Serialization error: {0}")]
    SerializationError(#[from] serde_json::Error),
}
/// Result type alias for CRV operations.
pub type CrvResult<T> = Result<T, CrvError>;

View File

@@ -0,0 +1,178 @@
//! # ruvector-crv
//!
//! CRV (Coordinate Remote Viewing) protocol integration for ruvector.
//!
//! Maps the 6-stage CRV signal line methodology to ruvector's subsystems:
//!
//! | CRV Stage | Data Type | ruvector Component |
//! |-----------|-----------|-------------------|
//! | Stage I (Ideograms) | Gestalt primitives | Poincaré ball hyperbolic embeddings |
//! | Stage II (Sensory) | Textures, colors, temps | Multi-head attention vectors |
//! | Stage III (Dimensional) | Spatial sketches | GNN graph topology |
//! | Stage IV (Emotional) | AOL, intangibles | SNN temporal encoding |
//! | Stage V (Interrogation) | Signal line probing | Differentiable search |
//! | Stage VI (3D Model) | Composite model | MinCut partitioning |
//!
//! ## Quick Start
//!
//! ```rust,no_run
//! use ruvector_crv::{CrvConfig, CrvSessionManager, GestaltType, StageIData};
//!
//! // Create session manager with default config (384 dimensions)
//! let config = CrvConfig::default();
//! let mut manager = CrvSessionManager::new(config);
//!
//! // Create a session for a target coordinate
//! manager.create_session("session-001".to_string(), "1234-5678".to_string()).unwrap();
//!
//! // Add Stage I ideogram data
//! let stage_i = StageIData {
//! stroke: vec![(0.0, 0.0), (1.0, 0.5), (2.0, 1.0), (3.0, 0.5)],
//! spontaneous_descriptor: "angular rising".to_string(),
//! classification: GestaltType::Manmade,
//! confidence: 0.85,
//! };
//!
//! let embedding = manager.add_stage_i("session-001", &stage_i).unwrap();
//! assert_eq!(embedding.len(), 384);
//! ```
//!
//! ## Architecture
//!
//! The Poincaré ball embedding for Stage I gestalts encodes the hierarchical
//! gestalt taxonomy (root → manmade/natural/movement/energy/water/land) with
//! exponentially less distortion than Euclidean space.
//!
//! For AOL (Analytical Overlay) separation, the spiking neural network temporal
//! encoding models signal-vs-noise discrimination: high-frequency spike bursts
//! correlate with AOL contamination, while sustained low-frequency patterns
//! indicate clean signal line data.
//!
//! MinCut partitioning in Stage VI identifies natural cluster boundaries in the
//! accumulated session graph, separating distinct target aspects.
//!
//! ## Cross-Session Convergence
//!
//! Multiple sessions targeting the same coordinate can be analyzed for
//! convergence — agreement between independent viewers strengthens the
//! signal validity:
//!
//! ```rust,no_run
//! # use ruvector_crv::{CrvConfig, CrvSessionManager};
//! # let mut manager = CrvSessionManager::new(CrvConfig::default());
//! // After adding data to multiple sessions for "1234-5678"...
//! let convergence = manager.find_convergence("1234-5678", 0.75).unwrap();
//! // convergence.scores contains similarity values for converging entries
//! ```
/// Shared error and result types ([`CrvError`], [`CrvResult`]).
pub mod error;
/// Session management and cross-session convergence analysis.
pub mod session;
/// Stage I: ideogram gestalts via Poincaré-ball hyperbolic embeddings.
pub mod stage_i;
/// Stage II: sensory impressions via multi-head attention vectors.
pub mod stage_ii;
/// Stage III: dimensional/spatial sketches via GNN graph topology.
pub mod stage_iii;
/// Stage IV: emotional/intangible data via SNN temporal encoding (AOL).
pub mod stage_iv;
/// Stage V: signal-line interrogation via differentiable search.
pub mod stage_v;
/// Stage VI: composite target model via MinCut partitioning.
pub mod stage_vi;
/// Core data types shared by all stages.
pub mod types;

// Re-export main types so callers can use `ruvector_crv::X` directly.
pub use error::{CrvError, CrvResult};
pub use session::CrvSessionManager;
pub use stage_i::StageIEncoder;
pub use stage_ii::StageIIEncoder;
pub use stage_iii::StageIIIEncoder;
pub use stage_iv::StageIVEncoder;
pub use stage_v::StageVEngine;
pub use stage_vi::StageVIModeler;
pub use types::{
    AOLDetection, ConvergenceResult, CrossReference, CrvConfig, CrvSessionEntry,
    GeometricKind, GestaltType, SensoryModality, SignalLineProbe, SketchElement,
    SpatialRelationType, SpatialRelationship, StageIData, StageIIData, StageIIIData,
    StageIVData, StageVData, StageVIData, TargetPartition,
};

/// Library version (taken from Cargo.toml at compile time).
pub const VERSION: &str = env!("CARGO_PKG_VERSION");
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_version() {
        assert!(!VERSION.is_empty());
    }

    /// Drives two independent viewer sessions against one coordinate and
    /// checks that every recorded embedding has the configured width.
    #[test]
    fn test_end_to_end_session() {
        let mut manager = CrvSessionManager::new(CrvConfig {
            dimensions: 32,
            ..CrvConfig::default()
        });
        // Two sessions targeting the same coordinate.
        for viewer in ["viewer-a", "viewer-b"] {
            manager
                .create_session(viewer.to_string(), "target-001".to_string())
                .unwrap();
        }

        // Stage I: similar gestalts, distinct strokes.
        let ideogram = |pts: &[(f32, f32)], desc: &str, conf: f32| StageIData {
            stroke: pts.to_vec(),
            spontaneous_descriptor: desc.to_string(),
            classification: GestaltType::Manmade,
            confidence: conf,
        };
        manager
            .add_stage_i(
                "viewer-a",
                &ideogram(
                    &[(0.0, 0.0), (1.0, 1.0), (2.0, 0.5), (3.0, 0.0)],
                    "tall angular",
                    0.85,
                ),
            )
            .unwrap();
        manager
            .add_stage_i(
                "viewer-b",
                &ideogram(
                    &[(0.0, 0.0), (0.5, 1.2), (1.5, 0.8), (2.5, 0.0)],
                    "structured upward",
                    0.78,
                ),
            )
            .unwrap();

        // Stage II: overlapping sensory impressions.
        let sensory = |impressions: Vec<(SensoryModality, &str)>| StageIIData {
            impressions: impressions
                .into_iter()
                .map(|(modality, text)| (modality, text.to_string()))
                .collect(),
            feature_vector: None,
        };
        manager
            .add_stage_ii(
                "viewer-a",
                &sensory(vec![
                    (SensoryModality::Texture, "rough stone"),
                    (SensoryModality::Temperature, "cool"),
                    (SensoryModality::Color, "gray"),
                ]),
            )
            .unwrap();
        manager
            .add_stage_ii(
                "viewer-b",
                &sensory(vec![
                    (SensoryModality::Texture, "grainy rough"),
                    (SensoryModality::Color, "dark gray"),
                    (SensoryModality::Luminosity, "dim"),
                ]),
            )
            .unwrap();

        // Each session recorded one Stage I and one Stage II entry.
        assert_eq!(manager.session_entry_count("viewer-a"), 2);
        assert_eq!(manager.session_entry_count("viewer-b"), 2);

        let entries_a = manager.get_session_embeddings("viewer-a").unwrap();
        let entries_b = manager.get_session_embeddings("viewer-b").unwrap();
        assert_eq!(entries_a.len(), 2);
        assert_eq!(entries_b.len(), 2);
        // Every embedding matches the configured dimensionality.
        for entry in entries_a.iter().chain(entries_b.iter()) {
            assert_eq!(entry.embedding.len(), 32);
        }
    }
}

View File

@@ -0,0 +1,629 @@
//! CRV Session Manager
//!
//! Manages CRV sessions as directed acyclic graphs (DAGs), where each session
//! progresses through stages I-VI. Provides cross-session convergence analysis
//! to find agreement between multiple viewers targeting the same coordinate.
//!
//! # Architecture
//!
//! Each session is a DAG of stage entries. Cross-session convergence is computed
//! by finding entries with high embedding similarity across different sessions
//! targeting the same coordinate.
use crate::error::{CrvError, CrvResult};
use crate::stage_i::StageIEncoder;
use crate::stage_ii::StageIIEncoder;
use crate::stage_iii::StageIIIEncoder;
use crate::stage_iv::StageIVEncoder;
use crate::stage_v::StageVEngine;
use crate::stage_vi::StageVIModeler;
use crate::types::*;
use ruvector_gnn::search::cosine_similarity;
use std::collections::HashMap;
use std::time::{SystemTime, UNIX_EPOCH};
/// A session entry stored in the session graph.
///
/// One entry is appended per stage-encoding call; entries keep their
/// insertion order and carry a per-stage index.
#[derive(Debug, Clone)]
struct SessionEntry {
    /// The stage data embedding (expected to match the configured
    /// `dimensions`).
    embedding: Vec<f32>,
    /// Stage number (1-6).
    stage: u8,
    /// Zero-based index of this entry among entries of the same stage.
    entry_index: usize,
    /// Free-form metadata attached at insertion time.
    metadata: HashMap<String, serde_json::Value>,
    /// Creation timestamp in milliseconds since the Unix epoch
    /// (0 when unavailable).
    timestamp_ms: u64,
}
/// A complete CRV session with all stage data.
#[derive(Debug)]
struct Session {
    /// Session identifier (unique within the manager).
    id: SessionId,
    /// Target coordinate this session is viewing.
    coordinate: TargetCoordinate,
    /// Stage entries in insertion order; stages may interleave.
    entries: Vec<SessionEntry>,
}
/// CRV Session Manager: coordinates all stage encoders and manages sessions.
///
/// One encoder/engine per CRV stage is constructed from the shared
/// [`CrvConfig`] and reused across every session the manager hosts.
#[derive(Debug)]
pub struct CrvSessionManager {
    /// Configuration shared by all stage encoders.
    config: CrvConfig,
    /// Stage I encoder (ideogram gestalts).
    stage_i: StageIEncoder,
    /// Stage II encoder (sensory impressions).
    stage_ii: StageIIEncoder,
    /// Stage III encoder (dimensional sketches).
    stage_iii: StageIIIEncoder,
    /// Stage IV encoder (emotional/AOL data).
    stage_iv: StageIVEncoder,
    /// Stage V engine (signal-line interrogation).
    stage_v: StageVEngine,
    /// Stage VI modeler (composite partitioning).
    stage_vi: StageVIModeler,
    /// Active sessions indexed by session ID.
    sessions: HashMap<SessionId, Session>,
}
impl CrvSessionManager {
/// Create a new session manager with the given configuration.
pub fn new(config: CrvConfig) -> Self {
let stage_i = StageIEncoder::new(&config);
let stage_ii = StageIIEncoder::new(&config);
let stage_iii = StageIIIEncoder::new(&config);
let stage_iv = StageIVEncoder::new(&config);
let stage_v = StageVEngine::new(&config);
let stage_vi = StageVIModeler::new(&config);
Self {
config,
stage_i,
stage_ii,
stage_iii,
stage_iv,
stage_v,
stage_vi,
sessions: HashMap::new(),
}
}
/// Create a new session for a given target coordinate.
pub fn create_session(
&mut self,
session_id: SessionId,
coordinate: TargetCoordinate,
) -> CrvResult<()> {
if self.sessions.contains_key(&session_id) {
return Err(CrvError::EncodingError(format!(
"Session {} already exists",
session_id
)));
}
self.sessions.insert(
session_id.clone(),
Session {
id: session_id,
coordinate,
entries: Vec::new(),
},
);
Ok(())
}
/// Add Stage I data to a session.
pub fn add_stage_i(
&mut self,
session_id: &str,
data: &StageIData,
) -> CrvResult<Vec<f32>> {
let embedding = self.stage_i.encode(data)?;
self.add_entry(session_id, 1, embedding.clone(), HashMap::new())?;
Ok(embedding)
}
/// Add Stage II data to a session.
pub fn add_stage_ii(
&mut self,
session_id: &str,
data: &StageIIData,
) -> CrvResult<Vec<f32>> {
let embedding = self.stage_ii.encode(data)?;
self.add_entry(session_id, 2, embedding.clone(), HashMap::new())?;
Ok(embedding)
}
/// Add Stage III data to a session.
pub fn add_stage_iii(
&mut self,
session_id: &str,
data: &StageIIIData,
) -> CrvResult<Vec<f32>> {
let embedding = self.stage_iii.encode(data)?;
self.add_entry(session_id, 3, embedding.clone(), HashMap::new())?;
Ok(embedding)
}
/// Add Stage IV data to a session.
pub fn add_stage_iv(
&mut self,
session_id: &str,
data: &StageIVData,
) -> CrvResult<Vec<f32>> {
let embedding = self.stage_iv.encode(data)?;
self.add_entry(session_id, 4, embedding.clone(), HashMap::new())?;
Ok(embedding)
}
/// Run Stage V interrogation on a session.
///
/// Probes the accumulated session data with specified queries.
pub fn run_stage_v(
&mut self,
session_id: &str,
probe_queries: &[(&str, u8, Vec<f32>)], // (query text, target stage, query embedding)
k: usize,
) -> CrvResult<StageVData> {
let session = self
.sessions
.get(session_id)
.ok_or_else(|| CrvError::SessionNotFound(session_id.to_string()))?;
let all_embeddings: Vec<Vec<f32>> =
session.entries.iter().map(|e| e.embedding.clone()).collect();
let mut probes = Vec::new();
let mut cross_refs = Vec::new();
for (query_text, target_stage, query_emb) in probe_queries {
// Filter candidates to the target stage
let stage_entries: Vec<Vec<f32>> = session
.entries
.iter()
.filter(|e| e.stage == *target_stage)
.map(|e| e.embedding.clone())
.collect();
if stage_entries.is_empty() {
continue;
}
let mut probe = self.stage_v.probe(query_emb, &stage_entries, k)?;
probe.query = query_text.to_string();
probe.target_stage = *target_stage;
probes.push(probe);
}
// Cross-reference between all stage pairs
for from_stage in 1..=4u8 {
for to_stage in (from_stage + 1)..=4u8 {
let from_entries: Vec<Vec<f32>> = session
.entries
.iter()
.filter(|e| e.stage == from_stage)
.map(|e| e.embedding.clone())
.collect();
let to_entries: Vec<Vec<f32>> = session
.entries
.iter()
.filter(|e| e.stage == to_stage)
.map(|e| e.embedding.clone())
.collect();
if !from_entries.is_empty() && !to_entries.is_empty() {
let refs = self.stage_v.cross_reference(
from_stage,
&from_entries,
to_stage,
&to_entries,
self.config.convergence_threshold,
);
cross_refs.extend(refs);
}
}
}
let stage_v_data = StageVData {
probes,
cross_references: cross_refs,
};
// Encode Stage V result and add to session
if !stage_v_data.probes.is_empty() {
let embedding = self.stage_v.encode(&stage_v_data, &all_embeddings)?;
self.add_entry(session_id, 5, embedding, HashMap::new())?;
}
Ok(stage_v_data)
}
/// Run Stage VI composite modeling on a session.
pub fn run_stage_vi(&mut self, session_id: &str) -> CrvResult<StageVIData> {
let session = self
.sessions
.get(session_id)
.ok_or_else(|| CrvError::SessionNotFound(session_id.to_string()))?;
let embeddings: Vec<Vec<f32>> =
session.entries.iter().map(|e| e.embedding.clone()).collect();
let labels: Vec<(u8, usize)> = session
.entries
.iter()
.map(|e| (e.stage, e.entry_index))
.collect();
let stage_vi_data = self.stage_vi.partition(&embeddings, &labels)?;
// Encode Stage VI result and add to session
let embedding = self.stage_vi.encode(&stage_vi_data)?;
self.add_entry(session_id, 6, embedding, HashMap::new())?;
Ok(stage_vi_data)
}
/// Find convergence across multiple sessions targeting the same coordinate.
///
/// This is the core multi-viewer matching operation: given sessions from
/// different viewers targeting the same coordinate, find which aspects
/// of their signal line data converge (agree).
pub fn find_convergence(
&self,
coordinate: &str,
min_similarity: f32,
) -> CrvResult<ConvergenceResult> {
// Collect all sessions for this coordinate
let relevant_sessions: Vec<&Session> = self
.sessions
.values()
.filter(|s| s.coordinate == coordinate)
.collect();
if relevant_sessions.len() < 2 {
return Err(CrvError::EmptyInput(
"Need at least 2 sessions for convergence analysis".to_string(),
));
}
let mut session_pairs = Vec::new();
let mut scores = Vec::new();
let mut convergent_stages = Vec::new();
// Compare all pairs of sessions
for i in 0..relevant_sessions.len() {
for j in (i + 1)..relevant_sessions.len() {
let sess_a = relevant_sessions[i];
let sess_b = relevant_sessions[j];
// Compare stage-by-stage
for stage in 1..=6u8 {
let entries_a: Vec<&[f32]> = sess_a
.entries
.iter()
.filter(|e| e.stage == stage)
.map(|e| e.embedding.as_slice())
.collect();
let entries_b: Vec<&[f32]> = sess_b
.entries
.iter()
.filter(|e| e.stage == stage)
.map(|e| e.embedding.as_slice())
.collect();
if entries_a.is_empty() || entries_b.is_empty() {
continue;
}
// Find best match for each entry in A against entries in B
for emb_a in &entries_a {
for emb_b in &entries_b {
if emb_a.len() == emb_b.len() && !emb_a.is_empty() {
let sim = cosine_similarity(emb_a, emb_b);
if sim >= min_similarity {
session_pairs
.push((sess_a.id.clone(), sess_b.id.clone()));
scores.push(sim);
if !convergent_stages.contains(&stage) {
convergent_stages.push(stage);
}
}
}
}
}
}
}
}
// Compute consensus embedding (mean of all converging embeddings)
let consensus_embedding = if !scores.is_empty() {
let mut consensus = vec![0.0f32; self.config.dimensions];
let mut count = 0usize;
for session in &relevant_sessions {
for entry in &session.entries {
if convergent_stages.contains(&entry.stage) {
for (i, &v) in entry.embedding.iter().enumerate() {
if i < self.config.dimensions {
consensus[i] += v;
}
}
count += 1;
}
}
}
if count > 0 {
for v in &mut consensus {
*v /= count as f32;
}
Some(consensus)
} else {
None
}
} else {
None
};
// Sort convergent stages
convergent_stages.sort();
Ok(ConvergenceResult {
session_pairs,
scores,
convergent_stages,
consensus_embedding,
})
}
/// Get all embeddings for a session.
pub fn get_session_embeddings(&self, session_id: &str) -> CrvResult<Vec<CrvSessionEntry>> {
let session = self
.sessions
.get(session_id)
.ok_or_else(|| CrvError::SessionNotFound(session_id.to_string()))?;
Ok(session
.entries
.iter()
.map(|e| CrvSessionEntry {
session_id: session.id.clone(),
coordinate: session.coordinate.clone(),
stage: e.stage,
embedding: e.embedding.clone(),
metadata: e.metadata.clone(),
timestamp_ms: e.timestamp_ms,
})
.collect())
}
/// Get the number of entries in a session.
pub fn session_entry_count(&self, session_id: &str) -> usize {
self.sessions
.get(session_id)
.map(|s| s.entries.len())
.unwrap_or(0)
}
/// Get the number of active sessions.
pub fn session_count(&self) -> usize {
self.sessions.len()
}
/// Remove a session.
pub fn remove_session(&mut self, session_id: &str) -> bool {
self.sessions.remove(session_id).is_some()
}
/// Get access to the Stage I encoder for direct operations.
pub fn stage_i_encoder(&self) -> &StageIEncoder {
&self.stage_i
}
/// Get access to the Stage II encoder for direct operations.
pub fn stage_ii_encoder(&self) -> &StageIIEncoder {
&self.stage_ii
}
/// Get access to the Stage IV encoder for direct operations.
pub fn stage_iv_encoder(&self) -> &StageIVEncoder {
&self.stage_iv
}
/// Get access to the Stage V engine for direct operations.
pub fn stage_v_engine(&self) -> &StageVEngine {
&self.stage_v
}
/// Get access to the Stage VI modeler for direct operations.
pub fn stage_vi_modeler(&self) -> &StageVIModeler {
&self.stage_vi
}
/// Internal: add an entry to a session.
fn add_entry(
&mut self,
session_id: &str,
stage: u8,
embedding: Vec<f32>,
metadata: HashMap<String, serde_json::Value>,
) -> CrvResult<()> {
let session = self
.sessions
.get_mut(session_id)
.ok_or_else(|| CrvError::SessionNotFound(session_id.to_string()))?;
let entry_index = session.entries.iter().filter(|e| e.stage == stage).count();
session.entries.push(SessionEntry {
embedding,
stage,
entry_index,
metadata,
timestamp_ms: 0,
});
Ok(())
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// 32-dimensional config with a permissive convergence threshold.
    fn test_config() -> CrvConfig {
        CrvConfig {
            dimensions: 32,
            convergence_threshold: 0.5,
            ..CrvConfig::default()
        }
    }

    /// Manager pre-loaded with one freshly created session.
    fn manager_with(session: &str, coord: &str) -> CrvSessionManager {
        let mut manager = CrvSessionManager::new(test_config());
        manager
            .create_session(session.to_string(), coord.to_string())
            .unwrap();
        manager
    }

    /// Canonical three-point ideogram used by several tests.
    fn sample_stage_i() -> StageIData {
        StageIData {
            stroke: vec![(0.0, 0.0), (1.0, 1.0), (2.0, 0.0)],
            spontaneous_descriptor: "angular".to_string(),
            classification: GestaltType::Manmade,
            confidence: 0.9,
        }
    }

    #[test]
    fn test_session_creation() {
        let manager = manager_with("sess-1", "1234-5678");
        assert_eq!(manager.session_count(), 1);
        assert_eq!(manager.session_entry_count("sess-1"), 0);
    }

    #[test]
    fn test_add_stage_i() {
        let mut manager = manager_with("sess-1", "1234-5678");
        let emb = manager.add_stage_i("sess-1", &sample_stage_i()).unwrap();
        assert_eq!(emb.len(), 32);
        assert_eq!(manager.session_entry_count("sess-1"), 1);
    }

    #[test]
    fn test_add_stage_ii() {
        let mut manager = manager_with("sess-1", "coord-1");
        let data = StageIIData {
            impressions: vec![
                (SensoryModality::Texture, "rough".to_string()),
                (SensoryModality::Color, "gray".to_string()),
            ],
            feature_vector: None,
        };
        assert_eq!(manager.add_stage_ii("sess-1", &data).unwrap().len(), 32);
    }

    #[test]
    fn test_full_session_flow() {
        let mut manager = manager_with("sess-1", "coord-1");
        // Stage I
        manager.add_stage_i("sess-1", &sample_stage_i()).unwrap();
        // Stage II
        manager
            .add_stage_ii(
                "sess-1",
                &StageIIData {
                    impressions: vec![
                        (SensoryModality::Texture, "rough stone".to_string()),
                        (SensoryModality::Temperature, "cold".to_string()),
                    ],
                    feature_vector: None,
                },
            )
            .unwrap();
        // Stage IV
        manager
            .add_stage_iv(
                "sess-1",
                &StageIVData {
                    emotional_impact: vec![("solemn".to_string(), 0.6)],
                    tangibles: vec!["stone blocks".to_string()],
                    intangibles: vec!["ancient".to_string()],
                    aol_detections: vec![],
                },
            )
            .unwrap();
        assert_eq!(manager.session_entry_count("sess-1"), 3);
        // Entries come back in insertion order with their stage labels.
        let entries = manager.get_session_embeddings("sess-1").unwrap();
        assert_eq!(entries.len(), 3);
        let stages: Vec<u8> = entries.iter().map(|e| e.stage).collect();
        assert_eq!(stages, vec![1, 2, 4]);
    }

    #[test]
    fn test_duplicate_session() {
        let mut manager = manager_with("sess-1", "coord-1");
        assert!(manager
            .create_session("sess-1".to_string(), "coord-2".to_string())
            .is_err());
    }

    #[test]
    fn test_session_not_found() {
        let mut manager = CrvSessionManager::new(test_config());
        let data = StageIData {
            stroke: vec![(0.0, 0.0), (1.0, 1.0)],
            spontaneous_descriptor: "test".to_string(),
            classification: GestaltType::Natural,
            confidence: 0.5,
        };
        assert!(manager.add_stage_i("nonexistent", &data).is_err());
    }

    #[test]
    fn test_remove_session() {
        let mut manager = manager_with("sess-1", "coord-1");
        assert_eq!(manager.session_count(), 1);
        assert!(manager.remove_session("sess-1"));
        assert_eq!(manager.session_count(), 0);
        assert!(!manager.remove_session("sess-1"));
    }
}

View File

@@ -0,0 +1,364 @@
//! Stage I Encoder: Ideogram Gestalts via Poincaré Ball Embeddings
//!
//! CRV Stage I captures gestalt primitives (manmade, natural, movement, energy,
//! water, land) through ideogram traces. The hierarchical taxonomy of gestalts
//! maps naturally to hyperbolic space, where the Poincaré ball model encodes
//! tree-like structures with exponentially less distortion than Euclidean space.
//!
//! # Architecture
//!
//! Ideogram stroke traces are converted to fixed-dimension feature vectors,
//! then projected into the Poincaré ball. Gestalt classification uses hyperbolic
//! distance to prototype embeddings for each gestalt type.
use crate::error::{CrvError, CrvResult};
use crate::types::{CrvConfig, GestaltType, StageIData};
use ruvector_attention::hyperbolic::{
exp_map, frechet_mean, log_map, mobius_add, poincare_distance, project_to_ball,
};
/// Stage I encoder using Poincaré ball hyperbolic embeddings.
///
/// Converts ideogram stroke traces into fixed-dimension feature vectors,
/// projects them into the Poincaré ball, and classifies them by hyperbolic
/// distance to per-gestalt prototype points.
#[derive(Debug, Clone)]
pub struct StageIEncoder {
    /// Embedding dimensionality.
    dim: usize,
    /// Poincaré ball curvature (positive).
    curvature: f32,
    /// Prototype embeddings for each gestalt type in the Poincaré ball.
    /// Indexed by `GestaltType::index()`.
    prototypes: Vec<Vec<f32>>,
}
impl StageIEncoder {
/// Create a new Stage I encoder with default gestalt prototypes.
pub fn new(config: &CrvConfig) -> Self {
let dim = config.dimensions;
let curvature = config.curvature;
// Initialize gestalt prototypes as points in the Poincaré ball.
// Each prototype is placed at a distinct region of the ball,
// with hierarchical relationships preserved by hyperbolic distance.
let prototypes = Self::init_prototypes(dim, curvature);
Self {
dim,
curvature,
prototypes,
}
}
/// Initialize gestalt prototype embeddings in the Poincaré ball.
///
/// Places each gestalt type at a distinct angular position with
/// controlled radial distance from the origin. The hierarchical
/// structure (root → gestalt types → sub-types) is preserved
/// by the exponential volume growth of hyperbolic space.
fn init_prototypes(dim: usize, curvature: f32) -> Vec<Vec<f32>> {
let num_types = GestaltType::all().len();
let mut prototypes = Vec::with_capacity(num_types);
for gestalt in GestaltType::all() {
let idx = gestalt.index();
// Place each prototype along a different axis direction
// with a moderate radial distance (0.3-0.5 of ball radius).
let mut proto = vec![0.0f32; dim];
// Use multiple dimensions to spread prototypes apart
let base_dim = idx * (dim / num_types);
let spread = dim / num_types;
for d in 0..spread.min(dim - base_dim) {
let angle = std::f32::consts::PI * 2.0 * (d as f32) / (spread as f32);
proto[base_dim + d] = 0.3 * angle.cos() / (spread as f32).sqrt();
}
// Project to ball to ensure it's inside
proto = project_to_ball(&proto, curvature, 1e-7);
prototypes.push(proto);
}
prototypes
}
/// Encode an ideogram stroke trace into a fixed-dimension feature vector.
///
/// Extracts geometric features from the stroke: curvature statistics,
/// velocity profile, angular distribution, and bounding box ratios.
pub fn encode_stroke(&self, stroke: &[(f32, f32)]) -> CrvResult<Vec<f32>> {
if stroke.is_empty() {
return Err(CrvError::EmptyInput("Stroke trace is empty".to_string()));
}
let mut features = vec![0.0f32; self.dim];
// Feature 1: Stroke statistics (first few dimensions)
let n = stroke.len() as f32;
let (cx, cy) = stroke
.iter()
.fold((0.0, 0.0), |(sx, sy), &(x, y)| (sx + x, sy + y));
features[0] = cx / n; // centroid x
features[1] = cy / n; // centroid y
// Feature 2: Bounding box aspect ratio
let (min_x, max_x) = stroke
.iter()
.map(|p| p.0)
.fold((f32::MAX, f32::MIN), |(mn, mx), v| (mn.min(v), mx.max(v)));
let (min_y, max_y) = stroke
.iter()
.map(|p| p.1)
.fold((f32::MAX, f32::MIN), |(mn, mx), v| (mn.min(v), mx.max(v)));
let width = (max_x - min_x).max(1e-6);
let height = (max_y - min_y).max(1e-6);
features[2] = width / height; // aspect ratio
// Feature 3: Total path length (normalized)
let mut path_length = 0.0f32;
for i in 1..stroke.len() {
let dx = stroke[i].0 - stroke[i - 1].0;
let dy = stroke[i].1 - stroke[i - 1].1;
path_length += (dx * dx + dy * dy).sqrt();
}
features[3] = path_length / (width + height).max(1e-6);
// Feature 4: Angular distribution (segment angles)
if stroke.len() >= 3 {
let num_angle_bins = 8.min(self.dim.saturating_sub(4));
for i in 1..stroke.len().saturating_sub(1) {
let dx1 = stroke[i].0 - stroke[i - 1].0;
let dy1 = stroke[i].1 - stroke[i - 1].1;
let dx2 = stroke[i + 1].0 - stroke[i].0;
let dy2 = stroke[i + 1].1 - stroke[i].1;
let angle = dy1.atan2(dx1) - dy2.atan2(dx2);
let bin = ((angle + std::f32::consts::PI) / (2.0 * std::f32::consts::PI)
* num_angle_bins as f32) as usize;
let bin = bin.min(num_angle_bins - 1);
if 4 + bin < self.dim {
features[4 + bin] += 1.0 / (stroke.len() as f32 - 2.0).max(1.0);
}
}
}
// Feature 5: Curvature variance (spread across remaining dimensions)
if stroke.len() >= 3 {
let mut curvatures = Vec::new();
for i in 1..stroke.len() - 1 {
let dx1 = stroke[i].0 - stroke[i - 1].0;
let dy1 = stroke[i].1 - stroke[i - 1].1;
let dx2 = stroke[i + 1].0 - stroke[i].0;
let dy2 = stroke[i + 1].1 - stroke[i].1;
let cross = dx1 * dy2 - dy1 * dx2;
let ds1 = (dx1 * dx1 + dy1 * dy1).sqrt().max(1e-6);
let ds2 = (dx2 * dx2 + dy2 * dy2).sqrt().max(1e-6);
curvatures.push(cross / (ds1 * ds2));
}
if !curvatures.is_empty() {
let mean_k: f32 = curvatures.iter().sum::<f32>() / curvatures.len() as f32;
let var_k: f32 = curvatures.iter().map(|k| (k - mean_k).powi(2)).sum::<f32>()
/ curvatures.len() as f32;
if 12 < self.dim {
features[12] = mean_k;
}
if 13 < self.dim {
features[13] = var_k;
}
}
}
// Normalize the feature vector
let norm: f32 = features.iter().map(|x| x * x).sum::<f32>().sqrt();
if norm > 1e-6 {
let scale = 0.4 / norm; // keep within ball
for f in &mut features {
*f *= scale;
}
}
Ok(features)
}
/// Encode complete Stage I data into a Poincaré ball embedding.
///
/// Combines stroke features with the gestalt prototype via Möbius addition,
/// producing a vector that encodes both the raw ideogram trace and its
/// gestalt classification in hyperbolic space.
pub fn encode(&self, data: &StageIData) -> CrvResult<Vec<f32>> {
let stroke_features = self.encode_stroke(&data.stroke)?;
// Get the prototype for the classified gestalt type
let prototype = &self.prototypes[data.classification.index()];
// Combine stroke features with gestalt prototype via Möbius addition.
// This places the encoded vector near the gestalt prototype in
// hyperbolic space, with the stroke features providing the offset.
let combined = mobius_add(&stroke_features, prototype, self.curvature);
// Weight by confidence
let weighted: Vec<f32> = combined
.iter()
.map(|&v| v * data.confidence + stroke_features[0] * (1.0 - data.confidence))
.collect();
Ok(project_to_ball(&weighted, self.curvature, 1e-7))
}
/// Classify a stroke embedding into a gestalt type by finding the
/// nearest prototype in hyperbolic space.
pub fn classify(&self, embedding: &[f32]) -> CrvResult<(GestaltType, f32)> {
if embedding.len() != self.dim {
return Err(CrvError::DimensionMismatch {
expected: self.dim,
actual: embedding.len(),
});
}
let mut best_type = GestaltType::Manmade;
let mut best_distance = f32::MAX;
for gestalt in GestaltType::all() {
let proto = &self.prototypes[gestalt.index()];
let dist = poincare_distance(embedding, proto, self.curvature);
if dist < best_distance {
best_distance = dist;
best_type = *gestalt;
}
}
// Convert distance to confidence (closer = higher confidence)
let confidence = (-best_distance).exp();
Ok((best_type, confidence))
}
/// Compute the Fréchet mean of multiple Stage I embeddings.
///
/// Useful for finding the consensus gestalt across multiple sessions
/// targeting the same coordinate.
pub fn consensus(&self, embeddings: &[&[f32]]) -> CrvResult<Vec<f32>> {
if embeddings.is_empty() {
return Err(CrvError::EmptyInput(
"No embeddings for consensus".to_string(),
));
}
Ok(frechet_mean(embeddings, None, self.curvature, 50, 1e-5))
}
    /// Compute pairwise hyperbolic distance between two Stage I embeddings.
    ///
    /// Thin wrapper over `poincare_distance` using this encoder's curvature.
    pub fn distance(&self, a: &[f32], b: &[f32]) -> f32 {
        poincare_distance(a, b, self.curvature)
    }
    /// Get the prototype embedding for a gestalt type.
    ///
    /// The returned slice borrows from the encoder's prototype table.
    pub fn prototype(&self, gestalt: GestaltType) -> &[f32] {
        &self.prototypes[gestalt.index()]
    }
    /// Map an embedding to tangent space at the origin for Euclidean operations.
    ///
    /// Inverse of [`Self::from_tangent`] (up to numerical error).
    pub fn to_tangent(&self, embedding: &[f32]) -> Vec<f32> {
        let origin = vec![0.0f32; self.dim];
        log_map(embedding, &origin, self.curvature)
    }
    /// Map a tangent vector back to the Poincaré ball.
    ///
    /// Inverse of [`Self::to_tangent`] (up to numerical error).
    pub fn from_tangent(&self, tangent: &[f32]) -> Vec<f32> {
        let origin = vec![0.0f32; self.dim];
        exp_map(tangent, &origin, self.curvature)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// 32-dimensional, unit-curvature configuration shared by every test.
    fn test_config() -> CrvConfig {
        CrvConfig {
            dimensions: 32,
            curvature: 1.0,
            ..CrvConfig::default()
        }
    }
    #[test]
    fn test_encoder_creation() {
        let enc = StageIEncoder::new(&test_config());
        assert_eq!(enc.dim, 32);
        assert_eq!(enc.prototypes.len(), 6);
    }
    #[test]
    fn test_stroke_encoding() {
        let cfg = test_config();
        let enc = StageIEncoder::new(&cfg);
        let trace = vec![(0.0, 0.0), (1.0, 0.5), (2.0, 1.0), (3.0, 0.5), (4.0, 0.0)];
        let emb = enc.encode_stroke(&trace).unwrap();
        assert_eq!(emb.len(), 32);
        // The embedding must land strictly inside the Poincaré ball.
        let sq_norm: f32 = emb.iter().map(|x| x * x).sum();
        assert!(sq_norm < 1.0 / cfg.curvature);
    }
    #[test]
    fn test_full_encode() {
        let enc = StageIEncoder::new(&test_config());
        let sample = StageIData {
            stroke: vec![(0.0, 0.0), (1.0, 1.0), (2.0, 0.0)],
            spontaneous_descriptor: "angular".to_string(),
            classification: GestaltType::Manmade,
            confidence: 0.9,
        };
        assert_eq!(enc.encode(&sample).unwrap().len(), 32);
    }
    #[test]
    fn test_classification() {
        let enc = StageIEncoder::new(&test_config());
        // A prototype must classify back to its own gestalt type.
        let proto = enc.prototype(GestaltType::Energy).to_vec();
        let (kind, conf) = enc.classify(&proto).unwrap();
        assert_eq!(kind, GestaltType::Energy);
        assert!(conf > 0.5);
    }
    #[test]
    fn test_distance_symmetry() {
        let enc = StageIEncoder::new(&test_config());
        let p = enc.prototype(GestaltType::Manmade);
        let q = enc.prototype(GestaltType::Natural);
        // Hyperbolic distance is a metric, so d(p, q) == d(q, p).
        assert!((enc.distance(p, q) - enc.distance(q, p)).abs() < 1e-5);
    }
    #[test]
    fn test_tangent_roundtrip() {
        let enc = StageIEncoder::new(&test_config());
        let proto = enc.prototype(GestaltType::Water).to_vec();
        // log/exp maps should approximately invert each other.
        let back = enc.from_tangent(&enc.to_tangent(&proto));
        let mean_abs_err: f32 = proto
            .iter()
            .zip(&back)
            .map(|(a, b)| (a - b).abs())
            .sum::<f32>()
            / proto.len() as f32;
        assert!(mean_abs_err < 0.1);
    }
}

View File

@@ -0,0 +1,268 @@
//! Stage II Encoder: Sensory Data via Multi-Head Attention Vectors
//!
//! CRV Stage II captures sensory impressions (textures, colors, temperatures,
//! sounds, etc.). Each sensory modality is encoded as a separate attention head,
//! with the multi-head mechanism combining them into a unified 384-dimensional
//! representation.
//!
//! # Architecture
//!
//! Sensory descriptors are hashed into feature vectors per modality, then
//! processed through multi-head attention where each head specializes in
//! a different sensory channel.
use crate::error::{CrvError, CrvResult};
use crate::types::{CrvConfig, SensoryModality, StageIIData};
use ruvector_attention::traits::Attention;
use ruvector_attention::MultiHeadAttention;
/// Number of sensory modality heads.
///
/// One head per [`SensoryModality`] variant; `modality_index` maps each
/// variant into `0..NUM_MODALITIES` and must stay in sync with this value.
const NUM_MODALITIES: usize = 8;
/// Stage II encoder using multi-head attention for sensory fusion.
pub struct StageIIEncoder {
    /// Embedding dimensionality.
    dim: usize,
    /// Multi-head attention mechanism (one head per modality).
    attention: MultiHeadAttention,
}
// Hand-written Debug: the `attention` field is rendered as an opaque
// placeholder (presumably `MultiHeadAttention` does not implement `Debug`
// — confirm against the ruvector_attention crate).
impl std::fmt::Debug for StageIIEncoder {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("StageIIEncoder")
            .field("dim", &self.dim)
            .field("attention", &"MultiHeadAttention { .. }")
            .finish()
    }
}
impl StageIIEncoder {
    /// Create a new Stage II encoder.
    ///
    /// The head count defaults to [`NUM_MODALITIES`]; when
    /// `config.dimensions` is not divisible by it, the largest divisor of
    /// `dimensions` that is ≤ `NUM_MODALITIES` is used instead so the
    /// per-head dimensionality stays integral (1 always divides).
    pub fn new(config: &CrvConfig) -> Self {
        let dim = config.dimensions;
        // Ensure dim is divisible by the head count.
        let effective_heads = if dim % NUM_MODALITIES == 0 {
            NUM_MODALITIES
        } else {
            // Fall back to a divisor
            let mut h = NUM_MODALITIES;
            while dim % h != 0 && h > 1 {
                h -= 1;
            }
            h
        };
        let attention = MultiHeadAttention::new(dim, effective_heads);
        Self { dim, attention }
    }
    /// Encode a sensory descriptor string into a feature vector.
    ///
    /// Uses a deterministic hash-based encoding to convert text descriptors
    /// into fixed-dimension vectors. Each modality gets a distinct subspace.
    /// The result is L2-normalized (unless numerically zero).
    fn encode_descriptor(&self, modality: SensoryModality, descriptor: &str) -> Vec<f32> {
        let mut features = vec![0.0f32; self.dim];
        // Clamp the subspace to at least 1: for dim < NUM_MODALITIES the
        // integer division yields 0, and `i % subspace_size` below would
        // otherwise panic with a mod-by-zero.
        let subspace_size = (self.dim / NUM_MODALITIES).max(1);
        let modality_offset = modality_index(modality) * subspace_size;
        // Simple deterministic hash encoding
        let bytes = descriptor.as_bytes();
        for (i, &byte) in bytes.iter().enumerate() {
            let dim_idx = modality_offset + (i % subspace_size);
            if dim_idx < self.dim {
                // Distribute byte values across the subspace with varied phases
                let phase = (i as f32) * 0.618_034; // golden ratio
                features[dim_idx] += (byte as f32 / 255.0) * (phase * std::f32::consts::PI).cos();
            }
        }
        // Add modality-specific bias
        if modality_offset < self.dim {
            features[modality_offset] += 1.0;
        }
        // L2 normalize
        let norm: f32 = features.iter().map(|x| x * x).sum::<f32>().sqrt();
        if norm > 1e-6 {
            for f in &mut features {
                *f /= norm;
            }
        }
        features
    }
    /// Encode Stage II data into a unified sensory embedding.
    ///
    /// Each sensory impression becomes a key-value pair in the attention
    /// mechanism. A query built from the modality distribution attends over
    /// all impressions to produce the fused output. A pre-computed
    /// `feature_vector` of matching dimensionality short-circuits the
    /// attention pass entirely.
    ///
    /// # Errors
    ///
    /// Returns [`CrvError::EmptyInput`] when `data.impressions` is empty;
    /// otherwise propagates any error from the attention computation.
    pub fn encode(&self, data: &StageIIData) -> CrvResult<Vec<f32>> {
        if data.impressions.is_empty() {
            return Err(CrvError::EmptyInput(
                "No sensory impressions".to_string(),
            ));
        }
        // If a pre-computed feature vector exists, use it
        if let Some(ref fv) = data.feature_vector {
            if fv.len() == self.dim {
                return Ok(fv.clone());
            }
        }
        // Encode each impression into a feature vector
        let encoded: Vec<Vec<f32>> = data
            .impressions
            .iter()
            .map(|(modality, descriptor)| self.encode_descriptor(*modality, descriptor))
            .collect();
        // Build query from modality distribution
        let query = self.build_modality_query(&data.impressions);
        let keys: Vec<&[f32]> = encoded.iter().map(|v| v.as_slice()).collect();
        let values: Vec<&[f32]> = encoded.iter().map(|v| v.as_slice()).collect();
        let result = self.attention.compute(&query, &keys, &values)?;
        Ok(result)
    }
    /// Build a query vector from the distribution of modalities present.
    ///
    /// The query is L2-normalized; modality subspaces mirror the layout
    /// used by [`Self::encode_descriptor`].
    fn build_modality_query(&self, impressions: &[(SensoryModality, String)]) -> Vec<f32> {
        let mut query = vec![0.0f32; self.dim];
        // Same ≥ 1 clamp as encode_descriptor so the layouts agree.
        let subspace_size = (self.dim / NUM_MODALITIES).max(1);
        // Count modality occurrences
        let mut counts = [0usize; NUM_MODALITIES];
        for (modality, _) in impressions {
            let idx = modality_index(*modality);
            if idx < NUM_MODALITIES {
                counts[idx] += 1;
            }
        }
        // Encode counts as the query
        let total: f32 = counts.iter().sum::<usize>() as f32;
        for (m, &count) in counts.iter().enumerate() {
            let offset = m * subspace_size;
            // With the clamped subspace a high modality index can fall past
            // the end of the vector; skip it instead of underflowing below.
            if offset >= self.dim {
                continue;
            }
            let weight = count as f32 / total.max(1.0);
            for d in 0..subspace_size.min(self.dim - offset) {
                query[offset + d] = weight * (1.0 + d as f32 * 0.01);
            }
        }
        // L2 normalize
        let norm: f32 = query.iter().map(|x| x * x).sum::<f32>().sqrt();
        if norm > 1e-6 {
            for f in &mut query {
                *f /= norm;
            }
        }
        query
    }
    /// Compute cosine similarity between two Stage II embeddings.
    ///
    /// Returns 0.0 for mismatched lengths, empty inputs, or (near-)zero
    /// vectors rather than producing NaN.
    pub fn similarity(&self, a: &[f32], b: &[f32]) -> f32 {
        if a.len() != b.len() || a.is_empty() {
            return 0.0;
        }
        let dot: f32 = a.iter().zip(b).map(|(x, y)| x * y).sum();
        let norm_a: f32 = a.iter().map(|x| x * x).sum::<f32>().sqrt();
        let norm_b: f32 = b.iter().map(|x| x * x).sum::<f32>().sqrt();
        if norm_a < 1e-6 || norm_b < 1e-6 {
            return 0.0;
        }
        dot / (norm_a * norm_b)
    }
}
/// Map sensory modality to index.
///
/// The index selects the modality's attention head / feature subspace and
/// must stay within `0..NUM_MODALITIES`; keep this mapping in sync with
/// that constant if new modalities are added.
fn modality_index(m: SensoryModality) -> usize {
    match m {
        SensoryModality::Texture => 0,
        SensoryModality::Color => 1,
        SensoryModality::Temperature => 2,
        SensoryModality::Sound => 3,
        SensoryModality::Smell => 4,
        SensoryModality::Taste => 5,
        SensoryModality::Dimension => 6,
        SensoryModality::Luminosity => 7,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// 32-dim config: 32 / 8 modalities = 4 dims per attention head.
    fn test_config() -> CrvConfig {
        CrvConfig {
            dimensions: 32,
            ..CrvConfig::default()
        }
    }
    #[test]
    fn test_encoder_creation() {
        let enc = StageIIEncoder::new(&test_config());
        assert_eq!(enc.dim, 32);
    }
    #[test]
    fn test_descriptor_encoding() {
        let enc = StageIIEncoder::new(&test_config());
        let feat = enc.encode_descriptor(SensoryModality::Texture, "rough grainy");
        assert_eq!(feat.len(), 32);
        // encode_descriptor L2-normalizes its output.
        let norm: f32 = feat.iter().map(|x| x * x).sum::<f32>().sqrt();
        assert!((norm - 1.0).abs() < 0.01);
    }
    #[test]
    fn test_full_encode() {
        let enc = StageIIEncoder::new(&test_config());
        let session = StageIIData {
            impressions: vec![
                (SensoryModality::Texture, "rough".to_string()),
                (SensoryModality::Color, "blue-gray".to_string()),
                (SensoryModality::Temperature, "cold".to_string()),
            ],
            feature_vector: None,
        };
        assert_eq!(enc.encode(&session).unwrap().len(), 32);
    }
    #[test]
    fn test_similarity() {
        let enc = StageIIEncoder::new(&test_config());
        // Identical vectors must have cosine similarity 1.
        let u = vec![1.0; 32];
        let v = vec![1.0; 32];
        assert!((enc.similarity(&u, &v) - 1.0).abs() < 1e-5);
    }
    #[test]
    fn test_empty_impressions() {
        let enc = StageIIEncoder::new(&test_config());
        let empty = StageIIData {
            impressions: vec![],
            feature_vector: None,
        };
        assert!(enc.encode(&empty).is_err());
    }
}

View File

@@ -0,0 +1,282 @@
//! Stage III Encoder: Dimensional Data via GNN Graph Topology
//!
//! CRV Stage III captures spatial sketches and geometric relationships.
//! These naturally form a graph where sketch elements are nodes and spatial
//! relationships are edges. The GNN layer learns to propagate spatial
//! context through the graph, producing an embedding that captures the
//! full dimensional structure of the target.
//!
//! # Architecture
//!
//! Sketch elements → node features, spatial relationships → edge weights.
//! A GNN forward pass aggregates neighborhood information to produce
//! a graph-level embedding.
use crate::error::{CrvError, CrvResult};
use crate::types::{CrvConfig, GeometricKind, SpatialRelationType, StageIIIData};
use ruvector_gnn::layer::RuvectorLayer;
use ruvector_gnn::search::cosine_similarity;
/// Stage III encoder using GNN graph topology.
///
/// Sketch elements become graph nodes and spatial relationships become
/// weighted edges; a single GNN layer performs one round of message passing.
#[derive(Debug)]
pub struct StageIIIEncoder {
    /// Embedding dimensionality.
    dim: usize,
    /// GNN layer for spatial message passing.
    gnn_layer: RuvectorLayer,
}
impl StageIIIEncoder {
    /// Create a new Stage III encoder.
    ///
    /// # Panics
    ///
    /// Panics if the GNN layer rejects its configuration; the arguments
    /// used here (equal in/out dims, 1 head, zero dropout) are expected
    /// to always be valid.
    pub fn new(config: &CrvConfig) -> Self {
        let dim = config.dimensions;
        // Single GNN layer: input_dim -> hidden_dim, 1 head
        let gnn_layer = RuvectorLayer::new(dim, dim, 1, 0.0)
            .expect("ruvector-crv: valid GNN layer config (dim, dim, 1 head, 0.0 dropout)");
        Self { dim, gnn_layer }
    }
    /// Encode a sketch element into a node feature vector.
    ///
    /// Layout: dims 0-7 one-hot geometric kind, dims 8-9 position,
    /// dim 10 scale, dims 11+ a byte-hash of the label. The vector is
    /// L2-normalized before being returned.
    fn encode_element(&self, label: &str, kind: GeometricKind, position: (f32, f32), scale: Option<f32>) -> Vec<f32> {
        let mut features = vec![0.0f32; self.dim];
        // Geometric kind encoding (one-hot style in first 8 dims)
        let kind_idx = match kind {
            GeometricKind::Point => 0,
            GeometricKind::Line => 1,
            GeometricKind::Curve => 2,
            GeometricKind::Rectangle => 3,
            GeometricKind::Circle => 4,
            GeometricKind::Triangle => 5,
            GeometricKind::Polygon => 6,
            GeometricKind::Freeform => 7,
        };
        if kind_idx < self.dim {
            features[kind_idx] = 1.0;
        }
        // Position encoding (normalized)
        if 8 < self.dim {
            features[8] = position.0;
        }
        if 9 < self.dim {
            features[9] = position.1;
        }
        // Scale encoding
        if let Some(s) = scale {
            if 10 < self.dim {
                features[10] = s;
            }
        }
        // Label hash encoding (spread across remaining dims). Skipped when
        // there are no dims past the kind/position/scale slots: the modulo
        // below would otherwise be a mod-by-zero panic for dim <= 11.
        let label_space = self.dim.saturating_sub(11);
        if label_space > 0 {
            for (i, byte) in label.bytes().enumerate() {
                // 11 + (i % label_space) is provably < self.dim here.
                let idx = 11 + (i % label_space);
                features[idx] += (byte as f32 / 255.0) * 0.5;
            }
        }
        // L2 normalize
        let norm: f32 = features.iter().map(|x| x * x).sum::<f32>().sqrt();
        if norm > 1e-6 {
            for f in &mut features {
                *f /= norm;
            }
        }
        features
    }
    /// Compute edge weight from spatial relationship type.
    ///
    /// Tighter containment-like relations get weights near 1; separation
    /// gets the lowest weight.
    fn relationship_weight(relation: SpatialRelationType) -> f32 {
        match relation {
            SpatialRelationType::Adjacent => 0.8,
            SpatialRelationType::Contains => 0.9,
            SpatialRelationType::Above => 0.6,
            SpatialRelationType::Below => 0.6,
            SpatialRelationType::Inside => 0.95,
            SpatialRelationType::Surrounding => 0.85,
            SpatialRelationType::Connected => 0.7,
            SpatialRelationType::Separated => 0.3,
        }
    }
    /// Encode Stage III data into a graph-level embedding.
    ///
    /// Builds a graph from sketch elements and relationships, runs GNN
    /// message passing per node, then mean-pools the node embeddings into
    /// a single graph-level vector.
    ///
    /// NOTE(review): duplicate element labels collapse to the last
    /// occurrence in the label→index map, so relationships referencing a
    /// duplicated label resolve to that last element — confirm labels are
    /// unique upstream.
    ///
    /// # Errors
    ///
    /// Returns [`CrvError::EmptyInput`] when there are no sketch elements.
    pub fn encode(&self, data: &StageIIIData) -> CrvResult<Vec<f32>> {
        if data.sketch_elements.is_empty() {
            return Err(CrvError::EmptyInput(
                "No sketch elements".to_string(),
            ));
        }
        // Build label → index mapping
        let label_to_idx: std::collections::HashMap<&str, usize> = data
            .sketch_elements
            .iter()
            .enumerate()
            .map(|(i, elem)| (elem.label.as_str(), i))
            .collect();
        // Encode each element as a node feature vector
        let node_features: Vec<Vec<f32>> = data
            .sketch_elements
            .iter()
            .map(|elem| {
                self.encode_element(&elem.label, elem.kind, elem.position, elem.scale)
            })
            .collect();
        // For each node, collect neighbor embeddings and edge weights
        // based on the spatial relationships (edges are treated as
        // undirected: both endpoints see each other as neighbors).
        let mut aggregated = vec![vec![0.0f32; self.dim]; node_features.len()];
        for (node_idx, node_feat) in node_features.iter().enumerate() {
            let label = &data.sketch_elements[node_idx].label;
            // Find all relationships involving this node
            let mut neighbor_feats = Vec::new();
            let mut edge_weights = Vec::new();
            for rel in &data.relationships {
                if rel.from == *label {
                    if let Some(&neighbor_idx) = label_to_idx.get(rel.to.as_str()) {
                        neighbor_feats.push(node_features[neighbor_idx].clone());
                        edge_weights.push(Self::relationship_weight(rel.relation) * rel.strength);
                    }
                } else if rel.to == *label {
                    if let Some(&neighbor_idx) = label_to_idx.get(rel.from.as_str()) {
                        neighbor_feats.push(node_features[neighbor_idx].clone());
                        edge_weights.push(Self::relationship_weight(rel.relation) * rel.strength);
                    }
                }
            }
            // GNN forward pass for this node
            aggregated[node_idx] =
                self.gnn_layer
                    .forward(node_feat, &neighbor_feats, &edge_weights);
        }
        // Aggregate into graph-level embedding via mean pooling
        let mut graph_embedding = vec![0.0f32; self.dim];
        for node_emb in &aggregated {
            for (i, &v) in node_emb.iter().enumerate() {
                if i < self.dim {
                    graph_embedding[i] += v;
                }
            }
        }
        let n = aggregated.len() as f32;
        for v in &mut graph_embedding {
            *v /= n;
        }
        Ok(graph_embedding)
    }
    /// Compute cosine similarity between two Stage III embeddings.
    pub fn similarity(&self, a: &[f32], b: &[f32]) -> f32 {
        cosine_similarity(a, b)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::types::{SketchElement, SpatialRelationship};
    /// Shared 32-dimensional configuration.
    fn test_config() -> CrvConfig {
        CrvConfig {
            dimensions: 32,
            ..CrvConfig::default()
        }
    }
    #[test]
    fn test_encoder_creation() {
        let enc = StageIIIEncoder::new(&test_config());
        assert_eq!(enc.dim, 32);
    }
    #[test]
    fn test_element_encoding() {
        let enc = StageIIIEncoder::new(&test_config());
        let feats = enc.encode_element(
            "building",
            GeometricKind::Rectangle,
            (0.5, 0.3),
            Some(2.0),
        );
        assert_eq!(feats.len(), 32);
    }
    #[test]
    fn test_full_encode() {
        let enc = StageIIIEncoder::new(&test_config());
        // A tower above a base, with a path adjacent to the base.
        let sketch = StageIIIData {
            sketch_elements: vec![
                SketchElement {
                    label: "tower".to_string(),
                    kind: GeometricKind::Rectangle,
                    position: (0.5, 0.8),
                    scale: Some(3.0),
                },
                SketchElement {
                    label: "base".to_string(),
                    kind: GeometricKind::Rectangle,
                    position: (0.5, 0.2),
                    scale: Some(5.0),
                },
                SketchElement {
                    label: "path".to_string(),
                    kind: GeometricKind::Line,
                    position: (0.3, 0.1),
                    scale: None,
                },
            ],
            relationships: vec![
                SpatialRelationship {
                    from: "tower".to_string(),
                    to: "base".to_string(),
                    relation: SpatialRelationType::Above,
                    strength: 0.9,
                },
                SpatialRelationship {
                    from: "path".to_string(),
                    to: "base".to_string(),
                    relation: SpatialRelationType::Adjacent,
                    strength: 0.7,
                },
            ],
        };
        assert_eq!(enc.encode(&sketch).unwrap().len(), 32);
    }
    #[test]
    fn test_empty_elements() {
        let enc = StageIIIEncoder::new(&test_config());
        let empty = StageIIIData {
            sketch_elements: vec![],
            relationships: vec![],
        };
        assert!(enc.encode(&empty).is_err());
    }
}

View File

@@ -0,0 +1,339 @@
//! Stage IV Encoder: Emotional/AOL Data via SNN Temporal Encoding
//!
//! CRV Stage IV captures emotional impacts, tangibles, intangibles, and
//! analytical overlay (AOL) detections. The spiking neural network (SNN)
//! temporal encoding naturally models the signal-vs-noise discrimination
//! that Stage IV demands:
//!
//! - High-frequency spike bursts correlate with AOL contamination
//! - Sustained low-frequency patterns indicate clean signal line data
//! - The refractory period prevents AOL cascade (analytical runaway)
//!
//! # Architecture
//!
//! Emotional intensity timeseries → SNN input currents.
//! Network spike rate analysis detects AOL events.
//! The embedding captures both the clean signal and AOL separation.
use crate::error::CrvResult;
use crate::types::{AOLDetection, CrvConfig, StageIVData};
use ruvector_mincut::snn::{LayerConfig, NetworkConfig, NeuronConfig, SpikingNetwork};
/// Stage IV encoder using spiking neural network temporal encoding.
///
/// Holds only scalar configuration; the spiking network itself is rebuilt
/// for every `encode` call, sized to the input.
#[derive(Debug)]
pub struct StageIVEncoder {
    /// Embedding dimensionality.
    dim: usize,
    /// AOL detection threshold (spike rate above this = likely AOL).
    aol_threshold: f32,
    /// SNN time step (from config `snn_dt`; presumably milliseconds —
    /// confirm against ruvector_mincut::snn).
    dt: f64,
    /// Refractory period for AOL cascade prevention (config
    /// `refractory_period_ms`).
    refractory_period: f64,
}
impl StageIVEncoder {
    /// Create a new Stage IV encoder.
    ///
    /// All tunables are lifted straight from [`CrvConfig`]; no network is
    /// constructed here (see [`Self::create_network`]).
    pub fn new(config: &CrvConfig) -> Self {
        Self {
            dim: config.dimensions,
            aol_threshold: config.aol_threshold,
            dt: config.snn_dt,
            refractory_period: config.refractory_period_ms,
        }
    }
    /// Create a spiking network configured for emotional signal processing.
    ///
    /// The network has 3 layers:
    /// - Input: receives emotional intensity as current
    /// - Hidden: processes temporal patterns
    /// - Output: produces the embedding dimensions
    fn create_network(&self, input_size: usize) -> SpikingNetwork {
        // Hidden layer: twice the input, clamped to [16, 128].
        let hidden_size = (input_size * 2).max(16).min(128);
        let output_size = self.dim.min(64); // SNN output, will be expanded
        // Neuron constants are hand-tuned; rationale is not documented here.
        let neuron_config = NeuronConfig {
            tau_membrane: 20.0,
            v_rest: 0.0,
            v_reset: 0.0,
            threshold: 1.0,
            t_refrac: self.refractory_period,
            resistance: 1.0,
            threshold_adapt: 0.1,
            tau_threshold: 100.0,
            homeostatic: true,
            target_rate: 0.01,
            tau_homeostatic: 1000.0,
        };
        let config = NetworkConfig {
            layers: vec![
                LayerConfig::new(input_size).with_neuron_config(neuron_config.clone()),
                LayerConfig::new(hidden_size)
                    .with_neuron_config(neuron_config.clone())
                    .with_recurrence(),
                LayerConfig::new(output_size).with_neuron_config(neuron_config),
            ],
            stdp_config: Default::default(),
            dt: self.dt,
            winner_take_all: false,
            wta_strength: 0.0,
        };
        SpikingNetwork::new(config)
    }
    /// Encode emotional intensity values into SNN input currents.
    ///
    /// The emotion labels are ignored; only intensities drive the network.
    fn emotional_to_currents(intensities: &[(String, f32)]) -> Vec<f64> {
        intensities
            .iter()
            .map(|(_, intensity)| *intensity as f64 * 5.0) // Scale to reasonable current
            .collect()
    }
    /// Analyze spike output to detect AOL events.
    ///
    /// High spike rate in a short window indicates the analytical mind
    /// is overriding the signal line (AOL contamination). Rates above
    /// 1.5x the threshold are auto-flagged.
    fn detect_aol(
        &self,
        spike_rates: &[f64],
        window_ms: f64,
    ) -> Vec<AOLDetection> {
        let mut detections = Vec::new();
        let threshold = self.aol_threshold as f64;
        for (i, &rate) in spike_rates.iter().enumerate() {
            if rate > threshold {
                detections.push(AOLDetection {
                    content: format!("AOL burst at timestep {}", i),
                    timestamp_ms: (i as f64 * window_ms) as u64,
                    flagged: rate > threshold * 1.5, // Auto-flag strong AOL
                    anomaly_score: (rate / threshold).min(1.0) as f32,
                });
            }
        }
        detections
    }
    /// Encode Stage IV data into a temporal embedding.
    ///
    /// Runs the SNN on emotional intensity data, analyzes spike patterns
    /// for AOL contamination, and produces a combined embedding that
    /// captures both clean signal and AOL separation.
    ///
    /// Embedding layout: first third spike-count features, second third
    /// membrane-potential output, final third text-derived features, with
    /// the very last dimension overwritten by the AOL contamination level
    /// (before L2 normalization). Currently always returns `Ok`.
    pub fn encode(&self, data: &StageIVData) -> CrvResult<Vec<f32>> {
        // Build input from emotional impact data
        let input_size = data.emotional_impact.len().max(1);
        let currents = Self::emotional_to_currents(&data.emotional_impact);
        if currents.is_empty() {
            // Fall back to text-based encoding if no emotional intensity data
            return self.encode_from_text(data);
        }
        // Run SNN simulation
        let mut network = self.create_network(input_size);
        // NOTE(review): "100ms" assumes dt == 1.0 ms; with other dt values
        // this is 100 steps, not 100 ms — confirm intent.
        let num_steps = 100; // 100ms simulation
        let mut spike_counts = vec![0usize; network.layer_size(network.num_layers() - 1)];
        let mut step_rates = Vec::new();
        for step in 0..num_steps {
            // Inject currents (modulated by step for temporal variation)
            let modulated: Vec<f64> = currents
                .iter()
                .map(|&c| c * (1.0 + 0.3 * ((step as f64 * 0.1).sin())))
                .collect();
            network.inject_current(&modulated);
            let spikes = network.step();
            for spike in &spikes {
                if spike.neuron_id < spike_counts.len() {
                    spike_counts[spike.neuron_id] += 1;
                }
            }
            // Track rate per window
            if step % 10 == 9 {
                let rate = spikes.len() as f64 / 10.0;
                step_rates.push(rate);
            }
        }
        // Build embedding from spike counts and output activities
        let output = network.get_output();
        let mut embedding = vec![0.0f32; self.dim];
        // First portion: spike count features (normalized by the max count)
        let spike_dims = spike_counts.len().min(self.dim / 3);
        let max_count = *spike_counts.iter().max().unwrap_or(&1) as f32;
        for (i, &count) in spike_counts.iter().take(spike_dims).enumerate() {
            embedding[i] = count as f32 / max_count.max(1.0);
        }
        // Second portion: membrane potential output
        let pot_offset = self.dim / 3;
        let pot_dims = output.len().min(self.dim / 3);
        for (i, &v) in output.iter().take(pot_dims).enumerate() {
            if pot_offset + i < self.dim {
                embedding[pot_offset + i] = v as f32;
            }
        }
        // Third portion: text-derived features from tangibles/intangibles
        let text_offset = 2 * self.dim / 3;
        self.encode_text_features(data, &mut embedding[text_offset..]);
        // Encode AOL information: combine detections found in this run with
        // those already recorded in the input data.
        let aol_detections = self.detect_aol(&step_rates, 10.0);
        let aol_count = (aol_detections.len() + data.aol_detections.len()) as f32;
        if self.dim > 2 {
            // Store AOL contamination level in last dimension
            embedding[self.dim - 1] = (aol_count / num_steps as f32).min(1.0);
        }
        // L2 normalize
        let norm: f32 = embedding.iter().map(|x| x * x).sum::<f32>().sqrt();
        if norm > 1e-6 {
            for f in &mut embedding {
                *f /= norm;
            }
        }
        Ok(embedding)
    }
    /// Text-based encoding fallback when no intensity timeseries is available.
    fn encode_from_text(&self, data: &StageIVData) -> CrvResult<Vec<f32>> {
        let mut embedding = vec![0.0f32; self.dim];
        self.encode_text_features(data, &mut embedding);
        // L2 normalize
        let norm: f32 = embedding.iter().map(|x| x * x).sum::<f32>().sqrt();
        if norm > 1e-6 {
            for f in &mut embedding {
                *f /= norm;
            }
        }
        Ok(embedding)
    }
    /// Encode text descriptors (tangibles, intangibles) into feature slots.
    ///
    /// Deterministic byte-hash: tangibles and intangibles use different
    /// strides (7 vs 11) and intangibles are offset by half the slot range
    /// to reduce collisions between the two sets.
    fn encode_text_features(&self, data: &StageIVData, features: &mut [f32]) {
        if features.is_empty() {
            return;
        }
        // Hash tangibles
        for (i, tangible) in data.tangibles.iter().enumerate() {
            for (j, byte) in tangible.bytes().enumerate() {
                let idx = (i * 7 + j) % features.len();
                features[idx] += (byte as f32 / 255.0) * 0.3;
            }
        }
        // Hash intangibles
        for (i, intangible) in data.intangibles.iter().enumerate() {
            for (j, byte) in intangible.bytes().enumerate() {
                let idx = (i * 11 + j + features.len() / 2) % features.len();
                features[idx] += (byte as f32 / 255.0) * 0.3;
            }
        }
    }
    /// Get the AOL anomaly score for a given Stage IV embedding.
    ///
    /// Higher values indicate more AOL contamination. Reads the last
    /// dimension written by [`Self::encode`]; note that L2 normalization
    /// there rescales it, so this is a relative, not absolute, measure.
    pub fn aol_score(&self, embedding: &[f32]) -> f32 {
        if embedding.len() >= self.dim && self.dim > 2 {
            embedding[self.dim - 1].abs()
        } else {
            0.0
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// 32-dim config with a 0.7 AOL threshold and 50 ms refractory period.
    fn test_config() -> CrvConfig {
        CrvConfig {
            dimensions: 32,
            aol_threshold: 0.7,
            refractory_period_ms: 50.0,
            snn_dt: 1.0,
            ..CrvConfig::default()
        }
    }
    #[test]
    fn test_encoder_creation() {
        let enc = StageIVEncoder::new(&test_config());
        assert_eq!(enc.dim, 32);
        assert_eq!(enc.aol_threshold, 0.7);
    }
    #[test]
    fn test_text_only_encode() {
        let enc = StageIVEncoder::new(&test_config());
        // No intensity timeseries: exercises the text-only fallback path.
        let session = StageIVData {
            emotional_impact: vec![],
            tangibles: vec!["metal".to_string(), "concrete".to_string()],
            intangibles: vec!["historical significance".to_string()],
            aol_detections: vec![],
        };
        assert_eq!(enc.encode(&session).unwrap().len(), 32);
    }
    #[test]
    fn test_full_encode_with_snn() {
        let enc = StageIVEncoder::new(&test_config());
        let session = StageIVData {
            emotional_impact: vec![
                ("awe".to_string(), 0.8),
                ("unease".to_string(), 0.3),
                ("curiosity".to_string(), 0.6),
            ],
            tangibles: vec!["stone wall".to_string()],
            intangibles: vec!["ancient purpose".to_string()],
            aol_detections: vec![AOLDetection {
                content: "looks like a castle".to_string(),
                timestamp_ms: 500,
                flagged: true,
                anomaly_score: 0.8,
            }],
        };
        let emb = enc.encode(&session).unwrap();
        assert_eq!(emb.len(), 32);
        // Either approximately unit-norm or (degenerate case) near-zero.
        let norm: f32 = emb.iter().map(|x| x * x).sum::<f32>().sqrt();
        assert!((norm - 1.0).abs() < 0.1 || norm < 0.01);
    }
    #[test]
    fn test_aol_detection() {
        let enc = StageIVEncoder::new(&test_config());
        // Two windows (0.9, 0.95) sit above the 0.7 threshold.
        let window_rates = vec![0.1, 0.2, 0.9, 0.95, 0.3, 0.1];
        let hits = enc.detect_aol(&window_rates, 10.0);
        assert!(hits.len() >= 2);
        for hit in &hits {
            assert!(hit.anomaly_score > 0.0);
        }
    }
}

View File

@@ -0,0 +1,222 @@
//! Stage V: Interrogation via Differentiable Search with Soft Attention
//!
//! CRV Stage V involves probing the signal line by asking targeted questions
//! about specific aspects of the target, then cross-referencing results
//! across all accumulated data from Stages I-IV.
//!
//! # Architecture
//!
//! Uses `ruvector_gnn::search::differentiable_search` to find the most
//! relevant data entries for each probe query, with soft attention weights
//! providing a continuous similarity measure rather than hard thresholds.
//! This enables gradient-based refinement of probe queries.
use crate::error::{CrvError, CrvResult};
use crate::types::{CrossReference, CrvConfig, SignalLineProbe, StageVData};
use ruvector_gnn::search::{cosine_similarity, differentiable_search};
/// Stage V interrogation engine using differentiable search.
///
/// Stateless apart from its two configuration scalars, so `Clone` is cheap.
#[derive(Debug, Clone)]
pub struct StageVEngine {
    /// Embedding dimensionality.
    dim: usize,
    /// Temperature for differentiable search softmax.
    temperature: f32,
}
impl StageVEngine {
/// Create a new Stage V engine.
pub fn new(config: &CrvConfig) -> Self {
Self {
dim: config.dimensions,
temperature: config.search_temperature,
}
}
/// Probe the accumulated session embeddings with a query.
///
/// Performs differentiable search over the given candidate embeddings,
/// returning soft attention weights and top-k candidates.
pub fn probe(
&self,
query_embedding: &[f32],
candidates: &[Vec<f32>],
k: usize,
) -> CrvResult<SignalLineProbe> {
if candidates.is_empty() {
return Err(CrvError::EmptyInput(
"No candidates for probing".to_string(),
));
}
let (top_candidates, attention_weights) =
differentiable_search(query_embedding, candidates, k, self.temperature);
Ok(SignalLineProbe {
query: String::new(), // Caller sets the text
target_stage: 0, // Caller sets the stage
attention_weights,
top_candidates,
})
}
/// Cross-reference entries across stages to find correlations.
///
/// For each entry in `from_entries`, finds the most similar entries
/// in `to_entries` using cosine similarity, producing cross-references
/// above the given threshold.
pub fn cross_reference(
&self,
from_stage: u8,
from_entries: &[Vec<f32>],
to_stage: u8,
to_entries: &[Vec<f32>],
threshold: f32,
) -> Vec<CrossReference> {
let mut refs = Vec::new();
for (from_idx, from_emb) in from_entries.iter().enumerate() {
for (to_idx, to_emb) in to_entries.iter().enumerate() {
if from_emb.len() == to_emb.len() {
let score = cosine_similarity(from_emb, to_emb);
if score >= threshold {
refs.push(CrossReference {
from_stage,
from_entry: from_idx,
to_stage,
to_entry: to_idx,
score,
});
}
}
}
}
// Sort by score descending
refs.sort_by(|a, b| {
b.score
.partial_cmp(&a.score)
.unwrap_or(std::cmp::Ordering::Equal)
});
refs
}
/// Encode Stage V data into a combined interrogation embedding.
///
/// Aggregates the attention weights from all probes to produce
/// a unified view of which aspects of the target were most
/// responsive to interrogation.
pub fn encode(&self, data: &StageVData, all_embeddings: &[Vec<f32>]) -> CrvResult<Vec<f32>> {
if data.probes.is_empty() {
return Err(CrvError::EmptyInput("No probes in Stage V data".to_string()));
}
let mut embedding = vec![0.0f32; self.dim];
// Weight each candidate embedding by its attention weight across all probes
for probe in &data.probes {
for (&candidate_idx, &weight) in probe
.top_candidates
.iter()
.zip(probe.attention_weights.iter())
{
if candidate_idx < all_embeddings.len() {
let emb = &all_embeddings[candidate_idx];
for (i, &v) in emb.iter().enumerate() {
if i < self.dim {
embedding[i] += v * weight;
}
}
}
}
}
// Normalize by number of probes
let num_probes = data.probes.len() as f32;
for v in &mut embedding {
*v /= num_probes;
}
Ok(embedding)
}
/// Compute the interrogation signal strength for a given embedding.
///
/// The strength is the Euclidean (L2) norm of `embedding`; higher
/// values indicate more responsive signal line data. An empty
/// embedding yields `0.0`.
pub fn signal_strength(&self, embedding: &[f32]) -> f32 {
    // Return the expression directly instead of binding-then-returning
    // (clippy::let_and_return).
    embedding.iter().map(|x| x * x).sum::<f32>().sqrt()
}
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Small 8-dimensional config shared by all Stage V tests.
    fn test_config() -> CrvConfig {
        CrvConfig {
            dimensions: 8,
            search_temperature: 1.0,
            ..CrvConfig::default()
        }
    }

    #[test]
    fn test_engine_creation() {
        let engine = StageVEngine::new(&test_config());
        assert_eq!(engine.dim, 8);
    }

    #[test]
    fn test_probe() {
        let engine = StageVEngine::new(&test_config());
        let query = vec![1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0];
        let pool = vec![
            vec![1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], // exact match
            vec![0.5, 0.5, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], // partial overlap
            vec![0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], // orthogonal
        ];
        let result = engine.probe(&query, &pool, 2).unwrap();
        assert_eq!(result.top_candidates.len(), 2);
        assert_eq!(result.attention_weights.len(), 2);
        // The exact match must rank first.
        assert_eq!(result.top_candidates[0], 0);
    }

    #[test]
    fn test_cross_reference() {
        let engine = StageVEngine::new(&test_config());
        let sources = vec![
            vec![1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
            vec![0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
        ];
        let targets = vec![
            vec![0.9, 0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0], // close to sources[0]
            vec![0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0], // unrelated
        ];
        let refs = engine.cross_reference(1, &sources, 2, &targets, 0.5);
        assert!(!refs.is_empty());
        assert_eq!(refs[0].from_stage, 1);
        assert_eq!(refs[0].to_stage, 2);
        assert!(refs[0].score > 0.5);
    }

    #[test]
    fn test_empty_probe() {
        let engine = StageVEngine::new(&test_config());
        let query = vec![1.0; 8];
        let no_candidates: Vec<Vec<f32>> = vec![];
        assert!(engine.probe(&query, &no_candidates, 5).is_err());
    }
}

View File

@@ -0,0 +1,387 @@
//! Stage VI: Composite Modeling via MinCut Partitioning
//!
//! CRV Stage VI builds a composite 3D model from all accumulated session data.
//! The MinCut algorithm identifies natural cluster boundaries in the session
//! graph, separating distinct target aspects that emerged across stages.
//!
//! # Architecture
//!
//! All session embeddings form nodes in a weighted graph, with edge weights
//! derived from cosine similarity. MinCut partitioning finds the natural
//! separations between target aspects, producing distinct partitions that
//! represent different facets of the target.
use crate::error::{CrvError, CrvResult};
use crate::types::{CrvConfig, StageVIData, TargetPartition};
use ruvector_gnn::search::cosine_similarity;
use ruvector_mincut::prelude::*;
/// Stage VI composite modeler using MinCut partitioning.
///
/// Builds a cosine-similarity graph over all session embeddings and
/// splits it into target-aspect partitions (see `partition`).
#[derive(Debug, Clone)]
pub struct StageVIModeler {
    /// Embedding dimensionality (copied from `CrvConfig::dimensions`).
    dim: usize,
    /// Minimum cosine similarity required to create a graph edge.
    edge_threshold: f32,
}
impl StageVIModeler {
    /// Create a new Stage VI modeler from the session configuration.
    pub fn new(config: &CrvConfig) -> Self {
        Self {
            dim: config.dimensions,
            // Low threshold so weak relationships still contribute edges.
            edge_threshold: 0.2,
        }
    }

    /// Build a similarity graph from session embeddings.
    ///
    /// Each embedding becomes a vertex (1-based vertex ids). An edge is
    /// created between every pair of same-length, non-empty embeddings
    /// whose cosine similarity strictly exceeds `edge_threshold`; the
    /// edge weight is the similarity score itself.
    fn build_similarity_graph(&self, embeddings: &[Vec<f32>]) -> Vec<(u64, u64, f64)> {
        let n = embeddings.len();
        let mut edges = Vec::new();
        for i in 0..n {
            for j in (i + 1)..n {
                // Only compare embeddings of equal, non-zero length.
                if embeddings[i].len() == embeddings[j].len() && !embeddings[i].is_empty() {
                    let sim = cosine_similarity(&embeddings[i], &embeddings[j]);
                    if sim > self.edge_threshold {
                        edges.push((i as u64 + 1, j as u64 + 1, sim as f64));
                    }
                }
            }
        }
        edges
    }

    /// Compute the component-wise mean of a set of embeddings.
    ///
    /// Components beyond `self.dim` are ignored; embeddings shorter than
    /// `self.dim` contribute zero to the missing components. Returns the
    /// zero vector when `embeddings` is empty.
    fn compute_centroid(&self, embeddings: &[&[f32]]) -> Vec<f32> {
        if embeddings.is_empty() {
            return vec![0.0; self.dim];
        }
        let mut centroid = vec![0.0f32; self.dim];
        for emb in embeddings {
            for (i, &v) in emb.iter().enumerate() {
                if i < self.dim {
                    centroid[i] += v;
                }
            }
        }
        let n = embeddings.len() as f32;
        for v in &mut centroid {
            *v /= n;
        }
        centroid
    }

    /// Partition session embeddings into target aspects using MinCut.
    ///
    /// `stage_labels` pairs each embedding with its `(stage, entry_index)`
    /// provenance. Returns partition assignments, centroids, and
    /// per-partition confidence scores.
    ///
    /// Degenerate inputs bypass MinCut:
    /// - fewer than 2 embeddings -> a single "primary" partition;
    /// - no similarity edges -> one partition per embedding;
    /// - MinCut construction failure -> one unified "composite" partition.
    ///
    /// # Errors
    ///
    /// Currently infallible; the `CrvResult` return keeps the signature
    /// uniform with the other stage engines.
    pub fn partition(
        &self,
        embeddings: &[Vec<f32>],
        stage_labels: &[(u8, usize)], // (stage, entry_index) per embedding
    ) -> CrvResult<StageVIData> {
        if embeddings.len() < 2 {
            // With fewer than 2 embeddings, return a single partition.
            let centroid = if embeddings.is_empty() {
                vec![0.0; self.dim]
            } else {
                embeddings[0].clone()
            };
            return Ok(StageVIData {
                partitions: vec![TargetPartition {
                    label: "primary".to_string(),
                    member_entries: stage_labels.to_vec(),
                    centroid,
                    separation_strength: 0.0,
                }],
                composite_description: "Single-aspect target".to_string(),
                partition_confidence: vec![1.0],
            });
        }
        // Build similarity graph.
        let edges = self.build_similarity_graph(embeddings);
        if edges.is_empty() {
            // No significant similarities found - each embedding is its own partition.
            let partitions: Vec<TargetPartition> = embeddings
                .iter()
                .enumerate()
                .map(|(i, emb)| TargetPartition {
                    label: format!("aspect-{}", i),
                    member_entries: stage_labels.get(i).map(|&l| vec![l]).unwrap_or_default(),
                    centroid: emb.clone(),
                    separation_strength: 1.0,
                })
                .collect();
            let n = partitions.len();
            return Ok(StageVIData {
                partitions,
                composite_description: format!("{} disconnected aspects", n),
                partition_confidence: vec![0.5; n],
            });
        }
        // Remember the edge count before the edge list is moved into the
        // builder; this avoids cloning the whole Vec just to count later.
        let total_edges = edges.len() as f32;
        // Build MinCut structure.
        let mincut = match MinCutBuilder::new().exact().with_edges(edges).build() {
            Ok(mc) => mc,
            Err(_) => {
                // Fallback: a single unified partition.
                let centroid = self.compute_centroid(
                    &embeddings.iter().map(|e| e.as_slice()).collect::<Vec<_>>(),
                );
                return Ok(StageVIData {
                    partitions: vec![TargetPartition {
                        label: "composite".to_string(),
                        member_entries: stage_labels.to_vec(),
                        centroid,
                        separation_strength: 0.0,
                    }],
                    composite_description: "Unified composite model".to_string(),
                    partition_confidence: vec![0.8],
                });
            }
        };
        let cut_value = mincut.min_cut_value();
        let n = embeddings.len();
        // MinCut supplies the separation strength; the actual 2-way split
        // is a greedy bisection seeded by the most dissimilar pair.
        let (group_a, group_b) = self.bisect_by_similarity(embeddings);
        let centroid_a = self.compute_centroid(
            &group_a.iter().map(|&i| embeddings[i].as_slice()).collect::<Vec<_>>(),
        );
        let centroid_b = self.compute_centroid(
            &group_b.iter().map(|&i| embeddings[i].as_slice()).collect::<Vec<_>>(),
        );
        let members_a: Vec<(u8, usize)> = group_a
            .iter()
            .filter_map(|&i| stage_labels.get(i).copied())
            .collect();
        let members_b: Vec<(u8, usize)> = group_b
            .iter()
            .filter_map(|&i| stage_labels.get(i).copied())
            .collect();
        let partitions = vec![
            TargetPartition {
                label: "primary-aspect".to_string(),
                member_entries: members_a,
                centroid: centroid_a,
                separation_strength: cut_value as f32,
            },
            TargetPartition {
                label: "secondary-aspect".to_string(),
                member_entries: members_b,
                centroid: centroid_b,
                separation_strength: cut_value as f32,
            },
        ];
        // Confidence heuristic: cut value relative to the edge count,
        // capped at 1.0.
        let conf = if total_edges > 0.0 {
            (cut_value as f32 / total_edges).min(1.0)
        } else {
            0.5
        };
        Ok(StageVIData {
            partitions,
            composite_description: format!(
                "Bisected composite: {} embeddings, cut value {:.3}",
                n, cut_value
            ),
            partition_confidence: vec![conf, conf],
        })
    }

    /// Bisect embeddings into two groups by maximizing inter-group dissimilarity.
    ///
    /// Greedy approach: the two most dissimilar embeddings become seeds,
    /// then every other embedding joins the seed it is more similar to
    /// (ties go to the first seed).
    fn bisect_by_similarity(&self, embeddings: &[Vec<f32>]) -> (Vec<usize>, Vec<usize>) {
        let n = embeddings.len();
        if n <= 1 {
            return ((0..n).collect(), vec![]);
        }
        // Find the pair with the lowest cosine similarity.
        let mut min_sim = f32::MAX;
        let mut seed_a = 0;
        let mut seed_b = 1;
        for i in 0..n {
            for j in (i + 1)..n {
                if embeddings[i].len() == embeddings[j].len() && !embeddings[i].is_empty() {
                    let sim = cosine_similarity(&embeddings[i], &embeddings[j]);
                    if sim < min_sim {
                        min_sim = sim;
                        seed_a = i;
                        seed_b = j;
                    }
                }
            }
        }
        let mut group_a = vec![seed_a];
        let mut group_b = vec![seed_b];
        for i in 0..n {
            if i == seed_a || i == seed_b {
                continue;
            }
            // Embeddings incomparable with a seed score 0 against it.
            let sim_a = if embeddings[i].len() == embeddings[seed_a].len() {
                cosine_similarity(&embeddings[i], &embeddings[seed_a])
            } else {
                0.0
            };
            let sim_b = if embeddings[i].len() == embeddings[seed_b].len() {
                cosine_similarity(&embeddings[i], &embeddings[seed_b])
            } else {
                0.0
            };
            if sim_a >= sim_b {
                group_a.push(i);
            } else {
                group_b.push(i);
            }
        }
        (group_a, group_b)
    }

    /// Encode the Stage VI partition result into a single embedding.
    ///
    /// Produces a combination of partition centroids, each weighted by
    /// confidence times member count. If the total weight is ~0 (e.g.
    /// every partition is empty), the zero vector is returned.
    ///
    /// # Errors
    ///
    /// Returns [`CrvError::EmptyInput`] when `data` has no partitions.
    pub fn encode(&self, data: &StageVIData) -> CrvResult<Vec<f32>> {
        if data.partitions.is_empty() {
            return Err(CrvError::EmptyInput("No partitions".to_string()));
        }
        let mut embedding = vec![0.0f32; self.dim];
        let mut total_weight = 0.0f32;
        // zip truncates to the shorter of the two lists, so a partition
        // without a matching confidence entry is silently skipped.
        for (partition, &confidence) in
            data.partitions.iter().zip(data.partition_confidence.iter())
        {
            let weight = confidence * partition.member_entries.len() as f32;
            for (i, &v) in partition.centroid.iter().enumerate() {
                if i < self.dim {
                    embedding[i] += v * weight;
                }
            }
            total_weight += weight;
        }
        if total_weight > 1e-6 {
            for v in &mut embedding {
                *v /= total_weight;
            }
        }
        Ok(embedding)
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Small 8-dimensional config shared by all Stage VI tests.
    fn test_config() -> CrvConfig {
        CrvConfig {
            dimensions: 8,
            ..CrvConfig::default()
        }
    }

    #[test]
    fn test_modeler_creation() {
        let modeler = StageVIModeler::new(&test_config());
        assert_eq!(modeler.dim, 8);
    }

    #[test]
    fn test_partition_single() {
        let modeler = StageVIModeler::new(&test_config());
        let embeddings = vec![vec![1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]];
        let labels = vec![(1, 0)];
        // A lone embedding cannot be split further.
        let outcome = modeler.partition(&embeddings, &labels).unwrap();
        assert_eq!(outcome.partitions.len(), 1);
    }

    #[test]
    fn test_partition_two_clusters() {
        let modeler = StageVIModeler::new(&test_config());
        // Two clearly separated clusters of two embeddings each.
        let embeddings = vec![
            vec![1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
            vec![0.9, 0.1, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
            vec![0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0],
            vec![0.0, 0.0, 0.0, 0.0, 0.9, 0.1, 0.0, 0.0],
        ];
        let labels = vec![(1, 0), (2, 0), (3, 0), (4, 0)];
        let outcome = modeler.partition(&embeddings, &labels).unwrap();
        assert_eq!(outcome.partitions.len(), 2);
    }

    #[test]
    fn test_encode_partitions() {
        let modeler = StageVIModeler::new(&test_config());
        let data = StageVIData {
            partitions: vec![
                TargetPartition {
                    label: "a".to_string(),
                    member_entries: vec![(1, 0), (2, 0)],
                    centroid: vec![1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    separation_strength: 0.5,
                },
                TargetPartition {
                    label: "b".to_string(),
                    member_entries: vec![(3, 0)],
                    centroid: vec![0.0, 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    separation_strength: 0.5,
                },
            ],
            composite_description: "test".to_string(),
            partition_confidence: vec![0.8, 0.6],
        };
        let combined = modeler.encode(&data).unwrap();
        assert_eq!(combined.len(), 8);
    }
}

View File

@@ -0,0 +1,360 @@
//! Core types for the CRV (Coordinate Remote Viewing) protocol.
//!
//! Defines the data structures for the 6-stage CRV signal line methodology,
//! session management, and analytical overlay (AOL) detection.
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
/// Unique identifier for a CRV session.
pub type SessionId = String;

/// Unique identifier for a target coordinate (e.g. "1234-5678").
pub type TargetCoordinate = String;

/// Unique identifier for a stage data entry.
pub type EntryId = String;
/// Classification of gestalt primitives in Stage I.
///
/// These are the six canonical CRV gestalts; `GestaltType::index`
/// assigns each variant a fixed position matching this declaration
/// order, and `GestaltType::all` iterates them in the same order.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum GestaltType {
    /// Human-made structures, artifacts.
    Manmade,
    /// Organic, natural formations.
    Natural,
    /// Dynamic, kinetic signals.
    Movement,
    /// Thermal, electromagnetic, force.
    Energy,
    /// Aqueous, fluid, wet.
    Water,
    /// Solid, terrain, geological.
    Land,
}
impl GestaltType {
    /// Returns all gestalt types, in canonical order, for iteration.
    pub fn all() -> &'static [GestaltType] {
        const ALL: [GestaltType; 6] = [
            GestaltType::Manmade,
            GestaltType::Natural,
            GestaltType::Movement,
            GestaltType::Energy,
            GestaltType::Water,
            GestaltType::Land,
        ];
        &ALL
    }

    /// Returns the index of this gestalt type in the canonical ordering
    /// (the same ordering `GestaltType::all` yields).
    pub fn index(&self) -> usize {
        Self::all()
            .iter()
            .position(|g| g == self)
            .expect("all() enumerates every GestaltType variant")
    }
}
/// Stage I data: Ideogram traces and gestalt classifications.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StageIData {
    /// Raw ideogram stroke trace as a sequence of (x, y) coordinates.
    pub stroke: Vec<(f32, f32)>,
    /// First spontaneous descriptor word produced by the viewer.
    pub spontaneous_descriptor: String,
    /// Classified gestalt type for this ideogram.
    pub classification: GestaltType,
    /// Confidence in the classification (0.0 - 1.0).
    pub confidence: f32,
}

/// Sensory modality for Stage II data.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub enum SensoryModality {
    /// Surface textures (smooth, rough, grainy, etc.).
    Texture,
    /// Visual colors and patterns.
    Color,
    /// Thermal impressions (hot, cold, warm).
    Temperature,
    /// Auditory impressions.
    Sound,
    /// Olfactory impressions.
    Smell,
    /// Taste impressions.
    Taste,
    /// Size/scale impressions (large, small, vast).
    Dimension,
    /// Luminosity (bright, dark, glowing).
    Luminosity,
}

/// Stage II data: Sensory impressions.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StageIIData {
    /// Sensory impressions as modality-descriptor pairs.
    pub impressions: Vec<(SensoryModality, String)>,
    /// Raw sensory feature vector (encoded from descriptors);
    /// `None` when no encoding has been produced yet.
    pub feature_vector: Option<Vec<f32>>,
}
/// Stage III data: Dimensional and spatial relationships.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StageIIIData {
    /// Spatial sketch as a set of named geometric primitives.
    pub sketch_elements: Vec<SketchElement>,
    /// Spatial relationships between elements; `from`/`to` refer to
    /// `SketchElement::label` values.
    pub relationships: Vec<SpatialRelationship>,
}

/// A geometric element in a Stage III sketch.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SketchElement {
    /// Unique label for this element (referenced by relationships).
    pub label: String,
    /// Type of geometric primitive.
    pub kind: GeometricKind,
    /// Position in sketch space (x, y).
    pub position: (f32, f32),
    /// Optional size/scale; `None` when no scale was recorded.
    pub scale: Option<f32>,
}

/// Types of geometric primitives used in Stage III sketches.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum GeometricKind {
    /// A single point mark.
    Point,
    /// A straight line segment.
    Line,
    /// A curved stroke.
    Curve,
    /// A rectangular outline.
    Rectangle,
    /// A circular outline.
    Circle,
    /// A triangular outline.
    Triangle,
    /// A closed polygon with an arbitrary number of sides.
    Polygon,
    /// A freehand shape that fits no other category.
    Freeform,
}

/// Spatial relationship between two sketch elements.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SpatialRelationship {
    /// Source element label.
    pub from: String,
    /// Target element label.
    pub to: String,
    /// Relationship type.
    pub relation: SpatialRelationType,
    /// Strength of the relationship (0.0 - 1.0).
    pub strength: f32,
}

/// Types of spatial relationships between sketch elements.
/// Directional variants read source-to-target (`from` relative to `to`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize)]
pub enum SpatialRelationType {
    /// Elements sit next to each other.
    Adjacent,
    /// The source element contains the target.
    Contains,
    /// The source element is above the target.
    Above,
    /// The source element is below the target.
    Below,
    /// The source element is inside the target.
    Inside,
    /// The source element surrounds the target.
    Surrounding,
    /// Elements are connected to each other.
    Connected,
    /// Elements are distinctly separated.
    Separated,
}
/// Stage IV data: Emotional, aesthetic, and intangible impressions.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StageIVData {
    /// Emotional impact descriptors paired with their intensity.
    pub emotional_impact: Vec<(String, f32)>,
    /// Tangible object impressions.
    pub tangibles: Vec<String>,
    /// Intangible concept impressions (purpose, function, significance).
    pub intangibles: Vec<String>,
    /// Analytical overlay detections, each carrying its own timestamp.
    pub aol_detections: Vec<AOLDetection>,
}

/// An analytical overlay (AOL) detection event.
///
/// AOL occurs when the viewer's analytical mind attempts to assign
/// a known label/concept to incoming signal line data, potentially
/// contaminating the raw perception.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AOLDetection {
    /// The AOL content (what the viewer's mind jumped to).
    pub content: String,
    /// Timestamp within the session (milliseconds from start).
    pub timestamp_ms: u64,
    /// Whether it was flagged and set aside (an "AOL break").
    pub flagged: bool,
    /// Anomaly score from spike rate analysis (0.0 - 1.0).
    /// Higher scores indicate stronger AOL contamination.
    pub anomaly_score: f32,
}
/// Stage V data: Interrogation and cross-referencing results.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StageVData {
    /// Probe queries and their results.
    pub probes: Vec<SignalLineProbe>,
    /// Cross-references to data from earlier stages.
    pub cross_references: Vec<CrossReference>,
}

/// A signal line probe query.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SignalLineProbe {
    /// The question or aspect being probed.
    pub query: String,
    /// Stage being interrogated (1-6).
    pub target_stage: u8,
    /// Soft attention weights, aligned index-for-index with
    /// `top_candidates`.
    pub attention_weights: Vec<f32>,
    /// Top-k candidate indices from differentiable search.
    pub top_candidates: Vec<usize>,
}

/// A cross-reference between stage data entries.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CrossReference {
    /// Source stage number.
    pub from_stage: u8,
    /// Source entry index within the source stage.
    pub from_entry: usize,
    /// Target stage number.
    pub to_stage: u8,
    /// Target entry index within the target stage.
    pub to_entry: usize,
    /// Similarity/relevance score (cosine similarity).
    pub score: f32,
}
/// Stage VI data: Composite 3D model from accumulated session data.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StageVIData {
    /// Cluster partitions discovered by MinCut.
    pub partitions: Vec<TargetPartition>,
    /// Overall composite descriptor.
    pub composite_description: String,
    /// Confidence scores per partition, parallel to `partitions`.
    pub partition_confidence: Vec<f32>,
}

/// A partition of the target, representing a distinct aspect or component.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct TargetPartition {
    /// Human-readable label for this partition.
    pub label: String,
    /// Stage data entries belonging to this partition, as
    /// (stage, entry_index) pairs.
    pub member_entries: Vec<(u8, usize)>,
    /// Centroid embedding of this partition.
    pub centroid: Vec<f32>,
    /// MinCut value separating this partition from others.
    pub separation_strength: f32,
}
/// A complete CRV session entry stored in the database.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CrvSessionEntry {
    /// Session identifier.
    pub session_id: SessionId,
    /// Target coordinate.
    pub coordinate: TargetCoordinate,
    /// CRV stage (1-6).
    pub stage: u8,
    /// Embedding vector for this entry.
    pub embedding: Vec<f32>,
    /// Arbitrary JSON metadata attached to the entry.
    pub metadata: HashMap<String, serde_json::Value>,
    /// Timestamp in milliseconds.
    pub timestamp_ms: u64,
}

/// Configuration for CRV session processing.
///
/// Use `CrvConfig::default()` for baseline values and struct-update
/// syntax (`CrvConfig { dimensions: 8, ..CrvConfig::default() }`) to
/// override individual fields.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CrvConfig {
    /// Embedding dimensionality.
    pub dimensions: usize,
    /// Curvature for Poincare ball (Stage I). Positive value.
    pub curvature: f32,
    /// AOL anomaly detection threshold (Stage IV).
    pub aol_threshold: f32,
    /// SNN refractory period in ms (Stage IV).
    pub refractory_period_ms: f64,
    /// SNN time step in ms (Stage IV).
    pub snn_dt: f64,
    /// Differentiable search temperature (Stage V).
    pub search_temperature: f32,
    /// Convergence threshold for cross-session matching.
    pub convergence_threshold: f32,
}
impl Default for CrvConfig {
    /// Baseline configuration; see the field docs on `CrvConfig` for
    /// the meaning of each value.
    fn default() -> Self {
        Self {
            dimensions: 384,
            curvature: 1.0,
            aol_threshold: 0.7,
            refractory_period_ms: 50.0,
            snn_dt: 1.0,
            search_temperature: 1.0,
            convergence_threshold: 0.75,
        }
    }
}
/// Result of a convergence analysis across multiple sessions.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ConvergenceResult {
    /// Session pairs that converged.
    pub session_pairs: Vec<(SessionId, SessionId)>,
    /// Convergence scores, parallel to `session_pairs`.
    pub scores: Vec<f32>,
    /// Stages where convergence was strongest.
    pub convergent_stages: Vec<u8>,
    /// Merged embedding representing the consensus signal;
    /// `None` when no consensus embedding was produced.
    pub consensus_embedding: Option<Vec<f32>>,
}
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn test_gestalt_type_all() {
        // All six canonical gestalts must be enumerated.
        assert_eq!(GestaltType::all().len(), 6);
    }

    #[test]
    fn test_gestalt_type_index() {
        assert_eq!(GestaltType::Manmade.index(), 0);
        assert_eq!(GestaltType::Land.index(), 5);
    }

    #[test]
    fn test_default_config() {
        let cfg = CrvConfig::default();
        assert_eq!(cfg.dimensions, 384);
        assert_eq!(cfg.curvature, 1.0);
        assert_eq!(cfg.aol_threshold, 0.7);
    }

    #[test]
    fn test_session_entry_serialization() {
        let original = CrvSessionEntry {
            session_id: "sess-001".to_string(),
            coordinate: "1234-5678".to_string(),
            stage: 1,
            embedding: vec![0.1, 0.2, 0.3],
            metadata: HashMap::new(),
            timestamp_ms: 1000,
        };
        // Round-trip through JSON and verify key fields survive.
        let json = serde_json::to_string(&original).unwrap();
        let round_trip: CrvSessionEntry = serde_json::from_str(&json).unwrap();
        assert_eq!(round_trip.session_id, "sess-001");
        assert_eq!(round_trip.stage, 1);
    }
}