Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'
This commit is contained in:
42
vendor/ruvector/crates/rvlite/examples/dashboard/src/App.css
vendored
Normal file
42
vendor/ruvector/crates/rvlite/examples/dashboard/src/App.css
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
#root {
|
||||
max-width: 1280px;
|
||||
margin: 0 auto;
|
||||
padding: 2rem;
|
||||
text-align: center;
|
||||
}
|
||||
|
||||
.logo {
|
||||
height: 6em;
|
||||
padding: 1.5em;
|
||||
will-change: filter;
|
||||
transition: filter 300ms;
|
||||
}
|
||||
.logo:hover {
|
||||
filter: drop-shadow(0 0 2em #646cffaa);
|
||||
}
|
||||
.logo.react:hover {
|
||||
filter: drop-shadow(0 0 2em #61dafbaa);
|
||||
}
|
||||
|
||||
@keyframes logo-spin {
|
||||
from {
|
||||
transform: rotate(0deg);
|
||||
}
|
||||
to {
|
||||
transform: rotate(360deg);
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-reduced-motion: no-preference) {
|
||||
a:nth-of-type(2) .logo {
|
||||
animation: logo-spin infinite 20s linear;
|
||||
}
|
||||
}
|
||||
|
||||
.card {
|
||||
padding: 2em;
|
||||
}
|
||||
|
||||
.read-the-docs {
|
||||
color: #888;
|
||||
}
|
||||
4134
vendor/ruvector/crates/rvlite/examples/dashboard/src/App.tsx
vendored
Normal file
4134
vendor/ruvector/crates/rvlite/examples/dashboard/src/App.tsx
vendored
Normal file
File diff suppressed because it is too large
Load Diff
2798
vendor/ruvector/crates/rvlite/examples/dashboard/src/App.tsx.backup
vendored
Normal file
2798
vendor/ruvector/crates/rvlite/examples/dashboard/src/App.tsx.backup
vendored
Normal file
File diff suppressed because it is too large
Load Diff
124
vendor/ruvector/crates/rvlite/examples/dashboard/src/CODE_SNIPPETS.md
vendored
Normal file
124
vendor/ruvector/crates/rvlite/examples/dashboard/src/CODE_SNIPPETS.md
vendored
Normal file
@@ -0,0 +1,124 @@
|
||||
# Filter Builder Code Snippets
|
||||
|
||||
## Snippet 1: Import Statement (Line ~92)
|
||||
|
||||
```typescript
|
||||
import FilterBuilder from './FilterBuilder';
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Snippet 2: Helper Functions (Line ~545)
|
||||
|
||||
```typescript
|
||||
// Filter condition helpers
|
||||
const addFilterCondition = useCallback(() => {
|
||||
const newCondition: FilterCondition = {
|
||||
id: `condition_${Date.now()}`,
|
||||
field: '',
|
||||
operator: 'eq',
|
||||
value: '',
|
||||
};
|
||||
setFilterConditions(prev => [...prev, newCondition]);
|
||||
}, []);
|
||||
|
||||
const updateFilterCondition = useCallback((id: string, updates: Partial<FilterCondition>) => {
|
||||
setFilterConditions(prev =>
|
||||
prev.map(cond => cond.id === id ? { ...cond, ...updates } : cond)
|
||||
);
|
||||
}, []);
|
||||
|
||||
const removeFilterCondition = useCallback((id: string) => {
|
||||
setFilterConditions(prev => prev.filter(cond => cond.id !== id));
|
||||
}, []);
|
||||
|
||||
const conditionsToFilterJson = useCallback((conditions: FilterCondition[]): string => {
|
||||
if (conditions.length === 0) return '{}';
|
||||
|
||||
const filter: Record<string, any> = {};
|
||||
|
||||
conditions.forEach(cond => {
|
||||
if (!cond.field.trim()) return;
|
||||
|
||||
const fieldName = cond.field.trim();
|
||||
|
||||
switch (cond.operator) {
|
||||
case 'eq':
|
||||
filter[fieldName] = cond.value;
|
||||
break;
|
||||
case 'ne':
|
||||
filter[fieldName] = { $ne: cond.value };
|
||||
break;
|
||||
case 'gt':
|
||||
filter[fieldName] = { ...(filter[fieldName] || {}), $gt: cond.value };
|
||||
break;
|
||||
case 'lt':
|
||||
filter[fieldName] = { ...(filter[fieldName] || {}), $lt: cond.value };
|
||||
break;
|
||||
case 'gte':
|
||||
filter[fieldName] = { ...(filter[fieldName] || {}), $gte: cond.value };
|
||||
break;
|
||||
case 'lte':
|
||||
filter[fieldName] = { ...(filter[fieldName] || {}), $lte: cond.value };
|
||||
break;
|
||||
case 'contains':
|
||||
filter[fieldName] = { $contains: cond.value };
|
||||
break;
|
||||
case 'exists':
|
||||
filter[fieldName] = { $exists: cond.value === 'true' || cond.value === true };
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
return JSON.stringify(filter, null, 2);
|
||||
}, []);
|
||||
|
||||
// Update filterJson whenever conditions change
|
||||
useEffect(() => {
|
||||
if (useFilter && filterConditions.length > 0) {
|
||||
const jsonStr = conditionsToFilterJson(filterConditions);
|
||||
setFilterJson(jsonStr);
|
||||
}
|
||||
}, [filterConditions, useFilter, conditionsToFilterJson]);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Snippet 3: UI Replacement (Line ~1190)
|
||||
|
||||
```typescript
|
||||
{/* Filter option */}
|
||||
<div className="space-y-3">
|
||||
<Switch
|
||||
size="sm"
|
||||
isSelected={useFilter}
|
||||
onValueChange={setUseFilter}
|
||||
>
|
||||
Use metadata filter
|
||||
</Switch>
|
||||
|
||||
{useFilter && (
|
||||
<FilterBuilder
|
||||
conditions={filterConditions}
|
||||
onAddCondition={addFilterCondition}
|
||||
onUpdateCondition={updateFilterCondition}
|
||||
onRemoveCondition={removeFilterCondition}
|
||||
generatedJson={filterJson}
|
||||
showJson={showFilterJson}
|
||||
onToggleJson={() => setShowFilterJson(!showFilterJson)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Quick Reference
|
||||
|
||||
| Change | Location | Type | Lines |
|
||||
|--------|----------|------|-------|
|
||||
| Import | ~92 | Add | 1 |
|
||||
| Helpers | ~545 | Add | 75 |
|
||||
| UI | ~1190 | Replace | 20 |
|
||||
|
||||
Total changes: ~96 lines added/modified
|
||||
241
vendor/ruvector/crates/rvlite/examples/dashboard/src/FILTER_BUILDER_DEMO.md
vendored
Normal file
241
vendor/ruvector/crates/rvlite/examples/dashboard/src/FILTER_BUILDER_DEMO.md
vendored
Normal file
@@ -0,0 +1,241 @@
|
||||
# Filter Builder UI Demo
|
||||
|
||||
## Visual Preview
|
||||
|
||||
### Before (Current UI)
|
||||
```
|
||||
┌─────────────────────────────────────────────────────┐
|
||||
│ ☑ Use metadata filter │
|
||||
│ │
|
||||
│ ┌─────────────────────────────────────────────┐ │
|
||||
│ │ 🔍 {"category": "ML"} │ │
|
||||
│ └─────────────────────────────────────────────┘ │
|
||||
└─────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
### After (New Filter Builder UI)
|
||||
```
|
||||
┌────────────────────────────────────────────────────────────────┐
|
||||
│ ☑ Use metadata filter │
|
||||
│ │
|
||||
│ ┌──────────────────────────────────────────────────────────┐ │
|
||||
│ │ 🔍 Filter Builder [Show JSON] [+ Add] │ │
|
||||
│ ├──────────────────────────────────────────────────────────┤ │
|
||||
│ │ │ │
|
||||
│ │ ┌────────┐ ┌──────────┐ ┌────────┐ [🗑] │ │
|
||||
│ │ │category│ │Equals (=)│ │ ML │ │ │
|
||||
│ │ └────────┘ └──────────┘ └────────┘ │ │
|
||||
│ │ │ │
|
||||
│ │ AND ┌────────┐ ┌──────────┐ ┌────────┐ [🗑] │ │
|
||||
│ │ │ price │ │ < (<) │ │ 100 │ │ │
|
||||
│ │ └────────┘ └──────────┘ └────────┘ │ │
|
||||
│ │ │ │
|
||||
│ ├──────────────────────────────────────────────────────────┤ │
|
||||
│ │ Generated Filter JSON: │ │
|
||||
│ │ ┌────────────────────────────────────────────────────┐ │ │
|
||||
│ │ │ { │ │ │
|
||||
│ │ │ "category": "ML", │ │ │
|
||||
│ │ │ "price": { "$lt": 100 } │ │ │
|
||||
│ │ │ } │ │ │
|
||||
│ │ └────────────────────────────────────────────────────┘ │ │
|
||||
│ │ │ │
|
||||
│ │ All conditions are combined with AND logic. │ │
|
||||
│ └──────────────────────────────────────────────────────────┘ │
|
||||
└────────────────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
## UI Components
|
||||
|
||||
### Filter Condition Row
|
||||
Each condition has 4 components in a row:
|
||||
|
||||
```
|
||||
[AND] [Field Input] [Operator Select] [Value Input] [Delete Button]
|
||||
(1) (2) (3) (4) (5)
|
||||
```
|
||||
|
||||
1. **AND Label**: Shows for 2nd+ conditions
|
||||
2. **Field Input**: Text input for metadata field name
|
||||
3. **Operator Select**: Dropdown with options:
|
||||
- Equals (=)
|
||||
- Not Equals (≠)
|
||||
- Greater Than (>)
|
||||
- Less Than (<)
|
||||
- Greater or Equal (≥)
|
||||
- Less or Equal (≤)
|
||||
- Contains
|
||||
- Exists
|
||||
4. **Value Input**:
|
||||
- Text/number input for most operators
|
||||
- True/False dropdown for "Exists" operator
|
||||
5. **Delete Button**: Trash icon to remove condition
|
||||
|
||||
### Header Controls
|
||||
```
|
||||
┌──────────────────────────────────────────────────────┐
|
||||
│ 🔍 Filter Builder [Show JSON] [+ Add Condition] │
|
||||
└──────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
- **Title**: "Filter Builder" with filter icon
|
||||
- **Show/Hide JSON Button**: Toggle JSON preview
|
||||
- **Add Condition Button**: Add new filter condition
|
||||
|
||||
### JSON Preview (Collapsible)
|
||||
```
|
||||
┌────────────────────────────────────────┐
|
||||
│ Generated Filter JSON: │
|
||||
│ ┌────────────────────────────────────┐ │
|
||||
│ │ { │ │
|
||||
│ │ "category": "ML", │ │
|
||||
│ │ "price": { "$lt": 100 } │ │
|
||||
│ │ } │ │
|
||||
│ └────────────────────────────────────┘ │
|
||||
└────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
Read-only textarea with syntax-highlighted JSON
|
||||
|
||||
### Empty State
|
||||
```
|
||||
┌──────────────────────────────────────────────────────┐
|
||||
│ 🔍 Filter Builder [Show JSON] [+ Add Condition] │
|
||||
├──────────────────────────────────────────────────────┤
|
||||
│ │
|
||||
│ No filter conditions. │
|
||||
│ Click "Add Condition" to get started. │
|
||||
│ │
|
||||
└──────────────────────────────────────────────────────┘
|
||||
```
|
||||
|
||||
## Interaction Flow
|
||||
|
||||
### Step 1: Enable Filter
|
||||
User toggles "Use metadata filter" switch
|
||||
|
||||
### Step 2: Add Condition
|
||||
1. Click "+ Add Condition" button
|
||||
2. New row appears with empty fields
|
||||
|
||||
### Step 3: Configure Condition
|
||||
1. Type field name (e.g., "category")
|
||||
2. Select operator (e.g., "Equals (=)")
|
||||
3. Enter value (e.g., "ML")
|
||||
|
||||
### Step 4: View JSON (Optional)
|
||||
1. Click "Show JSON" button
|
||||
2. See generated filter in JSON format
|
||||
3. Verify the filter is correct
|
||||
|
||||
### Step 5: Add More Conditions (Optional)
|
||||
1. Click "+ Add Condition" again
|
||||
2. Configure second condition
|
||||
3. Both conditions combine with AND
|
||||
|
||||
### Step 6: Perform Search
|
||||
1. Enter search query
|
||||
2. Click search button
|
||||
3. Filter is automatically applied
|
||||
|
||||
### Step 7: Remove Condition (Optional)
|
||||
1. Click trash icon next to any condition
|
||||
2. Condition is removed
|
||||
3. Filter JSON updates automatically
|
||||
|
||||
## Example Usage Scenarios
|
||||
|
||||
### Scenario 1: Simple Category Filter
|
||||
```
|
||||
Goal: Find all ML-related vectors
|
||||
|
||||
Steps:
|
||||
1. Add condition: category = ML
|
||||
2. Click search
|
||||
|
||||
Result Filter:
|
||||
{
|
||||
"category": "ML"
|
||||
}
|
||||
```
|
||||
|
||||
### Scenario 2: Price Range Filter
|
||||
```
|
||||
Goal: Find products between $50 and $200
|
||||
|
||||
Steps:
|
||||
1. Add condition: price > 50
|
||||
2. Add condition: price < 200
|
||||
3. Click search
|
||||
|
||||
Result Filter:
|
||||
{
|
||||
"price": {
|
||||
"$gt": 50,
|
||||
"$lt": 200
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Scenario 3: Complex Multi-Field Filter
|
||||
```
|
||||
Goal: Find ML documents with "sample" tag and recent scores
|
||||
|
||||
Steps:
|
||||
1. Add condition: category = ML
|
||||
2. Add condition: tags Contains sample
|
||||
3. Add condition: score >= 0.8
|
||||
4. Click search
|
||||
|
||||
Result Filter:
|
||||
{
|
||||
"category": "ML",
|
||||
"tags": { "$contains": "sample" },
|
||||
"score": { "$gte": 0.8 }
|
||||
}
|
||||
```
|
||||
|
||||
### Scenario 4: Existence Check
|
||||
```
|
||||
Goal: Find vectors that have metadata field
|
||||
|
||||
Steps:
|
||||
1. Add condition: description Exists true
|
||||
2. Click search
|
||||
|
||||
Result Filter:
|
||||
{
|
||||
"description": { "$exists": true }
|
||||
}
|
||||
```
|
||||
|
||||
## Color Scheme (Dark Theme)
|
||||
|
||||
- **Background**: Dark gray (bg-gray-800/50)
|
||||
- **Borders**: Medium gray (border-gray-700)
|
||||
- **Text**: White
|
||||
- **Inputs**: Dark background (bg-gray-800/50)
|
||||
- **Primary Button**: Blue accent (color="primary")
|
||||
- **Danger Button**: Red (color="danger")
|
||||
- **JSON Text**: Green (text-green-400) for syntax highlighting
|
||||
|
||||
## Responsive Behavior
|
||||
|
||||
- Condition rows stack vertically
|
||||
- Each row remains horizontal with flex layout
|
||||
- Inputs resize proportionally:
|
||||
- Field: flex-1 (flexible width)
|
||||
- Operator: w-48 (fixed 192px)
|
||||
- Value: flex-1 (flexible width)
|
||||
- Delete: min-w-8 (32px square)
|
||||
|
||||
## Accessibility
|
||||
|
||||
- All inputs have proper labels
|
||||
- Keyboard navigation supported
|
||||
- Select dropdowns are keyboard-accessible
|
||||
- Delete buttons have aria-labels
|
||||
- Color contrast meets WCAG AA standards
|
||||
|
||||
---
|
||||
|
||||
This visual guide helps you understand what the Filter Builder will look like and how users will interact with it!
|
||||
200
vendor/ruvector/crates/rvlite/examples/dashboard/src/FilterBuilder.tsx
vendored
Normal file
200
vendor/ruvector/crates/rvlite/examples/dashboard/src/FilterBuilder.tsx
vendored
Normal file
@@ -0,0 +1,200 @@
|
||||
import { Button, Input, Select, SelectItem, Card, CardBody, Textarea } from '@heroui/react';
|
||||
import { Plus, Trash2, Code, Filter as FilterIcon } from 'lucide-react';
|
||||
|
||||
interface FilterCondition {
|
||||
id: string;
|
||||
field: string;
|
||||
operator: 'eq' | 'ne' | 'gt' | 'lt' | 'gte' | 'lte' | 'contains' | 'exists';
|
||||
value: string | number | boolean;
|
||||
}
|
||||
|
||||
interface FilterBuilderProps {
|
||||
conditions: FilterCondition[];
|
||||
onAddCondition: () => void;
|
||||
onUpdateCondition: (id: string, updates: Partial<FilterCondition>) => void;
|
||||
onRemoveCondition: (id: string) => void;
|
||||
generatedJson: string;
|
||||
showJson: boolean;
|
||||
onToggleJson: () => void;
|
||||
}
|
||||
|
||||
const OPERATORS = [
|
||||
{ key: 'eq', label: 'Equals (=)' },
|
||||
{ key: 'ne', label: 'Not Equals (≠)' },
|
||||
{ key: 'gt', label: 'Greater Than (>)' },
|
||||
{ key: 'lt', label: 'Less Than (<)' },
|
||||
{ key: 'gte', label: 'Greater or Equal (≥)' },
|
||||
{ key: 'lte', label: 'Less or Equal (≤)' },
|
||||
{ key: 'contains', label: 'Contains' },
|
||||
{ key: 'exists', label: 'Exists' },
|
||||
];
|
||||
|
||||
export default function FilterBuilder({
|
||||
conditions,
|
||||
onAddCondition,
|
||||
onUpdateCondition,
|
||||
onRemoveCondition,
|
||||
generatedJson,
|
||||
showJson,
|
||||
onToggleJson,
|
||||
}: FilterBuilderProps) {
|
||||
return (
|
||||
<Card className="bg-gray-800/50 border border-gray-700">
|
||||
<CardBody className="space-y-3">
|
||||
{/* Header */}
|
||||
<div className="flex items-center justify-between">
|
||||
<div className="flex items-center gap-2">
|
||||
<FilterIcon className="w-4 h-4 text-primary" />
|
||||
<span className="text-sm font-semibold">Filter Builder</span>
|
||||
</div>
|
||||
<div className="flex items-center gap-2">
|
||||
<Button
|
||||
size="sm"
|
||||
variant="flat"
|
||||
onPress={onToggleJson}
|
||||
startContent={<Code className="w-3 h-3" />}
|
||||
className="bg-gray-700/50 hover:bg-gray-700"
|
||||
>
|
||||
{showJson ? 'Hide' : 'Show'} JSON
|
||||
</Button>
|
||||
<Button
|
||||
size="sm"
|
||||
color="primary"
|
||||
variant="flat"
|
||||
onPress={onAddCondition}
|
||||
startContent={<Plus className="w-3 h-3" />}
|
||||
>
|
||||
Add Condition
|
||||
</Button>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
{/* Conditions */}
|
||||
{conditions.length === 0 ? (
|
||||
<div className="text-center py-4 text-gray-500 text-sm">
|
||||
No filter conditions. Click "Add Condition" to get started.
|
||||
</div>
|
||||
) : (
|
||||
<div className="space-y-2">
|
||||
{conditions.map((condition, index) => (
|
||||
<div key={condition.id} className="flex items-center gap-2">
|
||||
{/* AND label for subsequent conditions */}
|
||||
{index > 0 && (
|
||||
<div className="text-xs text-gray-500 font-semibold w-10">AND</div>
|
||||
)}
|
||||
{index === 0 && <div className="w-10" />}
|
||||
|
||||
{/* Field Input */}
|
||||
<Input
|
||||
size="sm"
|
||||
placeholder="field name"
|
||||
value={condition.field}
|
||||
onChange={(e) => onUpdateCondition(condition.id, { field: e.target.value })}
|
||||
classNames={{
|
||||
input: "bg-gray-800/50 text-white placeholder:text-gray-500",
|
||||
inputWrapper: "bg-gray-800/50 border-gray-600 hover:border-gray-500",
|
||||
}}
|
||||
className="flex-1"
|
||||
/>
|
||||
|
||||
{/* Operator Select */}
|
||||
<Select
|
||||
size="sm"
|
||||
placeholder="operator"
|
||||
selectedKeys={[condition.operator]}
|
||||
onChange={(e) => onUpdateCondition(condition.id, { operator: e.target.value as any })}
|
||||
classNames={{
|
||||
trigger: "bg-gray-800/50 border-gray-600 hover:border-gray-500",
|
||||
value: "text-white text-xs",
|
||||
}}
|
||||
className="w-48"
|
||||
>
|
||||
{OPERATORS.map((op) => (
|
||||
<SelectItem key={op.key}>
|
||||
{op.label}
|
||||
</SelectItem>
|
||||
))}
|
||||
</Select>
|
||||
|
||||
{/* Value Input */}
|
||||
{condition.operator === 'exists' ? (
|
||||
<Select
|
||||
size="sm"
|
||||
placeholder="value"
|
||||
selectedKeys={[String(condition.value)]}
|
||||
onChange={(e) => onUpdateCondition(condition.id, { value: e.target.value === 'true' })}
|
||||
classNames={{
|
||||
trigger: "bg-gray-800/50 border-gray-600 hover:border-gray-500",
|
||||
value: "text-white text-xs",
|
||||
}}
|
||||
className="flex-1"
|
||||
>
|
||||
<SelectItem key="true">True</SelectItem>
|
||||
<SelectItem key="false">False</SelectItem>
|
||||
</Select>
|
||||
) : (
|
||||
<Input
|
||||
size="sm"
|
||||
placeholder="value"
|
||||
value={String(condition.value)}
|
||||
onChange={(e) => {
|
||||
const val = e.target.value;
|
||||
// Try to parse as number for numeric operators
|
||||
if (['gt', 'lt', 'gte', 'lte'].includes(condition.operator)) {
|
||||
const num = parseFloat(val);
|
||||
onUpdateCondition(condition.id, { value: isNaN(num) ? val : num });
|
||||
} else {
|
||||
onUpdateCondition(condition.id, { value: val });
|
||||
}
|
||||
}}
|
||||
classNames={{
|
||||
input: "bg-gray-800/50 text-white placeholder:text-gray-500",
|
||||
inputWrapper: "bg-gray-800/50 border-gray-600 hover:border-gray-500",
|
||||
}}
|
||||
className="flex-1"
|
||||
/>
|
||||
)}
|
||||
|
||||
{/* Delete Button */}
|
||||
<Button
|
||||
isIconOnly
|
||||
size="sm"
|
||||
color="danger"
|
||||
variant="flat"
|
||||
onPress={() => onRemoveCondition(condition.id)}
|
||||
className="min-w-8"
|
||||
>
|
||||
<Trash2 className="w-3 h-3" />
|
||||
</Button>
|
||||
</div>
|
||||
))}
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Generated JSON Preview */}
|
||||
{showJson && (
|
||||
<div className="pt-2 border-t border-gray-700">
|
||||
<div className="text-xs text-gray-500 mb-1 font-semibold">Generated Filter JSON:</div>
|
||||
<Textarea
|
||||
value={generatedJson}
|
||||
readOnly
|
||||
minRows={3}
|
||||
maxRows={8}
|
||||
classNames={{
|
||||
input: "bg-gray-900 text-green-400 font-mono text-xs",
|
||||
inputWrapper: "bg-gray-900 border-gray-700",
|
||||
}}
|
||||
/>
|
||||
</div>
|
||||
)}
|
||||
|
||||
{/* Helper Text */}
|
||||
{conditions.length > 0 && (
|
||||
<div className="text-xs text-gray-500 pt-1">
|
||||
All conditions are combined with AND logic. Use the generated JSON for your vector search filter.
|
||||
</div>
|
||||
)}
|
||||
</CardBody>
|
||||
</Card>
|
||||
);
|
||||
}
|
||||
278
vendor/ruvector/crates/rvlite/examples/dashboard/src/IMPLEMENTATION_GUIDE.md
vendored
Normal file
278
vendor/ruvector/crates/rvlite/examples/dashboard/src/IMPLEMENTATION_GUIDE.md
vendored
Normal file
@@ -0,0 +1,278 @@
|
||||
# Advanced Filter Builder - Implementation Guide
|
||||
|
||||
This guide provides step-by-step instructions to integrate the Advanced Filter Builder into the RvLite Dashboard.
|
||||
|
||||
## Prerequisites
|
||||
|
||||
The following files have been created and are ready to use:
|
||||
- `/workspaces/ruvector/crates/rvlite/examples/dashboard/src/FilterBuilder.tsx` ✓
|
||||
|
||||
## Integration Steps
|
||||
|
||||
### Step 1: Add Import Statement
|
||||
|
||||
**Location:** Line ~92 (after `import useLearning from './hooks/useLearning';`)
|
||||
|
||||
Add this line:
|
||||
```typescript
|
||||
import FilterBuilder from './FilterBuilder';
|
||||
```
|
||||
|
||||
**Full context:**
|
||||
```typescript
|
||||
import useRvLite, { type SearchResult, type CypherResult, type SparqlResult, type SqlResult, type VectorEntry } from './hooks/useRvLite';
|
||||
import useLearning from './hooks/useLearning';
|
||||
import FilterBuilder from './FilterBuilder'; // <-- ADD THIS LINE
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Step 2: Add Filter Helper Functions
|
||||
|
||||
**Location:** Line ~545 (right after the `addLog` callback, before `hasInitialized`)
|
||||
|
||||
Add this code:
|
||||
|
||||
```typescript
|
||||
// Filter condition helpers
|
||||
const addFilterCondition = useCallback(() => {
|
||||
const newCondition: FilterCondition = {
|
||||
id: `condition_${Date.now()}`,
|
||||
field: '',
|
||||
operator: 'eq',
|
||||
value: '',
|
||||
};
|
||||
setFilterConditions(prev => [...prev, newCondition]);
|
||||
}, []);
|
||||
|
||||
const updateFilterCondition = useCallback((id: string, updates: Partial<FilterCondition>) => {
|
||||
setFilterConditions(prev =>
|
||||
prev.map(cond => cond.id === id ? { ...cond, ...updates } : cond)
|
||||
);
|
||||
}, []);
|
||||
|
||||
const removeFilterCondition = useCallback((id: string) => {
|
||||
setFilterConditions(prev => prev.filter(cond => cond.id !== id));
|
||||
}, []);
|
||||
|
||||
const conditionsToFilterJson = useCallback((conditions: FilterCondition[]): string => {
|
||||
if (conditions.length === 0) return '{}';
|
||||
|
||||
const filter: Record<string, any> = {};
|
||||
|
||||
conditions.forEach(cond => {
|
||||
if (!cond.field.trim()) return;
|
||||
|
||||
const fieldName = cond.field.trim();
|
||||
|
||||
switch (cond.operator) {
|
||||
case 'eq':
|
||||
filter[fieldName] = cond.value;
|
||||
break;
|
||||
case 'ne':
|
||||
filter[fieldName] = { $ne: cond.value };
|
||||
break;
|
||||
case 'gt':
|
||||
filter[fieldName] = { ...(filter[fieldName] || {}), $gt: cond.value };
|
||||
break;
|
||||
case 'lt':
|
||||
filter[fieldName] = { ...(filter[fieldName] || {}), $lt: cond.value };
|
||||
break;
|
||||
case 'gte':
|
||||
filter[fieldName] = { ...(filter[fieldName] || {}), $gte: cond.value };
|
||||
break;
|
||||
case 'lte':
|
||||
filter[fieldName] = { ...(filter[fieldName] || {}), $lte: cond.value };
|
||||
break;
|
||||
case 'contains':
|
||||
filter[fieldName] = { $contains: cond.value };
|
||||
break;
|
||||
case 'exists':
|
||||
filter[fieldName] = { $exists: cond.value === 'true' || cond.value === true };
|
||||
break;
|
||||
}
|
||||
});
|
||||
|
||||
return JSON.stringify(filter, null, 2);
|
||||
}, []);
|
||||
|
||||
// Update filterJson whenever conditions change
|
||||
useEffect(() => {
|
||||
if (useFilter && filterConditions.length > 0) {
|
||||
const jsonStr = conditionsToFilterJson(filterConditions);
|
||||
setFilterJson(jsonStr);
|
||||
}
|
||||
}, [filterConditions, useFilter, conditionsToFilterJson]);
|
||||
```
|
||||
|
||||
**Full context:**
|
||||
```typescript
|
||||
// Logging
|
||||
const addLog = useCallback((type: LogEntry['type'], message: string) => {
|
||||
const timestamp = new Date().toLocaleTimeString();
|
||||
setLogs(prev => [...prev.slice(-99), { timestamp, type, message }]);
|
||||
}, []);
|
||||
|
||||
// Filter condition helpers <-- START ADDING HERE
|
||||
const addFilterCondition = useCallback(() => {
|
||||
// ... (code above)
|
||||
}, []);
|
||||
// ... (rest of the helper functions)
|
||||
|
||||
useEffect(() => {
|
||||
// ... (update filterJson effect)
|
||||
}, [filterConditions, useFilter, conditionsToFilterJson]);
|
||||
<-- END HERE
|
||||
|
||||
// Track if we've initialized to prevent re-running effects
|
||||
const hasInitialized = useRef(false);
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
### Step 3: Replace the Filter UI Section
|
||||
|
||||
**Location:** Around line 1190-1213
|
||||
|
||||
**FIND THIS CODE:**
|
||||
```typescript
|
||||
{/* Filter option */}
|
||||
<div className="flex items-center gap-4">
|
||||
<Switch
|
||||
size="sm"
|
||||
isSelected={useFilter}
|
||||
onValueChange={setUseFilter}
|
||||
>
|
||||
Use metadata filter
|
||||
</Switch>
|
||||
{useFilter && (
|
||||
<Input
|
||||
size="sm"
|
||||
placeholder='{"category": "ML"}'
|
||||
value={filterJson}
|
||||
onChange={(e) => setFilterJson(e.target.value)}
|
||||
startContent={<Filter className="w-4 h-4 text-gray-400" />}
|
||||
classNames={{
|
||||
input: "bg-gray-800/50 text-white placeholder:text-gray-500 font-mono text-xs",
|
||||
inputWrapper: "bg-gray-800/50 border-gray-600 hover:border-gray-500",
|
||||
}}
|
||||
className="flex-1"
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
```
|
||||
|
||||
**REPLACE WITH:**
|
||||
```typescript
|
||||
{/* Filter option */}
|
||||
<div className="space-y-3">
|
||||
<Switch
|
||||
size="sm"
|
||||
isSelected={useFilter}
|
||||
onValueChange={setUseFilter}
|
||||
>
|
||||
Use metadata filter
|
||||
</Switch>
|
||||
|
||||
{useFilter && (
|
||||
<FilterBuilder
|
||||
conditions={filterConditions}
|
||||
onAddCondition={addFilterCondition}
|
||||
onUpdateCondition={updateFilterCondition}
|
||||
onRemoveCondition={removeFilterCondition}
|
||||
generatedJson={filterJson}
|
||||
showJson={showFilterJson}
|
||||
onToggleJson={() => setShowFilterJson(!showFilterJson)}
|
||||
/>
|
||||
)}
|
||||
</div>
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## Verification
|
||||
|
||||
After making the changes:
|
||||
|
||||
1. **Check for TypeScript errors:**
|
||||
```bash
|
||||
npm run typecheck
|
||||
```
|
||||
|
||||
2. **Start the dev server:**
|
||||
```bash
|
||||
npm run dev
|
||||
```
|
||||
|
||||
3. **Test the Filter Builder:**
|
||||
- Navigate to the Vector tab
|
||||
- Enable "Use metadata filter" switch
|
||||
- Click "Add Condition"
|
||||
- Add a filter: Field=`category`, Operator=`Equals`, Value=`ML`
|
||||
- Click "Show JSON" to verify the generated filter
|
||||
- Perform a search
|
||||
|
||||
## Expected Behavior
|
||||
|
||||
1. When you toggle "Use metadata filter" ON, the Filter Builder appears
|
||||
2. Click "Add Condition" to add filter rows
|
||||
3. Each row has:
|
||||
- Field input (for metadata field name)
|
||||
- Operator dropdown (equals, not equals, greater than, etc.)
|
||||
- Value input (auto-detects number vs string)
|
||||
- Delete button (trash icon)
|
||||
4. Click "Show JSON" to see the generated filter JSON
|
||||
5. Multiple conditions combine with AND logic
|
||||
6. The filter is automatically applied when performing vector searches
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
### Issue: TypeScript errors about FilterCondition
|
||||
**Solution:** The `FilterCondition` interface is already defined in App.tsx at line 100-105. No action needed.
|
||||
|
||||
### Issue: Import error for FilterBuilder
|
||||
**Solution:** Verify that `/workspaces/ruvector/crates/rvlite/examples/dashboard/src/FilterBuilder.tsx` exists.
|
||||
|
||||
### Issue: Filter doesn't apply to searches
|
||||
**Solution:** Check the browser console for errors. Verify that `filterJson` state is being updated when conditions change.
|
||||
|
||||
### Issue: Can't find the UI section to replace
|
||||
**Solution:** Search for the text "Use metadata filter" in App.tsx to find the exact location.
|
||||
|
||||
## Example Filters
|
||||
|
||||
### Example 1: Simple Equality
|
||||
```
|
||||
Field: category
|
||||
Operator: Equals (=)
|
||||
Value: ML
|
||||
```
|
||||
Generates: `{ "category": "ML" }`
|
||||
|
||||
### Example 2: Numeric Range
|
||||
```
|
||||
Condition 1: Field=price, Operator=Greater Than, Value=50
|
||||
Condition 2: Field=price, Operator=Less Than, Value=100
|
||||
```
|
||||
Generates: `{ "price": { "$gt": 50, "$lt": 100 } }`
|
||||
|
||||
### Example 3: Multiple Fields
|
||||
```
|
||||
Condition 1: Field=category, Operator=Equals, Value=ML
|
||||
Condition 2: Field=tags, Operator=Contains, Value=sample
|
||||
```
|
||||
Generates: `{ "category": "ML", "tags": { "$contains": "sample" } }`
|
||||
|
||||
---
|
||||
|
||||
## Summary
|
||||
|
||||
You need to make 3 changes to `src/App.tsx`:
|
||||
|
||||
1. ✓ Add import for FilterBuilder (line ~92)
|
||||
2. ✓ Add filter helper functions (line ~545)
|
||||
3. ✓ Replace filter UI section (line ~1190)
|
||||
|
||||
State variables (`filterConditions`, `showFilterJson`) are already defined (lines 531-534).
|
||||
|
||||
The FilterBuilder component is already created at `src/FilterBuilder.tsx`.
|
||||
1
vendor/ruvector/crates/rvlite/examples/dashboard/src/assets/react.svg
vendored
Normal file
1
vendor/ruvector/crates/rvlite/examples/dashboard/src/assets/react.svg
vendored
Normal file
@@ -0,0 +1 @@
|
||||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" aria-hidden="true" role="img" class="iconify iconify--logos" width="35.93" height="32" preserveAspectRatio="xMidYMid meet" viewBox="0 0 256 228"><path fill="#00D8FF" d="M210.483 73.824a171.49 171.49 0 0 0-8.24-2.597c.465-1.9.893-3.777 1.273-5.621c6.238-30.281 2.16-54.676-11.769-62.708c-13.355-7.7-35.196.329-57.254 19.526a171.23 171.23 0 0 0-6.375 5.848a155.866 155.866 0 0 0-4.241-3.917C100.759 3.829 77.587-4.822 63.673 3.233C50.33 10.957 46.379 33.89 51.995 62.588a170.974 170.974 0 0 0 1.892 8.48c-3.28.932-6.445 1.924-9.474 2.98C17.309 83.498 0 98.307 0 113.668c0 15.865 18.582 31.778 46.812 41.427a145.52 145.52 0 0 0 6.921 2.165a167.467 167.467 0 0 0-2.01 9.138c-5.354 28.2-1.173 50.591 12.134 58.266c13.744 7.926 36.812-.22 59.273-19.855a145.567 145.567 0 0 0 5.342-4.923a168.064 168.064 0 0 0 6.92 6.314c21.758 18.722 43.246 26.282 56.54 18.586c13.731-7.949 18.194-32.003 12.4-61.268a145.016 145.016 0 0 0-1.535-6.842c1.62-.48 3.21-.974 4.76-1.488c29.348-9.723 48.443-25.443 48.443-41.52c0-15.417-17.868-30.326-45.517-39.844Zm-6.365 70.984c-1.4.463-2.836.91-4.3 1.345c-3.24-10.257-7.612-21.163-12.963-32.432c5.106-11 9.31-21.767 12.459-31.957c2.619.758 5.16 1.557 7.61 2.4c23.69 8.156 38.14 20.213 38.14 29.504c0 9.896-15.606 22.743-40.946 31.14Zm-10.514 20.834c2.562 12.94 2.927 24.64 1.23 33.787c-1.524 8.219-4.59 13.698-8.382 15.893c-8.067 4.67-25.32-1.4-43.927-17.412a156.726 156.726 0 0 1-6.437-5.87c7.214-7.889 14.423-17.06 21.459-27.246c12.376-1.098 24.068-2.894 34.671-5.345a134.17 134.17 0 0 1 1.386 6.193ZM87.276 214.515c-7.882 2.783-14.16 2.863-17.955.675c-8.075-4.657-11.432-22.636-6.853-46.752a156.923 156.923 0 0 1 1.869-8.499c10.486 2.32 22.093 3.988 34.498 4.994c7.084 9.967 14.501 19.128 21.976 27.15a134.668 134.668 0 0 1-4.877 4.492c-9.933 8.682-19.886 14.842-28.658 17.94ZM50.35 144.747c-12.483-4.267-22.792-9.812-29.858-15.863c-6.35-5.437-9.555-10.836-9.555-15.216c0-9.322 
13.897-21.212 37.076-29.293c2.813-.98 5.757-1.905 8.812-2.773c3.204 10.42 7.406 21.315 12.477 32.332c-5.137 11.18-9.399 22.249-12.634 32.792a134.718 134.718 0 0 1-6.318-1.979Zm12.378-84.26c-4.811-24.587-1.616-43.134 6.425-47.789c8.564-4.958 27.502 2.111 47.463 19.835a144.318 144.318 0 0 1 3.841 3.545c-7.438 7.987-14.787 17.08-21.808 26.988c-12.04 1.116-23.565 2.908-34.161 5.309a160.342 160.342 0 0 1-1.76-7.887Zm110.427 27.268a347.8 347.8 0 0 0-7.785-12.803c8.168 1.033 15.994 2.404 23.343 4.08c-2.206 7.072-4.956 14.465-8.193 22.045a381.151 381.151 0 0 0-7.365-13.322Zm-45.032-43.861c5.044 5.465 10.096 11.566 15.065 18.186a322.04 322.04 0 0 0-30.257-.006c4.974-6.559 10.069-12.652 15.192-18.18ZM82.802 87.83a323.167 323.167 0 0 0-7.227 13.238c-3.184-7.553-5.909-14.98-8.134-22.152c7.304-1.634 15.093-2.97 23.209-3.984a321.524 321.524 0 0 0-7.848 12.897Zm8.081 65.352c-8.385-.936-16.291-2.203-23.593-3.793c2.26-7.3 5.045-14.885 8.298-22.6a321.187 321.187 0 0 0 7.257 13.246c2.594 4.48 5.28 8.868 8.038 13.147Zm37.542 31.03c-5.184-5.592-10.354-11.779-15.403-18.433c4.902.192 9.899.29 14.978.29c5.218 0 10.376-.117 15.453-.343c-4.985 6.774-10.018 12.97-15.028 18.486Zm52.198-57.817c3.422 7.8 6.306 15.345 8.596 22.52c-7.422 1.694-15.436 3.058-23.88 4.071a382.417 382.417 0 0 0 7.859-13.026a347.403 347.403 0 0 0 7.425-13.565Zm-16.898 8.101a358.557 358.557 0 0 1-12.281 19.815a329.4 329.4 0 0 1-23.444.823c-7.967 0-15.716-.248-23.178-.732a310.202 310.202 0 0 1-12.513-19.846h.001a307.41 307.41 0 0 1-10.923-20.627a310.278 310.278 0 0 1 10.89-20.637l-.001.001a307.318 307.318 0 0 1 12.413-19.761c7.613-.576 15.42-.876 23.31-.876H128c7.926 0 15.743.303 23.354.883a329.357 329.357 0 0 1 12.335 19.695a358.489 358.489 0 0 1 11.036 20.54a329.472 329.472 0 0 1-11 20.722Zm22.56-122.124c8.572 4.944 11.906 24.881 6.52 51.026c-.344 1.668-.73 3.367-1.15 5.09c-10.622-2.452-22.155-4.275-34.23-5.408c-7.034-10.017-14.323-19.124-21.64-27.008a160.789 160.789 0 0 1 5.888-5.4c18.9-16.447 36.564-22.941 
44.612-18.3ZM128 90.808c12.625 0 22.86 10.235 22.86 22.86s-10.235 22.86-22.86 22.86s-22.86-10.235-22.86-22.86s10.235-22.86 22.86-22.86Z"></path></svg>
|
||||
|
After Width: | Height: | Size: 4.0 KiB |
237
vendor/ruvector/crates/rvlite/examples/dashboard/src/components/GraphVisualization.tsx
vendored
Normal file
237
vendor/ruvector/crates/rvlite/examples/dashboard/src/components/GraphVisualization.tsx
vendored
Normal file
@@ -0,0 +1,237 @@
|
||||
import { useState } from 'react';
|
||||
import { Card, CardBody } from '@heroui/react';
|
||||
import { CircleDot, Link2 } from 'lucide-react';
|
||||
|
||||
/**
 * Props for the GraphVisualization component.
 *
 * Mirrors a property-graph result set: nodes carry labels plus arbitrary
 * properties; relationships are directed edges that reference node ids
 * via `start` and `end`.
 */
interface GraphVisualizationProps {
  nodes: Array<{
    id: string; // unique node identifier, referenced by relationship start/end
    labels: string[]; // e.g. ['Person'] — the first label drives node colour and caption
    properties: Record<string, unknown>; // free-form key/value properties shown in the tooltip
  }>;
  relationships: Array<{
    id: string; // unique relationship identifier
    type: string; // relationship type, rendered as the edge label
    start: string; // id of the source node
    end: string; // id of the target node
    properties: Record<string, unknown>; // free-form properties shown in the tooltip
  }>;
}
|
||||
|
||||
/**
 * Renders a small property graph as an inline SVG.
 *
 * Nodes are placed on a circle (simple circular layout), relationships are
 * drawn as arrows between node centres, and hovering a node or edge shows a
 * tooltip card with its labels/type and properties. A colour legend for the
 * node labels present in the data is rendered below the SVG.
 */
export function GraphVisualization({ nodes, relationships }: GraphVisualizationProps) {
  // Ids of the currently hovered node / relationship (null = nothing hovered).
  const [hoveredNode, setHoveredNode] = useState<string | null>(null);
  const [hoveredRel, setHoveredRel] = useState<string | null>(null);

  // Simple circular layout: distribute all nodes evenly on a fixed circle.
  const layoutNodes = () => {
    const radius = 150;
    const centerX = 300;
    const centerY = 200;
    // Math.max guards against division by zero when there are no nodes.
    const angleStep = (2 * Math.PI) / Math.max(nodes.length, 1);

    return nodes.map((node, index) => {
      const angle = index * angleStep;
      return {
        ...node,
        x: centerX + radius * Math.cos(angle),
        y: centerY + radius * Math.sin(angle),
      };
    });
  };

  const layoutedNodes = layoutNodes();
  // Lookup table id -> {x, y} so edges can find their endpoint coordinates.
  const nodePositions = new Map(layoutedNodes.map(n => [n.id, { x: n.x, y: n.y }]));

  // Color palette for node labels (fallback grey for unknown labels below).
  const labelColors: Record<string, string> = {
    Person: '#00e68a',
    Movie: '#7c3aed',
    Actor: '#ff6b9d',
    Director: '#fbbf24',
    City: '#3b82f6',
    Country: '#10b981',
  };

  // Colour is chosen from the node's FIRST label; grey when unlabelled/unknown.
  const getNodeColor = (labels: string[]) => {
    if (labels.length === 0) return '#6b7280';
    return labelColors[labels[0]] || '#6b7280';
  };

  // Resolve hovered ids back to their data for the tooltip cards.
  const hoveredNodeData = hoveredNode ? nodes.find(n => n.id === hoveredNode) : null;
  const hoveredRelData = hoveredRel ? relationships.find(r => r.id === hoveredRel) : null;

  return (
    <div className="relative w-full h-full">
      <svg width="100%" height="400" className="bg-gray-950/50 rounded-lg">
        {/* Relationships (lines) — drawn first so nodes paint on top of them */}
        {relationships.map(rel => {
          const start = nodePositions.get(rel.start);
          const end = nodePositions.get(rel.end);
          // Skip edges whose endpoints are not in the node set.
          if (!start || !end) return null;

          const isHovered = hoveredRel === rel.id;
          const midX = (start.x + end.x) / 2;
          const midY = (start.y + end.y) / 2;

          return (
            <g key={rel.id}>
              {/* Line */}
              <line
                x1={start.x}
                y1={start.y}
                x2={end.x}
                y2={end.y}
                stroke={isHovered ? '#00e68a' : '#4b5563'}
                strokeWidth={isHovered ? 3 : 2}
                markerEnd="url(#arrowhead)"
                className="cursor-pointer transition-all"
                onMouseEnter={() => setHoveredRel(rel.id)}
                onMouseLeave={() => setHoveredRel(null)}
              />
              {/* Relationship label rendered at the edge midpoint */}
              <text
                x={midX}
                y={midY - 5}
                fill={isHovered ? '#00e68a' : '#9ca3af'}
                fontSize="10"
                textAnchor="middle"
                className="pointer-events-none select-none"
              >
                {rel.type}
              </text>
            </g>
          );
        })}

        {/* Arrow marker definition referenced by markerEnd above */}
        <defs>
          <marker
            id="arrowhead"
            markerWidth="10"
            markerHeight="10"
            refX="8"
            refY="3"
            orient="auto"
          >
            <polygon points="0 0, 10 3, 0 6" fill="#4b5563" />
          </marker>
        </defs>

        {/* Nodes */}
        {layoutedNodes.map(node => {
          const isHovered = hoveredNode === node.id;
          const color = getNodeColor(node.labels);
          const label = node.labels[0] || 'Node';
          // Caption preference: name, then title, then the raw id.
          const nameProperty = node.properties.name || node.properties.title || node.id;

          return (
            <g key={node.id}>
              {/* Node circle (grows slightly on hover) */}
              <circle
                cx={node.x}
                cy={node.y}
                r={isHovered ? 25 : 20}
                fill={color}
                fillOpacity={0.2}
                stroke={color}
                strokeWidth={isHovered ? 3 : 2}
                className="cursor-pointer transition-all"
                onMouseEnter={() => setHoveredNode(node.id)}
                onMouseLeave={() => setHoveredNode(null)}
              />
              {/* Node label (first label) below the circle */}
              <text
                x={node.x}
                y={node.y + 35}
                fill="#e5e7eb"
                fontSize="11"
                fontWeight="600"
                textAnchor="middle"
                className="pointer-events-none select-none"
              >
                {label}
              </text>
              {/* Node name/title, truncated to 15 characters */}
              <text
                x={node.x}
                y={node.y + 48}
                fill="#9ca3af"
                fontSize="9"
                textAnchor="middle"
                className="pointer-events-none select-none"
              >
                {String(nameProperty).substring(0, 15)}
              </text>
            </g>
          );
        })}
      </svg>

      {/* Tooltip for hovered node */}
      {hoveredNodeData && (
        <Card className="absolute top-2 right-2 bg-gray-800 border border-gray-700 max-w-xs z-10">
          <CardBody className="p-3">
            <div className="space-y-2">
              <div className="flex items-center gap-2">
                <CircleDot className="w-4 h-4" style={{ color: getNodeColor(hoveredNodeData.labels) }} />
                <span className="font-semibold text-sm">{hoveredNodeData.labels.join(', ') || 'Node'}</span>
              </div>
              <div className="text-xs text-gray-400">ID: {hoveredNodeData.id}</div>
              {Object.keys(hoveredNodeData.properties).length > 0 && (
                <div className="mt-2 space-y-1">
                  <div className="text-xs font-semibold text-gray-300">Properties:</div>
                  {Object.entries(hoveredNodeData.properties).map(([key, value]) => (
                    <div key={key} className="text-xs text-gray-400 flex gap-2">
                      <span className="font-mono text-cyan-400">{key}:</span>
                      <span className="truncate">{String(value)}</span>
                    </div>
                  ))}
                </div>
              )}
            </div>
          </CardBody>
        </Card>
      )}

      {/* Tooltip for hovered relationship (node tooltip takes precedence) */}
      {hoveredRelData && !hoveredNodeData && (
        <Card className="absolute top-2 right-2 bg-gray-800 border border-gray-700 max-w-xs z-10">
          <CardBody className="p-3">
            <div className="space-y-2">
              <div className="flex items-center gap-2">
                <Link2 className="w-4 h-4 text-cyan-400" />
                <span className="font-semibold text-sm">{hoveredRelData.type}</span>
              </div>
              <div className="text-xs text-gray-400">
                {hoveredRelData.start} → {hoveredRelData.end}
              </div>
              {Object.keys(hoveredRelData.properties).length > 0 && (
                <div className="mt-2 space-y-1">
                  <div className="text-xs font-semibold text-gray-300">Properties:</div>
                  {Object.entries(hoveredRelData.properties).map(([key, value]) => (
                    <div key={key} className="text-xs text-gray-400 flex gap-2">
                      <span className="font-mono text-cyan-400">{key}:</span>
                      <span className="truncate">{String(value)}</span>
                    </div>
                  ))}
                </div>
              )}
            </div>
          </CardBody>
        </Card>
      )}

      {/* Legend: one swatch per distinct node label present in the data */}
      <div className="mt-4 flex flex-wrap gap-3">
        {Array.from(new Set(nodes.flatMap(n => n.labels))).map(label => (
          <div key={label} className="flex items-center gap-2">
            <div
              className="w-3 h-3 rounded-full"
              style={{ backgroundColor: labelColors[label] || '#6b7280' }}
            />
            <span className="text-xs text-gray-400">{label}</span>
          </div>
        ))}
      </div>
    </div>
  );
}
|
||||
1079
vendor/ruvector/crates/rvlite/examples/dashboard/src/components/SimulationEngine.tsx
vendored
Normal file
1079
vendor/ruvector/crates/rvlite/examples/dashboard/src/components/SimulationEngine.tsx
vendored
Normal file
File diff suppressed because it is too large
Load Diff
2335
vendor/ruvector/crates/rvlite/examples/dashboard/src/components/SupplyChainSimulation.tsx
vendored
Normal file
2335
vendor/ruvector/crates/rvlite/examples/dashboard/src/components/SupplyChainSimulation.tsx
vendored
Normal file
File diff suppressed because it is too large
Load Diff
30
vendor/ruvector/crates/rvlite/examples/dashboard/src/hero.ts
vendored
Normal file
30
vendor/ruvector/crates/rvlite/examples/dashboard/src/hero.ts
vendored
Normal file
@@ -0,0 +1,30 @@
|
||||
import { heroui } from "@heroui/react";
|
||||
|
||||
// HeroUI plugin configuration for the dashboard — defines the dark theme only.
export default heroui({
  themes: {
    dark: {
      colors: {
        background: "#0a0a0f", // near-black page background
        foreground: "#ECEDEE", // default text colour
        // Primary: green scale, 50 = lightest through 900 = darkest.
        primary: {
          50: "#e6fff5",
          100: "#b3ffe0",
          200: "#80ffcc",
          300: "#4dffb8",
          400: "#1affa3",
          500: "#00e68a",
          600: "#00b36b",
          700: "#00804d",
          800: "#004d2e",
          900: "#001a10",
          DEFAULT: "#00e68a", // used when no shade is specified
          foreground: "#000000", // text colour on primary surfaces
        },
        secondary: {
          DEFAULT: "#7c3aed", // purple accent
          foreground: "#ffffff", // text colour on secondary surfaces
        },
      },
    },
  },
});
|
||||
891
vendor/ruvector/crates/rvlite/examples/dashboard/src/hooks/useLearning.ts
vendored
Normal file
891
vendor/ruvector/crates/rvlite/examples/dashboard/src/hooks/useLearning.ts
vendored
Normal file
@@ -0,0 +1,891 @@
|
||||
/**
|
||||
* Self-Learning Hook for RvLite Dashboard
|
||||
*
|
||||
* Implements adaptive learning capabilities:
|
||||
* - Query pattern recognition and optimization
|
||||
* - Result relevance feedback and scoring
|
||||
* - Usage pattern analysis
|
||||
* - Automatic query suggestions
|
||||
* - Performance optimization recommendations
|
||||
*/
|
||||
|
||||
import { useState, useCallback, useEffect, useRef } from 'react';
|
||||
|
||||
// ============================================================================
|
||||
// Types
|
||||
// ============================================================================
|
||||
|
||||
/** Aggregated statistics for one normalized query pattern. */
export interface QueryPattern {
  id: string; // hash-derived id of the form `${queryType}_${hex}`
  queryType: 'sql' | 'sparql' | 'cypher' | 'vector';
  pattern: string; // normalized query text (literals/numbers replaced with '?')
  frequency: number; // how many recorded executions matched this pattern
  avgExecutionTime: number; // running mean execution time in ms
  successRate: number; // running mean of success flags, 0..1
  lastUsed: number; // epoch ms of the most recent matching execution
  resultCount: number; // blended average of result counts across executions
  feedback: {
    helpful: number; // count of "helpful" votes for this pattern
    notHelpful: number; // count of "not helpful" votes
  };
}
|
||||
|
||||
/** Snapshot of learning state, produced by the engine's getMetrics(). */
export interface LearningMetrics {
  totalQueries: number; // executions recorded in the last 24 hours
  successfulQueries: number; // subset of totalQueries that succeeded
  failedQueries: number; // totalQueries - successfulQueries
  avgResponseTime: number; // mean execution time (ms) over the last 24 hours
  queryPatterns: QueryPattern[]; // top 20 patterns, sorted by frequency
  suggestions: QuerySuggestion[]; // derived query suggestions
  insights: LearningInsight[]; // derived usage insights
  adaptationLevel: number; // 0-100 scale
  learningRate: number; // queries executed in the last hour
}
|
||||
|
||||
/** A suggested query derived from previously learned patterns. */
export interface QuerySuggestion {
  id: string;
  query: string; // suggested query text (a normalized pattern with '?' placeholders)
  queryType: 'sql' | 'sparql' | 'cypher' | 'vector';
  confidence: number; // 0..1 confidence score
  reason: string; // human-readable justification for the suggestion
  basedOn: string[]; // ids of the QueryPattern(s) this suggestion is derived from
}
|
||||
|
||||
/** An insight surfaced from query-usage analysis. */
export interface LearningInsight {
  id: string;
  type: 'optimization' | 'pattern' | 'anomaly' | 'recommendation';
  title: string;
  description: string;
  recommendation?: string; // optional concrete advice accompanying the insight
  severity: 'info' | 'warning' | 'success';
  timestamp: number; // epoch ms when the insight was generated
  actionable: boolean; // marks insights the user can act on
  action?: () => void; // optional callback; not populated by the built-in insights
}
|
||||
|
||||
/** One user-feedback record for an executed query. */
export interface FeedbackEntry {
  queryId: string; // id of the execution this feedback refers to
  query: string; // raw query text the feedback was given for
  queryType: 'sql' | 'sparql' | 'cypher' | 'vector';
  helpful: boolean; // true if the user marked the results as helpful
  timestamp: number; // epoch ms the feedback was recorded
  resultCount: number; // number of results the query produced
  executionTime: number; // execution time in ms
}
|
||||
|
||||
/** One recorded query execution. */
export interface QueryExecution {
  id: string; // unique id generated when the execution is recorded
  query: string; // raw query text as submitted
  queryType: 'sql' | 'sparql' | 'cypher' | 'vector';
  timestamp: number; // epoch ms the query ran
  executionTime: number; // execution time in ms
  success: boolean; // whether the query completed without error
  resultCount: number; // number of results returned
  error?: string; // error message, present when success is false
}
|
||||
|
||||
// ============================================================================
|
||||
// Learning Engine
|
||||
// ============================================================================
|
||||
|
||||
class LearningEngine {
|
||||
private patterns: Map<string, QueryPattern> = new Map();
|
||||
private executions: QueryExecution[] = [];
|
||||
private feedback: FeedbackEntry[] = [];
|
||||
private storageKey = 'rvlite_learning_data';
|
||||
|
||||
constructor() {
|
||||
this.loadFromStorage();
|
||||
}
|
||||
|
||||
// Pattern extraction from query
|
||||
private extractPattern(query: string, queryType: string): string {
|
||||
let normalized = query.trim().toLowerCase();
|
||||
|
||||
// Normalize SQL patterns
|
||||
if (queryType === 'sql') {
|
||||
// Replace specific values with placeholders
|
||||
normalized = normalized
|
||||
.replace(/'[^']*'/g, "'?'")
|
||||
.replace(/\[[^\]]*\]/g, '[?]')
|
||||
.replace(/\d+(\.\d+)?/g, '?')
|
||||
.replace(/\s+/g, ' ');
|
||||
}
|
||||
|
||||
// Normalize SPARQL patterns
|
||||
if (queryType === 'sparql') {
|
||||
normalized = normalized
|
||||
.replace(/<[^>]+>/g, '<?>') // URIs
|
||||
.replace(/"[^"]*"/g, '"?"') // Literals
|
||||
.replace(/\s+/g, ' ');
|
||||
}
|
||||
|
||||
// Normalize Cypher patterns
|
||||
if (queryType === 'cypher') {
|
||||
normalized = normalized
|
||||
.replace(/'[^']*'/g, "'?'")
|
||||
.replace(/\{[^}]+\}/g, '{?}')
|
||||
.replace(/\s+/g, ' ');
|
||||
}
|
||||
|
||||
return normalized;
|
||||
}
|
||||
|
||||
// Generate pattern ID
|
||||
private generatePatternId(pattern: string, queryType: string): string {
|
||||
const hash = pattern.split('').reduce((acc, char) => {
|
||||
return ((acc << 5) - acc) + char.charCodeAt(0);
|
||||
}, 0);
|
||||
return `${queryType}_${Math.abs(hash).toString(16)}`;
|
||||
}
|
||||
|
||||
// Record query execution
|
||||
recordExecution(
|
||||
query: string,
|
||||
queryType: 'sql' | 'sparql' | 'cypher' | 'vector',
|
||||
executionTime: number,
|
||||
success: boolean,
|
||||
resultCount: number,
|
||||
error?: string
|
||||
): string {
|
||||
const execution: QueryExecution = {
|
||||
id: `exec_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`,
|
||||
query,
|
||||
queryType,
|
||||
timestamp: Date.now(),
|
||||
executionTime,
|
||||
success,
|
||||
resultCount,
|
||||
error,
|
||||
};
|
||||
|
||||
this.executions.push(execution);
|
||||
|
||||
// Keep only last 1000 executions
|
||||
if (this.executions.length > 1000) {
|
||||
this.executions = this.executions.slice(-1000);
|
||||
}
|
||||
|
||||
// Update pattern
|
||||
this.updatePattern(execution);
|
||||
|
||||
// Save to storage
|
||||
this.saveToStorage();
|
||||
|
||||
return execution.id;
|
||||
}
|
||||
|
||||
// Update pattern from execution
|
||||
private updatePattern(execution: QueryExecution): void {
|
||||
const pattern = this.extractPattern(execution.query, execution.queryType);
|
||||
const patternId = this.generatePatternId(pattern, execution.queryType);
|
||||
|
||||
const existing = this.patterns.get(patternId);
|
||||
|
||||
if (existing) {
|
||||
existing.frequency++;
|
||||
existing.avgExecutionTime = (existing.avgExecutionTime * (existing.frequency - 1) + execution.executionTime) / existing.frequency;
|
||||
existing.successRate = (existing.successRate * (existing.frequency - 1) + (execution.success ? 1 : 0)) / existing.frequency;
|
||||
existing.lastUsed = execution.timestamp;
|
||||
existing.resultCount = (existing.resultCount + execution.resultCount) / 2;
|
||||
} else {
|
||||
this.patterns.set(patternId, {
|
||||
id: patternId,
|
||||
queryType: execution.queryType,
|
||||
pattern,
|
||||
frequency: 1,
|
||||
avgExecutionTime: execution.executionTime,
|
||||
successRate: execution.success ? 1 : 0,
|
||||
lastUsed: execution.timestamp,
|
||||
resultCount: execution.resultCount,
|
||||
feedback: { helpful: 0, notHelpful: 0 },
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Record feedback
|
||||
recordFeedback(
|
||||
queryId: string,
|
||||
query: string,
|
||||
queryType: 'sql' | 'sparql' | 'cypher' | 'vector',
|
||||
helpful: boolean,
|
||||
resultCount: number,
|
||||
executionTime: number
|
||||
): void {
|
||||
this.feedback.push({
|
||||
queryId,
|
||||
query,
|
||||
queryType,
|
||||
helpful,
|
||||
timestamp: Date.now(),
|
||||
resultCount,
|
||||
executionTime,
|
||||
});
|
||||
|
||||
// Update pattern feedback
|
||||
const pattern = this.extractPattern(query, queryType);
|
||||
const patternId = this.generatePatternId(pattern, queryType);
|
||||
const existing = this.patterns.get(patternId);
|
||||
|
||||
if (existing) {
|
||||
if (helpful) {
|
||||
existing.feedback.helpful++;
|
||||
} else {
|
||||
existing.feedback.notHelpful++;
|
||||
}
|
||||
}
|
||||
|
||||
this.saveToStorage();
|
||||
}
|
||||
|
||||
// Get learning metrics
|
||||
getMetrics(): LearningMetrics {
|
||||
const patterns = Array.from(this.patterns.values());
|
||||
const recentExecutions = this.executions.filter(
|
||||
e => Date.now() - e.timestamp < 24 * 60 * 60 * 1000 // Last 24 hours
|
||||
);
|
||||
|
||||
const totalQueries = recentExecutions.length;
|
||||
const successfulQueries = recentExecutions.filter(e => e.success).length;
|
||||
const failedQueries = totalQueries - successfulQueries;
|
||||
const avgResponseTime = recentExecutions.length > 0
|
||||
? recentExecutions.reduce((sum, e) => sum + e.executionTime, 0) / recentExecutions.length
|
||||
: 0;
|
||||
|
||||
// Calculate adaptation level based on pattern recognition
|
||||
const totalFeedback = patterns.reduce(
|
||||
(sum, p) => sum + p.feedback.helpful + p.feedback.notHelpful, 0
|
||||
);
|
||||
const positiveFeedback = patterns.reduce((sum, p) => sum + p.feedback.helpful, 0);
|
||||
const adaptationLevel = totalFeedback > 0
|
||||
? Math.round((positiveFeedback / totalFeedback) * 100)
|
||||
: 50;
|
||||
|
||||
// Calculate learning rate (queries per hour)
|
||||
const hourAgo = Date.now() - 60 * 60 * 1000;
|
||||
const queriesLastHour = this.executions.filter(e => e.timestamp > hourAgo).length;
|
||||
|
||||
return {
|
||||
totalQueries,
|
||||
successfulQueries,
|
||||
failedQueries,
|
||||
avgResponseTime,
|
||||
queryPatterns: patterns.sort((a, b) => b.frequency - a.frequency).slice(0, 20),
|
||||
suggestions: this.generateSuggestions(),
|
||||
insights: this.generateInsights(),
|
||||
adaptationLevel,
|
||||
learningRate: queriesLastHour,
|
||||
};
|
||||
}
|
||||
|
||||
// Generate query suggestions
|
||||
private generateSuggestions(): QuerySuggestion[] {
|
||||
const suggestions: QuerySuggestion[] = [];
|
||||
const patterns = Array.from(this.patterns.values());
|
||||
|
||||
// Suggest frequently used successful patterns
|
||||
const frequentPatterns = patterns
|
||||
.filter(p => p.frequency >= 2 && p.successRate > 0.7)
|
||||
.sort((a, b) => b.frequency - a.frequency)
|
||||
.slice(0, 5);
|
||||
|
||||
frequentPatterns.forEach((p, i) => {
|
||||
suggestions.push({
|
||||
id: `sug_freq_${i}`,
|
||||
query: p.pattern,
|
||||
queryType: p.queryType,
|
||||
confidence: Math.min(0.95, p.successRate * (1 + Math.log10(p.frequency) / 10)),
|
||||
reason: `Frequently used pattern (${p.frequency} times) with ${Math.round(p.successRate * 100)}% success rate`,
|
||||
basedOn: [p.id],
|
||||
});
|
||||
});
|
||||
|
||||
// Suggest based on positive feedback
|
||||
const positiveFeedbackPatterns = patterns
|
||||
.filter(p => p.feedback.helpful > p.feedback.notHelpful)
|
||||
.sort((a, b) => b.feedback.helpful - a.feedback.helpful)
|
||||
.slice(0, 3);
|
||||
|
||||
positiveFeedbackPatterns.forEach((p, i) => {
|
||||
if (!suggestions.find(s => s.basedOn.includes(p.id))) {
|
||||
suggestions.push({
|
||||
id: `sug_fb_${i}`,
|
||||
query: p.pattern,
|
||||
queryType: p.queryType,
|
||||
confidence: 0.8 + (p.feedback.helpful / (p.feedback.helpful + p.feedback.notHelpful + 1)) * 0.2,
|
||||
reason: `Marked as helpful ${p.feedback.helpful} times`,
|
||||
basedOn: [p.id],
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return suggestions;
|
||||
}
|
||||
|
||||
// Generate learning insights
|
||||
private generateInsights(): LearningInsight[] {
|
||||
const insights: LearningInsight[] = [];
|
||||
const patterns = Array.from(this.patterns.values());
|
||||
const recentExecutions = this.executions.slice(-100);
|
||||
|
||||
// Slow query insight
|
||||
const slowPatterns = patterns.filter(p => p.avgExecutionTime > 500);
|
||||
if (slowPatterns.length > 0) {
|
||||
insights.push({
|
||||
id: 'insight_slow_queries',
|
||||
type: 'optimization',
|
||||
title: 'Slow Queries Detected',
|
||||
description: `${slowPatterns.length} query pattern(s) have average execution time > 500ms. Consider optimizing these queries or adding indexes.`,
|
||||
recommendation: 'Try reducing result set size with LIMIT, or simplify complex JOINs and subqueries.',
|
||||
severity: 'warning',
|
||||
timestamp: Date.now(),
|
||||
actionable: true,
|
||||
});
|
||||
}
|
||||
|
||||
// High failure rate insight
|
||||
const failingPatterns = patterns.filter(p => p.frequency >= 3 && p.successRate < 0.5);
|
||||
if (failingPatterns.length > 0) {
|
||||
insights.push({
|
||||
id: 'insight_failing_queries',
|
||||
type: 'anomaly',
|
||||
title: 'Query Patterns with High Failure Rate',
|
||||
description: `${failingPatterns.length} frequently used patterns have >50% failure rate. Review syntax and data requirements.`,
|
||||
recommendation: 'Check for typos, missing tables/columns, or invalid data types in your queries.',
|
||||
severity: 'warning',
|
||||
timestamp: Date.now(),
|
||||
actionable: true,
|
||||
});
|
||||
}
|
||||
|
||||
// Success pattern insight
|
||||
const successfulPatterns = patterns.filter(p => p.frequency >= 5 && p.successRate > 0.9);
|
||||
if (successfulPatterns.length > 0) {
|
||||
insights.push({
|
||||
id: 'insight_success_patterns',
|
||||
type: 'pattern',
|
||||
title: 'Reliable Query Patterns Identified',
|
||||
description: `${successfulPatterns.length} patterns consistently succeed. These can be used as templates for similar queries.`,
|
||||
severity: 'success',
|
||||
timestamp: Date.now(),
|
||||
actionable: false,
|
||||
});
|
||||
}
|
||||
|
||||
// Query diversity insight
|
||||
const queryTypes = new Set(patterns.map(p => p.queryType));
|
||||
if (queryTypes.size >= 3) {
|
||||
insights.push({
|
||||
id: 'insight_diversity',
|
||||
type: 'recommendation',
|
||||
title: 'Multi-Modal Database Usage',
|
||||
description: `You're effectively using ${queryTypes.size} different query languages. This is optimal for complex data applications.`,
|
||||
severity: 'info',
|
||||
timestamp: Date.now(),
|
||||
actionable: false,
|
||||
});
|
||||
}
|
||||
|
||||
// Learning progress insight
|
||||
const recentFeedback = this.feedback.filter(f => Date.now() - f.timestamp < 7 * 24 * 60 * 60 * 1000);
|
||||
if (recentFeedback.length >= 10) {
|
||||
const helpfulRate = recentFeedback.filter(f => f.helpful).length / recentFeedback.length;
|
||||
if (helpfulRate > 0.8) {
|
||||
insights.push({
|
||||
id: 'insight_learning_success',
|
||||
type: 'pattern',
|
||||
title: 'High Learning Effectiveness',
|
||||
description: `${Math.round(helpfulRate * 100)}% of recent results were marked as helpful. The system is adapting well to your needs.`,
|
||||
severity: 'success',
|
||||
timestamp: Date.now(),
|
||||
actionable: false,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Recent activity insight
|
||||
if (recentExecutions.length > 50) {
|
||||
const successRate = recentExecutions.filter(e => e.success).length / recentExecutions.length;
|
||||
insights.push({
|
||||
id: 'insight_activity',
|
||||
type: 'pattern',
|
||||
title: 'Query Activity Analysis',
|
||||
description: `${recentExecutions.length} queries in recent session with ${Math.round(successRate * 100)}% success rate.`,
|
||||
severity: successRate > 0.8 ? 'success' : 'info',
|
||||
timestamp: Date.now(),
|
||||
actionable: false,
|
||||
});
|
||||
}
|
||||
|
||||
return insights;
|
||||
}
|
||||
|
||||
// Get top patterns by query type
|
||||
getTopPatterns(queryType: 'sql' | 'sparql' | 'cypher' | 'vector', limit: number = 5): QueryPattern[] {
|
||||
return Array.from(this.patterns.values())
|
||||
.filter(p => p.queryType === queryType)
|
||||
.sort((a, b) => {
|
||||
// Score based on frequency, success rate, and recent usage
|
||||
const scoreA = a.frequency * a.successRate * (1 + a.feedback.helpful - a.feedback.notHelpful * 0.5);
|
||||
const scoreB = b.frequency * b.successRate * (1 + b.feedback.helpful - b.feedback.notHelpful * 0.5);
|
||||
return scoreB - scoreA;
|
||||
})
|
||||
.slice(0, limit);
|
||||
}
|
||||
|
||||
// Get recent query executions
|
||||
getRecentExecutions(limit: number = 10): QueryExecution[] {
|
||||
return this.executions
|
||||
.slice(-limit)
|
||||
.reverse(); // Most recent first
|
||||
}
|
||||
|
||||
// Clear learning data
|
||||
clear(): void {
|
||||
this.patterns.clear();
|
||||
this.executions = [];
|
||||
this.feedback = [];
|
||||
this.saveToStorage();
|
||||
}
|
||||
|
||||
// Save to localStorage
|
||||
private saveToStorage(): void {
|
||||
try {
|
||||
const data = {
|
||||
patterns: Array.from(this.patterns.entries()),
|
||||
executions: this.executions.slice(-500), // Keep last 500
|
||||
feedback: this.feedback.slice(-500),
|
||||
};
|
||||
localStorage.setItem(this.storageKey, JSON.stringify(data));
|
||||
} catch (e) {
|
||||
console.warn('Failed to save learning data:', e);
|
||||
}
|
||||
}
|
||||
|
||||
// Load from localStorage
|
||||
private loadFromStorage(): void {
|
||||
try {
|
||||
const stored = localStorage.getItem(this.storageKey);
|
||||
if (stored) {
|
||||
const data = JSON.parse(stored);
|
||||
this.patterns = new Map(data.patterns || []);
|
||||
this.executions = data.executions || [];
|
||||
this.feedback = data.feedback || [];
|
||||
}
|
||||
} catch (e) {
|
||||
console.warn('Failed to load learning data:', e);
|
||||
}
|
||||
}
|
||||
|
||||
// Export learning data
|
||||
export(): Record<string, unknown> {
|
||||
return {
|
||||
patterns: Array.from(this.patterns.entries()),
|
||||
executions: this.executions,
|
||||
feedback: this.feedback,
|
||||
exportedAt: Date.now(),
|
||||
};
|
||||
}
|
||||
|
||||
// Import learning data
|
||||
import(data: Record<string, unknown>): void {
|
||||
if (data.patterns) {
|
||||
this.patterns = new Map(data.patterns as [string, QueryPattern][]);
|
||||
}
|
||||
if (data.executions) {
|
||||
this.executions = data.executions as QueryExecution[];
|
||||
}
|
||||
if (data.feedback) {
|
||||
this.feedback = data.feedback as FeedbackEntry[];
|
||||
}
|
||||
this.saveToStorage();
|
||||
}
|
||||
}
|
||||
|
||||
// ============================================================================
|
||||
// Hook
|
||||
// ============================================================================
|
||||
|
||||
// Singleton learning engine
|
||||
let learningEngineInstance: LearningEngine | null = null;
|
||||
|
||||
function getLearningEngine(): LearningEngine {
|
||||
if (!learningEngineInstance) {
|
||||
learningEngineInstance = new LearningEngine();
|
||||
}
|
||||
return learningEngineInstance;
|
||||
}
|
||||
|
||||
// GNN State interface
|
||||
// Snapshot of GNN training state exposed by the useLearning hook.
export interface GnnState {
  nodes: number; // graph node count (initialised to 0)
  edges: number; // graph edge count (initialised to 0)
  layers: number; // network depth — initialised to 3 in useLearning
  accuracy: number; // latest accuracy value (initialised to 0)
  isTraining: boolean; // true while a training run is in progress
  lastTrainedAt: number | null; // epoch ms of last training, or null if never trained
}
|
||||
|
||||
/**
 * React hook exposing the shared LearningEngine plus a small, real
 * feed-forward network (10 -> 8 -> 1, ReLU/sigmoid) trained by gradient
 * descent on recorded query patterns.
 *
 * Returns recording functions (recordQuery/recordFeedback), pattern and
 * suggestion accessors, GNN training/embedding helpers, and
 * import/export/clear management functions. All instances of this hook
 * share one engine via the module-level singleton.
 */
export function useLearning() {
  // Use the singleton directly, don't access ref during render
  const engine = getLearningEngine();
  const engineRef = useRef<LearningEngine>(engine);
  const [metrics, setMetrics] = useState<LearningMetrics>(() => engine.getMetrics());
  // Id of the most recent recorded execution; used to attach feedback.
  const [lastQueryId, setLastQueryId] = useState<string | null>(null);

  // GNN State
  const [gnnState, setGnnState] = useState<GnnState>({
    nodes: 0,
    edges: 0,
    layers: 3,
    accuracy: 0,
    isTraining: false,
    lastTrainedAt: null,
  });

  // Refresh metrics from the engine into React state.
  const refreshMetrics = useCallback(() => {
    setMetrics(engineRef.current.getMetrics());
  }, []);

  // Record a query execution and remember its id for later feedback.
  const recordQuery = useCallback((
    query: string,
    queryType: 'sql' | 'sparql' | 'cypher' | 'vector',
    executionTime: number,
    success: boolean,
    resultCount: number,
    error?: string
  ) => {
    const id = engineRef.current.recordExecution(
      query,
      queryType,
      executionTime,
      success,
      resultCount,
      error
    );
    setLastQueryId(id);
    refreshMetrics();
    return id;
  }, [refreshMetrics]);

  // Record feedback for a result. Falls back to a synthetic id when no
  // query was recorded in this session.
  const recordFeedback = useCallback((
    query: string,
    queryType: 'sql' | 'sparql' | 'cypher' | 'vector',
    helpful: boolean,
    resultCount: number = 0,
    executionTime: number = 0
  ) => {
    engineRef.current.recordFeedback(
      lastQueryId || `fb_${Date.now()}`,
      query,
      queryType,
      helpful,
      resultCount,
      executionTime
    );
    refreshMetrics();
  }, [lastQueryId, refreshMetrics]);

  // Get suggestions for a query type
  const getSuggestions = useCallback((queryType: 'sql' | 'sparql' | 'cypher' | 'vector') => {
    return metrics.suggestions.filter(s => s.queryType === queryType);
  }, [metrics.suggestions]);

  // Get top patterns for a query type
  const getTopPatterns = useCallback((queryType: 'sql' | 'sparql' | 'cypher' | 'vector', limit: number = 5) => {
    return engineRef.current.getTopPatterns(queryType, limit);
  }, []);

  // Get recent query executions
  const getRecentExecutions = useCallback((limit: number = 10) => {
    return engineRef.current.getRecentExecutions(limit);
  }, []);

  // Clear all learning data
  const clearLearning = useCallback(() => {
    engineRef.current.clear();
    refreshMetrics();
  }, [refreshMetrics]);

  // Export learning data
  const exportLearning = useCallback(() => {
    return engineRef.current.export();
  }, []);

  // Import learning data
  const importLearning = useCallback((data: Record<string, unknown>) => {
    engineRef.current.import(data);
    refreshMetrics();
  }, [refreshMetrics]);

  // Auto-refresh metrics periodically
  useEffect(() => {
    const interval = setInterval(refreshMetrics, 30000); // Every 30 seconds
    return () => clearInterval(interval);
  }, [refreshMetrics]);

  // Derive GNN nodes/edges from patterns (computed value, no effect needed)
  const gnnDerivedState = {
    nodes: metrics.queryPatterns.length,
    edges: Math.max(0, metrics.queryPatterns.length * 2 - 1),
  };

  // ============================================================================
  // Real Neural Network Implementation (Lightweight GNN for Query Patterns)
  // ============================================================================

  // Neural network weights (stored in a ref so training mutates them in
  // place without triggering re-renders).
  const weightsRef = useRef<{
    W1: number[][]; // Input to hidden (patternFeatures x hiddenSize)
    W2: number[][]; // Hidden to output (hiddenSize x outputSize)
    b1: number[]; // Hidden bias
    b2: number[]; // Output bias
  } | null>(null);

  // Initialize weights with Xavier/Glorot uniform scaling; biases start at 0.
  const initWeights = useCallback((inputSize: number, hiddenSize: number, outputSize: number) => {
    const xavier = (fan_in: number, fan_out: number) =>
      Math.sqrt(6 / (fan_in + fan_out)) * (Math.random() * 2 - 1);

    const W1: number[][] = Array(inputSize).fill(0).map(() =>
      Array(hiddenSize).fill(0).map(() => xavier(inputSize, hiddenSize))
    );
    const W2: number[][] = Array(hiddenSize).fill(0).map(() =>
      Array(outputSize).fill(0).map(() => xavier(hiddenSize, outputSize))
    );
    const b1 = Array(hiddenSize).fill(0);
    const b2 = Array(outputSize).fill(0);

    weightsRef.current = { W1, W2, b1, b2 };
  }, []);

  // ReLU activation
  const relu = (x: number) => Math.max(0, x);
  // NOTE(review): applied to post-activation values in backprop below;
  // equivalent to the pre-activation derivative except at exactly 0.
  const reluDerivative = (x: number) => x > 0 ? 1 : 0;

  // Sigmoid activation (input clamped to +/-500 to avoid overflow in exp)
  const sigmoid = (x: number) => 1 / (1 + Math.exp(-Math.min(Math.max(x, -500), 500)));

  // Extract a 10-element feature vector from a query pattern:
  // 4 one-hot type slots + 6 normalized scalar features.
  const extractPatternFeatures = useCallback((pattern: QueryPattern): number[] => {
    const typeEncoding = {
      'sql': [1, 0, 0, 0],
      'sparql': [0, 1, 0, 0],
      'cypher': [0, 0, 1, 0],
      'vector': [0, 0, 0, 1],
    };
    const typeVec = typeEncoding[pattern.queryType] || [0, 0, 0, 0];

    return [
      ...typeVec,
      Math.min(pattern.frequency / 100, 1), // Normalized frequency
      pattern.avgExecutionTime / 1000, // Time in seconds
      pattern.successRate, // Already 0-1
      Math.min(pattern.resultCount / 1000, 1), // Normalized results
      pattern.feedback.helpful / (pattern.feedback.helpful + pattern.feedback.notHelpful + 1),
      pattern.pattern.length / 500, // Normalized pattern length
    ];
  }, []);

  // Forward pass through the 2-layer network; lazily initializes weights
  // to the fixed 10-8-1 topology on first use.
  const forward = useCallback((input: number[]): { hidden: number[]; output: number[] } => {
    if (!weightsRef.current) {
      initWeights(10, 8, 1);
    }
    const { W1, W2, b1, b2 } = weightsRef.current!;

    // Hidden layer
    const hidden: number[] = [];
    for (let j = 0; j < W1[0].length; j++) {
      let sum = b1[j];
      for (let i = 0; i < input.length && i < W1.length; i++) {
        sum += input[i] * W1[i][j];
      }
      hidden.push(relu(sum));
    }

    // Output layer
    const output: number[] = [];
    for (let j = 0; j < W2[0].length; j++) {
      let sum = b2[j];
      for (let i = 0; i < hidden.length; i++) {
        sum += hidden[i] * W2[i][j];
      }
      output.push(sigmoid(sum));
    }

    return { hidden, output };
  }, [initWeights]);

  // Train the network with plain SGD to predict each pattern's success
  // rate. Returns the final classification accuracy (threshold 0.5).
  const trainGNN = useCallback(async () => {
    setGnnState(prev => ({ ...prev, isTraining: true }));

    const patterns = metrics.queryPatterns;
    if (patterns.length === 0) {
      setGnnState(prev => ({ ...prev, isTraining: false }));
      return 0.5;
    }

    // Initialize weights if needed
    if (!weightsRef.current) {
      initWeights(10, 8, 1);
    }

    const learningRate = 0.01;
    const epochs = 50;

    // Training loop (async to not block UI)
    for (let epoch = 0; epoch < epochs; epoch++) {
      // NOTE(review): epochLoss is accumulated but never read after the
      // loop; kept for potential debugging/telemetry.
      let epochLoss = 0;

      for (const pattern of patterns) {
        const input = extractPatternFeatures(pattern);
        const target = pattern.successRate; // Train to predict success

        // Forward pass
        const { hidden, output } = forward(input);
        const prediction = output[0];

        // Calculate loss (MSE)
        const loss = Math.pow(target - prediction, 2);
        epochLoss += loss;

        // Backpropagation (weights mutated in place inside the ref)
        const { W1, W2, b1, b2 } = weightsRef.current!;

        // Output gradient: dMSE/dz = -2(t - p) * sigmoid'(z), with
        // sigmoid'(z) expressed as p(1 - p).
        const dOutput = -2 * (target - prediction) * prediction * (1 - prediction);

        // Hidden gradients (computed before W2 is updated)
        const dHidden: number[] = [];
        for (let i = 0; i < W2.length; i++) {
          dHidden.push(dOutput * W2[i][0] * reluDerivative(hidden[i]));
        }

        // Update W2 and b2
        for (let i = 0; i < W2.length; i++) {
          W2[i][0] -= learningRate * dOutput * hidden[i];
        }
        b2[0] -= learningRate * dOutput;

        // Update W1 and b1
        for (let j = 0; j < W1[0].length; j++) {
          b1[j] -= learningRate * dHidden[j];
          for (let i = 0; i < input.length && i < W1.length; i++) {
            W1[i][j] -= learningRate * dHidden[j] * input[i];
          }
        }
      }

      // Yield to UI every 10 epochs
      if (epoch % 10 === 0) {
        await new Promise(resolve => setTimeout(resolve, 0));
      }
    }

    // Calculate final accuracy (fraction of patterns whose predicted
    // success side of 0.5 matches the actual side).
    let correct = 0;
    for (const pattern of patterns) {
      const input = extractPatternFeatures(pattern);
      const { output } = forward(input);
      const predicted = output[0] > 0.5;
      const actual = pattern.successRate > 0.5;
      if (predicted === actual) correct++;
    }
    const accuracy = patterns.length > 0 ? correct / patterns.length : 0.5;

    setGnnState(prev => ({
      ...prev,
      isTraining: false,
      accuracy: Math.min(0.99, accuracy),
      lastTrainedAt: Date.now(),
    }));

    return accuracy;
  }, [metrics.queryPatterns, initWeights, extractPatternFeatures, forward]);

  // Get real graph embedding for a query using the trained network.
  // The query type is guessed heuristically from its leading keyword.
  const getGraphEmbedding = useCallback((query: string): number[] => {
    // Create a synthetic pattern from the query
    const syntheticPattern: QueryPattern = {
      id: 'temp',
      queryType: query.toLowerCase().startsWith('select') ? 'sql' :
                 query.toLowerCase().startsWith('match') ? 'cypher' :
                 query.toLowerCase().includes('sparql') || query.includes('?') ? 'sparql' : 'vector',
      pattern: query,
      frequency: 1,
      avgExecutionTime: 0,
      successRate: 0.5,
      lastUsed: Date.now(),
      resultCount: 0,
      feedback: { helpful: 0, notHelpful: 0 },
    };

    const input = extractPatternFeatures(syntheticPattern);
    const { hidden } = forward(input);

    // The hidden layer activations form the embedding
    return hidden;
  }, [extractPatternFeatures, forward]);

  // Reset learning: clear engine data and restore the initial GNN state.
  const resetLearning = useCallback(() => {
    engineRef.current.clear();
    setGnnState({
      nodes: 0,
      edges: 0,
      layers: 3,
      accuracy: 0,
      isTraining: false,
      lastTrainedAt: null,
    });
    refreshMetrics();
  }, [refreshMetrics]);

  // Derive patterns, suggestions, insights from metrics
  const patterns = metrics.queryPatterns || [];
  const suggestions = metrics.suggestions || [];
  const insights = metrics.insights || [];

  return {
    // Metrics
    metrics,

    // Derived state (for direct access)
    patterns,
    suggestions,
    insights,

    // GNN State (merged with derived values)
    gnnState: { ...gnnState, ...gnnDerivedState },

    // Recording
    recordQuery,
    recordFeedback,

    // Queries
    getSuggestions,
    getTopPatterns,
    getRecentExecutions,

    // GNN functions
    trainGNN,
    getGraphEmbedding,

    // Management
    clearLearning,
    resetLearning,
    exportLearning,
    importLearning,
    refreshMetrics,

    // State
    lastQueryId,
  };
}
|
||||
|
||||
// Default export mirrors the named export for import-style flexibility.
export default useLearning;
|
||||
734
vendor/ruvector/crates/rvlite/examples/dashboard/src/hooks/useRvLite.ts
vendored
Normal file
734
vendor/ruvector/crates/rvlite/examples/dashboard/src/hooks/useRvLite.ts
vendored
Normal file
@@ -0,0 +1,734 @@
|
||||
import { useState, useEffect, useCallback, useRef } from 'react';
|
||||
|
||||
// Import types from the WASM package
|
||||
// The actual module will be loaded dynamically to avoid bundler issues
|
||||
|
||||
// Types matching RvLite WASM API
|
||||
// Normalized RvLite configuration as exposed to React consumers.
export interface RvLiteConfig {
  dimensions: number;       // vector dimensionality of the store
  distance_metric: string;  // e.g. 'cosine', 'euclidean', 'dotproduct', 'manhattan'
}
|
||||
|
||||
// A stored vector with its id and optional metadata payload.
export interface VectorEntry {
  id: string;
  vector: number[];
  metadata?: Record<string, unknown>;
}
|
||||
|
||||
// One k-NN search hit: matched id, similarity/distance score, metadata.
export interface SearchResult {
  id: string;
  score: number;
  metadata?: Record<string, unknown>;
}
|
||||
|
||||
// Result of a Cypher query: optional node/relationship sets plus an
// optional status message (e.g. for write-only statements).
export interface CypherResult {
  nodes?: Array<{
    id: string;
    labels: string[];
    properties: Record<string, unknown>;
  }>;
  relationships?: Array<{
    id: string;
    type: string;
    start: string;  // start node id
    end: string;    // end node id
    properties: Record<string, unknown>;
  }>;
  message?: string;
}
|
||||
|
||||
// Result of a SPARQL query; which fields are populated depends on the
// query form (SELECT -> variables/bindings, ASK -> result,
// CONSTRUCT/DESCRIBE -> triples, UPDATE -> success).
export interface SparqlResult {
  type: 'select' | 'ask' | 'construct' | 'describe' | 'update';
  variables?: string[];
  bindings?: Array<Record<string, string>>;
  result?: boolean;
  triples?: Array<{
    subject: string;
    predicate: string;
    object: string;
  }>;
  success?: boolean;
}
|
||||
|
||||
// Result of a SQL statement: rows for SELECT, rowsAffected for writes,
// message for informational output.
export interface SqlResult {
  rows?: Array<Record<string, unknown>>;
  rowsAffected?: number;
  message?: string;
}
|
||||
|
||||
// Aggregated database statistics surfaced to the dashboard UI.
export interface RvLiteStats {
  vectorCount: number;     // vectors currently stored
  dimensions: number;      // configured vector dimensionality
  distanceMetric: string;  // configured distance metric
  tripleCount: number;     // RDF triples in the SPARQL store
  graphNodeCount: number;  // nodes in the Cypher graph
  graphEdgeCount: number;  // relationships in the Cypher graph
  features: string[];      // feature flags reported by the WASM build
  version: string;         // WASM module version string
  memoryUsage: string;     // human-readable estimate (e.g. "12 KB")
}
|
||||
|
||||
// Internal interface for WASM module
|
||||
// Internal interface for the raw WASM-bindgen RvLite instance. Methods
// mirror the Rust API; vectors cross the boundary as Float32Array.
interface WasmRvLite {
  is_ready: () => boolean;
  get_version: () => string;
  get_features: () => string[];
  get_config: () => { get_dimensions: () => number; get_distance_metric: () => string };

  // Vector store operations
  insert: (vector: Float32Array, metadata: unknown) => string;
  insert_with_id: (id: string, vector: Float32Array, metadata: unknown) => void;
  search: (queryVector: Float32Array, k: number) => SearchResult[];
  search_with_filter: (queryVector: Float32Array, k: number, filter: unknown) => SearchResult[];
  get: (id: string) => VectorEntry | null;
  delete: (id: string) => boolean;
  len: () => number;
  is_empty: () => boolean;

  // SQL engine
  sql: (query: string) => SqlResult;

  // Cypher graph engine
  cypher: (query: string) => CypherResult;
  cypher_stats: () => { nodes: number; relationships: number };
  cypher_clear: () => void;

  // SPARQL triple store
  sparql: (query: string) => SparqlResult;
  add_triple: (subject: string, predicate: string, object: string) => void;
  triple_count: () => number;
  clear_triples: () => void;

  // Persistence (IndexedDB-backed per the init banner) and JSON round-trip
  save: () => Promise<unknown>;
  init_storage: () => Promise<unknown>;
  export_json: () => Record<string, unknown>;
  import_json: (json: unknown) => void;
}
|
||||
|
||||
// Raw WASM config object; with_distance_metric returns a new/updated
// config (builder style).
interface WasmRvLiteConfig {
  get_dimensions: () => number;
  get_distance_metric: () => string;
  with_distance_metric: (metric: string) => WasmRvLiteConfig;
}
|
||||
|
||||
// Shape of the dynamically imported WASM-bindgen JS glue module.
interface WasmModule {
  // wasm-bindgen init function; optionally takes the .wasm binary path.
  default: (path?: string) => Promise<unknown>;
  init: () => void;
  RvLite: {
    new(config: WasmRvLiteConfig): WasmRvLite;
    default: () => WasmRvLite;
    // Static storage helpers on the RvLite class
    clear_storage: () => Promise<unknown>;
    has_saved_state: () => Promise<boolean>;
    is_storage_available: () => boolean;
  };
  RvLiteConfig: {
    new(dimensions: number): WasmRvLiteConfig;
  };
}
|
||||
|
||||
// Wrapper to normalize WASM API
|
||||
// Normalized RvLite API handed to the React layer: plain number[]
// vectors, plain-object config, and boolean-returning async storage ops.
interface RvLiteInstance {
  is_ready: () => boolean;
  get_version: () => string;
  get_features: () => string[];
  get_config: () => RvLiteConfig;

  // Vector store operations (number[] converted to Float32Array internally)
  insert: (vector: number[], metadata?: Record<string, unknown>) => string;
  insert_with_id: (id: string, vector: number[], metadata?: Record<string, unknown>) => void;
  search: (queryVector: number[], k: number) => SearchResult[];
  search_with_filter: (queryVector: number[], k: number, filter: Record<string, unknown>) => SearchResult[];
  get: (id: string) => VectorEntry | null;
  delete: (id: string) => boolean;
  len: () => number;
  is_empty: () => boolean;

  // SQL engine
  sql: (query: string) => SqlResult;

  // Cypher graph engine
  cypher: (query: string) => CypherResult;
  cypher_stats: () => { nodes: number; relationships: number };
  cypher_clear: () => void;

  // SPARQL triple store
  sparql: (query: string) => SparqlResult;
  add_triple: (subject: string, predicate: string, object: string) => void;
  triple_count: () => number;
  clear_triples: () => void;

  // Persistence; async ops resolve true on success, false on failure
  save: () => Promise<boolean>;
  has_saved_state: () => Promise<boolean>;
  clear_storage: () => Promise<boolean>;
  export_json: () => Record<string, unknown>;
  import_json: (json: Record<string, unknown>) => void;
}
|
||||
|
||||
// Wrapper for the real WASM module
|
||||
/**
 * Wraps a raw WASM RvLite instance in the normalized RvLiteInstance API:
 * converts number[] vectors to Float32Array, defends against non-array /
 * falsy WASM return values, normalizes stat field names, and converts
 * throwing storage operations into boolean results.
 *
 * @param wasm       the instantiated WASM RvLite object
 * @param WasmModule the RvLite class itself, needed for its static
 *                   storage helpers (has_saved_state / clear_storage)
 */
function createWasmWrapper(wasm: WasmRvLite, WasmModule: WasmModule['RvLite']): RvLiteInstance {
  return {
    is_ready: () => wasm.is_ready(),
    get_version: () => wasm.get_version(),
    get_features: () => {
      const features = wasm.get_features();
      // Guard: some builds may return a non-array value here.
      return Array.isArray(features) ? features : [];
    },
    get_config: () => {
      const config = wasm.get_config();
      // Config may return an object with getter methods or a plain JSON object depending on WASM version
      // Try getter methods first, fallback to direct property access
      const dims = typeof config?.get_dimensions === 'function'
        ? config.get_dimensions()
        : (config as unknown as { dimensions?: number })?.dimensions ?? 128;
      const metric = typeof config?.get_distance_metric === 'function'
        ? config.get_distance_metric()
        : (config as unknown as { distance_metric?: string })?.distance_metric ?? 'cosine';
      return {
        dimensions: dims,
        distance_metric: metric,
      };
    },

    // Vector operations: accept plain number[] and hand the WASM side a
    // Float32Array; missing metadata becomes an explicit null.
    insert: (vector, metadata) => {
      return wasm.insert(new Float32Array(vector), metadata || null);
    },
    insert_with_id: (id, vector, metadata) => {
      wasm.insert_with_id(id, new Float32Array(vector), metadata || null);
    },
    search: (queryVector, k) => {
      const results = wasm.search(new Float32Array(queryVector), k);
      return Array.isArray(results) ? results : [];
    },
    search_with_filter: (queryVector, k, filter) => {
      const results = wasm.search_with_filter(new Float32Array(queryVector), k, filter);
      return Array.isArray(results) ? results : [];
    },
    get: (id) => wasm.get(id),
    delete: (id) => wasm.delete(id),
    len: () => wasm.len(),
    is_empty: () => wasm.is_empty(),

    // Query engines: falsy WASM results are replaced with minimal stubs.
    sql: (query) => wasm.sql(query) || { message: 'No result' },

    cypher: (query) => wasm.cypher(query) || { message: 'No result' },
    cypher_stats: () => {
      const stats = wasm.cypher_stats();
      // WASM returns { node_count, edge_count }, normalize to { nodes, relationships }
      if (stats && typeof stats === 'object') {
        const s = stats as Record<string, unknown>;
        return {
          nodes: (s.node_count as number) ?? (s.nodes as number) ?? 0,
          relationships: (s.edge_count as number) ?? (s.relationships as number) ?? 0,
        };
      }
      return { nodes: 0, relationships: 0 };
    },
    cypher_clear: () => wasm.cypher_clear(),

    sparql: (query) => wasm.sparql(query) || { type: 'select' as const },
    add_triple: (subject, predicate, object) => wasm.add_triple(subject, predicate, object),
    triple_count: () => wasm.triple_count(),
    clear_triples: () => wasm.clear_triples(),

    // Storage: convert exceptions into boolean success flags. save()
    // initializes storage first so it works on a fresh instance.
    save: async () => {
      try {
        await wasm.init_storage();
        await wasm.save();
        return true;
      } catch (e) {
        console.error('Save failed:', e);
        return false;
      }
    },
    has_saved_state: async () => {
      try {
        return await WasmModule.has_saved_state();
      } catch {
        return false;
      }
    },
    clear_storage: async () => {
      try {
        await WasmModule.clear_storage();
        return true;
      } catch {
        return false;
      }
    },
    export_json: () => wasm.export_json() || {},
    import_json: (json) => wasm.import_json(json),
  };
}
|
||||
|
||||
// No mock implementation - WASM is required
|
||||
|
||||
// Hook for using RvLite
|
||||
export function useRvLite(initialDimensions: number = 128, initialDistanceMetric: string = 'cosine') {
|
||||
const [isReady, setIsReady] = useState(false);
|
||||
const [isLoading, setIsLoading] = useState(true);
|
||||
const [error, setError] = useState<string | null>(null);
|
||||
const [isWasm, setIsWasm] = useState(false);
|
||||
const [currentDimensions] = useState(initialDimensions);
|
||||
const [currentMetric, setCurrentMetric] = useState(initialDistanceMetric);
|
||||
const [stats, setStats] = useState<RvLiteStats>({
|
||||
vectorCount: 0,
|
||||
dimensions: initialDimensions,
|
||||
distanceMetric: initialDistanceMetric,
|
||||
tripleCount: 0,
|
||||
graphNodeCount: 0,
|
||||
graphEdgeCount: 0,
|
||||
features: [],
|
||||
version: '',
|
||||
memoryUsage: '0 KB',
|
||||
});
|
||||
|
||||
// Storage status
|
||||
const [storageStatus, setStorageStatus] = useState<{
|
||||
available: boolean;
|
||||
hasSavedState: boolean;
|
||||
estimatedSize: number;
|
||||
}>({ available: false, hasSavedState: false, estimatedSize: 0 });
|
||||
|
||||
const dbRef = useRef<RvLiteInstance | null>(null);
|
||||
const wasmModuleRef = useRef<WasmModule | null>(null);
|
||||
const initRef = useRef(false);
|
||||
|
||||
// Initialize RvLite
|
||||
useEffect(() => {
|
||||
if (initRef.current) return;
|
||||
initRef.current = true;
|
||||
|
||||
const init = async () => {
|
||||
setIsLoading(true);
|
||||
setError(null);
|
||||
|
||||
try {
|
||||
// Try to load actual WASM module using script injection
|
||||
// This avoids Vite's module transformation which breaks the WASM bindings
|
||||
const wasmJsPath = '/pkg/rvlite.js';
|
||||
const wasmBinaryPath = '/pkg/rvlite_bg.wasm';
|
||||
|
||||
// BBS-style initialization display
|
||||
const bbsInit = () => {
|
||||
const cyan = 'color: #00d4ff; font-weight: bold';
|
||||
const green = 'color: #00ff88; font-weight: bold';
|
||||
const yellow = 'color: #ffcc00; font-weight: bold';
|
||||
const magenta = 'color: #ff00ff; font-weight: bold';
|
||||
const dim = 'color: #888888';
|
||||
|
||||
console.log('%c╔══════════════════════════════════════════════════════════════════╗', cyan);
|
||||
console.log('%c║ ║', cyan);
|
||||
console.log('%c║ %c██████╗ ██╗ ██╗██╗ ██╗████████╗███████╗%c ║', cyan, green, cyan);
|
||||
console.log('%c║ %c██╔══██╗██║ ██║██║ ██║╚══██╔══╝██╔════╝%c ║', cyan, green, cyan);
|
||||
console.log('%c║ %c██████╔╝██║ ██║██║ ██║ ██║ █████╗%c ║', cyan, green, cyan);
|
||||
console.log('%c║ %c██╔══██╗╚██╗ ██╔╝██║ ██║ ██║ ██╔══╝%c ║', cyan, green, cyan);
|
||||
console.log('%c║ %c██║ ██║ ╚████╔╝ ███████╗██║ ██║ ███████╗%c ║', cyan, green, cyan);
|
||||
console.log('%c║ %c╚═╝ ╚═╝ ╚═══╝ ╚══════╝╚═╝ ╚═╝ ╚══════╝%c ║', cyan, green, cyan);
|
||||
console.log('%c║ ║', cyan);
|
||||
console.log('%c║ %cVector Database + SQL + SPARQL + Cypher%c ║', cyan, yellow, cyan);
|
||||
console.log('%c║ %cBrowser-Native WASM Implementation%c ║', cyan, dim, cyan);
|
||||
console.log('%c║ ║', cyan);
|
||||
console.log('%c╠══════════════════════════════════════════════════════════════════╣', cyan);
|
||||
console.log('%c║ %c[ SYSTEM INITIALIZATION ]%c ║', cyan, magenta, cyan);
|
||||
console.log('%c╚══════════════════════════════════════════════════════════════════╝', cyan);
|
||||
};
|
||||
|
||||
const bbsStatus = (label: string, status: string, ok: boolean) => {
|
||||
const cyan = 'color: #00d4ff';
|
||||
const statusColor = ok ? 'color: #00ff88; font-weight: bold' : 'color: #ff4444; font-weight: bold';
|
||||
console.log(`%c ├─ ${label.padEnd(30)} %c[${status}]`, cyan, statusColor);
|
||||
};
|
||||
|
||||
const bbsComplete = (version: string, _wasmLoaded: boolean, config: { dimensions: number; distanceMetric: string }) => {
|
||||
const cyan = 'color: #00d4ff; font-weight: bold';
|
||||
const green = 'color: #00ff88; font-weight: bold';
|
||||
const yellow = 'color: #ffcc00';
|
||||
const white = 'color: #ffffff';
|
||||
|
||||
console.log('%c╔══════════════════════════════════════════════════════════════════╗', cyan);
|
||||
console.log('%c║ %c✓ RVLITE INITIALIZED SUCCESSFULLY%c ║', cyan, green, cyan);
|
||||
console.log('%c╠══════════════════════════════════════════════════════════════════╣', cyan);
|
||||
console.log(`%c║ %cVersion:%c ${version.padEnd(48)}%c║`, cyan, yellow, white, cyan);
|
||||
console.log(`%c║ %cBackend:%c ${'WebAssembly (WASM)'.padEnd(48)}%c║`, cyan, yellow, white, cyan);
|
||||
console.log(`%c║ %cDimensions:%c ${String(config.dimensions).padEnd(48)}%c║`, cyan, yellow, white, cyan);
|
||||
console.log(`%c║ %cMetric:%c ${config.distanceMetric.padEnd(48)}%c║`, cyan, yellow, white, cyan);
|
||||
console.log('%c╠══════════════════════════════════════════════════════════════════╣', cyan);
|
||||
console.log('%c║ %cFeatures:%c ║', cyan, yellow, cyan);
|
||||
console.log('%c║ ✓ Vector Search (k-NN) ✓ SQL Queries%c ║', green, cyan);
|
||||
console.log('%c║ ✓ SPARQL (RDF Triple Store) ✓ Cypher (Graph DB)%c ║', green, cyan);
|
||||
console.log('%c║ ✓ IndexedDB Persistence ✓ JSON Import/Export%c ║', green, cyan);
|
||||
console.log('%c║ ✓ Metadata Filtering ✓ Multiple Metrics%c ║', green, cyan);
|
||||
console.log('%c╠══════════════════════════════════════════════════════════════════╣', cyan);
|
||||
console.log('%c║ %cDistance Metrics:%c ║', cyan, yellow, cyan);
|
||||
console.log('%c║ • cosine - Cosine Similarity (angular distance)%c ║', white, cyan);
|
||||
console.log('%c║ • euclidean - L2 Norm (straight-line distance)%c ║', white, cyan);
|
||||
console.log('%c║ • dotproduct - Inner Product (projection similarity)%c ║', white, cyan);
|
||||
console.log('%c║ • manhattan - L1 Norm (taxicab distance)%c ║', white, cyan);
|
||||
console.log('%c╚══════════════════════════════════════════════════════════════════╝', cyan);
|
||||
};
|
||||
|
||||
bbsInit();
|
||||
|
||||
let loadedIsWasm = false;
|
||||
let loadedVersion = '';
|
||||
|
||||
try {
|
||||
bbsStatus('WASM Binary', 'LOADING', true);
|
||||
|
||||
// Check if WASM binary exists
|
||||
const wasmResponse = await fetch(wasmBinaryPath, { method: 'HEAD' });
|
||||
if (!wasmResponse.ok) {
|
||||
throw new Error('WASM binary not found');
|
||||
}
|
||||
bbsStatus('WASM Binary', 'OK', true);
|
||||
|
||||
bbsStatus('JavaScript Bindings', 'LOADING', true);
|
||||
// Dynamically import the WASM module
|
||||
// Use a blob URL to avoid Vite's module transformation
|
||||
const jsResponse = await fetch(wasmJsPath);
|
||||
if (!jsResponse.ok) {
|
||||
throw new Error(`Failed to fetch WASM JS: ${jsResponse.status}`);
|
||||
}
|
||||
bbsStatus('JavaScript Bindings', 'OK', true);
|
||||
|
||||
const jsCode = await jsResponse.text();
|
||||
|
||||
// Create a module from the JS code
|
||||
const blob = new Blob([jsCode], { type: 'application/javascript' });
|
||||
const blobUrl = URL.createObjectURL(blob);
|
||||
|
||||
try {
|
||||
bbsStatus('WebAssembly Module', 'INSTANTIATING', true);
|
||||
const wasmModule = await import(/* @vite-ignore */ blobUrl) as WasmModule;
|
||||
|
||||
// Initialize the WASM module with the correct path to the binary
|
||||
// The WASM module accepts either string path or object with module_or_path
|
||||
await (wasmModule.default as (path?: unknown) => Promise<unknown>)(wasmBinaryPath);
|
||||
wasmModule.init();
|
||||
bbsStatus('WebAssembly Module', 'OK', true);
|
||||
|
||||
bbsStatus('RvLite Configuration', 'CONFIGURING', true);
|
||||
// Create config with dimensions and distance metric
|
||||
let config = new wasmModule.RvLiteConfig(currentDimensions);
|
||||
if (currentMetric && currentMetric !== 'cosine') {
|
||||
config = config.with_distance_metric(currentMetric);
|
||||
}
|
||||
// Store the WASM module for later use (distance metric changes)
|
||||
wasmModuleRef.current = wasmModule;
|
||||
bbsStatus('RvLite Configuration', 'OK', true);
|
||||
|
||||
bbsStatus('Database Instance', 'CREATING', true);
|
||||
// Create RvLite instance
|
||||
const wasmDb = new wasmModule.RvLite(config);
|
||||
|
||||
// Wrap it with our normalized interface
|
||||
dbRef.current = createWasmWrapper(wasmDb, wasmModule.RvLite);
|
||||
loadedIsWasm = true;
|
||||
loadedVersion = wasmDb.get_version();
|
||||
setIsWasm(true);
|
||||
bbsStatus('Database Instance', 'OK', true);
|
||||
|
||||
bbsStatus('Vector Search Engine', 'READY', true);
|
||||
bbsStatus('SQL Query Engine', 'READY', true);
|
||||
bbsStatus('SPARQL Engine', 'READY', true);
|
||||
bbsStatus('Cypher Graph Engine', 'READY', true);
|
||||
bbsStatus('IndexedDB Persistence', 'AVAILABLE', true);
|
||||
} finally {
|
||||
URL.revokeObjectURL(blobUrl);
|
||||
}
|
||||
} catch (wasmError) {
|
||||
bbsStatus('WASM Module', 'FAILED TO LOAD', false);
|
||||
const errorMsg = wasmError instanceof Error ? wasmError.message : 'WASM module failed to load';
|
||||
throw new Error(`WASM required but failed to load: ${errorMsg}`);
|
||||
}
|
||||
|
||||
if (dbRef.current) {
|
||||
setIsReady(true);
|
||||
// Display completion banner
|
||||
bbsComplete(loadedVersion, loadedIsWasm, { dimensions: currentDimensions, distanceMetric: currentMetric });
|
||||
// Update stats after a short delay to ensure WASM is fully initialized
|
||||
setTimeout(() => updateStatsInternal(), 100);
|
||||
// Update storage status
|
||||
checkStorageStatus();
|
||||
}
|
||||
} catch (err) {
|
||||
const message = err instanceof Error ? err.message : 'Unknown error';
|
||||
setError(message);
|
||||
console.error('RvLite initialization failed:', err);
|
||||
} finally {
|
||||
setIsLoading(false);
|
||||
}
|
||||
};
|
||||
|
||||
init();
|
||||
}, [currentDimensions, currentMetric]);
|
||||
|
||||
// Internal stats update (not a callback to avoid dependency issues)
|
||||
const updateStatsInternal = () => {
|
||||
if (!dbRef.current) return;
|
||||
|
||||
try {
|
||||
const db = dbRef.current;
|
||||
const cypherStats = db.cypher_stats();
|
||||
const config = db.get_config();
|
||||
|
||||
setStats({
|
||||
vectorCount: db.len(),
|
||||
dimensions: config.dimensions,
|
||||
distanceMetric: config.distance_metric,
|
||||
tripleCount: db.triple_count(),
|
||||
graphNodeCount: cypherStats.nodes ?? 0,
|
||||
graphEdgeCount: cypherStats.relationships ?? 0,
|
||||
features: db.get_features(),
|
||||
version: db.get_version(),
|
||||
memoryUsage: `${Math.round((db.len() * currentDimensions * 4) / 1024)} KB`,
|
||||
});
|
||||
} catch (e) {
|
||||
console.error('Failed to update stats:', e);
|
||||
}
|
||||
};
|
||||
|
||||
// Update stats
|
||||
const updateStats = useCallback(() => {
|
||||
updateStatsInternal();
|
||||
}, []);
|
||||
|
||||
// Vector operations
|
||||
const insertVector = useCallback((vector: number[], metadata?: Record<string, unknown>) => {
|
||||
if (!dbRef.current) throw new Error('RvLite not initialized');
|
||||
const id = dbRef.current.insert(vector, metadata);
|
||||
updateStatsInternal();
|
||||
return id;
|
||||
}, []);
|
||||
|
||||
const insertVectorWithId = useCallback((id: string, vector: number[], metadata?: Record<string, unknown>) => {
|
||||
if (!dbRef.current) throw new Error('RvLite not initialized');
|
||||
dbRef.current.insert_with_id(id, vector, metadata);
|
||||
updateStatsInternal();
|
||||
}, []);
|
||||
|
||||
const searchVectors = useCallback((queryVector: number[], k: number = 10) => {
|
||||
if (!dbRef.current) throw new Error('RvLite not initialized');
|
||||
return dbRef.current.search(queryVector, k);
|
||||
}, []);
|
||||
|
||||
const searchVectorsWithFilter = useCallback((queryVector: number[], k: number, filter: Record<string, unknown>) => {
|
||||
if (!dbRef.current) throw new Error('RvLite not initialized');
|
||||
return dbRef.current.search_with_filter(queryVector, k, filter);
|
||||
}, []);
|
||||
|
||||
const getVector = useCallback((id: string) => {
|
||||
if (!dbRef.current) throw new Error('RvLite not initialized');
|
||||
return dbRef.current.get(id);
|
||||
}, []);
|
||||
|
||||
const deleteVector = useCallback((id: string) => {
|
||||
if (!dbRef.current) throw new Error('RvLite not initialized');
|
||||
const result = dbRef.current.delete(id);
|
||||
updateStatsInternal();
|
||||
return result;
|
||||
}, []);
|
||||
|
||||
const getAllVectors = useCallback(() => {
|
||||
if (!dbRef.current) return [];
|
||||
const randomVector = Array(currentDimensions).fill(0).map(() => Math.random());
|
||||
const count = dbRef.current.len();
|
||||
if (count === 0) return [];
|
||||
return dbRef.current.search(randomVector, count);
|
||||
}, [currentDimensions]);
|
||||
|
||||
// SQL operations
|
||||
const executeSql = useCallback((query: string) => {
|
||||
if (!dbRef.current) throw new Error('RvLite not initialized');
|
||||
const result = dbRef.current.sql(query);
|
||||
updateStatsInternal();
|
||||
return result;
|
||||
}, []);
|
||||
|
||||
// Cypher operations
|
||||
const executeCypher = useCallback((query: string) => {
|
||||
if (!dbRef.current) throw new Error('RvLite not initialized');
|
||||
const result = dbRef.current.cypher(query);
|
||||
updateStatsInternal();
|
||||
return result;
|
||||
}, []);
|
||||
|
||||
const getCypherStats = useCallback(() => {
|
||||
if (!dbRef.current) return { nodes: 0, relationships: 0 };
|
||||
return dbRef.current.cypher_stats();
|
||||
}, []);
|
||||
|
||||
const clearCypher = useCallback(() => {
|
||||
if (!dbRef.current) return;
|
||||
dbRef.current.cypher_clear();
|
||||
updateStatsInternal();
|
||||
}, []);
|
||||
|
||||
// SPARQL operations
|
||||
const executeSparql = useCallback((query: string) => {
|
||||
if (!dbRef.current) throw new Error('RvLite not initialized');
|
||||
return dbRef.current.sparql(query);
|
||||
}, []);
|
||||
|
||||
const addTriple = useCallback((subject: string, predicate: string, object: string) => {
|
||||
if (!dbRef.current) throw new Error('RvLite not initialized');
|
||||
dbRef.current.add_triple(subject, predicate, object);
|
||||
updateStatsInternal();
|
||||
}, []);
|
||||
|
||||
const clearTriples = useCallback(() => {
|
||||
if (!dbRef.current) return;
|
||||
dbRef.current.clear_triples();
|
||||
updateStatsInternal();
|
||||
}, []);
|
||||
|
||||
// Persistence operations
|
||||
const saveDatabase = useCallback(async () => {
|
||||
if (!dbRef.current) throw new Error('RvLite not initialized');
|
||||
return dbRef.current.save();
|
||||
}, []);
|
||||
|
||||
const exportDatabase = useCallback(() => {
|
||||
if (!dbRef.current) throw new Error('RvLite not initialized');
|
||||
return dbRef.current.export_json();
|
||||
}, []);
|
||||
|
||||
const importDatabase = useCallback((json: Record<string, unknown>) => {
|
||||
if (!dbRef.current) throw new Error('RvLite not initialized');
|
||||
dbRef.current.import_json(json);
|
||||
updateStatsInternal();
|
||||
}, []);
|
||||
|
||||
const clearDatabase = useCallback(async () => {
|
||||
if (!dbRef.current) return;
|
||||
await dbRef.current.clear_storage();
|
||||
dbRef.current.cypher_clear();
|
||||
dbRef.current.clear_triples();
|
||||
updateStatsInternal();
|
||||
}, []);
|
||||
|
||||
// Generate random vector
|
||||
const generateVector = useCallback((dim?: number) => {
|
||||
const d = dim || currentDimensions;
|
||||
return Array(d).fill(0).map(() => Math.random() * 2 - 1);
|
||||
}, [currentDimensions]);
|
||||
|
||||
// Check storage status
|
||||
const checkStorageStatus = useCallback(async () => {
|
||||
if (!dbRef.current) return;
|
||||
|
||||
try {
|
||||
const hasSaved = await dbRef.current.has_saved_state();
|
||||
const vectorCount = dbRef.current.len();
|
||||
const tripleCount = dbRef.current.triple_count();
|
||||
const cypherStats = dbRef.current.cypher_stats();
|
||||
|
||||
// Estimate storage size (vectors + triples + graph)
|
||||
const vectorBytes = vectorCount * currentDimensions * 4; // float32
|
||||
const tripleBytes = tripleCount * 200; // estimate per triple
|
||||
const graphBytes = (cypherStats.nodes + cypherStats.relationships) * 100;
|
||||
const estimatedSize = vectorBytes + tripleBytes + graphBytes;
|
||||
|
||||
setStorageStatus({
|
||||
available: true,
|
||||
hasSavedState: hasSaved,
|
||||
estimatedSize,
|
||||
});
|
||||
} catch {
|
||||
setStorageStatus(prev => ({ ...prev, available: false }));
|
||||
}
|
||||
}, [currentDimensions]);
|
||||
|
||||
// Change distance metric (recreates the database instance)
|
||||
const changeDistanceMetric = useCallback(async (newMetric: string): Promise<boolean> => {
|
||||
if (!wasmModuleRef.current || !isWasm) {
|
||||
// WASM required - no fallback
|
||||
console.error('WASM module required for distance metric change');
|
||||
return false;
|
||||
}
|
||||
|
||||
try {
|
||||
// Export current data
|
||||
const exportedData = dbRef.current?.export_json();
|
||||
|
||||
// Create new config with new metric
|
||||
const wasmModule = wasmModuleRef.current;
|
||||
let config = new wasmModule.RvLiteConfig(currentDimensions);
|
||||
if (newMetric !== 'cosine') {
|
||||
config = config.with_distance_metric(newMetric);
|
||||
}
|
||||
|
||||
// Create new instance
|
||||
const wasmDb = new wasmModule.RvLite(config);
|
||||
dbRef.current = createWasmWrapper(wasmDb, wasmModule.RvLite);
|
||||
|
||||
// Re-import the data
|
||||
if (exportedData) {
|
||||
dbRef.current.import_json(exportedData);
|
||||
}
|
||||
|
||||
setCurrentMetric(newMetric);
|
||||
updateStatsInternal();
|
||||
|
||||
console.log(`%c Distance metric changed to: ${newMetric}`, 'color: #00ff88; font-weight: bold');
|
||||
return true;
|
||||
} catch (err) {
|
||||
console.error('Failed to change distance metric:', err);
|
||||
return false;
|
||||
}
|
||||
}, [isWasm, currentDimensions]);
|
||||
|
||||
// Clear IndexedDB storage
|
||||
const clearStorageData = useCallback(async (): Promise<boolean> => {
|
||||
if (!dbRef.current) return false;
|
||||
|
||||
try {
|
||||
const result = await dbRef.current.clear_storage();
|
||||
await checkStorageStatus();
|
||||
return result;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}, [checkStorageStatus]);
|
||||
|
||||
return {
|
||||
// State
|
||||
isReady,
|
||||
isLoading,
|
||||
isWasm,
|
||||
error,
|
||||
stats,
|
||||
storageStatus,
|
||||
|
||||
// Vector operations
|
||||
insertVector,
|
||||
insertVectorWithId,
|
||||
searchVectors,
|
||||
searchVectorsWithFilter,
|
||||
getVector,
|
||||
deleteVector,
|
||||
getAllVectors,
|
||||
|
||||
// SQL
|
||||
executeSql,
|
||||
|
||||
// Cypher
|
||||
executeCypher,
|
||||
getCypherStats,
|
||||
clearCypher,
|
||||
|
||||
// SPARQL
|
||||
executeSparql,
|
||||
addTriple,
|
||||
clearTriples,
|
||||
|
||||
// Persistence
|
||||
saveDatabase,
|
||||
exportDatabase,
|
||||
importDatabase,
|
||||
clearDatabase,
|
||||
|
||||
// Configuration
|
||||
changeDistanceMetric,
|
||||
clearStorageData,
|
||||
checkStorageStatus,
|
||||
|
||||
// Utilities
|
||||
generateVector,
|
||||
updateStats,
|
||||
};
|
||||
}
|
||||
|
||||
export default useRvLite;
|
||||
43
vendor/ruvector/crates/rvlite/examples/dashboard/src/index.css
vendored
Normal file
43
vendor/ruvector/crates/rvlite/examples/dashboard/src/index.css
vendored
Normal file
@@ -0,0 +1,43 @@
|
||||
/* Tailwind v4 entry point, HeroUI theme plugin, and the theme's class
   sources for utility scanning. */
@import "tailwindcss";
@plugin "./hero.ts";
@source "../node_modules/@heroui/theme/dist/**/*.{js,ts,jsx,tsx}";
/* Class-based dark mode: active when any ancestor carries .dark. */
@custom-variant dark (&:is(.dark *));

/* Base typography and font-rendering tweaks for the whole app. */
:root {
  font-family: Inter, system-ui, Avenir, Helvetica, Arial, sans-serif;
  line-height: 1.5;
  font-weight: 400;
  font-synthesis: none;
  text-rendering: optimizeLegibility;
  -webkit-font-smoothing: antialiased;
  -moz-osx-font-smoothing: grayscale;
}

body {
  margin: 0;
  min-height: 100vh;
}

/* Custom scrollbar (WebKit/Blink engines only). */
::-webkit-scrollbar {
  width: 8px;
  height: 8px;
}

::-webkit-scrollbar-track {
  background: #1a1a2e;
}

::-webkit-scrollbar-thumb {
  background: #4ecca3;
  border-radius: 4px;
}

::-webkit-scrollbar-thumb:hover {
  background: #3db892;
}

/* Code editor styling */
.code-editor {
  font-family: 'Fira Code', 'Monaco', 'Consolas', monospace;
}
|
||||
752
vendor/ruvector/crates/rvlite/examples/dashboard/src/lib/NeuralEngine.ts
vendored
Normal file
752
vendor/ruvector/crates/rvlite/examples/dashboard/src/lib/NeuralEngine.ts
vendored
Normal file
@@ -0,0 +1,752 @@
|
||||
/**
|
||||
* Real Neural Network Engine for RvLite Dashboard
|
||||
*
|
||||
* Implements actual neural network computations without mocks:
|
||||
* - Multi-layer perceptron with configurable architecture
|
||||
* - Real gradient descent with multiple optimizers (SGD, Adam, RMSprop)
|
||||
* - Xavier/He weight initialization
|
||||
* - Learning rate schedulers
|
||||
* - Regularization (L1, L2, Dropout)
|
||||
* - Real loss functions (MSE, Cross-entropy)
|
||||
*/
|
||||
|
||||
// ---- Public types ----

/** Architecture and hyper-parameters for a NeuralEngine. */
export interface NeuralConfig {
  inputSize: number;
  hiddenLayers: number[]; // Array of hidden layer sizes
  outputSize: number;
  activation: 'relu' | 'tanh' | 'sigmoid' | 'leaky_relu';
  outputActivation: 'sigmoid' | 'softmax' | 'linear';
  learningRate: number;
  optimizer: 'sgd' | 'adam' | 'rmsprop' | 'adagrad';
  regularization: 'none' | 'l1' | 'l2' | 'dropout';
  regularizationStrength: number; // L1/L2 penalty coefficient
  dropoutRate: number;            // probability of dropping a hidden unit
  batchSize: number;
}

/** Per-epoch metrics recorded into the training history. */
export interface TrainingResult {
  epoch: number;
  loss: number;
  accuracy: number;
  validationLoss?: number;     // present only when validation data was supplied
  validationAccuracy?: number;
  learningRate: number;
  gradientNorm: number;        // mean L2 norm of the epoch's gradients
  timestamp: number;           // Date.now() when the epoch finished
}

/** Weights for one fully-connected layer, plus optional optimizer state. */
export interface LayerWeights {
  W: number[][]; // Weight matrix
  b: number[]; // Bias vector
  // Adam optimizer state (also reused as accumulators by RMSprop/Adagrad)
  mW?: number[][];
  vW?: number[][];
  mb?: number[];
  vb?: number[];
}

/** Full serializable snapshot of an engine (see getState/loadState). */
export interface NeuralState {
  weights: LayerWeights[];
  config: NeuralConfig;
  trainingHistory: TrainingResult[];
  epoch: number;
  totalIterations: number;
}
|
||||
|
||||
// Activation functions and their derivatives. Keyed by name so forward()
// and backward() can look them up from config, e.g. activations['relu']
// and activations['reluDerivative'].
const activations = {
  relu: (x: number) => Math.max(0, x),
  reluDerivative: (x: number) => x > 0 ? 1 : 0,

  leaky_relu: (x: number) => x > 0 ? x : 0.01 * x,
  leaky_reluDerivative: (x: number) => x > 0 ? 1 : 0.01,

  tanh: (x: number) => Math.tanh(x),
  tanhDerivative: (x: number) => 1 - Math.pow(Math.tanh(x), 2),

  // Input clamped to [-500, 500] so Math.exp cannot overflow to Infinity.
  sigmoid: (x: number) => 1 / (1 + Math.exp(-Math.max(-500, Math.min(500, x)))),
  sigmoidDerivative: (x: number) => {
    const s = activations.sigmoid(x);
    return s * (1 - s);
  },

  linear: (x: number) => x,
  linearDerivative: () => 1,

  // Numerically stable softmax: shift by the max before exponentiating.
  softmax: (arr: number[]): number[] => {
    const max = Math.max(...arr);
    const exps = arr.map(x => Math.exp(Math.min(x - max, 500)));
    const sum = exps.reduce((a, b) => a + b, 0);
    return exps.map(e => e / sum);
  },
};
|
||||
|
||||
// Defaults: a small 10-16-8-1 MLP with sigmoid output (binary
// classification), trained with Adam plus light L2 regularization.
const defaultConfig: NeuralConfig = {
  inputSize: 10,
  hiddenLayers: [16, 8],
  outputSize: 1,
  activation: 'relu',
  outputActivation: 'sigmoid',
  learningRate: 0.001,
  optimizer: 'adam',
  regularization: 'l2',
  regularizationStrength: 0.0001,
  dropoutRate: 0.1,
  batchSize: 32,
};
|
||||
|
||||
/**
|
||||
* Real Neural Network Engine
|
||||
* All computations are performed with actual mathematics
|
||||
*/
|
||||
export class NeuralEngine {
|
||||
private config: NeuralConfig;
|
||||
private weights: LayerWeights[] = [];
|
||||
private trainingHistory: TrainingResult[] = [];
|
||||
private epoch: number = 0;
|
||||
private totalIterations: number = 0;
|
||||
private adamBeta1: number = 0.9;
|
||||
private adamBeta2: number = 0.999;
|
||||
private adamEpsilon: number = 1e-8;
|
||||
|
||||
constructor(config: Partial<NeuralConfig> = {}) {
|
||||
this.config = { ...defaultConfig, ...config };
|
||||
this.initializeWeights();
|
||||
}
|
||||
|
||||
/**
|
||||
* Initialize weights using Xavier/He initialization
|
||||
*/
|
||||
private initializeWeights(): void {
|
||||
const sizes = [
|
||||
this.config.inputSize,
|
||||
...this.config.hiddenLayers,
|
||||
this.config.outputSize,
|
||||
];
|
||||
|
||||
this.weights = [];
|
||||
|
||||
for (let i = 0; i < sizes.length - 1; i++) {
|
||||
const fanIn = sizes[i];
|
||||
const fanOut = sizes[i + 1];
|
||||
|
||||
// Xavier initialization for tanh/sigmoid, He for ReLU
|
||||
const scale = this.config.activation === 'relu' || this.config.activation === 'leaky_relu'
|
||||
? Math.sqrt(2 / fanIn) // He initialization
|
||||
: Math.sqrt(2 / (fanIn + fanOut)); // Xavier
|
||||
|
||||
const W: number[][] = [];
|
||||
const mW: number[][] = [];
|
||||
const vW: number[][] = [];
|
||||
|
||||
for (let j = 0; j < fanIn; j++) {
|
||||
const row: number[] = [];
|
||||
const mRow: number[] = [];
|
||||
const vRow: number[] = [];
|
||||
for (let k = 0; k < fanOut; k++) {
|
||||
// Box-Muller transform for normal distribution
|
||||
const u1 = Math.random();
|
||||
const u2 = Math.random();
|
||||
const normal = Math.sqrt(-2 * Math.log(u1)) * Math.cos(2 * Math.PI * u2);
|
||||
row.push(normal * scale);
|
||||
mRow.push(0); // Adam momentum
|
||||
vRow.push(0); // Adam velocity
|
||||
}
|
||||
W.push(row);
|
||||
mW.push(mRow);
|
||||
vW.push(vRow);
|
||||
}
|
||||
|
||||
const b: number[] = new Array(fanOut).fill(0);
|
||||
const mb: number[] = new Array(fanOut).fill(0);
|
||||
const vb: number[] = new Array(fanOut).fill(0);
|
||||
|
||||
this.weights.push({ W, b, mW, vW, mb, vb });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Forward pass through the network
|
||||
*/
|
||||
forward(input: number[], training: boolean = false): {
|
||||
output: number[];
|
||||
activations: number[][];
|
||||
preActivations: number[][];
|
||||
dropoutMasks?: boolean[][];
|
||||
} {
|
||||
const activationsList: number[][] = [input];
|
||||
const preActivationsList: number[][] = [];
|
||||
const dropoutMasks: boolean[][] = [];
|
||||
|
||||
let current = [...input];
|
||||
|
||||
for (let layer = 0; layer < this.weights.length; layer++) {
|
||||
const { W, b } = this.weights[layer];
|
||||
const isOutput = layer === this.weights.length - 1;
|
||||
|
||||
// Matrix multiplication: W^T * current + b
|
||||
const preActivation: number[] = [];
|
||||
for (let j = 0; j < W[0].length; j++) {
|
||||
let sum = b[j];
|
||||
for (let i = 0; i < current.length && i < W.length; i++) {
|
||||
sum += current[i] * W[i][j];
|
||||
}
|
||||
preActivation.push(sum);
|
||||
}
|
||||
preActivationsList.push(preActivation);
|
||||
|
||||
// Apply activation
|
||||
let activated: number[];
|
||||
if (isOutput) {
|
||||
if (this.config.outputActivation === 'softmax') {
|
||||
activated = activations.softmax(preActivation);
|
||||
} else if (this.config.outputActivation === 'linear') {
|
||||
activated = preActivation.map(activations.linear);
|
||||
} else {
|
||||
activated = preActivation.map(activations.sigmoid);
|
||||
}
|
||||
} else {
|
||||
const fn = activations[this.config.activation];
|
||||
activated = preActivation.map(fn);
|
||||
}
|
||||
|
||||
// Apply dropout during training
|
||||
if (training && !isOutput && this.config.regularization === 'dropout') {
|
||||
const mask = activated.map(() => Math.random() > this.config.dropoutRate);
|
||||
dropoutMasks.push(mask);
|
||||
activated = activated.map((val, idx) =>
|
||||
mask[idx] ? val / (1 - this.config.dropoutRate) : 0
|
||||
);
|
||||
}
|
||||
|
||||
activationsList.push(activated);
|
||||
current = activated;
|
||||
}
|
||||
|
||||
return {
|
||||
output: current,
|
||||
activations: activationsList,
|
||||
preActivations: preActivationsList,
|
||||
dropoutMasks: dropoutMasks.length > 0 ? dropoutMasks : undefined,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Backward pass with gradient computation
|
||||
*/
|
||||
private backward(
|
||||
target: number[],
|
||||
forwardResult: ReturnType<typeof this.forward>
|
||||
): { gradients: { dW: number[][]; db: number[] }[]; loss: number } {
|
||||
const { output, activations: acts, preActivations, dropoutMasks } = forwardResult;
|
||||
const gradients: { dW: number[][]; db: number[] }[] = [];
|
||||
|
||||
// Calculate loss (MSE for regression, BCE for classification)
|
||||
let loss = 0;
|
||||
const outputDelta: number[] = [];
|
||||
|
||||
for (let i = 0; i < output.length; i++) {
|
||||
const diff = output[i] - target[i];
|
||||
loss += diff * diff;
|
||||
|
||||
// Output gradient for sigmoid output
|
||||
if (this.config.outputActivation === 'sigmoid') {
|
||||
outputDelta.push(diff * output[i] * (1 - output[i]));
|
||||
} else {
|
||||
outputDelta.push(diff); // Linear or MSE gradient
|
||||
}
|
||||
}
|
||||
loss /= output.length;
|
||||
|
||||
// Backpropagate through layers
|
||||
let delta = outputDelta;
|
||||
|
||||
for (let layer = this.weights.length - 1; layer >= 0; layer--) {
|
||||
const { W } = this.weights[layer];
|
||||
const prevActivations = acts[layer];
|
||||
|
||||
// Gradient for weights: delta * prevActivations^T
|
||||
const dW: number[][] = [];
|
||||
for (let i = 0; i < prevActivations.length; i++) {
|
||||
const row: number[] = [];
|
||||
for (let j = 0; j < delta.length; j++) {
|
||||
let grad = delta[j] * prevActivations[i];
|
||||
|
||||
// L2 regularization
|
||||
if (this.config.regularization === 'l2' && i < W.length && j < W[i].length) {
|
||||
grad += this.config.regularizationStrength * W[i][j];
|
||||
}
|
||||
// L1 regularization
|
||||
if (this.config.regularization === 'l1' && i < W.length && j < W[i].length) {
|
||||
grad += this.config.regularizationStrength * Math.sign(W[i][j]);
|
||||
}
|
||||
|
||||
row.push(grad);
|
||||
}
|
||||
dW.push(row);
|
||||
}
|
||||
|
||||
// Gradient for biases
|
||||
const db = [...delta];
|
||||
|
||||
gradients.unshift({ dW, db });
|
||||
|
||||
// Propagate to previous layer
|
||||
if (layer > 0) {
|
||||
const newDelta: number[] = [];
|
||||
const preAct = preActivations[layer - 1];
|
||||
const derivFn = activations[`${this.config.activation}Derivative` as keyof typeof activations] as (x: number) => number;
|
||||
|
||||
for (let i = 0; i < W.length; i++) {
|
||||
let sum = 0;
|
||||
for (let j = 0; j < delta.length && j < W[i].length; j++) {
|
||||
sum += delta[j] * W[i][j];
|
||||
}
|
||||
const deriv = derivFn ? derivFn(preAct[i] || 0) : 1;
|
||||
let grad = sum * deriv;
|
||||
|
||||
// Apply dropout mask
|
||||
if (dropoutMasks && dropoutMasks[layer - 1]) {
|
||||
grad = dropoutMasks[layer - 1][i] ? grad / (1 - this.config.dropoutRate) : 0;
|
||||
}
|
||||
|
||||
newDelta.push(grad);
|
||||
}
|
||||
delta = newDelta;
|
||||
}
|
||||
}
|
||||
|
||||
return { gradients, loss };
|
||||
}
|
||||
|
||||
/**
|
||||
* Update weights using selected optimizer
|
||||
*/
|
||||
private updateWeights(gradients: { dW: number[][]; db: number[] }[]): number {
|
||||
let gradientNorm = 0;
|
||||
this.totalIterations++;
|
||||
|
||||
for (let layer = 0; layer < this.weights.length; layer++) {
|
||||
const { dW, db } = gradients[layer];
|
||||
const layerWeights = this.weights[layer];
|
||||
|
||||
if (this.config.optimizer === 'adam') {
|
||||
// Adam optimizer
|
||||
const t = this.totalIterations;
|
||||
const lr = this.config.learningRate *
|
||||
Math.sqrt(1 - Math.pow(this.adamBeta2, t)) /
|
||||
(1 - Math.pow(this.adamBeta1, t));
|
||||
|
||||
for (let i = 0; i < dW.length && i < layerWeights.W.length; i++) {
|
||||
for (let j = 0; j < dW[i].length && j < layerWeights.W[i].length; j++) {
|
||||
const g = dW[i][j];
|
||||
gradientNorm += g * g;
|
||||
|
||||
// Update momentum and velocity
|
||||
layerWeights.mW![i][j] = this.adamBeta1 * layerWeights.mW![i][j] + (1 - this.adamBeta1) * g;
|
||||
layerWeights.vW![i][j] = this.adamBeta2 * layerWeights.vW![i][j] + (1 - this.adamBeta2) * g * g;
|
||||
|
||||
// Update weight
|
||||
layerWeights.W[i][j] -= lr * layerWeights.mW![i][j] / (Math.sqrt(layerWeights.vW![i][j]) + this.adamEpsilon);
|
||||
}
|
||||
}
|
||||
|
||||
for (let j = 0; j < db.length && j < layerWeights.b.length; j++) {
|
||||
const g = db[j];
|
||||
gradientNorm += g * g;
|
||||
|
||||
layerWeights.mb![j] = this.adamBeta1 * layerWeights.mb![j] + (1 - this.adamBeta1) * g;
|
||||
layerWeights.vb![j] = this.adamBeta2 * layerWeights.vb![j] + (1 - this.adamBeta2) * g * g;
|
||||
|
||||
layerWeights.b[j] -= lr * layerWeights.mb![j] / (Math.sqrt(layerWeights.vb![j]) + this.adamEpsilon);
|
||||
}
|
||||
|
||||
} else if (this.config.optimizer === 'rmsprop') {
|
||||
// RMSprop optimizer
|
||||
const decay = 0.9;
|
||||
|
||||
for (let i = 0; i < dW.length && i < layerWeights.W.length; i++) {
|
||||
for (let j = 0; j < dW[i].length && j < layerWeights.W[i].length; j++) {
|
||||
const g = dW[i][j];
|
||||
gradientNorm += g * g;
|
||||
|
||||
layerWeights.vW![i][j] = decay * layerWeights.vW![i][j] + (1 - decay) * g * g;
|
||||
layerWeights.W[i][j] -= this.config.learningRate * g / (Math.sqrt(layerWeights.vW![i][j]) + 1e-8);
|
||||
}
|
||||
}
|
||||
|
||||
for (let j = 0; j < db.length && j < layerWeights.b.length; j++) {
|
||||
const g = db[j];
|
||||
gradientNorm += g * g;
|
||||
|
||||
layerWeights.vb![j] = decay * layerWeights.vb![j] + (1 - decay) * g * g;
|
||||
layerWeights.b[j] -= this.config.learningRate * g / (Math.sqrt(layerWeights.vb![j]) + 1e-8);
|
||||
}
|
||||
|
||||
} else if (this.config.optimizer === 'adagrad') {
|
||||
// Adagrad optimizer
|
||||
for (let i = 0; i < dW.length && i < layerWeights.W.length; i++) {
|
||||
for (let j = 0; j < dW[i].length && j < layerWeights.W[i].length; j++) {
|
||||
const g = dW[i][j];
|
||||
gradientNorm += g * g;
|
||||
|
||||
layerWeights.vW![i][j] += g * g;
|
||||
layerWeights.W[i][j] -= this.config.learningRate * g / (Math.sqrt(layerWeights.vW![i][j]) + 1e-8);
|
||||
}
|
||||
}
|
||||
|
||||
for (let j = 0; j < db.length && j < layerWeights.b.length; j++) {
|
||||
const g = db[j];
|
||||
gradientNorm += g * g;
|
||||
|
||||
layerWeights.vb![j] += g * g;
|
||||
layerWeights.b[j] -= this.config.learningRate * g / (Math.sqrt(layerWeights.vb![j]) + 1e-8);
|
||||
}
|
||||
|
||||
} else {
|
||||
// SGD optimizer
|
||||
for (let i = 0; i < dW.length && i < layerWeights.W.length; i++) {
|
||||
for (let j = 0; j < dW[i].length && j < layerWeights.W[i].length; j++) {
|
||||
const g = dW[i][j];
|
||||
gradientNorm += g * g;
|
||||
layerWeights.W[i][j] -= this.config.learningRate * g;
|
||||
}
|
||||
}
|
||||
|
||||
for (let j = 0; j < db.length && j < layerWeights.b.length; j++) {
|
||||
const g = db[j];
|
||||
gradientNorm += g * g;
|
||||
layerWeights.b[j] -= this.config.learningRate * g;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return Math.sqrt(gradientNorm);
|
||||
}
|
||||
|
||||
/**
|
||||
* Train on a single batch
|
||||
*/
|
||||
trainBatch(inputs: number[][], targets: number[][]): { loss: number; gradientNorm: number } {
|
||||
let totalLoss = 0;
|
||||
let totalGradientNorm = 0;
|
||||
|
||||
for (let i = 0; i < inputs.length; i++) {
|
||||
const forwardResult = this.forward(inputs[i], true);
|
||||
const { gradients, loss } = this.backward(targets[i], forwardResult);
|
||||
const gradientNorm = this.updateWeights(gradients);
|
||||
|
||||
totalLoss += loss;
|
||||
totalGradientNorm += gradientNorm;
|
||||
}
|
||||
|
||||
return {
|
||||
loss: totalLoss / inputs.length,
|
||||
gradientNorm: totalGradientNorm / inputs.length,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Train for one epoch over all data
|
||||
*/
|
||||
async trainEpoch(
|
||||
inputs: number[][],
|
||||
targets: number[][],
|
||||
validationInputs?: number[][],
|
||||
validationTargets?: number[][],
|
||||
onProgress?: (result: TrainingResult) => void
|
||||
): Promise<TrainingResult> {
|
||||
this.epoch++;
|
||||
let epochLoss = 0;
|
||||
let gradientNorm = 0;
|
||||
let correct = 0;
|
||||
|
||||
// Shuffle data
|
||||
const indices = Array.from({ length: inputs.length }, (_, i) => i);
|
||||
for (let i = indices.length - 1; i > 0; i--) {
|
||||
const j = Math.floor(Math.random() * (i + 1));
|
||||
[indices[i], indices[j]] = [indices[j], indices[i]];
|
||||
}
|
||||
|
||||
// Train in batches
|
||||
const batchSize = Math.min(this.config.batchSize, inputs.length);
|
||||
const numBatches = Math.ceil(inputs.length / batchSize);
|
||||
|
||||
for (let batch = 0; batch < numBatches; batch++) {
|
||||
const startIdx = batch * batchSize;
|
||||
const endIdx = Math.min(startIdx + batchSize, inputs.length);
|
||||
|
||||
const batchInputs: number[][] = [];
|
||||
const batchTargets: number[][] = [];
|
||||
|
||||
for (let i = startIdx; i < endIdx; i++) {
|
||||
batchInputs.push(inputs[indices[i]]);
|
||||
batchTargets.push(targets[indices[i]]);
|
||||
}
|
||||
|
||||
const result = this.trainBatch(batchInputs, batchTargets);
|
||||
epochLoss += result.loss * batchInputs.length;
|
||||
gradientNorm += result.gradientNorm;
|
||||
|
||||
// Yield to UI
|
||||
if (batch % 10 === 0) {
|
||||
await new Promise(resolve => setTimeout(resolve, 0));
|
||||
}
|
||||
}
|
||||
|
||||
epochLoss /= inputs.length;
|
||||
gradientNorm /= numBatches;
|
||||
|
||||
// Calculate training accuracy
|
||||
for (let i = 0; i < inputs.length; i++) {
|
||||
const { output } = this.forward(inputs[i], false);
|
||||
const predicted = output[0] > 0.5 ? 1 : 0;
|
||||
const actual = targets[i][0] > 0.5 ? 1 : 0;
|
||||
if (predicted === actual) correct++;
|
||||
}
|
||||
const accuracy = correct / inputs.length;
|
||||
|
||||
// Validation metrics
|
||||
let validationLoss: number | undefined;
|
||||
let validationAccuracy: number | undefined;
|
||||
|
||||
if (validationInputs && validationTargets) {
|
||||
let valLoss = 0;
|
||||
let valCorrect = 0;
|
||||
|
||||
for (let i = 0; i < validationInputs.length; i++) {
|
||||
const { output } = this.forward(validationInputs[i], false);
|
||||
const diff = output[0] - validationTargets[i][0];
|
||||
valLoss += diff * diff;
|
||||
|
||||
const predicted = output[0] > 0.5 ? 1 : 0;
|
||||
const actual = validationTargets[i][0] > 0.5 ? 1 : 0;
|
||||
if (predicted === actual) valCorrect++;
|
||||
}
|
||||
|
||||
validationLoss = valLoss / validationInputs.length;
|
||||
validationAccuracy = valCorrect / validationInputs.length;
|
||||
}
|
||||
|
||||
const result: TrainingResult = {
|
||||
epoch: this.epoch,
|
||||
loss: epochLoss,
|
||||
accuracy,
|
||||
validationLoss,
|
||||
validationAccuracy,
|
||||
learningRate: this.config.learningRate,
|
||||
gradientNorm,
|
||||
timestamp: Date.now(),
|
||||
};
|
||||
|
||||
this.trainingHistory.push(result);
|
||||
|
||||
if (onProgress) {
|
||||
onProgress(result);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Train for multiple epochs
|
||||
*/
|
||||
async train(
|
||||
inputs: number[][],
|
||||
targets: number[][],
|
||||
epochs: number,
|
||||
validationSplit: number = 0.2,
|
||||
onProgress?: (result: TrainingResult) => void,
|
||||
earlyStopPatience?: number
|
||||
): Promise<TrainingResult[]> {
|
||||
// Split data for validation
|
||||
const splitIdx = Math.floor(inputs.length * (1 - validationSplit));
|
||||
const trainInputs = inputs.slice(0, splitIdx);
|
||||
const trainTargets = targets.slice(0, splitIdx);
|
||||
const valInputs = inputs.slice(splitIdx);
|
||||
const valTargets = targets.slice(splitIdx);
|
||||
|
||||
let bestValLoss = Infinity;
|
||||
let patienceCounter = 0;
|
||||
|
||||
for (let e = 0; e < epochs; e++) {
|
||||
const result = await this.trainEpoch(
|
||||
trainInputs,
|
||||
trainTargets,
|
||||
valInputs.length > 0 ? valInputs : undefined,
|
||||
valTargets.length > 0 ? valTargets : undefined,
|
||||
onProgress
|
||||
);
|
||||
|
||||
// Early stopping check
|
||||
if (earlyStopPatience && result.validationLoss !== undefined) {
|
||||
if (result.validationLoss < bestValLoss) {
|
||||
bestValLoss = result.validationLoss;
|
||||
patienceCounter = 0;
|
||||
} else {
|
||||
patienceCounter++;
|
||||
if (patienceCounter >= earlyStopPatience) {
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return this.trainingHistory;
|
||||
}
|
||||
|
||||
/**
|
||||
* Predict output for input
|
||||
*/
|
||||
predict(input: number[]): number[] {
|
||||
return this.forward(input, false).output;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get embedding (hidden layer activations)
|
||||
*/
|
||||
getEmbedding(input: number[], layer: number = -1): number[] {
|
||||
const { activations } = this.forward(input, false);
|
||||
const targetLayer = layer < 0 ? activations.length + layer - 1 : layer;
|
||||
return activations[Math.max(0, Math.min(targetLayer, activations.length - 1))];
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current configuration
|
||||
*/
|
||||
getConfig(): NeuralConfig {
|
||||
return { ...this.config };
|
||||
}
|
||||
|
||||
/**
|
||||
* Update configuration (reinitializes weights if architecture changes)
|
||||
*/
|
||||
updateConfig(newConfig: Partial<NeuralConfig>): void {
|
||||
const architectureChanged =
|
||||
newConfig.inputSize !== this.config.inputSize ||
|
||||
newConfig.outputSize !== this.config.outputSize ||
|
||||
JSON.stringify(newConfig.hiddenLayers) !== JSON.stringify(this.config.hiddenLayers);
|
||||
|
||||
this.config = { ...this.config, ...newConfig };
|
||||
|
||||
if (architectureChanged) {
|
||||
this.initializeWeights();
|
||||
this.trainingHistory = [];
|
||||
this.epoch = 0;
|
||||
this.totalIterations = 0;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get training history
|
||||
*/
|
||||
getTrainingHistory(): TrainingResult[] {
|
||||
return [...this.trainingHistory];
|
||||
}
|
||||
|
||||
/**
|
||||
* Reset network (reinitialize weights)
|
||||
*/
|
||||
reset(): void {
|
||||
this.initializeWeights();
|
||||
this.trainingHistory = [];
|
||||
this.epoch = 0;
|
||||
this.totalIterations = 0;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get network state for serialization
|
||||
*/
|
||||
getState(): NeuralState {
|
||||
return {
|
||||
weights: this.weights.map(w => ({
|
||||
W: w.W.map(row => [...row]),
|
||||
b: [...w.b],
|
||||
mW: w.mW?.map(row => [...row]),
|
||||
vW: w.vW?.map(row => [...row]),
|
||||
mb: w.mb ? [...w.mb] : undefined,
|
||||
vb: w.vb ? [...w.vb] : undefined,
|
||||
})),
|
||||
config: { ...this.config },
|
||||
trainingHistory: [...this.trainingHistory],
|
||||
epoch: this.epoch,
|
||||
totalIterations: this.totalIterations,
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Load network state from serialized data
|
||||
*/
|
||||
loadState(state: NeuralState): void {
|
||||
this.config = { ...state.config };
|
||||
this.weights = state.weights.map(w => ({
|
||||
W: w.W.map(row => [...row]),
|
||||
b: [...w.b],
|
||||
mW: w.mW?.map(row => [...row]) || w.W.map(row => row.map(() => 0)),
|
||||
vW: w.vW?.map(row => [...row]) || w.W.map(row => row.map(() => 0)),
|
||||
mb: w.mb ? [...w.mb] : w.b.map(() => 0),
|
||||
vb: w.vb ? [...w.vb] : w.b.map(() => 0),
|
||||
}));
|
||||
this.trainingHistory = [...state.trainingHistory];
|
||||
this.epoch = state.epoch;
|
||||
this.totalIterations = state.totalIterations;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get weight statistics for visualization
|
||||
*/
|
||||
getWeightStats(): {
|
||||
layerStats: Array<{
|
||||
layer: number;
|
||||
weightCount: number;
|
||||
mean: number;
|
||||
std: number;
|
||||
min: number;
|
||||
max: number;
|
||||
}>;
|
||||
totalParams: number;
|
||||
} {
|
||||
const layerStats = this.weights.map((layer, idx) => {
|
||||
const weights: number[] = [];
|
||||
layer.W.forEach(row => weights.push(...row));
|
||||
weights.push(...layer.b);
|
||||
|
||||
const mean = weights.reduce((a, b) => a + b, 0) / weights.length;
|
||||
const variance = weights.reduce((a, b) => a + (b - mean) ** 2, 0) / weights.length;
|
||||
|
||||
return {
|
||||
layer: idx,
|
||||
weightCount: weights.length,
|
||||
mean,
|
||||
std: Math.sqrt(variance),
|
||||
min: Math.min(...weights),
|
||||
max: Math.max(...weights),
|
||||
};
|
||||
});
|
||||
|
||||
return {
|
||||
layerStats,
|
||||
totalParams: layerStats.reduce((sum, s) => sum + s.weightCount, 0),
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
// Singleton instance
|
||||
let engineInstance: NeuralEngine | null = null;
|
||||
|
||||
export function getNeuralEngine(config?: Partial<NeuralConfig>): NeuralEngine {
|
||||
if (!engineInstance) {
|
||||
engineInstance = new NeuralEngine(config);
|
||||
} else if (config) {
|
||||
engineInstance.updateConfig(config);
|
||||
}
|
||||
return engineInstance;
|
||||
}
|
||||
|
||||
export function resetNeuralEngine(): void {
|
||||
engineInstance = null;
|
||||
}
|
||||
|
||||
export default NeuralEngine;
|
||||
15
vendor/ruvector/crates/rvlite/examples/dashboard/src/main.tsx
vendored
Normal file
15
vendor/ruvector/crates/rvlite/examples/dashboard/src/main.tsx
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
import React from 'react'
import ReactDOM from 'react-dom/client'
import { HeroUIProvider } from '@heroui/react'
import App from './App.tsx'
import './index.css'

// Mount the dashboard. The non-null assertion on #root is safe as long as
// index.html provides that element. HeroUIProvider supplies component
// theming; the wrapping <main> applies the dark-theme classes app-wide.
ReactDOM.createRoot(document.getElementById('root')!).render(
  <React.StrictMode>
    <HeroUIProvider>
      <main className="dark text-foreground bg-background min-h-screen">
        <App />
      </main>
    </HeroUIProvider>
  </React.StrictMode>,
)
|
||||
Reference in New Issue
Block a user