Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'
This commit is contained in:
70
vendor/ruvector/npm/packages/agentic-synth/src/adapters/midstreamer.js
vendored
Normal file
70
vendor/ruvector/npm/packages/agentic-synth/src/adapters/midstreamer.js
vendored
Normal file
@@ -0,0 +1,70 @@
|
||||
/**
 * Adapter for the Midstreamer streaming service.
 *
 * Connection handling is simulated: `connect()` flips an internal flag
 * after a short delay instead of opening a real transport.
 */
export class MidstreamerAdapter {
  /**
   * @param {Object} [options] - Adapter configuration.
   * @param {string} [options.endpoint] - Service URL (default http://localhost:8080).
   * @param {string} [options.apiKey] - API key to present (default empty).
   */
  constructor(options = {}) {
    this.endpoint = options.endpoint || 'http://localhost:8080';
    this.apiKey = options.apiKey || '';
    this.connected = false;
  }

  /**
   * Establish the (simulated) connection.
   * @returns {Promise<boolean>} Resolves true once connected.
   * @throws {Error} When the connection attempt fails.
   */
  async connect() {
    try {
      await this._delay(100); // simulated connection latency
      this.connected = true;
      return true;
    } catch (error) {
      this.connected = false;
      throw new Error(`Failed to connect to Midstreamer: ${error.message}`);
    }
  }

  /** Drop the connection flag; no real transport teardown is needed. */
  async disconnect() {
    this.connected = false;
  }

  /**
   * Stream a batch of items to Midstreamer.
   * @param {Array} data - Items to stream; each item's `id` is echoed back.
   * @returns {Promise<Array<{id: *, status: string, timestamp: number}>>}
   * @throws {Error} When not connected or `data` is not an array.
   */
  async stream(data) {
    if (!this.connected) {
      throw new Error('Not connected to Midstreamer');
    }
    if (!Array.isArray(data)) {
      throw new Error('Data must be an array');
    }

    // Simulated streaming: acknowledge every item immediately.
    return data.map((item) => ({
      id: item.id,
      status: 'streamed',
      timestamp: Date.now()
    }));
  }

  /** @returns {boolean} Current connection state. */
  isConnected() {
    return this.connected;
  }

  /** Resolve after `ms` milliseconds. @private */
  _delay(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }
}
|
||||
80
vendor/ruvector/npm/packages/agentic-synth/src/adapters/robotics.js
vendored
Normal file
80
vendor/ruvector/npm/packages/agentic-synth/src/adapters/robotics.js
vendored
Normal file
@@ -0,0 +1,80 @@
|
||||
/**
 * Adapter for the Agentic Robotics control system.
 *
 * Command execution is simulated: no real gRPC/HTTP traffic occurs.
 */
export class RoboticsAdapter {
  /**
   * @param {Object} [options] - Adapter configuration.
   * @param {string} [options.endpoint] - Control endpoint (default http://localhost:9000).
   * @param {string} [options.protocol] - Wire protocol label (default 'grpc').
   */
  constructor(options = {}) {
    const { endpoint, protocol } = options;
    this.endpoint = endpoint || 'http://localhost:9000';
    this.protocol = protocol || 'grpc';
    this.initialized = false;
  }

  /**
   * Prepare the adapter for use.
   * @returns {Promise<boolean>} Resolves true once initialized.
   * @throws {Error} If initialization fails.
   */
  async initialize() {
    try {
      await this._delay(100); // simulated startup latency
      this.initialized = true;
      return true;
    } catch (error) {
      throw new Error(`Failed to initialize robotics adapter: ${error.message}`);
    }
  }

  /**
   * Execute a command against the robotics system.
   * @param {Object} command - Must carry a truthy `type`; `payload` is echoed back.
   * @returns {Promise<Object>} Execution receipt with a generated command id.
   * @throws {Error} When uninitialized or the command lacks a type.
   */
  async sendCommand(command) {
    if (!this.initialized) {
      throw new Error('Robotics adapter not initialized');
    }
    if (!command?.type) {
      throw new Error('Invalid command: missing type');
    }

    await this._delay(50); // simulated execution time

    const receipt = {
      commandId: this._generateId(),
      type: command.type,
      status: 'executed',
      result: command.payload || {},
      timestamp: Date.now()
    };
    return receipt;
  }

  /**
   * Report adapter configuration and liveness.
   * @returns {Promise<Object>} Status snapshot.
   * @throws {Error} When uninitialized.
   */
  async getStatus() {
    if (!this.initialized) {
      throw new Error('Robotics adapter not initialized');
    }

    return {
      initialized: this.initialized,
      protocol: this.protocol,
      endpoint: this.endpoint,
      uptime: Date.now()
    };
  }

  /** Mark the adapter as no longer usable. */
  async shutdown() {
    this.initialized = false;
  }

  /** Random base36 identifier (not cryptographically secure). @private */
  _generateId() {
    return Math.random().toString(36).substring(2, 15);
  }

  /** Resolve after `ms` milliseconds. @private */
  _delay(ms) {
    return new Promise((resolve) => setTimeout(resolve, ms));
  }
}
|
||||
304
vendor/ruvector/npm/packages/agentic-synth/src/adapters/ruvector.js
vendored
Normal file
304
vendor/ruvector/npm/packages/agentic-synth/src/adapters/ruvector.js
vendored
Normal file
@@ -0,0 +1,304 @@
|
||||
/**
 * RuVector integration adapter.
 * Uses native @ruvector/core NAPI-RS bindings when available,
 * falls back to an in-memory simulation for environments without native support.
 */

// Memoized module handle: null = not yet resolved, false = unavailable,
// otherwise the imported module namespace.
let ruvectorCore = null;

/**
 * Resolve the ruvector implementation once and memoize the result.
 * Tries @ruvector/core (native NAPI-RS bindings) first, then the plain
 * `ruvector` package.
 * @returns {Promise<Object|false>} Module namespace, or false when neither
 *   package can be imported.
 */
async function loadRuvector() {
  if (ruvectorCore !== null) return ruvectorCore;

  try {
    // Try @ruvector/core first (native NAPI-RS bindings)
    const core = await import('@ruvector/core');
    ruvectorCore = core;
    return core;
  } catch (e1) {
    try {
      // Fall back to ruvector package
      const ruvector = await import('ruvector');
      ruvectorCore = ruvector;
      return ruvector;
    } catch (e2) {
      // No ruvector available; cache the failure so imports aren't retried.
      ruvectorCore = false;
      return false;
    }
  }
}

/**
 * Vector-database adapter: native @ruvector/core bindings when installed,
 * otherwise a Map-backed in-memory store with cosine-similarity search.
 */
export class RuvectorAdapter {
  /**
   * @param {Object} [options]
   * @param {number} [options.dimensions=128] - Vector dimensionality.
   * @param {string} [options.collection='agentic-synth'] - Collection name.
   * @param {boolean} [options.inMemory=true] - Use in-memory storage (native path).
   * @param {string} [options.path] - On-disk path for the native DB when not in-memory.
   */
  constructor(options = {}) {
    this.vectorDb = null;
    this.dimensions = options.dimensions || 128;
    this.initialized = false;
    this.useNative = false;
    this.nativeDb = null;
    this.collectionName = options.collection || 'agentic-synth';
    this.inMemory = options.inMemory !== false; // Default to in-memory for tests
    this.path = options.path || null;
  }

  /**
   * Initialize RuVector connection.
   * Attempts to use native bindings, falls back to in-memory simulation.
   * @returns {Promise<boolean>} Always true on success.
   * @throws {Error} If initialization fails.
   */
  async initialize() {
    try {
      const ruvector = await loadRuvector();

      if (ruvector && ruvector.VectorDB) {
        // Use native RuVector NAPI-RS bindings.
        // VectorDB constructor takes { dimensions: number, path?: string }
        const dbOptions = { dimensions: this.dimensions };
        if (!this.inMemory && this.path) {
          dbOptions.path = this.path;
        }
        this.nativeDb = new ruvector.VectorDB(dbOptions);
        this.useNative = true;
        this.initialized = true;
        console.log('[RuvectorAdapter] Using native NAPI-RS bindings (in-memory:', this.inMemory, ')');
        return true;
      }

      // Fall back to in-memory simulation.
      this.vectorDb = {
        vectors: new Map(),
        metadata: new Map(),
        config: { dimensions: this.dimensions }
      };
      this.useNative = false;
      this.initialized = true;
      console.log('[RuvectorAdapter] Using in-memory fallback (install @ruvector/core for native performance)');
      return true;
    } catch (error) {
      throw new Error(`Failed to initialize RuVector: ${error.message}`);
    }
  }

  /**
   * Insert vectors into the database.
   * @param {Array<{id: *, vector: Array<number>|Float32Array, metadata?: Object}>} vectors
   * @returns {Promise<Array<{id: *, status: string, native: boolean}>>}
   * @throws {Error} When uninitialized, input is not an array, an item lacks
   *   id/vector, or a vector's length mismatches the configured dimensions.
   */
  async insert(vectors) {
    if (!this.initialized) {
      throw new Error('RuVector adapter not initialized');
    }

    if (!Array.isArray(vectors)) {
      throw new Error('Vectors must be an array');
    }

    const results = [];

    if (this.useNative && this.nativeDb) {
      // Use native RuVector insert.
      for (const item of vectors) {
        if (!item.id || !item.vector) {
          throw new Error('Each vector must have id and vector fields');
        }

        if (item.vector.length !== this.dimensions) {
          throw new Error(`Vector dimension mismatch: expected ${this.dimensions}, got ${item.vector.length}`);
        }

        // Native insert - takes { id, vector, metadata? }
        const vectorArray = item.vector instanceof Float32Array
          ? item.vector
          : new Float32Array(item.vector);

        this.nativeDb.insert({
          id: item.id,
          vector: vectorArray,
          metadata: item.metadata
        });
        results.push({ id: item.id, status: 'inserted', native: true });
      }
    } else {
      // In-memory fallback.
      for (const item of vectors) {
        if (!item.id || !item.vector) {
          throw new Error('Each vector must have id and vector fields');
        }

        if (item.vector.length !== this.dimensions) {
          throw new Error(`Vector dimension mismatch: expected ${this.dimensions}, got ${item.vector.length}`);
        }

        this.vectorDb.vectors.set(item.id, item.vector);
        if (item.metadata) {
          this.vectorDb.metadata.set(item.id, item.metadata);
        }
        results.push({ id: item.id, status: 'inserted', native: false });
      }
    }

    return results;
  }

  /**
   * Batch insert for better performance.
   * Uses the native batch API when available; otherwise falls back to insert().
   * @param {Array<{id: *, vector: Array<number>|Float32Array, metadata?: Object}>} vectors
   * @returns {Promise<Array<{id: *, status: string, native: boolean}>>}
   */
  async insertBatch(vectors) {
    if (!this.initialized) {
      throw new Error('RuVector adapter not initialized');
    }

    if (this.useNative && this.nativeDb && this.nativeDb.insertBatch) {
      // Use native batch insert if available.
      const ids = vectors.map(v => v.id);
      const embeddings = vectors.map(v =>
        v.vector instanceof Float32Array ? v.vector : new Float32Array(v.vector)
      );
      const metadataList = vectors.map(v => v.metadata || {});

      this.nativeDb.insertBatch(ids, embeddings, metadataList);
      return vectors.map(v => ({ id: v.id, status: 'inserted', native: true }));
    }

    // Fall back to sequential insert.
    return this.insert(vectors);
  }

  /**
   * Search for similar vectors.
   * @param {Array<number>|Float32Array} query - Query vector.
   * @param {number} [k=10] - Number of results.
   * @returns {Promise<Array<{id: *, score: number, metadata?: Object}>>}
   *   Results sorted by descending score (fallback path).
   * @throws {Error} When uninitialized or the query dimension mismatches.
   */
  async search(query, k = 10) {
    if (!this.initialized) {
      throw new Error('RuVector adapter not initialized');
    }

    const queryArray = Array.isArray(query) ? query : Array.from(query);

    if (queryArray.length !== this.dimensions) {
      throw new Error(`Query dimension mismatch: expected ${this.dimensions}, got ${queryArray.length}`);
    }

    if (this.useNative && this.nativeDb) {
      // Use native HNSW search - API: { vector, k }
      const queryFloat32 = query instanceof Float32Array ? query : new Float32Array(query);
      const results = await this.nativeDb.search({ vector: queryFloat32, k });
      return results.map(r => ({
        id: r.id,
        // Bug fix: use ?? instead of || — a legitimate score/similarity of 0
        // must not fall through to the next field.
        score: r.score ?? r.similarity ?? r.distance,
        metadata: r.metadata
      }));
    }

    // In-memory cosine similarity search (linear scan).
    const results = [];
    for (const [id, vector] of this.vectorDb.vectors.entries()) {
      const similarity = this._cosineSimilarity(queryArray, vector);
      results.push({
        id,
        score: similarity,
        metadata: this.vectorDb.metadata.get(id)
      });
    }

    results.sort((a, b) => b.score - a.score);
    return results.slice(0, k);
  }

  /**
   * Get a vector by ID.
   * @returns {Promise<{id: *, vector: *, metadata?: Object}|null>} null when absent.
   */
  async get(id) {
    if (!this.initialized) {
      throw new Error('RuVector adapter not initialized');
    }

    if (this.useNative && this.nativeDb && this.nativeDb.get) {
      const result = await this.nativeDb.get(id);
      return result ? { id: result.id, vector: result.vector, metadata: result.metadata } : null;
    }

    const vector = this.vectorDb.vectors.get(id);
    const metadata = this.vectorDb.metadata.get(id);
    return vector ? { id, vector, metadata } : null;
  }

  /**
   * Delete a vector by ID.
   * @returns {Promise<boolean>} Whether an entry existed (fallback path).
   */
  async delete(id) {
    if (!this.initialized) {
      throw new Error('RuVector adapter not initialized');
    }

    if (this.useNative && this.nativeDb && this.nativeDb.delete) {
      return await this.nativeDb.delete(id);
    }

    const existed = this.vectorDb.vectors.has(id);
    this.vectorDb.vectors.delete(id);
    this.vectorDb.metadata.delete(id);
    return existed;
  }

  /**
   * Get database statistics.
   * @returns {Promise<{count: number, dimensions: number, native: boolean}>}
   */
  async stats() {
    if (!this.initialized) {
      throw new Error('RuVector adapter not initialized');
    }

    if (this.useNative && this.nativeDb) {
      const count = await this.nativeDb.len();
      return {
        count,
        dimensions: this.dimensions,
        native: true
      };
    }

    return {
      count: this.vectorDb.vectors.size,
      dimensions: this.dimensions,
      native: false
    };
  }

  /** @returns {boolean} Whether the native bindings are in use. */
  isNative() {
    return this.useNative;
  }

  /**
   * Cosine similarity between two equal-length numeric sequences (fallback).
   * Returns 0 when either vector has zero magnitude.
   * @private
   */
  _cosineSimilarity(a, b) {
    let dotProduct = 0;
    let normA = 0;
    let normB = 0;

    for (let i = 0; i < a.length; i++) {
      dotProduct += a[i] * b[i];
      normA += a[i] * a[i];
      normB += b[i] * b[i];
    }

    const denominator = Math.sqrt(normA) * Math.sqrt(normB);
    return denominator === 0 ? 0 : dotProduct / denominator;
  }
}
|
||||
|
||||
/**
 * Convenience factory: construct a RuvectorAdapter and initialize it,
 * auto-detecting native bindings in the process.
 * @param {Object} [options] - Forwarded to the RuvectorAdapter constructor.
 * @returns {Promise<RuvectorAdapter>} A ready-to-use adapter.
 */
export async function createRuvectorAdapter(options = {}) {
  const instance = new RuvectorAdapter(options);
  await instance.initialize();
  return instance;
}
|
||||
|
||||
export default RuvectorAdapter;
|
||||
78
vendor/ruvector/npm/packages/agentic-synth/src/api/client.js
vendored
Normal file
78
vendor/ruvector/npm/packages/agentic-synth/src/api/client.js
vendored
Normal file
@@ -0,0 +1,78 @@
|
||||
/**
 * Minimal JSON API client with bearer auth, per-attempt timeout and
 * exponential-backoff retries.
 */
export class APIClient {
  /**
   * @param {Object} [options]
   * @param {string} [options.baseUrl='https://api.example.com'] - Request base URL.
   * @param {string} [options.apiKey=''] - Sent as a Bearer token.
   * @param {number} [options.timeout=5000] - Per-attempt timeout in ms (0 is honored).
   * @param {number} [options.retries=3] - Max attempts (values < 1 still make one attempt).
   */
  constructor(options = {}) {
    this.baseUrl = options.baseUrl || 'https://api.example.com';
    this.apiKey = options.apiKey || '';
    // ?? (not ||) so an explicit 0 is not silently replaced by the default.
    this.timeout = options.timeout ?? 5000;
    this.retries = options.retries ?? 3;
  }

  /**
   * Perform a request with retries and a per-attempt timeout.
   * @param {string} endpoint - Path appended to baseUrl.
   * @param {Object} [options] - fetch options; `headers` are merged over defaults.
   * @returns {Promise<*>} Parsed JSON body.
   * @throws {Error} The last failure after all attempts are exhausted.
   */
  async request(endpoint, options = {}) {
    const url = `${this.baseUrl}${endpoint}`;
    const headers = {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${this.apiKey}`,
      ...options.headers
    };

    // Always make at least one attempt so we never `throw undefined`
    // when retries is configured to 0.
    const attempts = Math.max(1, this.retries);
    let lastError;
    for (let i = 0; i < attempts; i++) {
      try {
        const controller = new AbortController();
        const timeoutId = setTimeout(() => controller.abort(), this.timeout);

        let response;
        try {
          response = await fetch(url, {
            ...options,
            headers,
            signal: controller.signal
          });
        } finally {
          // Bug fix: the original cleared the timer only on success, leaking
          // a pending timeout whenever fetch rejected.
          clearTimeout(timeoutId);
        }

        if (!response.ok) {
          throw new Error(`API error: ${response.status} ${response.statusText}`);
        }

        return await response.json();
      } catch (error) {
        lastError = error;
        if (i < attempts - 1) {
          await this._delay(1000 * Math.pow(2, i)); // exponential backoff
        }
      }
    }

    throw lastError;
  }

  /**
   * GET request.
   * @param {string} endpoint - Path appended to baseUrl.
   * @param {Object} [params] - Serialized into the query string.
   */
  async get(endpoint, params = {}) {
    const queryString = new URLSearchParams(params).toString();
    const url = queryString ? `${endpoint}?${queryString}` : endpoint;
    return this.request(url, { method: 'GET' });
  }

  /**
   * POST request with a JSON body.
   * @param {string} endpoint - Path appended to baseUrl.
   * @param {*} data - JSON-serialized request body.
   */
  async post(endpoint, data) {
    return this.request(endpoint, {
      method: 'POST',
      body: JSON.stringify(data)
    });
  }

  /** Resolve after `ms` milliseconds. @private */
  _delay(ms) {
    return new Promise(resolve => setTimeout(resolve, ms));
  }
}
|
||||
117
vendor/ruvector/npm/packages/agentic-synth/src/cache/context-cache.js
vendored
Normal file
117
vendor/ruvector/npm/packages/agentic-synth/src/cache/context-cache.js
vendored
Normal file
@@ -0,0 +1,117 @@
|
||||
/**
 * Context Cache for prompt and response caching.
 * Bounded LRU cache with TTL expiry and hit/miss/eviction statistics.
 */
export class ContextCache {
  /**
   * @param {Object} [options]
   * @param {number} [options.maxSize=100] - Max entries before LRU eviction.
   * @param {number} [options.ttl=3600000] - Entry lifetime in milliseconds.
   */
  constructor(options = {}) {
    this.maxSize = options.maxSize || 100;
    this.ttl = options.ttl || 3600000; // 1 hour default
    this.cache = new Map();
    this.stats = {
      hits: 0,
      misses: 0,
      evictions: 0
    };
  }

  /**
   * Get cached value.
   * @param {string} key - Cache key
   * @returns {*} Cached value, or null on miss/expiry (both count as misses).
   */
  get(key) {
    const entry = this.cache.get(key);

    if (!entry) {
      this.stats.misses++;
      return null;
    }

    // Expired entries are reaped lazily on access.
    if (Date.now() - entry.timestamp > this.ttl) {
      this.cache.delete(key);
      this.stats.misses++;
      return null;
    }

    this.stats.hits++;
    entry.accessCount++;
    entry.lastAccess = Date.now();
    return entry.value;
  }

  /**
   * Set cache value; evicts the LRU entry when inserting at capacity.
   * @param {string} key - Cache key
   * @param {*} value - Value to cache
   */
  set(key, value) {
    // Evict if at capacity (only for genuinely new keys).
    if (this.cache.size >= this.maxSize && !this.cache.has(key)) {
      this._evictLRU();
    }

    this.cache.set(key, {
      value,
      timestamp: Date.now(),
      lastAccess: Date.now(),
      accessCount: 0
    });
  }

  /**
   * Check if key exists (and is not expired; expired entries are removed).
   * @returns {boolean}
   */
  has(key) {
    const entry = this.cache.get(key);
    if (!entry) return false;

    if (Date.now() - entry.timestamp > this.ttl) {
      this.cache.delete(key);
      return false;
    }

    return true;
  }

  /** Remove all entries and reset statistics. */
  clear() {
    this.cache.clear();
    this.stats = { hits: 0, misses: 0, evictions: 0 };
  }

  /**
   * Get cache statistics.
   * @returns {{hits: number, misses: number, evictions: number, size: number, hitRate: number}}
   */
  getStats() {
    return {
      ...this.stats,
      size: this.cache.size,
      hitRate: this.stats.hits / (this.stats.hits + this.stats.misses) || 0
    };
  }

  /**
   * Evict the least recently used entry.
   * @private
   */
  _evictLRU() {
    let oldestKey = null;
    let oldestAccess = Infinity;

    for (const [key, entry] of this.cache.entries()) {
      if (entry.lastAccess < oldestAccess) {
        oldestAccess = entry.lastAccess;
        oldestKey = key;
      }
    }

    // Bug fix: compare against null, not truthiness — an empty-string key
    // ('' is falsy) was previously never evicted, letting the cache grow
    // past maxSize.
    if (oldestKey !== null) {
      this.cache.delete(oldestKey);
      this.stats.evictions++;
    }
  }
}
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/cache/index.d.ts.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/cache/index.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,aAAa,EAAE,UAAU,EAAE,MAAM,aAAa,CAAC;AAExD,MAAM,WAAW,UAAU,CAAC,CAAC,GAAG,OAAO;IACrC,GAAG,EAAE,MAAM,CAAC;IACZ,KAAK,EAAE,CAAC,CAAC;IACT,SAAS,EAAE,MAAM,CAAC;IAClB,GAAG,EAAE,MAAM,CAAC;IACZ,IAAI,EAAE,MAAM,CAAC;CACd;AAED,MAAM,WAAW,YAAY;IAC3B,QAAQ,EAAE,aAAa,CAAC;IACxB,GAAG,EAAE,MAAM,CAAC;IACZ,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,OAAO,CAAC,EAAE,CAAC,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,OAAO,KAAK,IAAI,CAAC;CACjD;AAED,8BAAsB,UAAU;IAC9B,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,CAAC,GAAG,IAAI,CAAC;IAC/C,QAAQ,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAAE,GAAG,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IACnE,QAAQ,CAAC,GAAG,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAC3C,QAAQ,CAAC,MAAM,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAC9C,QAAQ,CAAC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAC/B,QAAQ,CAAC,IAAI,IAAI,OAAO,CAAC,MAAM,CAAC;CACjC;AAED;;GAEG;AACH,qBAAa,WAAY,SAAQ,UAAU;IACzC,OAAO,CAAC,KAAK,CAA0B;IACvC,OAAO,CAAC,OAAO,CAAS;IACxB,OAAO,CAAC,UAAU,CAAS;IAC3B,OAAO,CAAC,OAAO,CAAC,CAAwC;gBAE5C,OAAO,EAAE,IAAI,CAAC,YAAY,EAAE,UAAU,CAAC;IAQ7C,GAAG,CAAC,CAAC,EAAE,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,CAAC,GAAG,IAAI,CAAC;IAqBtC,GAAG,CAAC,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAAE,GAAG,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAiB1D,GAAG,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAKlC,MAAM,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAWrC,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAStB,IAAI,IAAI,OAAO,CAAC,MAAM,CAAC;YAIf,QAAQ;IAQtB;;OAEG;IACH,QAAQ;;;;;;;CAoBT;AAED;;GAEG;AACH,qBAAa,OAAQ,SAAQ,UAAU;IAC/B,GAAG,CAAC,CAAC,KAAK,OAAO,CAAC,CAAC,GAAG,IAAI,CAAC;IAI3B,GAAG,CAAC,CAAC,KAAK,OAAO,CAAC,IAAI,CAAC;IAIvB,GAAG,IAAI,OAAO,CAAC,OAAO,CAAC;IAIvB,MAAM,IAAI,OAAO,CAAC,OAAO,CAAC;IAI1B,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAItB,IAAI,IAAI,OAAO,CAAC,MAAM,CAAC;CAG9B;AAED;;GAEG;AACH,qBAAa,YAAY;IACvB,OAAO,CAAC,KAAK,CAAa;gBAEd,OAAO,EAAE,YAAY;IAkBjC;;OAEG;IACG,GAAG,CAAC,CAAC,EAAE,GAAG,EAAE,MAAM,GAAG,
OAAO,CAAC,CAAC,GAAG,IAAI,CAAC;IAQ5C;;OAEG;IACG,GAAG,CAAC,CAAC,EAAE,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,CAAC,EAAE,GAAG,CAAC,EAAE,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC;IAQhE;;OAEG;IACG,GAAG,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAQxC;;OAEG;IACG,MAAM,CAAC,GAAG,EAAE,MAAM,GAAG,OAAO,CAAC,OAAO,CAAC;IAQ3C;;OAEG;IACG,KAAK,IAAI,OAAO,CAAC,IAAI,CAAC;IAQ5B;;OAEG;IACG,IAAI,IAAI,OAAO,CAAC,MAAM,CAAC;IAQ7B;;OAEG;IACH,MAAM,CAAC,WAAW,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,GAAG,MAAM;CAO5E;AAED,OAAO,EAAE,aAAa,EAAE,UAAU,EAAE,CAAC"}
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/cache/index.js.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/cache/index.js.map
vendored
Normal file
File diff suppressed because one or more lines are too long
279
vendor/ruvector/npm/packages/agentic-synth/src/cache/index.ts
vendored
Normal file
279
vendor/ruvector/npm/packages/agentic-synth/src/cache/index.ts
vendored
Normal file
@@ -0,0 +1,279 @@
|
||||
/**
|
||||
* Context caching system for performance optimization
|
||||
*/
|
||||
|
||||
import { CacheStrategy, CacheError } from '../types.js';
|
||||
|
||||
/**
 * A single cached record as stored by MemoryCache.
 */
export interface CacheEntry<T = unknown> {
  /** Cache key the entry is stored under. */
  key: string;
  /** Cached payload. */
  value: T;
  /** Insertion time in epoch milliseconds; used for TTL expiry checks. */
  timestamp: number;
  /** Time-to-live in seconds (MemoryCache compares against ttl * 1000). */
  ttl: number;
  /** Number of reads recorded for this entry. */
  hits: number;
}
|
||||
|
||||
/**
 * Configuration accepted by CacheManager and the concrete cache stores.
 */
export interface CacheOptions {
  /** Store selection: 'memory', 'none', or 'disk' (disk is not implemented). */
  strategy: CacheStrategy;
  /** Default time-to-live for entries, in seconds. */
  ttl: number;
  /** Maximum entry count before LRU eviction (MemoryCache defaults to 1000). */
  maxSize?: number;
  /** Callback invoked when an entry is evicted or deleted. */
  onEvict?: (key: string, value: unknown) => void;
}
|
||||
|
||||
/**
 * Abstract async key/value store contract implemented by MemoryCache and NoCache.
 */
export abstract class CacheStore {
  /** Return the value stored under `key`, or null when absent or expired. */
  abstract get<T>(key: string): Promise<T | null>;
  /** Store `value` under `key`, optionally overriding the default TTL (seconds). */
  abstract set<T>(key: string, value: T, ttl?: number): Promise<void>;
  /** True when `key` is present and not expired. */
  abstract has(key: string): Promise<boolean>;
  /** Remove `key`; resolves true if an entry was removed. */
  abstract delete(key: string): Promise<boolean>;
  /** Remove every entry. */
  abstract clear(): Promise<void>;
  /** Current number of stored entries. */
  abstract size(): Promise<number>;
}
|
||||
|
||||
/**
|
||||
* In-memory cache implementation with LRU eviction
|
||||
*/
|
||||
export class MemoryCache extends CacheStore {
|
||||
private cache: Map<string, CacheEntry>;
|
||||
private maxSize: number;
|
||||
private defaultTTL: number;
|
||||
private onEvict?: (key: string, value: unknown) => void;
|
||||
|
||||
constructor(options: Omit<CacheOptions, 'strategy'>) {
|
||||
super();
|
||||
this.cache = new Map();
|
||||
this.maxSize = options.maxSize || 1000;
|
||||
this.defaultTTL = options.ttl;
|
||||
this.onEvict = options.onEvict;
|
||||
}
|
||||
|
||||
async get<T>(key: string): Promise<T | null> {
|
||||
const entry = this.cache.get(key);
|
||||
|
||||
if (!entry) {
|
||||
return null;
|
||||
}
|
||||
|
||||
// Check if expired
|
||||
if (Date.now() - entry.timestamp > entry.ttl * 1000) {
|
||||
await this.delete(key);
|
||||
return null;
|
||||
}
|
||||
|
||||
// Update hits and move to end (LRU)
|
||||
entry.hits++;
|
||||
this.cache.delete(key);
|
||||
this.cache.set(key, entry);
|
||||
|
||||
return entry.value as T;
|
||||
}
|
||||
|
||||
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
|
||||
// Evict if at max size
|
||||
if (this.cache.size >= this.maxSize && !this.cache.has(key)) {
|
||||
await this.evictLRU();
|
||||
}
|
||||
|
||||
const entry: CacheEntry<T> = {
|
||||
key,
|
||||
value,
|
||||
timestamp: Date.now(),
|
||||
ttl: ttl || this.defaultTTL,
|
||||
hits: 0
|
||||
};
|
||||
|
||||
this.cache.set(key, entry);
|
||||
}
|
||||
|
||||
async has(key: string): Promise<boolean> {
|
||||
const value = await this.get(key);
|
||||
return value !== null;
|
||||
}
|
||||
|
||||
async delete(key: string): Promise<boolean> {
|
||||
const entry = this.cache.get(key);
|
||||
const deleted = this.cache.delete(key);
|
||||
|
||||
if (deleted && entry && this.onEvict) {
|
||||
this.onEvict(key, entry.value);
|
||||
}
|
||||
|
||||
return deleted;
|
||||
}
|
||||
|
||||
async clear(): Promise<void> {
|
||||
if (this.onEvict) {
|
||||
for (const [key, entry] of this.cache.entries()) {
|
||||
this.onEvict(key, entry.value);
|
||||
}
|
||||
}
|
||||
this.cache.clear();
|
||||
}
|
||||
|
||||
async size(): Promise<number> {
|
||||
return this.cache.size;
|
||||
}
|
||||
|
||||
private async evictLRU(): Promise<void> {
|
||||
// First entry is least recently used
|
||||
const firstKey = this.cache.keys().next().value;
|
||||
if (firstKey) {
|
||||
await this.delete(firstKey);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get cache statistics
|
||||
*/
|
||||
getStats() {
|
||||
let totalHits = 0;
|
||||
let expiredCount = 0;
|
||||
const now = Date.now();
|
||||
|
||||
for (const entry of this.cache.values()) {
|
||||
totalHits += entry.hits;
|
||||
if (now - entry.timestamp > entry.ttl * 1000) {
|
||||
expiredCount++;
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
size: this.cache.size,
|
||||
maxSize: this.maxSize,
|
||||
totalHits,
|
||||
expiredCount,
|
||||
hitRate: totalHits / (this.cache.size || 1)
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * No-op cache for disabled caching: stores nothing and always misses.
 */
export class NoCache extends CacheStore {
  /** Always a miss. */
  async get<T>(): Promise<T | null> {
    return null;
  }

  /** Discards the value. */
  async set<T>(): Promise<void> {
    // No-op
  }

  /** Never contains a key. */
  async has(): Promise<boolean> {
    return false;
  }

  /** Nothing to delete. */
  async delete(): Promise<boolean> {
    return false;
  }

  /** Nothing to clear. */
  async clear(): Promise<void> {
    // No-op
  }

  /** Always empty. */
  async size(): Promise<number> {
    return 0;
  }
}
|
||||
|
||||
/**
|
||||
* Cache manager factory
|
||||
*/
|
||||
export class CacheManager {
|
||||
private store: CacheStore;
|
||||
|
||||
constructor(options: CacheOptions) {
|
||||
switch (options.strategy) {
|
||||
case 'memory':
|
||||
this.store = new MemoryCache(options);
|
||||
break;
|
||||
case 'none':
|
||||
this.store = new NoCache();
|
||||
break;
|
||||
case 'disk':
|
||||
// TODO: Implement disk cache
|
||||
throw new CacheError('Disk cache not yet implemented', { strategy: 'disk' });
|
||||
default:
|
||||
throw new CacheError(`Unknown cache strategy: ${options.strategy}`, {
|
||||
strategy: options.strategy
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get value from cache
|
||||
*/
|
||||
async get<T>(key: string): Promise<T | null> {
|
||||
try {
|
||||
return await this.store.get<T>(key);
|
||||
} catch (error) {
|
||||
throw new CacheError('Failed to get cache value', { key, error });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Set value in cache
|
||||
*/
|
||||
async set<T>(key: string, value: T, ttl?: number): Promise<void> {
|
||||
try {
|
||||
await this.store.set(key, value, ttl);
|
||||
} catch (error) {
|
||||
throw new CacheError('Failed to set cache value', { key, error });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if key exists in cache
|
||||
*/
|
||||
async has(key: string): Promise<boolean> {
|
||||
try {
|
||||
return await this.store.has(key);
|
||||
} catch (error) {
|
||||
throw new CacheError('Failed to check cache key', { key, error });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete key from cache
|
||||
*/
|
||||
async delete(key: string): Promise<boolean> {
|
||||
try {
|
||||
return await this.store.delete(key);
|
||||
} catch (error) {
|
||||
throw new CacheError('Failed to delete cache key', { key, error });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Clear all cache entries
|
||||
*/
|
||||
async clear(): Promise<void> {
|
||||
try {
|
||||
await this.store.clear();
|
||||
} catch (error) {
|
||||
throw new CacheError('Failed to clear cache', { error });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get cache size
|
||||
*/
|
||||
async size(): Promise<number> {
|
||||
try {
|
||||
return await this.store.size();
|
||||
} catch (error) {
|
||||
throw new CacheError('Failed to get cache size', { error });
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate cache key from parameters
|
||||
*/
|
||||
static generateKey(prefix: string, params: Record<string, unknown>): string {
|
||||
const sorted = Object.keys(params)
|
||||
.sort()
|
||||
.map(key => `${key}:${JSON.stringify(params[key])}`)
|
||||
.join('|');
|
||||
return `${prefix}:${sorted}`;
|
||||
}
|
||||
}
|
||||
|
||||
export { CacheStrategy, CacheError };
|
||||
139
vendor/ruvector/npm/packages/agentic-synth/src/config/config.js
vendored
Normal file
139
vendor/ruvector/npm/packages/agentic-synth/src/config/config.js
vendored
Normal file
@@ -0,0 +1,139 @@
|
||||
/**
|
||||
* Configuration management
|
||||
*/
|
||||
|
||||
import { readFileSync } from 'fs';
|
||||
import yaml from 'js-yaml';
|
||||
import { config as dotenvConfig } from 'dotenv';
|
||||
|
||||
/**
 * Layered configuration: environment variables override file/option values,
 * which override built-in defaults.
 */
export class Config {
  /**
   * @param {Object} [options] - Also merged into config values (see note below).
   * @param {string} [options.envPrefix='AGENTIC_SYNTH_'] - Prefix for env overrides.
   * @param {boolean} [options.loadEnv=true] - Load .env via dotenv unless explicitly false.
   * @param {string} [options.configPath] - Optional JSON/YAML file to load.
   */
  constructor(options = {}) {
    this.values = {};
    this.envPrefix = options.envPrefix || 'AGENTIC_SYNTH_';

    if (options.loadEnv !== false) {
      dotenvConfig();
    }

    if (options.configPath) {
      this.loadFromFile(options.configPath);
    }

    // NOTE(review): spreading `options` also copies meta keys such as
    // envPrefix/loadEnv/configPath into the config values; kept for
    // backward compatibility with existing callers of getAll().
    this.values = {
      ...this._getDefaults(),
      ...this.values,
      ...options
    };
  }

  /**
   * Read a dot-separated config key. A matching environment variable
   * (e.g. AGENTIC_SYNTH_API_TIMEOUT for 'api.timeout') takes precedence.
   * @param {string} key - Dot-separated path.
   * @param {*} [defaultValue] - Returned when the key is unset.
   * @returns {*} The resolved value (env values are type-coerced).
   */
  get(key, defaultValue = undefined) {
    const envKey = `${this.envPrefix}${key.toUpperCase().replace(/\./g, '_')}`;
    // NOTE(review): truthy check means an env var set to the empty string is
    // ignored rather than treated as an override — kept as-is.
    if (process.env[envKey]) {
      return this._parseValue(process.env[envKey]);
    }

    const keys = key.split('.');
    let value = this.values;

    for (const k of keys) {
      if (value && typeof value === 'object' && k in value) {
        value = value[k];
      } else {
        return defaultValue;
      }
    }

    return value !== undefined ? value : defaultValue;
  }

  /**
   * Set a dot-separated config key, creating intermediate objects as needed.
   * @param {string} key - Dot-separated path.
   * @param {*} value - Value to store.
   */
  set(key, value) {
    const keys = key.split('.');
    let target = this.values;

    for (let i = 0; i < keys.length - 1; i++) {
      const k = keys[i];
      if (!(k in target) || typeof target[k] !== 'object') {
        target[k] = {};
      }
      target = target[k];
    }

    target[keys[keys.length - 1]] = value;
  }

  /**
   * Merge values from a JSON or YAML file on top of the current values.
   * @param {string} path - File path ending in .json, .yaml, or .yml.
   * @throws {Error} On unsupported extension, read failure, or parse failure.
   */
  loadFromFile(path) {
    try {
      const content = readFileSync(path, 'utf8');

      if (path.endsWith('.json')) {
        this.values = { ...this.values, ...JSON.parse(content) };
      } else if (path.endsWith('.yaml') || path.endsWith('.yml')) {
        this.values = { ...this.values, ...yaml.load(content) };
      } else {
        throw new Error('Unsupported config file format');
      }
    } catch (error) {
      throw new Error(`Failed to load config from ${path}: ${error.message}`);
    }
  }

  /**
   * Ensure every listed key resolves to a value.
   * @param {string[]} [requiredKeys]
   * @returns {boolean} true when all keys are present.
   * @throws {Error} Listing all missing keys.
   */
  validate(requiredKeys = []) {
    const missing = [];

    for (const key of requiredKeys) {
      if (this.get(key) === undefined) {
        missing.push(key);
      }
    }

    if (missing.length > 0) {
      throw new Error(`Missing required configuration: ${missing.join(', ')}`);
    }

    return true;
  }

  /** @returns {Object} Shallow copy of all config values. */
  getAll() {
    return { ...this.values };
  }

  /** Built-in defaults, lowest layer of the merge. @private */
  _getDefaults() {
    return {
      api: {
        baseUrl: 'https://api.example.com',
        timeout: 5000,
        retries: 3
      },
      cache: {
        maxSize: 100,
        ttl: 3600000
      },
      generator: {
        seed: Date.now(),
        format: 'json'
      },
      router: {
        strategy: 'round-robin'
      }
    };
  }

  /**
   * Coerce an environment-variable string: JSON objects/arrays, booleans,
   * then numbers; anything else is returned verbatim.
   * @private
   */
  _parseValue(value) {
    if (value.startsWith('{') || value.startsWith('[')) {
      try {
        return JSON.parse(value);
      } catch {
        return value; // not valid JSON; treat as a plain string
      }
    }

    if (value === 'true') return true;
    if (value === 'false') return false;

    // Idiom fix: Number.isNaN on the explicit conversion instead of the
    // coercing global isNaN — same predicate, no implicit coercion.
    const num = Number(value);
    if (value.trim() !== '' && !Number.isNaN(num)) {
      return num;
    }

    return value;
  }
}
|
||||
63
vendor/ruvector/npm/packages/agentic-synth/src/generators/base.d.ts
vendored
Normal file
63
vendor/ruvector/npm/packages/agentic-synth/src/generators/base.d.ts
vendored
Normal file
@@ -0,0 +1,63 @@
|
||||
/**
 * Base generator class with API integration.
 *
 * Type declarations for the abstract generator that concrete data
 * generators extend. Subclasses supply prompt construction and response
 * parsing; the base class handles caching, model routing/fallback, and
 * provider API calls.
 */
import { GoogleGenerativeAI } from '@google/generative-ai';
import { SynthConfig, GeneratorOptions, GenerationResult, StreamCallback } from '../types.js';
import { CacheManager } from '../cache/index.js';
import { ModelRouter } from '../routing/index.js';
export declare abstract class BaseGenerator<TOptions extends GeneratorOptions = GeneratorOptions> {
    /** Generator configuration (provider, model, cache/fallback settings). */
    protected config: SynthConfig;
    /** Response cache keyed by generator type + options. */
    protected cache: CacheManager;
    /** Provider/model selection with optional fallback chain. */
    protected router: ModelRouter;
    /** Gemini client; only set when the provider is 'gemini' and a key is available. */
    protected gemini?: GoogleGenerativeAI;
    constructor(config: SynthConfig);
    /**
     * Abstract method for generation logic: build the provider prompt.
     */
    protected abstract generatePrompt(options: TOptions): string;
    /**
     * Abstract method for result parsing: turn the raw response into records.
     */
    protected abstract parseResult(response: string, options: TOptions): unknown[];
    /**
     * Generate synthetic data (cached, with provider fallback).
     */
    generate<T = unknown>(options: TOptions): Promise<GenerationResult<T>>;
    /**
     * Generate with streaming support (requires a streaming-capable route).
     */
    generateStream<T = unknown>(options: TOptions, callback?: StreamCallback<T>): AsyncGenerator<T, void, unknown>;
    /**
     * Batch generation with parallel processing (`concurrency` per wave).
     */
    generateBatch<T = unknown>(batchOptions: TOptions[], concurrency?: number): Promise<GenerationResult<T>[]>;
    /**
     * Generate with specific model
     */
    private generateWithModel;
    /**
     * Call Gemini API
     */
    private callGemini;
    /**
     * Call OpenRouter API
     */
    private callOpenRouter;
    /**
     * Validate generation options (count >= 1, known format).
     */
    protected validateOptions(options: TOptions): void;
    /**
     * Try to parse items from streaming buffer (base impl returns none).
     */
    protected tryParseStreamBuffer(buffer: string, options: TOptions): unknown[];
    /**
     * Format output based on options ('json' | 'csv' | 'array').
     */
    protected formatOutput(data: unknown[], format?: string): string | unknown[];
    /**
     * Convert data to CSV format
     */
    private convertToCSV;
}
//# sourceMappingURL=base.d.ts.map
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/base.d.ts.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/base.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"base.d.ts","sourceRoot":"","sources":["base.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAC;AAC3D,OAAO,EACL,WAAW,EACX,gBAAgB,EAChB,gBAAgB,EAIhB,cAAc,EACf,MAAM,aAAa,CAAC;AACrB,OAAO,EAAE,YAAY,EAAE,MAAM,mBAAmB,CAAC;AACjD,OAAO,EAAE,WAAW,EAAE,MAAM,qBAAqB,CAAC;AAElD,8BAAsB,aAAa,CAAC,QAAQ,SAAS,gBAAgB,GAAG,gBAAgB;IACtF,SAAS,CAAC,MAAM,EAAE,WAAW,CAAC;IAC9B,SAAS,CAAC,KAAK,EAAE,YAAY,CAAC;IAC9B,SAAS,CAAC,MAAM,EAAE,WAAW,CAAC;IAC9B,SAAS,CAAC,MAAM,CAAC,EAAE,kBAAkB,CAAC;gBAE1B,MAAM,EAAE,WAAW;IA2C/B;;OAEG;IACH,SAAS,CAAC,QAAQ,CAAC,cAAc,CAAC,OAAO,EAAE,QAAQ,GAAG,MAAM;IAE5D;;OAEG;IACH,SAAS,CAAC,QAAQ,CAAC,WAAW,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,QAAQ,GAAG,OAAO,EAAE;IAE9E;;OAEG;IACG,QAAQ,CAAC,CAAC,GAAG,OAAO,EAAE,OAAO,EAAE,QAAQ,GAAG,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC;IAsD5E;;OAEG;IACI,cAAc,CAAC,CAAC,GAAG,OAAO,EAC/B,OAAO,EAAE,QAAQ,EACjB,QAAQ,CAAC,EAAE,cAAc,CAAC,CAAC,CAAC,GAC3B,cAAc,CAAC,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC;IAwCnC;;OAEG;IACG,aAAa,CAAC,CAAC,GAAG,OAAO,EAC7B,YAAY,EAAE,QAAQ,EAAE,EACxB,WAAW,GAAE,MAAU,GACtB,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC;IAcjC;;OAEG;YACW,iBAAiB;IA+B/B;;OAEG;YACW,UAAU;IAqBxB;;OAEG;YACW,cAAc;IAsC5B;;OAEG;IACH,SAAS,CAAC,eAAe,CAAC,OAAO,EAAE,QAAQ,GAAG,IAAI;IAUlD;;OAEG;IACH,SAAS,CAAC,oBAAoB,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,QAAQ,GAAG,OAAO,EAAE;IAK5E;;OAEG;IACH,SAAS,CAAC,YAAY,CAAC,IAAI,EAAE,OAAO,EAAE,EAAE,MAAM,GAAE,MAAe,GAAG,MAAM,GAAG,OAAO,EAAE;IAYpF;;OAEG;IACH,OAAO,CAAC,YAAY;CAerB"}
|
||||
284
vendor/ruvector/npm/packages/agentic-synth/src/generators/base.js
vendored
Normal file
284
vendor/ruvector/npm/packages/agentic-synth/src/generators/base.js
vendored
Normal file
@@ -0,0 +1,284 @@
|
||||
"use strict";
|
||||
/**
|
||||
* Base generator class with API integration
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.BaseGenerator = void 0;
|
||||
const generative_ai_1 = require("@google/generative-ai");
|
||||
const types_js_1 = require("../types.js");
|
||||
const index_js_1 = require("../cache/index.js");
|
||||
const index_js_2 = require("../routing/index.js");
|
||||
class BaseGenerator {
    /**
     * Compiled (tsc) twin of src/generators/base.ts — keep the two in sync.
     *
     * @param {object} config - Synth configuration. Reads: provider, model,
     *   apiKey, cacheStrategy, cacheTTL, enableFallback, fallbackChain,
     *   streaming. Also consults GEMINI_API_KEY / OPENROUTER_API_KEY.
     */
    constructor(config) {
        this.config = config;
        // Initialize cache
        this.cache = new index_js_1.CacheManager({
            strategy: config.cacheStrategy || 'memory',
            // NOTE(review): `||` turns an explicit cacheTTL of 0 into 3600;
            // `??` would honor a zero TTL — confirm intent.
            ttl: config.cacheTTL || 3600,
            maxSize: 1000
        });
        // Initialize router with user configuration
        // Respect user's fallback preferences instead of hardcoding
        let fallbackChain = undefined;
        // Only use fallback if explicitly enabled (default: true)
        if (config.enableFallback !== false) {
            if (config.fallbackChain && config.fallbackChain.length > 0) {
                // Use user-provided fallback chain
                fallbackChain = config.fallbackChain;
            }
            else {
                // Use default fallback chain
                // The router will still respect the user's primary provider choice
                // Fallback only triggers if primary provider fails
                fallbackChain = config.provider === 'gemini' ? ['openrouter'] : ['gemini'];
            }
        }
        this.router = new index_js_2.ModelRouter({
            defaultProvider: config.provider,
            providerKeys: {
                gemini: config.apiKey || process.env.GEMINI_API_KEY,
                openrouter: process.env.OPENROUTER_API_KEY
            },
            fallbackChain
        });
        // Initialize Gemini if needed
        const geminiKey = config.apiKey || process.env.GEMINI_API_KEY;
        if (config.provider === 'gemini' && geminiKey) {
            this.gemini = new generative_ai_1.GoogleGenerativeAI(geminiKey);
        }
    }
    /**
     * Generate synthetic data.
     *
     * Flow: validate -> cache lookup -> route selection -> try each route in
     * the fallback chain until one succeeds -> cache and return the result.
     * Cached hits are returned with `metadata.cached: true`.
     *
     * @throws {APIError} When every route in the fallback chain fails.
     */
    async generate(options) {
        const startTime = Date.now();
        // Validate options
        this.validateOptions(options);
        // Check cache — key derives from the concrete generator class + options.
        const cacheKey = index_js_1.CacheManager.generateKey('generate', {
            type: this.constructor.name,
            options
        });
        const cached = await this.cache.get(cacheKey);
        if (cached) {
            return {
                ...cached,
                metadata: {
                    ...cached.metadata,
                    cached: true
                }
            };
        }
        // Select model
        const route = this.router.selectModel({
            provider: this.config.provider,
            preferredModel: this.config.model,
            capabilities: ['text', 'json']
        });
        // Generate with retry logic
        let lastError = null;
        const fallbackChain = this.router.getFallbackChain(route);
        for (const fallbackRoute of fallbackChain) {
            try {
                const result = await this.generateWithModel(fallbackRoute, options, startTime);
                // Cache result
                await this.cache.set(cacheKey, result, this.config.cacheTTL);
                return result;
            }
            catch (error) {
                lastError = error;
                console.warn(`Failed with ${fallbackRoute.model}, trying fallback...`);
            }
        }
        throw new types_js_1.APIError(`All model attempts failed: ${lastError?.message}`, { lastError, fallbackChain });
    }
    /**
     * Generate with streaming support.
     *
     * Only the Gemini route supports streaming here; other providers raise
     * APIError. Requires `config.streaming` to be enabled.
     *
     * @throws {ValidationError} When streaming is disabled in config.
     * @throws {APIError} When the selected route cannot stream.
     */
    async *generateStream(options, callback) {
        if (!this.config.streaming) {
            throw new types_js_1.ValidationError('Streaming not enabled in configuration');
        }
        const prompt = this.generatePrompt(options);
        const route = this.router.selectModel({
            provider: this.config.provider,
            capabilities: ['streaming']
        });
        if (route.provider === 'gemini' && this.gemini) {
            const model = this.gemini.getGenerativeModel({ model: route.model });
            const result = await model.generateContentStream(prompt);
            let buffer = '';
            for await (const chunk of result.stream) {
                const text = chunk.text();
                buffer += text;
                // Try to parse complete items
                // NOTE(review): the full (growing) buffer is passed on every chunk;
                // subclass tryParseStreamBuffer implementations are expected to
                // return only newly-completed items, otherwise items would be
                // yielded repeatedly — confirm against the subclasses.
                const items = this.tryParseStreamBuffer(buffer, options);
                for (const item of items) {
                    if (callback) {
                        await callback({ type: 'data', data: item });
                    }
                    yield item;
                }
            }
        }
        else {
            throw new types_js_1.APIError('Streaming not supported for this provider/model', {
                route
            });
        }
        if (callback) {
            await callback({ type: 'complete' });
        }
    }
    /**
     * Batch generation with parallel processing.
     *
     * Runs `concurrency` generate() calls at a time; Promise.all is
     * fail-fast, so one failure in a wave rejects the whole batch.
     *
     * @param {Array} batchOptions - One options object per generation.
     * @param {number} [concurrency=3] - Parallel requests per wave.
     */
    async generateBatch(batchOptions, concurrency = 3) {
        const results = [];
        for (let i = 0; i < batchOptions.length; i += concurrency) {
            const batch = batchOptions.slice(i, i + concurrency);
            const batchResults = await Promise.all(batch.map(options => this.generate(options)));
            results.push(...batchResults);
        }
        return results;
    }
    /**
     * Generate with specific model: build the prompt, call the provider,
     * parse the response, and wrap it with timing/provenance metadata.
     * @private
     */
    async generateWithModel(route, options, startTime) {
        const prompt = this.generatePrompt(options);
        let response;
        if (route.provider === 'gemini' && this.gemini) {
            response = await this.callGemini(route.model, prompt);
        }
        else if (route.provider === 'openrouter') {
            response = await this.callOpenRouter(route.model, prompt);
        }
        else {
            throw new types_js_1.APIError(`Unsupported provider: ${route.provider}`, { route });
        }
        const data = this.parseResult(response, options);
        return {
            data,
            metadata: {
                count: data.length,
                generatedAt: new Date(),
                provider: route.provider,
                model: route.model,
                cached: false,
                duration: Date.now() - startTime
            }
        };
    }
    /**
     * Call Gemini API and return the response text.
     * @private
     * @throws {APIError} When the client is missing or the API call fails.
     */
    async callGemini(model, prompt) {
        if (!this.gemini) {
            throw new types_js_1.APIError('Gemini client not initialized', {
                provider: 'gemini'
            });
        }
        try {
            const genModel = this.gemini.getGenerativeModel({ model });
            const result = await genModel.generateContent(prompt);
            const response = result.response;
            return response.text();
        }
        catch (error) {
            const errorMessage = error instanceof Error ? error.message : 'Unknown error';
            throw new types_js_1.APIError(`Gemini API error: ${errorMessage}`, {
                model,
                error
            });
        }
    }
    /**
     * Call OpenRouter API (chat completions) and return the first choice's
     * message content ('' when absent).
     * @private
     * @throws {APIError} When the key is missing or the request fails.
     */
    async callOpenRouter(model, prompt) {
        const apiKey = process.env.OPENROUTER_API_KEY;
        if (!apiKey) {
            throw new types_js_1.APIError('OpenRouter API key not configured', {
                provider: 'openrouter'
            });
        }
        try {
            const response = await fetch('https://openrouter.ai/api/v1/chat/completions', {
                method: 'POST',
                headers: {
                    'Authorization': `Bearer ${apiKey}`,
                    'Content-Type': 'application/json'
                },
                body: JSON.stringify({
                    model,
                    messages: [{ role: 'user', content: prompt }]
                })
            });
            if (!response.ok) {
                throw new Error(`HTTP ${response.status}: ${response.statusText}`);
            }
            const data = await response.json();
            return data.choices?.[0]?.message?.content || '';
        }
        catch (error) {
            const errorMessage = error instanceof Error ? error.message : 'Unknown error';
            throw new types_js_1.APIError(`OpenRouter API error: ${errorMessage}`, {
                model,
                error
            });
        }
    }
    /**
     * Validate generation options: count >= 1 (when given) and a known
     * output format.
     * @throws {ValidationError}
     */
    validateOptions(options) {
        if (options.count !== undefined && options.count < 1) {
            throw new types_js_1.ValidationError('Count must be at least 1', { options });
        }
        if (options.format && !['json', 'csv', 'array'].includes(options.format)) {
            throw new types_js_1.ValidationError('Invalid format', { options });
        }
    }
    /**
     * Try to parse items from streaming buffer.
     * Base implementation yields nothing; subclasses override with
     * format-specific incremental parsing.
     */
    tryParseStreamBuffer(buffer, options) {
        // Override in subclasses for specific parsing logic
        return [];
    }
    /**
     * Format output based on options: 'csv' -> CSV string,
     * 'array' -> raw array, anything else -> pretty-printed JSON string.
     */
    formatOutput(data, format = 'json') {
        switch (format) {
            case 'csv':
                return this.convertToCSV(data);
            case 'array':
                return data;
            case 'json':
            default:
                return JSON.stringify(data, null, 2);
        }
    }
    /**
     * Convert data to CSV format. Headers come from the first item's keys;
     * cells are JSON-stringified (missing keys become ""). Non-object rows
     * become empty lines.
     */
    convertToCSV(data) {
        if (data.length === 0)
            return '';
        const firstItem = data[0];
        if (typeof firstItem !== 'object' || firstItem === null)
            return '';
        const headers = Object.keys(firstItem);
        const rows = data.map(item => {
            if (typeof item !== 'object' || item === null)
                return '';
            const record = item;
            return headers.map(header => JSON.stringify(record[header] ?? '')).join(',');
        });
        return [headers.join(','), ...rows].join('\n');
    }
}
|
||||
exports.BaseGenerator = BaseGenerator;
|
||||
//# sourceMappingURL=base.js.map
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/base.js.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/base.js.map
vendored
Normal file
File diff suppressed because one or more lines are too long
353
vendor/ruvector/npm/packages/agentic-synth/src/generators/base.ts
vendored
Normal file
353
vendor/ruvector/npm/packages/agentic-synth/src/generators/base.ts
vendored
Normal file
@@ -0,0 +1,353 @@
|
||||
/**
|
||||
* Base generator class with API integration
|
||||
*/
|
||||
|
||||
import { GoogleGenerativeAI } from '@google/generative-ai';
|
||||
import {
|
||||
SynthConfig,
|
||||
GeneratorOptions,
|
||||
GenerationResult,
|
||||
ModelProvider,
|
||||
APIError,
|
||||
ValidationError,
|
||||
StreamCallback
|
||||
} from '../types.js';
|
||||
import { CacheManager } from '../cache/index.js';
|
||||
import { ModelRouter } from '../routing/index.js';
|
||||
|
||||
export abstract class BaseGenerator<TOptions extends GeneratorOptions = GeneratorOptions> {
  /** Generator configuration (provider, model, cache/fallback settings). */
  protected config: SynthConfig;
  /** Response cache keyed by generator type + options. */
  protected cache: CacheManager;
  /** Provider/model selection with optional fallback chain. */
  protected router: ModelRouter;
  /** Gemini client; only created when provider is 'gemini' and a key exists. */
  protected gemini?: GoogleGenerativeAI;

  /**
   * @param config - Synth configuration. Reads: provider, model, apiKey,
   *   cacheStrategy, cacheTTL, enableFallback, fallbackChain, streaming.
   *   Also consults GEMINI_API_KEY / OPENROUTER_API_KEY from the environment.
   */
  constructor(config: SynthConfig) {
    this.config = config;

    // Initialize cache
    this.cache = new CacheManager({
      strategy: config.cacheStrategy || 'memory',
      // NOTE(review): `||` turns an explicit cacheTTL of 0 into 3600;
      // `??` would honor a zero TTL — confirm intent.
      ttl: config.cacheTTL || 3600,
      maxSize: 1000
    });

    // Initialize router with user configuration
    // Respect user's fallback preferences instead of hardcoding
    let fallbackChain: ModelProvider[] | undefined = undefined;

    // Only use fallback if explicitly enabled (default: true)
    if (config.enableFallback !== false) {
      if (config.fallbackChain && config.fallbackChain.length > 0) {
        // Use user-provided fallback chain
        fallbackChain = config.fallbackChain;
      } else {
        // Use default fallback chain
        // The router will still respect the user's primary provider choice
        // Fallback only triggers if primary provider fails
        fallbackChain = config.provider === 'gemini' ? ['openrouter'] : ['gemini'];
      }
    }

    this.router = new ModelRouter({
      defaultProvider: config.provider,
      providerKeys: {
        gemini: config.apiKey || process.env.GEMINI_API_KEY,
        openrouter: process.env.OPENROUTER_API_KEY
      },
      fallbackChain
    });

    // Initialize Gemini if needed
    const geminiKey = config.apiKey || process.env.GEMINI_API_KEY;
    if (config.provider === 'gemini' && geminiKey) {
      this.gemini = new GoogleGenerativeAI(geminiKey);
    }
  }

  /**
   * Abstract method for generation logic: build the provider prompt.
   */
  protected abstract generatePrompt(options: TOptions): string;

  /**
   * Abstract method for result parsing: turn the raw response into records.
   */
  protected abstract parseResult(response: string, options: TOptions): unknown[];

  /**
   * Generate synthetic data.
   *
   * Flow: validate -> cache lookup -> route selection -> try each route in
   * the fallback chain until one succeeds -> cache and return the result.
   * Cached hits are returned with `metadata.cached: true`.
   *
   * @throws {APIError} When every route in the fallback chain fails.
   */
  async generate<T = unknown>(options: TOptions): Promise<GenerationResult<T>> {
    const startTime = Date.now();

    // Validate options
    this.validateOptions(options);

    // Check cache — key derives from the concrete generator class + options.
    const cacheKey = CacheManager.generateKey('generate', {
      type: this.constructor.name,
      options
    });

    const cached = await this.cache.get<GenerationResult<T>>(cacheKey);
    if (cached) {
      return {
        ...cached,
        metadata: {
          ...cached.metadata,
          cached: true
        }
      };
    }

    // Select model
    const route = this.router.selectModel({
      provider: this.config.provider,
      preferredModel: this.config.model,
      capabilities: ['text', 'json']
    });

    // Generate with retry logic
    let lastError: Error | null = null;
    const fallbackChain = this.router.getFallbackChain(route);

    for (const fallbackRoute of fallbackChain) {
      try {
        const result = await this.generateWithModel<T>(fallbackRoute, options, startTime);

        // Cache result
        await this.cache.set(cacheKey, result, this.config.cacheTTL);

        return result;
      } catch (error) {
        lastError = error as Error;
        console.warn(`Failed with ${fallbackRoute.model}, trying fallback...`);
      }
    }

    throw new APIError(
      `All model attempts failed: ${lastError?.message}`,
      { lastError, fallbackChain }
    );
  }

  /**
   * Generate with streaming support.
   *
   * Only the Gemini route supports streaming here; other providers raise
   * APIError. Requires `config.streaming` to be enabled.
   *
   * @throws {ValidationError} When streaming is disabled in config.
   * @throws {APIError} When the selected route cannot stream.
   */
  async *generateStream<T = unknown>(
    options: TOptions,
    callback?: StreamCallback<T>
  ): AsyncGenerator<T, void, unknown> {
    if (!this.config.streaming) {
      throw new ValidationError('Streaming not enabled in configuration');
    }

    const prompt = this.generatePrompt(options);
    const route = this.router.selectModel({
      provider: this.config.provider,
      capabilities: ['streaming']
    });

    if (route.provider === 'gemini' && this.gemini) {
      const model = this.gemini.getGenerativeModel({ model: route.model });
      const result = await model.generateContentStream(prompt);

      let buffer = '';
      for await (const chunk of result.stream) {
        const text = chunk.text();
        buffer += text;

        // Try to parse complete items
        // NOTE(review): the full (growing) buffer is passed on every chunk;
        // subclass tryParseStreamBuffer implementations are expected to
        // return only newly-completed items, otherwise items would be
        // yielded repeatedly — confirm against the subclasses.
        const items = this.tryParseStreamBuffer(buffer, options);
        for (const item of items) {
          if (callback) {
            await callback({ type: 'data', data: item as T });
          }
          yield item as T;
        }
      }
    } else {
      throw new APIError('Streaming not supported for this provider/model', {
        route
      });
    }

    if (callback) {
      await callback({ type: 'complete' });
    }
  }

  /**
   * Batch generation with parallel processing.
   *
   * Runs `concurrency` generate() calls at a time; Promise.all is
   * fail-fast, so one failure in a wave rejects the whole batch.
   *
   * @param batchOptions - One options object per generation.
   * @param concurrency - Parallel requests per wave (default 3).
   */
  async generateBatch<T = unknown>(
    batchOptions: TOptions[],
    concurrency: number = 3
  ): Promise<GenerationResult<T>[]> {
    const results: GenerationResult<T>[] = [];

    for (let i = 0; i < batchOptions.length; i += concurrency) {
      const batch = batchOptions.slice(i, i + concurrency);
      const batchResults = await Promise.all(
        batch.map(options => this.generate<T>(options))
      );
      results.push(...batchResults);
    }

    return results;
  }

  /**
   * Generate with specific model: build the prompt, call the provider,
   * parse the response, and wrap it with timing/provenance metadata.
   */
  private async generateWithModel<T>(
    route: ReturnType<ModelRouter['selectModel']>,
    options: TOptions,
    startTime: number
  ): Promise<GenerationResult<T>> {
    const prompt = this.generatePrompt(options);

    let response: string;
    if (route.provider === 'gemini' && this.gemini) {
      response = await this.callGemini(route.model, prompt);
    } else if (route.provider === 'openrouter') {
      response = await this.callOpenRouter(route.model, prompt);
    } else {
      throw new APIError(`Unsupported provider: ${route.provider}`, { route });
    }

    const data = this.parseResult(response, options) as T[];

    return {
      data,
      metadata: {
        count: data.length,
        generatedAt: new Date(),
        provider: route.provider,
        model: route.model,
        cached: false,
        duration: Date.now() - startTime
      }
    };
  }

  /**
   * Call Gemini API and return the response text.
   * @throws {APIError} When the client is missing or the API call fails.
   */
  private async callGemini(model: string, prompt: string): Promise<string> {
    if (!this.gemini) {
      throw new APIError('Gemini client not initialized', {
        provider: 'gemini'
      });
    }

    try {
      const genModel = this.gemini.getGenerativeModel({ model });
      const result = await genModel.generateContent(prompt);
      const response = result.response;
      return response.text();
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      throw new APIError(`Gemini API error: ${errorMessage}`, {
        model,
        error
      });
    }
  }

  /**
   * Call OpenRouter API (chat completions) and return the first choice's
   * message content ('' when absent).
   * @throws {APIError} When the key is missing or the request fails.
   */
  private async callOpenRouter(model: string, prompt: string): Promise<string> {
    const apiKey = process.env.OPENROUTER_API_KEY;
    if (!apiKey) {
      throw new APIError('OpenRouter API key not configured', {
        provider: 'openrouter'
      });
    }

    try {
      const response = await fetch('https://openrouter.ai/api/v1/chat/completions', {
        method: 'POST',
        headers: {
          'Authorization': `Bearer ${apiKey}`,
          'Content-Type': 'application/json'
        },
        body: JSON.stringify({
          model,
          messages: [{ role: 'user', content: prompt }]
        })
      });

      if (!response.ok) {
        throw new Error(`HTTP ${response.status}: ${response.statusText}`);
      }

      const data = await response.json() as {
        choices?: Array<{ message?: { content?: string } }>
      };
      return data.choices?.[0]?.message?.content || '';
    } catch (error) {
      const errorMessage = error instanceof Error ? error.message : 'Unknown error';
      throw new APIError(`OpenRouter API error: ${errorMessage}`, {
        model,
        error
      });
    }
  }

  /**
   * Validate generation options: count >= 1 (when given) and a known
   * output format.
   * @throws {ValidationError}
   */
  protected validateOptions(options: TOptions): void {
    if (options.count !== undefined && options.count < 1) {
      throw new ValidationError('Count must be at least 1', { options });
    }

    if (options.format && !['json', 'csv', 'array'].includes(options.format)) {
      throw new ValidationError('Invalid format', { options });
    }
  }

  /**
   * Try to parse items from streaming buffer.
   * Base implementation yields nothing; subclasses override with
   * format-specific incremental parsing.
   */
  protected tryParseStreamBuffer(buffer: string, options: TOptions): unknown[] {
    // Override in subclasses for specific parsing logic
    return [];
  }

  /**
   * Format output based on options: 'csv' -> CSV string,
   * 'array' -> raw array, anything else -> pretty-printed JSON string.
   */
  protected formatOutput(data: unknown[], format: string = 'json'): string | unknown[] {
    switch (format) {
      case 'csv':
        return this.convertToCSV(data);
      case 'array':
        return data;
      case 'json':
      default:
        return JSON.stringify(data, null, 2);
    }
  }

  /**
   * Convert data to CSV format. Headers come from the first item's keys;
   * cells are JSON-stringified (missing keys become ""). Non-object rows
   * become empty lines.
   */
  private convertToCSV(data: unknown[]): string {
    if (data.length === 0) return '';

    const firstItem = data[0];
    if (typeof firstItem !== 'object' || firstItem === null) return '';

    const headers = Object.keys(firstItem);
    const rows = data.map(item => {
      if (typeof item !== 'object' || item === null) return '';
      const record = item as Record<string, unknown>;
      return headers.map(header => JSON.stringify(record[header] ?? '')).join(',');
    });

    return [headers.join(','), ...rows].join('\n');
  }
}
|
||||
93
vendor/ruvector/npm/packages/agentic-synth/src/generators/data-generator.js
vendored
Normal file
93
vendor/ruvector/npm/packages/agentic-synth/src/generators/data-generator.js
vendored
Normal file
@@ -0,0 +1,93 @@
|
||||
/**
|
||||
* Data Generator for synthetic data creation
|
||||
*/
|
||||
|
||||
export class DataGenerator {
  /**
   * Schema-driven synthetic data generator with reproducible output.
   *
   * @param {Object} [options]
   * @param {number} [options.seed] - PRNG seed; identical seeds + schema
   *   produce identical data (defaults to Date.now()).
   * @param {string} [options.format='json'] - Output format label.
   * @param {Object} [options.schema] - Field definitions:
   *   { fieldName: { type, length?, min?, max?, items?, dimensions? } }.
   */
  constructor(options = {}) {
    // `??` (not `||`) so an explicit seed of 0 is kept.
    this.seed = options.seed ?? Date.now();
    this.format = options.format || 'json';
    this.schema = options.schema || {};
    // Deterministic PRNG seeded from `this.seed`. Previously the seed was
    // stored but all values came from Math.random(), so identical seeds
    // did not reproduce data and setSeed() was a silent no-op.
    this._rand = this._createRng(this.seed);
  }

  /**
   * Generate synthetic data based on schema
   * @param {number} count - Number of records to generate
   * @returns {Array} Generated data
   * @throws {Error} If count < 1.
   */
  generate(count = 1) {
    if (count < 1) {
      throw new Error('Count must be at least 1');
    }

    const data = [];
    for (let i = 0; i < count; i++) {
      data.push(this._generateRecord(i));
    }
    return data;
  }

  /**
   * Generate a single record: `id` plus one value per schema field.
   * @private
   */
  _generateRecord(index) {
    const record = { id: index };

    for (const [field, config] of Object.entries(this.schema)) {
      record[field] = this._generateField(config);
    }

    return record;
  }

  /**
   * Generate a field value based on its declared type.
   * Supported types: string, number, boolean, array, vector; unknown
   * types yield null.
   * @private
   */
  _generateField(config) {
    const type = config.type || 'string';

    switch (type) {
      case 'string':
        return this._randomString(config.length || 10);
      case 'number':
        return this._randomNumber(config.min || 0, config.max || 100);
      case 'boolean':
        return this._rand() > 0.5;
      case 'array':
        return this._randomArray(config.items || 5);
      case 'vector':
        return this._randomVector(config.dimensions || 128);
      default:
        return null;
    }
  }

  /** Random alphanumeric string of the given length. @private */
  _randomString(length) {
    const chars = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789';
    let result = '';
    for (let i = 0; i < length; i++) {
      result += chars.charAt(Math.floor(this._rand() * chars.length));
    }
    return result;
  }

  /** Random integer in [min, max] inclusive. @private */
  _randomNumber(min, max) {
    return Math.floor(this._rand() * (max - min + 1)) + min;
  }

  /** Sequential array [0 .. length-1]. @private */
  _randomArray(length) {
    return Array.from({ length }, (_, i) => i);
  }

  /** Vector of `dimensions` floats in [0, 1). @private */
  _randomVector(dimensions) {
    return Array.from({ length: dimensions }, () => this._rand());
  }

  /**
   * Set seed for reproducible generation. Resets the PRNG stream, so the
   * next generate() call replays deterministically for this seed.
   */
  setSeed(seed) {
    this.seed = seed;
    this._rand = this._createRng(seed);
  }

  /**
   * mulberry32: small, fast 32-bit seeded PRNG returning floats in [0, 1).
   * @private
   */
  _createRng(seed) {
    let state = seed >>> 0;
    return () => {
      state = (state + 0x6d2b79f5) >>> 0;
      let t = state;
      t = Math.imul(t ^ (t >>> 15), t | 1);
      t ^= t + Math.imul(t ^ (t >>> 7), t | 61);
      return ((t ^ (t >>> 14)) >>> 0) / 4294967296;
    };
  }
}
|
||||
16
vendor/ruvector/npm/packages/agentic-synth/src/generators/events.d.ts
vendored
Normal file
16
vendor/ruvector/npm/packages/agentic-synth/src/generators/events.d.ts
vendored
Normal file
@@ -0,0 +1,16 @@
|
||||
/**
 * Event data generator.
 *
 * Type declarations for the generator that produces synthetic event-log
 * entries (eventId, eventType, timestamp, userId, metadata), either via
 * the provider API (BaseGenerator) or locally (generateLocal).
 */
import { BaseGenerator } from './base.js';
import { EventOptions } from '../types.js';
export declare class EventGenerator extends BaseGenerator<EventOptions> {
    /** Build the LLM prompt describing the requested event stream. */
    protected generatePrompt(options: EventOptions): string;
    /** Extract and validate the JSON array of events from the raw response. */
    protected parseResult(response: string, options: EventOptions): unknown[];
    /**
     * Generate synthetic events with local computation
     */
    generateLocal(options: EventOptions): Promise<Array<Record<string, unknown>>>;
    /** Produce timestamps for the requested distribution/time range. */
    private generateTimestamps;
    /** Produce event-type-specific metadata for a record. */
    private generateMetadata;
}
//# sourceMappingURL=events.d.ts.map
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/events.d.ts.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/events.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"events.d.ts","sourceRoot":"","sources":["events.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,aAAa,EAAE,MAAM,WAAW,CAAC;AAC1C,OAAO,EAAE,YAAY,EAAmB,MAAM,aAAa,CAAC;AAE5D,qBAAa,cAAe,SAAQ,aAAa,CAAC,YAAY,CAAC;IAC7D,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,YAAY,GAAG,MAAM;IAyDvD,SAAS,CAAC,WAAW,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,YAAY,GAAG,OAAO,EAAE;IAuDzE;;OAEG;IACG,aAAa,CAAC,OAAO,EAAE,YAAY,GAAG,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;IAiDnF,OAAO,CAAC,kBAAkB;IA4C1B,OAAO,CAAC,gBAAgB;CA2BzB"}
|
||||
191
vendor/ruvector/npm/packages/agentic-synth/src/generators/events.js
vendored
Normal file
191
vendor/ruvector/npm/packages/agentic-synth/src/generators/events.js
vendored
Normal file
@@ -0,0 +1,191 @@
|
||||
"use strict";
|
||||
/**
|
||||
* Event data generator
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.EventGenerator = void 0;
|
||||
const base_js_1 = require("./base.js");
|
||||
const types_js_1 = require("../types.js");
|
||||
/**
 * Synthetic event-log generator.
 *
 * Events can be produced either through the LLM pipeline inherited from
 * BaseGenerator (generatePrompt -> model -> parseResult) or entirely
 * locally via generateLocal(), which needs no model at all.
 */
class EventGenerator extends base_js_1.BaseGenerator {
    /**
     * Build the LLM prompt that asks for `count` JSON event records.
     * @param {Object} options - EventOptions: count, eventTypes,
     *   distribution ('uniform' | 'poisson' | 'normal'),
     *   timeRange {start, end}, userCount, schema, constraints.
     * @returns {string} Prompt text; the model must reply with a bare JSON array.
     */
    generatePrompt(options) {
        const { count = 100, eventTypes = ['click', 'view', 'purchase'], distribution = 'uniform', timeRange, userCount = 50, schema, constraints } = options;
        // Default window: the last 24 hours.
        const start = timeRange?.start || new Date(Date.now() - 24 * 60 * 60 * 1000);
        const end = timeRange?.end || new Date();
        let prompt = `Generate ${count} event log entries with the following specifications:

Event Configuration:
- Event types: ${eventTypes.join(', ')}
- Distribution: ${distribution}
- Time range: ${start} to ${end}
- Unique users: ${userCount}

`;
        if (schema) {
            prompt += `\nSchema:\n${JSON.stringify(schema, null, 2)}\n`;
        }
        if (constraints) {
            prompt += `\nConstraints:\n${JSON.stringify(constraints, null, 2)}\n`;
        }
        prompt += `
Generate realistic event data where each event has:
- eventId: unique identifier
- eventType: one of the specified types
- timestamp: ISO 8601 formatted date within the time range
- userId: user identifier (1 to ${userCount})
- metadata: relevant event-specific data

Distribution patterns:
- uniform: events evenly distributed over time
- poisson: random but clustered events (realistic web traffic)
- normal: events concentrated around mean time

Ensure:
1. Events are chronologically ordered
2. Event types follow realistic usage patterns
3. User behavior is consistent and realistic
4. Metadata is relevant to event type
5. Timestamps fall within the specified range

Return ONLY a JSON array of events, no additional text.`;
        return prompt;
    }
    /**
     * Parse and validate a model response.
     * @param {string} response - Raw model output; must contain a JSON array.
     * @param {Object} options - The EventOptions used for generation (shared
     *   signature with BaseGenerator; unused here).
     * @returns {Array<Object>} Normalized events with eventId, eventType,
     *   ISO-8601 timestamp, userId and metadata.
     * @throws {ValidationError} If no array is found or a record is malformed.
     */
    parseResult(response, options) {
        try {
            // Extract the first JSON array from the response
            const jsonMatch = response.match(/\[[\s\S]*\]/);
            if (!jsonMatch) {
                throw new Error('No JSON array found in response');
            }
            const data = JSON.parse(jsonMatch[0]);
            if (!Array.isArray(data)) {
                throw new Error('Response is not an array');
            }
            // Validate event structure; eventId is the only repairable field.
            return data.map((event, index) => {
                if (typeof event !== 'object' || event === null) {
                    throw new types_js_1.ValidationError(`Invalid event at index ${index}`, { event });
                }
                const record = event;
                if (!record.eventId) {
                    record.eventId = `evt_${Date.now()}_${index}`;
                }
                if (!record.eventType) {
                    throw new types_js_1.ValidationError(`Missing eventType at index ${index}`, { event });
                }
                if (!record.timestamp) {
                    throw new types_js_1.ValidationError(`Missing timestamp at index ${index}`, { event });
                }
                if (!record.userId) {
                    throw new types_js_1.ValidationError(`Missing userId at index ${index}`, { event });
                }
                return {
                    eventId: record.eventId,
                    eventType: record.eventType,
                    timestamp: new Date(record.timestamp).toISOString(),
                    userId: record.userId,
                    metadata: record.metadata || {}
                };
            });
        }
        catch (error) {
            const errorMessage = error instanceof Error ? error.message : 'Unknown error';
            throw new types_js_1.ValidationError(`Failed to parse event data: ${errorMessage}`, {
                response: response.substring(0, 200),
                error
            });
        }
    }
    /**
     * Generate synthetic events with local computation (no model round-trip).
     * @param {Object} options - EventOptions; see generatePrompt.
     * @returns {Promise<Array<Object>>} Exactly `count` events, sorted by timestamp.
     * @throws {ValidationError} If a timestamp or event type cannot be produced.
     */
    async generateLocal(options) {
        const { count = 100, eventTypes = ['click', 'view', 'purchase'], distribution = 'uniform', timeRange, userCount = 50 } = options;
        const start = timeRange?.start
            ? new Date(timeRange.start).getTime()
            : Date.now() - 24 * 60 * 60 * 1000;
        const end = timeRange?.end ? new Date(timeRange.end).getTime() : Date.now();
        const events = [];
        const timestamps = this.generateTimestamps(count, start, end, distribution);
        for (let i = 0; i < count; i++) {
            const eventType = eventTypes[Math.floor(Math.random() * eventTypes.length)];
            const userId = `user_${Math.floor(Math.random() * userCount) + 1}`;
            const timestamp = timestamps[i];
            // Defensive check; generateTimestamps now always yields `count` entries.
            if (eventType === undefined || timestamp === undefined) {
                throw new types_js_1.ValidationError(`Failed to generate event at index ${i}`, { eventType, timestamp });
            }
            events.push({
                eventId: `evt_${Date.now()}_${i}`,
                eventType,
                timestamp: new Date(timestamp).toISOString(),
                userId,
                metadata: this.generateMetadata(eventType)
            });
        }
        // Sort by timestamp so events read chronologically.
        events.sort((a, b) => {
            const aTime = typeof a.timestamp === 'string' ? new Date(a.timestamp).getTime() : 0;
            const bTime = typeof b.timestamp === 'string' ? new Date(b.timestamp).getTime() : 0;
            return aTime - bTime;
        });
        return events;
    }
    /**
     * Produce exactly `count` millisecond timestamps in [start, end], ascending.
     * @param {number} count
     * @param {number} start - Epoch ms, inclusive lower bound.
     * @param {number} end - Epoch ms, inclusive upper bound.
     * @param {'uniform'|'poisson'|'normal'} distribution
     * @returns {number[]}
     * @throws {ValidationError} For an unrecognized distribution.
     */
    generateTimestamps(count, start, end, distribution) {
        const timestamps = [];
        const range = end - start;
        switch (distribution) {
            case 'uniform': {
                for (let i = 0; i < count; i++) {
                    timestamps.push(start + Math.random() * range);
                }
                break;
            }
            case 'poisson': {
                // Exponential inter-arrival times; lambda is chosen so the expected
                // number of arrivals fills the range. We always emit exactly `count`
                // timestamps (clamped to `end`): the previous `time < end` loop guard
                // could stop early and return fewer entries than requested, which
                // made generateLocal() throw on the missing indices.
                let time = start;
                const lambda = count / range; // events per ms
                for (let i = 0; i < count; i++) {
                    const interval = -Math.log(1 - Math.random()) / lambda;
                    time += interval;
                    timestamps.push(Math.min(time, end));
                }
                break;
            }
            case 'normal': {
                // Box-Muller transform around the midpoint, clamped into range.
                const mean = start + range / 2;
                const stdDev = range / 6; // 99.7% within range
                for (let i = 0; i < count; i++) {
                    const u1 = Math.random();
                    const u2 = Math.random();
                    const z = Math.sqrt(-2 * Math.log(u1)) * Math.cos(2 * Math.PI * u2);
                    const timestamp = mean + z * stdDev;
                    timestamps.push(Math.max(start, Math.min(end, timestamp)));
                }
                break;
            }
            default: {
                // Plain-JS callers are not type-checked; fail loudly instead of
                // silently returning an empty array.
                throw new types_js_1.ValidationError(`Unknown distribution: ${distribution}`, { distribution });
            }
        }
        return timestamps.sort((a, b) => a - b);
    }
    /**
     * Fabricate plausible metadata for a given event type.
     * @param {string} eventType - Matched case-insensitively.
     * @returns {Object} Type-specific metadata; unknown types get `{ type }`.
     */
    generateMetadata(eventType) {
        const metadata = {};
        switch (eventType.toLowerCase()) {
            case 'click':
                metadata.element = ['button', 'link', 'image'][Math.floor(Math.random() * 3)];
                metadata.position = { x: Math.floor(Math.random() * 1920), y: Math.floor(Math.random() * 1080) };
                break;
            case 'view':
                metadata.page = `/page${Math.floor(Math.random() * 10)}`;
                metadata.duration = Math.floor(Math.random() * 300); // seconds
                break;
            case 'purchase':
                metadata.amount = Math.floor(Math.random() * 1000) / 10;
                metadata.currency = 'USD';
                metadata.items = Math.floor(Math.random() * 5) + 1;
                break;
            default:
                metadata.type = eventType;
                break;
        }
        return metadata;
    }
}
exports.EventGenerator = EventGenerator;
|
||||
//# sourceMappingURL=events.js.map
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/events.js.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/events.js.map
vendored
Normal file
File diff suppressed because one or more lines are too long
244
vendor/ruvector/npm/packages/agentic-synth/src/generators/events.ts
vendored
Normal file
244
vendor/ruvector/npm/packages/agentic-synth/src/generators/events.ts
vendored
Normal file
@@ -0,0 +1,244 @@
|
||||
/**
|
||||
* Event data generator
|
||||
*/
|
||||
|
||||
import { BaseGenerator } from './base.js';
|
||||
import { EventOptions, ValidationError } from '../types.js';
|
||||
|
||||
export class EventGenerator extends BaseGenerator<EventOptions> {
|
||||
protected generatePrompt(options: EventOptions): string {
|
||||
const {
|
||||
count = 100,
|
||||
eventTypes = ['click', 'view', 'purchase'],
|
||||
distribution = 'uniform',
|
||||
timeRange,
|
||||
userCount = 50,
|
||||
schema,
|
||||
constraints
|
||||
} = options;
|
||||
|
||||
const start = timeRange?.start || new Date(Date.now() - 24 * 60 * 60 * 1000);
|
||||
const end = timeRange?.end || new Date();
|
||||
|
||||
let prompt = `Generate ${count} event log entries with the following specifications:
|
||||
|
||||
Event Configuration:
|
||||
- Event types: ${eventTypes.join(', ')}
|
||||
- Distribution: ${distribution}
|
||||
- Time range: ${start} to ${end}
|
||||
- Unique users: ${userCount}
|
||||
|
||||
`;
|
||||
|
||||
if (schema) {
|
||||
prompt += `\nSchema:\n${JSON.stringify(schema, null, 2)}\n`;
|
||||
}
|
||||
|
||||
if (constraints) {
|
||||
prompt += `\nConstraints:\n${JSON.stringify(constraints, null, 2)}\n`;
|
||||
}
|
||||
|
||||
prompt += `
|
||||
Generate realistic event data where each event has:
|
||||
- eventId: unique identifier
|
||||
- eventType: one of the specified types
|
||||
- timestamp: ISO 8601 formatted date within the time range
|
||||
- userId: user identifier (1 to ${userCount})
|
||||
- metadata: relevant event-specific data
|
||||
|
||||
Distribution patterns:
|
||||
- uniform: events evenly distributed over time
|
||||
- poisson: random but clustered events (realistic web traffic)
|
||||
- normal: events concentrated around mean time
|
||||
|
||||
Ensure:
|
||||
1. Events are chronologically ordered
|
||||
2. Event types follow realistic usage patterns
|
||||
3. User behavior is consistent and realistic
|
||||
4. Metadata is relevant to event type
|
||||
5. Timestamps fall within the specified range
|
||||
|
||||
Return ONLY a JSON array of events, no additional text.`;
|
||||
|
||||
return prompt;
|
||||
}
|
||||
|
||||
protected parseResult(response: string, options: EventOptions): unknown[] {
|
||||
try {
|
||||
// Extract JSON from response
|
||||
const jsonMatch = response.match(/\[[\s\S]*\]/);
|
||||
if (!jsonMatch) {
|
||||
throw new Error('No JSON array found in response');
|
||||
}
|
||||
|
||||
const data = JSON.parse(jsonMatch[0]);
|
||||
|
||||
if (!Array.isArray(data)) {
|
||||
throw new Error('Response is not an array');
|
||||
}
|
||||
|
||||
// Validate event structure
|
||||
return data.map((event, index) => {
|
||||
if (typeof event !== 'object' || event === null) {
|
||||
throw new ValidationError(`Invalid event at index ${index}`, { event });
|
||||
}
|
||||
|
||||
const record = event as Record<string, unknown>;
|
||||
|
||||
if (!record.eventId) {
|
||||
record.eventId = `evt_${Date.now()}_${index}`;
|
||||
}
|
||||
|
||||
if (!record.eventType) {
|
||||
throw new ValidationError(`Missing eventType at index ${index}`, { event });
|
||||
}
|
||||
|
||||
if (!record.timestamp) {
|
||||
throw new ValidationError(`Missing timestamp at index ${index}`, { event });
|
||||
}
|
||||
|
||||
if (!record.userId) {
|
||||
throw new ValidationError(`Missing userId at index ${index}`, { event });
|
||||
}
|
||||
|
||||
return {
|
||||
eventId: record.eventId as string,
|
||||
eventType: record.eventType as string,
|
||||
timestamp: new Date(record.timestamp as string | number | Date).toISOString(),
|
||||
userId: record.userId as string,
|
||||
metadata: (record.metadata as Record<string, unknown>) || {}
|
||||
};
|
||||
});
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||
throw new ValidationError(`Failed to parse event data: ${errorMessage}`, {
|
||||
response: response.substring(0, 200),
|
||||
error
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate synthetic events with local computation
|
||||
*/
|
||||
async generateLocal(options: EventOptions): Promise<Array<Record<string, unknown>>> {
|
||||
const {
|
||||
count = 100,
|
||||
eventTypes = ['click', 'view', 'purchase'],
|
||||
distribution = 'uniform',
|
||||
timeRange,
|
||||
userCount = 50
|
||||
} = options;
|
||||
|
||||
const start = timeRange?.start
|
||||
? new Date(timeRange.start).getTime()
|
||||
: Date.now() - 24 * 60 * 60 * 1000;
|
||||
const end = timeRange?.end ? new Date(timeRange.end).getTime() : Date.now();
|
||||
|
||||
const events: Array<Record<string, unknown>> = [];
|
||||
const timestamps = this.generateTimestamps(count, start, end, distribution);
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
const eventType = eventTypes[Math.floor(Math.random() * eventTypes.length)];
|
||||
const userId = `user_${Math.floor(Math.random() * userCount) + 1}`;
|
||||
const timestamp = timestamps[i];
|
||||
|
||||
// Ensure we have valid values (strict mode checks)
|
||||
if (eventType === undefined || timestamp === undefined) {
|
||||
throw new ValidationError(
|
||||
`Failed to generate event at index ${i}`,
|
||||
{ eventType, timestamp }
|
||||
);
|
||||
}
|
||||
|
||||
events.push({
|
||||
eventId: `evt_${Date.now()}_${i}`,
|
||||
eventType,
|
||||
timestamp: new Date(timestamp).toISOString(),
|
||||
userId,
|
||||
metadata: this.generateMetadata(eventType)
|
||||
});
|
||||
}
|
||||
|
||||
// Sort by timestamp
|
||||
events.sort((a, b) => {
|
||||
const aTime = typeof a.timestamp === 'string' ? new Date(a.timestamp).getTime() : 0;
|
||||
const bTime = typeof b.timestamp === 'string' ? new Date(b.timestamp).getTime() : 0;
|
||||
return aTime - bTime;
|
||||
});
|
||||
|
||||
return events;
|
||||
}
|
||||
|
||||
private generateTimestamps(
|
||||
count: number,
|
||||
start: number,
|
||||
end: number,
|
||||
distribution: 'uniform' | 'poisson' | 'normal'
|
||||
): number[] {
|
||||
const timestamps: number[] = [];
|
||||
const range = end - start;
|
||||
|
||||
switch (distribution) {
|
||||
case 'uniform':
|
||||
for (let i = 0; i < count; i++) {
|
||||
timestamps.push(start + Math.random() * range);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'poisson':
|
||||
// Exponential inter-arrival times
|
||||
let time = start;
|
||||
const lambda = count / range; // events per ms
|
||||
for (let i = 0; i < count && time < end; i++) {
|
||||
const interval = -Math.log(1 - Math.random()) / lambda;
|
||||
time += interval;
|
||||
timestamps.push(Math.min(time, end));
|
||||
}
|
||||
break;
|
||||
|
||||
case 'normal':
|
||||
// Normal distribution around midpoint
|
||||
const mean = start + range / 2;
|
||||
const stdDev = range / 6; // 99.7% within range
|
||||
for (let i = 0; i < count; i++) {
|
||||
const u1 = Math.random();
|
||||
const u2 = Math.random();
|
||||
const z = Math.sqrt(-2 * Math.log(u1)) * Math.cos(2 * Math.PI * u2);
|
||||
const timestamp = mean + z * stdDev;
|
||||
timestamps.push(Math.max(start, Math.min(end, timestamp)));
|
||||
}
|
||||
break;
|
||||
}
|
||||
|
||||
return timestamps.sort((a, b) => a - b);
|
||||
}
|
||||
|
||||
private generateMetadata(eventType: string): Record<string, unknown> {
|
||||
const metadata: Record<string, unknown> = {};
|
||||
|
||||
switch (eventType.toLowerCase()) {
|
||||
case 'click':
|
||||
metadata.element = ['button', 'link', 'image'][Math.floor(Math.random() * 3)];
|
||||
metadata.position = { x: Math.floor(Math.random() * 1920), y: Math.floor(Math.random() * 1080) };
|
||||
break;
|
||||
|
||||
case 'view':
|
||||
metadata.page = `/page${Math.floor(Math.random() * 10)}`;
|
||||
metadata.duration = Math.floor(Math.random() * 300); // seconds
|
||||
break;
|
||||
|
||||
case 'purchase':
|
||||
metadata.amount = Math.floor(Math.random() * 1000) / 10;
|
||||
metadata.currency = 'USD';
|
||||
metadata.items = Math.floor(Math.random() * 5) + 1;
|
||||
break;
|
||||
|
||||
default:
|
||||
metadata.type = eventType;
|
||||
break;
|
||||
}
|
||||
|
||||
return metadata;
|
||||
}
|
||||
}
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/index.d.ts.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/index.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,aAAa,EAAE,MAAM,WAAW,CAAC;AAC1C,OAAO,EAAE,mBAAmB,EAAE,MAAM,iBAAiB,CAAC;AACtD,OAAO,EAAE,cAAc,EAAE,MAAM,aAAa,CAAC;AAC7C,OAAO,EAAE,mBAAmB,EAAE,MAAM,iBAAiB,CAAC;AAEtD,YAAY,EACV,gBAAgB,EAChB,iBAAiB,EACjB,YAAY,EACb,MAAM,aAAa,CAAC"}
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/index.js.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/index.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":";AAAA;;GAEG;;;AAEH,qCAA0C;AAAjC,wGAAA,aAAa,OAAA;AACtB,iDAAsD;AAA7C,oHAAA,mBAAmB,OAAA;AAC5B,yCAA6C;AAApC,2GAAA,cAAc,OAAA;AACvB,iDAAsD;AAA7C,oHAAA,mBAAmB,OAAA"}
|
||||
14
vendor/ruvector/npm/packages/agentic-synth/src/generators/index.ts
vendored
Normal file
14
vendor/ruvector/npm/packages/agentic-synth/src/generators/index.ts
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
/**
|
||||
* Generator exports
|
||||
*/
|
||||
|
||||
export { BaseGenerator } from './base.js';
|
||||
export { TimeSeriesGenerator } from './timeseries.js';
|
||||
export { EventGenerator } from './events.js';
|
||||
export { StructuredGenerator } from './structured.js';
|
||||
|
||||
export type {
|
||||
GeneratorOptions,
|
||||
TimeSeriesOptions,
|
||||
EventOptions
|
||||
} from '../types.js';
|
||||
20
vendor/ruvector/npm/packages/agentic-synth/src/generators/structured.d.ts
vendored
Normal file
20
vendor/ruvector/npm/packages/agentic-synth/src/generators/structured.d.ts
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
/**
 * Structured data generator (declarations).
 *
 * Generates records conforming to a caller-supplied schema; also ships
 * built-in domain schemas and a JSON Schema converter.
 */
import { BaseGenerator } from './base.js';
import { GeneratorOptions } from '../types.js';
export declare class StructuredGenerator extends BaseGenerator<GeneratorOptions> {
    /** Build the LLM prompt; throws ValidationError when options.schema is absent. */
    protected generatePrompt(options: GeneratorOptions): string;
    /** Extract the first JSON array from the response; validates against options.schema when set. */
    protected parseResult(response: string, options: GeneratorOptions): unknown[];
    private validateAgainstSchema;
    /**
     * Generate structured data with specific domain
     * ('users' | 'products' | 'transactions', case-insensitive).
     */
    generateDomain(domain: string, options: GeneratorOptions): Promise<unknown[]>;
    /**
     * Generate data from JSON schema
     * (only `properties` and `required` are honored).
     */
    generateFromJSONSchema(jsonSchema: Record<string, unknown>, options: GeneratorOptions): Promise<unknown[]>;
    private convertJSONSchema;
}
//# sourceMappingURL=structured.d.ts.map
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/structured.d.ts.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/structured.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"structured.d.ts","sourceRoot":"","sources":["structured.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,aAAa,EAAE,MAAM,WAAW,CAAC;AAC1C,OAAO,EAAE,gBAAgB,EAA4C,MAAM,aAAa,CAAC;AAEzF,qBAAa,mBAAoB,SAAQ,aAAa,CAAC,gBAAgB,CAAC;IACtE,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,gBAAgB,GAAG,MAAM;IAiC3D,SAAS,CAAC,WAAW,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,gBAAgB,GAAG,OAAO,EAAE;IAgC7E,OAAO,CAAC,qBAAqB;IAgD7B;;OAEG;IACG,cAAc,CAAC,MAAM,EAAE,MAAM,EAAE,OAAO,EAAE,gBAAgB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;IAyCnF;;OAEG;IACG,sBAAsB,CAAC,UAAU,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,EAAE,OAAO,EAAE,gBAAgB,GAAG,OAAO,CAAC,OAAO,EAAE,CAAC;IAUhH,OAAO,CAAC,iBAAiB;CAwB1B"}
|
||||
172
vendor/ruvector/npm/packages/agentic-synth/src/generators/structured.js
vendored
Normal file
172
vendor/ruvector/npm/packages/agentic-synth/src/generators/structured.js
vendored
Normal file
@@ -0,0 +1,172 @@
|
||||
"use strict";
|
||||
/**
|
||||
* Structured data generator
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.StructuredGenerator = void 0;
|
||||
const base_js_1 = require("./base.js");
|
||||
const types_js_1 = require("../types.js");
|
||||
/**
 * Generates records that conform to a caller-supplied schema, with built-in
 * schemas for a few common domains (users, products, transactions) and a
 * converter for standard JSON Schema documents.
 */
class StructuredGenerator extends base_js_1.BaseGenerator {
    // Build the LLM prompt asking for `count` records matching the schema.
    // Throws ValidationError when no schema is supplied.
    generatePrompt(options) {
        // NOTE(review): `format` is destructured but never used below — presumably
        // reserved for csv/jsonl output; confirm before removing.
        const { count = 10, schema, constraints, format = 'json' } = options;
        if (!schema) {
            throw new types_js_1.ValidationError('Schema is required for structured data generation', {
                options
            });
        }
        let prompt = `Generate ${count} realistic data records matching the following schema:

Schema:
${JSON.stringify(schema, null, 2)}

`;
        if (constraints) {
            prompt += `\nConstraints:\n${JSON.stringify(constraints, null, 2)}\n`;
        }
        prompt += `
Requirements:
1. Generate realistic, diverse data that fits the schema
2. Ensure all required fields are present
3. Follow data type constraints strictly
4. Make data internally consistent and realistic
5. Include varied but plausible values

Return ONLY a JSON array of ${count} objects, no additional text.`;
        return prompt;
    }
    // Extract the first JSON array from the model response and, when a schema
    // was provided, validate every record against it. Any failure is rethrown
    // as a ValidationError carrying a 200-char response excerpt.
    parseResult(response, options) {
        try {
            // Extract JSON from response
            const jsonMatch = response.match(/\[[\s\S]*\]/);
            if (!jsonMatch) {
                throw new Error('No JSON array found in response');
            }
            const data = JSON.parse(jsonMatch[0]);
            if (!Array.isArray(data)) {
                throw new Error('Response is not an array');
            }
            // Validate against schema if provided
            if (options.schema) {
                return data.map((item, index) => {
                    this.validateAgainstSchema(item, options.schema, index);
                    return item;
                });
            }
            return data;
        }
        catch (error) {
            const errorMessage = error instanceof Error ? error.message : 'Unknown error';
            throw new types_js_1.ValidationError(`Failed to parse structured data: ${errorMessage}`, {
                response: response.substring(0, 200),
                error
            });
        }
    }
    // Recursively check one record against the schema: required fields, field
    // types ('array' is special-cased because typeof [] === 'object'), and
    // nested `properties`. Throws ValidationError on the first violation.
    validateAgainstSchema(item, schema, index) {
        if (typeof item !== 'object' || item === null) {
            throw new types_js_1.ValidationError(`Item at index ${index} is not an object`, { item, schema });
        }
        const record = item;
        for (const [key, schemaValue] of Object.entries(schema)) {
            // Non-object schema entries carry no constraints; skip them.
            if (typeof schemaValue !== 'object' || schemaValue === null)
                continue;
            const fieldSchema = schemaValue;
            // Check required fields
            if (fieldSchema.required && !(key in record)) {
                throw new types_js_1.ValidationError(`Missing required field '${key}' at index ${index}`, {
                    item,
                    schema
                });
            }
            // Check types
            if (key in record && fieldSchema.type) {
                const actualType = typeof record[key];
                const expectedType = fieldSchema.type;
                if (expectedType === 'array' && !Array.isArray(record[key])) {
                    throw new types_js_1.ValidationError(`Field '${key}' should be array at index ${index}`, { item, schema });
                }
                else if (expectedType !== 'array' && actualType !== expectedType) {
                    throw new types_js_1.ValidationError(`Field '${key}' has wrong type at index ${index}. Expected ${expectedType}, got ${actualType}`, { item, schema });
                }
            }
            // Check nested objects
            // NOTE(review): typeof null === 'object', so a null value reaches the
            // recursive call and fails there with an "is not an object" message —
            // confirm whether null should be rejected earlier with a clearer error.
            if (fieldSchema.properties && typeof record[key] === 'object') {
                this.validateAgainstSchema(record[key], fieldSchema.properties, index);
            }
        }
    }
    /**
     * Generate structured data with specific domain
     * @param {string} domain - One of 'users', 'products', 'transactions'
     *   (case-insensitive).
     * @param {Object} options - GeneratorOptions, merged with the built-in schema.
     * @returns {Promise<Array>} The generated records.
     * @throws {ValidationError} For an unknown domain.
     */
    async generateDomain(domain, options) {
        const domainSchemas = {
            users: {
                id: { type: 'string', required: true },
                name: { type: 'string', required: true },
                email: { type: 'string', required: true },
                age: { type: 'number', required: true },
                role: { type: 'string', required: false },
                createdAt: { type: 'string', required: true }
            },
            products: {
                id: { type: 'string', required: true },
                name: { type: 'string', required: true },
                price: { type: 'number', required: true },
                category: { type: 'string', required: true },
                inStock: { type: 'boolean', required: true },
                description: { type: 'string', required: false }
            },
            transactions: {
                id: { type: 'string', required: true },
                userId: { type: 'string', required: true },
                amount: { type: 'number', required: true },
                currency: { type: 'string', required: true },
                status: { type: 'string', required: true },
                timestamp: { type: 'string', required: true }
            }
        };
        const schema = domainSchemas[domain.toLowerCase()];
        if (!schema) {
            throw new types_js_1.ValidationError(`Unknown domain: ${domain}`, {
                availableDomains: Object.keys(domainSchemas)
            });
        }
        return this.generate({
            ...options,
            schema
        }).then(result => result.data);
    }
    /**
     * Generate data from JSON schema
     * @param {Object} jsonSchema - A standard JSON Schema document (only
     *   `properties` and `required` are honored).
     * @param {Object} options - GeneratorOptions.
     * @returns {Promise<Array>} The generated records.
     */
    async generateFromJSONSchema(jsonSchema, options) {
        // Convert JSON Schema to internal schema format
        const schema = this.convertJSONSchema(jsonSchema);
        return this.generate({
            ...options,
            schema
        }).then(result => result.data);
    }
    // Translate a JSON Schema `properties`/`required` pair into the internal
    // { field: { type, required, properties? } } shape. Non-string types fall
    // back to 'string'; nested object properties are converted recursively.
    convertJSONSchema(jsonSchema) {
        const schema = {};
        if (jsonSchema.properties && typeof jsonSchema.properties === 'object') {
            const properties = jsonSchema.properties;
            for (const [key, value] of Object.entries(properties)) {
                if (typeof value !== 'object' || value === null)
                    continue;
                const prop = value;
                const field = {
                    type: typeof prop.type === 'string' ? prop.type : 'string',
                    required: Array.isArray(jsonSchema.required) && jsonSchema.required.includes(key) || false
                };
                if (prop.properties) {
                    field.properties = this.convertJSONSchema(prop);
                }
                schema[key] = field;
            }
        }
        return schema;
    }
}
exports.StructuredGenerator = StructuredGenerator;
|
||||
//# sourceMappingURL=structured.js.map
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/structured.js.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/structured.js.map
vendored
Normal file
File diff suppressed because one or more lines are too long
203
vendor/ruvector/npm/packages/agentic-synth/src/generators/structured.ts
vendored
Normal file
203
vendor/ruvector/npm/packages/agentic-synth/src/generators/structured.ts
vendored
Normal file
@@ -0,0 +1,203 @@
|
||||
/**
|
||||
* Structured data generator
|
||||
*/
|
||||
|
||||
import { BaseGenerator } from './base.js';
|
||||
import { GeneratorOptions, ValidationError, DataSchema, SchemaField } from '../types.js';
|
||||
|
||||
export class StructuredGenerator extends BaseGenerator<GeneratorOptions> {
|
||||
protected generatePrompt(options: GeneratorOptions): string {
|
||||
const { count = 10, schema, constraints, format = 'json' } = options;
|
||||
|
||||
if (!schema) {
|
||||
throw new ValidationError('Schema is required for structured data generation', {
|
||||
options
|
||||
});
|
||||
}
|
||||
|
||||
let prompt = `Generate ${count} realistic data records matching the following schema:
|
||||
|
||||
Schema:
|
||||
${JSON.stringify(schema, null, 2)}
|
||||
|
||||
`;
|
||||
|
||||
if (constraints) {
|
||||
prompt += `\nConstraints:\n${JSON.stringify(constraints, null, 2)}\n`;
|
||||
}
|
||||
|
||||
prompt += `
|
||||
Requirements:
|
||||
1. Generate realistic, diverse data that fits the schema
|
||||
2. Ensure all required fields are present
|
||||
3. Follow data type constraints strictly
|
||||
4. Make data internally consistent and realistic
|
||||
5. Include varied but plausible values
|
||||
|
||||
Return ONLY a JSON array of ${count} objects, no additional text.`;
|
||||
|
||||
return prompt;
|
||||
}
|
||||
|
||||
protected parseResult(response: string, options: GeneratorOptions): unknown[] {
|
||||
try {
|
||||
// Extract JSON from response
|
||||
const jsonMatch = response.match(/\[[\s\S]*\]/);
|
||||
if (!jsonMatch) {
|
||||
throw new Error('No JSON array found in response');
|
||||
}
|
||||
|
||||
const data = JSON.parse(jsonMatch[0]);
|
||||
|
||||
if (!Array.isArray(data)) {
|
||||
throw new Error('Response is not an array');
|
||||
}
|
||||
|
||||
// Validate against schema if provided
|
||||
if (options.schema) {
|
||||
return data.map((item, index) => {
|
||||
this.validateAgainstSchema(item, options.schema!, index);
|
||||
return item;
|
||||
});
|
||||
}
|
||||
|
||||
return data;
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||
throw new ValidationError(`Failed to parse structured data: ${errorMessage}`, {
|
||||
response: response.substring(0, 200),
|
||||
error
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
private validateAgainstSchema(
|
||||
item: unknown,
|
||||
schema: Record<string, unknown>,
|
||||
index: number
|
||||
): void {
|
||||
if (typeof item !== 'object' || item === null) {
|
||||
throw new ValidationError(`Item at index ${index} is not an object`, { item, schema });
|
||||
}
|
||||
|
||||
const record = item as Record<string, unknown>;
|
||||
for (const [key, schemaValue] of Object.entries(schema)) {
|
||||
if (typeof schemaValue !== 'object' || schemaValue === null) continue;
|
||||
|
||||
const fieldSchema = schemaValue as Record<string, unknown>;
|
||||
|
||||
// Check required fields
|
||||
if (fieldSchema.required && !(key in record)) {
|
||||
throw new ValidationError(`Missing required field '${key}' at index ${index}`, {
|
||||
item,
|
||||
schema
|
||||
});
|
||||
}
|
||||
|
||||
// Check types
|
||||
if (key in record && fieldSchema.type) {
|
||||
const actualType = typeof record[key];
|
||||
const expectedType = fieldSchema.type;
|
||||
|
||||
if (expectedType === 'array' && !Array.isArray(record[key])) {
|
||||
throw new ValidationError(
|
||||
`Field '${key}' should be array at index ${index}`,
|
||||
{ item, schema }
|
||||
);
|
||||
} else if (expectedType !== 'array' && actualType !== expectedType) {
|
||||
throw new ValidationError(
|
||||
`Field '${key}' has wrong type at index ${index}. Expected ${expectedType}, got ${actualType}`,
|
||||
{ item, schema }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Check nested objects
|
||||
if (fieldSchema.properties && typeof record[key] === 'object') {
|
||||
this.validateAgainstSchema(record[key], fieldSchema.properties as Record<string, unknown>, index);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate structured data with specific domain
|
||||
*/
|
||||
async generateDomain(domain: string, options: GeneratorOptions): Promise<unknown[]> {
|
||||
const domainSchemas: Record<string, DataSchema> = {
|
||||
users: {
|
||||
id: { type: 'string', required: true },
|
||||
name: { type: 'string', required: true },
|
||||
email: { type: 'string', required: true },
|
||||
age: { type: 'number', required: true },
|
||||
role: { type: 'string', required: false },
|
||||
createdAt: { type: 'string', required: true }
|
||||
},
|
||||
products: {
|
||||
id: { type: 'string', required: true },
|
||||
name: { type: 'string', required: true },
|
||||
price: { type: 'number', required: true },
|
||||
category: { type: 'string', required: true },
|
||||
inStock: { type: 'boolean', required: true },
|
||||
description: { type: 'string', required: false }
|
||||
},
|
||||
transactions: {
|
||||
id: { type: 'string', required: true },
|
||||
userId: { type: 'string', required: true },
|
||||
amount: { type: 'number', required: true },
|
||||
currency: { type: 'string', required: true },
|
||||
status: { type: 'string', required: true },
|
||||
timestamp: { type: 'string', required: true }
|
||||
}
|
||||
};
|
||||
|
||||
const schema = domainSchemas[domain.toLowerCase()];
|
||||
if (!schema) {
|
||||
throw new ValidationError(`Unknown domain: ${domain}`, {
|
||||
availableDomains: Object.keys(domainSchemas)
|
||||
});
|
||||
}
|
||||
|
||||
return this.generate({
|
||||
...options,
|
||||
schema
|
||||
}).then(result => result.data);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate data from JSON schema
|
||||
*/
|
||||
async generateFromJSONSchema(jsonSchema: Record<string, unknown>, options: GeneratorOptions): Promise<unknown[]> {
|
||||
// Convert JSON Schema to internal schema format
|
||||
const schema = this.convertJSONSchema(jsonSchema);
|
||||
|
||||
return this.generate({
|
||||
...options,
|
||||
schema
|
||||
}).then(result => result.data);
|
||||
}
|
||||
|
||||
private convertJSONSchema(jsonSchema: Record<string, unknown>): DataSchema {
|
||||
const schema: DataSchema = {};
|
||||
|
||||
if (jsonSchema.properties && typeof jsonSchema.properties === 'object') {
|
||||
const properties = jsonSchema.properties as Record<string, unknown>;
|
||||
for (const [key, value] of Object.entries(properties)) {
|
||||
if (typeof value !== 'object' || value === null) continue;
|
||||
|
||||
const prop = value as Record<string, unknown>;
|
||||
const field: SchemaField = {
|
||||
type: typeof prop.type === 'string' ? prop.type : 'string',
|
||||
required: Array.isArray(jsonSchema.required) && jsonSchema.required.includes(key) || false
|
||||
};
|
||||
|
||||
if (prop.properties) {
|
||||
field.properties = this.convertJSONSchema(prop);
|
||||
}
|
||||
|
||||
schema[key] = field;
|
||||
}
|
||||
}
|
||||
|
||||
return schema;
|
||||
}
|
||||
}
|
||||
15
vendor/ruvector/npm/packages/agentic-synth/src/generators/timeseries.d.ts
vendored
Normal file
15
vendor/ruvector/npm/packages/agentic-synth/src/generators/timeseries.d.ts
vendored
Normal file
@@ -0,0 +1,15 @@
|
||||
/**
 * Time-series data generator
 */
import { BaseGenerator } from './base.js';
import { TimeSeriesOptions } from '../types.js';
export declare class TimeSeriesGenerator extends BaseGenerator<TimeSeriesOptions> {
    /** Builds the LLM prompt describing the requested series (count, range, metrics, trend, noise). */
    protected generatePrompt(options: TimeSeriesOptions): string;
    /** Parses the model response into records; throws ValidationError on malformed JSON or missing metrics. */
    protected parseResult(response: string, options: TimeSeriesOptions): unknown[];
    /**
     * Generate synthetic time-series with local computation (faster for simple patterns)
     */
    generateLocal(options: TimeSeriesOptions): Promise<Array<Record<string, unknown>>>;
    /** Converts an interval string (e.g. "1h", "30s") to milliseconds. */
    private parseInterval;
}
//# sourceMappingURL=timeseries.d.ts.map
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/timeseries.d.ts.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/timeseries.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"timeseries.d.ts","sourceRoot":"","sources":["timeseries.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,aAAa,EAAE,MAAM,WAAW,CAAC;AAC1C,OAAO,EAAE,iBAAiB,EAAmB,MAAM,aAAa,CAAC;AAEjE,qBAAa,mBAAoB,SAAQ,aAAa,CAAC,iBAAiB,CAAC;IACvE,SAAS,CAAC,cAAc,CAAC,OAAO,EAAE,iBAAiB,GAAG,MAAM;IA0D5D,SAAS,CAAC,WAAW,CAAC,QAAQ,EAAE,MAAM,EAAE,OAAO,EAAE,iBAAiB,GAAG,OAAO,EAAE;IAkD9E;;OAEG;IACG,aAAa,CAAC,OAAO,EAAE,iBAAiB,GAAG,OAAO,CAAC,KAAK,CAAC,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC,CAAC;IAgDxF,OAAO,CAAC,aAAa;CA2BtB"}
|
||||
147
vendor/ruvector/npm/packages/agentic-synth/src/generators/timeseries.js
vendored
Normal file
147
vendor/ruvector/npm/packages/agentic-synth/src/generators/timeseries.js
vendored
Normal file
@@ -0,0 +1,147 @@
|
||||
"use strict";
|
||||
/**
|
||||
* Time-series data generator
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.TimeSeriesGenerator = void 0;
|
||||
const base_js_1 = require("./base.js");
|
||||
const types_js_1 = require("../types.js");
|
||||
/**
 * Time-series data generator (compiled CommonJS output of timeseries.ts).
 *
 * BUGFIX applied in parseResult: the original spread `record` AFTER the
 * normalized `timestamp` property, so the raw `record.timestamp` clobbered
 * the ISO-normalized value and the normalization never took effect.
 */
class TimeSeriesGenerator extends base_js_1.BaseGenerator {
    /**
     * Build the LLM prompt describing the requested time-series.
     * @param {object} options - TimeSeriesOptions (count, dates, interval, metrics, trend, seasonality, noise).
     * @returns {string} Prompt instructing the model to emit a bare JSON array.
     */
    generatePrompt(options) {
        const { count = 100, startDate = new Date(), endDate, interval = '1h', metrics = ['value'], trend = 'stable', seasonality = false, noise = 0.1, schema, constraints } = options;
        // Default window: 24 hours from now when no endDate was supplied.
        const end = endDate || new Date(Date.now() + 24 * 60 * 60 * 1000);
        let prompt = `Generate ${count} time-series data points with the following specifications:

Time Range:
- Start: ${startDate}
- End: ${end}
- Interval: ${interval}

Metrics: ${metrics.join(', ')}

Characteristics:
- Trend: ${trend}
- Seasonality: ${seasonality ? 'Include daily/weekly patterns' : 'No seasonality'}
- Noise level: ${noise * 100}%

`;
        if (schema) {
            prompt += `\nSchema:\n${JSON.stringify(schema, null, 2)}\n`;
        }
        if (constraints) {
            prompt += `\nConstraints:\n${JSON.stringify(constraints, null, 2)}\n`;
        }
        prompt += `
Generate realistic time-series data with timestamps and metric values.
Return the data as a JSON array where each object has:
- timestamp: ISO 8601 formatted date string
- ${metrics.map(m => `${m}: numeric value`).join('\n- ')}

Ensure:
1. Timestamps are evenly spaced according to the interval
2. Values follow the specified trend pattern
3. Noise is applied realistically
4. Seasonality patterns are natural if enabled
5. All values are within reasonable ranges

Return ONLY the JSON array, no additional text.`;
        return prompt;
    }
    /**
     * Parse the model response into validated records.
     * @throws {ValidationError} when no JSON array is found, the payload is
     *   not an array, a record lacks a timestamp, or a requested metric is
     *   missing / non-numeric.
     */
    parseResult(response, options) {
        try {
            // Extract JSON from response
            const jsonMatch = response.match(/\[[\s\S]*\]/);
            if (!jsonMatch) {
                throw new Error('No JSON array found in response');
            }
            const data = JSON.parse(jsonMatch[0]);
            if (!Array.isArray(data)) {
                throw new Error('Response is not an array');
            }
            // Validate and transform data
            return data.map((item, index) => {
                if (typeof item !== 'object' || item === null) {
                    throw new types_js_1.ValidationError(`Invalid data item at index ${index}`, { item });
                }
                const record = item;
                if (!record.timestamp) {
                    throw new types_js_1.ValidationError(`Missing timestamp at index ${index}`, { item });
                }
                // Ensure all specified metrics are present
                const metrics = options.metrics || ['value'];
                for (const metric of metrics) {
                    if (typeof record[metric] !== 'number') {
                        throw new types_js_1.ValidationError(`Missing or invalid metric '${metric}' at index ${index}`, { item });
                    }
                }
                // BUGFIX: spread first, then overwrite with the normalized ISO
                // timestamp. The previous order let the raw value win.
                return {
                    ...record,
                    timestamp: new Date(record.timestamp).toISOString()
                };
            });
        }
        catch (error) {
            const errorMessage = error instanceof Error ? error.message : 'Unknown error';
            throw new types_js_1.ValidationError(`Failed to parse time-series data: ${errorMessage}`, {
                response: response.substring(0, 200),
                error
            });
        }
    }
    /**
     * Generate synthetic time-series with local computation (faster for simple patterns)
     */
    async generateLocal(options) {
        const { count = 100, startDate = new Date(), interval = '1h', metrics = ['value'], trend = 'stable', seasonality = false, noise = 0.1 } = options;
        const start = new Date(startDate).getTime();
        const intervalMs = this.parseInterval(interval);
        const data = [];
        // Base level around which trend, seasonality and noise are applied
        // (never reassigned, so const).
        const baseValue = 100;
        const trendRate = trend === 'up' ? 0.01 : trend === 'down' ? -0.01 : 0;
        for (let i = 0; i < count; i++) {
            const timestamp = new Date(start + i * intervalMs);
            const point = { timestamp: timestamp.toISOString() };
            for (const metric of metrics) {
                let value = baseValue;
                // Apply linear trend proportional to elapsed steps
                value += baseValue * trendRate * i;
                // Apply daily + weekly sinusoidal seasonality
                if (seasonality) {
                    const hourOfDay = timestamp.getHours();
                    const dayOfWeek = timestamp.getDay();
                    value += Math.sin((hourOfDay / 24) * Math.PI * 2) * baseValue * 0.1;
                    value += Math.sin((dayOfWeek / 7) * Math.PI * 2) * baseValue * 0.05;
                }
                // Apply uniform noise in [-noise, +noise] of baseValue
                value += (Math.random() - 0.5) * 2 * baseValue * noise;
                point[metric] = Math.round(value * 100) / 100;
            }
            data.push(point);
        }
        return data;
    }
    /**
     * Convert an interval string such as "30s", "5m", "1h" or "2d" to milliseconds.
     * @throws {ValidationError} on malformed input or unknown unit.
     */
    parseInterval(interval) {
        const match = interval.match(/^(\d+)(s|m|h|d)$/);
        if (!match) {
            throw new types_js_1.ValidationError('Invalid interval format', { interval });
        }
        const [, amount, unit] = match;
        // Strict mode: ensure captured groups are defined
        if (!amount || !unit) {
            throw new types_js_1.ValidationError('Invalid interval format: missing amount or unit', { interval, match });
        }
        const multipliers = {
            s: 1000,
            m: 60 * 1000,
            h: 60 * 60 * 1000,
            d: 24 * 60 * 60 * 1000
        };
        const multiplier = multipliers[unit];
        if (multiplier === undefined) {
            throw new types_js_1.ValidationError('Invalid interval unit', { interval, unit });
        }
        return parseInt(amount, 10) * multiplier;
    }
}
|
||||
exports.TimeSeriesGenerator = TimeSeriesGenerator;
|
||||
//# sourceMappingURL=timeseries.js.map
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/timeseries.js.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/generators/timeseries.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"timeseries.js","sourceRoot":"","sources":["timeseries.ts"],"names":[],"mappings":";AAAA;;GAEG;;;AAEH,uCAA0C;AAC1C,0CAAiE;AAEjE,MAAa,mBAAoB,SAAQ,uBAAgC;IAC7D,cAAc,CAAC,OAA0B;QACjD,MAAM,EACJ,KAAK,GAAG,GAAG,EACX,SAAS,GAAG,IAAI,IAAI,EAAE,EACtB,OAAO,EACP,QAAQ,GAAG,IAAI,EACf,OAAO,GAAG,CAAC,OAAO,CAAC,EACnB,KAAK,GAAG,QAAQ,EAChB,WAAW,GAAG,KAAK,EACnB,KAAK,GAAG,GAAG,EACX,MAAM,EACN,WAAW,EACZ,GAAG,OAAO,CAAC;QAEZ,MAAM,GAAG,GAAG,OAAO,IAAI,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,IAAI,CAAC,CAAC;QAElE,IAAI,MAAM,GAAG,YAAY,KAAK;;;WAGvB,SAAS;SACX,GAAG;cACE,QAAQ;;WAEX,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC;;;WAGlB,KAAK;iBACC,WAAW,CAAC,CAAC,CAAC,+BAA+B,CAAC,CAAC,CAAC,gBAAgB;iBAChE,KAAK,GAAG,GAAG;;CAE3B,CAAC;QAEE,IAAI,MAAM,EAAE,CAAC;YACX,MAAM,IAAI,cAAc,IAAI,CAAC,SAAS,CAAC,MAAM,EAAE,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC;QAC9D,CAAC;QAED,IAAI,WAAW,EAAE,CAAC;YAChB,MAAM,IAAI,mBAAmB,IAAI,CAAC,SAAS,CAAC,WAAW,EAAE,IAAI,EAAE,CAAC,CAAC,IAAI,CAAC;QACxE,CAAC;QAED,MAAM,IAAI;;;;IAIV,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,GAAG,CAAC,iBAAiB,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC;;;;;;;;;gDASR,CAAC;QAE7C,OAAO,MAAM,CAAC;IAChB,CAAC;IAES,WAAW,CAAC,QAAgB,EAAE,OAA0B;QAChE,IAAI,CAAC;YACH,6BAA6B;YAC7B,MAAM,SAAS,GAAG,QAAQ,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC;YAChD,IAAI,CAAC,SAAS,EAAE,CAAC;gBACf,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;YACrD,CAAC;YAED,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,CAAC,CAAC;YAEtC,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC;gBACzB,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAC;YAC9C,CAAC;YAED,8BAA8B;YAC9B,OAAO,IAAI,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,KAAK,EAAE,EAAE;gBAC9B,IAAI,OAAO,IAAI,KAAK,QAAQ,IAAI,IAAI,KAAK,IAAI,EAAE,CAAC;oBAC9C,MAAM,IAAI,0BAAe,CAAC,8BAA8B,KAAK,EAAE,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC;gBAC7E,CAAC;gBAED,MAAM,MAAM,GAAG,IAA+B,CAAC;gBAC/C,IAAI,CAAC,MAAM,CAAC,SAAS,EAAE,CAAC;oBACtB,MAAM,IAAI,0BAAe,CAAC,8BAA8B,KAAK,EAAE,EAAE,EAAE,IAAI,EAAE,CAAC,CAAC;gBAC7E,CAAC;gBAED,2CAA2C;gBAC3C,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,IAAI,CAAC,OAAO,CAAC,CAAC;gBAC7C,KAAK,MAAM,MAAM,IAAI,O
AAO,EAAE,CAAC;oBAC7B,IAAI,OAAO,MAAM,CAAC,MAAM,CAAC,KAAK,QAAQ,EAAE,CAAC;wBACvC,MAAM,IAAI,0BAAe,CACvB,8BAA8B,MAAM,cAAc,KAAK,EAAE,EACzD,EAAE,IAAI,EAAE,CACT,CAAC;oBACJ,CAAC;gBACH,CAAC;gBAED,OAAO;oBACL,SAAS,EAAE,IAAI,IAAI,CAAC,MAAM,CAAC,SAAmC,CAAC,CAAC,WAAW,EAAE;oBAC7E,GAAG,MAAM;iBACV,CAAC;YACJ,CAAC,CAAC,CAAC;QACL,CAAC;QAAC,OAAO,KAAK,EAAE,CAAC;YACf,MAAM,YAAY,GAAG,KAAK,YAAY,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC;YAC9E,MAAM,IAAI,0BAAe,CAAC,qCAAqC,YAAY,EAAE,EAAE;gBAC7E,QAAQ,EAAE,QAAQ,CAAC,SAAS,CAAC,CAAC,EAAE,GAAG,CAAC;gBACpC,KAAK;aACN,CAAC,CAAC;QACL,CAAC;IACH,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,aAAa,CAAC,OAA0B;QAC5C,MAAM,EACJ,KAAK,GAAG,GAAG,EACX,SAAS,GAAG,IAAI,IAAI,EAAE,EACtB,QAAQ,GAAG,IAAI,EACf,OAAO,GAAG,CAAC,OAAO,CAAC,EACnB,KAAK,GAAG,QAAQ,EAChB,WAAW,GAAG,KAAK,EACnB,KAAK,GAAG,GAAG,EACZ,GAAG,OAAO,CAAC;QAEZ,MAAM,KAAK,GAAG,IAAI,IAAI,CAAC,SAAS,CAAC,CAAC,OAAO,EAAE,CAAC;QAC5C,MAAM,UAAU,GAAG,IAAI,CAAC,aAAa,CAAC,QAAQ,CAAC,CAAC;QAChD,MAAM,IAAI,GAAmC,EAAE,CAAC;QAEhD,IAAI,SAAS,GAAG,GAAG,CAAC;QACpB,MAAM,SAAS,GAAG,KAAK,KAAK,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,KAAK,KAAK,MAAM,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAC;QAEvE,KAAK,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,KAAK,EAAE,CAAC,EAAE,EAAE,CAAC;YAC/B,MAAM,SAAS,GAAG,IAAI,IAAI,CAAC,KAAK,GAAG,CAAC,GAAG,UAAU,CAAC,CAAC;YACnD,MAAM,KAAK,GAA4B,EAAE,SAAS,EAAE,SAAS,CAAC,WAAW,EAAE,EAAE,CAAC;YAE9E,KAAK,MAAM,MAAM,IAAI,OAAO,EAAE,CAAC;gBAC7B,IAAI,KAAK,GAAG,SAAS,CAAC;gBAEtB,cAAc;gBACd,KAAK,IAAI,SAAS,GAAG,SAAS,GAAG,CAAC,CAAC;gBAEnC,oBAAoB;gBACpB,IAAI,WAAW,EAAE,CAAC;oBAChB,MAAM,SAAS,GAAG,SAAS,CAAC,QAAQ,EAAE,CAAC;oBACvC,MAAM,SAAS,GAAG,SAAS,CAAC,MAAM,EAAE,CAAC;oBACrC,KAAK,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,GAAG,EAAE,CAAC,GAAG,IAAI,CAAC,EAAE,GAAG,CAAC,CAAC,GAAG,SAAS,GAAG,GAAG,CAAC;oBACpE,KAAK,IAAI,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,GAAG,CAAC,CAAC,GAAG,IAAI,CAAC,EAAE,GAAG,CAAC,CAAC,GAAG,SAAS,GAAG,IAAI,CAAC;gBACtE,CAAC;gBAED,cAAc;gBACd,KAAK,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE,GAAG,GAAG,CAAC,GAAG,CAAC,GAAG,SAAS,GAAG,KAAK,CAAC;gBAEvD,KAAK,CAAC,MAAM
,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,KAAK,GAAG,GAAG,CAAC,GAAG,GAAG,CAAC;YAChD,CAAC;YAED,IAAI,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC;QACnB,CAAC;QAED,OAAO,IAAI,CAAC;IACd,CAAC;IAEO,aAAa,CAAC,QAAgB;QACpC,MAAM,KAAK,GAAG,QAAQ,CAAC,KAAK,CAAC,kBAAkB,CAAC,CAAC;QACjD,IAAI,CAAC,KAAK,EAAE,CAAC;YACX,MAAM,IAAI,0BAAe,CAAC,yBAAyB,EAAE,EAAE,QAAQ,EAAE,CAAC,CAAC;QACrE,CAAC;QAED,MAAM,CAAC,EAAE,MAAM,EAAE,IAAI,CAAC,GAAG,KAAK,CAAC;QAE/B,kDAAkD;QAClD,IAAI,CAAC,MAAM,IAAI,CAAC,IAAI,EAAE,CAAC;YACrB,MAAM,IAAI,0BAAe,CAAC,iDAAiD,EAAE,EAAE,QAAQ,EAAE,KAAK,EAAE,CAAC,CAAC;QACpG,CAAC;QAED,MAAM,WAAW,GAA2B;YAC1C,CAAC,EAAE,IAAI;YACP,CAAC,EAAE,EAAE,GAAG,IAAI;YACZ,CAAC,EAAE,EAAE,GAAG,EAAE,GAAG,IAAI;YACjB,CAAC,EAAE,EAAE,GAAG,EAAE,GAAG,EAAE,GAAG,IAAI;SACvB,CAAC;QAEF,MAAM,UAAU,GAAG,WAAW,CAAC,IAAI,CAAC,CAAC;QACrC,IAAI,UAAU,KAAK,SAAS,EAAE,CAAC;YAC7B,MAAM,IAAI,0BAAe,CAAC,uBAAuB,EAAE,EAAE,QAAQ,EAAE,IAAI,EAAE,CAAC,CAAC;QACzE,CAAC;QAED,OAAO,QAAQ,CAAC,MAAM,EAAE,EAAE,CAAC,GAAG,UAAU,CAAC;IAC3C,CAAC;CACF;AA3LD,kDA2LC"}
|
||||
195
vendor/ruvector/npm/packages/agentic-synth/src/generators/timeseries.ts
vendored
Normal file
195
vendor/ruvector/npm/packages/agentic-synth/src/generators/timeseries.ts
vendored
Normal file
@@ -0,0 +1,195 @@
|
||||
/**
|
||||
* Time-series data generator
|
||||
*/
|
||||
|
||||
import { BaseGenerator } from './base.js';
|
||||
import { TimeSeriesOptions, ValidationError } from '../types.js';
|
||||
|
||||
export class TimeSeriesGenerator extends BaseGenerator<TimeSeriesOptions> {
|
||||
protected generatePrompt(options: TimeSeriesOptions): string {
|
||||
const {
|
||||
count = 100,
|
||||
startDate = new Date(),
|
||||
endDate,
|
||||
interval = '1h',
|
||||
metrics = ['value'],
|
||||
trend = 'stable',
|
||||
seasonality = false,
|
||||
noise = 0.1,
|
||||
schema,
|
||||
constraints
|
||||
} = options;
|
||||
|
||||
const end = endDate || new Date(Date.now() + 24 * 60 * 60 * 1000);
|
||||
|
||||
let prompt = `Generate ${count} time-series data points with the following specifications:
|
||||
|
||||
Time Range:
|
||||
- Start: ${startDate}
|
||||
- End: ${end}
|
||||
- Interval: ${interval}
|
||||
|
||||
Metrics: ${metrics.join(', ')}
|
||||
|
||||
Characteristics:
|
||||
- Trend: ${trend}
|
||||
- Seasonality: ${seasonality ? 'Include daily/weekly patterns' : 'No seasonality'}
|
||||
- Noise level: ${noise * 100}%
|
||||
|
||||
`;
|
||||
|
||||
if (schema) {
|
||||
prompt += `\nSchema:\n${JSON.stringify(schema, null, 2)}\n`;
|
||||
}
|
||||
|
||||
if (constraints) {
|
||||
prompt += `\nConstraints:\n${JSON.stringify(constraints, null, 2)}\n`;
|
||||
}
|
||||
|
||||
prompt += `
|
||||
Generate realistic time-series data with timestamps and metric values.
|
||||
Return the data as a JSON array where each object has:
|
||||
- timestamp: ISO 8601 formatted date string
|
||||
- ${metrics.map(m => `${m}: numeric value`).join('\n- ')}
|
||||
|
||||
Ensure:
|
||||
1. Timestamps are evenly spaced according to the interval
|
||||
2. Values follow the specified trend pattern
|
||||
3. Noise is applied realistically
|
||||
4. Seasonality patterns are natural if enabled
|
||||
5. All values are within reasonable ranges
|
||||
|
||||
Return ONLY the JSON array, no additional text.`;
|
||||
|
||||
return prompt;
|
||||
}
|
||||
|
||||
protected parseResult(response: string, options: TimeSeriesOptions): unknown[] {
|
||||
try {
|
||||
// Extract JSON from response
|
||||
const jsonMatch = response.match(/\[[\s\S]*\]/);
|
||||
if (!jsonMatch) {
|
||||
throw new Error('No JSON array found in response');
|
||||
}
|
||||
|
||||
const data = JSON.parse(jsonMatch[0]);
|
||||
|
||||
if (!Array.isArray(data)) {
|
||||
throw new Error('Response is not an array');
|
||||
}
|
||||
|
||||
// Validate and transform data
|
||||
return data.map((item, index) => {
|
||||
if (typeof item !== 'object' || item === null) {
|
||||
throw new ValidationError(`Invalid data item at index ${index}`, { item });
|
||||
}
|
||||
|
||||
const record = item as Record<string, unknown>;
|
||||
if (!record.timestamp) {
|
||||
throw new ValidationError(`Missing timestamp at index ${index}`, { item });
|
||||
}
|
||||
|
||||
// Ensure all specified metrics are present
|
||||
const metrics = options.metrics || ['value'];
|
||||
for (const metric of metrics) {
|
||||
if (typeof record[metric] !== 'number') {
|
||||
throw new ValidationError(
|
||||
`Missing or invalid metric '${metric}' at index ${index}`,
|
||||
{ item }
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
timestamp: new Date(record.timestamp as string | number | Date).toISOString(),
|
||||
...record
|
||||
};
|
||||
});
|
||||
} catch (error) {
|
||||
const errorMessage = error instanceof Error ? error.message : 'Unknown error';
|
||||
throw new ValidationError(`Failed to parse time-series data: ${errorMessage}`, {
|
||||
response: response.substring(0, 200),
|
||||
error
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate synthetic time-series with local computation (faster for simple patterns)
|
||||
*/
|
||||
async generateLocal(options: TimeSeriesOptions): Promise<Array<Record<string, unknown>>> {
|
||||
const {
|
||||
count = 100,
|
||||
startDate = new Date(),
|
||||
interval = '1h',
|
||||
metrics = ['value'],
|
||||
trend = 'stable',
|
||||
seasonality = false,
|
||||
noise = 0.1
|
||||
} = options;
|
||||
|
||||
const start = new Date(startDate).getTime();
|
||||
const intervalMs = this.parseInterval(interval);
|
||||
const data: Array<Record<string, unknown>> = [];
|
||||
|
||||
let baseValue = 100;
|
||||
const trendRate = trend === 'up' ? 0.01 : trend === 'down' ? -0.01 : 0;
|
||||
|
||||
for (let i = 0; i < count; i++) {
|
||||
const timestamp = new Date(start + i * intervalMs);
|
||||
const point: Record<string, unknown> = { timestamp: timestamp.toISOString() };
|
||||
|
||||
for (const metric of metrics) {
|
||||
let value = baseValue;
|
||||
|
||||
// Apply trend
|
||||
value += baseValue * trendRate * i;
|
||||
|
||||
// Apply seasonality
|
||||
if (seasonality) {
|
||||
const hourOfDay = timestamp.getHours();
|
||||
const dayOfWeek = timestamp.getDay();
|
||||
value += Math.sin((hourOfDay / 24) * Math.PI * 2) * baseValue * 0.1;
|
||||
value += Math.sin((dayOfWeek / 7) * Math.PI * 2) * baseValue * 0.05;
|
||||
}
|
||||
|
||||
// Apply noise
|
||||
value += (Math.random() - 0.5) * 2 * baseValue * noise;
|
||||
|
||||
point[metric] = Math.round(value * 100) / 100;
|
||||
}
|
||||
|
||||
data.push(point);
|
||||
}
|
||||
|
||||
return data;
|
||||
}
|
||||
|
||||
private parseInterval(interval: string): number {
|
||||
const match = interval.match(/^(\d+)(s|m|h|d)$/);
|
||||
if (!match) {
|
||||
throw new ValidationError('Invalid interval format', { interval });
|
||||
}
|
||||
|
||||
const [, amount, unit] = match;
|
||||
|
||||
// Strict mode: ensure captured groups are defined
|
||||
if (!amount || !unit) {
|
||||
throw new ValidationError('Invalid interval format: missing amount or unit', { interval, match });
|
||||
}
|
||||
|
||||
const multipliers: Record<string, number> = {
|
||||
s: 1000,
|
||||
m: 60 * 1000,
|
||||
h: 60 * 60 * 1000,
|
||||
d: 24 * 60 * 60 * 1000
|
||||
};
|
||||
|
||||
const multiplier = multipliers[unit];
|
||||
if (multiplier === undefined) {
|
||||
throw new ValidationError('Invalid interval unit', { interval, unit });
|
||||
}
|
||||
|
||||
return parseInt(amount, 10) * multiplier;
|
||||
}
|
||||
}
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/index.d.ts.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/index.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":"AAAA;;;;GAIG;AAEH,OAAO,eAAe,CAAC;AACvB,OAAO,EACL,WAAW,EAEX,gBAAgB,EAChB,iBAAiB,EACjB,YAAY,EACZ,gBAAgB,EAEhB,QAAQ,EACT,MAAM,YAAY,CAAC;AAMpB;;GAEG;AACH,qBAAa,YAAY;IACvB,OAAO,CAAC,MAAM,CAAc;IAC5B,OAAO,CAAC,aAAa,CAAsB;IAC3C,OAAO,CAAC,QAAQ,CAAiB;IACjC,OAAO,CAAC,aAAa,CAAsB;gBAE/B,MAAM,GAAE,OAAO,CAAC,WAAW,CAAM;IAuB7C;;OAEG;IACG,kBAAkB,CAAC,CAAC,GAAG,OAAO,EAClC,OAAO,GAAE,OAAO,CAAC,iBAAiB,CAAM,GACvC,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC;IAI/B;;OAEG;IACG,cAAc,CAAC,CAAC,GAAG,OAAO,EAC9B,OAAO,GAAE,OAAO,CAAC,YAAY,CAAM,GAClC,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC;IAI/B;;OAEG;IACG,kBAAkB,CAAC,CAAC,GAAG,OAAO,EAClC,OAAO,GAAE,OAAO,CAAC,gBAAgB,CAAM,GACtC,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC;IAI/B;;OAEG;IACG,QAAQ,CAAC,CAAC,GAAG,OAAO,EACxB,IAAI,EAAE,QAAQ,EACd,OAAO,GAAE,OAAO,CAAC,gBAAgB,CAAM,GACtC,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,CAAC;IAc/B;;OAEG;IACI,cAAc,CAAC,CAAC,GAAG,OAAO,EAC/B,IAAI,EAAE,QAAQ,EACd,OAAO,GAAE,OAAO,CAAC,gBAAgB,CAAM,GACtC,cAAc,CAAC,CAAC,EAAE,IAAI,EAAE,OAAO,CAAC;IAKnC;;OAEG;IACG,aAAa,CAAC,CAAC,GAAG,OAAO,EAC7B,IAAI,EAAE,QAAQ,EACd,YAAY,EAAE,OAAO,CAAC,gBAAgB,CAAC,EAAE,EACzC,WAAW,GAAE,MAAU,GACtB,OAAO,CAAC,gBAAgB,CAAC,CAAC,CAAC,EAAE,CAAC;IAKjC;;OAEG;IACH,OAAO,CAAC,YAAY;IAcpB;;OAEG;IACH,SAAS,CAAC,MAAM,EAAE,OAAO,CAAC,WAAW,CAAC,GAAG,IAAI;IAS7C;;OAEG;IACH,SAAS,IAAI,WAAW;CAGzB;AAED;;GAEG;AACH,wBAAgB,WAAW,CAAC,MAAM,CAAC,EAAE,OAAO,CAAC,WAAW,CAAC,GAAG,YAAY,CAEvE;AAGD,cAAc,YAAY,CAAC;AAC3B,cAAc,uBAAuB,CAAC;AACtC,cAAc,kBAAkB,CAAC;AACjC,cAAc,oBAAoB,CAAC;AAGnC,eAAe,YAAY,CAAC"}
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/index.js.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/index.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":";AAAA;;;;GAIG;;;;;;;;;;;;;;;;;AAgKH,kCAEC;AAhKD,yBAAuB;AACvB,yCASoB;AACpB,8DAAiE;AACjE,sDAAwD;AACxD,8DAAiE;AAGjE;;GAEG;AACH,MAAa,YAAY;IAMvB,YAAY,SAA+B,EAAE;QAC3C,4BAA4B;QAC5B,MAAM,aAAa,GAAgB;YACjC,QAAQ,EAAE,QAAQ;YAClB,MAAM,EAAE,OAAO,CAAC,GAAG,CAAC,cAAc;YAClC,KAAK,EAAE,sBAAsB;YAC7B,aAAa,EAAE,QAAQ;YACvB,QAAQ,EAAE,IAAI;YACd,UAAU,EAAE,CAAC;YACb,OAAO,EAAE,KAAK;YACd,SAAS,EAAE,KAAK;YAChB,UAAU,EAAE,KAAK;YACjB,QAAQ,EAAE,KAAK;SAChB,CAAC;QAEF,IAAI,CAAC,MAAM,GAAG,4BAAiB,CAAC,KAAK,CAAC,EAAE,GAAG,aAAa,EAAE,GAAG,MAAM,EAAE,CAAC,CAAC;QAEvE,wBAAwB;QACxB,IAAI,CAAC,aAAa,GAAG,IAAI,mCAAmB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC1D,IAAI,CAAC,QAAQ,GAAG,IAAI,0BAAc,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAChD,IAAI,CAAC,aAAa,GAAG,IAAI,mCAAmB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IAC5D,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,kBAAkB,CACtB,UAAsC,EAAE;QAExC,OAAO,IAAI,CAAC,aAAa,CAAC,QAAQ,CAAI,OAA4B,CAAC,CAAC;IACtE,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,cAAc,CAClB,UAAiC,EAAE;QAEnC,OAAO,IAAI,CAAC,QAAQ,CAAC,QAAQ,CAAI,OAAuB,CAAC,CAAC;IAC5D,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,kBAAkB,CACtB,UAAqC,EAAE;QAEvC,OAAO,IAAI,CAAC,aAAa,CAAC,QAAQ,CAAI,OAA2B,CAAC,CAAC;IACrE,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,QAAQ,CACZ,IAAc,EACd,UAAqC,EAAE;QAEvC,QAAQ,IAAI,EAAE,CAAC;YACb,KAAK,YAAY;gBACf,OAAO,IAAI,CAAC,kBAAkB,CAAI,OAA4B,CAAC,CAAC;YAClE,KAAK,QAAQ;gBACX,OAAO,IAAI,CAAC,cAAc,CAAI,OAAuB,CAAC,CAAC;YACzD,KAAK,YAAY,CAAC;YAClB,KAAK,MAAM;gBACT,OAAO,IAAI,CAAC,kBAAkB,CAAI,OAAO,CAAC,CAAC;YAC7C;gBACE,MAAM,IAAI,KAAK,CAAC,0BAA0B,IAAI,EAAE,CAAC,CAAC;QACtD,CAAC;IACH,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,CAAC,cAAc,CACnB,IAAc,EACd,UAAqC,EAAE;QAEvC,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;QAC1C,KAAK,CAAC,CAAC,SAAS,CAAC,cAAc,CAAI,OAA2B,CAAC,CAAC;IAClE,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,aAAa,CACjB,IAAc,EACd,YAAyC,EACzC,cAAsB,CAAC;QAEvB,MAAM,SAAS,GAAG,IAAI,CAAC,YAAY,CAAC,IAAI,CAAC,CAAC;QAC1C,OAAO,SAAS,CAAC,aAAa,CAAI,YAAkC,EAAE,WAAW,CAAC,CAAC;IACrF,CAAC;IAED;;OAEG;IACK,YAAY,CAAC,IAAc;QACjC,QAAQ,IAAI,EA
AE,CAAC;YACb,KAAK,YAAY;gBACf,OAAO,IAAI,CAAC,aAAa,CAAC;YAC5B,KAAK,QAAQ;gBACX,OAAO,IAAI,CAAC,QAAQ,CAAC;YACvB,KAAK,YAAY,CAAC;YAClB,KAAK,MAAM;gBACT,OAAO,IAAI,CAAC,aAAa,CAAC;YAC5B;gBACE,MAAM,IAAI,KAAK,CAAC,0BAA0B,IAAI,EAAE,CAAC,CAAC;QACtD,CAAC;IACH,CAAC;IAED;;OAEG;IACH,SAAS,CAAC,MAA4B;QACpC,IAAI,CAAC,MAAM,GAAG,4BAAiB,CAAC,KAAK,CAAC,EAAE,GAAG,IAAI,CAAC,MAAM,EAAE,GAAG,MAAM,EAAE,CAAC,CAAC;QAErE,sCAAsC;QACtC,IAAI,CAAC,aAAa,GAAG,IAAI,mCAAmB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAC1D,IAAI,CAAC,QAAQ,GAAG,IAAI,0BAAc,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;QAChD,IAAI,CAAC,aAAa,GAAG,IAAI,mCAAmB,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IAC5D,CAAC;IAED;;OAEG;IACH,SAAS;QACP,OAAO,EAAE,GAAG,IAAI,CAAC,MAAM,EAAE,CAAC;IAC5B,CAAC;CACF;AAtID,oCAsIC;AAED;;GAEG;AACH,SAAgB,WAAW,CAAC,MAA6B;IACvD,OAAO,IAAI,YAAY,CAAC,MAAM,CAAC,CAAC;AAClC,CAAC;AAED,6BAA6B;AAC7B,6CAA2B;AAC3B,wDAAsC;AACtC,mDAAiC;AACjC,qDAAmC;AAEnC,iBAAiB;AACjB,kBAAe,YAAY,CAAC"}
|
||||
176
vendor/ruvector/npm/packages/agentic-synth/src/index.ts
vendored
Normal file
176
vendor/ruvector/npm/packages/agentic-synth/src/index.ts
vendored
Normal file
@@ -0,0 +1,176 @@
|
||||
/**
|
||||
* agentic-synth - AI-powered synthetic data generation
|
||||
*
|
||||
* @packageDocumentation
|
||||
*/
|
||||
|
||||
import 'dotenv/config';
|
||||
import {
|
||||
SynthConfig,
|
||||
SynthConfigSchema,
|
||||
GeneratorOptions,
|
||||
TimeSeriesOptions,
|
||||
EventOptions,
|
||||
GenerationResult,
|
||||
ModelProvider,
|
||||
DataType
|
||||
} from './types.js';
|
||||
import { TimeSeriesGenerator } from './generators/timeseries.js';
|
||||
import { EventGenerator } from './generators/events.js';
|
||||
import { StructuredGenerator } from './generators/structured.js';
|
||||
import { CacheManager } from './cache/index.js';
|
||||
|
||||
/**
|
||||
* Main AgenticSynth class for data generation
|
||||
*/
|
||||
export class AgenticSynth {
|
||||
private config: SynthConfig;
|
||||
private timeSeriesGen: TimeSeriesGenerator;
|
||||
private eventGen: EventGenerator;
|
||||
private structuredGen: StructuredGenerator;
|
||||
|
||||
constructor(config: Partial<SynthConfig> = {}) {
|
||||
// Validate and merge config
|
||||
const defaultConfig: SynthConfig = {
|
||||
provider: 'gemini',
|
||||
apiKey: process.env.GEMINI_API_KEY,
|
||||
model: 'gemini-2.0-flash-exp',
|
||||
cacheStrategy: 'memory',
|
||||
cacheTTL: 3600,
|
||||
maxRetries: 3,
|
||||
timeout: 30000,
|
||||
streaming: false,
|
||||
automation: false,
|
||||
vectorDB: false
|
||||
};
|
||||
|
||||
this.config = SynthConfigSchema.parse({ ...defaultConfig, ...config });
|
||||
|
||||
// Initialize generators
|
||||
this.timeSeriesGen = new TimeSeriesGenerator(this.config);
|
||||
this.eventGen = new EventGenerator(this.config);
|
||||
this.structuredGen = new StructuredGenerator(this.config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate time-series data
|
||||
*/
|
||||
async generateTimeSeries<T = unknown>(
|
||||
options: Partial<TimeSeriesOptions> = {}
|
||||
): Promise<GenerationResult<T>> {
|
||||
return this.timeSeriesGen.generate<T>(options as TimeSeriesOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate event data
|
||||
*/
|
||||
async generateEvents<T = unknown>(
|
||||
options: Partial<EventOptions> = {}
|
||||
): Promise<GenerationResult<T>> {
|
||||
return this.eventGen.generate<T>(options as EventOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate structured data
|
||||
*/
|
||||
async generateStructured<T = unknown>(
|
||||
options: Partial<GeneratorOptions> = {}
|
||||
): Promise<GenerationResult<T>> {
|
||||
return this.structuredGen.generate<T>(options as GeneratorOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate data by type
|
||||
*/
|
||||
async generate<T = unknown>(
|
||||
type: DataType,
|
||||
options: Partial<GeneratorOptions> = {}
|
||||
): Promise<GenerationResult<T>> {
|
||||
switch (type) {
|
||||
case 'timeseries':
|
||||
return this.generateTimeSeries<T>(options as TimeSeriesOptions);
|
||||
case 'events':
|
||||
return this.generateEvents<T>(options as EventOptions);
|
||||
case 'structured':
|
||||
case 'json':
|
||||
return this.generateStructured<T>(options);
|
||||
default:
|
||||
throw new Error(`Unsupported data type: ${type}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate with streaming
|
||||
*/
|
||||
async *generateStream<T = unknown>(
|
||||
type: DataType,
|
||||
options: Partial<GeneratorOptions> = {}
|
||||
): AsyncGenerator<T, void, unknown> {
|
||||
const generator = this.getGenerator(type);
|
||||
yield* generator.generateStream<T>(options as GeneratorOptions);
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate multiple batches in parallel
|
||||
*/
|
||||
async generateBatch<T = unknown>(
|
||||
type: DataType,
|
||||
batchOptions: Partial<GeneratorOptions>[],
|
||||
concurrency: number = 3
|
||||
): Promise<GenerationResult<T>[]> {
|
||||
const generator = this.getGenerator(type);
|
||||
return generator.generateBatch<T>(batchOptions as GeneratorOptions[], concurrency);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get generator for data type
|
||||
*/
|
||||
private getGenerator(type: DataType) {
|
||||
switch (type) {
|
||||
case 'timeseries':
|
||||
return this.timeSeriesGen;
|
||||
case 'events':
|
||||
return this.eventGen;
|
||||
case 'structured':
|
||||
case 'json':
|
||||
return this.structuredGen;
|
||||
default:
|
||||
throw new Error(`Unsupported data type: ${type}`);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Configure instance
|
||||
*/
|
||||
configure(config: Partial<SynthConfig>): void {
|
||||
this.config = SynthConfigSchema.parse({ ...this.config, ...config });
|
||||
|
||||
// Recreate generators with new config
|
||||
this.timeSeriesGen = new TimeSeriesGenerator(this.config);
|
||||
this.eventGen = new EventGenerator(this.config);
|
||||
this.structuredGen = new StructuredGenerator(this.config);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get current configuration
|
||||
*/
|
||||
getConfig(): SynthConfig {
|
||||
return { ...this.config };
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new AgenticSynth instance
|
||||
*/
|
||||
export function createSynth(config?: Partial<SynthConfig>): AgenticSynth {
|
||||
return new AgenticSynth(config);
|
||||
}
|
||||
|
||||
// Export types and utilities
|
||||
export * from './types.js';
|
||||
export * from './generators/index.js';
|
||||
export * from './cache/index.js';
|
||||
export * from './routing/index.js';
|
||||
|
||||
// Default export
|
||||
export default AgenticSynth;
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/routing/index.d.ts.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/routing/index.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,aAAa,EAAE,UAAU,EAAc,MAAM,aAAa,CAAC;AAEpE,MAAM,WAAW,YAAY;IAC3B,eAAe,EAAE,aAAa,CAAC;IAC/B,YAAY,EAAE;QACZ,MAAM,CAAC,EAAE,MAAM,CAAC;QAChB,UAAU,CAAC,EAAE,MAAM,CAAC;KACrB,CAAC;IACF,aAAa,CAAC,EAAE,aAAa,EAAE,CAAC;IAChC,YAAY,CAAC,EAAE,UAAU,EAAE,CAAC;CAC7B;AAED;;GAEG;AACH,qBAAa,WAAW;IACtB,OAAO,CAAC,MAAM,CAAe;IAC7B,OAAO,CAAC,MAAM,CAA0B;gBAE5B,MAAM,EAAE,YAAY;IAMhC,OAAO,CAAC,gBAAgB;IAsDxB;;OAEG;IACH,WAAW,CAAC,YAAY,EAAE;QACxB,YAAY,CAAC,EAAE,MAAM,EAAE,CAAC;QACxB,QAAQ,CAAC,EAAE,aAAa,CAAC;QACzB,cAAc,CAAC,EAAE,MAAM,CAAC;KACzB,GAAG,UAAU;IAoDd;;OAEG;IACH,gBAAgB,CAAC,OAAO,EAAE,UAAU,GAAG,UAAU,EAAE;IA8BnD;;OAEG;IACH,SAAS,IAAI,UAAU,EAAE;IAIzB;;OAEG;IACH,QAAQ,CAAC,KAAK,EAAE,UAAU,GAAG,IAAI;IAKjC;;OAEG;IACH,cAAc,CAAC,KAAK,EAAE,UAAU,GAAG;QACjC,QAAQ,EAAE,aAAa,CAAC;QACxB,KAAK,EAAE,MAAM,CAAC;QACd,MAAM,CAAC,EAAE,MAAM,CAAC;KACjB;CAOF;AAED,OAAO,EAAE,aAAa,EAAE,UAAU,EAAE,CAAC"}
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/routing/index.js.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/routing/index.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":";AAAA;;GAEG;;;AAEH,0CAAoE;AAYpE;;GAEG;AACH,MAAa,WAAW;IAItB,YAAY,MAAoB;QAC9B,IAAI,CAAC,MAAM,GAAG,MAAM,CAAC;QACrB,IAAI,CAAC,MAAM,GAAG,IAAI,GAAG,EAAE,CAAC;QACxB,IAAI,CAAC,gBAAgB,EAAE,CAAC;IAC1B,CAAC;IAEO,gBAAgB;QACtB,wBAAwB;QACxB,MAAM,YAAY,GAAiB;YACjC;gBACE,QAAQ,EAAE,QAAQ;gBAClB,KAAK,EAAE,sBAAsB;gBAC7B,QAAQ,EAAE,EAAE;gBACZ,YAAY,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,WAAW,EAAE,MAAM,CAAC;aACpD;YACD;gBACE,QAAQ,EAAE,QAAQ;gBAClB,KAAK,EAAE,gBAAgB;gBACvB,QAAQ,EAAE,CAAC;gBACX,YAAY,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,SAAS,EAAE,WAAW,CAAC;aACvD;YACD;gBACE,QAAQ,EAAE,QAAQ;gBAClB,KAAK,EAAE,kBAAkB;gBACzB,QAAQ,EAAE,CAAC;gBACX,YAAY,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,WAAW,CAAC;aACpD;SACF,CAAC;QAEF,4BAA4B;QAC5B,MAAM,gBAAgB,GAAiB;YACrC;gBACE,QAAQ,EAAE,YAAY;gBACtB,KAAK,EAAE,6BAA6B;gBACpC,QAAQ,EAAE,EAAE;gBACZ,YAAY,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,WAAW,EAAE,SAAS,CAAC;aACvD;YACD;gBACE,QAAQ,EAAE,YAAY;gBACtB,KAAK,EAAE,oBAAoB;gBAC3B,QAAQ,EAAE,CAAC;gBACX,YAAY,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,WAAW,CAAC;aAC5C;YACD;gBACE,QAAQ,EAAE,YAAY;gBACtB,KAAK,EAAE,mCAAmC;gBAC1C,QAAQ,EAAE,CAAC;gBACX,YAAY,EAAE,CAAC,MAAM,EAAE,MAAM,EAAE,MAAM,CAAC;aACvC;SACF,CAAC;QAEF,iBAAiB;QACjB,CAAC,GAAG,YAAY,EAAE,GAAG,gBAAgB,EAAE,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,YAAY,IAAI,EAAE,CAAC,CAAC,CAAC,OAAO,CACjF,KAAK,CAAC,EAAE;YACN,MAAM,GAAG,GAAG,GAAG,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,KAAK,EAAE,CAAC;YAC/C,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;QAC9B,CAAC,CACF,CAAC;IACJ,CAAC;IAED;;OAEG;IACH,WAAW,CAAC,YAIX;QACC,MAAM,EAAE,YAAY,GAAG,EAAE,EAAE,QAAQ,EAAE,cAAc,EAAE,GAAG,YAAY,CAAC;QAErE,6CAA6C;QAC7C,IAAI,QAAQ,IAAI,cAAc,EAAE,CAAC;YAC/B,MAAM,GAAG,GAAG,GAAG,QAAQ,IAAI,cAAc,EAAE,CAAC;YAC5C,MAAM,KAAK,GAAG,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC;YACnC,IAAI,KAAK,EAAE,CAAC;gBACV,OAAO,KAAK,CAAC;YACf,CAAC;QACH,CAAC;QAED,kCAAkC;QAClC,IAAI,UAAU,GAAG,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC;QAClD,IAAI,QAAQ,EAAE,CAAC;YACb,UAAU,GAAG,UAAU,CAAC,MAAM,CAAC,C
AAC,CAAC,EAAE,CAAC,CAAC,CAAC,QAAQ,KAAK,QAAQ,CAAC,CAAC;QAC/D,CAAC;aAAM,CAAC;YACN,uBAAuB;YACvB,UAAU,GAAG,UAAU,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,QAAQ,KAAK,IAAI,CAAC,MAAM,CAAC,eAAe,CAAC,CAAC;QAClF,CAAC;QAED,yBAAyB;QACzB,IAAI,YAAY,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YAC5B,UAAU,GAAG,UAAU,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CACrC,YAAY,CAAC,KAAK,CAAC,GAAG,CAAC,EAAE,CAAC,KAAK,CAAC,YAAY,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAC5D,CAAC;QACJ,CAAC;QAED,mCAAmC;QACnC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,QAAQ,GAAG,CAAC,CAAC,QAAQ,CAAC,CAAC;QAEnD,IAAI,UAAU,CAAC,MAAM,KAAK,CAAC,EAAE,CAAC;YAC5B,MAAM,IAAI,qBAAU,CAClB,0CAA0C,EAC1C,gBAAgB,EAChB,EAAE,YAAY,EAAE,CACjB,CAAC;QACJ,CAAC;QAED,2CAA2C;QAC3C,MAAM,aAAa,GAAG,UAAU,CAAC,CAAC,CAAC,CAAC;QACpC,IAAI,CAAC,aAAa,EAAE,CAAC;YACnB,MAAM,IAAI,qBAAU,CAClB,wDAAwD,EACxD,uBAAuB,EACvB,EAAE,UAAU,EAAE,CACf,CAAC;QACJ,CAAC;QAED,OAAO,aAAa,CAAC;IACvB,CAAC;IAED;;OAEG;IACH,gBAAgB,CAAC,OAAmB;QAClC,MAAM,KAAK,GAAiB,CAAC,OAAO,CAAC,CAAC;QAEtC,IAAI,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,CAAC;YAC9B,0DAA0D;YAC1D,sEAAsE;YACtE,MAAM,qBAAqB,GAAG,OAAO,CAAC,YAAY,CAAC,MAAM,CACvD,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,WAAW,EAAE,MAAM,EAAE,WAAW,EAAE,SAAS,EAAE,WAAW,CAAC,CAAC,QAAQ,CAAC,GAAG,CAAC,CACjF,CAAC;YAEF,KAAK,MAAM,QAAQ,IAAI,IAAI,CAAC,MAAM,CAAC,aAAa,EAAE,CAAC;gBACjD,IAAI,CAAC;oBACH,MAAM,QAAQ,GAAG,IAAI,CAAC,WAAW,CAAC;wBAChC,QAAQ;wBACR,YAAY,EAAE,qBAAqB,CAAC,MAAM,GAAG,CAAC,CAAC,CAAC,CAAC,qBAAqB,CAAC,CAAC,CAAC,SAAS;qBACnF,CAAC,CAAC;oBAEH,IAAI,QAAQ,CAAC,KAAK,KAAK,OAAO,CAAC,KAAK,EAAE,CAAC;wBACrC,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC;oBACvB,CAAC;gBACH,CAAC;gBAAC,OAAO,KAAK,EAAE,CAAC;oBACf,yDAAyD;oBACzD,OAAO,CAAC,IAAI,CAAC,iDAAiD,QAAQ,EAAE,CAAC,CAAC;gBAC5E,CAAC;YACH,CAAC;QACH,CAAC;QAED,OAAO,KAAK,CAAC;IACf,CAAC;IAED;;OAEG;IACH,SAAS;QACP,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,MAAM,CAAC,MAAM,EAAE,CAAC,CAAC;IAC1C,CAAC;IAED;;OAEG;IACH,QAAQ,CAAC,KAAiB;QACxB,MAAM,GAAG,GAAG,GAAG,KAAK,CAAC,QAAQ,IAAI,KAAK,CAAC,KAAK,EAAE,CAAC;QAC/C,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC;IAC9B,CAAC;IAED
;;OAEG;IACH,cAAc,CAAC,KAAiB;QAK9B,OAAO;YACL,QAAQ,EAAE,KAAK,CAAC,QAAQ;YACxB,KAAK,EAAE,KAAK,CAAC,KAAK;YAClB,MAAM,EAAE,IAAI,CAAC,MAAM,CAAC,YAAY,CAAC,KAAK,CAAC,QAAQ,CAAC;SACjD,CAAC;IACJ,CAAC;CACF;AAzLD,kCAyLC"}
|
||||
207
vendor/ruvector/npm/packages/agentic-synth/src/routing/index.ts
vendored
Normal file
207
vendor/ruvector/npm/packages/agentic-synth/src/routing/index.ts
vendored
Normal file
@@ -0,0 +1,207 @@
|
||||
/**
|
||||
* Model routing logic for Gemini and OpenRouter
|
||||
*/
|
||||
|
||||
import { ModelProvider, ModelRoute, SynthError } from '../types.js';
|
||||
|
||||
/** Configuration for ModelRouter. */
export interface RouterConfig {
  /** Provider used when a request does not specify one. */
  defaultProvider: ModelProvider;
  /** Per-provider API keys; a missing key means no credential is configured. */
  providerKeys: {
    gemini?: string;
    openrouter?: string;
  };
  /** Ordered providers to try when the primary route is unusable. */
  fallbackChain?: ModelProvider[];
  /** Extra routes merged into (and able to override) the built-in defaults. */
  customRoutes?: ModelRoute[];
}
|
||||
|
||||
/**
|
||||
* Model router for intelligent provider selection
|
||||
*/
|
||||
export class ModelRouter {
|
||||
private config: RouterConfig;
|
||||
private routes: Map<string, ModelRoute>;
|
||||
|
||||
constructor(config: RouterConfig) {
|
||||
this.config = config;
|
||||
this.routes = new Map();
|
||||
this.initializeRoutes();
|
||||
}
|
||||
|
||||
private initializeRoutes(): void {
|
||||
// Default Gemini models
|
||||
const geminiRoutes: ModelRoute[] = [
|
||||
{
|
||||
provider: 'gemini',
|
||||
model: 'gemini-2.0-flash-exp',
|
||||
priority: 10,
|
||||
capabilities: ['text', 'json', 'streaming', 'fast']
|
||||
},
|
||||
{
|
||||
provider: 'gemini',
|
||||
model: 'gemini-1.5-pro',
|
||||
priority: 8,
|
||||
capabilities: ['text', 'json', 'complex', 'reasoning']
|
||||
},
|
||||
{
|
||||
provider: 'gemini',
|
||||
model: 'gemini-1.5-flash',
|
||||
priority: 9,
|
||||
capabilities: ['text', 'json', 'fast', 'efficient']
|
||||
}
|
||||
];
|
||||
|
||||
// Default OpenRouter models
|
||||
const openrouterRoutes: ModelRoute[] = [
|
||||
{
|
||||
provider: 'openrouter',
|
||||
model: 'anthropic/claude-3.5-sonnet',
|
||||
priority: 10,
|
||||
capabilities: ['text', 'json', 'reasoning', 'complex']
|
||||
},
|
||||
{
|
||||
provider: 'openrouter',
|
||||
model: 'openai/gpt-4-turbo',
|
||||
priority: 9,
|
||||
capabilities: ['text', 'json', 'reasoning']
|
||||
},
|
||||
{
|
||||
provider: 'openrouter',
|
||||
model: 'meta-llama/llama-3.1-70b-instruct',
|
||||
priority: 7,
|
||||
capabilities: ['text', 'json', 'fast']
|
||||
}
|
||||
];
|
||||
|
||||
// Add all routes
|
||||
[...geminiRoutes, ...openrouterRoutes, ...(this.config.customRoutes || [])].forEach(
|
||||
route => {
|
||||
const key = `${route.provider}:${route.model}`;
|
||||
this.routes.set(key, route);
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Select best model for given requirements
|
||||
*/
|
||||
selectModel(requirements: {
|
||||
capabilities?: string[];
|
||||
provider?: ModelProvider;
|
||||
preferredModel?: string;
|
||||
}): ModelRoute {
|
||||
const { capabilities = [], provider, preferredModel } = requirements;
|
||||
|
||||
// If specific model requested, try to use it
|
||||
if (provider && preferredModel) {
|
||||
const key = `${provider}:${preferredModel}`;
|
||||
const route = this.routes.get(key);
|
||||
if (route) {
|
||||
return route;
|
||||
}
|
||||
}
|
||||
|
||||
// Filter by provider if specified
|
||||
let candidates = Array.from(this.routes.values());
|
||||
if (provider) {
|
||||
candidates = candidates.filter(r => r.provider === provider);
|
||||
} else {
|
||||
// Use default provider
|
||||
candidates = candidates.filter(r => r.provider === this.config.defaultProvider);
|
||||
}
|
||||
|
||||
// Filter by capabilities
|
||||
if (capabilities.length > 0) {
|
||||
candidates = candidates.filter(route =>
|
||||
capabilities.every(cap => route.capabilities.includes(cap))
|
||||
);
|
||||
}
|
||||
|
||||
// Sort by priority (highest first)
|
||||
candidates.sort((a, b) => b.priority - a.priority);
|
||||
|
||||
if (candidates.length === 0) {
|
||||
throw new SynthError(
|
||||
'No suitable model found for requirements',
|
||||
'NO_MODEL_FOUND',
|
||||
{ requirements }
|
||||
);
|
||||
}
|
||||
|
||||
// Safe to access: we've checked length > 0
|
||||
const selectedRoute = candidates[0];
|
||||
if (!selectedRoute) {
|
||||
throw new SynthError(
|
||||
'Unexpected error: no route selected despite candidates',
|
||||
'ROUTE_SELECTION_ERROR',
|
||||
{ candidates }
|
||||
);
|
||||
}
|
||||
|
||||
return selectedRoute;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get fallback chain for resilience
|
||||
*/
|
||||
getFallbackChain(primary: ModelRoute): ModelRoute[] {
|
||||
const chain: ModelRoute[] = [primary];
|
||||
|
||||
if (this.config.fallbackChain) {
|
||||
// Only require essential capabilities for fallback models
|
||||
// Filter out optimization flags like 'streaming', 'fast', 'efficient'
|
||||
const essentialCapabilities = primary.capabilities.filter(
|
||||
cap => !['streaming', 'fast', 'efficient', 'complex', 'reasoning'].includes(cap)
|
||||
);
|
||||
|
||||
for (const provider of this.config.fallbackChain) {
|
||||
try {
|
||||
const fallback = this.selectModel({
|
||||
provider,
|
||||
capabilities: essentialCapabilities.length > 0 ? essentialCapabilities : undefined
|
||||
});
|
||||
|
||||
if (fallback.model !== primary.model) {
|
||||
chain.push(fallback);
|
||||
}
|
||||
} catch (error) {
|
||||
// Skip this fallback provider if no suitable model found
|
||||
console.warn(`No suitable fallback model found for provider ${provider}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return chain;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get all available routes
|
||||
*/
|
||||
getRoutes(): ModelRoute[] {
|
||||
return Array.from(this.routes.values());
|
||||
}
|
||||
|
||||
/**
|
||||
* Add custom route
|
||||
*/
|
||||
addRoute(route: ModelRoute): void {
|
||||
const key = `${route.provider}:${route.model}`;
|
||||
this.routes.set(key, route);
|
||||
}
|
||||
|
||||
/**
|
||||
* Get model configuration
|
||||
*/
|
||||
getModelConfig(route: ModelRoute): {
|
||||
provider: ModelProvider;
|
||||
model: string;
|
||||
apiKey?: string;
|
||||
} {
|
||||
return {
|
||||
provider: route.provider,
|
||||
model: route.model,
|
||||
apiKey: this.config.providerKeys[route.provider]
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
export { ModelProvider, ModelRoute };
|
||||
148
vendor/ruvector/npm/packages/agentic-synth/src/routing/model-router.js
vendored
Normal file
148
vendor/ruvector/npm/packages/agentic-synth/src/routing/model-router.js
vendored
Normal file
@@ -0,0 +1,148 @@
|
||||
/**
 * Model Router for intelligent model selection.
 *
 * Routes requests across a registered pool of models using a configurable
 * strategy and tracks per-model latency/error statistics.
 */
export class ModelRouter {
  /**
   * @param {Object} [options]
   * @param {Array<{id: string, endpoint: string, capabilities?: string[]}>} [options.models]
   * @param {'round-robin'|'least-latency'|'cost-optimized'|'capability-based'} [options.strategy]
   */
  constructor(options = {}) {
    this.models = options.models || [];
    this.strategy = options.strategy || 'round-robin';
    this.currentIndex = 0;
    this.modelStats = new Map();

    // Initialize stats for provided models
    this.models.forEach(model => {
      this.modelStats.set(model.id, {
        requests: 0,
        errors: 0,
        totalLatency: 0,
        avgLatency: 0
      });
    });
  }

  /**
   * Route request to appropriate model.
   * @param {Object} request - Request object
   * @returns {string} Selected model ID
   * @throws {Error} When no models are registered.
   */
  route(request) {
    if (this.models.length === 0) {
      throw new Error('No models available for routing');
    }

    switch (this.strategy) {
      case 'round-robin':
        return this._roundRobin();
      case 'least-latency':
        return this._leastLatency();
      case 'cost-optimized':
        return this._costOptimized(request);
      case 'capability-based':
        return this._capabilityBased(request);
      default:
        // Fix: unknown strategies previously returned the model *object*,
        // violating the documented string-ID contract of this method.
        return this.models[0].id;
    }
  }

  /**
   * Register a model and initialize its statistics.
   * @param {{id: string, endpoint: string}} model
   * @throws {Error} When id or endpoint is missing.
   */
  registerModel(model) {
    if (!model.id || !model.endpoint) {
      throw new Error('Model must have id and endpoint');
    }

    this.models.push(model);
    this.modelStats.set(model.id, {
      requests: 0,
      errors: 0,
      totalLatency: 0,
      avgLatency: 0
    });
  }

  /**
   * Record model performance for a completed request.
   * Unknown model IDs are silently ignored.
   * @param {string} modelId
   * @param {number} latency - Request latency (same unit as callers use, ms presumably).
   * @param {boolean} [success=true]
   */
  recordMetrics(modelId, latency, success = true) {
    const stats = this.modelStats.get(modelId);
    if (!stats) return;

    stats.requests++;
    if (!success) stats.errors++;
    stats.totalLatency += latency;
    stats.avgLatency = stats.totalLatency / stats.requests;
  }

  /**
   * Get model statistics.
   * @param {string|null} [modelId] - One model's stats, or all stats keyed by
   *   model ID when omitted.
   */
  getStats(modelId = null) {
    if (modelId) {
      return this.modelStats.get(modelId);
    }
    return Object.fromEntries(this.modelStats);
  }

  /**
   * Round-robin routing: cycle through models in registration order.
   * @private
   */
  _roundRobin() {
    const model = this.models[this.currentIndex];
    this.currentIndex = (this.currentIndex + 1) % this.models.length;
    return model.id;
  }

  /**
   * Route to the model with the lowest average recorded latency.
   * Models with no recorded requests have avgLatency 0 and win ties first.
   * @private
   */
  _leastLatency() {
    let bestModel = this.models[0];
    let lowestLatency = Infinity;

    for (const model of this.models) {
      const stats = this.modelStats.get(model.id);
      if (stats && stats.avgLatency < lowestLatency) {
        lowestLatency = stats.avgLatency;
        bestModel = model;
      }
    }

    return bestModel.id;
  }

  /**
   * Cost-optimized routing.
   * NOTE(review): assumes models are ordered cheapest-first — small requests
   * go to models[0], larger ones to the last model. Confirm with callers.
   * @private
   */
  _costOptimized(request) {
    const requestSize = JSON.stringify(request).length;

    // Route small requests to cheaper models
    if (requestSize < 1000) {
      return this.models[0].id;
    }

    return this.models[this.models.length - 1].id;
  }

  /**
   * Capability-based routing: first model advertising the requested
   * capability; falls back to the first registered model when none match.
   * @private
   */
  _capabilityBased(request) {
    const requiredCapability = request.capability || 'general';

    const capableModels = this.models.filter(model =>
      model.capabilities?.includes(requiredCapability)
    );

    if (capableModels.length === 0) {
      return this.models[0].id;
    }

    return capableModels[0].id;
  }
}
|
||||
253
vendor/ruvector/npm/packages/agentic-synth/src/types.d.ts
vendored
Normal file
253
vendor/ruvector/npm/packages/agentic-synth/src/types.d.ts
vendored
Normal file
@@ -0,0 +1,253 @@
|
||||
/**
 * Core types and interfaces for agentic-synth
 *
 * NOTE: generated declaration file (see sourceMappingURL at the bottom);
 * edit the TypeScript source rather than this file.
 */
import { z } from 'zod';
/** JSON value helper types. */
export type JsonPrimitive = string | number | boolean | null;
export type JsonArray = JsonValue[];
export type JsonObject = {
    [key: string]: JsonValue;
};
export type JsonValue = JsonPrimitive | JsonArray | JsonObject;
/** One field of a user-supplied data schema (JSON-Schema-like shape). */
export interface SchemaField {
    type: string;
    required?: boolean;
    properties?: Record<string, SchemaField>;
    items?: SchemaField;
    enum?: unknown[];
    minimum?: number;
    maximum?: number;
    pattern?: string;
}
export type DataSchema = Record<string, SchemaField>;
export type DataConstraints = Record<string, unknown>;
/** Supported LLM providers. */
export declare const ModelProviderSchema: z.ZodEnum<["gemini", "openrouter"]>;
export type ModelProvider = z.infer<typeof ModelProviderSchema>;
/** Cache backends for generated data. */
export declare const CacheStrategySchema: z.ZodEnum<["none", "memory", "disk"]>;
export type CacheStrategy = z.infer<typeof CacheStrategySchema>;
/** Kinds of synthetic data the library can generate. */
export declare const DataTypeSchema: z.ZodEnum<["timeseries", "events", "structured", "text", "json", "csv"]>;
export type DataType = z.infer<typeof DataTypeSchema>;
/** Top-level synthesizer configuration. */
export interface SynthConfig {
    provider: ModelProvider;
    apiKey?: string;
    model?: string;
    cacheStrategy?: CacheStrategy;
    cacheTTL?: number;
    maxRetries?: number;
    timeout?: number;
    streaming?: boolean;
    automation?: boolean;
    vectorDB?: boolean;
    enableFallback?: boolean;
    fallbackChain?: ModelProvider[];
}
/** Zod schema for SynthConfig; parsing applies the documented defaults. */
export declare const SynthConfigSchema: z.ZodObject<{
    provider: z.ZodEnum<["gemini", "openrouter"]>;
    apiKey: z.ZodOptional<z.ZodString>;
    model: z.ZodOptional<z.ZodString>;
    cacheStrategy: z.ZodDefault<z.ZodOptional<z.ZodEnum<["none", "memory", "disk"]>>>;
    cacheTTL: z.ZodDefault<z.ZodOptional<z.ZodNumber>>;
    maxRetries: z.ZodDefault<z.ZodOptional<z.ZodNumber>>;
    timeout: z.ZodDefault<z.ZodOptional<z.ZodNumber>>;
    streaming: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
    automation: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
    vectorDB: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
    enableFallback: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
    fallbackChain: z.ZodOptional<z.ZodArray<z.ZodEnum<["gemini", "openrouter"]>, "many">>;
}, "strip", z.ZodTypeAny, {
    maxRetries: number;
    provider: "gemini" | "openrouter";
    cacheStrategy: "none" | "memory" | "disk";
    cacheTTL: number;
    timeout: number;
    streaming: boolean;
    automation: boolean;
    vectorDB: boolean;
    enableFallback: boolean;
    apiKey?: string | undefined;
    model?: string | undefined;
    fallbackChain?: ("gemini" | "openrouter")[] | undefined;
}, {
    provider: "gemini" | "openrouter";
    maxRetries?: number | undefined;
    apiKey?: string | undefined;
    model?: string | undefined;
    cacheStrategy?: "none" | "memory" | "disk" | undefined;
    cacheTTL?: number | undefined;
    timeout?: number | undefined;
    streaming?: boolean | undefined;
    automation?: boolean | undefined;
    vectorDB?: boolean | undefined;
    enableFallback?: boolean | undefined;
    fallbackChain?: ("gemini" | "openrouter")[] | undefined;
}>;
/** Options shared by all generators. */
export interface GeneratorOptions {
    count?: number;
    schema?: DataSchema;
    format?: 'json' | 'csv' | 'array';
    seed?: string | number;
    constraints?: DataConstraints;
}
/** Zod schema for GeneratorOptions. */
export declare const GeneratorOptionsSchema: z.ZodObject<{
    count: z.ZodDefault<z.ZodOptional<z.ZodNumber>>;
    schema: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
    format: z.ZodDefault<z.ZodOptional<z.ZodEnum<["json", "csv", "array"]>>>;
    seed: z.ZodOptional<z.ZodUnion<[z.ZodString, z.ZodNumber]>>;
    constraints: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
}, "strip", z.ZodTypeAny, {
    count: number;
    format: "json" | "csv" | "array";
    schema?: Record<string, unknown> | undefined;
    seed?: string | number | undefined;
    constraints?: Record<string, unknown> | undefined;
}, {
    count?: number | undefined;
    schema?: Record<string, unknown> | undefined;
    format?: "json" | "csv" | "array" | undefined;
    seed?: string | number | undefined;
    constraints?: Record<string, unknown> | undefined;
}>;
/** Time-series generator options. */
export interface TimeSeriesOptions extends GeneratorOptions {
    startDate?: Date | string;
    endDate?: Date | string;
    interval?: string;
    metrics?: string[];
    trend?: 'up' | 'down' | 'stable' | 'random';
    seasonality?: boolean;
    noise?: number;
}
/** Zod schema for TimeSeriesOptions (GeneratorOptionsSchema extension). */
export declare const TimeSeriesOptionsSchema: z.ZodObject<{
    count: z.ZodDefault<z.ZodOptional<z.ZodNumber>>;
    schema: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
    format: z.ZodDefault<z.ZodOptional<z.ZodEnum<["json", "csv", "array"]>>>;
    seed: z.ZodOptional<z.ZodUnion<[z.ZodString, z.ZodNumber]>>;
    constraints: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
} & {
    startDate: z.ZodOptional<z.ZodUnion<[z.ZodDate, z.ZodString]>>;
    endDate: z.ZodOptional<z.ZodUnion<[z.ZodDate, z.ZodString]>>;
    interval: z.ZodDefault<z.ZodOptional<z.ZodString>>;
    metrics: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
    trend: z.ZodDefault<z.ZodOptional<z.ZodEnum<["up", "down", "stable", "random"]>>>;
    seasonality: z.ZodDefault<z.ZodOptional<z.ZodBoolean>>;
    noise: z.ZodDefault<z.ZodOptional<z.ZodNumber>>;
}, "strip", z.ZodTypeAny, {
    count: number;
    format: "json" | "csv" | "array";
    interval: string;
    trend: "up" | "down" | "stable" | "random";
    seasonality: boolean;
    noise: number;
    metrics?: string[] | undefined;
    schema?: Record<string, unknown> | undefined;
    seed?: string | number | undefined;
    constraints?: Record<string, unknown> | undefined;
    startDate?: string | Date | undefined;
    endDate?: string | Date | undefined;
}, {
    metrics?: string[] | undefined;
    count?: number | undefined;
    schema?: Record<string, unknown> | undefined;
    format?: "json" | "csv" | "array" | undefined;
    seed?: string | number | undefined;
    constraints?: Record<string, unknown> | undefined;
    startDate?: string | Date | undefined;
    endDate?: string | Date | undefined;
    interval?: string | undefined;
    trend?: "up" | "down" | "stable" | "random" | undefined;
    seasonality?: boolean | undefined;
    noise?: number | undefined;
}>;
/** Event-stream generator options. */
export interface EventOptions extends GeneratorOptions {
    eventTypes?: string[];
    distribution?: 'uniform' | 'poisson' | 'normal';
    timeRange?: {
        start: Date | string;
        end: Date | string;
    };
    userCount?: number;
}
/** Zod schema for EventOptions (GeneratorOptionsSchema extension). */
export declare const EventOptionsSchema: z.ZodObject<{
    count: z.ZodDefault<z.ZodOptional<z.ZodNumber>>;
    schema: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
    format: z.ZodDefault<z.ZodOptional<z.ZodEnum<["json", "csv", "array"]>>>;
    seed: z.ZodOptional<z.ZodUnion<[z.ZodString, z.ZodNumber]>>;
    constraints: z.ZodOptional<z.ZodRecord<z.ZodString, z.ZodUnknown>>;
} & {
    eventTypes: z.ZodOptional<z.ZodArray<z.ZodString, "many">>;
    distribution: z.ZodDefault<z.ZodOptional<z.ZodEnum<["uniform", "poisson", "normal"]>>>;
    timeRange: z.ZodOptional<z.ZodObject<{
        start: z.ZodUnion<[z.ZodDate, z.ZodString]>;
        end: z.ZodUnion<[z.ZodDate, z.ZodString]>;
    }, "strip", z.ZodTypeAny, {
        start: string | Date;
        end: string | Date;
    }, {
        start: string | Date;
        end: string | Date;
    }>>;
    userCount: z.ZodOptional<z.ZodNumber>;
}, "strip", z.ZodTypeAny, {
    count: number;
    format: "json" | "csv" | "array";
    distribution: "uniform" | "poisson" | "normal";
    schema?: Record<string, unknown> | undefined;
    seed?: string | number | undefined;
    constraints?: Record<string, unknown> | undefined;
    eventTypes?: string[] | undefined;
    timeRange?: {
        start: string | Date;
        end: string | Date;
    } | undefined;
    userCount?: number | undefined;
}, {
    count?: number | undefined;
    schema?: Record<string, unknown> | undefined;
    format?: "json" | "csv" | "array" | undefined;
    seed?: string | number | undefined;
    constraints?: Record<string, unknown> | undefined;
    eventTypes?: string[] | undefined;
    distribution?: "uniform" | "poisson" | "normal" | undefined;
    timeRange?: {
        start: string | Date;
        end: string | Date;
    } | undefined;
    userCount?: number | undefined;
}>;
/** Result envelope returned by generators. */
export interface GenerationResult<T = JsonValue> {
    data: T[];
    metadata: {
        count: number;
        generatedAt: Date;
        provider: ModelProvider;
        model: string;
        cached: boolean;
        duration: number;
    };
}
/** Base error carrying a machine-readable code and optional details. */
export declare class SynthError extends Error {
    code: string;
    details?: unknown | undefined;
    constructor(message: string, code: string, details?: unknown | undefined);
}
/** Validation failure (code VALIDATION_ERROR). */
export declare class ValidationError extends SynthError {
    constructor(message: string, details?: unknown);
}
/** Upstream provider call failure (code API_ERROR). */
export declare class APIError extends SynthError {
    constructor(message: string, details?: unknown);
}
/** Cache read/write failure (code CACHE_ERROR). */
export declare class CacheError extends SynthError {
    constructor(message: string, details?: unknown);
}
/** A routable provider/model pair with priority and capability tags. */
export interface ModelRoute {
    provider: ModelProvider;
    model: string;
    priority: number;
    capabilities: string[];
}
/** One chunk of a streamed generation. */
export interface StreamChunk<T = JsonValue> {
    type: 'data' | 'metadata' | 'error' | 'complete';
    data?: T;
    metadata?: Record<string, unknown>;
    error?: Error;
}
export type StreamCallback<T = JsonValue> = (chunk: StreamChunk<T>) => void | Promise<void>;
//# sourceMappingURL=types.d.ts.map
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/types.d.ts.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/types.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["types.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,OAAO,EAAE,CAAC,EAAE,MAAM,KAAK,CAAC;AAGxB,MAAM,MAAM,aAAa,GAAG,MAAM,GAAG,MAAM,GAAG,OAAO,GAAG,IAAI,CAAC;AAC7D,MAAM,MAAM,SAAS,GAAG,SAAS,EAAE,CAAC;AACpC,MAAM,MAAM,UAAU,GAAG;IAAE,CAAC,GAAG,EAAE,MAAM,GAAG,SAAS,CAAA;CAAE,CAAC;AACtD,MAAM,MAAM,SAAS,GAAG,aAAa,GAAG,SAAS,GAAG,UAAU,CAAC;AAG/D,MAAM,WAAW,WAAW;IAC1B,IAAI,EAAE,MAAM,CAAC;IACb,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,UAAU,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC;IACzC,KAAK,CAAC,EAAE,WAAW,CAAC;IACpB,IAAI,CAAC,EAAE,OAAO,EAAE,CAAC;IACjB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,OAAO,CAAC,EAAE,MAAM,CAAC;CAClB;AAED,MAAM,MAAM,UAAU,GAAG,MAAM,CAAC,MAAM,EAAE,WAAW,CAAC,CAAC;AACrD,MAAM,MAAM,eAAe,GAAG,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;AAGtD,eAAO,MAAM,mBAAmB,qCAAmC,CAAC;AACpE,MAAM,MAAM,aAAa,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,mBAAmB,CAAC,CAAC;AAEhE,eAAO,MAAM,mBAAmB,uCAAqC,CAAC;AACtE,MAAM,MAAM,aAAa,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,mBAAmB,CAAC,CAAC;AAEhE,eAAO,MAAM,cAAc,0EAOzB,CAAC;AACH,MAAM,MAAM,QAAQ,GAAG,CAAC,CAAC,KAAK,CAAC,OAAO,cAAc,CAAC,CAAC;AAGtD,MAAM,WAAW,WAAW;IAC1B,QAAQ,EAAE,aAAa,CAAC;IACxB,MAAM,CAAC,EAAE,MAAM,CAAC;IAChB,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,aAAa,CAAC,EAAE,aAAa,CAAC;IAC9B,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,UAAU,CAAC,EAAE,MAAM,CAAC;IACpB,OAAO,CAAC,EAAE,MAAM,CAAC;IACjB,SAAS,CAAC,EAAE,OAAO,CAAC;IACpB,UAAU,CAAC,EAAE,OAAO,CAAC;IACrB,QAAQ,CAAC,EAAE,OAAO,CAAC;IACnB,cAAc,CAAC,EAAE,OAAO,CAAC;IACzB,aAAa,CAAC,EAAE,aAAa,EAAE,CAAC;CACjC;AAED,eAAO,MAAM,iBAAiB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAa5B,CAAC;AAGH,MAAM,WAAW,gBAAgB;IAC/B,KAAK,CAAC,EAAE,MAAM,CAAC;IACf,MAAM,CAAC,EAAE,UAAU,CAAC;IACpB,MAAM,CAAC,EAAE,MAAM,GAAG,KAAK,GAAG,OAAO,CAAC;IAClC,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAAC;IACvB,WAAW,CAAC,EAAE,eAAe,CAAC;CAC/B;AAED,eAAO,MAAM,sBAAsB;;;;;;;;;;;;;;;;;;EAMjC,CAAC;AAGH,MAAM,WAAW,iBAAkB,SAAQ,gBAAgB;IACzD,SAAS,CAAC,EAAE,IAAI,GAAG,MAAM,CAAC;IAC1B,OAAO,CAAC,EAAE,IAAI,GAAG,MAAM,CAAC;IACxB,QAAQ,CAAC,EAAE,MAAM,CAAC;IAClB,OAAO,CA
AC,EAAE,MAAM,EAAE,CAAC;IACnB,KAAK,CAAC,EAAE,IAAI,GAAG,MAAM,GAAG,QAAQ,GAAG,QAAQ,CAAC;IAC5C,WAAW,CAAC,EAAE,OAAO,CAAC;IACtB,KAAK,CAAC,EAAE,MAAM,CAAC;CAChB;AAED,eAAO,MAAM,uBAAuB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAQlC,CAAC;AAGH,MAAM,WAAW,YAAa,SAAQ,gBAAgB;IACpD,UAAU,CAAC,EAAE,MAAM,EAAE,CAAC;IACtB,YAAY,CAAC,EAAE,SAAS,GAAG,SAAS,GAAG,QAAQ,CAAC;IAChD,SAAS,CAAC,EAAE;QACV,KAAK,EAAE,IAAI,GAAG,MAAM,CAAC;QACrB,GAAG,EAAE,IAAI,GAAG,MAAM,CAAC;KACpB,CAAC;IACF,SAAS,CAAC,EAAE,MAAM,CAAC;CACpB;AAED,eAAO,MAAM,kBAAkB;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;EAQ7B,CAAC;AAGH,MAAM,WAAW,gBAAgB,CAAC,CAAC,GAAG,SAAS;IAC7C,IAAI,EAAE,CAAC,EAAE,CAAC;IACV,QAAQ,EAAE;QACR,KAAK,EAAE,MAAM,CAAC;QACd,WAAW,EAAE,IAAI,CAAC;QAClB,QAAQ,EAAE,aAAa,CAAC;QACxB,KAAK,EAAE,MAAM,CAAC;QACd,MAAM,EAAE,OAAO,CAAC;QAChB,QAAQ,EAAE,MAAM,CAAC;KAClB,CAAC;CACH;AAGD,qBAAa,UAAW,SAAQ,KAAK;IAG1B,IAAI,EAAE,MAAM;IACZ,OAAO,CAAC,EAAE,OAAO;gBAFxB,OAAO,EAAE,MAAM,EACR,IAAI,EAAE,MAAM,EACZ,OAAO,CAAC,EAAE,OAAO,YAAA;CAK3B;AAED,qBAAa,eAAgB,SAAQ,UAAU;gBACjC,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,OAAO;CAI/C;AAED,qBAAa,QAAS,SAAQ,UAAU;gBAC1B,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,OAAO;CAI/C;AAED,qBAAa,UAAW,SAAQ,UAAU;gBAC5B,OAAO,EAAE,MAAM,EAAE,OAAO,CAAC,EAAE,OAAO;CAI/C;AAGD,MAAM,WAAW,UAAU;IACzB,QAAQ,EAAE,aAAa,CAAC;IACxB,KAAK,EAAE,MAAM,CAAC;IACd,QAAQ,EAAE,MAAM,CAAC;IACjB,YAAY,EAAE,MAAM,EAAE,CAAC;CACxB;AAGD,MAAM,WAAW,WAAW,CAAC,CAAC,GAAG,SAAS;IACxC,IAAI,EAAE,MAAM,GAAG,UAAU,GAAG,OAAO,GAAG,UAAU,CAAC;IACjD,IAAI,CAAC,EAAE,CAAC,CAAC;IACT,QAAQ,CAAC,EAAE,MAAM,CAAC,MAAM,EAAE,OAAO,CAAC,CAAC;IACnC,KAAK,CAAC,EAAE,KAAK,CAAC;CACf;AAED,MAAM,MAAM,cAAc,CAAC,CAAC,GAAG,SAAS,IAAI,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC,CAAC,KAAK,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAC"}
|
||||
89
vendor/ruvector/npm/packages/agentic-synth/src/types.js
vendored
Normal file
89
vendor/ruvector/npm/packages/agentic-synth/src/types.js
vendored
Normal file
@@ -0,0 +1,89 @@
|
||||
"use strict";
|
||||
/**
|
||||
* Core types and interfaces for agentic-synth
|
||||
*/
|
||||
Object.defineProperty(exports, "__esModule", { value: true });
|
||||
exports.CacheError = exports.APIError = exports.ValidationError = exports.SynthError = exports.EventOptionsSchema = exports.TimeSeriesOptionsSchema = exports.GeneratorOptionsSchema = exports.SynthConfigSchema = exports.DataTypeSchema = exports.CacheStrategySchema = exports.ModelProviderSchema = void 0;
|
||||
const zod_1 = require("zod");
|
||||
// Configuration schemas
|
||||
exports.ModelProviderSchema = zod_1.z.enum(['gemini', 'openrouter']);
|
||||
exports.CacheStrategySchema = zod_1.z.enum(['none', 'memory', 'disk']);
|
||||
exports.DataTypeSchema = zod_1.z.enum([
|
||||
'timeseries',
|
||||
'events',
|
||||
'structured',
|
||||
'text',
|
||||
'json',
|
||||
'csv'
|
||||
]);
|
||||
exports.SynthConfigSchema = zod_1.z.object({
|
||||
provider: exports.ModelProviderSchema,
|
||||
apiKey: zod_1.z.string().optional(),
|
||||
model: zod_1.z.string().optional(),
|
||||
cacheStrategy: exports.CacheStrategySchema.optional().default('memory'),
|
||||
cacheTTL: zod_1.z.number().optional().default(3600),
|
||||
maxRetries: zod_1.z.number().optional().default(3),
|
||||
timeout: zod_1.z.number().optional().default(30000),
|
||||
streaming: zod_1.z.boolean().optional().default(false),
|
||||
automation: zod_1.z.boolean().optional().default(false),
|
||||
vectorDB: zod_1.z.boolean().optional().default(false),
|
||||
enableFallback: zod_1.z.boolean().optional().default(true),
|
||||
fallbackChain: zod_1.z.array(exports.ModelProviderSchema).optional()
|
||||
});
|
||||
exports.GeneratorOptionsSchema = zod_1.z.object({
|
||||
count: zod_1.z.number().optional().default(1),
|
||||
schema: zod_1.z.record(zod_1.z.string(), zod_1.z.unknown()).optional(),
|
||||
format: zod_1.z.enum(['json', 'csv', 'array']).optional().default('json'),
|
||||
seed: zod_1.z.union([zod_1.z.string(), zod_1.z.number()]).optional(),
|
||||
constraints: zod_1.z.record(zod_1.z.string(), zod_1.z.unknown()).optional()
|
||||
});
|
||||
exports.TimeSeriesOptionsSchema = exports.GeneratorOptionsSchema.extend({
|
||||
startDate: zod_1.z.union([zod_1.z.date(), zod_1.z.string()]).optional(),
|
||||
endDate: zod_1.z.union([zod_1.z.date(), zod_1.z.string()]).optional(),
|
||||
interval: zod_1.z.string().optional().default('1h'),
|
||||
metrics: zod_1.z.array(zod_1.z.string()).optional(),
|
||||
trend: zod_1.z.enum(['up', 'down', 'stable', 'random']).optional().default('stable'),
|
||||
seasonality: zod_1.z.boolean().optional().default(false),
|
||||
noise: zod_1.z.number().min(0).max(1).optional().default(0.1)
|
||||
});
|
||||
exports.EventOptionsSchema = exports.GeneratorOptionsSchema.extend({
|
||||
eventTypes: zod_1.z.array(zod_1.z.string()).optional(),
|
||||
distribution: zod_1.z.enum(['uniform', 'poisson', 'normal']).optional().default('uniform'),
|
||||
timeRange: zod_1.z.object({
|
||||
start: zod_1.z.union([zod_1.z.date(), zod_1.z.string()]),
|
||||
end: zod_1.z.union([zod_1.z.date(), zod_1.z.string()])
|
||||
}).optional(),
|
||||
userCount: zod_1.z.number().optional()
|
||||
});
|
||||
// Error types
/**
 * Base error for agentic-synth. Carries a machine-readable `code` and
 * optional structured `details` in addition to the standard Error message.
 */
class SynthError extends Error {
    constructor(message, code, details) {
        super(message);
        // Own-property order matches the original compiled output: code, details, name.
        Object.assign(this, { code, details, name: 'SynthError' });
    }
}
|
||||
// CommonJS export of the base error class (compiled from types.ts).
exports.SynthError = SynthError;
|
||||
/**
 * Error raised when input fails schema validation; `code` is always
 * 'VALIDATION_ERROR'.
 */
class ValidationError extends SynthError {
    constructor(msg, info) {
        super(msg, 'VALIDATION_ERROR', info);
        this.name = 'ValidationError';
    }
}
exports.ValidationError = ValidationError;
|
||||
/**
 * Error raised when an upstream provider/API call fails; `code` is always
 * 'API_ERROR'.
 */
class APIError extends SynthError {
    constructor(msg, info) {
        super(msg, 'API_ERROR', info);
        this.name = 'APIError';
    }
}
exports.APIError = APIError;
|
||||
/**
 * Error raised by cache operations; `code` is always 'CACHE_ERROR'.
 */
class CacheError extends SynthError {
    constructor(msg, info) {
        super(msg, 'CACHE_ERROR', info);
        this.name = 'CacheError';
    }
}
exports.CacheError = CacheError;
|
||||
//# sourceMappingURL=types.js.map
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/types.js.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/types.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"types.js","sourceRoot":"","sources":["types.ts"],"names":[],"mappings":";AAAA;;GAEG;;;AAEH,6BAAwB;AAuBxB,wBAAwB;AACX,QAAA,mBAAmB,GAAG,OAAC,CAAC,IAAI,CAAC,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAC,CAAC;AAGvD,QAAA,mBAAmB,GAAG,OAAC,CAAC,IAAI,CAAC,CAAC,MAAM,EAAE,QAAQ,EAAE,MAAM,CAAC,CAAC,CAAC;AAGzD,QAAA,cAAc,GAAG,OAAC,CAAC,IAAI,CAAC;IACnC,YAAY;IACZ,QAAQ;IACR,YAAY;IACZ,MAAM;IACN,MAAM;IACN,KAAK;CACN,CAAC,CAAC;AAmBU,QAAA,iBAAiB,GAAG,OAAC,CAAC,MAAM,CAAC;IACxC,QAAQ,EAAE,2BAAmB;IAC7B,MAAM,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE;IAC7B,KAAK,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE;IAC5B,aAAa,EAAE,2BAAmB,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,QAAQ,CAAC;IAC/D,QAAQ,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC;IAC7C,UAAU,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC;IAC5C,OAAO,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC;IAC7C,SAAS,EAAE,OAAC,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC;IAChD,UAAU,EAAE,OAAC,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC;IACjD,QAAQ,EAAE,OAAC,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC;IAC/C,cAAc,EAAE,OAAC,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC;IACpD,aAAa,EAAE,OAAC,CAAC,KAAK,CAAC,2BAAmB,CAAC,CAAC,QAAQ,EAAE;CACvD,CAAC,CAAC;AAWU,QAAA,sBAAsB,GAAG,OAAC,CAAC,MAAM,CAAC;IAC7C,KAAK,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC;IACvC,MAAM,EAAE,OAAC,CAAC,MAAM,CAAC,OAAC,CAAC,MAAM,EAAE,EAAE,OAAC,CAAC,OAAO,EAAE,CAAC,CAAC,QAAQ,EAAE;IACpD,MAAM,EAAE,OAAC,CAAC,IAAI,CAAC,CAAC,MAAM,EAAE,KAAK,EAAE,OAAO,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC;IACnE,IAAI,EAAE,OAAC,CAAC,KAAK,CAAC,CAAC,OAAC,CAAC,MAAM,EAAE,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,QAAQ,EAAE;IAClD,WAAW,EAAE,OAAC,CAAC,MAAM,CAAC,OAAC,CAAC,MAAM,EAAE,EAAE,OAAC,CAAC,OAAO,EAAE,CAAC,CAAC,QAAQ,EAAE;CAC1D,CAAC,CAAC;AAaU,QAAA,uBAAuB,GAAG,8BAAsB,CAAC,MAAM,CAAC;IACnE,SAAS,EAAE,OAAC,CAAC,KAAK,CAAC,CAAC,OAAC,CAAC,IAAI,EAAE,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,QAAQ,EAAE;IACrD,OAAO,EAAE,OAAC,CAAC
,KAAK,CAAC,CAAC,OAAC,CAAC,IAAI,EAAE,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,CAAC,CAAC,QAAQ,EAAE;IACnD,QAAQ,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC;IAC7C,OAAO,EAAE,OAAC,CAAC,KAAK,CAAC,OAAC,CAAC,MAAM,EAAE,CAAC,CAAC,QAAQ,EAAE;IACvC,KAAK,EAAE,OAAC,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,QAAQ,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,QAAQ,CAAC;IAC9E,WAAW,EAAE,OAAC,CAAC,OAAO,EAAE,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC;IAClD,KAAK,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC;CACxD,CAAC,CAAC;AAaU,QAAA,kBAAkB,GAAG,8BAAsB,CAAC,MAAM,CAAC;IAC9D,UAAU,EAAE,OAAC,CAAC,KAAK,CAAC,OAAC,CAAC,MAAM,EAAE,CAAC,CAAC,QAAQ,EAAE;IAC1C,YAAY,EAAE,OAAC,CAAC,IAAI,CAAC,CAAC,SAAS,EAAE,SAAS,EAAE,QAAQ,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,OAAO,CAAC,SAAS,CAAC;IACpF,SAAS,EAAE,OAAC,CAAC,MAAM,CAAC;QAClB,KAAK,EAAE,OAAC,CAAC,KAAK,CAAC,CAAC,OAAC,CAAC,IAAI,EAAE,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,CAAC;QACtC,GAAG,EAAE,OAAC,CAAC,KAAK,CAAC,CAAC,OAAC,CAAC,IAAI,EAAE,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,CAAC;KACrC,CAAC,CAAC,QAAQ,EAAE;IACb,SAAS,EAAE,OAAC,CAAC,MAAM,EAAE,CAAC,QAAQ,EAAE;CACjC,CAAC,CAAC;AAeH,cAAc;AACd,MAAa,UAAW,SAAQ,KAAK;IACnC,YACE,OAAe,EACR,IAAY,EACZ,OAAiB;QAExB,KAAK,CAAC,OAAO,CAAC,CAAC;QAHR,SAAI,GAAJ,IAAI,CAAQ;QACZ,YAAO,GAAP,OAAO,CAAU;QAGxB,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF;AATD,gCASC;AAED,MAAa,eAAgB,SAAQ,UAAU;IAC7C,YAAY,OAAe,EAAE,OAAiB;QAC5C,KAAK,CAAC,OAAO,EAAE,kBAAkB,EAAE,OAAO,CAAC,CAAC;QAC5C,IAAI,CAAC,IAAI,GAAG,iBAAiB,CAAC;IAChC,CAAC;CACF;AALD,0CAKC;AAED,MAAa,QAAS,SAAQ,UAAU;IACtC,YAAY,OAAe,EAAE,OAAiB;QAC5C,KAAK,CAAC,OAAO,EAAE,WAAW,EAAE,OAAO,CAAC,CAAC;QACrC,IAAI,CAAC,IAAI,GAAG,UAAU,CAAC;IACzB,CAAC;CACF;AALD,4BAKC;AAED,MAAa,UAAW,SAAQ,UAAU;IACxC,YAAY,OAAe,EAAE,OAAiB;QAC5C,KAAK,CAAC,OAAO,EAAE,aAAa,EAAE,OAAO,CAAC,CAAC;QACvC,IAAI,CAAC,IAAI,GAAG,YAAY,CAAC;IAC3B,CAAC;CACF;AALD,gCAKC"}
|
||||
197
vendor/ruvector/npm/packages/agentic-synth/src/types.ts
vendored
Normal file
197
vendor/ruvector/npm/packages/agentic-synth/src/types.ts
vendored
Normal file
@@ -0,0 +1,197 @@
|
||||
/**
|
||||
* Core types and interfaces for agentic-synth
|
||||
*/
|
||||
|
||||
import { z } from 'zod';
|
||||
|
||||
// JSON types
// Recursive structural types for arbitrary JSON values; JsonValue is the
// default payload type for generated data throughout this module.
export type JsonPrimitive = string | number | boolean | null;
export type JsonArray = JsonValue[];
export type JsonObject = { [key: string]: JsonValue };
export type JsonValue = JsonPrimitive | JsonArray | JsonObject;
|
||||
|
||||
// Schema types
/**
 * JSON-Schema-like description of a single field, used to shape generated
 * records.
 */
export interface SchemaField {
  type: string; // e.g. 'string', 'number', 'object', 'array' — confirm accepted values with generators
  required?: boolean;
  properties?: Record<string, SchemaField>; // nested fields (object types)
  items?: SchemaField; // element shape (array types)
  enum?: unknown[]; // allowed literal values
  minimum?: number; // numeric lower bound
  maximum?: number; // numeric upper bound
  pattern?: string; // presumably a regex for string values — confirm
}

/** A named collection of fields describing one record. */
export type DataSchema = Record<string, SchemaField>;
/** Free-form, generator-specific constraints. */
export type DataConstraints = Record<string, unknown>;
|
||||
|
||||
// Configuration schemas
/** LLM backends that generation requests can be routed to. */
export const ModelProviderSchema = z.enum(['gemini', 'openrouter']);
export type ModelProvider = z.infer<typeof ModelProviderSchema>;

/** Where generated results may be cached. */
export const CacheStrategySchema = z.enum(['none', 'memory', 'disk']);
export type CacheStrategy = z.infer<typeof CacheStrategySchema>;

/** Kinds of synthetic data the generators can produce. */
export const DataTypeSchema = z.enum([
  'timeseries',
  'events',
  'structured',
  'text',
  'json',
  'csv'
]);
export type DataType = z.infer<typeof DataTypeSchema>;
|
||||
|
||||
// Configuration interface
/**
 * Static configuration for a synth client. Optional fields fall back to the
 * defaults declared in SynthConfigSchema below.
 */
export interface SynthConfig {
  provider: ModelProvider; // primary model backend
  apiKey?: string;
  model?: string; // provider-specific model identifier
  cacheStrategy?: CacheStrategy; // default: 'memory'
  cacheTTL?: number; // default: 3600 — presumably seconds, confirm
  maxRetries?: number; // default: 3
  timeout?: number; // default: 30000 — presumably milliseconds, confirm
  streaming?: boolean; // default: false
  automation?: boolean; // default: false
  vectorDB?: boolean; // default: false
  enableFallback?: boolean; // default: true
  fallbackChain?: ModelProvider[]; // providers to try when the primary fails
}

/** Runtime validator/normalizer for SynthConfig; applies the defaults above. */
export const SynthConfigSchema = z.object({
  provider: ModelProviderSchema,
  apiKey: z.string().optional(),
  model: z.string().optional(),
  cacheStrategy: CacheStrategySchema.optional().default('memory'),
  cacheTTL: z.number().optional().default(3600),
  maxRetries: z.number().optional().default(3),
  timeout: z.number().optional().default(30000),
  streaming: z.boolean().optional().default(false),
  automation: z.boolean().optional().default(false),
  vectorDB: z.boolean().optional().default(false),
  enableFallback: z.boolean().optional().default(true),
  fallbackChain: z.array(ModelProviderSchema).optional()
});
|
||||
|
||||
// Generator options
/**
 * Options common to all data generators. Defaults are applied by
 * GeneratorOptionsSchema below.
 */
export interface GeneratorOptions {
  count?: number; // number of records to generate; default: 1
  schema?: DataSchema; // shape of each generated record
  format?: 'json' | 'csv' | 'array'; // output encoding; default: 'json'
  seed?: string | number; // presumably for reproducible output — confirm
  constraints?: DataConstraints;
}

/** Runtime validator for GeneratorOptions. */
export const GeneratorOptionsSchema = z.object({
  count: z.number().optional().default(1),
  schema: z.record(z.string(), z.unknown()).optional(),
  format: z.enum(['json', 'csv', 'array']).optional().default('json'),
  seed: z.union([z.string(), z.number()]).optional(),
  constraints: z.record(z.string(), z.unknown()).optional()
});
|
||||
|
||||
// Time series specific options
/**
 * Options for time-series generation; extends the common generator options
 * with a date range, sampling interval, trend shape, and noise level.
 */
export interface TimeSeriesOptions extends GeneratorOptions {
  startDate?: Date | string;
  endDate?: Date | string;
  interval?: string; // e.g., '1h', '1d', '5m'
  metrics?: string[]; // names of the series to generate
  trend?: 'up' | 'down' | 'stable' | 'random'; // default: 'stable'
  seasonality?: boolean; // default: false
  noise?: number; // 0-1; default: 0.1
}

/** Runtime validator for TimeSeriesOptions. */
export const TimeSeriesOptionsSchema = GeneratorOptionsSchema.extend({
  startDate: z.union([z.date(), z.string()]).optional(),
  endDate: z.union([z.date(), z.string()]).optional(),
  interval: z.string().optional().default('1h'),
  metrics: z.array(z.string()).optional(),
  trend: z.enum(['up', 'down', 'stable', 'random']).optional().default('stable'),
  seasonality: z.boolean().optional().default(false),
  noise: z.number().min(0).max(1).optional().default(0.1)
});
|
||||
|
||||
// Event specific options
/**
 * Options for event-stream generation; extends the common generator options
 * with event types, a statistical distribution, and a time window.
 */
export interface EventOptions extends GeneratorOptions {
  eventTypes?: string[];
  distribution?: 'uniform' | 'poisson' | 'normal'; // default: 'uniform'
  timeRange?: {
    start: Date | string;
    end: Date | string;
  };
  userCount?: number; // presumably number of distinct synthetic users — confirm
}

/** Runtime validator for EventOptions. */
export const EventOptionsSchema = GeneratorOptionsSchema.extend({
  eventTypes: z.array(z.string()).optional(),
  distribution: z.enum(['uniform', 'poisson', 'normal']).optional().default('uniform'),
  timeRange: z.object({
    start: z.union([z.date(), z.string()]),
    end: z.union([z.date(), z.string()])
  }).optional(),
  userCount: z.number().optional()
});
|
||||
|
||||
// Generation result
/**
 * Envelope returned by a generation call: the generated records plus
 * provenance metadata about how they were produced.
 */
export interface GenerationResult<T = JsonValue> {
  data: T[];
  metadata: {
    count: number;
    generatedAt: Date;
    provider: ModelProvider;
    model: string;
    cached: boolean; // true when served from cache
    duration: number; // presumably milliseconds — confirm against implementation
  };
}
|
||||
|
||||
// Error types
|
||||
export class SynthError extends Error {
|
||||
constructor(
|
||||
message: string,
|
||||
public code: string,
|
||||
public details?: unknown
|
||||
) {
|
||||
super(message);
|
||||
this.name = 'SynthError';
|
||||
}
|
||||
}
|
||||
|
||||
export class ValidationError extends SynthError {
|
||||
constructor(message: string, details?: unknown) {
|
||||
super(message, 'VALIDATION_ERROR', details);
|
||||
this.name = 'ValidationError';
|
||||
}
|
||||
}
|
||||
|
||||
export class APIError extends SynthError {
|
||||
constructor(message: string, details?: unknown) {
|
||||
super(message, 'API_ERROR', details);
|
||||
this.name = 'APIError';
|
||||
}
|
||||
}
|
||||
|
||||
export class CacheError extends SynthError {
|
||||
constructor(message: string, details?: unknown) {
|
||||
super(message, 'CACHE_ERROR', details);
|
||||
this.name = 'CacheError';
|
||||
}
|
||||
}
|
||||
|
||||
// Model routing
/** One entry in the model-routing table: which model to use and when. */
export interface ModelRoute {
  provider: ModelProvider;
  model: string;
  priority: number; // presumably determines routing order — confirm direction against the router
  capabilities: string[];
}
|
||||
|
||||
// Streaming types
/**
 * One unit of a streamed generation: a data payload, metadata, an error,
 * or a completion marker, discriminated by `type`.
 */
export interface StreamChunk<T = JsonValue> {
  type: 'data' | 'metadata' | 'error' | 'complete';
  data?: T; // typically set when type === 'data'
  metadata?: Record<string, unknown>; // typically set when type === 'metadata'
  error?: Error; // typically set when type === 'error'
}

/** Consumer callback invoked once per StreamChunk; may be async. */
export type StreamCallback<T = JsonValue> = (chunk: StreamChunk<T>) => void | Promise<void>;
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/types/index.d.ts.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/types/index.d.ts.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":"AAAA;;GAEG;AAEH,MAAM,WAAW,gBAAgB;IAC/B,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,MAAM,CAAC;IACnB,SAAS,EAAE,MAAM,CAAC;IAClB,MAAM,EAAE,OAAO,CAAC;IAChB,SAAS,EAAE,MAAM,CAAC;IAClB,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,qBAAqB;IACpC,OAAO,EAAE,gBAAgB,EAAE,CAAC;IAC5B,WAAW,EAAE,MAAM,CAAC;IACpB,YAAY,EAAE,MAAM,CAAC;IACrB,YAAY,EAAE,MAAM,CAAC;IACrB,eAAe,EAAE,MAAM,CAAC;CACzB;AAED,MAAM,WAAW,eAAe;IAC9B,MAAM,EAAE,MAAM,EAAE,CAAC;IACjB,WAAW,EAAE,MAAM,CAAC;IACpB,WAAW,EAAE,MAAM,CAAC;IACpB,gBAAgB,EAAE,MAAM,CAAC;IACzB,mBAAmB,EAAE,MAAM,CAAC;CAC7B;AAED,MAAM,WAAW,YAAY;IAC3B,GAAG,EAAE,OAAO,CAAC;IACb,GAAG,EAAE,MAAM,CAAC;IACZ,IAAI,CAAC,EAAE,MAAM,CAAC;IACd,GAAG,CAAC,EAAE,MAAM,CAAC;CACd;AAED,MAAM,WAAW,kBAAkB;IACjC,UAAU,EAAE,MAAM,CAAC;IACnB,YAAY,EAAE,MAAM,CAAC;IACrB,YAAY,EAAE,MAAM,CAAC;IACrB,YAAY,EAAE,MAAM,CAAC;IACrB,YAAY,EAAE,MAAM,CAAC;IACrB,YAAY,EAAE,MAAM,CAAC;IACrB,aAAa,EAAE,MAAM,CAAC;IACtB,eAAe,EAAE,MAAM,CAAC;IACxB,kBAAkB,EAAE,MAAM,CAAC;IAC3B,SAAS,EAAE,MAAM,CAAC;CACnB;AAED,MAAM,WAAW,0BAA0B;IACzC,QAAQ,EAAE,OAAO,GAAG,SAAS,GAAG,QAAQ,GAAG,aAAa,GAAG,aAAa,CAAC;IACzE,QAAQ,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,GAAG,UAAU,CAAC;IACjD,KAAK,EAAE,MAAM,CAAC;IACd,cAAc,EAAE,MAAM,CAAC;IACvB,oBAAoB,EAAE,MAAM,CAAC;IAC7B,oBAAoB,EAAE,KAAK,GAAG,QAAQ,GAAG,MAAM,CAAC;CACjD;AAED,MAAM,WAAW,eAAe;IAC9B,IAAI,EAAE,MAAM,CAAC;IACb,UAAU,EAAE,MAAM,CAAC;IACnB,WAAW,EAAE,MAAM,CAAC;IACpB,gBAAgB,EAAE,MAAM,CAAC;IACzB,OAAO,EAAE,MAAM,CAAC;IAChB,UAAU,CAAC,EAAE,MAAM,CAAC;CACrB;AAED,MAAM,WAAW,eAAe;IAC9B,MAAM,EAAE,eAAe,CAAC;IACxB,OAAO,EAAE,kBAAkB,CAAC;IAC5B,eAAe,EAAE,0BAA0B,EAAE,CAAC;IAC9C,SAAS,EAAE,MAAM,CAAC;IAClB,QAAQ,EAAE,MAAM,CAAC;IACjB,OAAO,EAAE,OAAO,CAAC;CAClB"}
|
||||
1
vendor/ruvector/npm/packages/agentic-synth/src/types/index.js.map
vendored
Normal file
1
vendor/ruvector/npm/packages/agentic-synth/src/types/index.js.map
vendored
Normal file
@@ -0,0 +1 @@
|
||||
{"version":3,"file":"index.js","sourceRoot":"","sources":["index.ts"],"names":[],"mappings":";AAAA;;GAEG"}
|
||||
75
vendor/ruvector/npm/packages/agentic-synth/src/types/index.ts
vendored
Normal file
75
vendor/ruvector/npm/packages/agentic-synth/src/types/index.ts
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
/**
|
||||
* Core type definitions for agentic-synth
|
||||
*/
|
||||
|
||||
/**
 * Result of a single generation call.
 * NOTE(review): this shape differs from the GenerationResult declared in
 * src/types.ts — confirm which one callers are meant to import.
 */
export interface GenerationResult {
  data: string;
  tokensUsed: number;
  latencyMs: number;
  cached: boolean;
  modelUsed: string;
  timestamp: number; // presumably epoch milliseconds — confirm
}

/** Aggregate statistics over a batch of generation calls. */
export interface BatchGenerationResult {
  results: GenerationResult[];
  totalTokens: number;
  avgLatencyMs: number;
  cacheHitRate: number; // presumably a 0-1 ratio — confirm
  totalDurationMs: number;
}

/** Result of a streamed generation run. */
export interface StreamingResult {
  chunks: string[];
  totalChunks: number;
  totalTokens: number;
  streamDurationMs: number;
  firstChunkLatencyMs: number; // time until the first chunk arrived
}

/** Outcome of a cache lookup. */
export interface CachedResult {
  hit: boolean;
  key: string;
  data?: string; // present on a hit
  ttl?: number; // remaining time-to-live; units unconfirmed
}
|
||||
|
||||
/** Snapshot of runtime performance counters collected during a benchmark. */
export interface PerformanceMetrics {
  throughput: number; // requests per second
  p50LatencyMs: number;
  p95LatencyMs: number;
  p99LatencyMs: number;
  avgLatencyMs: number;
  cacheHitRate: number; // presumably a 0-1 ratio — confirm
  memoryUsageMB: number;
  cpuUsagePercent: number;
  concurrentRequests: number;
  errorRate: number; // presumably a 0-1 ratio — confirm
}

/** A single tuning suggestion derived from benchmark metrics. */
export interface OptimizationRecommendation {
  category: 'cache' | 'routing' | 'memory' | 'concurrency' | 'compilation';
  severity: 'low' | 'medium' | 'high' | 'critical';
  issue: string; // what was observed
  recommendation: string; // what to change
  estimatedImprovement: string; // human-readable estimate
  implementationEffort: 'low' | 'medium' | 'high';
}

/** Parameters controlling a benchmark run. */
export interface BenchmarkConfig {
  name: string;
  iterations: number;
  concurrency: number;
  warmupIterations: number; // iterations executed before measurement begins
  timeout: number; // presumably milliseconds — confirm
  outputPath?: string; // where to write results, if anywhere
}

/** Full outcome of one benchmark run. */
export interface BenchmarkResult {
  config: BenchmarkConfig;
  metrics: PerformanceMetrics;
  recommendations: OptimizationRecommendation[];
  timestamp: number;
  duration: number;
  success: boolean;
}
|
||||
Reference in New Issue
Block a user