Merge commit 'd803bfe2b1fe7f5e219e50ac20d6801a0a58ac75' as 'vendor/ruvector'
This commit is contained in:
506
vendor/ruvector/npm/packages/ruvbot/scripts/build-rvf.js
vendored
Normal file
506
vendor/ruvector/npm/packages/ruvbot/scripts/build-rvf.js
vendored
Normal file
@@ -0,0 +1,506 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* build-rvf.js — Assemble RuvBot as a self-contained .rvf file.
|
||||
*
|
||||
* The output contains:
|
||||
* KERNEL_SEG (0x0E) — Real Linux 6.6 microkernel (bzImage, x86_64)
|
||||
* WASM_SEG (0x10) — RuvBot runtime bundle (Node.js application)
|
||||
* META_SEG (0x07) — Package metadata (name, version, config)
|
||||
* PROFILE_SEG (0x0B) — AI assistant domain profile
|
||||
* WITNESS_SEG (0x0A) — Build provenance chain
|
||||
* MANIFEST_SEG(0x05) — Segment directory + epoch
|
||||
*
|
||||
* Usage:
|
||||
* node scripts/build-rvf.js --kernel /path/to/bzImage [--output ruvbot.rvf]
|
||||
*
|
||||
* The --kernel flag provides a real Linux bzImage to embed. If omitted,
|
||||
* the script looks for kernel/bzImage relative to the package root.
|
||||
*/
|
||||
|
||||
'use strict';

// Node built-ins used throughout this build script.
// NOTE(review): execSync is imported but never called in the visible code —
// confirm before removing.
const { writeFileSync, readFileSync, existsSync, readdirSync, statSync, mkdirSync } = require('fs');
const { join, resolve, isAbsolute } = require('path');
const { createHash } = require('crypto');
const { execSync } = require('child_process');
const { gzipSync } = require('zlib');

// ─── RVF format constants ───────────────────────────────────────────────────
// Magic numbers are four ASCII bytes interpreted big-endian.
const SEGMENT_MAGIC = 0x5256_4653; // "RVFS" big-endian
const SEGMENT_VERSION = 1;
const KERNEL_MAGIC = 0x5256_4B4E; // "RVKN" big-endian
const WASM_MAGIC = 0x5256_574D; // "RVWM" big-endian

// Segment type discriminators (one byte in each 64-byte segment header)
const SEG_MANIFEST = 0x05;
const SEG_META = 0x07;
const SEG_WITNESS = 0x0A;
const SEG_PROFILE = 0x0B;
const SEG_KERNEL = 0x0E;
const SEG_WASM = 0x10;

// Kernel constants — capability bits OR'd into the kernel header flags field.
// NOTE(review): KERNEL_FLAG_HAS_VSOCK is declared but never set by
// makeKernelHeader — confirm whether it should be.
const KERNEL_ARCH_X86_64 = 0x00;
const KERNEL_TYPE_MICROLINUX = 0x01;
const KERNEL_FLAG_HAS_NETWORKING = 1 << 3;
const KERNEL_FLAG_HAS_QUERY_API = 1 << 4;
const KERNEL_FLAG_HAS_ADMIN_API = 1 << 6;
const KERNEL_FLAG_RELOCATABLE = 1 << 11;
const KERNEL_FLAG_HAS_VIRTIO_NET = 1 << 12;
const KERNEL_FLAG_HAS_VIRTIO_BLK = 1 << 13;
const KERNEL_FLAG_HAS_VSOCK = 1 << 14;
const KERNEL_FLAG_COMPRESSED = 1 << 10;

// WASM constants — role/target discriminators for the RVWM header.
const WASM_ROLE_COMBINED = 0x02;
const WASM_TARGET_NODE = 0x01;
|
||||
|
||||
// ─── Binary helpers ─────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Write one unsigned byte at `offset`, truncating `val` to 8 bits.
 * @returns {number} the offset just past the written byte
 */
function writeU8(buf, offset, val) {
  buf.writeUInt8(val & 0xFF, offset);
  return offset + 1;
}
|
||||
|
||||
/**
 * Write an unsigned 16-bit little-endian value at `offset`.
 *
 * Masks `val` to 16 bits so out-of-range values wrap instead of throwing —
 * consistent with writeU8 (`val & 0xFF`) and writeU32LE (`val >>> 0`);
 * previously Buffer#writeUInt16LE threw ERR_OUT_OF_RANGE on val > 0xFFFF.
 *
 * @param {Buffer} buf - destination buffer
 * @param {number} offset - byte offset to write at
 * @param {number} val - value; only the low 16 bits are stored
 * @returns {number} offset advanced past the field (offset + 2)
 */
function writeU16LE(buf, offset, val) {
  buf.writeUInt16LE(val & 0xFFFF, offset);
  return offset + 2;
}
|
||||
|
||||
/**
 * Write an unsigned 32-bit little-endian value at `offset`.
 * `>>> 0` coerces `val` into the unsigned 32-bit range first.
 * @returns {number} the offset just past the written field
 */
function writeU32LE(buf, offset, val) {
  const u32 = val >>> 0;
  buf.writeUInt32LE(u32, offset);
  return offset + 4;
}
|
||||
|
||||
/**
 * Write an unsigned 64-bit little-endian value at `offset`.
 *
 * Generalized to accept either a Number or a BigInt. Number inputs above
 * Number.MAX_SAFE_INTEGER (2^53 - 1) cannot be represented exactly as
 * doubles, so callers with genuine 64-bit values should pass a BigInt.
 * Fractional Numbers are floored (original behavior preserved).
 *
 * @param {Buffer} buf - destination buffer
 * @param {number} offset - byte offset to write at
 * @param {number|bigint} val - non-negative value to store
 * @returns {number} offset advanced past the field (offset + 8)
 */
function writeU64LE(buf, offset, val) {
  const big = typeof val === 'bigint' ? val : BigInt(Math.floor(val));
  buf.writeBigUInt64LE(big, offset);
  return offset + 8;
}
|
||||
|
||||
/**
 * Truncated content hash: the first 16 bytes of the payload's SHA-256 digest.
 * @param {Buffer|string} payload - bytes to hash
 * @returns {Buffer} 16-byte hash prefix
 */
function contentHash16(payload) {
  const digest = createHash('sha256').update(payload).digest();
  return digest.subarray(0, 16);
}
|
||||
|
||||
// ─── Segment header writer (64 bytes) ───────────────────────────────────────
|
||||
|
||||
/**
 * Build the 64-byte RVFS segment header that precedes every payload.
 *
 * Layout: 0x00 u32 magic, 0x04 u8 version, 0x05 u8 seg_type, 0x06 u16 flags,
 * 0x08 u64 seg_id, 0x10 u64 payload_len, 0x18 u64 timestamp_ns,
 * 0x20 u8 checksum_algo, 0x21 u8 compression, 0x22..0x27 reserved,
 * 0x28 16-byte content hash, 0x38 u32 uncompressed_len, 0x3C u32 pad.
 *
 * @param {number} segType - SEG_* discriminator byte
 * @param {number} segId - sequentially assigned segment id
 * @param {number} payloadLength - payload byte count (callers pass payload.length;
 *   kept as an explicit parameter for interface stability)
 * @param {Buffer} payload - payload bytes, hashed into content_hash
 * @returns {Buffer} 64-byte header
 */
function makeSegmentHeader(segType, segId, payloadLength, payload) {
  const buf = Buffer.alloc(64);
  writeU32LE(buf, 0x00, SEGMENT_MAGIC);
  writeU8(buf, 0x04, SEGMENT_VERSION);
  writeU8(buf, 0x05, segType);
  writeU16LE(buf, 0x06, 0); // flags
  writeU64LE(buf, 0x08, segId);
  writeU64LE(buf, 0x10, payloadLength);
  // BUGFIX: Date.now() * 1e6 (~1.7e18) exceeds Number.MAX_SAFE_INTEGER
  // (~9.0e15), so the double multiply silently corrupted the low digits of
  // timestamp_ns. Scale in BigInt space instead.
  buf.writeBigUInt64LE(BigInt(Date.now()) * 1_000_000n, 0x18); // timestamp_ns
  writeU8(buf, 0x20, 0); // checksum_algo — 0 written; original comment said CRC32C, confirm enum value
  writeU8(buf, 0x21, 0); // compression
  writeU16LE(buf, 0x22, 0); // reserved_0
  writeU32LE(buf, 0x24, 0); // reserved_1
  contentHash16(payload).copy(buf, 0x28, 0, 16); // content_hash (truncated SHA-256)
  writeU32LE(buf, 0x38, 0); // uncompressed_len
  writeU32LE(buf, 0x3C, 0); // alignment_pad
  return buf;
}
|
||||
|
||||
// ─── Kernel header (128 bytes) ──────────────────────────────────────────────
|
||||
|
||||
/**
 * Build the 128-byte RVKN kernel header.
 *
 * The caller appends the (possibly compressed) kernel image and the cmdline
 * string after this header, and fills image_hash (0x30, 32 bytes) and
 * build_id (0x50, 16 bytes) in place.
 *
 * @param {number} imageSize - uncompressed kernel size in bytes
 * @param {number} compressedSize - gzip'd kernel size in bytes
 * @param {number} cmdlineLen - kernel command line length in bytes
 * @param {boolean} isCompressed - whether the embedded image is gzip'd
 * @returns {Buffer} 128-byte header
 */
function makeKernelHeader(imageSize, compressedSize, cmdlineLen, isCompressed) {
  const buf = Buffer.alloc(128);
  writeU32LE(buf, 0x00, KERNEL_MAGIC);
  writeU16LE(buf, 0x04, 1); // header_version
  writeU8(buf, 0x06, KERNEL_ARCH_X86_64);
  writeU8(buf, 0x07, KERNEL_TYPE_MICROLINUX);
  // Capability flags; COMPRESSED bit set only for gzip'd payloads.
  // (KERNEL_FLAG_HAS_VSOCK is intentionally or accidentally absent — confirm.)
  const flags = KERNEL_FLAG_HAS_NETWORKING
    | KERNEL_FLAG_HAS_QUERY_API
    | KERNEL_FLAG_HAS_ADMIN_API
    | KERNEL_FLAG_RELOCATABLE
    | KERNEL_FLAG_HAS_VIRTIO_NET
    | KERNEL_FLAG_HAS_VIRTIO_BLK
    | (isCompressed ? KERNEL_FLAG_COMPRESSED : 0);
  writeU32LE(buf, 0x08, flags);
  writeU32LE(buf, 0x0C, 64); // min_memory_mb
  writeU64LE(buf, 0x10, 0x1000000); // entry_point (16 MB default load)
  writeU64LE(buf, 0x18, imageSize); // image_size (uncompressed)
  writeU64LE(buf, 0x20, compressedSize); // compressed_size
  writeU8(buf, 0x28, isCompressed ? 1 : 0); // compression (0=none, 1=gzip)
  writeU8(buf, 0x29, 0x00); // api_transport (TcpHttp)
  writeU16LE(buf, 0x2A, 3000); // api_port
  writeU16LE(buf, 0x2C, 1); // api_version
  // 0x2E: 2 bytes padding
  // 0x30: image_hash (32 bytes) — filled by caller
  // 0x50: build_id (16 bytes)
  writeU64LE(buf, 0x60, Math.floor(Date.now() / 1000)); // build_timestamp (unix seconds)
  writeU8(buf, 0x68, 1); // vcpu_count
  // 0x69: reserved_0
  // 0x6A: 2 bytes padding
  // NOTE(review): cmdline_offset = 128 suggests the cmdline follows this
  // header, but assembleRvf appends it AFTER the compressed image — confirm
  // how readers interpret this field.
  writeU32LE(buf, 0x6C, 128); // cmdline_offset
  writeU32LE(buf, 0x70, cmdlineLen); // cmdline_length
  return buf;
}
|
||||
|
||||
// ─── WASM header (64 bytes) ─────────────────────────────────────────────────
|
||||
|
||||
/**
 * Build the 64-byte RVWM header that prefixes the runtime bundle.
 * The caller fills bytecode_hash (0x1C, 32 bytes) in place afterwards.
 *
 * @param {number} bytecodeSize - size of the runtime bundle in bytes
 * @returns {Buffer} 64-byte header
 */
function makeWasmHeader(bytecodeSize) {
  const header = Buffer.alloc(64);
  // Fields are packed back-to-back; each write* helper returns the next offset.
  let off = writeU32LE(header, 0x00, WASM_MAGIC);
  off = writeU16LE(header, off, 1);               // 0x04 header_version
  off = writeU8(header, off, WASM_ROLE_COMBINED); // 0x06 role
  off = writeU8(header, off, WASM_TARGET_NODE);   // 0x07 target
  off = writeU16LE(header, off, 0);               // 0x08 required_features
  off = writeU16LE(header, off, 12);              // 0x0A export_count
  off = writeU32LE(header, off, bytecodeSize);    // 0x0C bytecode_size
  off = writeU32LE(header, off, 0);               // 0x10 compressed_size
  off = writeU8(header, off, 0);                  // 0x14 compression
  off = writeU8(header, off, 2);                  // 0x15 min_memory_pages
  off = writeU16LE(header, off, 0);               // 0x16 max_memory_pages
  writeU16LE(header, off, 0);                     // 0x18 table_count
  // 0x1A: 2 bytes padding
  // 0x1C: bytecode_hash (32 bytes) — filled by caller
  writeU8(header, 0x3C, 0);                       // bootstrap_priority
  writeU8(header, 0x3D, 0);                       // interpreter_type
  return header;
}
|
||||
|
||||
// ─── Load real kernel image ─────────────────────────────────────────────────
|
||||
|
||||
/**
 * Load a kernel image from disk, log its detected format, and gzip it.
 *
 * Exits the process (code 1) when the file does not exist.
 * Fixes: removed the unused `stat` and `magic` locals — `raw.readUInt16LE(0)`
 * threw ERR_OUT_OF_RANGE for files shorter than 2 bytes even though its
 * result was never used.
 *
 * @param {string} kernelPath - path to a bzImage / vmlinux / raw image
 * @returns {{raw: Buffer, compressed: Buffer}} original bytes and gzip(level 9) bytes
 */
function loadKernelImage(kernelPath) {
  if (!existsSync(kernelPath)) {
    console.error(`ERROR: Kernel image not found: ${kernelPath}`);
    console.error('Build one with: cd /tmp/linux-6.6.80 && make tinyconfig && make -j$(nproc) bzImage');
    process.exit(1);
  }

  const raw = readFileSync(kernelPath);
  console.log(` Loaded: ${kernelPath} (${(raw.length / 1024).toFixed(0)} KB)`);

  // Identify the image format (informational only):
  // ELF magic 0x7F 'E' 'L' 'F', or the x86 boot-sector signature 0xAA55 at 0x1FE.
  const elfMagic = raw.subarray(0, 4);
  if (elfMagic[0] === 0x7F && elfMagic[1] === 0x45 && elfMagic[2] === 0x4C && elfMagic[3] === 0x46) {
    console.log(' Format: ELF vmlinux');
  } else if (raw.length > 0x202 && raw.readUInt16LE(0x1FE) === 0xAA55) {
    console.log(' Format: bzImage (bootable)');
  } else {
    console.log(' Format: raw kernel image');
  }

  // Gzip compress for smaller RVF
  const compressed = gzipSync(raw, { level: 9 });
  const ratio = ((1 - compressed.length / raw.length) * 100).toFixed(1);
  console.log(` Compressed: ${(compressed.length / 1024).toFixed(0)} KB (${ratio}% reduction)`);

  return { raw, compressed };
}
|
||||
|
||||
// ─── Build the runtime bundle ───────────────────────────────────────────────
|
||||
|
||||
/**
 * Pack the runtime files (dist/, bin/, package.json) into a single buffer.
 *
 * Bundle format: [file_count(u32)] [file_table] [file_data]
 * Each table entry: path_len(u16) + data_offset(u64) + data_len(u64) + path bytes,
 * where data_offset is absolute within the bundle.
 *
 * @param {string} pkgDir - package root directory
 * @returns {Buffer} the assembled bundle
 */
function buildRuntimeBundle(pkgDir) {
  const files = [];
  const distDir = join(pkgDir, 'dist');
  const binDir = join(pkgDir, 'bin');
  if (existsSync(distDir)) collectFiles(distDir, '', files);
  if (existsSync(binDir)) collectFiles(binDir, 'bin/', files);

  const pkgJsonPath = join(pkgDir, 'package.json');
  if (existsSync(pkgJsonPath)) {
    files.push({ path: 'package.json', data: readFileSync(pkgJsonPath) });
  }

  const header = Buffer.alloc(4);
  header.writeUInt32LE(files.length, 0);

  // Total size of the file table, needed to place the first data offset.
  const tableSize = files.reduce(
    (sum, f) => sum + 2 + 8 + 8 + Buffer.byteLength(f.path, 'utf8'),
    0,
  );

  let cursor = 4 + tableSize; // absolute offset of the first file's data
  const table = files.map((f) => {
    const pathBuf = Buffer.from(f.path, 'utf8');
    const entry = Buffer.alloc(2 + 8 + 8 + pathBuf.length);
    let o = writeU16LE(entry, 0, pathBuf.length);
    o = writeU64LE(entry, o, cursor);
    o = writeU64LE(entry, o, f.data.length);
    pathBuf.copy(entry, o);
    cursor += f.data.length;
    return entry;
  });

  return Buffer.concat([header, ...table, ...files.map((f) => f.data)]);
}
|
||||
|
||||
/**
 * Recursively gather every regular file under `dir` into `files`,
 * recording paths relative to the walk root (joined with '/').
 *
 * @param {string} dir - directory to walk
 * @param {string} prefix - path prefix for entries found here ('' at the root)
 * @param {Array<{path: string, data: Buffer}>} files - accumulator (mutated)
 */
function collectFiles(dir, prefix, files) {
  for (const entryName of readdirSync(dir)) {
    const absolute = join(dir, entryName);
    const relative = prefix + entryName;
    const info = statSync(absolute);
    if (info.isDirectory()) {
      collectFiles(absolute, relative + '/', files);
    } else if (info.isFile()) {
      files.push({ path: relative, data: readFileSync(absolute) });
    }
  }
}
|
||||
|
||||
// ─── Build META_SEG ─────────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Build the META_SEG payload: a UTF-8 JSON document describing the package,
 * the embedded kernel, and the runtime's capabilities.
 *
 * @param {string} pkgDir - package root containing package.json
 * @param {{rawSize: number, compressedSize: number}} kernelInfo - kernel sizes
 * @returns {Buffer} metadata JSON as UTF-8 bytes
 */
function buildMetaPayload(pkgDir, kernelInfo) {
  const pkgJson = JSON.parse(readFileSync(join(pkgDir, 'package.json'), 'utf8'));
  const metadata = {
    name: pkgJson.name,
    version: pkgJson.version,
    description: pkgJson.description,
    format: 'rvf-self-contained',
    runtime: 'node',
    runtime_version: '>=18.0.0',
    arch: 'x86_64',
    kernel: {
      type: 'microlinux',
      version: '6.6.80',
      config: 'tinyconfig+virtio+net',
      image_size: kernelInfo.rawSize,
      compressed_size: kernelInfo.compressedSize,
    },
    build_time: new Date().toISOString(),
    builder: 'ruvbot/build-rvf.js',
    capabilities: [
      'self-booting',
      'api-server',
      'chat',
      'vector-search',
      'self-learning',
      'multi-llm',
      'security-scanning',
    ],
    dependencies: Object.keys(pkgJson.dependencies || {}),
    entrypoint: 'bin/ruvbot.js',
    api_port: 3000,
    firecracker_compatible: true,
  };
  return Buffer.from(JSON.stringify(metadata), 'utf8');
}
|
||||
|
||||
// ─── Build PROFILE_SEG ──────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Build the PROFILE_SEG payload: a static UTF-8 JSON document describing
 * the AI-assistant domain profile (capabilities, model list, boot config).
 *
 * @returns {Buffer} profile JSON as UTF-8 bytes
 */
function buildProfilePayload() {
  const profile = {
    profile_id: 0x42,
    domain: 'ai-assistant',
    name: 'RuvBot',
    version: '0.2.0',
    capabilities: {
      chat: true,
      vector_search: true,
      embeddings: true,
      self_learning: true,
      multi_model: true,
      security: true,
      self_booting: true,
    },
    models: [
      'claude-sonnet-4-20250514',
      'gemini-2.0-flash',
      'gpt-4o',
      'openrouter/*',
    ],
    boot_config: {
      vcpus: 1,
      memory_mb: 64,
      api_port: 3000,
      cmdline: 'console=ttyS0 ruvbot.mode=rvf',
    },
  };
  return Buffer.from(JSON.stringify(profile), 'utf8');
}
|
||||
|
||||
// ─── Build WITNESS_SEG ──────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Build the WITNESS_SEG payload: a build-provenance record tying the
 * output to the hashes of its two binary artifacts.
 *
 * @param {string} kernelHash - hex SHA-256 of the uncompressed kernel image
 * @param {string} runtimeHash - hex SHA-256 of the runtime bundle
 * @returns {Buffer} provenance JSON as UTF-8 bytes
 */
function buildWitnessPayload(kernelHash, runtimeHash) {
  const witness = {
    witness_type: 'build_provenance',
    timestamp: new Date().toISOString(),
    builder: {
      tool: 'build-rvf.js',
      node_version: process.version,
      platform: process.platform,
      arch: process.arch,
    },
    artifacts: {
      kernel: { hash_sha256: kernelHash, type: 'linux-6.6-bzimage' },
      runtime: { hash_sha256: runtimeHash, type: 'nodejs-bundle' },
    },
    chain: [], // no prior provenance links for a fresh build
  };
  return Buffer.from(JSON.stringify(witness), 'utf8');
}
|
||||
|
||||
// ─── Assemble the RVF ───────────────────────────────────────────────────────
|
||||
|
||||
/**
 * Assemble the complete .rvf file and write it to `outputPath`.
 *
 * Emits six segments in order: KERNEL, WASM, META, PROFILE, WITNESS,
 * MANIFEST. The manifest directory lists only the five segments that
 * precede it (the manifest itself is not listed).
 *
 * @param {string} pkgDir - package root (source of dist/, bin/, package.json)
 * @param {string} outputPath - path of the .rvf file to write
 * @param {string} kernelPath - path of the Linux kernel image to embed
 */
function assembleRvf(pkgDir, outputPath, kernelPath) {
  console.log('Building self-contained RuvBot RVF...');
  console.log(` Package: ${pkgDir}`);
  console.log(` Kernel: ${kernelPath}`);
  console.log(` Output: ${outputPath}\n`);

  // Segment ids are assigned sequentially starting at 1.
  let segId = 1;
  const segments = [];
  const segDir = [];

  // 1. KERNEL_SEG — Real Linux microkernel
  console.log(' [1/6] Embedding Linux 6.6 microkernel...');
  const { raw: kernelRaw, compressed: kernelCompressed } = loadKernelImage(kernelPath);
  const cmdline = Buffer.from('console=ttyS0 ruvbot.api_port=3000 ruvbot.mode=rvf quiet', 'utf8');
  const kernelHdr = makeKernelHeader(
    kernelRaw.length,
    kernelCompressed.length,
    cmdline.length,
    true // compressed
  );
  // SHA-256 of the UNCOMPRESSED image goes into the header at 0x30.
  const imgHash = createHash('sha256').update(kernelRaw).digest();
  imgHash.copy(kernelHdr, 0x30, 0, 32);
  // Build ID from first 16 bytes of hash
  imgHash.copy(kernelHdr, 0x50, 0, 16);
  const kernelPayload = Buffer.concat([kernelHdr, kernelCompressed, cmdline]);
  const kSegId = segId++;
  segments.push({ segType: SEG_KERNEL, segId: kSegId, payload: kernelPayload });

  // 2. WASM_SEG — RuvBot runtime bundle
  console.log(' [2/6] Bundling RuvBot runtime...');
  const runtimeBundle = buildRuntimeBundle(pkgDir);
  const wasmHdr = makeWasmHeader(runtimeBundle.length);
  // Runtime hash is patched into the RVWM header at 0x1C.
  const runtimeHash = createHash('sha256').update(runtimeBundle).digest();
  runtimeHash.copy(wasmHdr, 0x1C, 0, 32);
  const wasmPayload = Buffer.concat([wasmHdr, runtimeBundle]);
  const wSegId = segId++;
  segments.push({ segType: SEG_WASM, segId: wSegId, payload: wasmPayload });
  console.log(` Runtime: ${runtimeBundle.length} bytes (${(runtimeBundle.length / 1024).toFixed(0)} KB)`);

  // 3. META_SEG — Package metadata
  console.log(' [3/6] Writing package metadata...');
  const metaPayload = buildMetaPayload(pkgDir, {
    rawSize: kernelRaw.length,
    compressedSize: kernelCompressed.length,
  });
  const mSegId = segId++;
  segments.push({ segType: SEG_META, segId: mSegId, payload: metaPayload });

  // 4. PROFILE_SEG — Domain profile
  console.log(' [4/6] Writing domain profile...');
  const profilePayload = buildProfilePayload();
  const pSegId = segId++;
  segments.push({ segType: SEG_PROFILE, segId: pSegId, payload: profilePayload });

  // 5. WITNESS_SEG — Build provenance
  console.log(' [5/6] Writing build provenance...');
  const witnessPayload = buildWitnessPayload(
    imgHash.toString('hex'),
    runtimeHash.toString('hex'),
  );
  const witnSegId = segId++;
  segments.push({ segType: SEG_WITNESS, segId: witnSegId, payload: witnessPayload });

  // 6. MANIFEST_SEG — Segment directory
  console.log(' [6/6] Writing manifest...');
  // Directory offsets are absolute file offsets of each segment's 64-byte
  // header; only the five segments pushed so far are listed.
  let currentOffset = 0;
  for (const seg of segments) {
    segDir.push({
      segId: seg.segId,
      offset: currentOffset,
      payloadLen: seg.payload.length,
      segType: seg.segType,
    });
    currentOffset += 64 + seg.payload.length;
  }

  // Manifest layout: epoch(u32) dimension(u16) total_vectors(u64)
  // seg_count(u32) profile_id(u8) reserved(3), then seg_count 25-byte
  // directory entries, then del_count(u32).
  const dirEntrySize = 8 + 8 + 8 + 1;
  const manifestSize = 4 + 2 + 8 + 4 + 1 + 3 + (segDir.length * dirEntrySize) + 4;
  const manifestPayload = Buffer.alloc(manifestSize);
  let mo = 0;
  mo = writeU32LE(manifestPayload, mo, 1); // epoch
  mo = writeU16LE(manifestPayload, mo, 0); // dimension
  mo = writeU64LE(manifestPayload, mo, 0); // total_vectors
  mo = writeU32LE(manifestPayload, mo, segDir.length); // seg_count
  mo = writeU8(manifestPayload, mo, 0x42); // profile_id
  mo += 3; // reserved

  for (const entry of segDir) {
    mo = writeU64LE(manifestPayload, mo, entry.segId);
    mo = writeU64LE(manifestPayload, mo, entry.offset);
    mo = writeU64LE(manifestPayload, mo, entry.payloadLen);
    mo = writeU8(manifestPayload, mo, entry.segType);
  }
  mo = writeU32LE(manifestPayload, mo, 0); // del_count

  const manSegId = segId++;
  segments.push({ segType: SEG_MANIFEST, segId: manSegId, payload: manifestPayload });

  // Write all segments
  const allBuffers = [];
  for (const seg of segments) {
    allBuffers.push(makeSegmentHeader(seg.segType, seg.segId, seg.payload.length, seg.payload));
    allBuffers.push(seg.payload);
  }

  const rvfData = Buffer.concat(allBuffers);
  // NOTE(review): this creates kernel/ under the package root, yet the file
  // is written to outputPath (whose parent dir is NOT created) — confirm intent.
  mkdirSync(join(pkgDir, 'kernel'), { recursive: true });
  writeFileSync(outputPath, rvfData);

  // Summary
  const mb = (rvfData.length / (1024 * 1024)).toFixed(2);
  console.log(`\n RVF assembled: ${outputPath}`);
  console.log(` Total size: ${mb} MB`);
  console.log(` Segments: ${segments.length}`);
  console.log(` KERNEL_SEG : ${(kernelPayload.length / 1024).toFixed(0)} KB (Linux 6.6.80 bzImage, gzip)`);
  console.log(` WASM_SEG : ${(wasmPayload.length / 1024).toFixed(0)} KB (Node.js runtime bundle)`);
  console.log(` META_SEG : ${metaPayload.length} bytes`);
  console.log(` PROFILE_SEG : ${profilePayload.length} bytes`);
  console.log(` WITNESS_SEG : ${witnessPayload.length} bytes`);
  console.log(` MANIFEST_SEG: ${manifestPayload.length} bytes`);
  console.log(`\n Kernel SHA-256: ${imgHash.toString('hex')}`);
  console.log(` Self-contained: boot with Firecracker, QEMU, or Cloud Hypervisor`);
}
|
||||
|
||||
// ─── CLI entry ──────────────────────────────────────────────────────────────
|
||||
|
||||
// Parse CLI flags: --output <path> and --kernel <path>. A flag with a
// missing value is silently ignored (the && guard).
const args = process.argv.slice(2);
let outputPath = 'ruvbot.rvf';
let kernelPath = '';

for (let i = 0; i < args.length; i++) {
  if (args[i] === '--output' && args[i + 1]) { outputPath = args[++i]; }
  else if (args[i] === '--kernel' && args[i + 1]) { kernelPath = args[++i]; }
}

// Package root = parent of scripts/.
const pkgDir = resolve(__dirname, '..');

// Find kernel: CLI arg > kernel/bzImage > RUVBOT_KERNEL env
if (!kernelPath) {
  const candidates = [
    join(pkgDir, 'kernel', 'bzImage'),
    join(pkgDir, 'kernel', 'vmlinux'),
    '/tmp/linux-6.6.80/arch/x86/boot/bzImage',
  ];
  for (const c of candidates) {
    if (existsSync(c)) { kernelPath = c; break; }
  }
}
// Environment variable is the last fallback, after the default candidates.
if (!kernelPath && process.env.RUVBOT_KERNEL) {
  kernelPath = process.env.RUVBOT_KERNEL;
}
if (!kernelPath) {
  // No kernel found anywhere: print build instructions and abort.
  console.error('ERROR: No kernel image found.');
  console.error('Provide one with --kernel /path/to/bzImage or place at kernel/bzImage');
  console.error('\nTo build a minimal kernel:');
  console.error(' wget https://cdn.kernel.org/pub/linux/kernel/v6.x/linux-6.6.80.tar.xz');
  console.error(' tar xf linux-6.6.80.tar.xz && cd linux-6.6.80');
  console.error(' make tinyconfig');
  console.error(' scripts/config --enable 64BIT --enable VIRTIO --enable VIRTIO_NET \\');
  console.error(' --enable NET --enable INET --enable SERIAL_8250_CONSOLE --enable TTY');
  console.error(' make olddefconfig && make -j$(nproc) bzImage');
  process.exit(1);
}

// Resolve a relative --output path against the package root.
if (!isAbsolute(outputPath)) {
  outputPath = join(pkgDir, outputPath);
}

assembleRvf(pkgDir, outputPath, kernelPath);
|
||||
738
vendor/ruvector/npm/packages/ruvbot/scripts/install.sh
vendored
Executable file
738
vendor/ruvector/npm/packages/ruvbot/scripts/install.sh
vendored
Executable file
@@ -0,0 +1,738 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# RuvBot Installer
|
||||
#
|
||||
# Usage:
|
||||
# curl -fsSL https://get.ruvector.dev/ruvbot | bash
|
||||
# curl -fsSL https://raw.githubusercontent.com/ruvnet/ruvector/main/npm/packages/ruvbot/scripts/install.sh | bash
|
||||
#
|
||||
# Options (via environment variables):
|
||||
# RUVBOT_VERSION - Specific version to install (default: latest)
|
||||
# RUVBOT_GLOBAL - Install globally (default: true)
|
||||
# RUVBOT_INIT - Run init after install (default: false)
|
||||
# RUVBOT_CHANNEL - Configure channel: slack, discord, telegram
|
||||
# RUVBOT_DEPLOY - Deploy target: local, docker, cloudrun, k8s
|
||||
# RUVBOT_WIZARD - Run interactive wizard (default: false)
|
||||
#
|
||||
# Examples:
|
||||
# # Basic install
|
||||
# curl -fsSL https://get.ruvector.dev/ruvbot | bash
|
||||
#
|
||||
# # Install specific version
|
||||
# RUVBOT_VERSION=0.1.3 curl -fsSL https://get.ruvector.dev/ruvbot | bash
|
||||
#
|
||||
# # Install and initialize
|
||||
# RUVBOT_INIT=true curl -fsSL https://get.ruvector.dev/ruvbot | bash
|
||||
#
|
||||
# # Install with Slack configuration
|
||||
# RUVBOT_CHANNEL=slack curl -fsSL https://get.ruvector.dev/ruvbot | bash
|
||||
#
|
||||
# # Install and deploy to Cloud Run
|
||||
# RUVBOT_DEPLOY=cloudrun curl -fsSL https://get.ruvector.dev/ruvbot | bash
|
||||
#
|
||||
# # Run full interactive wizard
|
||||
# RUVBOT_WIZARD=true curl -fsSL https://get.ruvector.dev/ruvbot | bash
|
||||
|
||||
set -e
|
||||
|
||||
# Colors
|
||||
# ANSI color codes used by the logging helpers below.
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
BLUE='\033[0;34m'
CYAN='\033[0;36m'
MAGENTA='\033[0;35m'
NC='\033[0m' # No Color
BOLD='\033[1m'
DIM='\033[2m'

# Configuration — every option can be overridden via the environment;
# see the usage header at the top of this script for descriptions.
RUVBOT_VERSION="${RUVBOT_VERSION:-latest}"
RUVBOT_GLOBAL="${RUVBOT_GLOBAL:-true}"
RUVBOT_INIT="${RUVBOT_INIT:-false}"
RUVBOT_CHANNEL="${RUVBOT_CHANNEL:-}"
RUVBOT_DEPLOY="${RUVBOT_DEPLOY:-}"
RUVBOT_WIZARD="${RUVBOT_WIZARD:-false}"

# Feature flags — flipped to true by check_dependencies when the
# corresponding CLI tool is found on PATH.
GCLOUD_AVAILABLE=false
DOCKER_AVAILABLE=false
KUBECTL_AVAILABLE=false
|
||||
|
||||
# Banner
|
||||
# Print the RuvBot ASCII-art banner and tagline to stdout.
print_banner() {
    echo -e "${CYAN}"
    echo ' ____ ____ _ '
    echo ' | _ \ _ ___ _| __ ) ___ | |_ '
    echo ' | |_) | | | \ \ / / _ \ / _ \| __|'
    echo ' | _ <| |_| |\ V /| |_) | (_) | |_ '
    echo ' |_| \_\\__,_| \_/ |____/ \___/ \__|'
    echo -e "${NC}"
    echo -e "${BOLD}Enterprise-Grade Self-Learning AI Assistant${NC}"
    echo -e "${DIM}Military-strength security • 150x faster search • 12+ LLM models${NC}"
    echo ""
}
|
||||
|
||||
# Logging functions
|
||||
# Logging helpers — consistent color-coded status lines.
info() { echo -e "${BLUE}ℹ${NC} $1"; }
success() { echo -e "${GREEN}✓${NC} $1"; }
warn() { echo -e "${YELLOW}⚠${NC} $1"; }
# error() prints the message and TERMINATES the installer (exit 1).
error() { echo -e "${RED}✗${NC} $1"; exit 1; }
# step() prints a bold section heading for each install phase.
step() { echo -e "\n${MAGENTA}▸${NC} ${BOLD}$1${NC}"; }
|
||||
|
||||
# Check dependencies
|
||||
# Verify hard requirements (node >= 18, npm) — aborting via error() when
# missing — and probe the optional deployment tools (gcloud, docker,
# kubectl), flipping the corresponding *_AVAILABLE flags when found.
check_dependencies() {
    step "Checking dependencies"

    # Check Node.js
    if ! command -v node &> /dev/null; then
        error "Node.js is required but not installed. Install from https://nodejs.org"
    fi

    # Extract the major version, e.g. "v20.11.1" -> 20.
    NODE_VERSION=$(node -v | cut -d 'v' -f 2 | cut -d '.' -f 1)
    if [ "$NODE_VERSION" -lt 18 ]; then
        error "Node.js 18+ is required. Current: $(node -v)"
    fi
    success "Node.js $(node -v)"

    # Check npm
    if ! command -v npm &> /dev/null; then
        error "npm is required but not installed"
    fi
    success "npm $(npm -v)"

    # Check optional: gcloud (only needed for Cloud Run deploys)
    if command -v gcloud &> /dev/null; then
        success "gcloud CLI $(gcloud --version 2>/dev/null | head -1 | awk '{print $4}')"
        GCLOUD_AVAILABLE=true
    else
        echo -e "${DIM} ○ gcloud CLI not found (optional for Cloud Run)${NC}"
    fi

    # Check optional: docker
    if command -v docker &> /dev/null; then
        success "Docker $(docker --version | awk '{print $3}' | tr -d ',')"
        DOCKER_AVAILABLE=true
    else
        echo -e "${DIM} ○ Docker not found (optional for containerization)${NC}"
    fi

    # Check optional: kubectl
    if command -v kubectl &> /dev/null; then
        success "kubectl $(kubectl version --client -o json 2>/dev/null | grep -o '"gitVersion": "[^"]*"' | cut -d'"' -f4)"
        KUBECTL_AVAILABLE=true
    else
        echo -e "${DIM} ○ kubectl not found (optional for Kubernetes)${NC}"
    fi
}
|
||||
|
||||
# Install RuvBot
|
||||
# Install the ruvbot npm package. Global by default; set RUVBOT_GLOBAL=false
# for a local (cwd) install. RUVBOT_VERSION pins a specific version.
install_ruvbot() {
    step "Installing RuvBot"

    PACKAGE="ruvbot"
    if [ "$RUVBOT_VERSION" != "latest" ]; then
        PACKAGE="ruvbot@$RUVBOT_VERSION"
        info "Installing version $RUVBOT_VERSION"
    fi

    if [ "$RUVBOT_GLOBAL" = "true" ]; then
        # Retry with sudo when the unprivileged global install fails.
        # NOTE(review): 2>/dev/null hides the original failure reason — confirm.
        npm install -g "$PACKAGE" 2>/dev/null || sudo npm install -g "$PACKAGE"
        success "RuvBot installed globally"
    else
        npm install "$PACKAGE"
        success "RuvBot installed locally"
    fi

    # Verify installation — fall back to the npx hint when the binary is
    # not on PATH (e.g. a local install).
    if command -v ruvbot &> /dev/null; then
        INSTALLED_VERSION=$(ruvbot --version 2>/dev/null || echo "unknown")
        success "RuvBot $INSTALLED_VERSION is ready"
    else
        success "RuvBot installed (use 'npx ruvbot' to run)"
    fi
}
|
||||
|
||||
# Install optional dependencies for channels
|
||||
# Install the npm SDK packages required by a chat channel.
#   $1: channel name — slack | discord | telegram | all
# Fixes: under `set -e` a failed `npm install … 2>/dev/null` previously
# aborted the installer with no output at all; failures now abort with an
# explicit message. An unknown channel is reported and skipped instead of
# silently matching nothing.
install_channel_deps() {
    local channel=$1
    step "Installing $channel dependencies"

    case "$channel" in
        slack)
            npm install @slack/bolt @slack/web-api 2>/dev/null \
                || error "Failed to install Slack dependencies"
            success "Slack SDK installed (@slack/bolt, @slack/web-api)"
            ;;
        discord)
            npm install discord.js 2>/dev/null \
                || error "Failed to install Discord dependencies"
            success "Discord.js installed"
            ;;
        telegram)
            npm install telegraf 2>/dev/null \
                || error "Failed to install Telegram dependencies"
            success "Telegraf installed"
            ;;
        all)
            npm install @slack/bolt @slack/web-api discord.js telegraf 2>/dev/null \
                || error "Failed to install channel dependencies"
            success "All channel dependencies installed"
            ;;
        *)
            warn "Unknown channel '$channel' — skipping dependency install"
            ;;
    esac
}
|
||||
|
||||
# Initialize project
|
||||
# Run `ruvbot init --yes` in the current directory, using the globally
# installed binary when RUVBOT_GLOBAL=true, otherwise going through npx.
init_project() {
    step "Initializing RuvBot project"

    if [ "$RUVBOT_GLOBAL" != "true" ]; then
        npx ruvbot init --yes
    else
        ruvbot init --yes
    fi

    success "Project initialized"
}
|
||||
|
||||
# Configure channel interactively
|
||||
# Interactively collect credentials for one chat channel and append them
# to ./.env. $1: slack | discord | telegram.
# NOTE(review): tokens are read without `read -s` (echoed to the terminal)
# and appended with >>, so re-running duplicates entries in .env — confirm
# whether this is intended.
configure_channel() {
    local channel=$1

    step "Configuring $channel"

    case "$channel" in
        slack)
            echo ""
            echo " To set up Slack, you'll need credentials from:"
            echo -e " ${CYAN}https://api.slack.com/apps${NC}"
            echo ""
            read -p " SLACK_BOT_TOKEN (xoxb-...): " SLACK_BOT_TOKEN
            read -p " SLACK_SIGNING_SECRET: " SLACK_SIGNING_SECRET
            read -p " SLACK_APP_TOKEN (xapp-...): " SLACK_APP_TOKEN

            # Grouped redirect: all three lines appended to .env together.
            {
                echo "SLACK_BOT_TOKEN=$SLACK_BOT_TOKEN"
                echo "SLACK_SIGNING_SECRET=$SLACK_SIGNING_SECRET"
                echo "SLACK_APP_TOKEN=$SLACK_APP_TOKEN"
            } >> .env

            success "Slack configuration saved to .env"
            ;;

        discord)
            echo ""
            echo " To set up Discord, you'll need credentials from:"
            echo -e " ${CYAN}https://discord.com/developers/applications${NC}"
            echo ""
            read -p " DISCORD_TOKEN: " DISCORD_TOKEN
            read -p " DISCORD_CLIENT_ID: " DISCORD_CLIENT_ID
            read -p " DISCORD_GUILD_ID (optional): " DISCORD_GUILD_ID

            {
                echo "DISCORD_TOKEN=$DISCORD_TOKEN"
                echo "DISCORD_CLIENT_ID=$DISCORD_CLIENT_ID"
                # Guild id is optional — only written when provided.
                [ -n "$DISCORD_GUILD_ID" ] && echo "DISCORD_GUILD_ID=$DISCORD_GUILD_ID"
            } >> .env

            success "Discord configuration saved to .env"
            ;;

        telegram)
            echo ""
            echo " To set up Telegram, get a token from:"
            echo -e " ${CYAN}@BotFather${NC} on Telegram"
            echo ""
            read -p " TELEGRAM_BOT_TOKEN: " TELEGRAM_BOT_TOKEN

            echo "TELEGRAM_BOT_TOKEN=$TELEGRAM_BOT_TOKEN" >> .env

            success "Telegram configuration saved to .env"
            ;;
    esac
}
|
||||
|
||||
# Deploy to Cloud Run
|
||||
# Interactively deploy RuvBot to Google Cloud Run: checks gcloud auth,
# prompts for project/region/service/LLM keys and optional channel tokens,
# generates a Dockerfile when absent, runs `gcloud run deploy`, prints the
# resulting URLs, and (if configured) registers the Telegram webhook.
# Requires GCLOUD_AVAILABLE=true (set by check_dependencies).
deploy_cloudrun() {
    step "Deploying to Google Cloud Run"

    if [ "$GCLOUD_AVAILABLE" != "true" ]; then
        error "gcloud CLI is required. Install from https://cloud.google.com/sdk"
    fi

    # Check authentication. BUGFIX: the original used
    # `if ! gcloud auth list ... | head -1`, which tests head's exit status —
    # head exits 0 even on empty input, so the login fallback never ran.
    # Test whether any active account was actually printed instead.
    ACTIVE_ACCOUNT=$(gcloud auth list --filter=status:ACTIVE --format="value(account)" 2>/dev/null | head -1)
    if [ -z "$ACTIVE_ACCOUNT" ]; then
        warn "Not authenticated with gcloud"
        info "Running 'gcloud auth login'..."
        gcloud auth login
    fi

    # Get project — default to the currently configured one.
    CURRENT_PROJECT=$(gcloud config get-value project 2>/dev/null || echo "")
    echo ""
    read -p " GCP Project ID [$CURRENT_PROJECT]: " PROJECT_ID
    PROJECT_ID="${PROJECT_ID:-$CURRENT_PROJECT}"

    if [ -z "$PROJECT_ID" ]; then
        error "Project ID is required"
    fi

    gcloud config set project "$PROJECT_ID" 2>/dev/null

    # Get region
    read -p " Region [us-central1]: " REGION
    REGION="${REGION:-us-central1}"

    # Get service name
    read -p " Service name [ruvbot]: " SERVICE_NAME
    SERVICE_NAME="${SERVICE_NAME:-ruvbot}"

    # Get API key — ENV_VARS accumulates the comma-separated env list
    # passed to --set-env-vars.
    echo ""
    echo " LLM Provider:"
    echo " 1. OpenRouter (recommended - Gemini, Claude, GPT)"
    echo " 2. Anthropic (Claude only)"
    read -p " Choose [1]: " PROVIDER_CHOICE
    PROVIDER_CHOICE="${PROVIDER_CHOICE:-1}"

    if [ "$PROVIDER_CHOICE" = "1" ]; then
        read -p " OPENROUTER_API_KEY: " API_KEY
        ENV_VARS="OPENROUTER_API_KEY=$API_KEY,DEFAULT_MODEL=google/gemini-2.0-flash-001"
    else
        read -p " ANTHROPIC_API_KEY: " API_KEY
        ENV_VARS="ANTHROPIC_API_KEY=$API_KEY"
    fi

    # Channel configuration
    echo ""
    read -p " Configure Slack? [y/N]: " SETUP_SLACK
    if [[ "$SETUP_SLACK" =~ ^[Yy]$ ]]; then
        read -p " SLACK_BOT_TOKEN: " SLACK_BOT_TOKEN
        read -p " SLACK_SIGNING_SECRET: " SLACK_SIGNING_SECRET
        ENV_VARS="$ENV_VARS,SLACK_BOT_TOKEN=$SLACK_BOT_TOKEN,SLACK_SIGNING_SECRET=$SLACK_SIGNING_SECRET"
    fi

    read -p " Configure Telegram? [y/N]: " SETUP_TELEGRAM
    if [[ "$SETUP_TELEGRAM" =~ ^[Yy]$ ]]; then
        read -p " TELEGRAM_BOT_TOKEN: " TELEGRAM_BOT_TOKEN
        ENV_VARS="$ENV_VARS,TELEGRAM_BOT_TOKEN=$TELEGRAM_BOT_TOKEN"
    fi

    # Enable required APIs
    info "Enabling required GCP APIs..."
    gcloud services enable run.googleapis.com containerregistry.googleapis.com cloudbuild.googleapis.com 2>/dev/null

    # Create Dockerfile if it doesn't exist (quoted heredoc: no expansion here).
    if [ ! -f "Dockerfile" ]; then
        info "Creating Dockerfile..."
        cat > Dockerfile << 'DOCKERFILE'
FROM node:20-slim

WORKDIR /app

# Install curl for health checks
RUN apt-get update && apt-get install -y curl && rm -rf /var/lib/apt/lists/*

# Install ruvbot
RUN npm install -g ruvbot

# Create directories
RUN mkdir -p /app/data /app/plugins /app/skills

# Health check
HEALTHCHECK --interval=30s --timeout=10s --start-period=5s --retries=3 \
    CMD curl -f http://localhost:${PORT:-8080}/health || exit 1

# Start command
CMD ["ruvbot", "start", "--port", "8080"]
DOCKERFILE
        success "Dockerfile created"
    fi

    # Deploy
    info "Deploying to Cloud Run (this may take a few minutes)..."
    gcloud run deploy "$SERVICE_NAME" \
        --source . \
        --platform managed \
        --region "$REGION" \
        --allow-unauthenticated \
        --port 8080 \
        --memory 512Mi \
        --min-instances 0 \
        --max-instances 10 \
        --set-env-vars="$ENV_VARS" \
        --quiet

    # Get URL
    SERVICE_URL=$(gcloud run services describe "$SERVICE_NAME" --region "$REGION" --format='value(status.url)')

    echo ""
    echo -e "${GREEN}═══════════════════════════════════════${NC}"
    echo -e "${BOLD}🚀 RuvBot deployed successfully!${NC}"
    echo -e "${GREEN}═══════════════════════════════════════${NC}"
    echo ""
    echo -e " URL: ${CYAN}$SERVICE_URL${NC}"
    echo -e " Health: ${CYAN}$SERVICE_URL/health${NC}"
    echo -e " API: ${CYAN}$SERVICE_URL/api/status${NC}"
    echo -e " Models: ${CYAN}$SERVICE_URL/api/models${NC}"
    echo ""
    echo " Quick test:"
    echo -e " ${DIM}curl $SERVICE_URL/health${NC}"
    echo ""

    # Set Telegram webhook if configured
    if [ -n "$TELEGRAM_BOT_TOKEN" ]; then
        WEBHOOK_URL="$SERVICE_URL/telegram/webhook"
        info "Setting Telegram webhook..."
        curl -s "https://api.telegram.org/bot$TELEGRAM_BOT_TOKEN/setWebhook?url=$WEBHOOK_URL" > /dev/null
        success "Telegram webhook: $WEBHOOK_URL"
    fi
}
|
||||
|
||||
# Deploy to Docker
|
||||
# Deploy RuvBot locally with Docker Compose.
#
# Prompts for a container name and host port, generates docker-compose.yml,
# and optionally starts the stack. Requires Docker ($DOCKER_AVAILABLE is set
# by check_dependencies). Credentials are resolved by Compose from the host
# environment at `up` time (the \${VAR} references are escaped on purpose).
#
# Fix: node:20-slim does not ship curl, so the previous curl-based
# healthcheck always failed; probe with Node's built-in fetch instead.
deploy_docker() {
  step "Deploying with Docker"

  if [ "$DOCKER_AVAILABLE" != "true" ]; then
    error "Docker is required. Install from https://docker.com"
  fi

  # Get configuration
  read -p " Container name [ruvbot]: " CONTAINER_NAME
  CONTAINER_NAME="${CONTAINER_NAME:-ruvbot}"

  read -p " Port [3000]: " PORT
  PORT="${PORT:-3000}"

  # Create docker-compose.yml (unquoted heredoc: $CONTAINER_NAME and $PORT
  # expand now; \${...} survive as literals for Compose to interpolate).
  info "Creating docker-compose.yml..."
  cat > docker-compose.yml << COMPOSE
version: '3.8'
services:
  ruvbot:
    image: node:20-slim
    container_name: $CONTAINER_NAME
    working_dir: /app
    command: sh -c "npm install -g ruvbot && ruvbot start --port 3000"
    ports:
      - "$PORT:3000"
    environment:
      - OPENROUTER_API_KEY=\${OPENROUTER_API_KEY}
      - ANTHROPIC_API_KEY=\${ANTHROPIC_API_KEY}
      - SLACK_BOT_TOKEN=\${SLACK_BOT_TOKEN}
      - SLACK_SIGNING_SECRET=\${SLACK_SIGNING_SECRET}
      - SLACK_APP_TOKEN=\${SLACK_APP_TOKEN}
      - DISCORD_TOKEN=\${DISCORD_TOKEN}
      - DISCORD_CLIENT_ID=\${DISCORD_CLIENT_ID}
      - TELEGRAM_BOT_TOKEN=\${TELEGRAM_BOT_TOKEN}
    volumes:
      - ./data:/app/data
      - ./plugins:/app/plugins
      - ./skills:/app/skills
    healthcheck:
      # node:20-slim has no curl; use Node's global fetch (Node >= 18).
      test: ["CMD", "node", "-e", "fetch('http://localhost:3000/health').then(r=>process.exit(r.ok?0:1)).catch(()=>process.exit(1))"]
      interval: 30s
      timeout: 10s
      retries: 3
    restart: unless-stopped
COMPOSE
  success "docker-compose.yml created"

  # Start containers
  read -p " Start containers now? [Y/n]: " START_NOW
  START_NOW="${START_NOW:-Y}"

  if [[ "$START_NOW" =~ ^[Yy]$ ]]; then
    info "Starting Docker containers..."
    # Prefer the Compose v2 plugin; fall back to the legacy v1 binary.
    if docker compose version >/dev/null 2>&1; then
      docker compose up -d
    else
      docker-compose up -d
    fi

    echo ""
    echo -e "${GREEN}═══════════════════════════════════════${NC}"
    echo -e "${BOLD}🚀 RuvBot is running!${NC}"
    echo -e "${GREEN}═══════════════════════════════════════${NC}"
    echo ""
    echo -e " URL: ${CYAN}http://localhost:$PORT${NC}"
    echo -e " Health: ${CYAN}http://localhost:$PORT/health${NC}"
    echo -e " Logs: ${DIM}docker-compose logs -f${NC}"
    echo -e " Stop: ${DIM}docker-compose down${NC}"
    echo ""
  fi
}
|
||||
|
||||
# Deploy to Kubernetes
|
||||
# Deploy RuvBot to a Kubernetes cluster.
#
# Prompts for a namespace and an OpenRouter API key, writes Secret +
# Deployment + Service manifests under ./k8s/, and optionally applies them.
# Requires kubectl ($KUBECTL_AVAILABLE is set by check_dependencies).
deploy_k8s() {
  step "Deploying to Kubernetes"

  if [ "$KUBECTL_AVAILABLE" != "true" ]; then
    error "kubectl is required. Install from https://kubernetes.io/docs/tasks/tools/"
  fi

  # Get namespace
  read -p " Namespace [default]: " NAMESPACE
  NAMESPACE="${NAMESPACE:-default}"

  # Get API key
  read -p " OPENROUTER_API_KEY: " API_KEY

  info "Creating Kubernetes manifests..."

  mkdir -p k8s

  # Create secret (unquoted heredoc: $NAMESPACE and $API_KEY expand here;
  # NOTE(review): a key containing double quotes would break the YAML)
  cat > k8s/secret.yaml << SECRET
apiVersion: v1
kind: Secret
metadata:
  name: ruvbot-secrets
  namespace: $NAMESPACE
type: Opaque
stringData:
  OPENROUTER_API_KEY: "$API_KEY"
  DEFAULT_MODEL: "google/gemini-2.0-flash-001"
SECRET

  # Create deployment + service: 2 replicas of node:20-slim that install
  # ruvbot from npm at container start; secrets are injected via envFrom.
  cat > k8s/deployment.yaml << DEPLOYMENT
apiVersion: apps/v1
kind: Deployment
metadata:
  name: ruvbot
  namespace: $NAMESPACE
spec:
  replicas: 2
  selector:
    matchLabels:
      app: ruvbot
  template:
    metadata:
      labels:
        app: ruvbot
    spec:
      containers:
        - name: ruvbot
          image: node:20-slim
          command: ["sh", "-c", "npm install -g ruvbot && ruvbot start --port 3000"]
          ports:
            - containerPort: 3000
          envFrom:
            - secretRef:
                name: ruvbot-secrets
          livenessProbe:
            httpGet:
              path: /health
              port: 3000
            initialDelaySeconds: 60
            periodSeconds: 30
          readinessProbe:
            httpGet:
              path: /ready
              port: 3000
            initialDelaySeconds: 30
            periodSeconds: 10
          resources:
            requests:
              memory: "256Mi"
              cpu: "100m"
            limits:
              memory: "512Mi"
              cpu: "500m"
---
apiVersion: v1
kind: Service
metadata:
  name: ruvbot
  namespace: $NAMESPACE
spec:
  selector:
    app: ruvbot
  ports:
    - port: 80
      targetPort: 3000
  type: LoadBalancer
DEPLOYMENT

  success "Kubernetes manifests created in k8s/"

  read -p " Apply manifests now? [Y/n]: " APPLY_NOW
  APPLY_NOW="${APPLY_NOW:-Y}"

  if [[ "$APPLY_NOW" =~ ^[Yy]$ ]]; then
    kubectl apply -f k8s/

    echo ""
    success "Kubernetes resources created"
    echo ""
    echo " Check status:"
    echo -e " ${DIM}kubectl get pods -l app=ruvbot${NC}"
    echo ""
    echo " Get service URL:"
    echo -e " ${DIM}kubectl get svc ruvbot${NC}"
    echo ""
  fi
}
|
||||
|
||||
# Deployment wizard
|
||||
# Present the deployment-target menu and dispatch to the matching
# deploy_* helper. Defaults to option 5 (skip deployment).
deployment_wizard() {
  step "Deployment Options"
  echo ""
  echo " 1. Local (development)"
  echo " 2. Docker"
  echo " 3. Google Cloud Run"
  echo " 4. Kubernetes"
  echo " 5. Skip deployment"
  echo ""
  read -p " Select [5]: " DEPLOY_CHOICE
  DEPLOY_CHOICE="${DEPLOY_CHOICE:-5}"

  case "$DEPLOY_CHOICE" in
    1)
      info "Starting local development server..."
      # Use the globally installed binary when available, otherwise npx.
      local runner="npx ruvbot"
      if [ "$RUVBOT_GLOBAL" = "true" ]; then
        runner="ruvbot"
      fi
      $runner start --debug
      ;;
    2) deploy_docker ;;
    3) deploy_cloudrun ;;
    4) deploy_k8s ;;
    5) info "Skipping deployment" ;;
    *) warn "Invalid option, skipping deployment" ;;
  esac
}
|
||||
|
||||
# Interactive setup wizard
|
||||
# Interactive three-step setup wizard: LLM provider → channel
# integrations → deployment. Appends chosen credentials to ./.env.
#
# Fix: the ${BOLD}…${NC} step headings were printed with plain `echo`,
# which emits the raw escape sequences; everywhere else this script uses
# `echo -e` with color variables, so do the same here.
run_wizard() {
  step "RuvBot Setup Wizard"

  # Ensure .env exists
  touch .env 2>/dev/null || true

  # LLM Provider
  echo ""
  echo -e " ${BOLD}Step 1: LLM Provider${NC}"
  echo " ───────────────────"
  echo " 1. OpenRouter (Gemini 2.5, Claude, GPT - recommended)"
  echo " 2. Anthropic (Claude only)"
  echo " 3. Skip (configure later)"
  read -p " Select [1]: " PROVIDER
  PROVIDER="${PROVIDER:-1}"

  case "$PROVIDER" in
    1)
      read -p " OPENROUTER_API_KEY: " OPENROUTER_KEY
      {
        echo "OPENROUTER_API_KEY=$OPENROUTER_KEY"
        echo "DEFAULT_MODEL=google/gemini-2.0-flash-001"
      } >> .env
      success "OpenRouter configured"
      ;;
    2)
      read -p " ANTHROPIC_API_KEY: " ANTHROPIC_KEY
      echo "ANTHROPIC_API_KEY=$ANTHROPIC_KEY" >> .env
      success "Anthropic configured"
      ;;
    3) info "Skipping LLM configuration" ;;
  esac

  # Channel Configuration
  echo ""
  echo -e " ${BOLD}Step 2: Channel Integrations${NC}"
  echo " ────────────────────────────"
  echo " 1. Slack"
  echo " 2. Discord"
  echo " 3. Telegram"
  echo " 4. All channels"
  echo " 5. Skip (configure later)"
  read -p " Select [5]: " CHANNELS
  CHANNELS="${CHANNELS:-5}"

  case "$CHANNELS" in
    1)
      install_channel_deps "slack"
      configure_channel "slack"
      ;;
    2)
      install_channel_deps "discord"
      configure_channel "discord"
      ;;
    3)
      install_channel_deps "telegram"
      configure_channel "telegram"
      ;;
    4)
      install_channel_deps "all"
      configure_channel "slack"
      configure_channel "discord"
      configure_channel "telegram"
      ;;
    5) info "Skipping channel configuration" ;;
  esac

  # Deployment
  echo ""
  echo -e " ${BOLD}Step 3: Deployment${NC}"
  echo " ──────────────────"
  deployment_wizard
}
|
||||
|
||||
# Print next steps
|
||||
# Print the post-install quick-start guide: commands for configuring the
# LLM provider, running diagnostics, starting the bot, channel setup, and
# deployment. Output only; no state is changed.
print_next_steps() {
  echo ""
  echo -e "${BOLD}📚 Quick Start${NC}"
  echo "═══════════════════════════════════════"
  echo ""
  echo " Configure LLM provider:"
  echo -e " ${CYAN}export OPENROUTER_API_KEY=sk-or-...${NC}"
  echo ""
  echo " Run diagnostics:"
  echo -e " ${CYAN}ruvbot doctor${NC}"
  echo ""
  echo " Start the bot:"
  echo -e " ${CYAN}ruvbot start${NC}"
  echo ""
  echo " Channel setup guides:"
  echo -e " ${CYAN}ruvbot channels setup slack${NC}"
  echo -e " ${CYAN}ruvbot channels setup discord${NC}"
  echo -e " ${CYAN}ruvbot channels setup telegram${NC}"
  echo ""
  echo " Deploy templates:"
  echo -e " ${CYAN}ruvbot templates list${NC}"
  echo -e " ${CYAN}ruvbot deploy code-reviewer${NC}"
  echo ""
  echo " Deploy to Cloud Run:"
  echo -e " ${CYAN}ruvbot deploy cloudrun${NC}"
  echo ""
  echo -e "${DIM}Docs: https://github.com/ruvnet/ruvector/tree/main/npm/packages/ruvbot${NC}"
  echo ""
}
|
||||
|
||||
# Main
|
||||
# Entry point. Flow: banner → dependency checks → install ruvbot, then
# optional flows driven by environment variables:
#   RUVBOT_CHANNEL  install deps for one channel (e.g. slack/discord/telegram)
#   RUVBOT_INIT     scaffold a new project ("true" to enable)
#   RUVBOT_WIZARD   run the interactive setup wizard ("true" to enable)
#   RUVBOT_DEPLOY   non-interactive deploy target (cloudrun/docker/k8s)
main() {
  print_banner
  check_dependencies
  install_ruvbot

  # Handle channel installation
  if [ -n "$RUVBOT_CHANNEL" ]; then
    install_channel_deps "$RUVBOT_CHANNEL"
  fi

  # Handle initialization
  if [ "$RUVBOT_INIT" = "true" ]; then
    init_project
  fi

  # Handle wizard (the wizard takes precedence over a one-shot deploy)
  if [ "$RUVBOT_WIZARD" = "true" ]; then
    run_wizard
  elif [ -n "$RUVBOT_DEPLOY" ]; then
    # Handle deployment without wizard
    case "$RUVBOT_DEPLOY" in
      cloudrun|cloud-run|gcp) deploy_cloudrun ;;
      docker) deploy_docker ;;
      k8s|kubernetes) deploy_k8s ;;
      *) warn "Unknown deployment target: $RUVBOT_DEPLOY" ;;
    esac
  fi

  print_next_steps
}

main "$@"
|
||||
51
vendor/ruvector/npm/packages/ruvbot/scripts/postinstall.js
vendored
Normal file
51
vendor/ruvector/npm/packages/ruvbot/scripts/postinstall.js
vendored
Normal file
@@ -0,0 +1,51 @@
|
||||
#!/usr/bin/env node
|
||||
|
||||
/**
|
||||
* Post-install script for @ruvector/ruvbot
|
||||
*
|
||||
* Downloads optional native binaries and initializes data directories.
|
||||
*/
|
||||
|
||||
import fs from 'fs';
import path from 'path';
import { fileURLToPath } from 'url';

// ES modules have no __dirname; derive it from import.meta.url.
const __dirname = path.dirname(fileURLToPath(import.meta.url));
// Package root — this script lives in <root>/scripts/.
const rootDir = path.resolve(__dirname, '..');
||||
|
||||
/**
 * Post-install entry point: ensures the package-local data directory
 * exists and reports which optional integrations are currently importable.
 */
async function main() {
  console.log('[ruvbot] Running post-install...');

  // Ensure the data directory exists.
  const dataDir = path.join(rootDir, 'data');
  if (!fs.existsSync(dataDir)) {
    fs.mkdirSync(dataDir, { recursive: true });
    console.log('[ruvbot] Created data directory');
  }

  // Optional integrations: each is probed with a dynamic import.
  const optionalDeps = [
    { name: '@slack/bolt', purpose: 'Slack integration' },
    { name: 'discord.js', purpose: 'Discord integration' },
    { name: 'better-sqlite3', purpose: 'SQLite storage' },
    { name: 'pg', purpose: 'PostgreSQL storage' },
  ];

  // Returns true when the module resolves, false otherwise.
  const isInstalled = async (name) => {
    try {
      await import(name);
      return true;
    } catch {
      return false;
    }
  };

  console.log('\n[ruvbot] Optional features:');
  for (const { name, purpose } of optionalDeps) {
    const line = (await isInstalled(name))
      ? `  [x] ${purpose} (${name})`
      : `  [ ] ${purpose} - install ${name} to enable`;
    console.log(line);
  }

  console.log('\n[ruvbot] Installation complete!');
  console.log('[ruvbot] Run `npx @ruvector/ruvbot start` to begin.\n');
}

// Post-install failures should not break npm install
main().catch((error) => {
  console.warn('[ruvbot] Post-install warning:', error.message);
});
|
||||
378
vendor/ruvector/npm/packages/ruvbot/scripts/run-rvf.js
vendored
Normal file
378
vendor/ruvector/npm/packages/ruvbot/scripts/run-rvf.js
vendored
Normal file
@@ -0,0 +1,378 @@
|
||||
#!/usr/bin/env node
|
||||
/**
|
||||
* run-rvf.js — Extract and boot the self-contained RuvBot RVF.
|
||||
*
|
||||
* Modes:
|
||||
* --boot Extract kernel from KERNEL_SEG, boot with QEMU (default)
|
||||
* --runtime Extract Node.js bundle from WASM_SEG, run directly
|
||||
* --inspect Print segment manifest without running
|
||||
*
|
||||
* Usage:
|
||||
* node scripts/run-rvf.js [ruvbot.rvf] [--boot|--runtime|--inspect]
|
||||
*/
|
||||
|
||||
'use strict';
|
||||
|
||||
const { readFileSync, writeFileSync, mkdirSync, existsSync } = require('fs');
|
||||
const { join, resolve } = require('path');
|
||||
const { gunzipSync } = require('zlib');
|
||||
const { execSync, spawn } = require('child_process');
|
||||
|
||||
const SEGMENT_MAGIC = 0x52564653;
const KERNEL_MAGIC = 0x52564B4E;
const WASM_MAGIC = 0x5256574D;

// Human-readable names for the segment type discriminators (see build-rvf.js).
const SEG_NAMES = {
  0x05: 'MANIFEST', 0x07: 'META', 0x0A: 'WITNESS',
  0x0B: 'PROFILE', 0x0E: 'KERNEL', 0x10: 'WASM',
};

// ─── Parse RVF segments ─────────────────────────────────────────────────────

/**
 * Walk the RVF container and collect its segment directory.
 *
 * Each segment is a 64-byte header (magic u32 LE at 0, type byte at 5,
 * id u64 LE at 8, payload length u64 LE at 0x10) followed by the payload.
 *
 * @param {Buffer} buf - Entire RVF file contents.
 * @returns {{type:number, typeName:string, id:number, offset:number, length:number}[]}
 *   Segments in file order. Parsing stops at the first non-segment byte, or
 *   at a segment whose declared payload overruns the buffer (truncated or
 *   corrupt file), so only fully-contained segments are returned.
 */
function parseRvf(buf) {
  const segments = [];
  let offset = 0;

  while (offset + 64 <= buf.length) {
    const magic = buf.readUInt32LE(offset);
    if (magic !== SEGMENT_MAGIC) break;

    const segType = buf[offset + 5];
    const segId = Number(buf.readBigUInt64LE(offset + 8));
    const payloadLen = Number(buf.readBigUInt64LE(offset + 0x10));
    const payloadStart = offset + 64;

    // Guard against truncated/corrupt files: a payload that extends past
    // the end of the buffer cannot be a valid segment.
    if (payloadStart + payloadLen > buf.length) break;

    segments.push({
      type: segType,
      typeName: SEG_NAMES[segType] || `0x${segType.toString(16)}`,
      id: segId,
      offset: payloadStart,
      length: payloadLen,
    });

    offset = payloadStart + payloadLen;
  }

  return segments;
}
|
||||
|
||||
// ─── Extract kernel ─────────────────────────────────────────────────────────

/**
 * Extract the Linux kernel image (and its command line) from a KERNEL_SEG.
 *
 * Payload layout (see build-rvf.js): magic u32 LE at 0, arch byte at 6,
 * kernel-type byte at 7, uncompressed image size u64 LE at 0x18, stored
 * (possibly gzip-compressed) size u64 LE at 0x20, compression byte at 0x28
 * (1 = gzip), cmdline offset u32 LE at 0x6C, cmdline length u32 LE at 0x70,
 * then the image data starting at byte 128.
 *
 * @param {Buffer} buf - Entire RVF file.
 * @param {{offset:number, length:number}} seg - KERNEL_SEG from parseRvf.
 * @returns {{kernel: Buffer, cmdline: string, arch: number}}
 * @throws {Error} If the payload does not start with the kernel header magic.
 */
function extractKernel(buf, seg) {
  const payload = buf.subarray(seg.offset, seg.offset + seg.length);

  // Parse kernel header (128 bytes)
  const kMagic = payload.readUInt32LE(0);
  if (kMagic !== KERNEL_MAGIC) {
    throw new Error('Invalid kernel header magic');
  }

  const arch = payload[6];
  const kType = payload[7];
  const imageSize = Number(payload.readBigUInt64LE(0x18));
  const compressedSize = Number(payload.readBigUInt64LE(0x20));
  const compression = payload[0x28];
  const cmdlineOffset = payload.readUInt32LE(0x6C);
  const cmdlineLength = payload.readUInt32LE(0x70);

  console.log(` Kernel: arch=${arch === 0 ? 'x86_64' : arch} type=${kType === 1 ? 'MicroLinux' : kType}`);
  console.log(` Image: ${imageSize} bytes (compressed: ${compressedSize})`);

  // Extract kernel image (starts at byte 128)
  const imageData = payload.subarray(128, 128 + compressedSize);

  let kernel;
  if (compression === 1) {
    console.log(' Decompressing gzip kernel...');
    kernel = gunzipSync(imageData);
    console.log(` Decompressed: ${kernel.length} bytes`);
  } else {
    kernel = imageData;
  }

  // Extract cmdline. Fix: the header's cmdline-offset field was read but
  // ignored; honor it when it is set and in range, falling back to the
  // legacy assumption that the cmdline immediately follows the image.
  let cmdline = '';
  if (cmdlineLength > 0) {
    const cmdStart =
      cmdlineOffset > 0 && cmdlineOffset + cmdlineLength <= payload.length
        ? cmdlineOffset
        : 128 + compressedSize;
    cmdline = payload.subarray(cmdStart, cmdStart + cmdlineLength).toString('utf8');
    console.log(` Cmdline: ${cmdline}`);
  }

  return { kernel, cmdline, arch };
}
|
||||
|
||||
// ─── Extract runtime bundle ─────────────────────────────────────────────────

/**
 * Extract the Node.js runtime bundle from a WASM_SEG onto disk.
 *
 * Bundle layout (after the 64-byte WASM header): file_count u32 LE, then a
 * table of entries — path_len u16 LE, data_offset u64 LE, data_size u64 LE,
 * path bytes (UTF-8) — with data offsets relative to the bundle start.
 *
 * @param {Buffer} buf - Entire RVF file.
 * @param {{offset:number, length:number}} seg - WASM_SEG from parseRvf.
 * @param {string} extractDir - Target directory (created if missing).
 * @returns {{path:string, dataOffset:number, dataSize:number}[]} File table.
 * @throws {Error} If a bundled path is absolute or contains a '..'
 *   component — such paths would escape extractDir (zip-slip).
 */
function extractRuntime(buf, seg, extractDir) {
  const payload = buf.subarray(seg.offset, seg.offset + seg.length);

  // Skip WASM header (64 bytes)
  const bundle = payload.subarray(64);

  // Parse bundle: [file_count(u32)] [file_table] [file_data]
  const fileCount = bundle.readUInt32LE(0);
  console.log(` Runtime files: ${fileCount}`);

  let tableOffset = 4;
  const files = [];

  for (let i = 0; i < fileCount; i++) {
    const pathLen = bundle.readUInt16LE(tableOffset);
    const dataOffset = Number(bundle.readBigUInt64LE(tableOffset + 2));
    const dataSize = Number(bundle.readBigUInt64LE(tableOffset + 10));
    const path = bundle.subarray(tableOffset + 18, tableOffset + 18 + pathLen).toString('utf8');

    // Fix: reject paths that could write outside extractDir (zip-slip).
    if (path.startsWith('/') || path.split(/[\\/]+/).includes('..')) {
      throw new Error(`Unsafe path in runtime bundle: ${path}`);
    }

    files.push({ path, dataOffset, dataSize });
    tableOffset += 18 + pathLen;
  }

  // Extract files
  mkdirSync(extractDir, { recursive: true });
  for (const f of files) {
    const data = bundle.subarray(f.dataOffset, f.dataOffset + f.dataSize);
    const outPath = join(extractDir, f.path);
    // join(outPath, '..') normalizes to outPath's parent directory.
    mkdirSync(join(outPath, '..'), { recursive: true });
    writeFileSync(outPath, data);
  }

  console.log(` Extracted to: ${extractDir}`);
  return files;
}
|
||||
|
||||
// ─── Boot with QEMU ─────────────────────────────────────────────────────────

/**
 * Build a minimal initramfs containing a single statically linked C init.
 *
 * The init program mounts /proc, /sys and /dev, prints a boot banner with
 * kernel/memory/cmdline info, then powers the VM off via reboot(2)
 * (0x4321fedc = LINUX_REBOOT_CMD_POWER_OFF). Requires gcc (with static
 * libc), cpio and gzip on the host; exits the process with status 1 if
 * compilation or archiving fails.
 *
 * @param {string} tmpDir - Scratch directory for init.c and the archives.
 * @returns {string} Path to the gzipped cpio initramfs image.
 */
function buildInitramfs(tmpDir) {
  const initramfsDir = join(tmpDir, 'initramfs');
  mkdirSync(join(initramfsDir, 'bin'), { recursive: true });
  mkdirSync(join(initramfsDir, 'dev'), { recursive: true });
  mkdirSync(join(initramfsDir, 'proc'), { recursive: true });
  mkdirSync(join(initramfsDir, 'sys'), { recursive: true });
  mkdirSync(join(initramfsDir, 'etc'), { recursive: true });

  // Write init script (shell-based, works if busybox available; otherwise use C init)
  const initSrc = `
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/mount.h>
#include <sys/reboot.h>
#include <fcntl.h>
#include <sys/utsname.h>
int main(void) {
struct utsname uts;
mount("proc","/proc","proc",0,NULL);
mount("sysfs","/sys","sysfs",0,NULL);
mount("devtmpfs","/dev","devtmpfs",0,NULL);
printf("\\n");
printf("================================================================\\n");
printf(" RuvBot RVF Microkernel - Self-Contained Runtime\\n");
printf("================================================================\\n\\n");
if(uname(&uts)==0){printf(" Kernel: %s %s\\n Arch: %s\\n\\n",uts.sysname,uts.release,uts.machine);}
char buf[256]; int fd=open("/proc/meminfo",O_RDONLY);
if(fd>=0){ssize_t n=read(fd,buf,255);buf[n>0?n:0]=0;close(fd);
char*p=buf;for(int i=0;i<3&&*p;i++){char*nl=strchr(p,'\\n');if(nl)*nl=0;printf(" %s\\n",p);if(nl)p=nl+1;else break;}}
printf("\\n");
fd=open("/proc/cmdline",O_RDONLY);
if(fd>=0){ssize_t n=read(fd,buf,255);buf[n>0?n:0]=0;close(fd);printf(" Cmdline: %s\\n",buf);}
printf("\\n RVF Segments loaded:\\n");
printf(" [KERNEL] Linux bzImage (x86_64)\\n");
printf(" [WASM] RuvBot Node.js runtime bundle\\n");
printf(" [META] ruvbot [rvf-self-contained]\\n");
printf(" [PROFILE] Default agent profile\\n");
printf(" [WITNESS] Genesis witness chain\\n");
printf(" [MANIFEST] 6-segment manifest\\n\\n");
printf(" Status: BOOT OK - All segments verified\\n");
printf(" Mode: RVF self-contained microkernel\\n\\n");
printf("================================================================\\n");
printf(" RuvBot RVF boot complete. System halting.\\n");
printf("================================================================\\n\\n");
sync(); reboot(0x4321fedc);
for(;;)sleep(1); return 0;
}`;

  const initCPath = join(tmpDir, 'init.c');
  writeFileSync(initCPath, initSrc);

  // Compile static init
  // NOTE(review): tmpDir is interpolated unquoted into these shell commands;
  // safe for the fixed /tmp/ruvbot-rvf path used by the callers, but worth
  // hardening if the path ever becomes user-supplied.
  try {
    execSync(`gcc -static -Os -o ${join(initramfsDir, 'init')} ${initCPath}`, { stdio: 'pipe' });
    execSync(`strip ${join(initramfsDir, 'init')}`, { stdio: 'pipe' });
  } catch (err) {
    console.error('Failed to compile init (gcc -static required)');
    console.error('Install with: apt install gcc libc6-dev');
    process.exit(1);
  }

  // Build cpio archive (newc format, as expected by the kernel), then gzip it.
  const cpioPath = join(tmpDir, 'initramfs.cpio');
  const initrdPath = join(tmpDir, 'initramfs.cpio.gz');
  try {
    execSync(`cd ${initramfsDir} && find . | cpio -o -H newc > ${cpioPath} 2>/dev/null`, { stdio: 'pipe' });
    execSync(`gzip -f ${cpioPath}`, { stdio: 'pipe' });
  } catch (err) {
    console.error('Failed to create initramfs (cpio + gzip required)');
    process.exit(1);
  }

  const initrdSize = readFileSync(initrdPath).length;
  console.log(` Initramfs: ${(initrdSize / 1024).toFixed(0)} KB`);
  return initrdPath;
}
|
||||
|
||||
/**
 * Boot the extracted kernel under QEMU (x86_64, serial console on stdio,
 * user-mode VirtIO networking with host port 3000 forwarded into the VM).
 * Builds a fresh initramfs first; exits the process if QEMU is missing.
 *
 * @param {string} kernelPath - Path to the extracted bzImage.
 * @param {string} cmdline - Kernel command line from the KERNEL_SEG.
 * @param {string} tmpDir - Scratch directory for the initramfs build.
 */
function bootKernel(kernelPath, cmdline, tmpDir) {
  const qemuBinary = 'qemu-system-x86_64';

  // Bail out early when QEMU is not installed on the host.
  try {
    execSync(`which ${qemuBinary}`, { stdio: 'pipe' });
  } catch {
    console.error('ERROR: qemu-system-x86_64 not found. Install with: apt install qemu-system-x86');
    process.exit(1);
  }

  console.log('\nBuilding initramfs...');
  const initrdPath = buildInitramfs(tmpDir);

  console.log(`\nBooting RVF kernel with QEMU...`);
  console.log(` Kernel: ${kernelPath}`);
  console.log(` Initrd: ${initrdPath}`);
  console.log(` Cmdline: ${cmdline}`);
  console.log(' Press Ctrl+A then X to exit QEMU\n');
  console.log('─'.repeat(60));

  const qemuArgs = [
    '-kernel', kernelPath,
    '-initrd', initrdPath,
    '-append', cmdline,
    '-m', '64M',
    '-nographic',
    '-no-reboot',
    '-serial', 'mon:stdio',
    '-cpu', 'max',
    '-smp', '1',
    // VirtIO network (user mode)
    '-netdev', 'user,id=net0,hostfwd=tcp::3000-:3000',
    '-device', 'virtio-net-pci,netdev=net0',
  ];

  const vm = spawn(qemuBinary, qemuArgs, { stdio: 'inherit', env: { ...process.env } });

  vm.on('exit', (code) => {
    console.log('─'.repeat(60));
    console.log(`QEMU exited with code ${code}`);
  });

  vm.on('error', (err) => {
    console.error('Failed to start QEMU:', err.message);
  });
}
|
||||
|
||||
// ─── Inspect mode ───────────────────────────────────────────────────────────

/**
 * Print a human-readable summary of the RVF container without running it:
 * total size, the segment table, and extra detail lines for KERNEL (image
 * size/compression) and META (name@version) segments.
 *
 * @param {Buffer} buf - Entire RVF file.
 * @param {{type:number, typeName:string, id:number, offset:number, length:number}[]} segments
 *   Output of parseRvf.
 */
function inspect(buf, segments) {
  const totalMb = (buf.length / 1024 / 1024).toFixed(2);
  console.log(`RVF: ${buf.length} bytes (${totalMb} MB)`);
  console.log(`Segments: ${segments.length}\n`);

  for (const seg of segments) {
    const sizeKb = (seg.length / 1024).toFixed(1);
    console.log(`  #${seg.id} ${seg.typeName.padEnd(10)} ${sizeKb.padStart(8)} KB (offset ${seg.offset})`);

    const payload = buf.subarray(seg.offset, seg.offset + seg.length);

    // KERNEL_SEG: pull image sizes straight from its 128-byte header.
    if (seg.type === 0x0E) {
      const imageSize = Number(payload.readBigUInt64LE(0x18));
      const storedSize = Number(payload.readBigUInt64LE(0x20));
      const compressed = payload[0x28];
      const suffix = compressed ? ` (gzip → ${storedSize})` : '';
      console.log(`      └─ Linux bzImage: ${imageSize} bytes` + suffix);
    }

    // META_SEG: best-effort JSON summary; unparseable metadata is ignored.
    if (seg.type === 0x07) {
      try {
        const obj = JSON.parse(payload.toString('utf8'));
        console.log(`      └─ ${obj.name}@${obj.version} [${obj.format}]`);
      } catch {}
    }
  }
}
|
||||
|
||||
// ─── Main ───────────────────────────────────────────────────────────────────

// CLI parsing: a mode flag selects the action, the first non-flag argument
// is the .rvf path. Later flags win; default mode is --boot.
const args = process.argv.slice(2);
let rvfPath = '';
let mode = 'boot';

for (let i = 0; i < args.length; i++) {
  if (args[i] === '--boot') mode = 'boot';
  else if (args[i] === '--runtime') mode = 'runtime';
  else if (args[i] === '--inspect') mode = 'inspect';
  else if (!args[i].startsWith('-')) rvfPath = args[i];
}

// No explicit path: try ruvbot.rvf next to the package, then in the CWD.
if (!rvfPath) {
  const candidates = [
    join(resolve(__dirname, '..'), 'ruvbot.rvf'),
    'ruvbot.rvf',
  ];
  for (const c of candidates) {
    if (existsSync(c)) { rvfPath = c; break; }
  }
}

if (!rvfPath || !existsSync(rvfPath)) {
  console.error('Usage: node run-rvf.js [path/to/ruvbot.rvf] [--boot|--runtime|--inspect]');
  process.exit(1);
}

console.log(`Loading RVF: ${rvfPath}\n`);
const buf = readFileSync(rvfPath);
const segments = parseRvf(buf);

// --inspect: print the segment manifest and exit without booting anything.
if (mode === 'inspect') {
  inspect(buf, segments);
  process.exit(0);
}

// --boot: extract the kernel from KERNEL_SEG (0x0E) and boot it with QEMU.
if (mode === 'boot') {
  const kernelSeg = segments.find(s => s.type === 0x0E);
  if (!kernelSeg) {
    console.error('No KERNEL_SEG found in RVF');
    process.exit(1);
  }

  const { kernel, cmdline } = extractKernel(buf, kernelSeg);

  // Write extracted kernel to temp file
  const tmpDir = '/tmp/ruvbot-rvf';
  mkdirSync(tmpDir, { recursive: true });
  const kernelPath = join(tmpDir, 'bzImage');
  writeFileSync(kernelPath, kernel);

  bootKernel(kernelPath, cmdline, tmpDir);
}

// --runtime: unpack the Node.js bundle from WASM_SEG (0x10) and run it
// directly on the host (no VM); RVF_PATH tells the app where it came from.
if (mode === 'runtime') {
  const wasmSeg = segments.find(s => s.type === 0x10);
  if (!wasmSeg) {
    console.error('No WASM_SEG (runtime) found in RVF');
    process.exit(1);
  }

  const extractDir = '/tmp/ruvbot-rvf/runtime';
  extractRuntime(buf, wasmSeg, extractDir);

  console.log('\nStarting RuvBot from extracted runtime...');
  const child = spawn('node', [join(extractDir, 'bin/ruvbot.js'), 'start'], {
    stdio: 'inherit',
    cwd: extractDir,
    env: { ...process.env, RVF_PATH: rvfPath },
  });

  child.on('exit', (code) => {
    console.log(`RuvBot exited with code ${code}`);
  });
}
|
||||
Reference in New Issue
Block a user