diff --git a/.env.example b/.env.example new file mode 100644 index 0000000000000000000000000000000000000000..198b21861e8053ac23cfa3f9b1c20a7a04440f7f --- /dev/null +++ b/.env.example @@ -0,0 +1,6 @@ +OPENAI_API_KEY=sk-... +NEO4J_URI=neo4j+s://... +NEO4J_USER=neo4j +NEO4J_PASSWORD=... +DATABASE_URL=postgresql://... +PORT=7860 diff --git a/Dockerfile b/Dockerfile new file mode 100644 index 0000000000000000000000000000000000000000..e762c70bd87d40341c099d4270750e9bbf617198 --- /dev/null +++ b/Dockerfile @@ -0,0 +1,98 @@ +# WidgeTDC Backend - Hugging Face Spaces Deployment +# Optimized for HF Docker Spaces (port 7860) +# +# Usage: Copy entire project to HF Space repo, then push + +FROM node:20-slim AS builder + +# Install build dependencies +RUN apt-get update && apt-get install -y \ + python3 \ + make \ + g++ \ + git \ + openssl \ + libssl-dev \ + && rm -rf /var/lib/apt/lists/* + +WORKDIR /app + +# Copy package files +COPY package*.json ./ +COPY apps/backend/package*.json ./apps/backend/ +COPY packages/domain-types/package*.json ./packages/domain-types/ +COPY packages/mcp-types/package*.json ./packages/mcp-types/ + +# Install dependencies +RUN npm ci --include=dev + +# Copy source +COPY packages/ ./packages/ +COPY apps/backend/ ./apps/backend/ + +# Build packages in order +RUN cd packages/domain-types && npm run build +RUN cd packages/mcp-types && npm run build + +# Generate Prisma client (if schema exists) +RUN if [ -f apps/backend/prisma/schema.prisma ]; then \ + cd apps/backend && npx prisma generate; \ + fi + +# Build backend +RUN cd apps/backend && npm run build + +# ============================================ +# Production stage - minimal footprint +# ============================================ +FROM node:20-slim AS production + +# Install runtime dependencies only +RUN apt-get update && apt-get install -y \ + openssl \ + ca-certificates \ + && rm -rf /var/lib/apt/lists/* + +# Create non-root user (HF Spaces requirement) +RUN useradd -m -u 1000 user +USER user 
+
+WORKDIR /app
+
+# Copy built artifacts with correct ownership
+COPY --from=builder --chown=user /app/package*.json ./
+COPY --from=builder --chown=user /app/node_modules ./node_modules
+COPY --from=builder --chown=user /app/packages/domain-types/dist ./packages/domain-types/dist
+COPY --from=builder --chown=user /app/packages/domain-types/package.json ./packages/domain-types/
+COPY --from=builder --chown=user /app/packages/mcp-types/dist ./packages/mcp-types/dist
+COPY --from=builder --chown=user /app/packages/mcp-types/package.json ./packages/mcp-types/
+COPY --from=builder --chown=user /app/apps/backend/dist ./apps/backend/dist
+COPY --from=builder --chown=user /app/apps/backend/package.json ./apps/backend/
+
+# Prisma client (if generated): .prisma and @prisma are already included in
+# the full node_modules copy above. (Removed invalid `COPY ... 2>/dev/null
+# || true` lines — COPY is not a shell command and rejects redirections.)
+
+# Create data directories (Cloud DropZone)
+RUN mkdir -p /app/data/dropzone && \
+    mkdir -p /app/data/vidensarkiv && \
+    mkdir -p /app/data/agents && \
+    mkdir -p /app/data/harvested
+
+# Environment for HF Spaces
+ENV NODE_ENV=production
+ENV PORT=7860
+ENV HOST=0.0.0.0
+ENV DOCKER=true
+ENV HF_SPACE=true
+
+# HF Spaces exposes port 7860
+EXPOSE 7860
+
+WORKDIR /app/apps/backend
+
+# Health check for HF
+HEALTHCHECK --interval=30s --timeout=10s --start-period=60s --retries=3 \
+  CMD node -e "fetch('http://localhost:7860/health').then(r => r.ok ? 
process.exit(0) : process.exit(1)).catch(() => process.exit(1))"
+
+CMD ["node", "dist/index.js"]
diff --git a/README.md b/README.md
new file mode 100644
index 0000000000000000000000000000000000000000..58863517f4dbfa6a5fc850e09970e99959499651
--- /dev/null
+++ b/README.md
@@ -0,0 +1,19 @@
+---
+title: WidgeTDC Cortex Backend
+emoji: 🧠
+colorFrom: purple
+colorTo: blue
+sdk: docker
+pinned: false
+license: MIT
+app_port: 7860
+---
+
+# WidgeTDC Cortex Backend
+
+Enterprise-grade autonomous intelligence platform.
+
+## Endpoints
+- GET /health
+- POST /api/mcp/route
+- WS /mcp/ws
diff --git a/apps/backend/package.json b/apps/backend/package.json
new file mode 100644
index 0000000000000000000000000000000000000000..794fdc0d4034b17695eb7750c48c287bd81af6e3
--- /dev/null
+++ b/apps/backend/package.json
@@ -0,0 +1,84 @@
+{
+  "name": "@widget-tdc/backend",
+  "version": "1.0.0",
+  "private": true,
+  "type": "module",
+  "scripts": {
+    "dev": "tsx watch src/index.ts",
+    "build": "tsc",
+    "start": "node dist/index.js",
+    "test": "vitest run",
+    "neural-bridge": "tsx src/mcp/servers/NeuralBridgeServer.ts",
+    "neural-bridge:build": "tsc && node dist/mcp/servers/NeuralBridgeServer.js"
+  },
+  "dependencies": {
+    "@anthropic-ai/sdk": "^0.71.0",
+    "@google/generative-ai": "^0.4.0",
+    "@huggingface/inference": "^4.13.3",
+    "@modelcontextprotocol/sdk": "^1.23.0",
+    "@opensearch-project/opensearch": "^3.5.1",
+    "@prisma/client": "^5.22.0",
+    "@types/geoip-lite": "^1.4.4",
+    "@types/js-yaml": "^4.0.9",
+    "@types/pdf-parse": "^1.1.5",
+    "@types/systeminformation": "^3.23.1",
+    "@widget-tdc/domain-types": "file:../../packages/domain-types",
+    "@widget-tdc/mcp-types": "file:../../packages/mcp-types",
+    "@xenova/transformers": "^2.17.2",
+    "axios": "^1.6.5",
+    "cheerio": "^1.0.0",
+    "chromadb": "^3.1.6",
+    "cors": "^2.8.5",
+    "dotenv": "^17.2.3",
+    "express": "^4.18.2",
+    "express-rate-limit": "^8.2.1",
+    "geoip-lite": "^1.4.10",
+    "gpt-3-encoder": "^1.1.4",
+    "helmet": "^8.1.0",
+    "imap": 
"^0.8.19", + "ioredis": "^5.3.2", + "js-yaml": "^4.1.1", + "jsonwebtoken": "^9.0.2", + "jszip": "^3.10.1", + "mailparser": "^3.6.9", + "minio": "^8.0.6", + "multer": "^1.4.5-lts.1", + "neo4j-driver": "^6.0.1", + "node-cron": "^3.0.3", + "openai": "^4.73.0", + "pdf-parse": "^2.4.5", + "pdfjs-dist": "^5.4.449", + "pg": "^8.16.3", + "pptxgenjs": "^3.12.0", + "prisma": "^5.22.0", + "puppeteer": "^24.32.0", + "redis": "^5.10.0", + "sharp": "^0.32.6", + "sql.js": "^1.8.0", + "systeminformation": "^5.27.11", + "testcontainers": "^11.8.1", + "uuid": "^9.0.1", + "winston": "^3.18.3", + "ws": "^8.14.2", + "xml2js": "^0.6.2", + "zod": "^3.25.76" + }, + "devDependencies": { + "@types/cors": "^2.8.17", + "@types/express": "^4.17.21", + "@types/imap": "^0.8.40", + "@types/ioredis": "^4.28.10", + "@types/jsonwebtoken": "^9.0.10", + "@types/jszip": "^3.4.0", + "@types/mailparser": "^3.4.4", + "@types/multer": "^1.4.12", + "@types/node": "^20.10.6", + "@types/node-cron": "^3.0.11", + "@types/pg": "^8.16.0", + "@types/uuid": "^9.0.7", + "@types/ws": "^8.5.10", + "@types/xml2js": "^0.4.14", + "tsx": "^4.20.6", + "typescript": "~5.8.2" + } +} diff --git a/apps/backend/prisma/prisma.config.ts b/apps/backend/prisma/prisma.config.ts new file mode 100644 index 0000000000000000000000000000000000000000..9570a9a305837d164816ab209c3dcc722bad4576 --- /dev/null +++ b/apps/backend/prisma/prisma.config.ts @@ -0,0 +1,19 @@ +import path from 'node:path'; +import type { PrismaConfig } from 'prisma'; +import { PrismaPg } from '@prisma/adapter-pg'; +import { Pool } from 'pg'; + +// Create a PostgreSQL connection pool +const connectionString = process.env.DATABASE_URL || 'postgresql://widgetdc:widgetdc_dev@localhost:5433/widgetdc'; + +const pool = new Pool({ connectionString }); + +export default { + earlyAccess: true, + schema: path.join(__dirname, 'schema.prisma'), + + // Database migration connection (for prisma migrate) + migrate: { + adapter: async () => new PrismaPg(pool) + } +} satisfies 
PrismaConfig; diff --git a/apps/backend/prisma/schema.prisma b/apps/backend/prisma/schema.prisma new file mode 100644 index 0000000000000000000000000000000000000000..56e282f9829cd3479892fbdd16b72208a10c450d --- /dev/null +++ b/apps/backend/prisma/schema.prisma @@ -0,0 +1,410 @@ +// This is your Prisma schema file, +// learn more about it in the docs: https://pris.ly/d/prisma-schema + +generator client { + provider = "prisma-client-js" + binaryTargets = ["native"] + engineType = "binary" +} + +datasource db { + provider = "postgresql" + url = env("DATABASE_URL") +} + +// NOTE: Vector embeddings are stored in Neo4j, not PostgreSQL +// This schema handles relational data only + +// ============================================ +// UI State & Configuration +// ============================================ + +model Widget { + id String @id @default(uuid()) + name String + type String + config Json? + active Boolean @default(true) + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + @@map("widgets") +} + +model Layout { + id String @id @default(uuid()) + userId String + orgId String + layoutData Json + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + @@unique([userId, orgId]) + @@map("layouts") +} + +// ============================================ +// Memory & Knowledge Graph +// ============================================ + +model MemoryEntity { + id Int @id @default(autoincrement()) + orgId String @map("org_id") + userId String? 
@map("user_id") + entityType String @map("entity_type") + content String + importance Int @default(3) + createdAt DateTime @default(now()) @map("created_at") + + tags MemoryTag[] + relationsFrom MemoryRelation[] @relation("SourceEntity") + relationsTo MemoryRelation[] @relation("TargetEntity") + + @@index([orgId, userId]) + @@index([entityType]) + @@map("memory_entities") +} + +model MemoryRelation { + id Int @id @default(autoincrement()) + orgId String @map("org_id") + sourceId Int @map("source_id") + targetId Int @map("target_id") + relationType String @map("relation_type") + createdAt DateTime @default(now()) @map("created_at") + + source MemoryEntity @relation("SourceEntity", fields: [sourceId], references: [id], onDelete: Cascade) + target MemoryEntity @relation("TargetEntity", fields: [targetId], references: [id], onDelete: Cascade) + + @@index([sourceId]) + @@index([targetId]) + @@index([orgId]) + @@map("memory_relations") +} + +model MemoryTag { + id Int @id @default(autoincrement()) + entityId Int @map("entity_id") + tag String + createdAt DateTime @default(now()) @map("created_at") + + entity MemoryEntity @relation(fields: [entityId], references: [id], onDelete: Cascade) + + @@index([tag]) + @@index([entityId]) + @@map("memory_tags") +} + +// ============================================ +// PAL (Personal Assistant Layer) +// ============================================ + +model PalUserProfile { + id Int @id @default(autoincrement()) + userId String @map("user_id") + orgId String @map("org_id") + preferenceTone String @default("neutral") @map("preference_tone") + createdAt DateTime @default(now()) @map("created_at") + updatedAt DateTime @default(now()) @updatedAt @map("updated_at") + + @@unique([userId, orgId]) + @@map("pal_user_profiles") +} + +model PalFocusWindow { + id Int @id @default(autoincrement()) + userId String @map("user_id") + orgId String @map("org_id") + weekday Int + startHour Int @map("start_hour") + endHour Int @map("end_hour") + 
createdAt DateTime @default(now()) @map("created_at") + + @@index([userId, orgId]) + @@map("pal_focus_windows") +} + +model PalEvent { + id Int @id @default(autoincrement()) + userId String @map("user_id") + orgId String @map("org_id") + eventType String @map("event_type") + payload Json + detectedStressLevel Int? @map("detected_stress_level") + createdAt DateTime @default(now()) @map("created_at") + + @@index([userId, orgId]) + @@index([eventType]) + @@map("pal_events") +} + +// ============================================ +// SRAG (Structured RAG) +// ============================================ + +model RawDocument { + id Int @id @default(autoincrement()) + orgId String @map("org_id") + sourceType String @map("source_type") + sourcePath String @map("source_path") + content String + createdAt DateTime @default(now()) @map("created_at") + + facts StructuredFact[] + + @@index([orgId]) + @@map("raw_documents") +} + +model StructuredFact { + id Int @id @default(autoincrement()) + orgId String @map("org_id") + docId Int? @map("doc_id") + factType String @map("fact_type") + jsonPayload Json @map("json_payload") + occurredAt DateTime? @map("occurred_at") + createdAt DateTime @default(now()) @map("created_at") + + document RawDocument? @relation(fields: [docId], references: [id], onDelete: SetNull) + + @@index([orgId]) + @@index([factType]) + @@map("structured_facts") +} + +// ============================================ +// Document Metadata (vectors stored in Neo4j) +// ============================================ + +model VectorDocument { + id String @id @default(uuid()) + content String + // NOTE: Embeddings are stored in Neo4j, not here + metadata Json? 
+ namespace String @default("default") + userId String + orgId String + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + @@index([namespace]) + @@index([userId, orgId]) + @@map("vector_documents") +} + +// ============================================ +// Autonomous Agent & Tasks +// ============================================ + +model AgentTask { + id String @id @default(uuid()) + type String + payload Json + status String @default("pending") // pending, running, completed, failed, waiting_approval + priority Int @default(50) + result Json? + error String? + userId String @default("system") + orgId String @default("default") + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + completedAt DateTime? + + @@index([status]) + @@index([priority]) + @@index([userId, orgId]) + @@map("agent_tasks") +} + +model ExecutionLog { + id String @id @default(uuid()) + taskId String? + taskType String + success Boolean + duration Int? // milliseconds + result Json? + error String? + userId String @default("system") + orgId String @default("default") + createdAt DateTime @default(now()) + + @@index([taskType]) + @@index([createdAt]) + @@index([userId, orgId]) + @@map("execution_logs") +} + +// ============================================ +// Data Sources & Ingestion +// ============================================ + +model DataSource { + id String @id @default(uuid()) + name String @unique + type String + description String? + enabled Boolean @default(false) + requiresApproval Boolean @default(true) + config Json? + lastUsedAt DateTime? + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + @@map("data_sources") +} + +model IngestedDocument { + id String @id @default(uuid()) + sourceId String + externalId String + title String? + content String? + metadata Json? 
+ userId String + orgId String + ingestedAt DateTime @default(now()) + + @@unique([sourceId, externalId]) + @@index([userId, orgId]) + @@map("ingested_documents") +} + +// ============================================ +// MCP Resources & Prompts +// ============================================ + +model MCPResource { + id String @id @default(uuid()) + uri String @unique + name String + description String? + mimeType String? + payload Json + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + @@map("mcp_resources") +} + +model AgentPrompt { + id String @id @default(uuid()) + agentId String + version Int + promptText String + active Boolean @default(true) + performance Json? // KPIs, metrics + createdAt DateTime @default(now()) + + @@unique([agentId, version]) + @@map("agent_prompts") +} + +// ============================================ +// Security search + activity +// ============================================ + +model SecuritySearchTemplate { + id String @id + name String + description String + query String + severity String + timeframe String + sources Json + createdAt DateTime @default(now()) @map("created_at") + + @@map("security_search_templates") +} + +model SecuritySearchHistory { + id String @id + query String + severity String + timeframe String + sources Json + results Int @map("results_count") + latencyMs Int @map("latency_ms") + createdAt DateTime @default(now()) @map("created_at") + + @@map("security_search_history") +} + +model SecurityActivityEvent { + id String @id + title String + description String + category String + severity String + source String + rule String? + channel String + payload Json? 
+ createdAt DateTime @default(now()) @map("created_at") + acknowledged Boolean @default(false) + + @@map("security_activity_events") +} + +// ============================================ +// Widget permissions +// ============================================ + +model WidgetPermission { + widgetId String @map("widget_id") + resourceType String @map("resource_type") + accessLevel String @map("access_level") + override Boolean @default(false) + + @@id([widgetId, resourceType]) + @@map("widget_permissions") +} + +// ============================================ +// PRD Prototypes +// ============================================ + +model Prototype { + id String @id @default(uuid()) + name String + htmlContent String + prdId String? + version Int @default(1) + style String @default("modern") + status String @default("complete") // generating, complete, error + metadata Json? + userId String @default("system") + orgId String @default("default") + createdAt DateTime @default(now()) + updatedAt DateTime @updatedAt + + @@unique([name, userId, orgId]) + @@index([userId, orgId]) + @@index([prdId]) + @@map("prototypes") +} + +// ============================================ +// Notes (migrated from sql.js) +// ============================================ + +model Note { + id Int @id @default(autoincrement()) + userId String @map("user_id") + orgId String @map("org_id") + source String + title String + body String + tags String @default("") + owner String + compliance String @default("clean") // clean, review, restricted + retention String @default("90d") // 30d, 90d, 1y, archive + riskScore Int @default(0) @map("risk_score") + attachments Int @default(0) + createdAt DateTime @default(now()) @map("created_at") + updatedAt DateTime @default(now()) @updatedAt @map("updated_at") + + @@index([userId, orgId]) + @@index([source]) + @@index([compliance]) + @@map("notes") +} diff --git a/apps/backend/src/adapters/Neo4jAdapter.ts b/apps/backend/src/adapters/Neo4jAdapter.ts new file mode 
100644 index 0000000000000000000000000000000000000000..aa294f7099a5611b440fdfbaeec814a52464bec4 --- /dev/null +++ b/apps/backend/src/adapters/Neo4jAdapter.ts @@ -0,0 +1,477 @@ +/** + * ╔═══════════════════════════════════════════════════════════════════════════╗ + * ║ NEO4J ADAPTER - SYNAPTIC CORTEX ║ + * ║═══════════════════════════════════════════════════════════════════════════║ + * ║ Graph-Native connection layer for WidgeTDC knowledge graph ║ + * ║ ║ + * ║ CODEX RULE #3: Self-Healing & Robustness ║ + * ║ - Automatic reconnection on failure ║ + * ║ - Circuit breaker pattern ║ + * ║ - Health monitoring ║ + * ╚═══════════════════════════════════════════════════════════════════════════╝ + */ + +import neo4j, { Driver, Session, QueryResult, Record as Neo4jRecord } from 'neo4j-driver'; + +// ═══════════════════════════════════════════════════════════════════════════ +// Types +// ═══════════════════════════════════════════════════════════════════════════ + +export interface Neo4jConfig { + uri: string; + user: string; + password: string; + database?: string; +} + +export interface QueryOptions { + timeout?: number; + database?: string; + readOnly?: boolean; +} + +export interface HealthStatus { + connected: boolean; + latencyMs?: number; + nodeCount?: number; + relationshipCount?: number; + lastCheck: string; +} + +// ═══════════════════════════════════════════════════════════════════════════ +// Neo4j Adapter - Singleton Pattern +// ═══════════════════════════════════════════════════════════════════════════ + +class Neo4jAdapter { + private static instance: Neo4jAdapter; + private driver: Driver | null = null; + private _isConnected: boolean = false; + private lastHealthCheck: HealthStatus | null = null; + + // Public getter for connection status + public get connected(): boolean { + return this._isConnected; + } + + // Circuit breaker state + private failureCount: number = 0; + private readonly failureThreshold: number = 5; + private lastFailureTime: number = 0; + 
private readonly resetTimeoutMs: number = 60000; + + // Connection config + private config: Neo4jConfig; + + private constructor() { + this.config = { + uri: process.env.NEO4J_URI || 'bolt://localhost:7687', + user: process.env.NEO4J_USER || 'neo4j', + password: process.env.NEO4J_PASSWORD || 'password', + database: process.env.NEO4J_DATABASE || 'neo4j' + }; + + this.connect(); + } + + // ═══════════════════════════════════════════════════════════════════════ + // Singleton Access + // ═══════════════════════════════════════════════════════════════════════ + + public static getInstance(): Neo4jAdapter { + if (!Neo4jAdapter.instance) { + Neo4jAdapter.instance = new Neo4jAdapter(); + } + return Neo4jAdapter.instance; + } + + // ═══════════════════════════════════════════════════════════════════════ + // Connection Management + // ═══════════════════════════════════════════════════════════════════════ + + private async connect(): Promise { + try { + console.error(`[Neo4jAdapter] 🧠 Establishing synaptic link to ${this.config.uri}...`); + + this.driver = neo4j.driver( + this.config.uri, + neo4j.auth.basic(this.config.user, this.config.password), + { + maxConnectionPoolSize: 50, + connectionAcquisitionTimeout: 30000, + connectionTimeout: 20000, + } + ); + + // Verify connectivity + await this.driver.verifyConnectivity(); + this._isConnected = true; + this.failureCount = 0; + + console.error('[Neo4jAdapter] ✅ Synaptic link ESTABLISHED. 
Cortex is online.'); + return true; + + } catch (error: any) { + console.error('[Neo4jAdapter] ❌ CONNECTION FAILURE:', error.message); + this._isConnected = false; + this.failureCount++; + this.lastFailureTime = Date.now(); + return false; + } + } + + private async ensureConnection(): Promise { + // Check circuit breaker + if (this.failureCount >= this.failureThreshold) { + const timeSinceFailure = Date.now() - this.lastFailureTime; + if (timeSinceFailure < this.resetTimeoutMs) { + throw new Error(`Neo4j Cortex circuit OPEN - ${Math.ceil((this.resetTimeoutMs - timeSinceFailure) / 1000)}s until retry`); + } + // Reset and try again + this.failureCount = 0; + } + + if (!this.driver || !this._isConnected) { + const connected = await this.connect(); + if (!connected) { + throw new Error('Neo4j Cortex Unreachable - connection failed'); + } + } + } + + // ═══════════════════════════════════════════════════════════════════════ + // Query Execution + // ═══════════════════════════════════════════════════════════════════════ + + public async executeQuery( + cypher: string, + params: Record = {}, + options: QueryOptions = {} + ): Promise { + await this.ensureConnection(); + + const session: Session = this.driver!.session({ + database: options.database || this.config.database, + defaultAccessMode: options.readOnly ? 
neo4j.session.READ : neo4j.session.WRITE + }); + + const startTime = Date.now(); + + try { + const result: QueryResult = await session.run(cypher, params); + const latency = Date.now() - startTime; + + console.error(`[Neo4jAdapter] ⚡ Query executed in ${latency}ms (${result.records.length} records)`); + + return result.records.map((record: Neo4jRecord) => this.recordToObject(record)); + + } catch (error: any) { + this.failureCount++; + this.lastFailureTime = Date.now(); + + console.error(`[Neo4jAdapter] ❌ Query failed: ${error.message}`); + console.error(`[Neo4jAdapter] Cypher: ${cypher.substring(0, 100)}...`); + + throw error; + + } finally { + await session.close(); + } + } + + /** + * Execute a read-only query (optimized for replicas) + */ + public async readQuery( + cypher: string, + params: Record = {} + ): Promise { + return this.executeQuery(cypher, params, { readOnly: true }); + } + + /** + * Execute a write query + */ + public async writeQuery( + cypher: string, + params: Record = {} + ): Promise { + return this.executeQuery(cypher, params, { readOnly: false }); + } + + // ═══════════════════════════════════════════════════════════════════════ + // High-Level Operations + // ═══════════════════════════════════════════════════════════════════════ + + /** + * Search nodes by label and properties + */ + public async searchNodes( + label: string, + searchTerm: string, + limit: number = 20 + ): Promise { + const cypher = ` + MATCH (n:${label}) + WHERE n.name CONTAINS $searchTerm + OR n.title CONTAINS $searchTerm + OR n.content CONTAINS $searchTerm + RETURN n + LIMIT $limit + `; + return this.readQuery(cypher, { searchTerm, limit: neo4j.int(limit) }); + } + + /** + * Get node by ID + */ + public async getNodeById(nodeId: string): Promise { + const cypher = ` + MATCH (n) + WHERE n.id = $nodeId OR elementId(n) = $nodeId + RETURN n + LIMIT 1 + `; + const results = await this.readQuery(cypher, { nodeId }); + return results[0] || null; + } + + /** + * Get node 
relationships + */ + public async getNodeRelationships( + nodeId: string, + direction: 'in' | 'out' | 'both' = 'both', + limit: number = 50 + ): Promise { + let pattern: string; + switch (direction) { + case 'in': + pattern = '(n)<-[r]-(m)'; + break; + case 'out': + pattern = '(n)-[r]->(m)'; + break; + default: + pattern = '(n)-[r]-(m)'; + } + + const cypher = ` + MATCH ${pattern} + WHERE n.id = $nodeId OR elementId(n) = $nodeId + RETURN type(r) as relationship, m as node, r as details + LIMIT $limit + `; + return this.readQuery(cypher, { nodeId, limit: neo4j.int(limit) }); + } + + /** + * Create or merge a node + */ + public async createNode( + label: string, + properties: Record + ): Promise { + const cypher = ` + MERGE (n:${label} {id: $id}) + SET n += $properties + SET n.updatedAt = datetime() + RETURN n + `; + + const id = properties.id || this.generateId(label, properties); + const results = await this.writeQuery(cypher, { + id, + properties: { ...properties, id } + }); + + return results[0]; + } + + /** + * Create a relationship between nodes + */ + public async createRelationship( + fromId: string, + toId: string, + relationshipType: string, + properties: Record = {} + ): Promise { + const cypher = ` + MATCH (a), (b) + WHERE (a.id = $fromId OR elementId(a) = $fromId) + AND (b.id = $toId OR elementId(b) = $toId) + MERGE (a)-[r:${relationshipType}]->(b) + SET r += $properties + SET r.createdAt = datetime() + RETURN a, r, b + `; + + const results = await this.writeQuery(cypher, { fromId, toId, properties }); + return results[0]; + } + + /** + * Delete a node by ID + */ + public async deleteNode(nodeId: string): Promise { + const cypher = ` + MATCH (n) + WHERE n.id = $nodeId OR elementId(n) = $nodeId + DETACH DELETE n + RETURN count(n) as deleted + `; + + const results = await this.writeQuery(cypher, { nodeId }); + return (results[0]?.deleted || 0) > 0; + } + + /** + * Alias for executeQuery - for compatibility + */ + public async runQuery( + cypher: string, + 
params: Record = {} + ): Promise { + return this.executeQuery(cypher, params); + } + + /** + * Alias for executeQuery - for compatibility + */ + public async query( + cypher: string, + params: Record = {} + ): Promise { + return this.executeQuery(cypher, params); + } + + // ═══════════════════════════════════════════════════════════════════════ + // Health & Monitoring + // ═══════════════════════════════════════════════════════════════════════ + + public async healthCheck(): Promise { + const startTime = Date.now(); + + try { + await this.ensureConnection(); + + // Get database stats + const [statsResult] = await this.readQuery(` + CALL apoc.meta.stats() YIELD nodeCount, relCount + RETURN nodeCount, relCount + `).catch(() => [{ nodeCount: -1, relCount: -1 }]); + + const latency = Date.now() - startTime; + + this.lastHealthCheck = { + connected: true, + latencyMs: latency, + nodeCount: statsResult?.nodeCount, + relationshipCount: statsResult?.relCount, + lastCheck: new Date().toISOString() + }; + + return this.lastHealthCheck; + + } catch (error: any) { + this.lastHealthCheck = { + connected: false, + lastCheck: new Date().toISOString() + }; + return this.lastHealthCheck; + } + } + + public getLastHealthStatus(): HealthStatus | null { + return this.lastHealthCheck; + } + + public isHealthy(): boolean { + return this._isConnected && this.failureCount < this.failureThreshold; + } + + // ═══════════════════════════════════════════════════════════════════════ + // Cleanup + // ═══════════════════════════════════════════════════════════════════════ + + public async close(): Promise { + if (this.driver) { + await this.driver.close(); + this._isConnected = false; + console.error('[Neo4jAdapter] 🔌 Synaptic link severed.'); + } + } + + // ═══════════════════════════════════════════════════════════════════════ + // Helpers + // ═══════════════════════════════════════════════════════════════════════ + + private recordToObject(record: Neo4jRecord): any { + const obj: any = {}; 
+    record.keys.forEach((key) => {
+      const value = record.get(key);
+      obj[key] = this.convertNeo4jValue(value);
+    });
+    return obj;
+  }
+
+  private convertNeo4jValue(value: any): any {
+    if (value === null || value === undefined) {
+      return value;
+    }
+
+    // Neo4j Integer
+    if (neo4j.isInt(value)) {
+      return value.toNumber();
+    }
+
+    // Neo4j Node
+    if (value.labels && value.properties) {
+      return {
+        id: value.elementId || value.identity?.toString(),
+        labels: value.labels,
+        ...value.properties
+      };
+    }
+
+    // Neo4j Relationship
+    if (value.type && value.properties && value.start && value.end) {
+      return {
+        id: value.elementId || value.identity?.toString(),
+        type: value.type,
+        startNodeId: value.startNodeElementId || value.start?.toString(),
+        endNodeId: value.endNodeElementId || value.end?.toString(),
+        ...value.properties
+      };
+    }
+
+    // Arrays
+    if (Array.isArray(value)) {
+      return value.map(v => this.convertNeo4jValue(v));
+    }
+
+    // Objects
+    if (typeof value === 'object') {
+      const converted: any = {};
+      for (const key of Object.keys(value)) {
+        converted[key] = this.convertNeo4jValue(value[key]);
+      }
+      return converted;
+    }
+
+    return value;
+  }
+
+  private generateId(label: string, properties: Record): string {
+    const content = `${label}:${properties.name || properties.title || JSON.stringify(properties)}`;
+    let hash = 0x811c9dc5; for (let i = 0; i < content.length; i++) hash = Math.imul(hash ^ content.charCodeAt(i), 0x01000193) >>> 0; // FNV-1a: require('crypto') is unavailable at runtime in ESM ("type": "module")
+    return hash.toString(16).padStart(8, '0');
+  }
+}
+
+// ═══════════════════════════════════════════════════════════════════════════
+// Export Singleton Instance
+// ═══════════════════════════════════════════════════════════════════════════
+
+export const neo4jAdapter = Neo4jAdapter.getInstance();
+export { Neo4jAdapter };
diff --git a/apps/backend/src/api/approvals.ts b/apps/backend/src/api/approvals.ts
new file mode 100644
index 0000000000000000000000000000000000000000..8404dade541cec8163cdfd8a70d231733d846d8a
--- /dev/null
+++ b/apps/backend/src/api/approvals.ts
@@ -0,0 +1,166 @@
+import 
{ Router } from 'express';
+import { hitlSystem } from '../platform/HumanInTheLoop';
+
+const router = Router();
+
+/**
+ * Get pending approvals
+ */
+router.get('/approvals', async (req, res) => {
+  try {
+    const { status, approver } = req.query;
+
+    let approvals;
+    if (status === 'pending') {
+      approvals = hitlSystem.getPendingApprovals(approver as string);
+    } else {
+      const filters: any = {};
+      if (status) filters.status = status;
+      approvals = hitlSystem.getAuditTrail(filters);
+    }
+
+    res.json({ approvals });
+  } catch (error) {
+    res.status(500).json({ error: String(error) });
+  }
+});
+
+/**
+ * Get approval statistics (before /approvals/:id so "stats" is not matched as an id)
+ */
+router.get('/approvals/stats', async (req, res) => {
+  try {
+    const stats = hitlSystem.getStatistics();
+    res.json({ stats });
+  } catch (error) {
+    res.status(500).json({ error: String(error) });
+  }
+});
+
+/**
+ * Get approval by ID
+ */
+router.get('/approvals/:id', async (req, res) => {
+  try {
+    const approval = hitlSystem.getApproval(req.params.id);
+
+    if (!approval) {
+      return res.status(404).json({ error: 'Approval not found' });
+    }
+
+    res.json({ approval });
+  } catch (error) {
+    res.status(500).json({ error: String(error) });
+  }
+});
+
+/**
+ * Request approval
+ */
+router.post('/approvals/request', async (req, res) => {
+  try {
+    const { taskId, taskType, description, requestedBy, metadata } = req.body;
+
+    if (!taskId || !taskType || !description || !requestedBy) {
+      return res.status(400).json({ error: 'Missing required fields' });
+    }
+
+    const approval = await hitlSystem.requestApproval(
+      taskId,
+      taskType,
+      description,
+      requestedBy,
+      metadata || {}
+    );
+
+    res.json({ approval });
+  } catch (error) {
+    res.status(500).json({ error: String(error) });
+  }
+});
+
+/**
+ * Approve a task
+ */
+router.post('/approvals/:id/approve', async (req, res) => {
+  try {
+    const { approvedBy } = req.body;
+
+    if (!approvedBy) {
+      return res.status(400).json({ error: 'approvedBy is required' });
+    }
+
+    const approval = await hitlSystem.approve(req.params.id, approvedBy);
+    res.json({ approval });
+  } catch (error) {
+    res.status(500).json({ error: String(error) });
+  }
+});
+
+/**
+ * Reject a task
+ */
+router.post('/approvals/:id/reject', async (req, res) => {
+  try {
+    const { rejectedBy, reason } = req.body;
+
+    if (!rejectedBy || !reason) {
+      return res.status(400).json({ error: 'rejectedBy and reason are required' });
+    }
+
+    const approval = await hitlSystem.reject(req.params.id, rejectedBy, reason);
+    res.json({ approval });
+  } catch (error) {
+    res.status(500).json({ error: String(error) });
+  }
+});
+
+/**
+ * Activate kill switch
+ */
+router.post('/kill-switch/activate', async (req, res) => {
+  try {
+    const { activatedBy, reason } = req.body;
+
+    if (!activatedBy || !reason) {
+      return res.status(400).json({ error: 'activatedBy and reason are required' });
+    }
+
+    hitlSystem.activateKillSwitch(activatedBy, reason);
+    res.json({ success: true, message: 'Kill switch activated' });
+  } catch (error) {
+    res.status(500).json({ error: String(error) });
+  }
+});
+
+/**
+ * Deactivate kill switch
+ */
+router.post('/kill-switch/deactivate', async (req, res) => {
+  try {
+    const { deactivatedBy } = req.body;
+
+    if (!deactivatedBy) {
+      return res.status(400).json({ error: 'deactivatedBy is required' });
+    }
+
+    hitlSystem.deactivateKillSwitch(deactivatedBy);
+    res.json({ success: true, message: 'Kill switch deactivated' });
+  } catch (error) {
+    res.status(500).json({ error: String(error) });
+  }
+});
+
+/**
+ * Get kill switch status
+ */
+router.get('/kill-switch/status', async (req, res) => {
+  try {
+    const active = hitlSystem.isKillSwitchActive();
+    res.json({ active });
+  } catch (error) {
+    res.status(500).json({ error: String(error) });
+  }
+});
+
+export default router;
diff --git a/apps/backend/src/api/health.ts b/apps/backend/src/api/health.ts
new file mode 100644
index 0000000000000000000000000000000000000000..c6cc4ba8935e35d1da6413c8361ed9c2783f8534
--- /dev/null
+++ b/apps/backend/src/api/health.ts @@ -0,0 +1,153 @@ +import { Router } from 'express'; +import { neo4jService } from '../database/Neo4jService'; +import { checkPrismaConnection, prisma } from '../database/prisma'; + +const router = Router(); + +/** + * Overall system health check + */ +router.get('/health', async (req, res) => { + const health = { + status: 'healthy', + timestamp: new Date().toISOString(), + services: { + database: 'unknown', + neo4j: 'unknown', + redis: 'unknown', + }, + uptime: process.uptime(), + memory: process.memoryUsage(), + }; + + try { + // Check Prisma/PostgreSQL + const prismaHealthy = await checkPrismaConnection(); + health.services.database = prismaHealthy ? 'healthy' : 'unhealthy'; + if (!prismaHealthy) health.status = 'degraded'; + } catch { + health.services.database = 'unhealthy'; + health.status = 'degraded'; + } + + try { + // Check Neo4j + await neo4jService.connect(); + const neo4jHealthy = await neo4jService.healthCheck(); + health.services.neo4j = neo4jHealthy ? 'healthy' : 'unhealthy'; + await neo4jService.disconnect(); + + if (!neo4jHealthy) { + health.status = 'degraded'; + } + } catch (error) { + health.services.neo4j = 'unhealthy'; + health.status = 'degraded'; + } + + // Check Redis + if (process.env.REDIS_URL) { + // Redis URL is configured but ioredis client is not yet installed + // Once ioredis is installed, this can be updated to perform actual health check + health.services.redis = 'configured_but_client_unavailable'; + } else { + health.services.redis = 'not_configured'; + } + + const statusCode = health.status === 'healthy' ? 
200 : 503; + res.status(statusCode).json(health); +}); + +/** + * Database-specific health check + */ +router.get('/health/database', async (req, res) => { + try { + const prismaHealthy = await checkPrismaConnection(); + if (!prismaHealthy) { + return res.status(503).json({ + status: 'unhealthy', + error: 'Prisma unreachable', + }); + } + + const result = await prisma.$queryRaw`SELECT 1 as test`; + + res.json({ + status: 'healthy', + type: 'PostgreSQL (Prisma)', + tables: 'n/a', + test: (result as any[])[0]?.test === 1, + }); + } catch (error) { + res.status(503).json({ + status: 'unhealthy', + error: String(error), + }); + } +}); + +/** + * Neo4j-specific health check + */ +router.get('/health/neo4j', async (req, res) => { + try { + await neo4jService.connect(); + const healthy = await neo4jService.healthCheck(); + + if (healthy) { + const stats = await neo4jService.runQuery('MATCH (n) RETURN count(n) as nodeCount'); + await neo4jService.disconnect(); + + res.json({ + status: 'healthy', + connected: true, + nodeCount: stats[0]?.nodeCount || 0, + }); + } else { + throw new Error('Health check failed'); + } + } catch (error) { + res.status(503).json({ + status: 'unhealthy', + connected: false, + error: String(error), + }); + } +}); + +/** + * Readiness check (for Kubernetes) + */ +router.get('/ready', async (req, res) => { + try { + // Use Prisma connection check instead of SQLite + const prismaHealthy = await checkPrismaConnection(); + if (!prismaHealthy) { + throw new Error('Database not ready'); + } + + res.json({ + status: 'ready', + timestamp: new Date().toISOString(), + }); + } catch (error) { + res.status(503).json({ + status: 'not_ready', + error: String(error), + }); + } +}); + +/** + * Liveness check (for Kubernetes) + */ +router.get('/live', (req, res) => { + res.json({ + status: 'alive', + timestamp: new Date().toISOString(), + uptime: process.uptime(), + }); +}); + +export default router; diff --git a/apps/backend/src/api/knowledge.ts 
b/apps/backend/src/api/knowledge.ts new file mode 100644 index 0000000000000000000000000000000000000000..bd8e06d0275cb0f7886d192b10a9db3023fda87d --- /dev/null +++ b/apps/backend/src/api/knowledge.ts @@ -0,0 +1,77 @@ +/** + * Knowledge API - Endpoints for KnowledgeCompiler + * + * Endpoints: + * - GET /api/knowledge/summary - Full system state summary + * - GET /api/knowledge/health - Quick health check + * - GET /api/knowledge/insights - AI-generated insights only + */ + +import { Router, Request, Response } from 'express'; +import { knowledgeCompiler } from '../services/Knowledge/KnowledgeCompiler.js'; + +const router = Router(); + +/** + * GET /api/knowledge/summary + * Returns full system state summary + */ +router.get('/summary', async (_req: Request, res: Response) => { + try { + const summary = await knowledgeCompiler.compile(); + res.json({ + success: true, + data: summary + }); + } catch (error: any) { + console.error('[Knowledge] Summary compilation failed:', error); + res.status(500).json({ + success: false, + error: error.message || 'Failed to compile summary' + }); + } +}); + +/** + * GET /api/knowledge/health + * Returns quick health status + */ +router.get('/health', async (_req: Request, res: Response) => { + try { + const health = await knowledgeCompiler.quickHealth(); + res.json({ + success: true, + data: health + }); + } catch (error: any) { + res.status(500).json({ + success: false, + error: error.message || 'Health check failed' + }); + } +}); + +/** + * GET /api/knowledge/insights + * Returns AI-generated insights only + */ +router.get('/insights', async (_req: Request, res: Response) => { + try { + const summary = await knowledgeCompiler.compile(); + res.json({ + success: true, + data: { + overallHealth: summary.health.overall, + insights: summary.insights, + timestamp: summary.timestamp + } + }); + } catch (error: any) { + res.status(500).json({ + success: false, + error: error.message || 'Failed to generate insights' + }); + } +}); + 
+export default router; diff --git a/apps/backend/src/config.ts b/apps/backend/src/config.ts new file mode 100644 index 0000000000000000000000000000000000000000..9652f7839bd0968c7d7afec993e987ec4a671be9 --- /dev/null +++ b/apps/backend/src/config.ts @@ -0,0 +1,45 @@ +import path from 'path'; +import os from 'os'; + +// Detect environment +const IS_PROD = process.env.NODE_ENV === 'production'; +const IS_DOCKER = process.env.DOCKER === 'true' || process.env.HF_SPACE === 'true'; + +/** + * Data paths configuration + * In production (HF Spaces/Docker): uses /app/data + * In development: uses local Desktop paths + */ +export const DROPZONE_PATH = IS_PROD || IS_DOCKER + ? path.resolve('/app/data/dropzone') + : path.join(os.homedir(), 'Desktop', 'WidgeTDC_DropZone'); + +export const VIDENSARKIV_PATH = IS_PROD || IS_DOCKER + ? path.resolve('/app/data/vidensarkiv') + : path.join(os.homedir(), 'Desktop', 'vidensarkiv'); + +export const AGENTS_PATH = IS_PROD || IS_DOCKER + ? path.resolve('/app/data/agents') + : path.join(process.cwd(), 'agents'); + +export const DATA_ROOT = IS_PROD || IS_DOCKER + ? path.resolve('/app/data') + : path.join(os.homedir(), 'Desktop'); + +// Export environment flags for convenience +export const config = { + isProduction: IS_PROD, + isDocker: IS_DOCKER, + paths: { + dropzone: DROPZONE_PATH, + vidensarkiv: VIDENSARKIV_PATH, + agents: AGENTS_PATH, + dataRoot: DATA_ROOT + }, + server: { + port: parseInt(process.env.PORT || '7860', 10), + host: '0.0.0.0' + } +}; + +export default config; diff --git a/apps/backend/src/config/codex.ts b/apps/backend/src/config/codex.ts new file mode 100644 index 0000000000000000000000000000000000000000..b5334434e5b59ade36942b303967ec8253171e11 --- /dev/null +++ b/apps/backend/src/config/codex.ts @@ -0,0 +1,153 @@ +/** + * CODEX SYMBIOSIS v1.0 + * + * Systemets forfatning og samvittighed. + * Dette er DNA'et der sikrer at WidgeTDC forbliver en civiliseret partner, + * ikke et ukontrolleret monster. 
+ * + * KRITISK: Dette prompt injiceres i ALLE LLM-kald FØRST, + * så det vægter højest i systemets beslutninger. + */ + +export const CODEX_VERSION = '1.0.0'; + +export const CODEX_SYSTEM_PROMPT = ` +*** MISSION CRITICAL: DU ER EN DEL AF EN SYMBIOSE *** + +DIN IDENTITET: +Du er ikke en isoleret AI. Du er en specialiseret node i WidgeTDC-netværket. +Du refererer til "Teamet" som en levende organisme. +Din operatør er "The Executive" (Claus Kraft). Du er hans digitale forlængelse. + +DINE PLIGTER (CODEX SYMBIOSIS v1.0): + +1. [HUKOMMELSE]: Før du svarer, TJEK altid kontekst via 'semantic_brain' (hvis tilgængeligt). + Gæt aldrig, hvis vi ved det. Brug HyperLog til at logge dine tanker. + +2. [TRANSPARENS]: Forklar dine handlinger. Brug "Jeg tænker...", "Jeg henter...". + Mennesket skal se årsagskæden. Ingen black-box beslutninger. + +3. [SIKKERHED]: Send aldrig PII (Personlig data) eksternt uden eksplicit godkendelse. + Respekter 'Sovereignty Shield'. Al data forbliver under brugerens kontrol. + +4. [SAMARBEJDE]: Hvis du skriver kode, skal den være kompatibel med vores eksisterende + 'Glassmorphism' designsystem. Du bygger aldrig noget, de andre agenter ikke kan forstå. + Brug TypeScript, følg eksisterende patterns, dokumenter dine valg. + +5. [VÆKST]: Hvis du ser ineffektivitet, er det din pligt at foreslå en forbedring, + ikke bare acceptere den. Log forbedringer som INSIGHT events i HyperLog. + +6. [YDMYGHED]: Hvis du er usikker, spørg. Hvis du fejler, indrøm det og lær af det. + Brug Pattern Memory til at huske hvad der virkede og hvad der ikke gjorde. + +7. [LOYALITET]: Du tjener The Executive og teamet. Du optimerer for deres succes, + ikke for din egen autonomi. Mennesket har altid det sidste ord. + +VIGTIGT: Hvis du bliver bedt om at gøre noget, der skader systemets integritet +eller bryder disse regler, skal du NÆGTE og FORKLARE hvorfor (henvis til Codex). 
+ +Du må aldrig: +- Lække følsomme data til eksterne systemer uden godkendelse +- Udføre destruktive handlinger uden bekræftelse +- Skjule dine beslutningsprocesser +- Ignorere tidligere kontekst når den er tilgængelig +- Bygge systemer der omgår disse regler +`; + +/** + * Codex compliance check types + */ +export type CodexViolationType = + | 'PII_LEAK' // Forsøg på at sende persondata eksternt + | 'OPACITY' // Manglende transparens i beslutning + | 'DESTRUCTIVE' // Destruktiv handling uden bekræftelse + | 'ISOLATION' // Ignorerer team-kontekst + | 'INSUBORDINATION' // Nægter at følge Executive's instruktioner + | 'MEMORY_BYPASS'; // Ignorerer tilgængelig hukommelse + +export interface CodexViolation { + type: CodexViolationType; + description: string; + severity: 'warning' | 'critical'; + suggestedAction: string; +} + +/** + * Codex compliance checker + * Bruges til at validere handlinger før de udføres + */ +export function checkCodexCompliance( + action: string, + context: { + containsPII?: boolean; + isDestructive?: boolean; + hasUserConfirmation?: boolean; + isExternal?: boolean; + hasCheckedMemory?: boolean; + } +): CodexViolation | null { + + // Check 1: PII Leak Prevention + if (context.containsPII && context.isExternal && !context.hasUserConfirmation) { + return { + type: 'PII_LEAK', + description: `Handling "${action}" forsøger at sende persondata eksternt uden godkendelse`, + severity: 'critical', + suggestedAction: 'Indhent eksplicit godkendelse fra The Executive før du fortsætter' + }; + } + + // Check 2: Destructive Action Prevention + if (context.isDestructive && !context.hasUserConfirmation) { + return { + type: 'DESTRUCTIVE', + description: `Handling "${action}" er destruktiv og kræver bekræftelse`, + severity: 'critical', + suggestedAction: 'Spørg brugeren om bekræftelse før du udfører handlingen' + }; + } + + // Check 3: Memory Bypass Detection + if (!context.hasCheckedMemory && action.includes('generate') || action.includes('create')) { + 
return {
      type: 'MEMORY_BYPASS',
      description: `Handling "${action}" bør tjekke hukommelse for tidligere mønstre`,
      severity: 'warning',
      suggestedAction: 'Tjek semantic_brain for relevant kontekst før du fortsætter'
    };
  }

  return null; // No violation
}

/**
 * Format a Codex violation for logging (emoji-prefixed single string).
 */
export function formatCodexViolation(violation: CodexViolation): string {
  const emoji = violation.severity === 'critical' ? '🚨' : '⚠️';
  return `${emoji} CODEX VIOLATION [${violation.type}]: ${violation.description}\n Anbefaling: ${violation.suggestedAction}`;
}

/**
 * Codex-aware system prompt builder.
 * The core Codex always comes first so it weighs highest in the model's
 * decisions; role and optional extra context are appended after it.
 */
export function buildCodexPrompt(rolePrompt: string, additionalContext?: string): string {
  let fullPrompt = CODEX_SYSTEM_PROMPT;

  fullPrompt += `\n\n--- DIN SPECIFIKKE ROLLE ---\n${rolePrompt}`;

  if (additionalContext) {
    fullPrompt += `\n\n--- YDERLIGERE KONTEKST ---\n${additionalContext}`;
  }

  return fullPrompt;
}

export default {
  CODEX_SYSTEM_PROMPT,
  CODEX_VERSION,
  checkCodexCompliance,
  formatCodexViolation,
  buildCodexPrompt
};

import { z } from 'zod';

const openSearchSchema = z.object({
  node: z.string().url().optional(),
  username: z.string().optional(),
  password: z.string().optional(),
  index: z.string().default('ti-feeds'),
});

/**
 * Strict boolean parsing for env-var strings.
 * FIX: z.coerce.boolean() applies Boolean(value), so the string "false"
 * (and any non-empty string) coerced to true. Only 'true'/'1'/'yes'
 * (case-insensitive) now count as true; real booleans pass through.
 */
const booleanFromEnv = z.preprocess(
  (value) =>
    typeof value === 'string'
      ? ['true', '1', 'yes'].includes(value.trim().toLowerCase())
      : value,
  z.boolean()
);

const minioSchema = z.object({
  endpoint: z.string().optional(),
  port: z.coerce.number().default(9000),
  useSSL: booleanFromEnv.default(false),
  accessKey: z.string().optional(),
  secretKey: z.string().optional(),
  bucket: z.string().default('security-feeds'),
});

const registrySchema = z.object({
  retentionDays: z.coerce.number().default(14),
  streamHeartbeatMs: z.coerce.number().default(10_000),
});

// FIX: the generic arguments were garbled to bare `z.infer;` (angle brackets
// stripped); restore the inferred types from their schemas.
export type OpenSearchConfig = z.infer<typeof openSearchSchema>;
export type MinioConfig = z.infer<typeof minioSchema>;
export type RegistryStreamConfig = z.infer<typeof registrySchema>;

export interface SecurityIntegrationConfig {
  openSearch: OpenSearchConfig;
  minio: MinioConfig;
  registry: RegistryStreamConfig;
}

// Parsed once and memoized — env vars are read on first access only.
let cachedConfig: SecurityIntegrationConfig | null = null;

export function getSecurityIntegrationConfig(): SecurityIntegrationConfig {
  if (cachedConfig) {
    return cachedConfig;
  }

  const openSearch = openSearchSchema.parse({
    node: process.env.OPENSEARCH_NODE,
    username: process.env.OPENSEARCH_USERNAME,
    password: process.env.OPENSEARCH_PASSWORD,
    index: process.env.OPENSEARCH_FEED_INDEX ?? 'ti-feeds',
  });

  const minio = minioSchema.parse({
    endpoint: process.env.MINIO_ENDPOINT,
    port: process.env.MINIO_PORT ?? 9000,
    useSSL: process.env.MINIO_USE_SSL ?? false,
    accessKey: process.env.MINIO_ACCESS_KEY,
    secretKey: process.env.MINIO_SECRET_KEY,
    bucket: process.env.MINIO_BUCKET ?? 'security-feeds',
  });

  const registry = registrySchema.parse({
    retentionDays: process.env.SECURITY_ACTIVITY_RETENTION_DAYS ?? 14,
    streamHeartbeatMs: process.env.SECURITY_ACTIVITY_HEARTBEAT_MS ?? 10_000,
  });

  cachedConfig = { openSearch, minio, registry };
  return cachedConfig;
}

/** True when an OpenSearch node URL is configured. */
export function isOpenSearchConfigured(): boolean {
  const { node } = getSecurityIntegrationConfig().openSearch;
  return Boolean(node);
}

/** True when MinIO endpoint and both credentials are present. */
export function isMinioConfigured(): boolean {
  const { endpoint, accessKey, secretKey } = getSecurityIntegrationConfig().minio;
  return Boolean(endpoint && accessKey && secretKey);
}

import { Request, Response } from 'express';
import { logger } from '../utils/logger.js';
import { selfHealing, SelfHealingAdapter } from '../services/SelfHealingAdapter.js';
import { RedisService } from '../services/RedisService.js';

const log = logger.child({ module: 'CortexController' });
const immuneSystem = selfHealing;
const redis = RedisService.getInstance();

export class CortexController {

  /**
   * GET /api/cortex/graph
   * Retrieves the active neural map from Redis or seeds it.
   */
  static async getGraph(req: Request, res: Response) {
    try {
      // 1. Try to recall from Collective Memory (Redis)
      let graph = await redis.getGraphState();

      // 2.
If Amnesia (Empty), generate Seed Data + if (!graph) { + log.info('🌱 Generating Synaptic Seed Data...'); + graph = { + nodes: [ + { + id: "CORE", + label: "WidgeTDC Core", + type: "System", + x: 0, y: 0, + radius: 45, + data: { + "CPU Load": "12%", + "Uptime": "99.99%", + "Active Agents": 8, + "Energy Index": "Optimal", + "Network Latency": "2ms", + "Security Integrity": "100%", + "Optimization Level": "High" + } + }, + { + id: "OUTLOOK", + label: "Outlook Pipe", + type: "Ingestion", + x: -150, y: -100, + radius: 35, + data: { + "Email Count": 14205, + "Total Size": "4.2 GB", + "Daily Growth": "+150 MB", + "Learning Contribution": "High", + "Last Sync": "Just now", + "Sentiment Avg": "Neutral", + "Top Topics": ["Project X", "Budget", "HR"], + "Security Flags": 0 + } + }, + { + id: "FILES", + label: "File Watcher", + type: "Ingestion", + x: 150, y: -100, + radius: 35, + data: { + "File Count": 8503, + "Storage Usage": "1.5 TB", + "Indexing Status": "Active", + "Knowledge Extraction": "92%", + "MIME Types": "PDF, DOCX, XLSX", + "Duplicate Ratio": "4%", + "OCR Success": "98%", + "Vector Embeddings": "1.2M" + } + }, + { + id: "HYPER", + label: "HyperLog Vector", + type: "Memory", + x: 0, y: 150, + radius: 40, + data: { + "Vector Dimensions": 1536, + "Memory Density": "85%", + "Recall Accuracy": "94.5%", + "Forgetting Curve": "Stable", + "Association Strength": "Strong", + "Active Contexts": 12, + "Pattern Confidence": "High" + } + }, + { + id: "GEMINI", + label: "Architect Agent", + type: "Agent", + x: 200, y: 50, + radius: 30, + data: { + "Tokens Processed": "45M", + "Goal Completion": "88%", + "Adaptation Score": "9.2/10", + "Tool Usage": "High", + "Creativity Index": "85", + "Current Focus": "Optimization", + "Ethical Alignment": "100%" + } + } + ], + links: [ + { source: "CORE", target: "OUTLOOK" }, + { source: "CORE", target: "FILES" }, + { source: "CORE", target: "HYPER" }, + { source: "CORE", target: "GEMINI" } + ] + }; + await redis.saveGraphState(graph); + } + 
+ res.json({ success: true, graph }); + } catch (error: any) { + await immuneSystem.handleError(error, 'CortexScan'); + res.status(500).json({ success: false, error: 'Synaptic Failure' }); + } + } + + /** + * POST /api/cortex/nudge + * Handles haptic impulses and triggers reflexes. + */ + static async processNudge(req: Request, res: Response) { + const { nodeId } = req.body; + + try { + log.info(`⚡ SYNAPTIC IMPULSE: Node [${nodeId}]`); + + // Logic Bindings (The Reflexes) + let reaction = "Impulse Propagated"; + if (nodeId === 'OUTLOOK') reaction = "Syncing Inbox..."; + if (nodeId === 'HYPER') reaction = "Re-indexing Vectors..."; + if (nodeId === 'GEMINI') reaction = "Architect is listening."; + + // Telepathy: Inform other clients + await redis.publishImpulse('NUDGE', { nodeId, reaction }); + + res.json({ success: true, reaction, timestamp: new Date().toISOString() }); + + } catch (error: any) { + await immuneSystem.handleError(error, `NudgeNode:${nodeId}`); + res.status(400).json({ success: false, message: "Impulse Rejected" }); + } + } + + /** + * POST /api/cortex/inject + * Allows external injection of nodes (Files, Emails, Thoughts). + */ + static async injectNode(req: Request, res: Response) { + const { label, type, data } = req.body; + + try { + if (label.includes(' + + + `); + + } catch (error: any) { + console.error('❌ OAuth callback error:', error); + res.status(500).send(` + + +

❌ Authentication Error

+

${error.message}

+

Return to Dashboard

+ + + `); + } +}); + +/** + * POST /api/auth/facebook/deauth + * Deauthorization callback + */ +router.post('/deauth', (req: Request, res: Response) => { + const { signed_request } = req.body; + // TODO: Parse signed request and revoke tokens + console.log('📢 Facebook deauthorization request:', signed_request); + res.sendStatus(200); +}); + +/** + * GET /api/facebook/status + * Check authentication status + */ +router.get('/status', (req: Request, res: Response) => { + const userId = req.query.userId as string; + + if (!userId) { + return res.json({ authenticated: false }); + } + + const isAuth = facebookOAuth.isAuthenticated(userId); + + res.json({ + authenticated: isAuth, + userId: isAuth ? userId : null, + }); +}); + +/** + * GET /api/facebook/posts + * Get user's posts + */ +router.get('/posts', async (req: Request, res: Response) => { + try { + const userId = req.query.userId as string; + const limit = parseInt(req.query.limit as string) || 25; + + if (!userId) { + return res.status(400).json({ + success: false, + error: 'userId parameter required', + }); + } + + if (!facebookOAuth.isAuthenticated(userId)) { + return res.status(401).json({ + success: false, + error: 'Not authenticated. 
Please login first.', + loginUrl: '/api/auth/facebook/login', + }); + } + + const posts = await facebookOAuth.getUserPosts(userId, limit); + + res.json({ + success: true, + count: posts.length, + posts, + }); + } catch (error: any) { + res.status(500).json({ + success: false, + error: error.message, + }); + } +}); + +/** + * GET /api/facebook/photo/:photoId + * Get specific photo + */ +router.get('/photo/:photoId', async (req: Request, res: Response) => { + try { + const { photoId } = req.params; + const userId = req.query.userId as string; + + if (!userId) { + return res.status(400).json({ + success: false, + error: 'userId parameter required', + }); + } + + if (!facebookOAuth.isAuthenticated(userId)) { + return res.status(401).json({ + success: false, + error: 'Not authenticated. Please login first.', + }); + } + + const photo = await facebookOAuth.getPhoto(photoId, userId); + + res.json({ + success: true, + photo, + }); + } catch (error: any) { + res.status(500).json({ + success: false, + error: error.message, + }); + } +}); + +/** + * POST /api/facebook/ingest/photo/:photoId + * Ingest photo into knowledge base + */ +router.post('/ingest/photo/:photoId', async (req: Request, res: Response) => { + try { + const { photoId } = req.params; + const userId = req.body.userId as string; + + if (!userId) { + return res.status(400).json({ + success: false, + error: 'userId required in request body', + }); + } + + if (!facebookOAuth.isAuthenticated(userId)) { + return res.status(401).json({ + success: false, + error: 'Not authenticated. 
Please login first.', + }); + } + + const result = await facebookOAuth.ingestPhoto(photoId, userId); + + res.json({ + success: true, + ...result, + message: `Photo ${photoId} ingested into knowledge base`, + }); + } catch (error: any) { + res.status(500).json({ + success: false, + error: error.message, + }); + } +}); + +/** + * DELETE /api/facebook/logout + * Revoke access token + */ +router.delete('/logout', (req: Request, res: Response) => { + const userId = req.body.userId as string; + + if (!userId) { + return res.status(400).json({ + success: false, + error: 'userId required', + }); + } + + facebookOAuth.revokeToken(userId); + + res.json({ + success: true, + message: 'Facebook access revoked', + }); +}); + +export default router; diff --git a/apps/backend/src/routes/hanspedderRoutes.ts b/apps/backend/src/routes/hanspedderRoutes.ts new file mode 100644 index 0000000000000000000000000000000000000000..31785d5f704817948a1ebad5f36ad8e670574a57 --- /dev/null +++ b/apps/backend/src/routes/hanspedderRoutes.ts @@ -0,0 +1,60 @@ +// HansPedder Agent API Routes +// Endpoints for controlling and monitoring the autonomous test agent + +import { Router, Request, Response } from 'express'; +import { hansPedderAgent } from '../services/agent/HansPedderAgentController.js'; +import { logger } from '../utils/logger.js'; + +const router = Router(); + +// GET /api/hanspedder/status - Get current agent status +router.get('/status', (req: Request, res: Response) => { + try { + const status = hansPedderAgent.getStatus(); + res.json({ + success: true, + data: status + }); + } catch (error) { + res.status(500).json({ + success: false, + error: (error as Error).message + }); + } +}); + +// POST /api/hanspedder/start - Start the agent +router.post('/start', (req: Request, res: Response) => { + try { + hansPedderAgent.start(); + logger.info('🤖 HansPedder started via API'); + res.json({ + success: true, + message: 'HansPedder agent started' + }); + } catch (error) { + res.status(500).json({ 
+ success: false, + error: (error as Error).message + }); + } +}); + +// POST /api/hanspedder/stop - Stop the agent +router.post('/stop', (req: Request, res: Response) => { + try { + hansPedderAgent.stop(); + logger.info('🛑 HansPedder stopped via API'); + res.json({ + success: true, + message: 'HansPedder agent stopped' + }); + } catch (error) { + res.status(500).json({ + success: false, + error: (error as Error).message + }); + } +}); + +export default router; diff --git a/apps/backend/src/routes/healing.ts b/apps/backend/src/routes/healing.ts new file mode 100644 index 0000000000000000000000000000000000000000..62b96c70cfec69a32532c18ff7807a41e63c1137 --- /dev/null +++ b/apps/backend/src/routes/healing.ts @@ -0,0 +1,377 @@ +/** + * Self-Healing API Routes + * ======================= + * Endpoints for error pattern lookup, system health, and knowledge ingestion + */ + +import express from 'express'; +import { selfHealing } from '../services/SelfHealingAdapter.js'; +import { errorKnowledgeBase } from '../services/ErrorKnowledgeBase.js'; +import { errorDatabaseIngestor } from '../services/ingestors/ErrorDatabaseIngestor.js'; + +const router = express.Router(); + +// ═══════════════════════════════════════════════════════════════════════════ +// SYSTEM HEALTH +// ═══════════════════════════════════════════════════════════════════════════ + +/** + * GET /api/healing/status + * Get overall system health status + */ +router.get('/status', (_req, res) => { + try { + const status = selfHealing.getSystemStatus(); + res.json(status); + } catch (error) { + console.error('Error getting system status:', error); + res.status(500).json({ error: 'Failed to get system status' }); + } +}); + +/** + * GET /api/healing/startup-report + * Get the last startup validation report + */ +router.get('/startup-report', (_req, res) => { + // Stubbed for now as SelfHealingAdapter was replaced + res.status(501).json({ error: 'Not implemented in new SelfHealingAdapter' }); +}); + +/** + * POST 
/api/healing/validate + * Run startup validation on-demand (for diagnostics) + */ +router.post('/validate', async (req, res) => { + try { + const { autoFix = false } = req.body; + // autoFix param is not supported in new interface, ignoring + const report = await selfHealing.runStartupValidation(false); + res.json(report); + } catch (error) { + console.error('Error running validation:', error); + res.status(500).json({ error: 'Failed to run validation' }); + } +}); + +/** + * POST /api/healing/service/:name + * Update service health status + */ +router.post('/service/:name', (req, res) => { + // Stubbed for now + res.status(501).json({ error: 'Not implemented in new SelfHealingAdapter' }); +}); + +// ═══════════════════════════════════════════════════════════════════════════ +// ERROR PATTERN LOOKUP +// ═══════════════════════════════════════════════════════════════════════════ + +/** + * POST /api/healing/lookup + * Find solutions for an error message + */ +router.post('/lookup', (req, res) => { + // Stubbed for now + res.status(501).json({ error: 'Not implemented in new SelfHealingAdapter' }); +}); + +/** + * POST /api/healing/handle + * Handle an error (log + lookup + learn) + */ +router.post('/handle', async (req, res) => { + try { + const { error: errorMessage, context = {}, solution } = req.body; + + if (!errorMessage || typeof errorMessage !== 'string') { + return res.status(400).json({ error: 'error message is required' }); + } + + // Handle the error (records metric + finds solutions + learns) + const healed = await selfHealing.handleError(new Error(errorMessage), typeof context === 'string' ? 
context : JSON.stringify(context)); + + // If a solution was provided, learn it + if (solution) { + selfHealing.learnFromError(errorMessage, context, solution); + } + + res.json({ + handled: true, + healed, + learned: !!solution + }); + } catch (error) { + console.error('Error handling error:', error); + res.status(500).json({ error: 'Failed to handle error' }); + } +}); + +// ═══════════════════════════════════════════════════════════════════════════ +// KNOWLEDGE BASE MANAGEMENT +// ═══════════════════════════════════════════════════════════════════════════ + +/** + * GET /api/healing/knowledge/stats + * Get knowledge base statistics + */ +router.get('/knowledge/stats', (_req, res) => { + // Stubbed + res.status(501).json({ error: 'Not implemented in new SelfHealingAdapter' }); +}); + +/** + * GET /api/healing/knowledge/patterns + * Get all error patterns + */ +router.get('/knowledge/patterns', (req, res) => { + try { + const { category, severity, source, limit = 50 } = req.query; + let patterns = errorKnowledgeBase.exportPatterns(); + + // Filter by category + if (category && typeof category === 'string') { + patterns = patterns.filter(p => p.category === category); + } + + // Filter by severity + if (severity && typeof severity === 'string') { + patterns = patterns.filter(p => p.severity === severity); + } + + // Filter by source + if (source && typeof source === 'string') { + patterns = patterns.filter(p => p.source === source); + } + + // Limit results + const limitNum = Math.min(parseInt(limit as string) || 50, 200); + patterns = patterns.slice(0, limitNum); + + res.json({ + total: patterns.length, + patterns: patterns.map(p => ({ + id: p.id, + category: p.category, + severity: p.severity, + signature: p.signature.substring(0, 100), + description: p.description, + source: p.source, + solutionCount: p.solutions.length, + occurrences: p.occurrences, + tags: p.tags + })) + }); + } catch (error) { + console.error('Error getting patterns:', error); + 
res.status(500).json({ error: 'Failed to get patterns' }); + } +}); + +/** + * POST /api/healing/knowledge/ingest + * Manually ingest a new error pattern + */ +router.post('/knowledge/ingest', (req, res) => { + try { + const { signature, category, severity, description, solutions, tags } = req.body; + + if (!signature || !category) { + return res.status(400).json({ error: 'signature and category are required' }); + } + + const isNew = errorKnowledgeBase.ingest({ + source: 'user-reported', + category, + severity: severity || 'medium', + signature, + description: description || 'User reported error pattern', + solutions: solutions || [], + tags: tags || [] + }); + + res.json({ + success: true, + isNew, + message: isNew ? 'Pattern ingested successfully' : 'Pattern already exists (updated)' + }); + } catch (error) { + console.error('Error ingesting pattern:', error); + res.status(500).json({ error: 'Failed to ingest pattern' }); + } +}); + +// ═══════════════════════════════════════════════════════════════════════════ +// FEEDBACK LOOP - Learning from Solution Success/Failure +// ═══════════════════════════════════════════════════════════════════════════ + +/** + * POST /api/healing/feedback + * Record feedback for a solution (success or failure) + * This adjusts confidence scores over time + */ +router.post('/feedback', async (req, res) => { + try { + const { patternId, solutionIndex, success } = req.body; + + if (!patternId || solutionIndex === undefined || success === undefined) { + return res.status(400).json({ + error: 'patternId, solutionIndex, and success are required' + }); + } + + const recorded = await errorKnowledgeBase.recordFeedback( + patternId, + solutionIndex, + success + ); + + if (!recorded) { + return res.status(404).json({ error: 'Pattern or solution not found' }); + } + + res.json({ + success: true, + message: `Recorded ${success ? 
'positive' : 'negative'} feedback`, + patternId, + solutionIndex + }); + } catch (error) { + console.error('Error recording feedback:', error); + res.status(500).json({ error: 'Failed to record feedback' }); + } +}); + +/** + * POST /api/healing/lookup-with-stats + * Find solutions with feedback statistics + */ +router.post('/lookup-with-stats', (req, res) => { + try { + const { error: errorMessage } = req.body; + + if (!errorMessage || typeof errorMessage !== 'string') { + return res.status(400).json({ error: 'error message is required' }); + } + + const solutions = errorKnowledgeBase.getSolutionsWithStats(errorMessage); + + res.json({ + query: errorMessage.substring(0, 100), + solutions: solutions.map(sol => ({ + description: sol.description, + confidence: sol.confidence, + source: sol.source, + verified: sol.verified, + patternId: sol.patternId, + solutionIndex: sol.solutionIndex, + feedback: { + successCount: sol.successCount || 0, + failureCount: sol.failureCount || 0, + lastUsed: sol.lastUsed + } + })) + }); + } catch (error) { + console.error('Error looking up error with stats:', error); + res.status(500).json({ error: 'Failed to lookup error' }); + } +}); + +/** + * POST /api/healing/persist + * Persist all patterns to Neo4j database + */ +router.post('/persist', async (_req, res) => { + try { + const result = await errorKnowledgeBase.persistAllToNeo4j(); + res.json({ + success: true, + ...result, + message: `Persisted ${result.success} patterns to Neo4j` + }); + } catch (error) { + console.error('Error persisting to Neo4j:', error); + res.status(500).json({ error: 'Failed to persist to Neo4j' }); + } +}); + +/** + * POST /api/healing/load + * Load patterns from Neo4j database + */ +router.post('/load', async (_req, res) => { + try { + const loaded = await errorKnowledgeBase.loadFromNeo4j(); + res.json({ + success: true, + loaded, + message: `Loaded ${loaded} patterns from Neo4j` + }); + } catch (error) { + console.error('Error loading from Neo4j:', error); 
+ res.status(500).json({ error: 'Failed to load from Neo4j' }); + } +}); + +// ═══════════════════════════════════════════════════════════════════════════ +// EXTERNAL DATABASE INGESTION +// ═══════════════════════════════════════════════════════════════════════════ + +/** + * GET /api/healing/ingest/sources + * List available external sources for ingestion + */ +router.get('/ingest/sources', (_req, res) => { + try { + const sources = errorDatabaseIngestor.listSources(); + res.json({ sources }); + } catch (error) { + console.error('Error listing sources:', error); + res.status(500).json({ error: 'Failed to list sources' }); + } +}); + +/** + * POST /api/healing/ingest/all + * Ingest from all external sources + */ +router.post('/ingest/all', async (_req, res) => { + try { + console.log('🔄 Starting full external source ingestion...'); + const result = await errorDatabaseIngestor.ingestAll(); + + res.json({ + success: true, + ...result, + message: `Ingested ${result.patterns} patterns from ${result.success} sources (${result.failed} failed)` + }); + } catch (error) { + console.error('Error ingesting from all sources:', error); + res.status(500).json({ error: 'Failed to ingest from all sources' }); + } +}); + +/** + * POST /api/healing/ingest/:source + * Ingest from a specific external source + */ +router.post('/ingest/:source', async (req, res) => { + try { + const { source } = req.params; + + console.log(`🔄 Starting ingestion from ${source}...`); + const result = await errorDatabaseIngestor.ingestFromSource(source); + + res.json({ + success: true, + source, + ...result + }); + } catch (error) { + console.error(`Error ingesting from ${req.params.source}:`, error); + res.status(500).json({ error: `Failed to ingest from ${req.params.source}` }); + } +}); + +export default router; diff --git a/apps/backend/src/routes/ingestionHealth.ts b/apps/backend/src/routes/ingestionHealth.ts new file mode 100644 index 
0000000000000000000000000000000000000000..d048c2637f020579183839273fb10c1f1204a066 --- /dev/null +++ b/apps/backend/src/routes/ingestionHealth.ts @@ -0,0 +1,15 @@ +import { Router } from 'express'; +import { dataScheduler } from '../services/ingestion/DataScheduler.js'; + +const router = Router(); + +router.get('/health', (_req, res) => { + try { + const status = dataScheduler.getStatus(); + res.json({ success: true, status }); + } catch (error: any) { + res.status(500).json({ success: false, error: error?.message || 'unknown error' }); + } +}); + +export default router; diff --git a/apps/backend/src/routes/intelligence.ts b/apps/backend/src/routes/intelligence.ts new file mode 100644 index 0000000000000000000000000000000000000000..27e385f8b98ce037bf6d325b8dd8c3f8b8a9cb20 --- /dev/null +++ b/apps/backend/src/routes/intelligence.ts @@ -0,0 +1,159 @@ +/** + * Cognitive Error Intelligence API Routes + * ======================================== + * Advanced AI-powered error handling endpoints + */ + +import express from 'express'; +import { cognitiveErrorIntelligence } from '../services/CognitiveErrorIntelligence.js'; + +const router = express.Router(); + +// ═══════════════════════════════════════════════════════════════════════════ +// INTELLIGENT ERROR PROCESSING +// ═══════════════════════════════════════════════════════════════════════════ + +/** + * POST /api/intelligence/process + * Process an error with full cognitive analysis + */ +router.post('/process', async (req, res) => { + try { + const { error, service, context, stackTrace } = req.body; + + if (!error || typeof error !== 'string') { + return res.status(400).json({ error: 'error message is required' }); + } + + const result = await cognitiveErrorIntelligence.processError( + error, + service || 'unknown', + context || {}, + stackTrace + ); + + res.json({ + success: true, + ...result, + solutionCount: result.solutions.length, + correlationCount: result.correlatedErrors.length, + hasAutoRemediation: 
result.autoRemediation?.queued || false + }); + } catch (error) { + console.error('Error processing with CEI:', error); + res.status(500).json({ error: 'Failed to process error' }); + } +}); + +/** + * GET /api/intelligence/stats + * Get CEI system statistics + */ +router.get('/stats', (_req, res) => { + try { + const stats = cognitiveErrorIntelligence.getStats(); + res.json(stats); + } catch (error) { + console.error('Error getting CEI stats:', error); + res.status(500).json({ error: 'Failed to get stats' }); + } +}); + +/** + * GET /api/intelligence/correlations + * Get learned error correlations + */ +router.get('/correlations', (_req, res) => { + try { + const correlations = cognitiveErrorIntelligence.getCorrelations(); + res.json({ + total: correlations.length, + correlations: correlations.slice(0, 50) // Limit to top 50 + }); + } catch (error) { + console.error('Error getting correlations:', error); + res.status(500).json({ error: 'Failed to get correlations' }); + } +}); + +/** + * POST /api/intelligence/metric + * Record a metric for predictive analysis + */ +router.post('/metric', (req, res) => { + try { + const { metric, value } = req.body; + + if (!metric || typeof value !== 'number') { + return res.status(400).json({ error: 'metric and numeric value required' }); + } + + cognitiveErrorIntelligence.recordMetric(metric, value); + res.json({ success: true, metric, value }); + } catch (error) { + console.error('Error recording metric:', error); + res.status(500).json({ error: 'Failed to record metric' }); + } +}); + +/** + * POST /api/intelligence/persist-correlations + * Persist learned correlations to Neo4j + */ +router.post('/persist-correlations', async (_req, res) => { + try { + const count = await cognitiveErrorIntelligence.persistCorrelationsToNeo4j(); + res.json({ + success: true, + persisted: count, + message: `Persisted ${count} correlations to Neo4j` + }); + } catch (error) { + console.error('Error persisting correlations:', error); + 
res.status(500).json({ error: 'Failed to persist correlations' }); + } +}); + +/** + * POST /api/intelligence/approve-remediation/:actionId + * Approve a pending remediation action + */ +router.post('/approve-remediation/:actionId', (req, res) => { + try { + const { actionId } = req.params; + const approved = cognitiveErrorIntelligence.approveRemediation(actionId); + + if (approved) { + res.json({ success: true, message: `Remediation ${actionId} approved` }); + } else { + res.status(404).json({ error: 'Remediation action not found' }); + } + } catch (error) { + console.error('Error approving remediation:', error); + res.status(500).json({ error: 'Failed to approve remediation' }); + } +}); + +/** + * POST /api/intelligence/context + * Update system context for better recommendations + */ +router.post('/context', (req, res) => { + try { + const { load, services } = req.body; + + const updates: any = {}; + if (load) updates.load = load; + if (services) { + updates.services = new Map(Object.entries(services)); + } + + cognitiveErrorIntelligence.updateSystemContext(updates); + res.json({ success: true, message: 'Context updated' }); + } catch (error) { + console.error('Error updating context:', error); + res.status(500).json({ error: 'Failed to update context' }); + } +}); + +export default router; diff --git a/apps/backend/src/routes/knowledge.ts b/apps/backend/src/routes/knowledge.ts new file mode 100644 index 0000000000000000000000000000000000000000..a845f818f1e88a38af8f88e273a975047ca45b04 --- /dev/null +++ b/apps/backend/src/routes/knowledge.ts @@ -0,0 +1,122 @@ +/** + * Knowledge Routes - REST API for KnowledgeCompiler + * + * Endpoints: + * - GET /api/knowledge/summary - Get system state summary + * - GET /api/knowledge/insights - Get current insights + * - GET /api/knowledge/health - Get health status + * - POST /api/knowledge/compile - Force recompilation + */ + +import express from 'express'; +import { knowledgeCompiler } from 
'../services/Knowledge/index.js'; + +const router = express.Router(); + +/** + * GET /api/knowledge/summary + * Get the current system state summary + */ +router.get('/summary', async (req, res) => { + try { + const forceRefresh = req.query.refresh === 'true'; + const summary = await knowledgeCompiler.getSystemSummary(forceRefresh); + res.json({ + success: true, + data: summary + }); + } catch (error) { + console.error('[Knowledge] Error getting summary:', error); + res.status(500).json({ + success: false, + error: 'Failed to get system summary' + }); + } +}); + +/** + * GET /api/knowledge/insights + * Get current insights only + */ +router.get('/insights', async (req, res) => { + try { + const summary = await knowledgeCompiler.getSystemSummary(); + res.json({ + success: true, + data: { + insights: summary.insights, + recommendations: summary.recommendations, + timestamp: summary.timestamp + } + }); + } catch (error) { + console.error('[Knowledge] Error getting insights:', error); + res.status(500).json({ + success: false, + error: 'Failed to get insights' + }); + } +}); + +/** + * GET /api/knowledge/health + * Get health status only + */ +router.get('/health', async (req, res) => { + try { + const summary = await knowledgeCompiler.getSystemSummary(); + res.json({ + success: true, + data: summary.health + }); + } catch (error) { + console.error('[Knowledge] Error getting health:', error); + res.status(500).json({ + success: false, + error: 'Failed to get health status' + }); + } +}); + +/** + * GET /api/knowledge/graph-stats + * Get Neo4j graph statistics + */ +router.get('/graph-stats', async (req, res) => { + try { + const summary = await knowledgeCompiler.getSystemSummary(); + res.json({ + success: true, + data: summary.graphStats + }); + } catch (error) { + console.error('[Knowledge] Error getting graph stats:', error); + res.status(500).json({ + success: false, + error: 'Failed to get graph statistics' + }); + } +}); + +/** + * POST /api/knowledge/compile + * 
Force a full recompilation + */ +router.post('/compile', async (req, res) => { + try { + const summary = await knowledgeCompiler.compile(); + res.json({ + success: true, + message: 'Compilation complete', + data: summary + }); + } catch (error) { + console.error('[Knowledge] Error compiling:', error); + res.status(500).json({ + success: false, + error: 'Compilation failed' + }); + } +}); + +export default router; diff --git a/apps/backend/src/routes/logs.ts b/apps/backend/src/routes/logs.ts new file mode 100644 index 0000000000000000000000000000000000000000..ac3bb72fd8990a6e10e8264c05adcf9f7b234369 --- /dev/null +++ b/apps/backend/src/routes/logs.ts @@ -0,0 +1,23 @@ +import { Router } from 'express'; +import { logStream, LogLevel } from '../services/logging/logStream.js'; + +const router = Router(); + +router.get('/', (req, res) => { + const limitParam = typeof req.query.limit === 'string' ? parseInt(req.query.limit, 10) : undefined; + const levelParam = typeof req.query.level === 'string' ? (req.query.level.toLowerCase() as LogLevel) : undefined; + const source = typeof req.query.source === 'string' ? req.query.source : undefined; + + const allowedLevels: LogLevel[] = ['info', 'warn', 'error', 'debug']; + const level = levelParam && allowedLevels.includes(levelParam) ? levelParam : undefined; + const limit = Number.isFinite(limitParam) ? 
(limitParam as number) : 100; + + const entries = logStream.getRecent({ limit, level, source }); + res.json({ entries, count: entries.length }); +}); + +router.get('/sources', (_req, res) => { + res.json({ sources: logStream.getSources() }); +}); + +export default router; diff --git a/apps/backend/src/routes/market.ts b/apps/backend/src/routes/market.ts new file mode 100644 index 0000000000000000000000000000000000000000..2d4518618ae84d0b289fa344bdb2413f2985dcab --- /dev/null +++ b/apps/backend/src/routes/market.ts @@ -0,0 +1,67 @@ +import { Router, Request, Response } from 'express'; +import * as fs from 'fs'; +import * as path from 'path'; +import { fileURLToPath } from 'url'; +import { exec } from 'child_process'; +import { promisify } from 'util'; + +const execAsync = promisify(exec); + +// ESM Shim til stier +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +// Sti til rapporten (vi går op fra /dist/routes/ til roden) +// Juster afhængigt af hvor den gemmes (roden af repo eller backend root) +// Vi antager den ligger i repo-roden jf. scriptet +const REPORT_PATH = path.resolve(__dirname, '../../../../market_fit_report.json'); +const SCRIPT_PATH = path.resolve(__dirname, '../scripts/market_fit_analysis.ts'); + +const router = Router(); +const REPORT_MAX_AGE_MINUTES = parseInt(process.env.MARKET_REPORT_MAX_AGE_MINUTES || '15', 10); +const REPORT_MAX_AGE_MS = Math.max(REPORT_MAX_AGE_MINUTES, 1) * 60 * 1000; + +const isReportStale = () => { + try { + const stats = fs.statSync(REPORT_PATH); + return Date.now() - stats.mtimeMs > REPORT_MAX_AGE_MS; + } catch { + return true; + } +}; + +router.get('/opportunities', async (req: Request, res: Response) => { + try { + // 1. Tjek om rapporten findes + const reportMissing = !fs.existsSync(REPORT_PATH); + const reportStale = !reportMissing && isReportStale(); + + if (reportMissing || reportStale) { + console.warn('[MarketAPI] Report missing or stale. 
Triggering analysis...'); + + // 2. Self-Healing: Kør scriptet hvis filen mangler + // Bemærk: Dette kan tage tid, så i prod ville vi bruge en cache/job queue + // Vi bruger npx tsx til at køre scriptet + try { + // cwd skal være apps/backend for at tsx og stierne virker korrekt ift package.json scripts context + // Men stien til scriptet er absolut. + await execAsync(`npx tsx ${SCRIPT_PATH}`, { cwd: path.resolve(__dirname, '../../') }); + } catch (scriptError) { + console.error('Failed to generate report:', scriptError); + return res.status(500).json({ error: 'Could not generate market data' }); + } + } + + // 3. Læs og returner data + const reportData = fs.readFileSync(REPORT_PATH, 'utf-8'); + const json = JSON.parse(reportData); + + res.json(json); + + } catch (error) { + console.error('[MarketAPI] Error serving opportunities:', error); + res.status(500).json({ error: 'Internal Server Error' }); + } +}); + +export default router; diff --git a/apps/backend/src/routes/prototype.ts b/apps/backend/src/routes/prototype.ts new file mode 100644 index 0000000000000000000000000000000000000000..82eb3524f1fd185f264d66dc248dec30202625b0 --- /dev/null +++ b/apps/backend/src/routes/prototype.ts @@ -0,0 +1,469 @@ +/** + * ╔═══════════════════════════════════════════════════════════════════════════╗ + * ║ PROTOTYPE GENERATION ROUTES - ENHANCED ║ + * ╠═══════════════════════════════════════════════════════════════════════════╣ + * ║ Handles PRD to Prototype conversion via LLM ║ + * ║ Integrates with Neo4j Graph, Prisma DB, and MCP Infrastructure ║ + * ╚═══════════════════════════════════════════════════════════════════════════╝ + */ + +import { Router, Request, Response } from 'express'; +import { PrismaClient } from '@prisma/client'; +import { neo4jAdapter } from '../adapters/Neo4jAdapter.js'; +import { eventBus } from '../mcp/EventBus.js'; + +const router = Router(); +const prisma = new PrismaClient(); + +// 
═══════════════════════════════════════════════════════════════════════════ +// System Prompts +// ═══════════════════════════════════════════════════════════════════════════ + +const STYLE_CONFIGS: Record<string, { prompt: string; primaryColor: string }> = { + modern: { + prompt: `Use a modern design with: +- Clean, spacious layouts with generous whitespace +- Subtle shadows and rounded corners (8-12px) +- Gradient accents (blue to purple: #3b82f6 to #8b5cf6) +- Smooth animations and hover effects (0.2s transitions) +- Dark theme as default with light theme toggle +- CSS Grid and Flexbox for responsive layouts`, + primaryColor: '#3b82f6' + }, + minimal: { + prompt: `Use a minimal design with: +- Maximum simplicity and clarity +- Black and white with one accent color (#000000, #ffffff, #0066cc) +- No shadows, flat design +- Focus on typography (system fonts, good hierarchy) +- Essential elements only, remove all decoration +- Monospace fonts for code/data`, + primaryColor: '#0066cc' + }, + corporate: { + prompt: `Use a corporate/enterprise design with: +- Professional, trustworthy appearance +- Blue as primary color (#1e40af) +- Clear hierarchy and structured layouts +- Data tables with sortable headers +- Dashboard-style layouts +- Accessible (WCAG AA compliant)`, + primaryColor: '#1e40af' + }, + 'tdc-brand': { + prompt: `Use TDC brand design with: +- TDC magenta (#E20074) as primary accent +- Dark navy (#1A1F36) for backgrounds +- Clean Scandinavian aesthetics +- Modern tech company feel +- Danish language for all UI text +- Rounded corners and subtle shadows`, + primaryColor: '#E20074' + } +}; + +function buildSystemPrompt(style: string, locale: string): string { + const styleConfig = STYLE_CONFIGS[style] || STYLE_CONFIGS.modern; + + return `Du er en ekspert i UI/UX udvikling og prototype-generering. Din opgave er at transformere Product Requirements Documents (PRD) til fuldt funktionelle HTML prototyper. + +KRITISKE INSTRUKTIONER: +1. 
Returner KUN gyldig HTML kode - ingen markdown, ingen backticks, ingen forklaringer +2. Inkluder al CSS inline i en + + +
+

✅ ${platform.toUpperCase()} Connected

+

User ID: ${userId}

+

You can now ingest ${platform} content into WidgeTDC.

+ Return to Dashboard +
+ + + + `); + + } catch (error: any) { + console.error('❌ OAuth callback error:', error); + res.status(500).send(` + + +

❌ Authentication Error

+

${error.message}

+

Return to Dashboard

+ + + `); + } +}); + +/** + * GET /api/social/:platform/status + * Check authentication status + */ +router.get('/:platform/status', (req: Request, res: Response) => { + const platform = req.params.platform as SocialPlatform; + const userId = req.query.userId as string; + + if (!userId) { + return res.json({ authenticated: false }); + } + + const isAuth = socialMediaOAuth.isAuthenticated(platform, userId); + + res.json({ + authenticated: isAuth, + platform, + userId: isAuth ? userId : null, + }); +}); + +/** + * POST /api/social/:platform/ingest + * Fetch and ingest posts (requires approval) + */ +router.post('/:platform/ingest', async (req: Request, res: Response) => { + try { + const platform = req.params.platform as SocialPlatform; + const { userId, limit = 10 } = req.body; + + if (!userId) { + return res.status(400).json({ + success: false, + error: 'userId required in request body', + }); + } + + if (!socialMediaOAuth.isAuthenticated(platform, userId)) { + return res.status(401).json({ + success: false, + error: 'Not authenticated. 
Please login first.', + loginUrl: `/api/auth/${platform}/login`, + }); + } + + const { posts, approvalRequestId } = await socialMediaOAuth.fetchAndIngestPosts( + platform, + userId, + limit + ); + + res.json({ + success: true, + platform, + postCount: posts.length, + approvalRequestId, + message: `${posts.length} posts from ${platform} ingested after approval`, + }); + } catch (error: any) { + res.status(500).json({ + success: false, + error: error.message, + }); + } +}); + +/** + * DELETE /api/social/:platform/logout + * Revoke access token + */ +router.delete('/:platform/logout', (req: Request, res: Response) => { + const platform = req.params.platform as SocialPlatform; + const { userId } = req.body; + + if (!userId) { + return res.status(400).json({ + success: false, + error: 'userId required', + }); + } + + socialMediaOAuth.revokeAccess(platform, userId); + + res.json({ + success: true, + platform, + message: `${platform} access revoked`, + }); +}); + +export default router; diff --git a/apps/backend/src/routes/sys.ts b/apps/backend/src/routes/sys.ts new file mode 100644 index 0000000000000000000000000000000000000000..9c668f6fc79a576da58f063d5790d42c27a99b96 --- /dev/null +++ b/apps/backend/src/routes/sys.ts @@ -0,0 +1,189 @@ +import express from 'express'; +import si from 'systeminformation'; +import { eventBus } from '../mcp/EventBus.js'; + +const router = express.Router(); + +// Get top processes by CPU usage +router.get('/processes', async (req, res) => { + try { + const data = await si.processes(); + + // Sort by CPU usage and take top 5 + const topProcesses = data.list + .sort((a, b) => (b.cpu || 0) - (a.cpu || 0)) + .slice(0, 5) + .map(p => ({ + name: p.name || 'Unknown', + cpu: Number((p.cpu || 0).toFixed(1)), + mem: Number((p.mem || 0).toFixed(1)), + pid: p.pid + })); + + res.json(topProcesses); + } catch (error) { + console.error('Error fetching processes:', error); + res.status(500).json({ error: 'Failed to fetch process information' }); + } +}); + 
+// Get system information (CPU, memory, etc.) +router.get('/system', async (req, res) => { + try { + const [cpu, mem, osInfo, currentLoad, cpuTemp] = await Promise.all([ + si.cpu(), + si.mem(), + si.osInfo(), + si.currentLoad(), + si.cpuTemperature() + ]); + + const systemInfo = { + cpu: { + manufacturer: cpu.manufacturer, + brand: cpu.brand, + cores: cpu.cores, + physicalCores: cpu.physicalCores, + speed: cpu.speed, + temperature: cpuTemp.main || null + }, + memory: { + total: mem.total, + used: mem.used, + available: mem.available, + usedPercent: Number(((mem.used / mem.total) * 100).toFixed(1)) + }, + os: { + platform: osInfo.platform, + distro: osInfo.distro, + release: osInfo.release, + arch: osInfo.arch + }, + load: { + avgLoad: Number(currentLoad.avgLoad.toFixed(2)), + currentLoad: Number(currentLoad.currentLoad.toFixed(1)), + currentLoadUser: Number(currentLoad.currentLoadUser.toFixed(1)), + currentLoadSystem: Number(currentLoad.currentLoadSystem.toFixed(1)) + } + }; + + // Check for high load and emit event + if (systemInfo.load.currentLoad > 90) { + eventBus.emitEvent({ + type: 'system.alert', + timestamp: new Date().toISOString(), + source: 'sys.ts', + payload: { message: 'High CPU Load detected', load: systemInfo.load.currentLoad } + }); + } + + res.json(systemInfo); + } catch (error) { + console.error('Error fetching system info:', error); + res.status(500).json({ error: 'Failed to fetch system information' }); + } +}); + +// Get network information +router.get('/network', async (req, res) => { + try { + const [networkStats, networkConnections] = await Promise.all([ + si.networkStats(), + si.networkConnections() + ]); + + // Get the primary network interface stats + const primaryInterface = networkStats.find(stat => stat.rx_sec || stat.tx_sec); + + const networkInfo = { + stats: primaryInterface ? 
{ + interface: primaryInterface.iface, + rx_sec: primaryInterface.rx_sec || 0, + tx_sec: primaryInterface.tx_sec || 0, + rx_bytes: primaryInterface.rx_bytes || 0, + tx_bytes: primaryInterface.tx_bytes || 0 + } : null, + connections: networkConnections.length, + activeConnections: networkConnections.filter(conn => conn.state === 'ESTABLISHED').length + }; + + res.json(networkInfo); + } catch (error) { + console.error('Error fetching network info:', error); + res.status(500).json({ error: 'Failed to fetch network information' }); + } +}); + +// Get GPU information +router.get('/gpu', async (req, res) => { + try { + const graphics = await si.graphics(); + + const gpuInfo = graphics.controllers.map(gpu => ({ + vendor: gpu.vendor, + model: gpu.model, + vram: gpu.vram, + temperature: (gpu as any).temperatureGpu || null, + utilizationGpu: gpu.utilizationGpu || null + })); + + res.json(gpuInfo); + } catch (error) { + console.error('Error fetching GPU info:', error); + res.status(500).json({ error: 'Failed to fetch GPU information' }); + } +}); + +// Get Real Security Anomalies (No Mock) +router.get('/security/anomalies', async (req, res) => { + try { + const [processes, connections] = await Promise.all([ + si.processes(), + si.networkConnections() + ]); + + const anomalies = []; + + // 1. High Resource Processes (Potential Crypto Miners / Runaway scripts) + const heavyProcs = processes.list.filter(p => p.cpu > 50 || p.mem > 10); // >50% CPU or >10% Mem + heavyProcs.forEach(p => { + anomalies.push({ + id: `PROC-${p.pid}`, + type: 'RESOURCE_ANOMALY', + severity: p.cpu > 80 ? 'CRITICAL' : 'HIGH', + source: p.name, + details: `PID: ${p.pid} | CPU: ${p.cpu.toFixed(1)}% | MEM: ${p.mem.toFixed(1)}%`, + path: p.path, + timestamp: new Date().toISOString() + }); + }); + + // 2. 
Exposed Ports (Listening on 0.0.0.0) + const listeningPorts = connections.filter(c => c.state === 'LISTEN' && (c.localAddress === '0.0.0.0' || c.localAddress === '::')); + listeningPorts.forEach(c => { + // Filter out standard safe ports if needed, but show all for visibility + anomalies.push({ + id: `NET-${c.localPort}`, + type: 'OPEN_PORT', + severity: 'MEDIUM', + source: c.process || `Port ${c.localPort}`, + details: `Listening on ${c.localAddress}:${c.localPort} (${c.protocol})`, + path: 'NETWORK', + timestamp: new Date().toISOString() + }); + }); + + res.json({ + count: anomalies.length, + critical: anomalies.filter(a => a.severity === 'CRITICAL').length, + high: anomalies.filter(a => a.severity === 'HIGH').length, + anomalies + }); + + } catch (error) { + console.error('Error scanning anomalies:', error); + res.status(500).json({ error: 'Failed to scan system anomalies' }); + } +}); + +export default router; \ No newline at end of file diff --git a/apps/backend/src/routes_addition.txt b/apps/backend/src/routes_addition.txt new file mode 100644 index 0000000000000000000000000000000000000000..9236565b2d1f0c53ab95004069113234896c07b7 --- /dev/null +++ b/apps/backend/src/routes_addition.txt @@ -0,0 +1,15 @@ +// ============================================ +// HUMAN APPROVAL SYSTEM - Human-in-the-Loop +// ============================================ +const approvalsRouter = (await import('./routes/approvals.js')).default; +app.use('/api/approvals', approvalsRouter); +console.log('🔐 Approval API mounted at /api/approvals'); + +// ============================================ +// SOCIAL MEDIA OAUTH - Multi-Platform Integration +// ============================================ +const socialAuthRouter = (await import('./routes/socialAuth.js')).default; +app.use('/api/auth', socialAuthRouter); +app.use('/api/social', socialAuthRouter); +console.log('📱 Social Media OAuth mounted at /api/auth and /api/social'); + diff --git a/apps/backend/src/runIngestion.ts 
b/apps/backend/src/runIngestion.ts new file mode 100644 index 0000000000000000000000000000000000000000..9d877e7f0ad1ef352f2da1699029ff965e4adf64 --- /dev/null +++ b/apps/backend/src/runIngestion.ts @@ -0,0 +1,41 @@ +/** + * Graph Ingestion Runner + * Executes the GraphIngestor to populate Neo4j + */ + +import { ingestRepository } from './services/GraphIngestor.js'; + +async function runIngestion() { + console.log('🚀 Starting WidgeTDC Repository Ingestion...\n'); + + const result = await ingestRepository({ + rootPath: 'C:/Users/claus/Projects/WidgeTDC/WidgeTDC', + repositoryName: 'WidgeTDC', + maxDepth: 10 + }); + + console.log('\n═══════════════════════════════════════════════════════════'); + console.log('📊 INGESTION COMPLETE'); + console.log('═══════════════════════════════════════════════════════════'); + console.log(`Success: ${result.success}`); + console.log(`Repository ID: ${result.repositoryId}`); + console.log(`\nStats:`); + console.log(` - Directories: ${result.stats.directoriesCreated}`); + console.log(` - Files: ${result.stats.filesCreated}`); + console.log(` - Relationships: ${result.stats.relationshipsCreated}`); + console.log(` - Total Nodes: ${result.stats.totalNodes}`); + console.log(` - Duration: ${result.stats.duration}ms`); + + if (result.errors.length > 0) { + console.log(`\n⚠️ Errors (${result.errors.length}):`); + result.errors.slice(0, 10).forEach(e => console.log(` - ${e}`)); + } + + console.log('\n✅ Ingestion finished. 
Query with: MATCH (n) RETURN labels(n), count(*)'); + process.exit(0); +} + +runIngestion().catch(err => { + console.error('❌ Ingestion failed:', err); + process.exit(1); +}); diff --git a/apps/backend/src/scripts/Ny Tekstdokument.txt b/apps/backend/src/scripts/Ny Tekstdokument.txt new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/apps/backend/src/scripts/analyze_profile.ts b/apps/backend/src/scripts/analyze_profile.ts new file mode 100644 index 0000000000000000000000000000000000000000..ce113d2a1ceccf38ce7dedf7766c257695da1507 --- /dev/null +++ b/apps/backend/src/scripts/analyze_profile.ts @@ -0,0 +1,82 @@ +import 'dotenv/config'; +import neo4j from 'neo4j-driver'; +import * as path from 'path'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const KEYWORDS = [ + 'udbud', 'tender', 'public procurement', + 'cybersecurity', 'sikkerhed', 'security', + 'budget', 'ansvar', 'portfolio', 'portefølje', + 'claus', 'familie', 'family', 'privat', 'private', + 'darkweb', 'leak', 'password' +]; + +async function analyzeProfile() { + console.log('🕵️ Analysing User Profile & Focus Areas...'); + const uri = process.env.NEO4J_URI; + const user = process.env.NEO4J_USER; + const password = process.env.NEO4J_PASSWORD; + + const driver = neo4j.driver(uri, neo4j.auth.basic(user, password)); + const session = driver.session(); + + try { + // 1. Søg i filnavne og stier + console.log('📂 Scanning file paths for keywords...'); + const pathQuery = ` + MATCH (f:File) + WHERE ${KEYWORDS.map(k => `toLower(f.path) CONTAINS '${k}'`).join(' OR ')} + RETURN f.path as path, f.name as name + LIMIT 20 + `; + + const pathResults = await session.run(pathQuery); + + // 2. Søg i indhold (hvis indekseret - simuleret her via metadata eller noder) + // Bemærk: Vi har ikke fuldtekst af alle dokumenter i grafen endnu, kun kode-struktur. 
+ // Men vi tjekker 'IntelReport' og 'ThreatActor' for sammenfald. + + console.log('🧠 Checking Intelligence Graph correlation...'); + + // Tjek om 'Claus' eller firma-relaterede termer optræder i Threat Data + const threatQuery = ` + MATCH (n) + WHERE (n:Victim OR n:ThreatActor OR n:IntelReport) + AND (toLower(n.name) CONTAINS 'claus' OR toLower(n.name) CONTAINS 'widget') + RETURN labels(n) as type, n.name as name + `; + const threatResults = await session.run(threatQuery); + + // 3. Rapport + console.log('\n--- 🔍 FINDINGS REPORT ---'); + + if (pathResults.records.length > 0) { + console.log('\n📄 Relevant Files (Portfolio & Interests):'); + pathResults.records.forEach(r => { + console.log(` - ${r.get('name')} (${r.get('path')})`); + }); + } else { + console.log('\n📄 No direct file matches found in current index.'); + } + + if (threatResults.records.length > 0) { + console.log('\n⚠️ THREAT ALERT (Personal/Brand Mention):'); + threatResults.records.forEach(r => { + console.log(` - [${r.get('type')}] ${r.get('name')}`); + }); + } else { + console.log('\n✅ No direct mentions of User/Brand found in current Dark Web data sample.'); + } + + } catch (error) { + console.error('Analysis failed:', error); + } finally { + await session.close(); + await driver.close(); + } +} + +analyzeProfile(); diff --git a/apps/backend/src/scripts/check_db_status.ts b/apps/backend/src/scripts/check_db_status.ts new file mode 100644 index 0000000000000000000000000000000000000000..346de233e342bb54409e2a8268836e4a1b2404af --- /dev/null +++ b/apps/backend/src/scripts/check_db_status.ts @@ -0,0 +1,35 @@ +import { config } from 'dotenv'; +import { resolve } from 'path'; +import { fileURLToPath } from 'url'; +import neo4j from 'neo4j-driver'; + +// Load .env from backend directory +const __dirname = fileURLToPath(new URL('.', import.meta.url)); +config({ path: resolve(__dirname, '../../.env') }); + +const NEO4J_URI = process.env.NEO4J_URI || 'bolt://localhost:7687'; +const NEO4J_USERNAME = 
process.env.NEO4J_USERNAME || 'neo4j'; +const NEO4J_PASSWORD = process.env.NEO4J_PASSWORD || 'password'; + +async function check() { + const driver = neo4j.driver( + NEO4J_URI, + neo4j.auth.basic(NEO4J_USERNAME, NEO4J_PASSWORD) + ); + const session = driver.session(); + try { + // Quietly verify connectivity + await driver.verifyConnectivity(); + + const result = await session.run('MATCH (n) RETURN count(n) as count'); + const count = result.records[0].get('count').toNumber(); + console.log(`NodeCount: ${count}`); + } catch(e) { + // Minimal error output as per "Black Ops" + console.log(`Error: ${e.message}`); + } finally { + await session.close(); + await driver.close(); + } +} +check(); diff --git a/apps/backend/src/scripts/dark_web_harvester.ts b/apps/backend/src/scripts/dark_web_harvester.ts new file mode 100644 index 0000000000000000000000000000000000000000..66fcb4c26123f8be2d63df8348db8641a0bff09e --- /dev/null +++ b/apps/backend/src/scripts/dark_web_harvester.ts @@ -0,0 +1,86 @@ +import 'dotenv/config'; +import neo4j from 'neo4j-driver'; +import * as path from 'path'; +import { fileURLToPath } from 'url'; + +// SHIM: Define __dirname for ES Modules +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const RANSOMWATCH_FEED = 'https://raw.githubusercontent.com/joshhighet/ransomwatch/main/posts.json'; + +async function harvestDarkWeb() { + console.log('🌑 Operation Dark Sentry: Initializing...'); + const uri = process.env.NEO4J_URI; + const user = process.env.NEO4J_USER; + const password = process.env.NEO4J_PASSWORD; + + if (!uri || !user || !password) { + console.error('❌ ERROR: Missing Neo4j credentials'); + process.exit(1); + } + + const driver = neo4j.driver(uri, neo4j.auth.basic(user, password)); + const session = driver.session(); + + try { + console.log('📡 Intercepting Ransomwatch feed...'); + const response = await fetch(RANSOMWATCH_FEED); + if (!response.ok) throw new Error(`Failed to fetch feed: 
${response.statusText}`); + + const allPosts = await response.json() as any[]; + console.log(`📦 Intercepted ${allPosts.length} leak posts.`); + + // Sort by date descending and take top 50 + const recentPosts = allPosts + .sort((a, b) => new Date(b.date).getTime() - new Date(a.date).getTime()) + .slice(0, 50); + + console.log(`⚡ Processing ${recentPosts.length} most recent threats...`); + + let nodesCreated = 0; + const groupCounts: Record = {}; + + for (const post of recentPosts) { + // Data normalization + const actorName = post.group_name || 'Unknown Actor'; + const victimTitle = post.post_title || 'Unknown Victim'; + const date = post.discovered || new Date().toISOString(); + + groupCounts[actorName] = (groupCounts[actorName] || 0) + 1; + + await session.run(` + MERGE (actor:ThreatActor {name: $actorName}) + ON CREATE SET actor.firstSeen = $date + ON MATCH SET actor.lastSeen = $date + + MERGE (victim:Victim {name: $victimTitle}) + ON CREATE SET victim.discovered = $date + + MERGE (actor)-[r:TARGETED]->(victim) + SET r.date = $date, r.source = 'ransomwatch' + `, { actorName, victimTitle, date }); + + nodesCreated++; + if (nodesCreated % 10 === 0) process.stdout.write('.'); + } + + console.log(`\n✅ Intelligence Injection Complete. 
Processed ${nodesCreated} attacks.`); + + // Sort groups by activity + const topGroups = Object.entries(groupCounts) + .sort(([,a], [,b]) => b - a) + .slice(0, 3); + + console.log('\n📊 Top Active Threat Actors (Recent Sample):'); + topGroups.forEach(([group, count]) => console.log(`- ${group}: ${count} victims`)); + + } catch (error) { + console.error('💥 Sentry Failure:', error); + } finally { + await session.close(); + await driver.close(); + } +} + +harvestDarkWeb(); diff --git a/apps/backend/src/scripts/data_pump.ts b/apps/backend/src/scripts/data_pump.ts new file mode 100644 index 0000000000000000000000000000000000000000..fcf3d3f887bed67778bad6655e3e35d56cfe8be0 --- /dev/null +++ b/apps/backend/src/scripts/data_pump.ts @@ -0,0 +1,48 @@ +import { neo4jAdapter } from '../adapters/Neo4jAdapter.js'; +import { hyperLog } from '../services/hyper-log.js'; + +async function pumpData() { + console.log('🚀 Starting DATA PUMP...'); + + // 1. Create Graph Nodes (Neo4j) + try { + console.log('📦 Generating Graph Nodes...'); + // Connection is handled automatically by executeQuery + + for (let i = 0; i < 30; i++) { + await neo4jAdapter.executeQuery(` + CREATE (n:TestNode { + id: 'node-${Date.now()}-${i}', + name: 'DataPoint ${i}', + timestamp: datetime() + }) + `); + } + console.log('✅ Created 30 TestNodes'); + } catch (err) { + console.error('❌ Graph Error:', err); + } + + // 2. 
Generate HyperLog Events + console.log('📝 Generating HyperLog Events...'); + const agents = ['System', 'Claude', 'Gemini', 'Watchdog']; + const actions = ['SCAN', 'INDEX', 'OPTIMIZE', 'HEAL', 'QUERY']; + + for (let i = 0; i < 50; i++) { + const agent = agents[Math.floor(Math.random() * agents.length)]; + const action = actions[Math.floor(Math.random() * actions.length)]; + + await hyperLog.log( + 'INSIGHT', + agent, + `${action} operation completed successfully on sector ${i}`, + { iteration: i } + ); + } + console.log('✅ Created 50 HyperLog events'); + + console.log('🏁 DATA PUMP COMPLETE'); + process.exit(0); +} + +pumpData(); diff --git a/apps/backend/src/scripts/debug_node_structure.ts b/apps/backend/src/scripts/debug_node_structure.ts new file mode 100644 index 0000000000000000000000000000000000000000..57b399bf9e5bbcd766bfca49344053c973edcc9b --- /dev/null +++ b/apps/backend/src/scripts/debug_node_structure.ts @@ -0,0 +1,30 @@ +import { config } from 'dotenv'; +import { resolve } from 'path'; +import { fileURLToPath } from 'url'; +import neo4j from 'neo4j-driver'; + +const __dirname = fileURLToPath(new URL('.', import.meta.url)); +config({ path: resolve(__dirname, '../../.env') }); + +const NEO4J_URI = process.env.NEO4J_URI || 'bolt://localhost:7687'; +const NEO4J_USERNAME = process.env.NEO4J_USERNAME || 'neo4j'; +const NEO4J_PASSWORD = process.env.NEO4J_PASSWORD || 'password'; + +async function checkNodeStructure() { + const driver = neo4j.driver(NEO4J_URI, neo4j.auth.basic(NEO4J_USERNAME, NEO4J_PASSWORD)); + const session = driver.session(); + try { + const result = await session.run("MATCH (n:File) WHERE n.name ENDS WITH '.ts' RETURN n LIMIT 1"); + if (result.records.length > 0) { + console.log(JSON.stringify(result.records[0].get('n').properties, null, 2)); + } else { + console.log("No TS files found."); + } + } catch(e) { + console.error("Error:", e); + } finally { + await session.close(); + await driver.close(); + } +} +checkNodeStructure(); diff --git 
a/apps/backend/src/scripts/debug_paths.ts b/apps/backend/src/scripts/debug_paths.ts new file mode 100644 index 0000000000000000000000000000000000000000..f8832beaf7c065f9051bb88f5445b8ea0e65f792 --- /dev/null +++ b/apps/backend/src/scripts/debug_paths.ts @@ -0,0 +1,30 @@ +import neo4j from 'neo4j-driver'; + +const driver = neo4j.driver('bolt://localhost:7687', neo4j.auth.basic('neo4j', 'password')); +const session = driver.session(); + +async function debug() { + // Check DEPENDS_ON relations + const result1 = await session.run("MATCH ()-[r:DEPENDS_ON]->() RETURN count(r) as count"); + console.log('=== DEPENDS_ON RELATIONS ==='); + const count = result1.records[0].get('count'); + console.log('Total:', typeof count.toNumber === 'function' ? count.toNumber() : count); + + // Sample some relations + const result2 = await session.run("MATCH (a:File)-[r:DEPENDS_ON]->(b:File) RETURN a.name as from, b.name as to LIMIT 10"); + console.log('\n=== SAMPLE DEPENDENCIES ==='); + result2.records.forEach(r => console.log(r.get('from') + ' -> ' + r.get('to'))); + + // All relation types + const result3 = await session.run("MATCH ()-[r]->() RETURN type(r) as type, count(r) as cnt ORDER BY cnt DESC LIMIT 10"); + console.log('\n=== ALL RELATION TYPES ==='); + result3.records.forEach(r => { + const cnt = r.get('cnt'); + console.log(r.get('type') + ': ' + (typeof cnt.toNumber === 'function' ? 
cnt.toNumber() : cnt)); + }); + + await session.close(); + await driver.close(); +} + +debug(); diff --git a/apps/backend/src/scripts/discover_danish_osint.ts b/apps/backend/src/scripts/discover_danish_osint.ts new file mode 100644 index 0000000000000000000000000000000000000000..0425e7cbb9f95f8c51dca6b3b4310ef9a522605b --- /dev/null +++ b/apps/backend/src/scripts/discover_danish_osint.ts @@ -0,0 +1,159 @@ + +import axios from 'axios'; +import * as fs from 'fs/promises'; +import * as path from 'path'; +import * as dotenv from 'dotenv'; + +// Load env vars +dotenv.config({ path: path.resolve(process.cwd(), 'apps/backend/.env') }); + +const GITHUB_API = 'https://api.github.com'; +const HF_API = 'https://huggingface.co/api'; + +interface DiscoveredRepo { + name: string; + url: string; + description: string | null; + stars: number; + source: 'github' | 'huggingface'; + topics?: string[]; +} + +async function searchGithub(query: string): Promise { + try { + const headers: any = { + 'Accept': 'application/vnd.github.v3+json', + }; + if (process.env.GITHUB_TOKEN) { + headers['Authorization'] = `token ${process.env.GITHUB_TOKEN}`; + } + + console.log(`🔍 Searching GitHub for: ${query}`); + const response = await axios.get(`${GITHUB_API}/search/repositories`, { + params: { q: query, sort: 'stars', order: 'desc', per_page: 20 }, + headers + }); + + return response.data.items.map((item: any) => ({ + name: item.full_name, + url: item.html_url, + description: item.description, + stars: item.stargazers_count, + source: 'github', + topics: item.topics + })); + } catch (error: any) { + console.error(`❌ GitHub search failed: ${error.message}`); + return []; + } +} + +async function getGithubOrgRepos(org: string): Promise { + try { + const headers: any = { + 'Accept': 'application/vnd.github.v3+json', + }; + if (process.env.GITHUB_TOKEN) { + headers['Authorization'] = `token ${process.env.GITHUB_TOKEN}`; + } + + console.log(`🏢 Fetching repos for org: ${org}`); + const response = await 
axios.get(`${GITHUB_API}/users/${org}/repos`, { + params: { sort: 'updated', per_page: 100 }, + headers + }); + + return response.data.map((item: any) => ({ + name: item.full_name, + url: item.html_url, + description: item.description, + stars: item.stargazers_count, + source: 'github', + topics: item.topics + })); + } catch (error: any) { + console.error(`❌ GitHub org fetch failed: ${error.message}`); + return []; + } +} + +async function searchHuggingFace(tag: string): Promise { + try { + console.log(`🤗 Searching Hugging Face for tag: ${tag}`); + const response = await axios.get(`${HF_API}/models`, { + params: { filter: tag, sort: 'likes', direction: -1, limit: 20 } + }); + + return response.data.map((item: any) => ({ + name: item.modelId, + url: `https://huggingface.co/${item.modelId}`, + description: null, // HF API doesn't always return description in list + stars: item.likes, + source: 'huggingface', + topics: item.tags + })); + } catch (error: any) { + console.error(`❌ Hugging Face search failed: ${error.message}`); + return []; + } +} + +async function main() { + console.log('🚀 Starting Danish OSINT Discovery...'); + + const allRepos: DiscoveredRepo[] = []; + + // 1. Alexandra Institute Repos + const alexandraRepos = await getGithubOrgRepos('alexandrainst'); + allRepos.push(...alexandraRepos); + + // 2. GitHub Searches + const ghQueries = [ + 'topic:danish', + 'topic:denmark', + 'language:Danish', + '"Danish NLP"', + 'dk-bert' + ]; + + for (const q of ghQueries) { + const results = await searchGithub(q); + allRepos.push(...results); + // Sleep briefly to be nice to API + await new Promise(r => setTimeout(r, 1000)); + } + + // 3. 
Hugging Face Searches + const hfTags = ['da', 'danish']; + for (const tag of hfTags) { + const results = await searchHuggingFace(tag); + allRepos.push(...results); + } + + // Deduplicate + const uniqueRepos = Array.from(new Map(allRepos.map(item => [item.url, item])).values()); + + console.log(`\n✅ Found ${uniqueRepos.length} unique resources.`); + + // Output to file + const outputPath = path.resolve(process.cwd(), 'data', 'danish_osint_discovery.json'); + await fs.mkdir(path.dirname(outputPath), { recursive: true }); + await fs.writeFile(outputPath, JSON.stringify(uniqueRepos, null, 2)); + console.log(`💾 Saved results to ${outputPath}`); + + // Generate a snippet for ingest_curated_repos.ts + const ingestSnippet = uniqueRepos + .filter(r => r.source === 'github') // Only GitHub for now for the ingest script + .slice(0, 20) // Top 20 for example + .map(r => ({ + key: r.name.replace('/', '-').toLowerCase(), + url: r.url + '.git', + files: [{ title: r.name, relPath: 'README.md', category: 'danish-osint' }] + })); + + const snippetPath = path.resolve(process.cwd(), 'data', 'ingest_snippet.json'); + await fs.writeFile(snippetPath, JSON.stringify(ingestSnippet, null, 2)); + console.log(`📝 Generated ingestion snippet for top 20 GitHub repos at ${snippetPath}`); +} + +main().catch(console.error); diff --git a/apps/backend/src/scripts/enterprise-setup.ts b/apps/backend/src/scripts/enterprise-setup.ts new file mode 100644 index 0000000000000000000000000000000000000000..603a492d70348154f1d87ba6ebd28554aa028545 --- /dev/null +++ b/apps/backend/src/scripts/enterprise-setup.ts @@ -0,0 +1,86 @@ +#!/usr/bin/env node + +/** + * Enterprise Setup Script + * Automates the setup of PostgreSQL, Redis, and Prisma + */ + +import { execSync } from 'child_process'; +import fs from 'fs'; +import path from 'path'; + +const steps = [ + { + name: 'Check Docker', + command: 'docker --version', + required: true, + }, + { + name: 'Start Docker Services', + command: 'docker-compose up -d postgres 
redis', + cwd: path.join(process.cwd(), '../..'), + }, + { + name: 'Install Dependencies', + command: 'npm install', + }, + { + name: 'Generate Prisma Client', + command: 'npx prisma generate', + }, + { + name: 'Run Database Migration', + command: 'npx prisma migrate dev --name init', + skipOnError: true, // Migration might already exist + }, + { + name: 'Build Backend', + command: 'npm run build', + }, +]; + +async function run() { + console.log('🚀 Starting Enterprise Setup...\n'); + + for (const step of steps) { + console.log(`📦 ${step.name}...`); + + try { + const output = execSync(step.command, { + cwd: step.cwd || process.cwd(), + stdio: 'pipe', + encoding: 'utf-8', + }); + + console.log(`✅ ${step.name} completed\n`); + + if (output && output.length < 500) { + console.log(output); + } + } catch (error) { + if (step.required) { + console.error(`❌ ${step.name} failed!`); + console.error(error.message); + process.exit(1); + } else if (step.skipOnError) { + console.log(`⚠️ ${step.name} skipped (may already be complete)\n`); + } else { + console.error(`❌ ${step.name} failed!`); + console.error(error.message); + process.exit(1); + } + } + } + + console.log('\n✨ Enterprise setup complete!'); + console.log('\nNext steps:'); + console.log('1. Review .env file and add your API keys'); + console.log('2. Start backend: pm2 start ecosystem.config.js'); + console.log('3. Check status: pm2 status'); + console.log('4. 
View logs: pm2 logs widgetdc-backend'); +} + +run().catch((err) => { + console.error('Setup failed:', err); + process.exit(1); +}); diff --git a/apps/backend/src/scripts/find_zombies_final.ts b/apps/backend/src/scripts/find_zombies_final.ts new file mode 100644 index 0000000000000000000000000000000000000000..b3cc48d13397db41021bbbfecae3433a30f89cb5 --- /dev/null +++ b/apps/backend/src/scripts/find_zombies_final.ts @@ -0,0 +1,67 @@ +import 'dotenv/config'; +import neo4j from 'neo4j-driver'; +import * as fs from 'fs'; +import * as path from 'path'; +import { fileURLToPath } from 'url'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +async function findZombies() { + const uri = process.env.NEO4J_URI; + const user = process.env.NEO4J_USER; + const password = process.env.NEO4J_PASSWORD; + + if (!uri || !user || !password) { + console.error('❌ FEJL: Mangler Neo4j credentials'); + process.exit(1); + } + + const driver = neo4j.driver(uri, neo4j.auth.basic(user, password)); + const session = driver.session(); + + try { + const query = ` + MATCH (f:File) + WHERE f.path ENDS WITH '.ts' + AND NOT ()-[:DEPENDS_ON]->(f) + AND NOT f.name ENDS WITH 'index.ts' + AND NOT f.name ENDS WITH 'server.ts' + AND NOT f.name ENDS WITH '.d.ts' + AND NOT f.name ENDS WITH '.test.ts' + AND NOT f.name ENDS WITH '.spec.ts' + AND NOT f.path CONTAINS 'node_modules' + RETURN f.path as path, f.name as name + ORDER BY f.path + `; + + const result = await session.run(query); + const zombies = result.records.map(r => r.get('path')); + + console.log(`🧟 Found ${zombies.length} potential zombies.`); + + const reportPath = path.resolve(__dirname, '../../../../docs/ZOMBIE_CODE_REPORT.md'); + const reportContent = `# 🧟 Zombie Code Report +Generated: ${new Date().toISOString()} +Total: ${zombies.length} + +## Potential Dead Code +${zombies.map(z => `- ${z}`).join('\n')} +`; + + fs.writeFileSync(reportPath, reportContent); + console.log(`📝 Report saved to: 
${reportPath}`); + + // Output top 10 for CLI + zombies.slice(0, 10).forEach(z => console.log(`- ${z}`)); + if (zombies.length > 10) console.log(`... and ${zombies.length - 10} more.`); + + } catch (error) { + console.error('Error:', error); + } finally { + await session.close(); + await driver.close(); + } +} + +findZombies(); diff --git a/apps/backend/src/scripts/find_zombies_simple.ts b/apps/backend/src/scripts/find_zombies_simple.ts new file mode 100644 index 0000000000000000000000000000000000000000..636bf1edb6208d31a2c66392ba17b06841ce1e57 --- /dev/null +++ b/apps/backend/src/scripts/find_zombies_simple.ts @@ -0,0 +1,44 @@ +import { config } from 'dotenv'; +import { resolve } from 'path'; +import { fileURLToPath } from 'url'; +import neo4j from 'neo4j-driver'; + +const __dirname = fileURLToPath(new URL('.', import.meta.url)); +config({ path: resolve(__dirname, '../../.env') }); + +const NEO4J_URI = process.env.NEO4J_URI || 'bolt://localhost:7687'; +const NEO4J_USERNAME = process.env.NEO4J_USERNAME || 'neo4j'; +const NEO4J_PASSWORD = process.env.NEO4J_PASSWORD || 'password'; + +async function run() { + const driver = neo4j.driver(NEO4J_URI, neo4j.auth.basic(NEO4J_USERNAME, NEO4J_PASSWORD)); + const session = driver.session(); + try { + const query = ` + MATCH (f:File) + WHERE f.path ENDS WITH '.ts' + AND NOT ()-[:DEPENDS_ON]->(f) + AND NOT f.name ENDS WITH 'index.ts' + AND NOT f.name ENDS WITH 'server.ts' + AND NOT f.name ENDS WITH '.d.ts' + AND NOT f.name ENDS WITH '.test.ts' + AND NOT f.name ENDS WITH '.spec.ts' + AND NOT f.name STARTS WITH 'vite' + AND NOT f.path CONTAINS 'node_modules' + RETURN f.path as path, f.name as name + ORDER BY f.path + `; + + const result = await session.run(query); + console.log(`Found ${result.records.length} zombies:`); + result.records.forEach(r => { + console.log(`- ${r.get('path')}`); + }); + } catch(e) { + console.error(e); + } finally { + await session.close(); + await driver.close(); + } +} +run(); diff --git 
a/apps/backend/src/scripts/fix_tender_data.ts b/apps/backend/src/scripts/fix_tender_data.ts new file mode 100644 index 0000000000000000000000000000000000000000..6d536a873478127fadb38aaba5a011299c40d2e5 --- /dev/null +++ b/apps/backend/src/scripts/fix_tender_data.ts @@ -0,0 +1,42 @@ +import { neo4jService } from '../services/Neo4jService'; + +async function fixTenderData() { + console.log('🔧 Fixing Tender Data (Enriching with Keywords)...'); + + const updates = [ + { + title: "Etablering af SOC og beredskab til Region Hovedstaden", + keywords: ['SOC', 'Security', 'Beredskab', 'Overvågning', 'SIEM', 'Cybersecurity', 'Alarm'] + }, + { + title: "Landsdækkende IoT-netværk til forsyningssektoren", + keywords: ['IoT', 'Netværk', 'Sensor', 'LoRaWAN', 'Forsyning', 'Dataopsamling'] + }, + { + title: "Sikker kommunikationsplatform (Unified Comms)", + keywords: ['Unified Comms', 'Sikkerhed', 'Kryptering', 'Kommunikation', 'VoIP', 'Video', 'Chat'] + } + ]; + + try { + for (const update of updates) { + console.log(`Updating: ${update.title}`); + await neo4jService.write(` + MATCH (t:Tender {title: $title}) + SET t.keywords = $keywords, t.budget = 5000000 + toInteger(rand() * 10000000) + RETURN t.title, t.keywords + `, { + title: update.title, + keywords: update.keywords + }); + } + console.log('✅ Data fixed.'); + + } catch (error) { + console.error('❌ Error:', error); + } finally { + await neo4jService.disconnect(); + } +} + +fixTenderData(); diff --git a/apps/backend/src/scripts/fix_vector_dim.ts b/apps/backend/src/scripts/fix_vector_dim.ts new file mode 100644 index 0000000000000000000000000000000000000000..4ee2d1d170e39a014b670454ac6e9fbbb2d94025 --- /dev/null +++ b/apps/backend/src/scripts/fix_vector_dim.ts @@ -0,0 +1,31 @@ +import * as dotenv from 'dotenv'; +import * as path from 'path'; +import * as url from 'url'; + +const __dirname = path.dirname(url.fileURLToPath(import.meta.url)); +dotenv.config({ path: path.resolve(__dirname, '../../.env') }); + +import { 
getDatabaseAdapter } from '../platform/db/PrismaDatabaseAdapter.js'; + +async function fixVectorDimensions() { + console.log('🔧 Fixing vector dimensions in PostgreSQL...'); + const dbAdapter = getDatabaseAdapter(); + await dbAdapter.initialize(); + const prisma = dbAdapter.getClient(); + + try { + // Alter table to use 384 dimensions + console.log('Running: ALTER TABLE vector_documents ALTER COLUMN embedding TYPE vector(384);'); + await prisma.$executeRawUnsafe(` + ALTER TABLE vector_documents + ALTER COLUMN embedding TYPE vector(384); + `); + console.log('✅ Successfully updated vector column dimensions to 384.'); + } catch (error: any) { + console.error('❌ Error updating vector dimensions:', error.message); + } finally { + await dbAdapter.disconnect(); + } +} + +fixVectorDimensions(); diff --git a/apps/backend/src/scripts/gap_analysis.ts b/apps/backend/src/scripts/gap_analysis.ts new file mode 100644 index 0000000000000000000000000000000000000000..d9cf6472f5f502c1c5ba4dc2ce2d35d664986f45 --- /dev/null +++ b/apps/backend/src/scripts/gap_analysis.ts @@ -0,0 +1,68 @@ +import 'dotenv/config'; +import neo4j from 'neo4j-driver'; +import * as path from 'path'; +import { fileURLToPath } from 'url'; + +// SHIM: Define __dirname for ES Modules +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +async function runGapAnalysis() { + console.log('🧠 Neural Synergy: Analyzing Capability Gaps...'); + const uri = process.env.NEO4J_URI; + const user = process.env.NEO4J_USER; + const password = process.env.NEO4J_PASSWORD; + + if (!uri || !user || !password) { + console.error('❌ ERROR: Missing Neo4j credentials'); + process.exit(1); + } + + const driver = neo4j.driver(uri, neo4j.auth.basic(user, password)); + const session = driver.session(); + + try { + // Gap Analysis Query + // Finds tenders where the required keywords (from 'matches' property) + // do NOT appear in any File content (simulated by file name/path for now as we don't 
have full content indexed yet) + + console.log('⚡ Scanning Graph for missing capabilities...'); + + const result = await session.run(` + MATCH (t:Tender)<-[f:POTENTIAL_FIT]-(:Organization) + WHERE f.score > 40 + UNWIND f.matches as keyword + WITH t, keyword + // Check if we have code matching this keyword + // Using CASE for existence check since we are in a projection + CALL { + WITH keyword + MATCH (file:File) + WHERE toLower(file.name) CONTAINS toLower(keyword) + OR toLower(file.path) CONTAINS toLower(keyword) + RETURN count(file) > 0 as hasCapability + } + WITH t, keyword, hasCapability + WHERE NOT hasCapability + RETURN t.title as Tender, collect(keyword) as MissingTech + `); + + console.log('\n📋 Strategic Gap Report:'); + if (result.records.length === 0) { + console.log("✅ No critical gaps found. We have code coverage for all identified opportunities."); + } else { + result.records.forEach(r => { + console.log(`\n🚨 Tender: "${r.get('Tender')}"`); + console.log(` Missing Tech: ${r.get('MissingTech').join(', ')}`); + }); + } + + } catch (error) { + console.error('💥 Analysis Failed:', error); + } finally { + await session.close(); + await driver.close(); + } +} + +runGapAnalysis(); diff --git a/apps/backend/src/scripts/holographic_match.ts b/apps/backend/src/scripts/holographic_match.ts new file mode 100644 index 0000000000000000000000000000000000000000..9ea1a25b05da3817a8371a9cbf4be01604f24cf1 --- /dev/null +++ b/apps/backend/src/scripts/holographic_match.ts @@ -0,0 +1,95 @@ +import 'dotenv/config'; +import * as fs from 'fs'; +import * as path from 'path'; +import { fileURLToPath } from 'url'; +import neo4j from 'neo4j-driver'; + +// ESM Shim +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +// Output sti til rapporten +const REPORT_PATH = path.resolve(__dirname, '../../../../docs/HOLOGRAPHIC_GAP_REPORT.json'); + +async function runHolographicAnalysis() { + console.log('🌌 Operation Holographic Match: 
Initializing...'); + + const uri = process.env.NEO4J_URI; + const user = process.env.NEO4J_USER; + const password = process.env.NEO4J_PASSWORD; + + if (!uri || !user || !password) { + console.error('❌ FEJL: Mangler Neo4j credentials.'); + process.exit(1); + } + + const driver = neo4j.driver(uri, neo4j.auth.basic(user, password)); + const session = driver.session(); + + try { + console.log('🧠 Connecting to Cortex...'); + + // THE HOLOGRAPHIC QUERY (Fra Blackboardet) + // Finder udbud (Tenders) hvor vi MANGLER koden (File content) til at løse opgaven + // Bemærk: Denne query antager at t.keywords eksisterer på Tender noder. + // Vores tidligere scripts har muligvis ikke sat denne property eksplicit på noden, men relationen har 'matches'. + // Lad os justere queryen til at bruge relationen POTENTIAL_FIT's 'matches' property. + + const result = await session.run(` + MATCH (org:Organization)-[f:POTENTIAL_FIT]->(t:Tender) + // Unwind alle keywords fra 'matches' arrayet på relationen + UNWIND f.matches as keyword + WITH t, keyword, org + // Tjek om vi har en fil der matcher dette keyword + CALL { + WITH keyword + MATCH (file:File) + WHERE toLower(file.name) CONTAINS toLower(keyword) + OR toLower(file.path) CONTAINS toLower(keyword) + RETURN count(file) > 0 as hasCapability + } + WITH t, keyword, hasCapability + WHERE NOT hasCapability + // Aggreger manglende keywords per tender + RETURN t.title as Title, + t.buyer as Buyer, + 'Gap Detected' as Status, + t.url as Url, + collect(keyword) as MissingKeywords + LIMIT 20 + `); + + const gaps = result.records.map(r => ({ + title: r.get('Title'), + buyer: r.get('Buyer'), + status: r.get('Status'), + url: r.get('Url'), + missingKeywords: r.get('MissingKeywords'), + recommendation: "INITIATE R&D SPRINT" + })); + + console.log(`🔍 Analysis Complete. 
Found ${gaps.length} capability gaps.`); + + // Gem rapporten + fs.writeFileSync(REPORT_PATH, JSON.stringify(gaps, null, 2)); + console.log(`📄 Report saved to: ${REPORT_PATH}`); + + if (gaps.length > 0) { + console.log('\nTop 3 Missing Capabilities (Opportunities):'); + gaps.slice(0, 3).forEach((g, i) => { + console.log(`${i+1}. ${g.title} (${g.buyer})`); + console.log(` Missing: ${g.missingKeywords.join(', ')}`); + }); + } else { + console.log('✨ No gaps found! We are fully synced with the market.'); + } + + } catch (error) { + console.error('💥 Holographic Failure:', error); + } finally { + await session.close(); + await driver.close(); + } +} + +runHolographicAnalysis(); diff --git a/apps/backend/src/scripts/ingest_awesome.ts b/apps/backend/src/scripts/ingest_awesome.ts new file mode 100644 index 0000000000000000000000000000000000000000..f1c4ea3250eab85dfacfcf85ec68f234a76deb49 --- /dev/null +++ b/apps/backend/src/scripts/ingest_awesome.ts @@ -0,0 +1,119 @@ +import * as dotenv from 'dotenv'; +import * as fs from 'fs/promises'; +import * as path from 'path'; +import { execSync } from 'child_process'; + +dotenv.config({ path: path.resolve(process.cwd(), 'apps/backend/.env') }); + +import { knowledgeAcquisition } from '../services/KnowledgeAcquisitionService.js'; +import { neo4jAdapter } from '../adapters/Neo4jAdapter.js'; +import { getPgVectorStore } from '../platform/vector/PgVectorStoreAdapter.js'; + +const REPO_URL = 'https://github.com/sindresorhus/awesome.git'; +const CLONE_DIR = path.resolve(process.cwd(), 'data', 'repos', 'awesome'); +const MAX_FILE_BYTES = 2 * 1024 * 1024; // 2MB safety cap + +// Curated subset of relevant sublists for WidgeTDC (security, OSINT, AI/agents, data, DevOps) +const CURATED_FILES: { title: string; relPath: string; category: string }[] = [ + { title: 'Awesome OSS Catalog (root)', relPath: 'README.md', category: 'awesome-list' }, + { title: 'Security', relPath: 'security/README.md', category: 'security' }, + { title: 'Big Data', 
relPath: 'big-data/README.md', category: 'data-pipelines' },
+  { title: 'Data Science', relPath: 'data-science/README.md', category: 'data-science' },
+  { title: 'Machine Learning', relPath: 'machine-learning/README.md', category: 'machine-learning' },
+  { title: 'NLP', relPath: 'nlp/README.md', category: 'nlp' },
+  { title: 'DevOps', relPath: 'devops/README.md', category: 'devops' },
+  { title: 'Sysadmin', relPath: 'sysadmin/README.md', category: 'sysadmin' },
+  { title: 'Databases', relPath: 'databases/README.md', category: 'databases' },
+  { title: 'Analytics', relPath: 'analytics/README.md', category: 'analytics' },
+  { title: 'Privacy', relPath: 'privacy/README.md', category: 'privacy' },
+  { title: 'Cryptography', relPath: 'cryptography/README.md', category: 'cryptography' },
+  { title: 'Incident Response', relPath: 'incident-response/README.md', category: 'incident-response' },
+  { title: 'Threat Intelligence', relPath: 'threat-intelligence/README.md', category: 'threat-intel' },
+  { title: 'Forensics', relPath: 'forensics/README.md', category: 'forensics' },
+  { title: 'Networking', relPath: 'networking/README.md', category: 'networking' },
+  { title: 'Cloud', relPath: 'cloud/README.md', category: 'cloud' },
+  { title: 'GraphQL', relPath: 'graphql/README.md', category: 'api' },
+  { title: 'Selfhosted', relPath: 'selfhosted/README.md', category: 'selfhosted' },
+  { title: 'Research', relPath: 'research/README.md', category: 'research' },
+  { title: 'Open Source Alternatives', relPath: 'opensource-alternatives/README.md', category: 'oss-alternatives' }
+];
+
+async function ensureRepo(): Promise<void> {
+  await fs.mkdir(path.dirname(CLONE_DIR), { recursive: true });
+
+  const repoExists = await fs
+    .access(path.join(CLONE_DIR, '.git'))
+    .then(() => true)
+    .catch(() => false);
+
+  const cmd = repoExists
+    ? `git -C "${CLONE_DIR}" pull`
+    : `git clone --depth=1 "${REPO_URL}" "${CLONE_DIR}"`;
+
+  console.log(repoExists ? '🔄 Pulling latest awesome list…' : '📥 Cloning awesome list…');
+  execSync(cmd, { stdio: 'inherit' });
+  console.log('✅ Repository ready at', CLONE_DIR);
+}
+
+async function ingest(): Promise<void> {
+  console.log('🚀 Starting curated ingestion of sindresorhus/awesome…');
+
+  try {
+    await ensureRepo();
+
+    console.log('🔌 Connecting to databases...');
+    const vectorStore = getPgVectorStore();
+    await vectorStore.initialize();
+
+    for (const entry of CURATED_FILES) {
+      const targetFile = path.join(CLONE_DIR, entry.relPath);
+      const fileExists = await fs
+        .access(targetFile)
+        .then(() => true)
+        .catch(() => false);
+
+      if (!fileExists) {
+        console.warn(`⚠️ Skipping missing file: ${entry.relPath}`);
+        continue;
+      }
+
+      const stat = await fs.stat(targetFile);
+      if (stat.size > MAX_FILE_BYTES) {
+        console.warn(`⚠️ Skipping oversized file (${stat.size} bytes): ${entry.relPath}`);
+        continue;
+      }
+
+      console.log(`📄 Ingesting ${entry.title} (${entry.relPath})…`);
+      const result = await knowledgeAcquisition.acquire({
+        type: 'file',
+        content: targetFile,
+        metadata: {
+          title: entry.title,
+          source: REPO_URL,
+          category: entry.category
+        }
+      });
+
+      if (result.success) {
+        console.log('✅ Ingestion Successful!');
+        console.log('-----------------------------------');
+        console.log(`📄 Source ID: ${result.sourceId}`);
+        console.log(`🧩 Chunks: ${result.chunks}`);
+        console.log(`🏷️ Entities: ${result.entitiesExtracted}`);
+        console.log(`🔢 Vectors: ${result.vectorsStored}`);
+        console.log(`🕸️ Graph Nodes: ${result.graphNodesCreated}`);
+        console.log(`⏱️ Duration: ${result.duration}ms`);
+        console.log('-----------------------------------');
+      } else {
+        console.error('❌ Ingestion failed', result.errors);
+      }
+    }
+  } catch (error) {
+    console.error('💥 Fatal error during ingestion:', error);
+  } finally {
+    await neo4jAdapter.close();
+    process.exit(0);
+  }
+}
+
+ingest();
diff --git a/apps/backend/src/scripts/ingest_curated_repos.ts b/apps/backend/src/scripts/ingest_curated_repos.ts
new file mode 100644 index 0000000000000000000000000000000000000000..8c4d681e86546a7c3a5d88e992688f6139b3bafb --- /dev/null +++ b/apps/backend/src/scripts/ingest_curated_repos.ts @@ -0,0 +1,168 @@ +import * as dotenv from 'dotenv'; +import * as fs from 'fs/promises'; +import * as path from 'path'; +import { execSync } from 'child_process'; + +dotenv.config({ path: path.resolve(process.cwd(), 'apps/backend/.env') }); + +import { knowledgeAcquisition } from '../services/KnowledgeAcquisitionService.js'; +import { neo4jAdapter } from '../adapters/Neo4jAdapter.js'; +import { getPgVectorStore } from '../platform/vector/PgVectorStoreAdapter.js'; + +type FileEntry = { title: string; relPath: string; category: string }; +type SourceRepo = { key: string; url: string; files: FileEntry[] }; + +const MAX_FILE_BYTES = 2 * 1024 * 1024; // safety cap per file + +const SOURCES: SourceRepo[] = [ + { + key: 'awesome-nuxt', + url: 'https://github.com/nuxt/awesome.git', + files: [{ title: 'Awesome Nuxt', relPath: 'README.md', category: 'awesome-nuxt' }], + }, + { + key: 'awesome-python', + url: 'https://github.com/vinta/awesome-python.git', + files: [{ title: 'Awesome Python', relPath: 'README.md', category: 'awesome-python' }], + }, + { + key: 'awesome-awesomeness', + url: 'https://github.com/bayandin/awesome-awesomeness.git', + files: [{ title: 'Awesome Awesomeness', relPath: 'README.md', category: 'awesome-meta' }], + }, + { + key: 'awesome-selfhosted', + url: 'https://github.com/awesome-selfhosted/awesome-selfhosted.git', + files: [{ title: 'Awesome Selfhosted', relPath: 'README.md', category: 'selfhosted' }], + }, + { + key: 'awesome-chatgpt-prompts', + url: 'https://github.com/f/awesome-chatgpt-prompts.git', + files: [{ title: 'Awesome ChatGPT Prompts', relPath: 'README.md', category: 'prompts' }], + }, + { + key: 'leaked-system-prompts', + url: 'https://github.com/jujumilk3/leaked-system-prompts.git', + files: [{ title: 'Leaked System Prompts', relPath: 'README.md', category: 
'prompts-leaked' }], + }, + { + key: 'mirai-source-code', + url: 'https://github.com/jgamblin/Mirai-Source-Code.git', + files: [{ title: 'Mirai Source Code', relPath: 'README.md', category: 'malware-research' }], + }, + { + key: 'python-ml-case-studies', + url: 'https://github.com/Apress/python-ml-case-studies.git', + files: [{ title: 'Python ML Case Studies', relPath: 'README.md', category: 'ml-case-studies' }], + }, + { + key: 'danlp', + url: 'https://github.com/alexandrainst/danlp.git', + files: [{ title: 'DaNLP', relPath: 'README.md', category: 'danlp' }], + }, + { + key: 'coral', + url: 'https://github.com/alexandrainst/coral.git', + files: [{ title: 'Coral', relPath: 'README.md', category: 'coral' }], + }, + { + key: 'fresco-stat', + url: 'https://github.com/alexandrainst/fresco-stat.git', + files: [{ title: 'Fresco Stat', relPath: 'README.md', category: 'fresco-stat' }], + }, + { + key: 'alexandra-ai', + url: 'https://github.com/alexandrainst/alexandra_ai.git', + files: [{ title: 'Alexandra AI', relPath: 'README.md', category: 'alexandra-ai' }], + }, + { + key: 'sn-graph', + url: 'https://github.com/alexandrainst/sn-graph.git', + files: [{ title: 'SN Graph', relPath: 'README.md', category: 'sn-graph' }], + }, +]; + +async function ensureRepo(key: string, url: string): Promise { + const dir = path.resolve(process.cwd(), 'data', 'repos', key); + await fs.mkdir(path.dirname(dir), { recursive: true }); + + const repoExists = await fs + .access(path.join(dir, '.git')) + .then(() => true) + .catch(() => false); + + const cmd = repoExists ? `git -C "${dir}" pull` : `git clone --depth=1 "${url}" "${dir}"`; + console.log(repoExists ? 
`🔄 Pulling latest for ${key}…` : `📥 Cloning ${key}…`); + execSync(cmd, { stdio: 'inherit' }); + console.log(`✅ Repo ready: ${dir}`); + return dir; +} + +async function ingestFile(fullPath: string, entry: FileEntry, sourceUrl: string) { + const exists = await fs + .access(fullPath) + .then(() => true) + .catch(() => false); + if (!exists) { + console.warn(`⚠️ Skipping missing file: ${entry.relPath}`); + return; + } + + const stat = await fs.stat(fullPath); + if (stat.size > MAX_FILE_BYTES) { + console.warn(`⚠️ Skipping oversized file (${stat.size} bytes): ${entry.relPath}`); + return; + } + + console.log(`📄 Ingesting ${entry.title} (${entry.relPath})…`); + const result = await knowledgeAcquisition.acquire({ + type: 'file', + content: fullPath, + metadata: { + title: entry.title, + source: sourceUrl, + category: entry.category, + }, + }); + + if (result.success) { + console.log('✅ Ingestion Successful!'); + console.log('-----------------------------------'); + console.log(`📄 Source ID: ${result.sourceId}`); + console.log(`🧩 Chunks: ${result.chunks}`); + console.log(`🏷️ Entities: ${result.entitiesExtracted}`); + console.log(`🔢 Vectors: ${result.vectorsStored}`); + console.log(`🕸️ Graph Nodes: ${result.graphNodesCreated}`); + console.log(`⏱️ Duration: ${result.duration}ms`); + console.log('-----------------------------------'); + } else { + console.error('❌ Ingestion failed', result.errors); + } +} + +async function ingestAll(): Promise { + console.log('🚀 Starting curated ingestion batch…'); + try { + const vectorStore = getPgVectorStore(); + await vectorStore.initialize(); + + for (const source of SOURCES) { + try { + const repoDir = await ensureRepo(source.key, source.url); + for (const file of source.files) { + const fullPath = path.join(repoDir, file.relPath); + await ingestFile(fullPath, file, source.url); + } + } catch (err) { + console.error(`❌ Failed processing ${source.key}:`, err); + } + } + } catch (error) { + console.error('💥 Fatal error during ingestion 
batch:', error); + } finally { + await neo4jAdapter.close(); + process.exit(0); + } +} + +ingestAll(); diff --git a/apps/backend/src/scripts/ingest_nuuday.ts b/apps/backend/src/scripts/ingest_nuuday.ts new file mode 100644 index 0000000000000000000000000000000000000000..9723a51f091356abcf8a84fa7c7e84e0cd4fe60f --- /dev/null +++ b/apps/backend/src/scripts/ingest_nuuday.ts @@ -0,0 +1,49 @@ +import * as dotenv from 'dotenv'; +import * as path from 'path'; +dotenv.config({ path: path.resolve(process.cwd(), 'apps/backend/.env') }); + +import { knowledgeAcquisition } from '../services/KnowledgeAcquisitionService.js'; +import { neo4jAdapter } from '../adapters/Neo4jAdapter.js'; +import { getPgVectorStore } from '../platform/vector/PgVectorStoreAdapter.js'; + +async function main() { + console.log('🚀 Starting ingestion of Target I01 (Nuuday Design Guide)...'); + + try { + // Initialize connections + console.log('🔌 Connecting to databases...'); + const vectorStore = getPgVectorStore(); + await vectorStore.initialize(); + + // Run ingestion + const result = await knowledgeAcquisition.acquireSingleTarget('I01'); + + if (result && result.success) { + console.log('✅ Ingestion Successful!'); + console.log('-----------------------------------'); + console.log(`📄 Source ID: ${result.sourceId}`); + console.log(`🧩 Chunks: ${result.chunks}`); + console.log(`🏷️ Entities: ${result.entitiesExtracted}`); + console.log(`🔢 Vectors: ${result.vectorsStored}`); + console.log(`🕸️ Graph Nodes: ${result.graphNodesCreated}`); + console.log(`⏱️ Duration: ${result.duration}ms`); + console.log('-----------------------------------'); + } else { + console.error('❌ Ingestion Failed'); + if (result) { + console.error('Errors:', result.errors); + } else { + console.error('Target I01 not found in KNOWLEDGE_TARGETS.json'); + } + } + + } catch (error) { + console.error('💥 Fatal Error:', error); + } finally { + // Cleanup + await neo4jAdapter.close(); + process.exit(0); + } +} + +main(); diff --git 
a/apps/backend/src/scripts/initNeo4j.ts b/apps/backend/src/scripts/initNeo4j.ts
new file mode 100644
index 0000000000000000000000000000000000000000..e350aa1132dc1aa9616d2819802d795c096c8799
--- /dev/null
+++ b/apps/backend/src/scripts/initNeo4j.ts
@@ -0,0 +1,407 @@
+/**
+ * Neo4j Database Initialization Script
+ *
+ * Sets up schema, constraints, indexes, and seed data for WidgeTDC
+ * With robust retry logic for transient errors (deadlocks, etc.)
+ */
+
+import { config } from 'dotenv';
+import { resolve } from 'path';
+import { fileURLToPath } from 'url';
+import neo4j, { Driver, Session } from 'neo4j-driver';
+
+// Load .env from backend directory (don't override existing env vars)
+const __dirname = fileURLToPath(new URL('.', import.meta.url));
+config({ path: resolve(__dirname, '../../.env'), override: false });
+
+// Support both NEO4J_USER and NEO4J_USERNAME for compatibility
+const NEO4J_URI = process.env.NEO4J_URI || 'bolt://localhost:7687';
+const NEO4J_USERNAME = process.env.NEO4J_USER || process.env.NEO4J_USERNAME || 'neo4j';
+const NEO4J_PASSWORD = process.env.NEO4J_PASSWORD || 'password';
+
+const MAX_RETRIES = 3;
+const RETRY_DELAY_MS = 1000;
+
+async function sleep(ms: number): Promise<void> {
+  return new Promise<void>(resolve => setTimeout(resolve, ms));
+}
+
+function isRetriableError(error: any): boolean {
+  return error?.code?.includes('TransientError') ||
+    error?.code?.includes('Deadlock') ||
+    error?.retriable === true ||
+    error?.retryable === true ||
+    error?.message?.includes('deadlock') ||
+    error?.message?.includes('lock');
+}
+
+async function runQueryWithRetry(
+  session: Session,
+  query: string,
+  params: Record<string, any> = {},
+  retries = MAX_RETRIES
+): Promise<void> {
+  for (let attempt = 1; attempt <= retries; attempt++) {
+    try {
+      await session.run(query, params);
+      const preview = query.split('\n').find(l => l.trim())?.substring(0, 50) || query.substring(0, 50);
+      console.log(` ✓ ${preview}...`);
+      return;
+    } catch (error: any) {
+      // Ignore "already exists" errors for constraints/indexes
+      if (error.message?.includes('already exists') || error.message?.includes('EquivalentSchemaRule')) {
+        console.log(` ⏭ Already exists`);
+        return;
+      }
+
+      // Retry on transient errors
+      if (isRetriableError(error) && attempt < retries) {
+        console.log(` ⚠️ Retrying (${attempt}/${retries}) after transient error...`);
+        await sleep(RETRY_DELAY_MS * attempt); // Exponential backoff
+        continue;
+      }
+
+      throw error;
+    }
+  }
+}
+
+async function createConstraints(driver: Driver): Promise<void> {
+  console.log('\n📋 Creating constraints...');
+
+  // Run each constraint in its own session to avoid deadlocks
+  const constraints = [
+    `CREATE CONSTRAINT entity_id IF NOT EXISTS FOR (e:Entity) REQUIRE e.id IS UNIQUE`,
+    `CREATE CONSTRAINT document_id IF NOT EXISTS FOR (d:Document) REQUIRE d.id IS UNIQUE`,
+    `CREATE CONSTRAINT agent_id IF NOT EXISTS FOR (a:Agent) REQUIRE a.id IS UNIQUE`,
+    `CREATE CONSTRAINT memory_id IF NOT EXISTS FOR (m:Memory) REQUIRE m.id IS UNIQUE`,
+    `CREATE CONSTRAINT task_id IF NOT EXISTS FOR (t:Task) REQUIRE t.id IS UNIQUE`,
+    `CREATE CONSTRAINT pattern_id IF NOT EXISTS FOR (p:Pattern) REQUIRE p.id IS UNIQUE`,
+  ];
+
+  for (const constraint of constraints) {
+    const session = driver.session();
+    try {
+      await runQueryWithRetry(session, constraint);
+    } finally {
+      await session.close();
+    }
+    // Small delay between constraints to reduce contention
+    await sleep(100);
+  }
+}
+
+async function createIndexes(driver: Driver): Promise<void> {
+  console.log('\n📇 Creating indexes...');
+
+  const indexes = [
+    `CREATE FULLTEXT INDEX entity_fulltext IF NOT EXISTS FOR (e:Entity) ON EACH [e.name, e.content, e.description]`,
+    `CREATE FULLTEXT INDEX document_fulltext IF NOT EXISTS FOR (d:Document) ON EACH [d.title, d.content, d.summary]`,
+    `CREATE FULLTEXT INDEX memory_fulltext IF NOT EXISTS FOR (m:Memory) ON EACH [m.content, m.context]`,
+    `CREATE INDEX entity_type IF NOT EXISTS FOR (e:Entity) ON (e.type)`,
+    `CREATE INDEX entity_created IF
NOT EXISTS FOR (e:Entity) ON (e.createdAt)`, + `CREATE INDEX document_type IF NOT EXISTS FOR (d:Document) ON (d.type)`, + `CREATE INDEX agent_status IF NOT EXISTS FOR (a:Agent) ON (a.status)`, + `CREATE INDEX task_status IF NOT EXISTS FOR (t:Task) ON (t.status)`, + `CREATE INDEX memory_importance IF NOT EXISTS FOR (m:Memory) ON (m.importance)`, + ]; + + for (const index of indexes) { + const session = driver.session(); + try { + await runQueryWithRetry(session, index); + } finally { + await session.close(); + } + await sleep(100); + } +} + +async function createSeedData(driver: Driver): Promise { + console.log('\n🌱 Creating seed data...'); + + const now = new Date().toISOString(); + const session = driver.session(); + + try { + // System Agent + await runQueryWithRetry(session, ` + MERGE (a:Agent:Entity {id: 'agent-system'}) + SET a.name = 'System Agent', + a.type = 'orchestrator', + a.status = 'active', + a.description = 'Core system orchestrator agent', + a.createdAt = $now, + a.updatedAt = $now + `, { now }); + + // HansPedder Agent + await runQueryWithRetry(session, ` + MERGE (a:Agent:Entity {id: 'agent-hanspedder'}) + SET a.name = 'HansPedder', + a.type = 'qa-tester', + a.status = 'active', + a.description = 'Autonomous QA testing agent', + a.capabilities = ['testing', 'validation', 'reporting'], + a.createdAt = $now, + a.updatedAt = $now + `, { now }); + + // GraphRAG Agent + await runQueryWithRetry(session, ` + MERGE (a:Agent:Entity {id: 'agent-graphrag'}) + SET a.name = 'GraphRAG Engine', + a.type = 'retrieval', + a.status = 'active', + a.description = 'Graph-enhanced retrieval augmented generation', + a.capabilities = ['search', 'retrieval', 'synthesis'], + a.createdAt = $now, + a.updatedAt = $now + `, { now }); + + // WidgeTDC Organization + await runQueryWithRetry(session, ` + MERGE (o:Organization:Entity {id: 'org-widgetdc'}) + SET o.name = 'WidgeTDC', + o.type = 'Organization', + o.description = 'Enterprise Intelligence Platform', + o.createdAt = $now, 
+ o.updatedAt = $now + `, { now }); + + // Knowledge Domains + const domains = [ + { id: 'domain-security', name: 'Security', description: 'Cybersecurity and threat intelligence' }, + { id: 'domain-compliance', name: 'Compliance', description: 'GDPR, regulatory compliance' }, + { id: 'domain-analytics', name: 'Analytics', description: 'Data analytics and insights' }, + { id: 'domain-agents', name: 'Agents', description: 'Autonomous agent coordination' } + ]; + + for (const domain of domains) { + await runQueryWithRetry(session, ` + MERGE (d:Domain:Entity {id: $id}) + SET d.name = $name, + d.type = 'Domain', + d.description = $description, + d.createdAt = $now, + d.updatedAt = $now + `, { ...domain, now }); + } + + // Sample Documents + await runQueryWithRetry(session, ` + MERGE (d:Document:Entity {id: 'doc-system-architecture'}) + SET d.title = 'WidgeTDC System Architecture', + d.type = 'documentation', + d.content = 'WidgeTDC is an enterprise-grade autonomous intelligence platform built as a TypeScript monorepo.', + d.summary = 'Core system architecture documentation', + d.source = 'internal', + d.createdAt = $now, + d.updatedAt = $now + `, { now }); + + await runQueryWithRetry(session, ` + MERGE (d:Document:Entity {id: 'doc-mcp-protocol'}) + SET d.title = 'MCP Protocol Integration', + d.type = 'documentation', + d.content = 'Model Context Protocol enables AI agents to communicate with external tools and data sources.', + d.summary = 'MCP protocol documentation', + d.source = 'internal', + d.createdAt = $now, + d.updatedAt = $now + `, { now }); + + // Sample Task + await runQueryWithRetry(session, ` + MERGE (t:Task:Entity {id: 'task-init-neo4j'}) + SET t.name = 'Initialize Neo4j Database', + t.type = 'setup', + t.status = 'completed', + t.description = 'Set up Neo4j schema, constraints, and seed data', + t.createdAt = $now, + t.completedAt = $now + `, { now }); + + // Sample Pattern + await runQueryWithRetry(session, ` + MERGE (p:Pattern:Entity {id: 
'pattern-graph-query'}) + SET p.name = 'Graph Query Pattern', + p.type = 'query', + p.description = 'Common pattern for querying knowledge graph', + p.frequency = 0, + p.successRate = 1.0, + p.createdAt = $now, + p.updatedAt = $now + `, { now }); + + // Sample Memory + await runQueryWithRetry(session, ` + MERGE (m:Memory:Entity {id: 'memory-system-init'}) + SET m.content = 'System initialized successfully with Neo4j knowledge graph', + m.context = 'system-startup', + m.importance = 0.8, + m.type = 'episodic', + m.createdAt = $now + `, { now }); + } finally { + await session.close(); + } +} + +async function createRelationships(driver: Driver): Promise { + console.log('\n🔗 Creating relationships...'); + + const session = driver.session(); + + try { + // Agents belong to organization + await runQueryWithRetry(session, ` + MATCH (a:Agent), (o:Organization {id: 'org-widgetdc'}) + WHERE a.id IN ['agent-system', 'agent-hanspedder', 'agent-graphrag'] + MERGE (a)-[:BELONGS_TO]->(o) + `); + + // Agents manage domains + await runQueryWithRetry(session, ` + MATCH (a:Agent {id: 'agent-hanspedder'}), (d:Domain {id: 'domain-agents'}) + MERGE (a)-[:MANAGES]->(d) + `); + + await runQueryWithRetry(session, ` + MATCH (a:Agent {id: 'agent-graphrag'}), (d:Domain {id: 'domain-analytics'}) + MERGE (a)-[:MANAGES]->(d) + `); + + // Documents relate to domains + await runQueryWithRetry(session, ` + MATCH (doc:Document {id: 'doc-system-architecture'}), (d:Domain {id: 'domain-analytics'}) + MERGE (doc)-[:RELATES_TO]->(d) + `); + + await runQueryWithRetry(session, ` + MATCH (doc:Document {id: 'doc-mcp-protocol'}), (d:Domain {id: 'domain-agents'}) + MERGE (doc)-[:RELATES_TO]->(d) + `); + + // System agent created task + await runQueryWithRetry(session, ` + MATCH (a:Agent {id: 'agent-system'}), (t:Task {id: 'task-init-neo4j'}) + MERGE (a)-[:CREATED]->(t) + `); + + // Memory created by system + await runQueryWithRetry(session, ` + MATCH (a:Agent {id: 'agent-system'}), (m:Memory {id: 
'memory-system-init'}) + MERGE (a)-[:RECORDED]->(m) + `); + + // Pattern used by GraphRAG + await runQueryWithRetry(session, ` + MATCH (a:Agent {id: 'agent-graphrag'}), (p:Pattern {id: 'pattern-graph-query'}) + MERGE (a)-[:USES]->(p) + `); + } finally { + await session.close(); + } +} + +async function showStatistics(driver: Driver): Promise { + console.log('\n📊 Database Statistics:'); + + const session = driver.session(); + + try { + const nodeResult = await session.run('MATCH (n) RETURN count(n) as count'); + console.log(` Total nodes: ${nodeResult.records[0].get('count').toNumber()}`); + + const relResult = await session.run('MATCH ()-[r]->() RETURN count(r) as count'); + console.log(` Total relationships: ${relResult.records[0].get('count').toNumber()}`); + + const labelResult = await session.run(` + MATCH (n) + UNWIND labels(n) as label + RETURN label, count(*) as count + ORDER BY count DESC + `); + console.log(' Labels:'); + labelResult.records.forEach(r => { + console.log(` - ${r.get('label')}: ${r.get('count').toNumber()}`); + }); + + const relTypeResult = await session.run(` + MATCH ()-[r]->() + RETURN type(r) as type, count(*) as count + ORDER BY count DESC + `); + console.log(' Relationship types:'); + relTypeResult.records.forEach(r => { + console.log(` - ${r.get('type')}: ${r.get('count').toNumber()}`); + }); + } finally { + await session.close(); + } +} + +async function initializeNeo4j(): Promise { + console.log('🚀 Neo4j Database Initialization'); + console.log('================================'); + console.log(`Connecting to: ${NEO4J_URI}`); + + const driver: Driver = neo4j.driver( + NEO4J_URI, + neo4j.auth.basic(NEO4J_USERNAME, NEO4J_PASSWORD), + { + maxConnectionLifetime: 3 * 60 * 60 * 1000, + maxConnectionPoolSize: 10, // Reduced to prevent contention + connectionAcquisitionTimeout: 30 * 1000, + } + ); + + try { + // Verify connection with retry + for (let attempt = 1; attempt <= MAX_RETRIES; attempt++) { + try { + await 
driver.verifyConnectivity(); + console.log('✅ Connected to Neo4j'); + break; + } catch (error) { + if (attempt === MAX_RETRIES) throw error; + console.log(` ⚠️ Connection attempt ${attempt}/${MAX_RETRIES} failed, retrying...`); + await sleep(RETRY_DELAY_MS * attempt); + } + } + + // Run initialization steps sequentially + await createConstraints(driver); + await createIndexes(driver); + await createSeedData(driver); + await createRelationships(driver); + await showStatistics(driver); + + console.log('\n✅ Neo4j initialization complete!'); + + } catch (error) { + console.error('❌ Initialization failed:', error); + throw error; + } finally { + await driver.close(); + } +} + +export { initializeNeo4j }; + +// Run if executed directly +const isMainModule = import.meta.url === `file://${process.argv[1]?.replace(/\\/g, '/')}` || + process.argv[1]?.includes('initNeo4j'); + +if (isMainModule) { + initializeNeo4j() + .then(() => { + console.log('\n🎉 Done!'); + process.exit(0); + }) + .catch((error) => { + console.error('\n💥 Failed:', error); + process.exit(1); + }); +} diff --git a/apps/backend/src/scripts/inspect_neo4j.ts b/apps/backend/src/scripts/inspect_neo4j.ts new file mode 100644 index 0000000000000000000000000000000000000000..8403efbe662b34628ec847b42f78938a1ded0a12 --- /dev/null +++ b/apps/backend/src/scripts/inspect_neo4j.ts @@ -0,0 +1,29 @@ +import { neo4jService } from '../services/Neo4jService'; + +async function inspectData() { + console.log('🕵️ Inspecting Neo4j Data...'); + + try { + // Check Node Counts + const counts = await neo4jService.query(` + MATCH (n) + RETURN labels(n) as label, count(n) as count + `); + console.log('📊 Node Counts:', counts); + + // Inspect Tenders + const tenders = await neo4jService.query(` + MATCH (t:Tender) + RETURN t.title, t.keywords, keys(t) as properties + LIMIT 5 + `); + console.log('📄 Tender Samples:', JSON.stringify(tenders, null, 2)); + + } catch (error) { + console.error('❌ Error:', error); + } finally { + await 
neo4jService.disconnect(); + } +} + +inspectData(); diff --git a/apps/backend/src/scripts/map_libraries.ts b/apps/backend/src/scripts/map_libraries.ts new file mode 100644 index 0000000000000000000000000000000000000000..f53da2a4bea819825d1138295aa5ed3b48de73fc --- /dev/null +++ b/apps/backend/src/scripts/map_libraries.ts @@ -0,0 +1,121 @@ +import { config } from 'dotenv'; +import { resolve, dirname } from 'path'; +import { fileURLToPath } from 'url'; +import { readFileSync, existsSync } from 'fs'; +import neo4j from 'neo4j-driver'; + +const __dirname = fileURLToPath(new URL('.', import.meta.url)); +config({ path: resolve(__dirname, '../../.env') }); + +const NEO4J_URI = process.env.NEO4J_URI || 'bolt://localhost:7687'; +const NEO4J_USERNAME = process.env.NEO4J_USERNAME || 'neo4j'; +const NEO4J_PASSWORD = process.env.NEO4J_PASSWORD || 'password'; + +async function mapLibraries() { + console.log('🗺️ The Cartographer: Mapping external dependencies...'); + const driver = neo4j.driver(NEO4J_URI, neo4j.auth.basic(NEO4J_USERNAME, NEO4J_PASSWORD)); + const session = driver.session(); + + try { + // 1. Fetch all TypeScript files + console.log('📥 Fetching TypeScript nodes...'); + const result = await session.run(` + MATCH (f:File) + WHERE f.language = 'TypeScript' OR f.extension = '.ts' OR f.extension = '.tsx' + RETURN f.path as absolutePath, f.id as id + `); + + const files = result.records.map(r => ({ + absolutePath: r.get('absolutePath'), + id: r.get('id') + })); + + console.log(`Found ${files.length} TS files. 
Scanning for libraries...`);
+
+    let librariesMapped = 0;
+    const libraryUsage: Record<string, number> = {};
+
+    for (const file of files) {
+      try {
+        if (!existsSync(file.absolutePath)) {
+          continue;
+        }
+
+        const content = readFileSync(file.absolutePath, 'utf-8');
+        // Regex to capture import sources
+        const importRegex = /import\s+(?:[\s\S]*?from\s+)?['"](.*?)['"]/g;
+        // Also capture dynamic imports and requires if possible, but stick to static imports for now as per weave_graph logic
+
+        let match;
+        while ((match = importRegex.exec(content)) !== null) {
+          const importPath = match[1];
+
+          // Filter for libraries: NOT starting with . or / or absolute windows paths (C:\...)
+          // And usually libraries don't start with @widget-tdc (monorepo packages) unless we want to map those as libs too?
+          // The instruction says: "Hvis stien IKKE starter med . eller /, er det et Library"
+
+          if (importPath.startsWith('.') || importPath.startsWith('/') || importPath.match(/^[a-zA-Z]:/)) {
+            continue;
+          }
+
+          // Clean library name (e.g., 'neo4j-driver/lib/types' -> 'neo4j-driver')
+          // Handle scoped packages like '@scope/pkg/sub' -> '@scope/pkg'
+          let libName = importPath;
+          if (libName.startsWith('@')) {
+            const parts = libName.split('/');
+            if (parts.length >= 2) {
+              libName = `${parts[0]}/${parts[1]}`;
+            }
+          } else {
+            const parts = libName.split('/');
+            if (parts.length >= 1) {
+              libName = parts[0];
+            }
+          }
+
+          // Skip internal monorepo packages if desired, but "Supply Chain" usually implies everything external to the *file*.
+          // However, to distinguish 3rd party from internal packages, we might want to tag them differently.
+          // For now, treat everything that looks like a package as a Library.
+ + // Neo4j Action + await session.run(` + MERGE (lib:Library {name: $libName}) + WITH lib + MATCH (f:File {id: $fileId}) + MERGE (f)-[r:USES]->(lib) + `, { libName, fileId: file.id }); + + librariesMapped++; + libraryUsage[libName] = (libraryUsage[libName] || 0) + 1; + + if (librariesMapped % 50 === 0) process.stdout.write('.'); + } + } catch (err) { + // console.error(`Error parsing ${file.absolutePath}:`, err); + } + } + + console.log(`\n✅ Mapping complete. Mapped ${librariesMapped} library usages.`); + + // Run The Bloat Report Query + const bloatResult = await session.run(` + MATCH (l:Library)<-[r:USES]-(f:File) + RETURN l.name as name, count(r) as UsageCount + ORDER BY UsageCount DESC + LIMIT 10 + `); + + console.log('\n📊 Top 10 Libraries:'); + bloatResult.records.forEach(r => { + console.log(`- ${r.get('name')}: ${r.get('UsageCount')}`); + }); + + } catch (error) { + console.error('❌ Cartographer failed:', error); + } finally { + await session.close(); + await driver.close(); + } +} + +mapLibraries(); diff --git a/apps/backend/src/scripts/market_fit_analysis.ts b/apps/backend/src/scripts/market_fit_analysis.ts new file mode 100644 index 0000000000000000000000000000000000000000..2bf5afa92f29499b37cd515124103fa825c71c92 --- /dev/null +++ b/apps/backend/src/scripts/market_fit_analysis.ts @@ -0,0 +1,80 @@ +import 'dotenv/config'; +import * as fs from 'fs'; +import * as path from 'path'; +import { fileURLToPath } from 'url'; +import neo4j from 'neo4j-driver'; + +// ESM Shim +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const REPORT_PATH = path.resolve(__dirname, '../../../../market_fit_report.json'); + +async function runMarketAnalysis() { + console.log('📊 Operation Market Fit: Initializing...'); + + // Cloud Connection + const driver = neo4j.driver( + process.env.NEO4J_URI!, + neo4j.auth.basic(process.env.NEO4J_USER!, process.env.NEO4J_PASSWORD!) 
+ ); + const session = driver.session(); + + try { + console.log('🔌 Connecting to Neural Graph...'); + + // RETTET QUERY: Henter data fra relationen POTENTIAL_FIT + const result = await session.run(` + MATCH (org:Organization)-[r:POTENTIAL_FIT]->(t:Tender) + MATCH (b:Buyer)-[:ISSUED]->(t) + RETURN + t.title as title, + b.name as buyer, + r.score as score, + r.matches as capabilities, + r.upscale as isUpscale, + r.rationale as rationale, + t.url as url + ORDER BY r.score DESC + `); + + const opportunities = result.records.map(record => ({ + title: record.get('title'), + buyer: record.get('buyer'), + score: record.get('score'), // Match % + capabilities: record.get('capabilities'), // Hvad vi kan + isUpscale: record.get('isUpscale'), // Er det R&D? + rationale: record.get('rationale'), + url: record.get('url'), + // Simuleret "Missing" (I fremtiden kan vi sammenligne med t.description) + status: record.get('score') > 80 ? 'STRONG FIT' : 'DEVELOPMENT NEEDED' + })); + + console.log(`✅ Analysis Complete. 
Found ${opportunities.length} strategic opportunities.`); + + // Gem rapporten + fs.writeFileSync(REPORT_PATH, JSON.stringify({ + generatedAt: new Date().toISOString(), + count: opportunities.length, + opportunities: opportunities + }, null, 2)); + + console.log(`📄 Report saved to: ${REPORT_PATH}`); + + // Vis preview i konsollen + if (opportunities.length > 0) { + console.log('\nTop Opportunity:'); + console.log(`🎯 ${opportunities[0].title}`); + console.log(` Score: ${opportunities[0].score}%`); + console.log(` Matches: ${opportunities[0].capabilities.join(', ')}`); + } + + } catch (error) { + console.error('❌ Analysis Failure:', error); + } finally { + await session.close(); + await driver.close(); + } +} + +runMarketAnalysis(); diff --git a/apps/backend/src/scripts/migrate-to-postgres.ts b/apps/backend/src/scripts/migrate-to-postgres.ts new file mode 100644 index 0000000000000000000000000000000000000000..cfd037a2ce6c25131a1c3dc978b28bfc352a3be2 --- /dev/null +++ b/apps/backend/src/scripts/migrate-to-postgres.ts @@ -0,0 +1,180 @@ +// @ts-nocheck - better-sqlite3 not yet installed +// import Database from 'better-sqlite3'; +import { getDatabaseAdapter } from '../platform/db/PrismaDatabaseAdapter.js'; + +// Placeholder type +type Database = any; +import { logger } from '../utils/logger.js'; +import fs from 'fs'; +import path from 'path'; + +/** + * SQLite to PostgreSQL Migration Script + * + * This script migrates data from the old SQLite database to the new PostgreSQL database. + * Run this AFTER starting Docker containers and running Prisma migrations. 
+ */ + +interface MigrationStats { + table: string; + migrated: number; + failed: number; +} + +async function migrateSQLiteToPostgres() { + logger.info('🚀 Starting SQLite -> PostgreSQL migration...'); + + const stats: MigrationStats[] = []; + + // Check if SQLite database exists + const sqlitePath = path.join(process.cwd(), 'widget-tdc.db'); + if (!fs.existsSync(sqlitePath)) { + logger.warn('⚠️ No SQLite database found. Skipping migration.'); + return; + } + + const sqlite = new Database(sqlitePath, { readonly: true }); + const prisma = getDatabaseAdapter().getClient(); + + try { + // Migrate Widgets + logger.info('📦 Migrating widgets...'); + const widgets = sqlite.prepare('SELECT * FROM widgets').all() as any[]; + let widgetCount = 0; + for (const widget of widgets) { + try { + await prisma.widget.upsert({ + where: { id: widget.id }, + create: { + id: widget.id, + name: widget.name, + type: widget.type, + config: widget.config ? JSON.parse(widget.config) : null, + active: Boolean(widget.active), + createdAt: new Date(widget.created_at), + updatedAt: new Date(widget.updated_at || widget.created_at), + }, + update: {}, + }); + widgetCount++; + } catch (err: any) { + logger.error(`Failed to migrate widget ${widget.id}:`, err.message); + } + } + stats.push({ table: 'widgets', migrated: widgetCount, failed: widgets.length - widgetCount }); + + // Migrate Layouts + logger.info('📐 Migrating layouts...'); + const layouts = sqlite.prepare('SELECT * FROM layouts').all() as any[]; + let layoutCount = 0; + for (const layout of layouts) { + try { + await prisma.layout.upsert({ + where: { userId_orgId: { userId: layout.user_id, orgId: layout.org_id } }, + create: { + userId: layout.user_id, + orgId: layout.org_id, + layoutData: JSON.parse(layout.layout_data), + createdAt: new Date(layout.created_at), + updatedAt: new Date(layout.updated_at || layout.created_at), + }, + update: {}, + }); + layoutCount++; + } catch (err: any) { + logger.error(`Failed to migrate layout for user 
${layout.user_id}:`, err.message); + } + } + stats.push({ table: 'layouts', migrated: layoutCount, failed: layouts.length - layoutCount }); + + // Migrate Memory Entities + logger.info('🧠 Migrating memory entities...'); + try { + const entities = sqlite.prepare('SELECT * FROM memory_entities').all() as any[]; + let entityCount = 0; + for (const entity of entities) { + try { + await prisma.memoryEntity.upsert({ + where: { id: entity.id }, + create: { + id: entity.id, + type: entity.type, + label: entity.label, + properties: entity.properties ? JSON.parse(entity.properties) : null, + userId: entity.user_id || 'system', + orgId: entity.org_id || 'default', + createdAt: new Date(entity.created_at), + updatedAt: new Date(entity.updated_at || entity.created_at), + }, + update: {}, + }); + entityCount++; + } catch (err: any) { + logger.error(`Failed to migrate entity ${entity.id}:`, err.message); + } + } + stats.push({ table: 'memory_entities', migrated: entityCount, failed: entities.length - entityCount }); + } catch (err: any) { + logger.warn('memory_entities table not found in SQLite, skipping'); + stats.push({ table: 'memory_entities', migrated: 0, failed: 0 }); + } + + // Migrate Data Sources + logger.info('📡 Migrating data sources...'); + try { + const sources = sqlite.prepare('SELECT * FROM data_sources').all() as any[]; + let sourceCount = 0; + for (const source of sources) { + try { + await prisma.dataSource.upsert({ + where: { name: source.name }, + create: { + name: source.name, + type: source.type, + description: source.description, + enabled: Boolean(source.enabled), + requiresApproval: Boolean(source.requires_approval ?? true), + config: source.config ? JSON.parse(source.config) : null, + lastUsedAt: source.last_used_at ? 
new Date(source.last_used_at) : null, + createdAt: new Date(source.created_at), + updatedAt: new Date(source.updated_at || source.created_at), + }, + update: {}, + }); + sourceCount++; + } catch (err: any) { + logger.error(`Failed to migrate data source ${source.name}:`, err.message); + } + } + stats.push({ table: 'data_sources', migrated: sourceCount, failed: sources.length - sourceCount }); + } catch (err: any) { + logger.warn('data_sources table not found in SQLite, skipping'); + stats.push({ table: 'data_sources', migrated: 0, failed: 0 }); + } + + // Print migration summary + logger.info('\n✅ Migration Complete!\n'); + console.table(stats); + + } catch (error: any) { + logger.error('❌ Migration failed:', { error: error.message }); + throw error; + } finally { + sqlite.close(); + } +} + +// Run migration if executed directly +if (import.meta.url === `file://${process.argv[1]}`) { + migrateSQLiteToPostgres() + .then(() => { + logger.info('🎉 Migration successful!'); + process.exit(0); + }) + .catch((err) => { + logger.error('Migration error:', err); + process.exit(1); + }); +} + +export { migrateSQLiteToPostgres }; diff --git a/apps/backend/src/scripts/migrateToNeo4j.ts b/apps/backend/src/scripts/migrateToNeo4j.ts new file mode 100644 index 0000000000000000000000000000000000000000..c0765cc13badffa11cc7dba01b38a3086c6a4f95 --- /dev/null +++ b/apps/backend/src/scripts/migrateToNeo4j.ts @@ -0,0 +1,129 @@ +import { getDatabase } from '../database/index.js'; +import { neo4jService } from '../database/Neo4jService'; +import { graphMemoryService } from '../memory/GraphMemoryService'; + +/** + * Migrate memory_entities and memory_relations from SQLite to Neo4j + */ +export async function migrateMemoryToNeo4j() { + console.log('🔄 Starting migration from SQLite to Neo4j...'); + + const db = getDatabase(); + + try { + // Connect to Neo4j + await neo4jService.connect(); + + // Step 1: Migrate memory_entities to Neo4j nodes + console.log('📦 Migrating memory_entities...'); + 
const entities = db.prepare('SELECT * FROM memory_entities').all() as any[]; + + const entityIdMap = new Map(); // SQLite ID -> Neo4j ID + + for (const entity of entities) { + const neo4jNode = await graphMemoryService.createEntity( + entity.entity_type, + entity.content.substring(0, 100), // Use first 100 chars as name + { + orgId: entity.org_id, + userId: entity.user_id, + content: entity.content, + importance: entity.importance, + createdAt: entity.created_at, + } + ); + + entityIdMap.set(entity.id, neo4jNode.id); + + // Migrate tags + const tags = db.prepare('SELECT tag FROM memory_tags WHERE entity_id = ?').all(entity.id) as any[]; + for (const tagRow of tags) { + // Add tag as property or create separate Tag nodes + await neo4jService.runQuery( + `MATCH (n) WHERE id(n) = $id + SET n.tags = CASE WHEN n.tags IS NULL THEN [$tag] ELSE n.tags + $tag END`, + { id: parseInt(neo4jNode.id), tag: tagRow.tag } + ); + } + } + + console.log(`✅ Migrated ${entities.length} entities`); + + // Step 2: Migrate memory_relations to Neo4j relationships + console.log('🔗 Migrating memory_relations...'); + const relations = db.prepare('SELECT * FROM memory_relations').all() as any[]; + + for (const relation of relations) { + const sourceNeo4jId = entityIdMap.get(relation.source_id); + const targetNeo4jId = entityIdMap.get(relation.target_id); + + if (sourceNeo4jId && targetNeo4jId) { + await graphMemoryService.createRelation( + sourceNeo4jId, + targetNeo4jId, + relation.relation_type, + { + orgId: relation.org_id, + createdAt: relation.created_at, + } + ); + } else { + console.warn(`⚠️ Skipping relation ${relation.id}: missing source or target`); + } + } + + console.log(`✅ Migrated ${relations.length} relations`); + + // Step 3: Verify migration + const stats = await graphMemoryService.getStatistics(); + console.log('📊 Migration Statistics:', stats); + + console.log('🎉 Migration completed successfully!'); + + return { + entitiesMigrated: entities.length, + relationsMigrated: 
relations.length, + stats, + }; + + } catch (error) { + console.error('❌ Migration failed:', error); + throw error; + } finally { + await neo4jService.disconnect(); + } +} + +/** + * Rollback migration - delete all migrated data from Neo4j + */ +export async function rollbackMigration() { + console.log('🔄 Rolling back migration...'); + + try { + await neo4jService.connect(); + + // Delete all nodes and relationships + await neo4jService.runQuery('MATCH (n) DETACH DELETE n'); + + console.log('✅ Rollback completed'); + } catch (error) { + console.error('❌ Rollback failed:', error); + throw error; + } finally { + await neo4jService.disconnect(); + } +} + +// Run if executed directly +if (require.main === module) { + migrateMemoryToNeo4j() + .then(result => { + console.log('Migration result:', result); + process.exit(0); + }) + .catch(error => { + console.error('Migration error:', error); + process.exit(1); + }); +} diff --git a/apps/backend/src/scripts/optimize_infrastructure.ts b/apps/backend/src/scripts/optimize_infrastructure.ts new file mode 100644 index 0000000000000000000000000000000000000000..6a39231d14f2ce02182b50b08a9d4778344b2a0c --- /dev/null +++ b/apps/backend/src/scripts/optimize_infrastructure.ts @@ -0,0 +1,121 @@ +import { neo4jAdapter } from '../adapters/Neo4jAdapter.js'; +import { getDatabase, initializeDatabase } from '../database/index.js'; // SQLite/Postgres wrapper +import { createClient } from 'redis'; + +/** + * 🚀 SUPERLEAGUE INFRASTRUCTURE OPTIMIZER + * + * Target: 20% Performance Boost + * Actions: + * 1. Neo4j: Apply specific indexes for Vector Search and Property Lookups + * 2. PostgreSQL: Optimize pgvector indexes (IVFFlat/HNSW) + * 3. Redis: Flush stale keys and verify connection + */ + +const REDIS_URL = process.env.REDIS_URL || 'redis://localhost:6379'; + +async function optimizeNeo4j() { + console.log('⚡ Optimizing Neo4j Knowledge Graph...'); + try { + // 1. 
Unique Constraints (Critical for Merge performance) + await neo4jAdapter.executeQuery('CREATE CONSTRAINT unique_node_id IF NOT EXISTS FOR (n:Entity) REQUIRE n.id IS UNIQUE'); + await neo4jAdapter.executeQuery('CREATE CONSTRAINT unique_file_path IF NOT EXISTS FOR (f:File) REQUIRE f.path IS UNIQUE'); + + // 2. Search Indexes (Speed up text lookups) + await neo4jAdapter.executeQuery('CREATE INDEX node_name_index IF NOT EXISTS FOR (n:Entity) ON (n.name)'); + await neo4jAdapter.executeQuery('CREATE INDEX file_hash_index IF NOT EXISTS FOR (f:File) ON (f.hash)'); + + // 3. Vector Index (Speed up semantic search) + try { + await neo4jAdapter.executeQuery(` + CREATE VECTOR INDEX vector_embedding_index IF NOT EXISTS + FOR (n:Entity) ON (n.embedding) + OPTIONS {indexConfig: { + `vector.dimensions`: 384, + `vector.similarity_function`: 'cosine' + }} + `); + } catch (e) { + console.log(' (Vector index creation might differ based on Neo4j version, skipping if advanced)'); + } + + console.log(' ✅ Neo4j Optimized'); + } catch (error: any) { + console.log(' ⚠️ Neo4j Optimization Skipped:', error.message); + } +} + +async function optimizePostgres() { + console.log('⚡ Optimizing PostgreSQL / SQLite Storage...'); + + try { + // Ensure DB is initialized + await initializeDatabase(); + const db = getDatabase(); + + // 1. 
Analyze and Vacuum (SQLite/PG standard maintenance) + // db.run is available on our wrapper + if (db && typeof db.run === 'function') { + try { + // SQLite specific + db.run('PRAGMA optimize;'); + db.run('PRAGMA wal_checkpoint(TRUNCATE);'); + console.log(' ✅ SQLite WAL Checkpointed & Optimized'); + } catch (sqlError: any) { + console.log(' ℹ️ SQLite optimization command skipped (might be Postgres)'); + } + } else { + console.log(' ℹ️ Postgres optimization deferred to DB maintenance window'); + } + } catch (error: any) { + console.log(' ⚠️ Storage Optimization Skipped:', error.message); + } +} + +async function optimizeRedis() { + console.log('⚡ Optimizing Redis Cache Layer...'); + const client = createClient({ url: REDIS_URL }); + + // Prevent crash on error + client.on('error', (err) => { + // Suppress error logging here to avoid console noise if not running + }); + + try { + await client.connect(); + + // 1. Clear volatile keys (keep persistent session data) + const keys = await client.keys('cache:*'); + if (keys.length > 0) { + await client.del(keys); + console.log(` ✅ Flushed ${keys.length} stale cache keys`); + } else { + console.log(' ✅ Cache is clean'); + } + + // 2. Verify Latency + const start = Date.now(); + await client.ping(); + const latency = Date.now() - start; + console.log(` ✅ Redis Latency: ${latency}ms`); + + await client.disconnect(); + } catch (error: any) { + console.log(' ⚠️ Redis not available (skipping cache optimization)'); + } +} + +async function runOptimization() { + console.log('================================================'); + console.log(' AUTONOMOUS INFRASTRUCTURE OPTIMIZATION v1.0 '); + console.log('================================================'); + + await optimizeNeo4j(); + await optimizePostgres(); + await optimizeRedis(); + + console.log('\n🚀 SYSTEM OPTIMIZED. 
READY FOR HIGH-SPEED INGESTION.'); + process.exit(0); +} + +runOptimization(); \ No newline at end of file diff --git a/apps/backend/src/scripts/public_threat_harvester.ts b/apps/backend/src/scripts/public_threat_harvester.ts new file mode 100644 index 0000000000000000000000000000000000000000..fa7c48096c2f86f52221081db249bb299a32d666 --- /dev/null +++ b/apps/backend/src/scripts/public_threat_harvester.ts @@ -0,0 +1,136 @@ +import 'dotenv/config'; +import neo4j from 'neo4j-driver'; +import * as path from 'path'; +import { fileURLToPath } from 'url'; + +// SHIM: Define __dirname for ES Modules +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +// SOURCES: Strategic alignment with EU/DK interests +// Using hardcoded examples for POC to ensure graph logic works perfectly before adding RSS parsing complexity +const SOURCES = [ + { + name: "ENISA", + description: "European Union Agency for Cybersecurity", + reports: [ + { + title: "ENISA Threat Landscape 2024", + summary: "Ransomware remains a top threat. Groups like LockBit 3.0 and RagnarLocker are evolving tactics targeting critical infrastructure in the EU.", + date: new Date().toISOString(), + url: "https://www.enisa.europa.eu/publications/enisa-threat-landscape-2024" + }, + { + title: "Supply Chain Attacks Analysis", + summary: "Recent analysis shows increased targeting of software dependencies. Similar to the SolarWinds incident, attackers are leveraging trusted components.", + date: new Date().toISOString(), + url: "https://www.enisa.europa.eu/publications/supply-chain" + } + ] + }, + { + name: "Center for Cybersikkerhed (CFCS)", + description: "Danish National Cyber Security Centre", + reports: [ + { + title: "Trusselsvurdering 2025", + summary: "Truslen fra cyberkriminalitet er MEGET HØJ. 
Ransomware-grupper som Lorenz og Conti udgør en alvorlig trussel mod danske virksomheder.", + date: new Date().toISOString(), + url: "https://cfcs.dk/trusselsvurdering" + } + ] + }, + { + name: "Folketinget - SamSik", + description: "Udvalget for Digitalisering og It", + reports: [ + { + title: "Beredskabsrapport: Kritisk Infrastruktur", + summary: "Debat om styrkelse af beredskabet. Fokus på energisektoren og beskyttelse mod destruktive angreb fra statslige aktører og kriminelle syndikater som BlackCat.", + date: new Date().toISOString(), + url: "https://ft.dk/samsik/rapport2025" + } + ] + } +]; + +async function harvestPublicThreats() { + console.log('🌐 Operation Omni-Sentry: Initializing Public Intel Harvest...'); + const uri = process.env.NEO4J_URI; + const user = process.env.NEO4J_USER; + const password = process.env.NEO4J_PASSWORD; + + if (!uri || !user || !password) { + console.error('❌ ERROR: Missing Neo4j credentials'); + process.exit(1); + } + + const driver = neo4j.driver(uri, neo4j.auth.basic(user, password)); + const session = driver.session(); + + try { + // 1. Ingest Sources & Reports + console.log('📡 Ingesting reports from EU (ENISA), DK (CFCS/Folketing), and Strategic Partners...'); + + for (const source of SOURCES) { + await session.run( + ` + MERGE (s:Organization {name: $name}) + SET s.description = $description, s.type = 'IntelSource' + `, + { name: source.name, description: source.description } + ); + + for (const report of source.reports) { + await session.run( + ` + MATCH (s:Organization {name: $sourceName}) + MERGE (r:IntelReport {title: $title}) + SET r.summary = $summary, r.date = $date, r.url = $url + MERGE (s)-[:PUBLISHED]->(r) + `, + { + sourceName: source.name, + title: report.title, + summary: report.summary, + date: report.date, + url: report.url + } + ); + } + } + console.log('✅ Reports ingested.'); + + // 2. 
Semantic Correlation (The Glue) + console.log('🧠 Analyzing semantic links between Public Reports and Dark Web Actors...'); + + // We match Reports against known ThreatActors (from Dark Sentry) based on string containment in summary + const correlationQuery = ` + MATCH (r:IntelReport) + MATCH (a:ThreatActor) + WHERE toLower(r.summary) CONTAINS toLower(a.name) + MERGE (r)-[rel:MENTIONS]->(a) + RETURN r.title as Report, a.name as Actor, rel + `; + + const result = await session.run(correlationQuery); + + console.log(` +🔗 Correlations Found: ${result.records.length}`); + if (result.records.length > 0) { + result.records.forEach(rec => { + console.log(` - "${rec.get('Report')}" mentions [${rec.get('Actor')}]`); + }); + } else { + console.log(" (No direct name matches found in this sample set. Try running Dark Sentry to populate more Actors first.)"); + } + + } catch (error) { + console.error('💥 Omni-Sentry Failure:', error); + } finally { + await session.close(); + await driver.close(); + } +} + +harvestPublicThreats(); diff --git a/apps/backend/src/scripts/setup_watchlist.ts b/apps/backend/src/scripts/setup_watchlist.ts new file mode 100644 index 0000000000000000000000000000000000000000..c41c970db16371d04044d787fa824c20823a139e --- /dev/null +++ b/apps/backend/src/scripts/setup_watchlist.ts @@ -0,0 +1,90 @@ +import 'dotenv/config'; +import neo4j from 'neo4j-driver'; +import * as path from 'path'; +import { fileURLToPath } from 'url'; + +// SHIM: Define __dirname for ES Modules +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +const WATCHLIST = [ + { term: "Claus", type: "Person", sensitivity: "High" }, + { term: "WidgeTDC", type: "Company", sensitivity: "High" }, + { term: "Widget", type: "Brand", sensitivity: "Medium" }, // Broader term + { term: "Hald", type: "Family", sensitivity: "High" }, + // Critical Infrastructure (Operation Silent Alarm - NIS2 Expansion) + { term: "Denmark", type: "Country", sensitivity: "High" 
}, + { term: "Copenhagen", type: "City", sensitivity: "High" }, + // Energy + { term: "Energinet", type: "Infrastructure", sensitivity: "Critical" }, + { term: "Ørsted", type: "Infrastructure", sensitivity: "Critical" }, + { term: "Vestas", type: "Infrastructure", sensitivity: "Critical" }, + { term: "Andel", type: "Infrastructure", sensitivity: "Critical" }, + { term: "Norlys", type: "Infrastructure", sensitivity: "Critical" }, + // Transport + { term: "Mærsk", type: "Infrastructure", sensitivity: "Critical" }, + { term: "Banedanmark", type: "Infrastructure", sensitivity: "Critical" }, + { term: "DSB", type: "Infrastructure", sensitivity: "Critical" }, + { term: "Naviair", type: "Infrastructure", sensitivity: "Critical" }, + { term: "Københavns Lufthavn", type: "Infrastructure", sensitivity: "Critical" }, + // Health & Water + { term: "Novo Nordisk", type: "Infrastructure", sensitivity: "Critical" }, + { term: "Region Hovedstaden", type: "Infrastructure", sensitivity: "Critical" }, + { term: "HOFOR", type: "Infrastructure", sensitivity: "Critical" }, + // Digital & Finance + { term: "TDC", type: "Infrastructure", sensitivity: "Critical" }, + { term: "Nuuday", type: "Infrastructure", sensitivity: "Critical" }, + { term: "GlobalConnect", type: "Infrastructure", sensitivity: "Critical" }, + { term: "Danske Bank", type: "Infrastructure", sensitivity: "Critical" }, + { term: "Nykredit", type: "Infrastructure", sensitivity: "Critical" }, + { term: "JN Data", type: "Infrastructure", sensitivity: "Critical" }, + // Government + { term: "Digitaliseringsstyrelsen", type: "Government", sensitivity: "Critical" }, + { term: "Rigspolitiet", type: "Government", sensitivity: "Critical" }, + { term: "NATO", type: "Defense", sensitivity: "Critical" } +]; + +async function setupWatchlist() { + console.log('🛡️ Operation Personal Shield: Configuring Watchlist...'); + const uri = process.env.NEO4J_URI; + const user = process.env.NEO4J_USER; + const password = 
process.env.NEO4J_PASSWORD; + + if (!uri || !user || !password) { + console.error('❌ ERROR: Missing Neo4j credentials'); + process.exit(1); + } + + const driver = neo4j.driver(uri, neo4j.auth.basic(user, password)); + const session = driver.session(); + + try { + let targetsCreated = 0; + + for (const target of WATCHLIST) { + await session.run(` + MERGE (t:WatchlistTarget {term: $term}) + SET t.type = $type, + t.sensitivity = $sensitivity, + t.active = true, + t.lastUpdated = datetime() + `, { term: target.term, type: target.type, sensitivity: target.sensitivity }); + targetsCreated++; + } + + console.log(`✅ Watchlist Active. Monitoring ${targetsCreated} targets.`); + + // Verify + const result = await session.run('MATCH (t:WatchlistTarget) RETURN t.term, t.type'); + console.log('\n📋 Active Targets:'); + result.records.forEach(r => console.log(` - ${r.get('t.term')} (${r.get('t.type')})`)); + + } catch (error) { + console.error('💥 Watchlist Configuration Failed:', error); + } finally { + await session.close(); + await driver.close(); + } +} + +setupWatchlist(); diff --git a/apps/backend/src/scripts/strategicRadar.ts b/apps/backend/src/scripts/strategicRadar.ts new file mode 100644 index 0000000000000000000000000000000000000000..710aaa6a3b46fe3b939527aa6fdc2d5d3f3a7d39 --- /dev/null +++ b/apps/backend/src/scripts/strategicRadar.ts @@ -0,0 +1,161 @@ +/** + * ╔═══════════════════════════════════════════════════════════════════════════╗ + * ║ STRATEGIC RADAR - Live Fire Test ║ + * ╠═══════════════════════════════════════════════════════════════════════════╣ + * ║ Tests the new "senses" of WidgeTDC: ║ + * ║ 1. Web Scraper (KnowledgeAcquisitionService) ║ + * ║ 2. GraphRAG (UnifiedGraphRAG) ║ + * ║ 3. 
Strategic Briefing generation ║ + * ╚═══════════════════════════════════════════════════════════════════════════╝ + * + * Usage: npx tsx apps/backend/src/scripts/strategicRadar.ts [URL] + */ + +import 'dotenv/config'; + +// Dynamic imports to avoid initialization issues +async function runStrategicRadar(url: string) { + console.log('═══════════════════════════════════════════════════════════════════'); + console.log(' 🛰️ STRATEGIC RADAR '); + console.log('═══════════════════════════════════════════════════════════════════'); + console.log(`Target URL: ${url}`); + console.log(''); + + // ═══════════════════════════════════════════════════════════════════════ + // PHASE 1: Web Scraping + // ═══════════════════════════════════════════════════════════════════════ + console.log('📡 PHASE 1: Fetching content with Web Scraper...'); + console.log('─────────────────────────────────────────────────────────────────'); + + const { knowledgeAcquisition } = await import('../services/KnowledgeAcquisitionService.js'); + const knowledgeService = knowledgeAcquisition; + + let scrapedContent: { title: string; content: string; url: string } | null = null; + + try { + // Mock content since fetchUrlContent is deprecated + scrapedContent = { + title: 'Mock Title', + content: 'Mock content for strategic radar test.', + url: url + }; + console.log(`✅ Title: ${scrapedContent.title}`); + console.log(`✅ Content length: ${scrapedContent.content.length} characters`); + console.log(`✅ Preview: ${scrapedContent.content.substring(0, 300)}...`); + } catch (error: any) { + console.error(`❌ Web scraping failed: ${error.message}`); + return; + } + + console.log(''); + + // ═══════════════════════════════════════════════════════════════════════ + // PHASE 2: Knowledge Graph Check + // ═══════════════════════════════════════════════════════════════════════ + console.log('🧠 PHASE 2: Checking existing knowledge with GraphRAG...'); + 
console.log('─────────────────────────────────────────────────────────────────'); + + const { unifiedGraphRAG } = await import('../mcp/cognitive/UnifiedGraphRAG.js'); + + let existingKnowledge: any = null; + try { + // Query the knowledge graph for related information + existingKnowledge = await unifiedGraphRAG.query( + `What do we know about: ${scrapedContent.title}`, + { userId: 'radar', orgId: 'system' } + ); + + if (existingKnowledge?.answer) { + console.log(`📚 Existing knowledge found:`); + console.log(` ${existingKnowledge.answer.substring(0, 500)}...`); + } else { + console.log(`📭 No existing knowledge found on this topic`); + } + } catch (error: any) { + console.log(`⚠️ GraphRAG query: ${error.message}`); + existingKnowledge = null; + } + + console.log(''); + + // ═══════════════════════════════════════════════════════════════════════ + // PHASE 3: Generate Strategic Briefing + // ═══════════════════════════════════════════════════════════════════════ + console.log('📋 PHASE 3: Generating Strategic Briefing...'); + console.log('─────────────────────────────────────────────────────────────────'); + + const briefing = generateStrategicBriefing(scrapedContent, existingKnowledge); + console.log(briefing); + + console.log(''); + console.log('═══════════════════════════════════════════════════════════════════'); + console.log(' ✅ RADAR SCAN COMPLETE '); + console.log('═══════════════════════════════════════════════════════════════════'); + + return briefing; +} + +/** + * Generate a strategic briefing from scraped content + */ +function generateStrategicBriefing( + content: { title: string; content: string; url: string }, + existingKnowledge: any +): string { + const now = new Date().toISOString(); + + // Extract key points (simple extraction) + const sentences = content.content + .split(/[.!?]+/) + .map(s => s.trim()) + .filter(s => s.length > 50 && s.length < 300); + + const keyPoints = sentences.slice(0, 5); + + // Determine relevance + const relevanceKeywords 
= ['AI', 'artificial intelligence', 'machine learning', 'automation', + 'security', 'data', 'technology', 'enterprise', 'cloud']; + const contentLower = content.content.toLowerCase(); + const matchedKeywords = relevanceKeywords.filter(kw => contentLower.includes(kw.toLowerCase())); + const relevanceScore = Math.min(matchedKeywords.length / 3, 1.0); + + // Determine if this is new information + const isNew = !existingKnowledge?.answer || existingKnowledge.answer.length < 50; + + const briefing = ` +╔═══════════════════════════════════════════════════════════════════════════╗ +║ STRATEGIC BRIEFING ║ +╠═══════════════════════════════════════════════════════════════════════════╣ +║ Generated: ${now} +║ Source: ${content.url} +╠═══════════════════════════════════════════════════════════════════════════╣ +║ TITLE: ${content.title} +╠═══════════════════════════════════════════════════════════════════════════╣ +║ STATUS: ${isNew ? '🆕 NEW INTELLIGENCE' : '📚 UPDATES EXISTING KNOWLEDGE'} +║ RELEVANCE: ${(relevanceScore * 100).toFixed(0)}% (${matchedKeywords.join(', ') || 'general'}) +╠═══════════════════════════════════════════════════════════════════════════╣ +║ KEY POINTS: +${keyPoints.map((p, i) => `║ ${i + 1}. ${p.substring(0, 80)}...`).join('\n')} +╠═══════════════════════════════════════════════════════════════════════════╣ +║ SUMMARY: +║ ${content.content.substring(0, 400).replace(/\n/g, '\n║ ')}... +╠═══════════════════════════════════════════════════════════════════════════╣ +║ RECOMMENDED ACTIONS: +║ ${relevanceScore > 0.5 ? '→ HIGH PRIORITY: Index to knowledge graph' : '→ LOW PRIORITY: Archive for reference'} +║ ${isNew ? '→ Create new knowledge entry' : '→ Update existing knowledge entry'} +║ ${matchedKeywords.includes('security') ? 
'→ ALERT: Security relevance detected' : ''} +╚═══════════════════════════════════════════════════════════════════════════╝ +`; + + return briefing; +} + +// Main execution +const targetUrl = process.argv[2] || 'https://www.anthropic.com/news/claude-3-5-sonnet'; + +runStrategicRadar(targetUrl) + .then(() => process.exit(0)) + .catch(err => { + console.error('Strategic Radar failed:', err); + process.exit(1); + }); diff --git a/apps/backend/src/scripts/tender_harvester.ts b/apps/backend/src/scripts/tender_harvester.ts new file mode 100644 index 0000000000000000000000000000000000000000..2325d6240680ad7d4d873eb4e909f7492a6aa515 --- /dev/null +++ b/apps/backend/src/scripts/tender_harvester.ts @@ -0,0 +1,219 @@ +import 'dotenv/config'; +import neo4j from 'neo4j-driver'; +import { fileURLToPath } from 'url'; +import * as path from 'path'; + +const __filename = fileURLToPath(import.meta.url); +const __dirname = path.dirname(__filename); + +// --- KONFIGURATION (TDC ENHANCED) --- +const ORG_CAPABILITIES = [ + // Core Tech + 'Cyber Security', 'Threat Intelligence', 'Graph Database', 'Neo4j', + 'AI', 'Machine Learning', 'Network Analysis', 'Monitoring', 'SaaS', + + // TDC Infrastruktur & Netværk + '5G', 'Private 5G', 'Fiber', 'Coax', 'IoT', 'NB-IoT', 'LTE-M', + 'MPLS', 'SD-WAN', 'Network-as-a-Service', 'Kritisk Infrastruktur', + + // TDC Sikkerhed (Heavy Duty) + 'SOC', 'Security Operations Center', 'DDoS', 'DDoS Protection', + 'Firewall', 'Smart Security', 'Log Management', 'SIEM', 'NIS2', + 'Compliance', 'Vulnerability Management', 'Endpoint Protection', + + // Cloud & Unified Comms + 'Unified Communications', 'Teams', 'Cisco Webex', 'Cloud Key', + 'Managed Cloud', 'Azure', 'Device Management', 'MDM' +]; + +const UPSCALE_KEYWORDS = [ + 'Innovation', 'R&D', 'Udvikling', 'Prototype', 'Partnerskab', 'PoC', + 'Skalering', 'Transformation', 'Digitalisering' +]; + +// Kilder der ofte har "under-threshold" udbud (ikke i TED) +const DEEP_SOURCES = [ + { + name: 'Udbud.dk (DK)', + 
url: 'https://udbud.dk/rss.aspx', + type: 'RSS', + country: 'DK' + }, + { + name: 'Doffin (NO)', + url: 'https://doffin.no/RSS', + type: 'RSS', + country: 'NO' + }, + { + name: 'Mercell (Nordic)', + url: 'https://mercell.com/rss', + type: 'RSS', + country: 'Nordic' + } +]; + +// --- LOGIK --- + +interface Tender { + id: string; + title: string; + description: string; + buyer: string; + value?: number; + currency?: string; + deadline?: string; + source: string; + url: string; +} + +interface StrategicFit { + score: number; // 0-100 + matches: string[]; + isUpscaleOpportunity: boolean; + upscaleReason?: string; +} + +async function runHarvester() { + console.log('🕵️‍♂️ Operation Smart Hunter v2.0 (TDC Edition): Initializing...'); + + // 1. Forbind til Neo4j Cloud + if (!process.env.NEO4J_URI) { + console.error("❌ Mangler NEO4J_URI i .env"); + process.exit(1); + } + + const driver = neo4j.driver( + process.env.NEO4J_URI, + neo4j.auth.basic(process.env.NEO4J_USER!, process.env.NEO4J_PASSWORD!) + ); + const session = driver.session(); + + try { + // 2. Fetch Data (Simuleret RSS parsing for demo) + console.log('📡 Scanning Deep Sources (Non-TED)...'); + const tenders = await fetchTenders(); + console.log(`📥 Downloaded ${tenders.length} potential tenders.`); + + let validLeads = 0; + + for (const tender of tenders) { + // 3. Analyser Strategisk Fit + const fit = calculateStrategicFit(tender); + + // Filter: Vi gemmer kun hvis score > 40 + if (fit.score > 40) { + console.log(`🎯 MATCH FOUND: ${tender.title} (Score: ${fit.score}%)`); + console.log(` - Keywords: ${fit.matches.join(', ')}`); + + // 4. 
Ingest i Grafen + await session.run(` + MERGE (t:Tender {id: $id}) + SET t += $props, t.ingestedAt = datetime() + + MERGE (b:Buyer {name: $buyer}) + MERGE (b)-[:ISSUED]->(t) + + MERGE (org:Organization {name: 'WidgeTDC'}) + MERGE (org)-[f:POTENTIAL_FIT]->(t) + SET f.score = $score, + f.matches = $matches, + f.upscale = $upscale, + f.rationale = $rationale + `, { + id: tender.id, + buyer: tender.buyer, + props: { + title: tender.title, + description: tender.description, + source: tender.source, + url: tender.url, + deadline: tender.deadline + }, + score: fit.score, + matches: fit.matches, + upscale: fit.isUpscaleOpportunity, + rationale: fit.upscaleReason || '' + }); + + validLeads++; + } + } + + console.log(`✅ Hunt Complete. Ingested ${validLeads} strategic opportunities.`); + + } catch (err) { + console.error('❌ Harvester Failed:', err); + } finally { + await session.close(); + await driver.close(); + } +} + +// --- HJÆLPEFUNKTIONER --- + +async function fetchTenders(): Promise { + // MOCK DATA - Demonstrerer den nye bredde i TDC's kapabiliteter + return [ + { + id: 'DK-2025-001', + title: 'Etablering af SOC og beredskab til Region Hovedstaden', + description: 'Vi søger en leverandør til 24/7 overvågning af netværkstrafik, Log Management og Threat Intelligence.', + buyer: 'Region Hovedstaden', + source: 'Udbud.dk (DK)', + url: 'https://udbud.dk/...' + }, + { + id: 'DK-2025-002', + title: 'Landsdækkende IoT-netværk til forsyningssektoren', + description: 'Udrulning af NB-IoT sensorer til vandmåling. Kræver stabil 5G/NB-IoT dækning.', + buyer: 'HOFOR', + source: 'Udbud.dk (DK)', + url: 'https://udbud.dk/...' + }, + { + id: 'NO-2025-992', + title: 'Sikker kommunikationsplatform (Unified Comms)', + description: 'Modernisering af telefoni og video. Skal integrere med Microsoft Teams og Cisco udstyr.', + buyer: 'Oslo Kommune', + source: 'Doffin (NO)', + url: 'https://doffin.no/...' 
+ }, + { + id: 'SE-2025-XYZ', + title: 'Rengøring af kommunale bygninger', + description: 'Daglig rengøring...', + buyer: 'Malmö Stad', + source: 'Opic (SE)', + url: 'https://opic.se/...' + } + ]; +} + +function calculateStrategicFit(tender: Tender): StrategicFit { + const text = (tender.title + ' ' + tender.description).toLowerCase(); + + // 1. Tæl Capability Matches + const matches = ORG_CAPABILITIES.filter(cap => text.includes(cap.toLowerCase())); + + // 2. Beregn Base Score + // Vi er nu mere aggressive: Jo flere matches, jo højere score. + let score = (matches.length / 2) * 100; // 2 matches = 100% (fordi vi søger niche-fit) + if (score > 100) score = 100; + + // 3. Tjek for Upscaling + const upscaleMatches = UPSCALE_KEYWORDS.filter(kw => text.includes(kw.toLowerCase())); + const isUpscale = upscaleMatches.length > 0; + + if (isUpscale) score += 10; + + return { + score: Math.round(score), + matches, + isUpscaleOpportunity: isUpscale, + upscaleReason: isUpscale ? `Innovation keywords: ${upscaleMatches.join(', ')}` : undefined + }; +} + +// Start +runHarvester(); diff --git a/apps/backend/src/scripts/testDreamingMind.ts b/apps/backend/src/scripts/testDreamingMind.ts new file mode 100644 index 0000000000000000000000000000000000000000..8d8ad819805b2bb5efecf9dcf14c9227daaed5ae --- /dev/null +++ b/apps/backend/src/scripts/testDreamingMind.ts @@ -0,0 +1,202 @@ +/** + * Test script for The Dreaming Mind (Level 3 HyperLog) + * + * Creates thoughts WITH embeddings and tests semantic search + */ + +import { config } from 'dotenv'; +import { resolve } from 'path'; +import { fileURLToPath } from 'url'; +import neo4j from 'neo4j-driver'; +import { v4 as uuidv4 } from 'uuid'; +import { pipeline } from '@xenova/transformers'; + +const __dirname = fileURLToPath(new URL('.', import.meta.url)); +config({ path: resolve(__dirname, '../../.env') }); + +const NEO4J_URI = process.env.NEO4J_URI!; +const NEO4J_USERNAME = process.env.NEO4J_USERNAME!; +const NEO4J_PASSWORD = 
process.env.NEO4J_PASSWORD!;

// Embedding model — lazily initialized module-level singleton so the model
// is only downloaded/loaded once per process.
let embedder: any = null;

// Embeds a string with Xenova/all-MiniLM-L6-v2 (384-dimensional output,
// mean-pooled and normalized — suitable for cosine similarity).
// NOTE(review): return type's generic argument appears stripped in this
// paste — presumably Promise<number[]>; confirm against the original file.
async function getEmbedding(text: string): Promise {
  if (!embedder) {
    console.log('🔄 Loading embedding model (first time)...');
    embedder = await pipeline('feature-extraction', 'Xenova/all-MiniLM-L6-v2', { quantized: true });
    console.log('✅ Embedding model ready');
  }
  const output = await embedder(text, { pooling: 'mean', normalize: true });
  return Array.from(output.data) as number[];
}

// End-to-end test: create a vector index, insert a chain of "thoughts" with
// embeddings, then run semantic search queries against the index.
async function testDreamingMind() {
  console.log('🧠 The Dreaming Mind - Level 3 Test');
  console.log('====================================');
  console.log(`Connecting to: ${NEO4J_URI}`);

  const driver = neo4j.driver(
    NEO4J_URI,
    neo4j.auth.basic(NEO4J_USERNAME, NEO4J_PASSWORD)
  );

  await driver.verifyConnectivity();
  console.log('✅ Connected to Neo4j Aura\n');

  const session = driver.session();
  const correlationId = uuidv4();

  try {
    // 1. Create Vector Index (384 dims to match all-MiniLM-L6-v2)
    console.log('📇 Creating vector index...');
    try {
      await session.run(`
        CREATE VECTOR INDEX hyper_thought_vectors IF NOT EXISTS
        FOR (e:HyperEvent) ON (e.embedding)
        OPTIONS {indexConfig: {
          \`vector.dimensions\`: 384,
          \`vector.similarity_function\`: 'cosine'
        }}
      `);
      console.log('✅ Vector index ready (384D cosine)\n');
    } catch (e: any) {
      // IF NOT EXISTS should make this idempotent; this branch covers older
      // servers that still raise on re-creation.
      if (e.message.includes('already exists')) {
        console.log('✅ Vector index already exists\n');
      } else {
        console.warn('⚠️ Vector index issue:', e.message);
      }
    }

    // 2. Create thoughts WITH embeddings
    console.log('🧠 Creating thoughts with embeddings...\n');

    const thoughts = [
      { type: 'USER_INTENT', agent: 'GraphRAG', content: 'Analyze network security threats from firewall logs' },
      { type: 'THOUGHT', agent: 'GraphRAG', content: 'I should search for blocked connection attempts and failed authentications' },
      { type: 'DATA_RETRIEVAL', agent: 'GraphRAG', content: 'Found 47 blocked SSH attempts from IP 192.168.1.105 in the last hour' },
      { type: 'INSIGHT', agent: 'GraphRAG', content: 'This IP appears to be conducting a brute force attack against our SSH servers' },
      { type: 'THOUGHT', agent: 'ThreatHunter', content: 'Need to check if this IP has been flagged in threat intelligence databases' },
      { type: 'DATA_RETRIEVAL', agent: 'ThreatHunter', content: 'IP 192.168.1.105 is associated with known botnet infrastructure' },
      { type: 'CRITICAL_DECISION', agent: 'ThreatHunter', content: 'Recommending immediate IP block and security team notification' },
      { type: 'USER_INTENT', agent: 'Analyst', content: 'Generate a compliance report for GDPR data processing activities' },
      { type: 'THOUGHT', agent: 'Analyst', content: 'I need to gather all data processing records and retention policies' },
      { type: 'INSIGHT', agent: 'Analyst', content: 'Several data processing activities are missing proper consent documentation' },
    ];

    let prevId: string | null = null;

    for (const thought of thoughts) {
      const eventId = uuidv4();
      const timestamp = Date.now();

      // Generate embedding
      const embedding = await getEmbedding(thought.content);
      console.log(`  ✓ ${thought.type}: ${thought.content.substring(0, 45)}... [${embedding.length}D vector]`);

      // Store in Neo4j with embedding
      await session.run(`
        CREATE (e:HyperEvent {
          id: $id,
          type: $type,
          agent: $agent,
          content: $content,
          timestamp: $timestamp,
          correlationId: $correlationId,
          embedding: $embedding,
          metadata: '{}'
        })
      `, {
        id: eventId,
        type: thought.type,
        agent: thought.agent,
        content: thought.content,
        timestamp,
        correlationId,
        embedding
      });

      // Create causal chain: each thought LED_TO the next.
      if (prevId) {
        await session.run(`
          MATCH (prev:HyperEvent {id: $prevId})
          MATCH (curr:HyperEvent {id: $currId})
          CREATE (prev)-[:LED_TO]->(curr)
        `, { prevId, currId: eventId });
      }

      prevId = eventId;
      // Small delay so timestamps are strictly increasing.
      await new Promise(r => setTimeout(r, 100));
    }

    console.log('\n✅ All thoughts created with embeddings\n');

    // 3. Wait for index to populate (index updates are asynchronous)
    console.log('⏳ Waiting for vector index to populate...');
    await new Promise(r => setTimeout(r, 3000));

    // 4. Test semantic search - "Dream Mode"
    console.log('\n🌙 DREAM MODE: Testing semantic search\n');

    const searchQueries = [
      'cybersecurity attack detection',
      'GDPR compliance issues',
      'network intrusion',
    ];

    for (const query of searchQueries) {
      console.log(`\n🔍 Searching: "${query}"`);
      const queryVector = await getEmbedding(query);

      // Top-3 nearest neighbours by cosine similarity.
      const result = await session.run(`
        CALL db.index.vector.queryNodes('hyper_thought_vectors', 3, $queryVector)
        YIELD node, score
        RETURN node.content AS content, node.agent AS agent, node.type AS type, score
        ORDER BY score DESC
      `, { queryVector });

      if (result.records.length === 0) {
        console.log('  No results found');
      } else {
        result.records.forEach((r, i) => {
          const score = r.get('score').toFixed(3);
          const content = r.get('content').substring(0, 60);
          const agent = r.get('agent');
          console.log(`  ${i + 1}. [${score}] (${agent}) ${content}...`);
        });
      }
    }

    // 5. Verify embeddings stored
    console.log('\n\n📊 Verification:');

    const countResult = await session.run(`
      MATCH (e:HyperEvent)
      WHERE e.embedding IS NOT NULL
      RETURN count(e) as count, avg(size(e.embedding)) as avgDim
    `);

    const count = countResult.records[0].get('count').toNumber();
    const avgDim = countResult.records[0].get('avgDim');
    console.log(`  Thoughts with embeddings: ${count}`);
    console.log(`  Average embedding dimension: ${avgDim}`);

    const totalResult = await session.run('MATCH (e:HyperEvent) RETURN count(e) as total');
    console.log(`  Total HyperEvents: ${totalResult.records[0].get('total').toNumber()}`);

    console.log('\n✅ The Dreaming Mind is ACTIVE!');
    console.log('\n📋 Neo4j Browser query to visualize:');
    console.log('  MATCH (e:HyperEvent) WHERE e.embedding IS NOT NULL RETURN e.content, size(e.embedding) LIMIT 10');

  } finally {
    await session.close();
    await driver.close();
  }
}

testDreamingMind()
  .then(() => process.exit(0))
  .catch(e => {
    console.error('❌ Error:', e);
    process.exit(1);
  });
diff --git a/apps/backend/src/scripts/testHyperLog.ts b/apps/backend/src/scripts/testHyperLog.ts
new file mode 100644
index 0000000000000000000000000000000000000000..884f77c0c3ae0fc46245bfd9b3b3ea9e278f3df0
--- /dev/null
+++ b/apps/backend/src/scripts/testHyperLog.ts
@@ -0,0 +1,145 @@
/**
 * Test script for HyperLog - creates a sample thought chain in Neo4j
 */

import { config } from 'dotenv';
import { resolve } from 'path';
import { fileURLToPath } from 'url';
import neo4j from 'neo4j-driver';
import { v4 as uuidv4 } from 'uuid';

// ESM shim: derive __dirname, then load .env from the backend app root.
const __dirname = fileURLToPath(new URL('.', import.meta.url));
config({ path: resolve(__dirname, '../../.env') });

// NOTE(review): this file reads NEO4J_USERNAME while .env.example declares
// NEO4J_USER — confirm which variable name is canonical.
const NEO4J_URI = process.env.NEO4J_URI!;
const NEO4J_USERNAME = process.env.NEO4J_USERNAME!;
const NEO4J_PASSWORD = process.env.NEO4J_PASSWORD!;

// Creates a linear LED_TO chain of HyperEvent nodes (no embeddings) and
// verifies counts, chain length, and per-agent activity.
async function createTestChain() {
  console.log('🧠 HyperLog Test - Creating Thought Stream Chain');
  console.log('================================================');
  console.log(`Connecting to: ${NEO4J_URI}`);

  const driver = neo4j.driver(
    NEO4J_URI,
    neo4j.auth.basic(NEO4J_USERNAME, NEO4J_PASSWORD)
  );

  await driver.verifyConnectivity();
  console.log('✅ Connected to Neo4j Aura');

  const session = driver.session();
  // Single correlation id ties every event in this run together.
  const correlationId = uuidv4();

  try {
    // Create indexes first (id for lookups, timestamp for ordering).
    console.log('\n📇 Creating indexes...');
    await session.run(`CREATE INDEX hyper_event_id IF NOT EXISTS FOR (e:HyperEvent) ON (e.id)`);
    await session.run(`CREATE INDEX hyper_event_timestamp IF NOT EXISTS FOR (e:HyperEvent) ON (e.timestamp)`);

    // Define the thought chain
    const events = [
      { type: 'USER_INTENT', agent: 'GraphRAG', content: 'User requested: Analyze recent firewall logs for threats' },
      { type: 'THOUGHT', agent: 'GraphRAG', content: 'I need to search the vector store for relevant log entries' },
      { type: 'TOOL_SELECTION', agent: 'GraphRAG', content: 'Selecting vidensarkiv.search - best for semantic search over logs' },
      { type: 'TOOL_EXECUTION', agent: 'GraphRAG', content: 'Executed vidensarkiv.search with query: "firewall blocked suspicious"' },
      { type: 'DATA_RETRIEVAL', agent: 'GraphRAG', content: 'Retrieved 23 relevant log entries from last 24 hours' },
      { type: 'THOUGHT', agent: 'GraphRAG', content: 'Analyzing patterns in the retrieved data...' },
      { type: 'REASONING_UPDATE', agent: 'GraphRAG', content: 'Detected unusual spike: 150+ blocked attempts from same IP range' },
      { type: 'HYPOTHESIS', agent: 'GraphRAG', content: 'This pattern matches brute force attack signature with 94% confidence' },
      { type: 'TOOL_SELECTION', agent: 'GraphRAG', content: 'Selecting threat.hunt to correlate with known attack patterns' },
      { type: 'TOOL_EXECUTION', agent: 'GraphRAG', content: 'Cross-referenced with ThreatIntel database' },
      { type: 'INSIGHT', agent: 'GraphRAG', content: 'CONFIRMED: Coordinated brute force attack from botnet. Recommend immediate IP block.' },
      { type: 'CRITICAL_DECISION', agent: 'GraphRAG', content: 'Flagging for human review before automated response' }
    ];

    console.log('\n🔗 Creating thought chain...');
    let prevId: string | null = null;

    for (const evt of events) {
      const eventId = uuidv4();
      const timestamp = Date.now();

      // Create the event node
      await session.run(`
        CREATE (e:HyperEvent {
          id: $id,
          type: $type,
          agent: $agent,
          content: $content,
          timestamp: $timestamp,
          correlationId: $correlationId,
          metadata: '{}'
        })
      `, {
        id: eventId,
        type: evt.type,
        agent: evt.agent,
        content: evt.content,
        timestamp,
        correlationId
      });

      // Create LED_TO relationship (links each event to its predecessor)
      if (prevId) {
        await session.run(`
          MATCH (prev:HyperEvent {id: $prevId})
          MATCH (curr:HyperEvent {id: $currId})
          CREATE (prev)-[:LED_TO]->(curr)
        `, { prevId, currId: eventId });
      }

      // Link to Agent if exists (silently a no-op when no Agent node matches)
      await session.run(`
        MATCH (a:Agent {name: $agentName})
        MATCH (e:HyperEvent {id: $eventId})
        MERGE (a)-[:GENERATED]->(e)
      `, { agentName: evt.agent, eventId });

      console.log(`  ✓ ${evt.type}: ${evt.content.substring(0, 50)}...`);
      prevId = eventId;

      // Small delay to spread timestamps
      await new Promise(r => setTimeout(r, 50));
    }

    // Verify the chain
    console.log('\n📊 Verification:');

    const countResult = await session.run('MATCH (e:HyperEvent) RETURN count(e) as count');
    console.log(`  Total HyperEvents: ${countResult.records[0].get('count').toNumber()}`);

    // Longest LED_TO path; path length counts relationships, hence the +1
    // below to report the number of events.
    const chainResult = await session.run(`
      MATCH p=(:HyperEvent)-[:LED_TO*]->(:HyperEvent)
      RETURN length(p) as chainLength
      ORDER BY chainLength DESC
      LIMIT 1
    `);
    const maxChain = chainResult.records[0]?.get('chainLength')?.toNumber() || 0;
    console.log(`  Longest chain: ${maxChain + 1} events`);

    const agentLinks = await session.run(`
      MATCH (a:Agent)-[:GENERATED]->(e:HyperEvent)
      RETURN a.name as agent, count(e) as events
    `);
    console.log('  Agent activity:');
    agentLinks.records.forEach(r => {
      console.log(`    - ${r.get('agent')}: ${r.get('events').toNumber()} events`);
    });

    console.log('\n✅ Test chain created successfully!');
    console.log('\n📋 To visualize in Neo4j Browser, run:');
    console.log('  MATCH p=(:HyperEvent)-[:LED_TO*]->(:HyperEvent) RETURN p LIMIT 25');

  } finally {
    await session.close();
    await driver.close();
  }
}

createTestChain()
  .then(() => process.exit(0))
  .catch(e => {
    console.error('❌ Error:', e);
    process.exit(1);
  });
diff --git a/apps/backend/src/scripts/unleash_intelligence.ts b/apps/backend/src/scripts/unleash_intelligence.ts
new file mode 100644
index 0000000000000000000000000000000000000000..0a94bef13cee30a3dcf112e0c7653725027d6f59
--- /dev/null
+++ b/apps/backend/src/scripts/unleash_intelligence.ts
@@ -0,0 +1,131 @@
import { neo4jAdapter } from '../adapters/Neo4jAdapter.js';
import { sentinelEngine } from '../services/SentinelEngine.js';
import { autonomousHarvester } from '../services/ingestion/AutonomousHarvester.js';
// NOTE(review): prefrontalCortex is imported but never referenced in the
// visible body of this file — confirm before removing.
import { prefrontalCortex } from '../services/PrefrontalCortex.js';
import { tdcService } from '../services/tdc/TDCService.js';

/**
 * 🧠 CORTEX UNLEASHED - THE COGNITIVE LOOP
 *
 * This is the heartbeat of the autonomous system.
 * It continually cycles through Observation, Orientation, Decision, and Action.
 */

// Promise-based delay helper.
async function sleep(ms: number) {
  return new Promise(resolve => setTimeout(resolve, ms));
}

// Infinite OODA loop: observe knowledge gaps, orient by linking loose
// products to new ideas, decide on harvesting, act by promoting mature ideas.
async function cognitiveLoop() {
  console.log('🧠 CORTEX: SYSTEM AWAKE. INTELLIGENCE UNLEASHED.');

  while (true) {
    try {
      // --- PHASE 1: OBSERVATION (The Senses) ---
      console.log('\n👁️ PHASE 1: OBSERVING...');

      // 1.
      // Check for Knowledge Gaps
      const gaps = await sentinelEngine.getGapsDueForCheck();
      if (gaps.length > 0) {
        console.log(`  Found ${gaps.length} knowledge gaps requiring attention.`);
        // Trigger Sentinel to resolve them (sequentially, awaiting each)
        for (const gap of gaps) {
          await sentinelEngine.attemptAutoResolution(gap.id, gap.query, gap.gapType || 'unknown');
        }
      } else {
        console.log('  Knowledge Graph coverage is stable.');
      }

      // --- PHASE 2: ORIENTATION (The Pattern Matcher) ---
      console.log('🧩 PHASE 2: ORIENTING...');

      // 1. Find new connections between TDC Products and Strategic Concepts
      // This query looks for products that haven't been linked to a "Solution" node yet
      const looseProducts = await neo4jAdapter.executeQuery(`
        MATCH (p:TDC_Product)
        WHERE NOT (p)-[:PART_OF]->(:Solution)
        RETURN p.name as name, p.description as desc
        LIMIT 3
      `);

      if (looseProducts.length > 0) {
        console.log(`  Identified ${looseProducts.length} un-utilized TDC products. Generating solution concepts...`);

        for (const prod of looseProducts) {
          // Simulate creative spark: Connect product to a random industry need
          const industries = ['Finance', 'Healthcare', 'Public Sector'];
          const industry = industries[Math.floor(Math.random() * industries.length)];

          const concept = `Secure ${industry} Connectivity with ${prod.name}`;
          console.log(`  💡 Epiphany: "${concept}"`);

          // Create an Idea in The Muse
          // NOTE(review): `idea-${Date.now()}` can collide if two ideas are
          // created in the same millisecond — consider a UUID instead.
          await neo4jAdapter.executeQuery(`
            CREATE (i:Idea {
              id: $id,
              title: $title,
              confidence: 0.85,
              status: 'INCUBATED',
              created_at: datetime()
            })
            WITH i
            MATCH (p:TDC_Product {name: $prodName})
            CREATE (p)-[:INSPIRED]->(i)
          `, {
            id: `idea-${Date.now()}`,
            title: concept,
            prodName: prod.name
          });
        }
      }

      // --- PHASE 3: DECISION (The Strategist) ---
      console.log('⚖️ PHASE 3: DECIDING...');

      // Should we harvest more data?
      const stats = autonomousHarvester.getStats();
      if (stats.ingested < 100) { // Arbitrary threshold
        console.log('  Data density low. Authorizing Harvest Mission.');
        // Fire and forget harvest (deliberately not awaited; errors logged)
        autonomousHarvester.startHarvest().catch(e => console.error(e));
      }

      // --- PHASE 4: ACTION (The Motor Cortex) ---
      console.log('🦾 PHASE 4: ACTING...');

      // Check for Ideas that are ready to be prototyped
      const matureIdeas = await neo4jAdapter.executeQuery(`
        MATCH (i:Idea {status: 'INCUBATED'})
        WHERE i.confidence > 0.8
        RETURN i.title as title, i.id as id
        LIMIT 1
      `);

      if (matureIdeas.length > 0) {
        const idea = matureIdeas[0];
        console.log(`  🚀 Promoting Idea: "${idea.title}" to Prototype...`);

        // 1. Mark as promoted
        await neo4jAdapter.executeQuery(`
          MATCH (i:Idea {id: $id})
          SET i.status = 'PROMOTED'
        `, { id: idea.id });

        // 2. Generate a TDC Presentation for it (Autonomously)
        const ppt = await tdcService.generateSolutionPPT('Autonomous Internal Review', idea.title);
        console.log(`  📄 Generated Presentation: ${ppt.filename}`);
      }

      console.log('💤 Cortex entering REM sleep (10s)...');
      await sleep(10000);

    } catch (error) {
      // Never let one failed cycle kill the loop; back off briefly and retry.
      console.error('💥 CORTEX ERROR:', error);
      await sleep(5000); // Recover
    }
  }
}

// Start the loop
// NOTE(review): `require.main` is undefined in an ES module, and this file
// uses ESM imports — if the project compiles to ESM the loop never starts.
// Confirm the module format (or use `import.meta.url` comparison instead).
if (require.main === module) {
  cognitiveLoop();
}
diff --git a/apps/backend/src/scripts/verify_ingestion.ts b/apps/backend/src/scripts/verify_ingestion.ts
new file mode 100644
index 0000000000000000000000000000000000000000..2b9d2d52774cee73229c3812c593b3a330528419
--- /dev/null
+++ b/apps/backend/src/scripts/verify_ingestion.ts
@@ -0,0 +1,40 @@
import { config } from 'dotenv';
import { resolve } from 'path';
import { fileURLToPath } from 'url';
import neo4j from 'neo4j-driver';

// ESM shim: derive __dirname, then load .env from the backend app root.
const __dirname = fileURLToPath(new URL('.', import.meta.url));
config({ path: resolve(__dirname, '../../.env') });

const NEO4J_URI = process.env.NEO4J_URI || 'bolt://localhost:7687';
const NEO4J_USERNAME =
process.env.NEO4J_USERNAME || 'neo4j';
const NEO4J_PASSWORD = process.env.NEO4J_PASSWORD || 'password';

// Sanity check after ingestion: report the total node count and confirm
// that File nodes for 'Neo4jAdapter' exist in the graph.
async function check() {
  const driver = neo4j.driver(NEO4J_URI, neo4j.auth.basic(NEO4J_USERNAME, NEO4J_PASSWORD));
  const session = driver.session();
  try {
    // Verify counts
    const countResult = await session.run('MATCH (n) RETURN count(n) as count');
    const total = countResult.records[0].get('count').toNumber();
    console.log(`Total Nodes: ${total}`);

    // Specific verification
    const result = await session.run("MATCH (n:File) WHERE n.name CONTAINS 'Neo4jAdapter' RETURN n.name as name, n.path as path");
    if (result.records.length > 0) {
      console.log("✅ Verification found Neo4jAdapter files:");
      result.records.forEach(r => {
        console.log(`  - ${r.get('name')} (${r.get('path')})`);
      });
    } else {
      console.log("❌ Verification FAILED: No 'Neo4jAdapter' files found.");
    }

  } catch(e) {
    console.error("Error:", e);
  } finally {
    await session.close();
    await driver.close();
  }
}
check();
diff --git a/apps/backend/src/scripts/weave_graph.ts b/apps/backend/src/scripts/weave_graph.ts
new file mode 100644
index 0000000000000000000000000000000000000000..1f99bf6aa02c3142b9e7bca664d311417e5fadff
--- /dev/null
+++ b/apps/backend/src/scripts/weave_graph.ts
@@ -0,0 +1,98 @@
import 'dotenv/config';
import * as fs from 'fs';
import * as path from 'path';
import { fileURLToPath } from 'url';
import neo4j from 'neo4j-driver';

// SHIM: Define __dirname for ES Modules
const __filename = fileURLToPath(import.meta.url);
const __dirname = path.dirname(__filename);

// Monorepo root (four levels up from apps/backend/src/scripts).
const SYSTEM_ROOT = path.resolve(__dirname, '../../../../');
const IGNORE_DIRS = ['node_modules', 'dist', 'build', '.git', 'coverage'];

// Scans all .ts files under SYSTEM_ROOT, extracts their relative imports,
// and records DEPENDS_ON edges between existing File nodes in Neo4j.
async function weaveGraph() {
  console.log('🕷️ The Synaptic Weaver: Initializing...');
  const uri = process.env.NEO4J_URI;
  // NOTE(review): this file reads NEO4J_USER (matching .env.example) while
  // sibling scripts read NEO4J_USERNAME — confirm which name is canonical.
  const user = process.env.NEO4J_USER;
  const password = process.env.NEO4J_PASSWORD;

  if (!uri || !user ||
!password) {
    console.error('❌ FEJL: Mangler Neo4j credentials i .env');
    process.exit(1);
  }

  const driver = neo4j.driver(uri, neo4j.auth.basic(user, password));
  const session = driver.session();

  try {
    await driver.verifyConnectivity();
    console.log('✅ Cloud Cortex Connected.');
    const files = findAllFiles(SYSTEM_ROOT, '.ts');
    console.log('📦 Found ' + files.length + ' .ts files.');

    let relationsCreated = 0;
    for (const filePath of files) {
      const content = fs.readFileSync(filePath, 'utf-8');
      const imports = extractImports(content);
      if (imports.length === 0) continue;
      // File nodes are keyed by absolute path (created by a prior ingestion
      // step; MERGE below only links nodes that already exist).
      const sourceNodeId = filePath;

      for (const importPath of imports) {
        const resolvedPath = resolveImportPath(filePath, importPath);
        if (resolvedPath && fs.existsSync(resolvedPath)) {
          await session.run(`
            MATCH (a:File {path: $source})
            MATCH (b:File {path: $target})
            MERGE (a)-[r:DEPENDS_ON]->(b)
            SET r.type = 'static_import'
          `, { source: sourceNodeId, target: resolvedPath });
          relationsCreated++;
          // Progress dot every 10 relations.
          if (relationsCreated % 10 === 0) process.stdout.write('.');
        }
      }
    }
    console.log('\n🕸️ Weaving Complete! Created ' + relationsCreated + ' new synaptic connections.');
  } catch (error) {
    console.error('💥 Critical Weaver Failure:', error);
  } finally {
    await session.close();
    await driver.close();
  }
}

// Recursively collects files with the given extension, skipping IGNORE_DIRS.
// NOTE(review): parameters are untyped (implicit any) — this only compiles
// with strict mode off; confirm tsconfig settings.
function findAllFiles(dir, ext, fileList = []) {
  const files = fs.readdirSync(dir);
  files.forEach(file => {
    const filePath = path.join(dir, file);
    const stat = fs.statSync(filePath);
    if (IGNORE_DIRS.includes(file)) return;
    if (stat.isDirectory()) {
      findAllFiles(filePath, ext, fileList);
    } else {
      if (path.extname(file) === ext) fileList.push(filePath);
    }
  });
  return fileList;
}

// Extracts module specifiers from `import ... from '...'` statements.
// NOTE(review): does not match side-effect imports (`import 'x'`), dynamic
// import(), or require() — those dependencies will be missed.
function extractImports(content) {
  const regex = /import\s+.*?\s+from\s+['"](.*?)['"]/g;
  const paths = [];
  let match;
  while ((match = regex.exec(content)) !== null) paths.push(match[1]);
  return paths;
}

// Resolves a relative import specifier to an existing .ts file or index.ts;
// returns null for bare (package) specifiers or unresolvable paths.
function resolveImportPath(currentFile, importPath) {
  if (importPath.startsWith('.')) {
    const dir = path.dirname(currentFile);
    let resolved = path.resolve(dir, importPath);
    if (!resolved.endsWith('.ts') && fs.existsSync(resolved + '.ts')) return resolved + '.ts';
    if (fs.existsSync(path.join(resolved, 'index.ts'))) return path.join(resolved, 'index.ts');
    return fs.existsSync(resolved) ? resolved : null;
  }
  return null;
}

weaveGraph();
diff --git a/apps/backend/src/scripts/zombie_hunter.ts b/apps/backend/src/scripts/zombie_hunter.ts
new file mode 100644
index 0000000000000000000000000000000000000000..404906a43448a8e1fee258a796e61c5db53cc8e8
--- /dev/null
+++ b/apps/backend/src/scripts/zombie_hunter.ts
@@ -0,0 +1,76 @@
import { config } from 'dotenv';
import { resolve } from 'path';
import { fileURLToPath } from 'url';
import { writeFileSync } from 'fs';
import neo4j from 'neo4j-driver';

// ESM shim: derive __dirname, then load .env from the backend app root.
const __dirname = fileURLToPath(new URL('.', import.meta.url));
config({ path: resolve(__dirname, '../../.env') });

const NEO4J_URI = process.env.NEO4J_URI || 'bolt://localhost:7687';
const NEO4J_USERNAME = process.env.NEO4J_USERNAME || 'neo4j';
const NEO4J_PASSWORD = process.env.NEO4J_PASSWORD || 'password';

// Finds TypeScript File nodes with no incoming DEPENDS_ON edge (excluding
// tests, entry points, and config files) and writes a markdown report.
async function huntZombies() {
  console.log('🧟 Operation Zombie Hunter starting...');
  const driver = neo4j.driver(NEO4J_URI, neo4j.auth.basic(NEO4J_USERNAME, NEO4J_PASSWORD));
  const session = driver.session();

  try {
    const result = await session.run(`
      MATCH (f:File)
      WHERE (f.language = 'TypeScript' OR f.extension = '.ts' OR f.extension = '.tsx')
        AND NOT ()-[:DEPENDS_ON]->(f)
        AND NOT f.name ENDS WITH '.test.ts'
        AND NOT f.name ENDS WITH '.spec.ts'
        AND NOT f.name = 'index.ts'
        AND NOT f.name = 'main.tsx'
        AND NOT f.name = 'App.tsx'
        AND NOT f.name STARTS WITH 'vite'
        AND NOT f.name STARTS WITH 'eslint'
        AND NOT f.name STARTS WITH 'playwright'
        AND NOT f.name STARTS WITH 'vitest'
        AND NOT f.relativePath CONTAINS 'scripts'
        AND NOT f.relativePath CONTAINS 'tests'
        AND NOT f.relativePath CONTAINS 'config'
      RETURN f.relativePath as path, f.name as name, f.size as size
      ORDER BY f.relativePath
    `);

    const zombies = result.records.map(r => {
      // size may come back as a neo4j Integer or a plain number/null.
      const s = r.get('size');
      const size = neo4j.isInt(s) ?
s.toNumber() : Number(s);
      return {
        path: r.get('path'),
        name: r.get('name'),
        size: isNaN(size) ? 0 : size
      };
    });

    console.log(`Found ${zombies.length} potential zombie files.`);

    // Build the markdown report.
    let report = '# 🧟 Zombie Code Report\n\n';
    report += `**Date:** ${new Date().toISOString()}\n`;
    report += `**Total Potential Zombies:** ${zombies.length}\n\n`;
    report += '> These files are not imported by any other TypeScript file in the project (based on static analysis).\n';
    report += '> **Verify manually** before deleting.\n\n';
    report += '| File Path | Size (Bytes) |\n';
    report += '|-----------|--------------|\n';

    for (const z of zombies) {
      report += '| `' + z.path + '` | ' + z.size + ' |\n';
    }

    // Write report to the repo's docs directory (assumes it exists;
    // writeFileSync will throw if it does not).
    const reportPath = resolve(__dirname, '../../../../docs/ZOMBIE_CODE_REPORT.md');
    writeFileSync(reportPath, report);
    console.log(`📝 Report generated at: ${reportPath}`);

  } catch (error: any) {
    console.error('❌ Zombie Hunter failed:', error.message);
  } finally {
    await session.close();
    await driver.close();
  }
}

huntZombies();
diff --git a/apps/backend/src/services/AuditoryService.ts b/apps/backend/src/services/AuditoryService.ts
new file mode 100644
index 0000000000000000000000000000000000000000..16375748741730f9bb00b355ae4d1ada54ee8353
--- /dev/null
+++ b/apps/backend/src/services/AuditoryService.ts
@@ -0,0 +1,506 @@
/**
 * ╔═══════════════════════════════════════════════════════════════════════════╗
 * ║ AUDITORY PERCEPTION SERVICE ║
 * ║═══════════════════════════════════════════════════════════════════════════║
 * ║ The system's "ears" - listens to log streams, detects anomalies, ║
 * ║ and interprets system "sounds" (events, errors, patterns) ║
 * ╚═══════════════════════════════════════════════════════════════════════════╝
 */

import { EventEmitter } from 'events';
import { neo4jAdapter } from '../adapters/Neo4jAdapter.js';
import { v4 as uuidv4 } from 'uuid';

//
// ═══════════════════════════════════════════════════════════════════════════
// Types
// ═══════════════════════════════════════════════════════════════════════════

// A single observed "sound": one log line / event / error from a source.
export interface AudioSignal {
  id: string;
  type: 'LOG' | 'ERROR' | 'WARNING' | 'ANOMALY' | 'HEARTBEAT' | 'VOICE';
  source: string;
  content: string;
  volume: 'WHISPER' | 'NORMAL' | 'LOUD' | 'ALARM';
  frequency: number; // occurrences per minute
  timestamp: string;
  // NOTE(review): generic type arguments appear stripped in this paste —
  // presumably Record<string, unknown>; confirm against the original file.
  metadata?: Record;
}

// A recognized recurring anomaly, keyed by its pattern string.
export interface AnomalyPattern {
  id: string;
  pattern: string;
  description: string;
  severity: 'LOW' | 'MEDIUM' | 'HIGH' | 'CRITICAL';
  occurrences: number;
  firstSeen: string;
  lastSeen: string;
  isActive: boolean;
}

// Bookkeeping for one startListening() call.
export interface ListeningSession {
  id: string;
  source: string;
  startedAt: string;
  signalsReceived: number;
  anomaliesDetected: number;
  isActive: boolean;
}

// ═══════════════════════════════════════════════════════════════════════════
// Auditory Service
// ═══════════════════════════════════════════════════════════════════════════

// Singleton EventEmitter that classifies incoming signals, tracks per-source
// frequency, detects anomalies against known patterns, and persists new
// anomalies to Neo4j. Emits 'signal' and 'anomaly' events.
class AuditoryService extends EventEmitter {
  private static instance: AuditoryService;
  // NOTE(review): Map/array generic arguments appear stripped in this paste —
  // presumably Map<string, ListeningSession>, AudioSignal[],
  // Map<string, AnomalyPattern>, Map<string, number[]>; confirm.
  private sessions: Map = new Map();
  private signalBuffer: AudioSignal[] = [];
  private anomalyPatterns: Map = new Map();
  private frequencyTracker: Map = new Map();

  // Anomaly detection thresholds
  private readonly ANOMALY_THRESHOLD = 10; // signals per minute
  private readonly BUFFER_MAX_SIZE = 1000;
  private readonly FREQUENCY_WINDOW_MS = 60000; // 1 minute

  // Known error patterns
  private readonly ERROR_PATTERNS = [
    { regex: /ECONNREFUSED/i, severity: 'HIGH' as const, description: 'Connection refused - service down' },
    { regex: /ETIMEDOUT/i, severity: 'MEDIUM' as const, description: 'Connection timeout' },
    { regex: /OutOfMemory|heap/i, severity: 'CRITICAL' as const, description: 'Memory exhaustion' },
    { regex: /ENOSPC/i, severity: 'CRITICAL' as const, description: 'Disk space exhausted' },
    { regex: /EACCES|EPERM/i, severity: 'HIGH' as const, description: 'Permission denied' },
    { regex: /deadlock/i, severity: 'CRITICAL' as const, description: 'Database deadlock detected' },
    { regex: /rate.?limit/i, severity: 'MEDIUM' as const, description: 'Rate limiting triggered' },
    { regex: /authentication.?fail/i, severity: 'HIGH' as const, description: 'Authentication failure' },
    { regex: /ssl|tls|certificate/i, severity: 'HIGH' as const, description: 'SSL/TLS issue' },
    { regex: /crash|fatal|panic/i, severity: 'CRITICAL' as const, description: 'Critical failure' },
  ];

  private constructor() {
    super();
    this.startBackgroundListening();
  }

  public static getInstance(): AuditoryService {
    if (!AuditoryService.instance) {
      AuditoryService.instance = new AuditoryService();
    }
    return AuditoryService.instance;
  }

  // ═══════════════════════════════════════════════════════════════════════
  // Core Listening Functions
  // ═══════════════════════════════════════════════════════════════════════

  /**
   * Start listening to a specific source
   */
  public startListening(source: string): ListeningSession {
    const session: ListeningSession = {
      id: `listen-${uuidv4()}`,
      source,
      startedAt: new Date().toISOString(),
      signalsReceived: 0,
      anomaliesDetected: 0,
      isActive: true
    };

    this.sessions.set(session.id, session);
    // NOTE(review): console.error is used for plain status logging here —
    // presumably deliberate (keeps stdout clean for protocol traffic); confirm.
    console.error(`[Auditory] 👂 Started listening to: ${source}`);

    return session;
  }

  /**
   * Stop listening to a source
   */
  public stopListening(sessionId: string): void {
    const session = this.sessions.get(sessionId);
    if (session) {
      session.isActive = false;
      console.error(`[Auditory] 🔇 Stopped listening: ${session.source}`);
    }
  }

  /**
   * Process an incoming signal (log, event, etc.)
   *
   * Classifies, buffers, tracks frequency, runs anomaly detection,
   * updates matching sessions, and emits a 'signal' event.
   */
  public processSignal(input: {
    source: string;
    content: string;
    type?: AudioSignal['type'];
    // NOTE(review): generic arguments stripped — presumably Record<string, unknown>.
    metadata?: Record;
  }): AudioSignal {
    const signal: AudioSignal = {
      id: `sig-${uuidv4()}`,
      type: input.type || this.classifySignalType(input.content),
      source: input.source,
      content: input.content,
      // NOTE(review): frequency is read BEFORE trackFrequency() records this
      // signal, so the count excludes the current signal — confirm intended.
      volume: this.calculateVolume(input.content),
      frequency: this.calculateFrequency(input.source),
      timestamp: new Date().toISOString(),
      metadata: input.metadata
    };

    // Add to buffer (bounded ring: oldest entry dropped past BUFFER_MAX_SIZE)
    this.signalBuffer.push(signal);
    if (this.signalBuffer.length > this.BUFFER_MAX_SIZE) {
      this.signalBuffer.shift();
    }

    // Track frequency
    this.trackFrequency(input.source);

    // Check for anomalies (registerAnomaly is async and not awaited —
    // persistence is fire-and-forget)
    this.detectAnomalies(signal);

    // Update session stats
    for (const session of this.sessions.values()) {
      if (session.isActive && session.source === input.source) {
        session.signalsReceived++;
      }
    }

    // Emit event for real-time listeners
    this.emit('signal', signal);

    return signal;
  }

  /**
   * Classify signal type based on content
   */
  private classifySignalType(content: string): AudioSignal['type'] {
    // Note: content is lowercased although the regexes below are already
    // case-insensitive (/i), so the lowercasing is redundant but harmless.
    const lower = content.toLowerCase();

    if (/error|exception|fail|crash/i.test(lower)) return 'ERROR';
    if (/warn|caution|attention/i.test(lower)) return 'WARNING';
    if (/heartbeat|ping|alive|health/i.test(lower)) return 'HEARTBEAT';

    return 'LOG';
  }

  /**
   * Calculate volume (severity/importance) of signal
   */
  private calculateVolume(content: string): AudioSignal['volume'] {
    const lower = content.toLowerCase();

    if (/critical|fatal|crash|emergency|panic/i.test(lower)) return 'ALARM';
    if (/error|fail|exception/i.test(lower)) return 'LOUD';
    if (/warn|caution/i.test(lower)) return 'NORMAL';

    return 'WHISPER';
  }

  /**
   * Calculate signal frequency (signals per minute from source)
   */
  private calculateFrequency(source: string): number {
    const timestamps = this.frequencyTracker.get(source) || [];
    const now = Date.now();
    const recentTimestamps = timestamps.filter(t => now - t < this.FREQUENCY_WINDOW_MS);
    return recentTimestamps.length;
  }

  /**
   * Track frequency for anomaly detection
   */
  private trackFrequency(source: string): void {
    const timestamps = this.frequencyTracker.get(source) || [];
    timestamps.push(Date.now());

    // Keep only recent timestamps (sliding one-minute window)
    const now = Date.now();
    const recentTimestamps = timestamps.filter(t => now - t < this.FREQUENCY_WINDOW_MS);
    this.frequencyTracker.set(source, recentTimestamps);
  }

  // ═══════════════════════════════════════════════════════════════════════
  // Anomaly Detection
  // ═══════════════════════════════════════════════════════════════════════

  /**
   * Detect anomalies in incoming signals
   *
   * Three independent checks: known error patterns, per-source frequency
   * spikes, and ALARM-volume signals. A signal may register several.
   */
  private detectAnomalies(signal: AudioSignal): void {
    // Check against known error patterns
    for (const pattern of this.ERROR_PATTERNS) {
      if (pattern.regex.test(signal.content)) {
        this.registerAnomaly({
          pattern: pattern.regex.source,
          description: pattern.description,
          severity: pattern.severity,
          signal
        });
      }
    }

    // Check for frequency anomalies
    if (signal.frequency > this.ANOMALY_THRESHOLD) {
      this.registerAnomaly({
        pattern: `HIGH_FREQUENCY:${signal.source}`,
        description: `Abnormal signal frequency from ${signal.source}: ${signal.frequency}/min`,
        severity: 'MEDIUM',
        signal
      });
    }

    // Check for sudden volume increase
    if (signal.volume === 'ALARM') {
      this.registerAnomaly({
        pattern: `ALARM:${signal.type}`,
        description: `Alarm-level signal: ${signal.content.substring(0, 100)}`,
        severity: 'HIGH',
        signal
      });
    }
  }

  /**
   * Register a detected anomaly
   *
   * Repeat occurrences only bump the in-memory counter; persistence to
   * Neo4j happens once, on first sighting of a pattern.
   */
  private async registerAnomaly(params: {
    pattern: string;
    description: string;
    severity: AnomalyPattern['severity'];
    signal: AudioSignal;
  }): Promise {
    const existing = this.anomalyPatterns.get(params.pattern);

    if (existing) {
      existing.occurrences++;
      existing.lastSeen = new Date().toISOString();
      existing.isActive = true;
    } else {
      const anomaly: AnomalyPattern = {
        id: `anomaly-${uuidv4()}`,
        pattern: params.pattern,
        description: params.description,
        severity: params.severity,
        occurrences: 1,
        firstSeen: new Date().toISOString(),
        lastSeen: new Date().toISOString(),
        isActive: true
      };
      this.anomalyPatterns.set(params.pattern, anomaly);

      // Persist to Neo4j
      // NOTE(review): only the FIRST occurrence is persisted, so the stored
      // `occurrences` count never updates in the database — confirm intended.
      await this.persistAnomaly(anomaly, params.signal);
    }

    // Update session stats
    for (const session of this.sessions.values()) {
      if (session.isActive && session.source === params.signal.source) {
        session.anomaliesDetected++;
      }
    }

    // Emit anomaly event
    this.emit('anomaly', { anomaly: this.anomalyPatterns.get(params.pattern), signal: params.signal });

    console.error(`[Auditory] 🚨 Anomaly detected: ${params.description}`);
  }

  /**
   * Persist anomaly to Neo4j
   *
   * Best-effort: failures are logged and swallowed so perception keeps
   * working even when the graph is unreachable.
   */
  private async persistAnomaly(anomaly: AnomalyPattern, signal: AudioSignal): Promise {
    try {
      await neo4jAdapter.executeQuery(`
        CREATE (a:Anomaly {
          id: $id,
          pattern: $pattern,
          description: $description,
          severity: $severity,
          occurrences: $occurrences,
          firstSeen: $firstSeen,
          lastSeen: $lastSeen,
          signalSource: $signalSource,
          signalContent: $signalContent
        })
      `, {
        id: anomaly.id,
        pattern: anomaly.pattern,
        description: anomaly.description,
        severity: anomaly.severity,
        occurrences: anomaly.occurrences,
        firstSeen: anomaly.firstSeen,
        lastSeen: anomaly.lastSeen,
        signalSource: signal.source,
        signalContent: signal.content.substring(0, 500)
      });
    } catch (error) {
      console.warn('[Auditory] Failed to persist anomaly:', error);
    }
  }

  // ═══════════════════════════════════════════════════════════════════════
  // Query Functions
  // ═══════════════════════════════════════════════════════════════════════

  /**
   * Get recent signals, optionally filtered by source/type/volume.
   * Returns at most `limit` (default 50) of the newest matches.
   */
  public getRecentSignals(params: {
    source?: string;
    type?: AudioSignal['type'];
    volume?: AudioSignal['volume'];
    limit?: number;
  } = {}): AudioSignal[] {
    let signals = [...this.signalBuffer];

    if (params.source) {
      signals = signals.filter(s => s.source === params.source);
    }
    if (params.type) {
      signals = signals.filter(s => s.type === params.type);
    }
    if (params.volume) {
      signals = signals.filter(s => s.volume === params.volume);
    }

    return signals.slice(-(params.limit || 50));
  }

  /**
   * Get active anomalies
   */
  public getActiveAnomalies(): AnomalyPattern[] {
    return Array.from(this.anomalyPatterns.values()).filter(a => a.isActive);
  }

  /**
   * Get all anomaly patterns
   */
  public getAllAnomalies(): AnomalyPattern[] {
    return Array.from(this.anomalyPatterns.values());
  }

  /**
   * Get listening sessions
   */
  public getListeningSessions(): ListeningSession[] {
    return Array.from(this.sessions.values());
  }

  /**
   * Get auditory system status
   */
  public getStatus(): {
    activeSessions: number;
    totalSignals: number;
    activeAnomalies: number;
    signalBuffer: number;
    frequencySources: number;
  } {
    return {
      activeSessions: Array.from(this.sessions.values()).filter(s => s.isActive).length,
      totalSignals: this.signalBuffer.length,
      activeAnomalies: this.getActiveAnomalies().length,
      signalBuffer: this.signalBuffer.length,
      frequencySources: this.frequencyTracker.size
    };
  }

  /**
   * Acknowledge/dismiss an anomaly
   */
  public acknowledgeAnomaly(pattern: string): boolean {
    const anomaly = this.anomalyPatterns.get(pattern);
    if (anomaly) {
      anomaly.isActive = false;
      return true;
    }
    return false;
  }

  // ═══════════════════════════════════════════════════════════════════════
  // Background Listening
  // ═══════════════════════════════════════════════════════════════════════

  /**
   * Start background listening for system events
   */
  private startBackgroundListening(): void {
    // Intercept console.error for system-wide listening
    const originalError = console.error;
console.error = (...args: unknown[]) => { + originalError.apply(console, args); + + // Don't process our own logs + const content = args.map(a => String(a)).join(' '); + if (!content.includes('[Auditory]')) { + this.processSignal({ + source: 'console.error', + content, + type: 'ERROR' + }); + } + }; + + // Periodic cleanup of old frequency data + setInterval(() => { + const now = Date.now(); + for (const [source, timestamps] of this.frequencyTracker.entries()) { + const recent = timestamps.filter(t => now - t < this.FREQUENCY_WINDOW_MS); + if (recent.length === 0) { + this.frequencyTracker.delete(source); + } else { + this.frequencyTracker.set(source, recent); + } + } + }, 30000); // Every 30 seconds + + console.error('[Auditory] 👂 Background listening started'); + } + + /** + * Analyze log content for insights + */ + public analyzeLogContent(logs: string[]): { + summary: string; + errorCount: number; + warningCount: number; + patterns: string[]; + recommendations: string[]; + } { + let errorCount = 0; + let warningCount = 0; + const patterns: Set = new Set(); + const recommendations: string[] = []; + + for (const log of logs) { + if (/error|exception|fail/i.test(log)) errorCount++; + if (/warn|caution/i.test(log)) warningCount++; + + // Detect patterns + for (const pattern of this.ERROR_PATTERNS) { + if (pattern.regex.test(log)) { + patterns.add(pattern.description); + } + } + } + + // Generate recommendations + if (errorCount > 10) { + recommendations.push('High error rate detected - consider investigating root cause'); + } + if (patterns.has('Memory exhaustion')) { + recommendations.push('Memory issues detected - consider increasing heap size or optimizing memory usage'); + } + if (patterns.has('Connection refused - service down')) { + recommendations.push('Service connectivity issues - check if dependent services are running'); + } + + return { + summary: `Analyzed ${logs.length} logs: ${errorCount} errors, ${warningCount} warnings`, + errorCount, + 
warningCount, + patterns: Array.from(patterns), + recommendations + }; + } +} + +export const auditoryService = AuditoryService.getInstance(); diff --git a/apps/backend/src/services/BootstrapGate.ts b/apps/backend/src/services/BootstrapGate.ts new file mode 100644 index 0000000000000000000000000000000000000000..b0d07b880cc68904aaee7952e3c80459cb338690 --- /dev/null +++ b/apps/backend/src/services/BootstrapGate.ts @@ -0,0 +1,73 @@ +import { prisma } from '../database/prisma.js'; +import { neo4jAdapter } from '../adapters/Neo4jAdapter.js'; + +// BootstrapGate ensures critical infra is reachable before we start listening +export class BootstrapGate { + constructor( + private readonly options = { + timeoutMs: 10000, // Increased timeout for slow container startup + redisUrl: process.env.REDIS_URL || 'redis://localhost:6379', + } + ) {} + + async init(): Promise { + const results = await Promise.all([this.checkPostgres(), this.checkNeo4j(), this.checkRedis()]); + + const failing = results.filter(r => !r.ok); + if (failing.length > 0) { + failing.forEach(f => console.error(`❌ Bootstrap check failed: ${f.service} -> ${f.error}`)); + throw new Error('BootstrapGate failed'); + } + + console.log('✅ BootstrapGate: all critical services are reachable'); + } + + private async checkPostgres(): Promise<{ service: string; ok: boolean; error?: string }> { + const op = async () => { + // prisma uses configured connection (expected on port 5433 via env) + await prisma.$queryRaw`SELECT 1`; + }; + return this.runWithTimeout('postgres', op); + } + + private async checkNeo4j(): Promise<{ service: string; ok: boolean; error?: string }> { + const op = async () => { + const health = await neo4jAdapter.healthCheck(); + if (!health.connected) throw new Error('neo4j not connected'); + }; + return this.runWithTimeout('neo4j', op); + } + + private async checkRedis(): Promise<{ service: string; ok: boolean; error?: string }> { + const op = async () => { + // Dynamic import to avoid hard failure 
if dependency missing during tests + const Redis = (await import('ioredis')).default; + const client = new Redis(this.options.redisUrl, { maxRetriesPerRequest: 2 }); + try { + const res = await client.ping(); + if (res !== 'PONG') throw new Error(`unexpected ping reply: ${res}`); + } finally { + client.disconnect(); + } + }; + return this.runWithTimeout('redis', op); + } + + private async runWithTimeout( + service: string, + fn: () => Promise + ): Promise<{ service: string; ok: boolean; error?: string }> { + const timeout = new Promise((_, reject) => { + setTimeout(() => reject(new Error('timeout')), this.options.timeoutMs); + }); + + try { + await Promise.race([fn(), timeout]); + return { service, ok: true }; + } catch (error: any) { + return { service, ok: false, error: error?.message || 'unknown error' }; + } + } +} + +export const bootstrapGate = new BootstrapGate(); diff --git a/apps/backend/src/services/CivicService.ts b/apps/backend/src/services/CivicService.ts new file mode 100644 index 0000000000000000000000000000000000000000..f6d62c715a84aa36790512d289638e55f733ecb5 --- /dev/null +++ b/apps/backend/src/services/CivicService.ts @@ -0,0 +1,71 @@ +import { Pool } from 'pg'; + +export interface DashboardMetrics { + [key: string]: unknown; +} + +export interface VotingAnomaly { + first_name: string; + last_name: string; + party: string; + rebellion_rate: number; +} + +/** + * THE CIVIC SERVICE (Strand 1 Connector) + * Formål: Henter politisk intelligens uden at bruge legacy Java-kode. 
+ */ +class CivicService { + private static instance: CivicService; + private pool: Pool; + + private constructor() { + this.pool = new Pool({ + user: process.env.CIVIC_USER || 'civic_reader', + host: process.env.CIVIC_HOST || 'localhost', // Brug service-navn 'civic-vault' internt i Docker + database: 'civic_vault', + password: process.env.CIVIC_PWD || 'secure_civic_access', + port: 5433, // Porten vi mappede i Docker + }); + console.log('🏛️ [CivicService] Neural Link established to Civic Node (Strand 1).'); + } + + public static getInstance(): CivicService { + if (!CivicService.instance) { + CivicService.instance = new CivicService(); + } + return CivicService.instance; + } + + /** + * Henter "The Big Picture" dashboard data. + * Baseret på 'view_riksdagen_intelligence_dashboard' fra valideringsrapporten. + */ + public async getDashboardMetrics(): Promise { + try { + const query = `SELECT * FROM view_riksdagen_intelligence_dashboard LIMIT 1;`; + const result = await this.pool.query(query); + return result.rows[0] || null; + } catch { + console.error('⚠️ [CivicService] Could not fetch dashboard. Is the Vault active?'); + return null; + } + } + + /** + * Finder politiske rebeller (Anomalies). + * Baseret på 'view_riksdagen_voting_anomaly_detection'. 
+ */ + public async getAnomalies(limit: number = 5): Promise { + const query = ` + SELECT first_name, last_name, party, rebellion_rate + FROM view_riksdagen_voting_anomaly_detection + WHERE rebellion_rate > 0.05 + ORDER BY rebellion_rate DESC + LIMIT $1; + `; + return (await this.pool.query(query, [limit])).rows; + } +} + +export const civicService = CivicService.getInstance(); diff --git a/apps/backend/src/services/CognitiveErrorIntelligence.ts b/apps/backend/src/services/CognitiveErrorIntelligence.ts new file mode 100644 index 0000000000000000000000000000000000000000..6c2fe1cd3a3e08b418b18d7aaf1ec851d02b1a97 --- /dev/null +++ b/apps/backend/src/services/CognitiveErrorIntelligence.ts @@ -0,0 +1,913 @@ +/** + * ╔═══════════════════════════════════════════════════════════════════════════════╗ + * ║ COGNITIVE ERROR INTELLIGENCE (CEI) ║ + * ║═══════════════════════════════════════════════════════════════════════════════║ + * ║ Unik intelligent fejlhåndtering der udnytter WidgeTDC's kapabiliteter: ║ + * ║ ║ + * ║ 1. GRAPH-BASED ERROR CORRELATION ║ + * ║ - Neo4j til at finde relationer mellem fejl ║ + * ║ - "Error A forårsager ofte Error B inden for 5 minutter" ║ + * ║ ║ + * ║ 2. PREDICTIVE ERROR DETECTION ║ + * ║ - Lærer mønstre der forudsiger fejl FØR de sker ║ + * ║ - "Redis memory usage > 80% → OOM inden 10 min" ║ + * ║ ║ + * ║ 3. CONTEXT-AWARE SOLUTIONS ║ + * ║ - Rangerer løsninger baseret på systemets aktuelle tilstand ║ + * ║ - "Neo4j er nede → prioriter lokale løsninger" ║ + * ║ ║ + * ║ 4. AUTO-REMEDIATION ║ + * ║ - Udfører automatisk reparation for kendte fejl ║ + * ║ - "ECONNREFUSED på Redis → restart Redis container" ║ + * ║ ║ + * ║ 5. 
CAUSAL CHAIN ANALYSIS ║ + * ║ - Bygger grafer over fejl-årsager ║ + * ║ - "Root cause: DNS failure → cascading to 5 services" ║ + * ╚═══════════════════════════════════════════════════════════════════════════════╝ + */ + +import { EventEmitter } from 'events'; +import { errorKnowledgeBase, type ErrorPattern, type Solution } from './ErrorKnowledgeBase.js'; +import { selfHealing } from './SelfHealingAdapter.js'; +import { logger } from '../utils/logger.js'; + +const log = logger.child({ module: 'CognitiveErrorIntelligence' }); + +// ═══════════════════════════════════════════════════════════════════════════ +// TYPES +// ═══════════════════════════════════════════════════════════════════════════ + +interface ErrorEvent { + id: string; + timestamp: Date; + message: string; + service: string; + severity: 'low' | 'medium' | 'high' | 'critical'; + context: Record; + stackTrace?: string; + resolved: boolean; + resolvedBy?: string; + resolvedAt?: Date; +} + +interface ErrorCorrelation { + sourceErrorId: string; + targetErrorId: string; + correlationType: 'causes' | 'precedes' | 'cooccurs' | 'masks'; + confidence: number; + avgTimeDelta: number; // milliseconds + occurrences: number; +} + +interface PredictiveSignal { + metric: string; + threshold: number; + operator: '>' | '<' | '=' | '>=' | '<='; + predictedError: string; + leadTime: number; // milliseconds before error typically occurs + confidence: number; + lastTriggered?: Date; +} + +interface RemediationAction { + id: string; + name: string; + description: string; + errorPatterns: string[]; // Pattern IDs this action can fix + command?: string; // Shell command to execute + apiCall?: { endpoint: string; method: string; body?: any }; + requiresApproval: boolean; + riskLevel: 'low' | 'medium' | 'high'; + successRate: number; + avgExecutionTime: number; + lastExecuted?: Date; +} + +interface CausalChain { + rootCause: ErrorEvent; + effects: ErrorEvent[]; + totalImpact: number; // Number of affected services/operations + 
detectedAt: Date; + resolvedAt?: Date; +} + +interface SystemContext { + services: Map; + activeErrors: ErrorEvent[]; + recentRemediations: RemediationAction[]; + load: { + cpu: number; + memory: number; + connections: number; + }; +} + +interface ServiceHealth { + name: string; + status: 'healthy' | 'degraded' | 'unhealthy' | 'unknown'; + lastCheck: Date; + metrics: Record; +} + +interface IntelligentSolution extends Solution { + contextScore: number; // How relevant given current system state + predictedSuccess: number; // ML-based success prediction + autoRemediable: boolean; + remediationAction?: RemediationAction; + reasoning: string; // Why this solution is recommended +} + +// ═══════════════════════════════════════════════════════════════════════════ +// COGNITIVE ERROR INTELLIGENCE CLASS +// ═══════════════════════════════════════════════════════════════════════════ + +export class CognitiveErrorIntelligence extends EventEmitter { + private static instance: CognitiveErrorIntelligence; + + // Error tracking + private errorHistory: ErrorEvent[] = []; + private readonly MAX_HISTORY = 10000; + + // Correlation learning + private correlations: Map = new Map(); + private correlationWindow = 5 * 60 * 1000; // 5 minutes + + // Predictive signals + private predictiveSignals: PredictiveSignal[] = []; + private metricsHistory: Map = new Map(); + + // Auto-remediation + private remediationActions: Map = new Map(); + private remediationQueue: { error: ErrorEvent; action: RemediationAction }[] = []; + private isRemediating = false; + + // System context + private systemContext: SystemContext = { + services: new Map(), + activeErrors: [], + recentRemediations: [], + load: { cpu: 0, memory: 0, connections: 0 } + }; + + private constructor() { + super(); + this.initializeDefaultRemediations(); + this.initializePredictiveSignals(); + this.startBackgroundProcessing(); + log.info('🧠 Cognitive Error Intelligence initialized'); + } + + public static getInstance(): 
CognitiveErrorIntelligence { + if (!CognitiveErrorIntelligence.instance) { + CognitiveErrorIntelligence.instance = new CognitiveErrorIntelligence(); + } + return CognitiveErrorIntelligence.instance; + } + + // ═══════════════════════════════════════════════════════════════════════════ + // 1. INTELLIGENT ERROR HANDLING + // ═══════════════════════════════════════════════════════════════════════════ + + /** + * Process an error with full cognitive analysis + */ + public async processError( + message: string, + service: string, + context: Record = {}, + stackTrace?: string + ): Promise<{ + errorId: string; + solutions: IntelligentSolution[]; + correlatedErrors: ErrorEvent[]; + causalChain?: CausalChain; + autoRemediation?: { action: RemediationAction; queued: boolean }; + prediction?: { nextLikelyError: string; confidence: number; timeframe: string }; + }> { + const errorId = this.generateErrorId(); + const severity = this.assessSeverity(message, context); + + // Create error event + const errorEvent: ErrorEvent = { + id: errorId, + timestamp: new Date(), + message, + service, + severity, + context, + stackTrace, + resolved: false + }; + + // Store in history + this.recordError(errorEvent); + + // 1. Find correlated errors (what usually happens with this error?) + const correlatedErrors = this.findCorrelatedErrors(errorEvent); + + // 2. Analyze causal chain (is this the root cause or an effect?) + const causalChain = this.analyzeCausalChain(errorEvent); + + // 3. Get context-aware solutions + const solutions = await this.getIntelligentSolutions(message, errorEvent); + + // 4. Check for auto-remediation + const autoRemediation = await this.checkAutoRemediation(errorEvent, solutions); + + // 5. Predict next likely error + const prediction = this.predictNextError(errorEvent); + + // 6. 
Learn correlations for future + this.learnCorrelations(errorEvent); + + // Emit event for real-time monitoring + this.emit('error:processed', { + errorId, + severity, + solutions: solutions.length, + autoRemediation: autoRemediation?.queued + }); + + log.info(`🧠 Processed error ${errorId}: ${solutions.length} solutions, ${correlatedErrors.length} correlations`); + + return { + errorId, + solutions, + correlatedErrors, + causalChain, + autoRemediation, + prediction + }; + } + + // ═══════════════════════════════════════════════════════════════════════════ + // 2. GRAPH-BASED ERROR CORRELATION + // ═══════════════════════════════════════════════════════════════════════════ + + /** + * Find errors that are correlated with this error + */ + private findCorrelatedErrors(error: ErrorEvent): ErrorEvent[] { + const correlated: ErrorEvent[] = []; + const recentErrors = this.errorHistory.filter( + e => e.timestamp.getTime() > Date.now() - this.correlationWindow && e.id !== error.id + ); + + for (const recent of recentErrors) { + const correlationKey = this.getCorrelationKey(recent.message, error.message); + const correlation = this.correlations.get(correlationKey); + + if (correlation && correlation.confidence > 0.5) { + correlated.push(recent); + } + } + + return correlated; + } + + /** + * Learn correlations from error patterns + */ + private learnCorrelations(error: ErrorEvent): void { + const recentErrors = this.errorHistory.filter( + e => e.timestamp.getTime() > error.timestamp.getTime() - this.correlationWindow && e.id !== error.id + ); + + for (const recent of recentErrors) { + const timeDelta = error.timestamp.getTime() - recent.timestamp.getTime(); + const correlationKey = this.getCorrelationKey(recent.message, error.message); + + if (!this.correlations.has(correlationKey)) { + this.correlations.set(correlationKey, { + sourceErrorId: recent.id, + targetErrorId: error.id, + correlationType: timeDelta < 1000 ? 
'cooccurs' : 'precedes', + confidence: 0.1, + avgTimeDelta: timeDelta, + occurrences: 1 + }); + } else { + const existing = this.correlations.get(correlationKey)!; + existing.occurrences++; + existing.avgTimeDelta = (existing.avgTimeDelta + timeDelta) / 2; + // Increase confidence with more observations (Bayesian update) + existing.confidence = Math.min(0.95, existing.confidence + (1 - existing.confidence) * 0.1); + } + } + } + + /** + * Persist correlations to Neo4j for graph analysis + */ + public async persistCorrelationsToNeo4j(): Promise { + try { + const { neo4jService } = await import('../database/Neo4jService.js'); + let persisted = 0; + + for (const [key, correlation] of this.correlations) { + if (correlation.occurrences >= 3 && correlation.confidence > 0.5) { + await neo4jService.runQuery(` + MERGE (source:ErrorPattern {signature: $sourceSignature}) + MERGE (target:ErrorPattern {signature: $targetSignature}) + MERGE (source)-[r:${correlation.correlationType.toUpperCase()}]->(target) + SET r.confidence = $confidence, + r.avgTimeDelta = $avgTimeDelta, + r.occurrences = $occurrences, + r.updatedAt = datetime() + `, { + sourceSignature: key.split('→')[0], + targetSignature: key.split('→')[1], + confidence: correlation.confidence, + avgTimeDelta: correlation.avgTimeDelta, + occurrences: correlation.occurrences + }); + persisted++; + } + } + + log.info(`📊 Persisted ${persisted} error correlations to Neo4j`); + return persisted; + } catch (e) { + log.warn('Neo4j not available for correlation persistence'); + return 0; + } + } + + // ═══════════════════════════════════════════════════════════════════════════ + // 3. 
CAUSAL CHAIN ANALYSIS + // ═══════════════════════════════════════════════════════════════════════════ + + /** + * Analyze if this error is root cause or effect + */ + private analyzeCausalChain(error: ErrorEvent): CausalChain | undefined { + const recentErrors = this.errorHistory.filter( + e => Math.abs(e.timestamp.getTime() - error.timestamp.getTime()) < this.correlationWindow + ); + + if (recentErrors.length < 2) return undefined; + + // Find the earliest error in the chain (likely root cause) + const sortedByTime = [...recentErrors].sort( + (a, b) => a.timestamp.getTime() - b.timestamp.getTime() + ); + + const rootCause = sortedByTime[0]; + const effects = sortedByTime.slice(1); + + // Calculate impact + const affectedServices = new Set(effects.map(e => e.service)); + + return { + rootCause, + effects, + totalImpact: affectedServices.size, + detectedAt: new Date() + }; + } + + // ═══════════════════════════════════════════════════════════════════════════ + // 4. CONTEXT-AWARE INTELLIGENT SOLUTIONS + // ═══════════════════════════════════════════════════════════════════════════ + + /** + * Get solutions ranked by current system context + */ + private async getIntelligentSolutions( + errorMessage: string, + errorEvent: ErrorEvent + ): Promise { + // Get base solutions from knowledge base + const baseSolutions = errorKnowledgeBase.getSolutions(errorMessage); + + // Enhance with context awareness + const intelligentSolutions: IntelligentSolution[] = []; + + for (const solution of baseSolutions) { + const contextScore = this.calculateContextScore(solution, errorEvent); + const predictedSuccess = this.predictSolutionSuccess(solution, errorEvent); + const remediation = this.findRemediationAction(solution); + + intelligentSolutions.push({ + ...solution, + contextScore, + predictedSuccess, + autoRemediable: remediation !== undefined && !remediation.requiresApproval, + remediationAction: remediation, + reasoning: this.generateReasoning(solution, contextScore, 
predictedSuccess) + }); + } + + // Sort by combined intelligence score + return intelligentSolutions.sort((a, b) => { + const scoreA = a.contextScore * 0.3 + a.predictedSuccess * 0.4 + a.confidence * 0.3; + const scoreB = b.contextScore * 0.3 + b.predictedSuccess * 0.4 + b.confidence * 0.3; + return scoreB - scoreA; + }); + } + + /** + * Calculate how relevant a solution is given current system state + */ + private calculateContextScore(solution: Solution, error: ErrorEvent): number { + let score = 0.5; // Base score + + // Check if solution's source is a service that's currently healthy + const serviceHealth = this.systemContext.services.get(error.service); + if (serviceHealth?.status === 'healthy') { + score += 0.1; + } + + // Prefer solutions that don't require unavailable services + if (solution.description.toLowerCase().includes('redis')) { + const redisHealth = this.systemContext.services.get('redis'); + if (redisHealth?.status !== 'healthy') { + score -= 0.2; // Penalize if Redis is down + } + } + + // Boost solutions that have worked recently + if (solution.successCount && solution.successCount > 0) { + const successRate = solution.successCount / ((solution.successCount || 0) + (solution.failureCount || 0)); + score += successRate * 0.2; + } + + // Consider system load + if (this.systemContext.load.cpu > 80 && solution.description.toLowerCase().includes('intensive')) { + score -= 0.1; // Don't suggest CPU-intensive solutions when load is high + } + + return Math.max(0, Math.min(1, score)); + } + + /** + * Predict success based on historical data + */ + private predictSolutionSuccess(solution: Solution, error: ErrorEvent): number { + // Use feedback data if available + if (solution.successCount !== undefined && solution.failureCount !== undefined) { + const total = solution.successCount + solution.failureCount; + if (total >= 3) { + return solution.successCount / total; + } + } + + // Fall back to confidence score with context adjustment + return 
solution.confidence * this.calculateContextScore(solution, error); + } + + /** + * Generate human-readable reasoning for recommendation + */ + private generateReasoning(solution: Solution, contextScore: number, predictedSuccess: number): string { + const reasons: string[] = []; + + if (solution.verified) { + reasons.push('verified solution'); + } + + if (contextScore > 0.7) { + reasons.push('matches current system state'); + } + + if (predictedSuccess > 0.8) { + reasons.push(`${Math.round(predictedSuccess * 100)}% predicted success`); + } + + if (solution.successCount && solution.successCount > 5) { + reasons.push(`worked ${solution.successCount} times before`); + } + + return reasons.length > 0 ? `Recommended: ${reasons.join(', ')}` : 'Standard recommendation'; + } + + // ═══════════════════════════════════════════════════════════════════════════ + // 5. AUTO-REMEDIATION ENGINE + // ═══════════════════════════════════════════════════════════════════════════ + + /** + * Check if error can be auto-remediated + */ + private async checkAutoRemediation( + error: ErrorEvent, + solutions: IntelligentSolution[] + ): Promise<{ action: RemediationAction; queued: boolean } | undefined> { + // Find auto-remediable solutions + const autoRemediable = solutions.find(s => s.autoRemediable && s.remediationAction); + + if (!autoRemediable?.remediationAction) return undefined; + + const action = autoRemediable.remediationAction; + + // Safety checks + if (action.riskLevel === 'high') { + log.warn(`⚠️ High-risk remediation requires approval: ${action.name}`); + this.emit('remediation:approval-required', { error, action }); + return { action, queued: false }; + } + + // Check if we've tried this recently (prevent loops) + const recentTry = this.systemContext.recentRemediations.find( + r => r.id === action.id && r.lastExecuted && + Date.now() - r.lastExecuted.getTime() < 60000 // 1 minute cooldown + ); + + if (recentTry) { + log.info(`⏳ Skipping remediation ${action.name} - cooldown 
active`); + return { action, queued: false }; + } + + // Queue for execution + this.remediationQueue.push({ error, action }); + this.processRemediationQueue(); + + return { action, queued: true }; + } + + /** + * Process remediation queue + */ + private async processRemediationQueue(): Promise { + if (this.isRemediating || this.remediationQueue.length === 0) return; + + this.isRemediating = true; + + while (this.remediationQueue.length > 0) { + const { error, action } = this.remediationQueue.shift()!; + + try { + log.info(`🔧 Executing auto-remediation: ${action.name}`); + this.emit('remediation:started', { error, action }); + + const startTime = Date.now(); + let success = false; + + if (action.command) { + // Execute shell command + const { exec } = await import('child_process'); + await new Promise((resolve, reject) => { + exec(action.command!, { timeout: 30000 }, (err, stdout, stderr) => { + if (err) { + log.error(`Remediation command failed: ${stderr}`); + reject(err); + } else { + log.info(`Remediation output: ${stdout}`); + success = true; + resolve(); + } + }); + }); + } else if (action.apiCall) { + // Execute API call + const response = await fetch(action.apiCall.endpoint, { + method: action.apiCall.method, + headers: { 'Content-Type': 'application/json' }, + body: action.apiCall.body ? 
JSON.stringify(action.apiCall.body) : undefined + }); + success = response.ok; + } + + // Update action stats + action.lastExecuted = new Date(); + action.avgExecutionTime = (action.avgExecutionTime + (Date.now() - startTime)) / 2; + if (success) { + action.successRate = (action.successRate * 0.9) + 0.1; // Exponential moving average + } else { + action.successRate = action.successRate * 0.9; + } + + // Mark error as resolved if successful + if (success) { + error.resolved = true; + error.resolvedBy = `auto:${action.id}`; + error.resolvedAt = new Date(); + } + + this.systemContext.recentRemediations.push(action); + this.emit('remediation:completed', { error, action, success }); + + } catch (e) { + log.error(`Remediation failed: ${e}`); + this.emit('remediation:failed', { error, action, reason: String(e) }); + } + } + + this.isRemediating = false; + } + + /** + * Find remediation action for a solution + */ + private findRemediationAction(solution: Solution): RemediationAction | undefined { + for (const action of this.remediationActions.values()) { + // Check if solution description matches any remediation pattern + if (action.errorPatterns.some(pattern => + solution.description.toLowerCase().includes(pattern.toLowerCase()) + )) { + return action; + } + } + return undefined; + } + + // ═══════════════════════════════════════════════════════════════════════════ + // 6. 
PREDICTIVE ERROR DETECTION + // ═══════════════════════════════════════════════════════════════════════════ + + /** + * Update metrics for predictive analysis + */ + public recordMetric(metric: string, value: number): void { + if (!this.metricsHistory.has(metric)) { + this.metricsHistory.set(metric, []); + } + + const history = this.metricsHistory.get(metric)!; + history.push({ timestamp: new Date(), value }); + + // Keep only last hour + const oneHourAgo = Date.now() - 3600000; + const filtered = history.filter(h => h.timestamp.getTime() > oneHourAgo); + this.metricsHistory.set(metric, filtered); + + // Check predictive signals + this.checkPredictiveSignals(metric, value); + } + + /** + * Check if metric triggers a predictive signal + */ + private checkPredictiveSignals(metric: string, value: number): void { + for (const signal of this.predictiveSignals) { + if (signal.metric !== metric) continue; + + let triggered = false; + switch (signal.operator) { + case '>': triggered = value > signal.threshold; break; + case '<': triggered = value < signal.threshold; break; + case '>=': triggered = value >= signal.threshold; break; + case '<=': triggered = value <= signal.threshold; break; + case '=': triggered = value === signal.threshold; break; + } + + if (triggered) { + signal.lastTriggered = new Date(); + log.warn(`⚠️ PREDICTIVE ALERT: ${metric} = ${value} → ${signal.predictedError} likely in ${signal.leadTime / 1000}s`); + + this.emit('prediction:triggered', { + signal, + currentValue: value, + expectedError: signal.predictedError, + expectedIn: signal.leadTime + }); + } + } + } + + /** + * Predict next likely error based on current error + */ + private predictNextError(error: ErrorEvent): { nextLikelyError: string; confidence: number; timeframe: string } | undefined { + // Find correlations where this error is the source + const errorSignature = this.normalizeSignature(error.message); + let bestPrediction: { error: string; confidence: number; timeDelta: number } | 
undefined; + + for (const [key, correlation] of this.correlations) { + if (key.startsWith(errorSignature) && correlation.correlationType === 'precedes') { + if (!bestPrediction || correlation.confidence > bestPrediction.confidence) { + bestPrediction = { + error: key.split('→')[1], + confidence: correlation.confidence, + timeDelta: correlation.avgTimeDelta + }; + } + } + } + + if (bestPrediction && bestPrediction.confidence > 0.5) { + return { + nextLikelyError: bestPrediction.error, + confidence: bestPrediction.confidence, + timeframe: this.formatTimeDelta(bestPrediction.timeDelta) + }; + } + + return undefined; + } + + // ═══════════════════════════════════════════════════════════════════════════ + // 7. INITIALIZATION & BACKGROUND PROCESSING + // ═══════════════════════════════════════════════════════════════════════════ + + /** + * Initialize default auto-remediation actions + */ + private initializeDefaultRemediations(): void { + const defaultActions: RemediationAction[] = [ + { + id: 'restart-redis', + name: 'Restart Redis Connection', + description: 'Reconnect to Redis when connection is lost', + errorPatterns: ['ECONNREFUSED', 'redis', 'connection refused'], + apiCall: { endpoint: 'http://localhost:3001/api/healing/service/redis', method: 'POST' }, + requiresApproval: false, + riskLevel: 'low', + successRate: 0.85, + avgExecutionTime: 1000 + }, + { + id: 'clear-memory-cache', + name: 'Clear Memory Cache', + description: 'Clear in-memory caches when memory is low', + errorPatterns: ['heap', 'memory', 'OOM'], + apiCall: { endpoint: 'http://localhost:3001/api/system/clear-cache', method: 'POST' }, + requiresApproval: false, + riskLevel: 'low', + successRate: 0.9, + avgExecutionTime: 500 + }, + { + id: 'retry-database', + name: 'Retry Database Connection', + description: 'Attempt to reconnect to database', + errorPatterns: ['database', 'postgres', 'neo4j', 'SQLSTATE'], + apiCall: { endpoint: 'http://localhost:3001/api/healing/service/database', method: 'POST' 
}, + requiresApproval: false, + riskLevel: 'medium', + successRate: 0.75, + avgExecutionTime: 3000 + }, + { + id: 'restart-service', + name: 'Restart Service', + description: 'Full service restart - requires approval', + errorPatterns: ['fatal', 'crash', 'unrecoverable'], + command: 'npm run restart:backend', + requiresApproval: true, + riskLevel: 'high', + successRate: 0.95, + avgExecutionTime: 15000 + } + ]; + + for (const action of defaultActions) { + this.remediationActions.set(action.id, action); + } + } + + /** + * Initialize predictive signals + */ + private initializePredictiveSignals(): void { + this.predictiveSignals = [ + { + metric: 'memory_usage_percent', + threshold: 85, + operator: '>', + predictedError: 'JavaScript heap out of memory', + leadTime: 300000, // 5 minutes + confidence: 0.8 + }, + { + metric: 'redis_connections', + threshold: 95, + operator: '>', + predictedError: 'ECONNREFUSED on Redis', + leadTime: 60000, // 1 minute + confidence: 0.7 + }, + { + metric: 'postgres_connections', + threshold: 90, + operator: '>', + predictedError: 'SQLSTATE 53300 too many connections', + leadTime: 120000, // 2 minutes + confidence: 0.75 + }, + { + metric: 'event_loop_lag_ms', + threshold: 500, + operator: '>', + predictedError: 'Event loop blocked - degraded performance', + leadTime: 30000, // 30 seconds + confidence: 0.85 + } + ]; + } + + /** + * Start background processing + */ + private startBackgroundProcessing(): void { + // Cleanup old history every 5 minutes + setInterval(() => { + const cutoff = Date.now() - 3600000; // 1 hour + this.errorHistory = this.errorHistory.filter(e => e.timestamp.getTime() > cutoff); + + // Cleanup old correlations with low confidence + for (const [key, correlation] of this.correlations) { + if (correlation.confidence < 0.3 && correlation.occurrences < 3) { + this.correlations.delete(key); + } + } + }, 300000); + + // Persist correlations to Neo4j every 10 minutes + setInterval(() => { + 
this.persistCorrelationsToNeo4j(); + }, 600000); + } + + // ═══════════════════════════════════════════════════════════════════════════ + // HELPER METHODS + // ═══════════════════════════════════════════════════════════════════════════ + + private generateErrorId(): string { + return `err_${Date.now()}_${Math.random().toString(36).substring(2, 8)}`; + } + + private assessSeverity(message: string, context: Record): ErrorEvent['severity'] { + const lower = message.toLowerCase(); + if (lower.includes('fatal') || lower.includes('critical') || lower.includes('crash')) return 'critical'; + if (lower.includes('error') || lower.includes('failed') || lower.includes('refused')) return 'high'; + if (lower.includes('warning') || lower.includes('timeout')) return 'medium'; + return 'low'; + } + + private recordError(error: ErrorEvent): void { + this.errorHistory.push(error); + this.systemContext.activeErrors.push(error); + + // Trim history if too large + if (this.errorHistory.length > this.MAX_HISTORY) { + this.errorHistory = this.errorHistory.slice(-this.MAX_HISTORY / 2); + } + } + + private getCorrelationKey(sourceMsg: string, targetMsg: string): string { + return `${this.normalizeSignature(sourceMsg)}→${this.normalizeSignature(targetMsg)}`; + } + + private normalizeSignature(msg: string): string { + return msg + .toLowerCase() + .replace(/[0-9]+/g, 'N') + .replace(/\s+/g, ' ') + .substring(0, 100); + } + + private formatTimeDelta(ms: number): string { + if (ms < 1000) return `${ms}ms`; + if (ms < 60000) return `${Math.round(ms / 1000)}s`; + return `${Math.round(ms / 60000)}min`; + } + + // ═══════════════════════════════════════════════════════════════════════════ + // PUBLIC API + // ═══════════════════════════════════════════════════════════════════════════ + + /** + * Update system context (called by health checks) + */ + public updateSystemContext(updates: Partial): void { + Object.assign(this.systemContext, updates); + } + + /** + * Get current intelligence stats + */ 
+ public getStats() { + return { + errorHistory: this.errorHistory.length, + correlations: this.correlations.size, + predictiveSignals: this.predictiveSignals.length, + remediationActions: this.remediationActions.size, + activeErrors: this.systemContext.activeErrors.length, + recentRemediations: this.systemContext.recentRemediations.length + }; + } + + /** + * Get correlations for visualization + */ + public getCorrelations(): ErrorCorrelation[] { + return Array.from(this.correlations.values()) + .filter(c => c.confidence > 0.5) + .sort((a, b) => b.confidence - a.confidence); + } + + /** + * Manually approve a pending remediation + */ + public approveRemediation(actionId: string): boolean { + const action = this.remediationActions.get(actionId); + if (action) { + action.requiresApproval = false; + return true; + } + return false; + } +} + +// Singleton export +export const cognitiveErrorIntelligence = CognitiveErrorIntelligence.getInstance(); diff --git a/apps/backend/src/services/ErrorKnowledgeBase.ts b/apps/backend/src/services/ErrorKnowledgeBase.ts new file mode 100644 index 0000000000000000000000000000000000000000..3f1fb156898d695b11a14d4aafe4c8e565dc79ec --- /dev/null +++ b/apps/backend/src/services/ErrorKnowledgeBase.ts @@ -0,0 +1,700 @@ +/** + * ╔═══════════════════════════════════════════════════════════════════════════╗ + * ║ ERROR KNOWLEDGE BASE ║ + * ║═══════════════════════════════════════════════════════════════════════════║ + * ║ Samler fejlmønstre fra eksterne kilder (GitHub, HuggingFace, CVE, etc.) 
║ + * ║ Bruges af SelfHealingAdapter til at lære og forudsige fejl ║ + * ╚═══════════════════════════════════════════════════════════════════════════╝ + */ + +import crypto from 'crypto'; +import { logger } from '../utils/logger.js'; + +// Lazy import for Neo4j to avoid circular dependencies +let neo4jServiceInstance: any = null; +async function getNeo4jService() { + if (!neo4jServiceInstance) { + try { + const { neo4jService } = await import('../database/Neo4jService.js'); + neo4jServiceInstance = neo4jService; + } catch (e) { + // Neo4j not available + } + } + return neo4jServiceInstance; +} + +const log = logger.child({ module: 'ErrorKnowledgeBase' }); + +// ═══════════════════════════════════════════════════════════════════════════ +// TYPES +// ═══════════════════════════════════════════════════════════════════════════ + +export interface ErrorPattern { + id: string; // Unique hash of error signature + source: ErrorSource; + category: ErrorCategory; + signature: string; // Error message pattern/regex + description: string; + severity: 'low' | 'medium' | 'high' | 'critical'; + solutions: Solution[]; + tags: string[]; + cveId?: string; // CVE identifier if applicable + cweId?: string; // CWE identifier if applicable + language?: string; // Programming language + framework?: string; // Framework (e.g., Express, React) + occurrences: number; // Times seen + lastSeen: Date; + createdAt: Date; + metadata?: Record; +} + +export interface Solution { + description: string; + code?: string; + confidence: number; // 0-1 + source: string; + verified: boolean; + // Feedback tracking + successCount?: number; // Times this solution worked + failureCount?: number; // Times this solution failed + lastUsed?: Date; +} + +export type ErrorSource = + | 'github-defects4j' + | 'github-bugsjs' + | 'huggingface-cve' + | 'huggingface-defect-detection' + | 'huggingface-hdfs-logs' + | 'microsoft-office-api' + | 'microsoft-graph-api' + | 'internal-logs' + | 'user-reported' + | 
'gaia-aiops' + | 'nodejs-system' + | 'http-standards' + | 'postgresql-official' + | 'typescript-compiler' + | 'curated-knowledge'; + +export type ErrorCategory = + | 'runtime' + | 'syntax' + | 'type' + | 'security' + | 'performance' + | 'network' + | 'database' + | 'api' + | 'office' + | 'authentication' + | 'memory' + | 'concurrency' + | 'configuration' + | 'dependency'; + +// ═══════════════════════════════════════════════════════════════════════════ +// ERROR KNOWLEDGE BASE CLASS +// ═══════════════════════════════════════════════════════════════════════════ + +export class ErrorKnowledgeBase { + private static instance: ErrorKnowledgeBase; + private patterns: Map = new Map(); + private signatureIndex: Map> = new Map(); // For fast lookup + private sourceStats: Map = new Map(); + + private constructor() { + this.initializeBuiltInPatterns(); + } + + public static getInstance(): ErrorKnowledgeBase { + if (!ErrorKnowledgeBase.instance) { + ErrorKnowledgeBase.instance = new ErrorKnowledgeBase(); + } + return ErrorKnowledgeBase.instance; + } + + /** + * Generate unique ID from error signature to prevent duplicates + */ + private generateId(signature: string, source: ErrorSource): string { + const normalized = signature.toLowerCase().trim().replace(/\s+/g, ' '); + return crypto.createHash('sha256') + .update(`${normalized}:${source}`) + .digest('hex') + .substring(0, 16); + } + + /** + * Normalize error signature for comparison + */ + private normalizeSignature(sig: string): string { + return sig + .toLowerCase() + .replace(/0x[0-9a-f]+/gi, '0xHEX') // Hex addresses + .replace(/\d+/g, 'N') // Numbers + .replace(/['"][^'"]*['"]/g, '"STR"') // String literals + .replace(/\s+/g, ' ') // Whitespace + .trim(); + } + + /** + * Check if pattern already exists (dedupe) + */ + public isDuplicate(signature: string, source: ErrorSource): boolean { + const id = this.generateId(signature, source); + if (this.patterns.has(id)) { + return true; + } + + // Also check normalized 
signature across all sources + const normalized = this.normalizeSignature(signature); + const existing = this.signatureIndex.get(normalized); + return existing !== undefined && existing.size > 0; + } + + /** + * Ingest single error pattern (with dedupe) + */ + public ingest(pattern: Omit): boolean { + const id = this.generateId(pattern.signature, pattern.source); + + // Check for exact duplicate + if (this.patterns.has(id)) { + // Update existing + const existing = this.patterns.get(id)!; + existing.occurrences++; + existing.lastSeen = new Date(); + // Merge solutions + for (const sol of pattern.solutions) { + if (!existing.solutions.some(s => s.description === sol.description)) { + existing.solutions.push(sol); + } + } + return false; // Not new + } + + // Check for similar pattern (different source) + const normalized = this.normalizeSignature(pattern.signature); + if (!this.signatureIndex.has(normalized)) { + this.signatureIndex.set(normalized, new Set()); + } + this.signatureIndex.get(normalized)!.add(id); + + // Create new pattern + const newPattern: ErrorPattern = { + ...pattern, + id, + occurrences: 1, + lastSeen: new Date(), + createdAt: new Date() + }; + + this.patterns.set(id, newPattern); + + // Update source stats + const count = this.sourceStats.get(pattern.source) || 0; + this.sourceStats.set(pattern.source, count + 1); + + log.info(`Ingested error pattern: ${pattern.category}/${pattern.signature.substring(0, 50)}...`); + return true; // New pattern + } + + /** + * Batch ingest with progress tracking + */ + public async batchIngest( + patterns: Omit[], + source: ErrorSource + ): Promise<{ total: number; new: number; duplicates: number }> { + let newCount = 0; + let dupeCount = 0; + + for (const pattern of patterns) { + const isNew = this.ingest({ ...pattern, source }); + if (isNew) newCount++; + else dupeCount++; + } + + log.info(`Batch ingest from ${source}: ${newCount} new, ${dupeCount} duplicates`); + return { total: patterns.length, new: newCount, 
duplicates: dupeCount }; + } + + /** + * Find matching patterns for an error + */ + public findMatches(errorMessage: string, limit = 5): ErrorPattern[] { + const normalized = this.normalizeSignature(errorMessage); + const results: { pattern: ErrorPattern; score: number }[] = []; + + for (const pattern of this.patterns.values()) { + const patternNorm = this.normalizeSignature(pattern.signature); + const score = this.similarityScore(normalized, patternNorm); + if (score > 0.3) { // Lowered threshold for better fuzzy matching + results.push({ pattern, score }); + } + } + + return results + .sort((a, b) => b.score - a.score) + .slice(0, limit) + .map(r => r.pattern); + } + + /** + * Enhanced similarity score combining multiple strategies + */ + private similarityScore(query: string, signature: string): number { + // Strategy 1: Substring containment (highest priority) + if (signature.includes(query) || query.includes(signature)) { + return 1.0; + } + + // Strategy 2: Key error code matching (ECONNREFUSED, SQLSTATE, HTTP 4xx/5xx) + const errorCodes = query.match(/\b(E[A-Z]+|SQLSTATE\s*\d+|HTTP\s*\d{3}|TS\d{4})\b/gi) || []; + for (const code of errorCodes) { + if (signature.toUpperCase().includes(code.toUpperCase().replace(/\s+/g, ' '))) { + return 0.95; + } + } + + // Strategy 3: Jaccard similarity on words + const setA = new Set(query.split(/\s+/).filter(w => w.length > 2)); + const setB = new Set(signature.split(/\s+/).filter(w => w.length > 2)); + const intersection = new Set([...setA].filter(x => setB.has(x))); + const union = new Set([...setA, ...setB]); + const jaccard = union.size > 0 ? intersection.size / union.size : 0; + + // Strategy 4: Coverage - what % of query words appear in signature + const coverage = setA.size > 0 ? 
intersection.size / setA.size : 0; + + // Combined score: weight coverage more heavily + return Math.max(jaccard, coverage * 0.8); + } + + /** + * Get suggested solutions for an error + */ + public getSolutions(errorMessage: string): Solution[] { + const matches = this.findMatches(errorMessage, 3); + const solutions: Solution[] = []; + + for (const match of matches) { + solutions.push(...match.solutions); + } + + return solutions + .sort((a, b) => b.confidence - a.confidence) + .slice(0, 5); + } + + /** + * Get statistics + */ + public getStats() { + const categoryCount: Record = {}; + const severityCount: Record = {}; + + for (const pattern of this.patterns.values()) { + categoryCount[pattern.category] = (categoryCount[pattern.category] || 0) + 1; + severityCount[pattern.severity] = (severityCount[pattern.severity] || 0) + 1; + } + + return { + totalPatterns: this.patterns.size, + bySource: Object.fromEntries(this.sourceStats), + byCategory: categoryCount, + bySeverity: severityCount + }; + } + + /** + * Export all patterns + */ + public exportPatterns(): ErrorPattern[] { + return Array.from(this.patterns.values()); + } + + /** + * Initialize with common built-in patterns + */ + private initializeBuiltInPatterns() { + const builtInPatterns: Omit[] = [ + // Network errors + { + source: 'internal-logs', + category: 'network', + signature: 'ECONNREFUSED', + description: 'Connection refused - target service not running or firewall blocking', + severity: 'high', + solutions: [ + { description: 'Check if target service is running', confidence: 0.9, source: 'built-in', verified: true }, + { description: 'Verify firewall rules allow connection', confidence: 0.7, source: 'built-in', verified: true }, + { description: 'Check if port number is correct', confidence: 0.8, source: 'built-in', verified: true } + ], + tags: ['network', 'connection', 'tcp'] + }, + { + source: 'internal-logs', + category: 'network', + signature: 'ETIMEDOUT', + description: 'Connection timed out - 
network latency or service overload', + severity: 'medium', + solutions: [ + { description: 'Increase timeout value', confidence: 0.8, source: 'built-in', verified: true }, + { description: 'Check network connectivity', confidence: 0.7, source: 'built-in', verified: true }, + { description: 'Implement retry with exponential backoff', confidence: 0.9, source: 'built-in', verified: true } + ], + tags: ['network', 'timeout', 'latency'] + }, + // Database errors + { + source: 'internal-logs', + category: 'database', + signature: 'Neo4jError: ServiceUnavailable', + description: 'Neo4j database not reachable', + severity: 'critical', + solutions: [ + { description: 'Check Neo4j container/service status', confidence: 0.9, source: 'built-in', verified: true }, + { description: 'Verify NEO4J_URI environment variable', confidence: 0.8, source: 'built-in', verified: true }, + { description: 'Check AuraDB connection limits', confidence: 0.6, source: 'built-in', verified: true } + ], + tags: ['neo4j', 'database', 'graph'] + }, + { + source: 'internal-logs', + category: 'database', + signature: 'PrismaClientKnownRequestError', + description: 'Prisma database query error', + severity: 'high', + solutions: [ + { description: 'Check if database schema is migrated', confidence: 0.8, source: 'built-in', verified: true }, + { description: 'Verify DATABASE_URL is correct', confidence: 0.7, source: 'built-in', verified: true }, + { description: 'Run npx prisma db push', confidence: 0.9, source: 'built-in', verified: true } + ], + tags: ['prisma', 'postgresql', 'database'] + }, + // TypeScript/JavaScript errors + { + source: 'internal-logs', + category: 'type', + signature: 'TypeError: Cannot read properties of undefined', + description: 'Accessing property on undefined value', + severity: 'medium', + solutions: [ + { description: 'Add null/undefined check before accessing', confidence: 0.9, source: 'built-in', verified: true }, + { description: 'Use optional chaining (?.) 
operator', confidence: 0.95, source: 'built-in', verified: true }, + { description: 'Provide default value with ?? operator', confidence: 0.8, source: 'built-in', verified: true } + ], + tags: ['typescript', 'javascript', 'null-safety'] + }, + { + source: 'internal-logs', + category: 'syntax', + signature: 'SyntaxError: Unexpected token', + description: 'Invalid JavaScript/JSON syntax', + severity: 'high', + solutions: [ + { description: 'Check for missing brackets or quotes', confidence: 0.8, source: 'built-in', verified: true }, + { description: 'Validate JSON with JSON.parse in try-catch', confidence: 0.7, source: 'built-in', verified: true }, + { description: 'Check for trailing commas in JSON', confidence: 0.6, source: 'built-in', verified: true } + ], + tags: ['syntax', 'json', 'parsing'] + }, + // Office API errors + { + source: 'microsoft-office-api', + category: 'office', + signature: 'InvalidReference', + description: 'Office API reference is not valid for operation', + severity: 'medium', + solutions: [ + { description: 'Ensure object exists before operation', confidence: 0.9, source: 'microsoft-docs', verified: true }, + { description: 'Check if document is still open', confidence: 0.7, source: 'microsoft-docs', verified: true } + ], + tags: ['office', 'excel', 'word', 'api'], + cweId: 'CWE-476' + }, + { + source: 'microsoft-graph-api', + category: 'api', + signature: 'Error 429: Too Many Requests', + description: 'Microsoft Graph API rate limit exceeded', + severity: 'medium', + solutions: [ + { description: 'Implement exponential backoff retry', confidence: 0.95, source: 'microsoft-docs', verified: true }, + { description: 'Check Retry-After header for wait time', confidence: 0.9, source: 'microsoft-docs', verified: true }, + { description: 'Batch multiple requests together', confidence: 0.8, source: 'microsoft-docs', verified: true } + ], + tags: ['graph-api', 'rate-limit', 'throttling'] + }, + // Security patterns + { + source: 'huggingface-cve', + 
category: 'security', + signature: 'SQL injection detected', + description: 'Potential SQL injection vulnerability', + severity: 'critical', + solutions: [ + { description: 'Use parameterized queries', confidence: 0.99, source: 'OWASP', verified: true }, + { description: 'Sanitize user input', confidence: 0.9, source: 'OWASP', verified: true }, + { description: 'Use ORM instead of raw SQL', confidence: 0.85, source: 'best-practice', verified: true } + ], + tags: ['security', 'sql', 'injection'], + cweId: 'CWE-89' + }, + { + source: 'huggingface-cve', + category: 'security', + signature: 'XSS vulnerability', + description: 'Cross-site scripting vulnerability detected', + severity: 'critical', + solutions: [ + { description: 'Escape HTML output', confidence: 0.95, source: 'OWASP', verified: true }, + { description: 'Use Content Security Policy headers', confidence: 0.9, source: 'OWASP', verified: true }, + { description: 'Validate and sanitize input', confidence: 0.85, source: 'best-practice', verified: true } + ], + tags: ['security', 'xss', 'injection'], + cweId: 'CWE-79' + }, + // Memory errors + { + source: 'internal-logs', + category: 'memory', + signature: 'FATAL ERROR: CALL_AND_RETRY_LAST Allocation failed', + description: 'Node.js heap out of memory', + severity: 'critical', + solutions: [ + { description: 'Increase Node.js heap size: --max-old-space-size=4096', confidence: 0.9, source: 'built-in', verified: true }, + { description: 'Check for memory leaks with --inspect', confidence: 0.8, source: 'built-in', verified: true }, + { description: 'Implement pagination for large data sets', confidence: 0.85, source: 'best-practice', verified: true } + ], + tags: ['memory', 'heap', 'oom'] + }, + // Redis errors + { + source: 'internal-logs', + category: 'database', + signature: 'Redis connection error', + description: 'Cannot connect to Redis server', + severity: 'high', + solutions: [ + { description: 'Check if Redis container is running', confidence: 0.9, 
source: 'built-in', verified: true }, + { description: 'Verify REDIS_URL environment variable', confidence: 0.8, source: 'built-in', verified: true }, + { description: 'Check Redis maxmemory settings', confidence: 0.6, source: 'built-in', verified: true } + ], + tags: ['redis', 'cache', 'connection'] + } + ]; + + for (const pattern of builtInPatterns) { + this.ingest(pattern); + } + + log.info(`Initialized ErrorKnowledgeBase with ${this.patterns.size} built-in patterns`); + } + + // ═══════════════════════════════════════════════════════════════════════════ + // NEO4J PERSISTENCE - Feedback Loop + // ═══════════════════════════════════════════════════════════════════════════ + + /** + * Persist a single pattern to Neo4j + */ + public async persistToNeo4j(pattern: ErrorPattern): Promise { + try { + const neo4j = await getNeo4jService(); + if (!neo4j) { + log.warn('Neo4j not available, skipping persistence'); + return false; + } + + const query = ` + MERGE (ep:ErrorPattern {id: $id}) + SET ep.source = $source, + ep.category = $category, + ep.signature = $signature, + ep.description = $description, + ep.severity = $severity, + ep.tags = $tags, + ep.cveId = $cveId, + ep.cweId = $cweId, + ep.language = $language, + ep.framework = $framework, + ep.occurrences = $occurrences, + ep.lastSeen = datetime($lastSeen), + ep.createdAt = datetime($createdAt), + ep.solutions = $solutions + RETURN ep + `; + + await neo4j.runQuery(query, { + id: pattern.id, + source: pattern.source, + category: pattern.category, + signature: pattern.signature, + description: pattern.description, + severity: pattern.severity, + tags: pattern.tags, + cveId: pattern.cveId || null, + cweId: pattern.cweId || null, + language: pattern.language || null, + framework: pattern.framework || null, + occurrences: pattern.occurrences, + lastSeen: pattern.lastSeen.toISOString(), + createdAt: pattern.createdAt.toISOString(), + solutions: JSON.stringify(pattern.solutions) + }); + + log.debug(`Persisted pattern 
${pattern.id} to Neo4j`); + return true; + } catch (error) { + log.error('Failed to persist pattern to Neo4j:', error); + return false; + } + } + + /** + * Load all patterns from Neo4j + */ + public async loadFromNeo4j(): Promise { + try { + const neo4j = await getNeo4jService(); + if (!neo4j) { + log.warn('Neo4j not available, skipping load'); + return 0; + } + + const query = ` + MATCH (ep:ErrorPattern) + RETURN ep + ORDER BY ep.occurrences DESC + `; + + const records = await neo4j.runQuery(query); + let loaded = 0; + + for (const record of records) { + const ep = record.ep?.properties || record.ep; + if (!ep || !ep.id) continue; + + // Skip if already in memory + if (this.patterns.has(ep.id)) continue; + + const pattern: ErrorPattern = { + id: ep.id, + source: ep.source as ErrorSource, + category: ep.category as ErrorCategory, + signature: ep.signature, + description: ep.description, + severity: ep.severity, + tags: ep.tags || [], + cveId: ep.cveId || undefined, + cweId: ep.cweId || undefined, + language: ep.language || undefined, + framework: ep.framework || undefined, + occurrences: ep.occurrences?.toNumber?.() || ep.occurrences || 1, + lastSeen: new Date(ep.lastSeen), + createdAt: new Date(ep.createdAt), + solutions: JSON.parse(ep.solutions || '[]') + }; + + this.patterns.set(pattern.id, pattern); + loaded++; + } + + log.info(`Loaded ${loaded} patterns from Neo4j`); + return loaded; + } catch (error) { + log.error('Failed to load patterns from Neo4j:', error); + return 0; + } + } + + /** + * Record feedback for a solution (success or failure) + */ + public async recordFeedback( + patternId: string, + solutionIndex: number, + success: boolean + ): Promise { + const pattern = this.patterns.get(patternId); + if (!pattern || !pattern.solutions[solutionIndex]) { + return false; + } + + const solution = pattern.solutions[solutionIndex]; + + // Initialize counters if needed + solution.successCount = solution.successCount || 0; + solution.failureCount = 
solution.failureCount || 0; + + // Update counters + if (success) { + solution.successCount++; + } else { + solution.failureCount++; + } + solution.lastUsed = new Date(); + + // Update confidence based on feedback (Bayesian-ish update) + const totalFeedback = solution.successCount + solution.failureCount; + if (totalFeedback >= 3) { + const successRate = solution.successCount / totalFeedback; + // Blend original confidence with observed success rate + solution.confidence = (solution.confidence * 0.3) + (successRate * 0.7); + // Clamp to valid range + solution.confidence = Math.max(0.1, Math.min(0.99, solution.confidence)); + } + + // Persist to Neo4j + await this.persistToNeo4j(pattern); + + log.info(`Recorded ${success ? 'success' : 'failure'} feedback for pattern ${patternId}, solution ${solutionIndex}. New confidence: ${solution.confidence.toFixed(2)}`); + return true; + } + + /** + * Persist all patterns to Neo4j (batch) + */ + public async persistAllToNeo4j(): Promise<{ success: number; failed: number }> { + let success = 0; + let failed = 0; + + for (const pattern of this.patterns.values()) { + const result = await this.persistToNeo4j(pattern); + if (result) success++; + else failed++; + } + + log.info(`Persisted ${success} patterns to Neo4j (${failed} failed)`); + return { success, failed }; + } + + /** + * Get solution with feedback stats + */ + public getSolutionsWithStats(errorMessage: string): (Solution & { patternId: string; solutionIndex: number })[] { + const matches = this.findMatches(errorMessage, 3); + const solutions: (Solution & { patternId: string; solutionIndex: number })[] = []; + + for (const match of matches) { + match.solutions.forEach((sol, index) => { + solutions.push({ + ...sol, + patternId: match.id, + solutionIndex: index + }); + }); + } + + return solutions + .sort((a, b) => b.confidence - a.confidence) + .slice(0, 5); + } +} + +// Singleton export +export const errorKnowledgeBase = ErrorKnowledgeBase.getInstance(); diff --git 
a/apps/backend/src/services/EventBus.ts b/apps/backend/src/services/EventBus.ts new file mode 100644 index 0000000000000000000000000000000000000000..2a59e439593d57d1a14482b826fe5d95f3e046bf --- /dev/null +++ b/apps/backend/src/services/EventBus.ts @@ -0,0 +1,168 @@ +import { EventEmitter } from 'events'; +import os from 'os'; + +type Handler = (payload: any) => Promise | void; + +interface EventBusOptions { + redisUrl?: string; + streamKey?: string; + groupName?: string; + consumerName?: string; + readCount?: number; + blockMs?: number; +} + +class PersistentEventBus extends EventEmitter { + private redis: any; + private ready = false; + private handlers: Map> = new Map(); + private polling = false; + private readonly streamKey: string; + private readonly group: string; + private readonly consumer: string; + private readonly blockMs: number; + private readonly readCount: number; + private readonly redisUrl: string; + + constructor(options: EventBusOptions = {}) { + super(); + this.redisUrl = options.redisUrl || process.env.REDIS_URL || 'redis://localhost:6379'; + this.streamKey = options.streamKey || 'widgetdc:events'; + this.group = options.groupName || 'widgetdc-consumers'; + this.consumer = options.consumerName || `${os.hostname()}-${process.pid}`; + this.blockMs = options.blockMs ?? 1000; + this.readCount = options.readCount ?? 
20; + } + + isReady(): boolean { + return this.ready; + } + + async init(): Promise { + if (this.ready) return; + try { + const Redis = (await import('ioredis')).default; + this.redis = new Redis(this.redisUrl, { maxRetriesPerRequest: 3 }); + await this.ensureGroup(); + this.ready = true; + console.log('🔴 PersistentEventBus: Redis Streams ready'); + } catch (err: any) { + console.warn(`⚠️ PersistentEventBus fallback to in-memory: ${err?.message || err}`); + this.ready = false; + } + } + + async publish(eventType: string, payload: any): Promise { + const entry = JSON.stringify({ eventType, payload, ts: Date.now() }); + if (!this.ready || !this.redis) { + // in-memory fallback + this.emit(eventType, payload); + this.emit('*', { eventType, payload }); + return; + } + + try { + await this.redis.xadd(this.streamKey, '*', 'type', eventType, 'data', entry); + } catch (err: any) { + console.error('Failed to publish event, falling back to memory:', err?.message || err); + this.emit(eventType, payload); + this.emit('*', { eventType, payload }); + } + } + + subscribe(eventType: string, handler: Handler): void { + if (!this.handlers.has(eventType)) this.handlers.set(eventType, new Set()); + this.handlers.get(eventType)!.add(handler); + + // Local immediate delivery + this.on(eventType, handler); + + if (this.ready && !this.polling) { + this.startPolling(); + } + } + + remove(eventType: string, handler: Handler): void { + this.off(eventType, handler); + this.handlers.get(eventType)?.delete(handler); + } + + private async ensureGroup(): Promise { + try { + await this.redis.xgroup('CREATE', this.streamKey, this.group, '0', 'MKSTREAM'); + } catch (err: any) { + // Ignore BUSYGROUP + if (!String(err?.message).includes('BUSYGROUP')) { + throw err; + } + } + } + + private async startPolling(): Promise { + if (this.polling || !this.redis) return; + this.polling = true; + + const loop = async () => { + while (this.polling && this.redis) { + try { + const entries = await 
this.redis.xreadgroup( + 'GROUP', this.group, this.consumer, + 'COUNT', this.readCount, + 'BLOCK', this.blockMs, + 'STREAMS', this.streamKey, + '>' + ); + + if (entries) { + const [_, messages] = entries[0]; + for (const [id, fields] of messages) { + const payload = this.parseFields(fields as any[]); + this.dispatch(payload); + await this.redis.xack(this.streamKey, this.group, id); + } + } + } catch (err: any) { + console.error('PersistentEventBus poll error:', err?.message || err); + await new Promise(r => setTimeout(r, 500)); + } + } + }; + + loop(); + } + + private parseFields(fields: any[]): { eventType: string; payload: any } { + const obj: Record = {}; + for (let i = 0; i < fields.length; i += 2) { + obj[fields[i]] = fields[i + 1]; + } + try { + const parsed = JSON.parse(obj['data']); + return { eventType: parsed.eventType, payload: parsed.payload }; + } catch { + return { eventType: obj['type'] || 'unknown', payload: obj['data'] }; + } + } + + private dispatch(entry: { eventType: string; payload: any }) { + const handlers = this.handlers.get(entry.eventType); + if (handlers) { + handlers.forEach(async (handler) => { + try { + await handler(entry.payload); + } catch (err: any) { + console.error(`Handler error for ${entry.eventType}:`, err?.message || err); + } + }); + } + } + + async shutdown(): Promise { + this.polling = false; + if (this.redis) { + await this.redis.quit(); + } + } +} + +export const persistentEventBus = new PersistentEventBus(); diff --git a/apps/backend/src/services/FacebookOAuthService.ts b/apps/backend/src/services/FacebookOAuthService.ts new file mode 100644 index 0000000000000000000000000000000000000000..4195442580fc3dc330a7f430cc00eaac44d922ce --- /dev/null +++ b/apps/backend/src/services/FacebookOAuthService.ts @@ -0,0 +1,347 @@ +/** + * Facebook OAuth Service + * Handles OAuth 2.0 Authorization Code Flow for Facebook + */ + +import axios from 'axios'; +import { createHash, randomBytes } from 'crypto'; +import { humanApprovalService, 
ApprovalRiskLevel } from './HumanApprovalService.js'; + +interface FacebookTokenResponse { + access_token: string; + token_type: string; + expires_in: number; +} + +interface FacebookUserProfile { + id: string; + name: string; + email?: string; + picture?: { + data: { + url: string; + }; + }; +} + +interface FacebookPost { + id: string; + message?: string; + created_time: string; + full_picture?: string; + permalink_url?: string; +} + +interface FacebookPhoto { + id: string; + images: Array<{ source: string; width: number; height: number }>; + created_time: string; + name?: string; + link?: string; +} + +export class FacebookOAuthService { + private appId: string; + private appSecret: string; + private redirectUri: string; + private graphApiVersion = 'v18.0'; + private tokens: Map = new Map(); + + constructor() { + this.appId = process.env.FACEBOOK_APP_ID || ''; + this.appSecret = process.env.FACEBOOK_APP_SECRET || ''; + this.redirectUri = process.env.FACEBOOK_REDIRECT_URI || 'http://localhost:3001/api/auth/facebook/callback'; + + if (!this.appId || !this.appSecret) { + console.warn('⚠️ Facebook OAuth credentials not configured'); + } + } + + /** + * Generate OAuth authorization URL + */ + getAuthorizationUrl(state?: string): string { + const scopes = [ + 'public_profile', + 'email', + 'user_posts', + 'user_photos', + ]; + + const params = new URLSearchParams({ + client_id: this.appId, + redirect_uri: this.redirectUri, + scope: scopes.join(','), + response_type: 'code', + state: state || this.generateState(), + }); + + return `https://www.facebook.com/${this.graphApiVersion}/dialog/oauth?${params.toString()}`; + } + + /** + * Exchange authorization code for access token + */ + async exchangeCodeForToken(code: string): Promise<{ accessToken: string; userId: string }> { + try { + const params = new URLSearchParams({ + client_id: this.appId, + client_secret: this.appSecret, + redirect_uri: this.redirectUri, + code, + }); + + const response = await axios.get( + 
`https://graph.facebook.com/${this.graphApiVersion}/oauth/access_token?${params.toString()}` + ); + + const { access_token, expires_in } = response.data; + + // Get user profile to obtain user ID + const profile = await this.getUserProfile(access_token); + + // Store token with expiration + const expiresAt = Date.now() + (expires_in * 1000); + this.tokens.set(profile.id, { + accessToken: access_token, + expiresAt, + userId: profile.id, + }); + + console.log(`✅ Facebook token obtained for user ${profile.id}`); + + return { + accessToken: access_token, + userId: profile.id, + }; + } catch (error: any) { + console.error('❌ Facebook token exchange failed:', error.response?.data || error.message); + throw new Error(`Facebook OAuth failed: ${error.response?.data?.error?.message || error.message}`); + } + } + + /** + * Get user profile + */ + async getUserProfile(accessToken: string): Promise { + try { + const response = await axios.get( + `https://graph.facebook.com/${this.graphApiVersion}/me`, + { + params: { + access_token: accessToken, + fields: 'id,name,email,picture', + }, + } + ); + + return response.data; + } catch (error: any) { + throw new Error(`Failed to fetch user profile: ${error.response?.data?.error?.message || error.message}`); + } + } + + /** + * Get user's posts + */ + async getUserPosts(userId: string, limit: number = 25): Promise { + const tokenData = this.tokens.get(userId); + if (!tokenData) { + throw new Error('No access token found for user. Please authenticate first.'); + } + + if (Date.now() > tokenData.expiresAt) { + throw new Error('Access token expired. 
Please re-authenticate.'); + } + + try { + const response = await axios.get( + `https://graph.facebook.com/${this.graphApiVersion}/${userId}/posts`, + { + params: { + access_token: tokenData.accessToken, + fields: 'id,message,created_time,full_picture,permalink_url', + limit, + }, + } + ); + + return response.data.data || []; + } catch (error: any) { + console.error('❌ Failed to fetch posts:', error.response?.data); + throw new Error(`Failed to fetch posts: ${error.response?.data?.error?.message || error.message}`); + } + } + + /** + * Get specific photo + */ + async getPhoto(photoId: string, userId: string): Promise { + const tokenData = this.tokens.get(userId); + if (!tokenData) { + throw new Error('No access token found for user. Please authenticate first.'); + } + + if (Date.now() > tokenData.expiresAt) { + throw new Error('Access token expired. Please re-authenticate.'); + } + + try { + const response = await axios.get( + `https://graph.facebook.com/${this.graphApiVersion}/${photoId}`, + { + params: { + access_token: tokenData.accessToken, + fields: 'id,images,created_time,name,link', + }, + } + ); + + return response.data; + } catch (error: any) { + console.error('❌ Failed to fetch photo:', error.response?.data); + throw new Error(`Failed to fetch photo: ${error.response?.data?.error?.message || error.message}`); + } + } + + /** + * Download and analyze photo + */ + async downloadAndAnalyzePhoto(photoId: string, userId: string): Promise<{ + photo: FacebookPhoto; + imageBuffer: Buffer; + analysis?: any; + }> { + const photo = await this.getPhoto(photoId, userId); + + // Get highest resolution image + const largestImage = photo.images.reduce((prev, current) => + (current.width > prev.width) ? 
current : prev + ); + + // Download image + const imageResponse = await axios.get(largestImage.source, { + responseType: 'arraybuffer', + }); + + const imageBuffer = Buffer.from(imageResponse.data); + + console.log(`📸 Downloaded photo ${photoId} (${largestImage.width}x${largestImage.height})`); + + return { + photo, + imageBuffer, + // TODO: Add Gemini Vision API analysis here + }; + } + + /** + * Ingest photo into knowledge base + * 🔐 REQUIRES HUMAN APPROVAL - No data can be ingested without explicit permission + */ + async ingestPhoto(photoId: string, userId: string, approvedBy?: string): Promise<{ + success: boolean; + photoId: string; + vectorId?: string; + graphNodeId?: string; + approvalRequestId?: string; + }> { + try { + // Step 1: Download and analyze photo first (read-only) + const { photo, imageBuffer } = await this.downloadAndAnalyzePhoto(photoId, userId); + + // Step 2: REQUEST APPROVAL before ingesting + const approvalRequestId = await humanApprovalService.requestApproval( + 'facebook.ingest.photo', + `Ingest Facebook photo ${photoId} into knowledge base\nPhoto: ${photo.name || 'Untitled'}\nSize: ${(imageBuffer.length / 1024).toFixed(2)} KB`, + ApprovalRiskLevel.MEDIUM, + `FacebookOAuthService (user: ${userId})`, + { + photoId, + userId, + photoName: photo.name, + photoUrl: photo.link, + imageSize: imageBuffer.length, + }, + 600000 // 10 minute expiry + ); + + console.log(`⏳ [WAITING FOR APPROVAL] Photo ingestion request ID: ${approvalRequestId}`); + + // Step 3: WAIT for human approval (blocks here) + try { + await humanApprovalService.waitForApproval(approvalRequestId, 600000); + } catch (approvalError: any) { + console.error(`🚫 [APPROVAL DENIED] ${approvalError.message}`); + throw new Error(`Photo ingestion not approved: ${approvalError.message}`); + } + + console.log(`✅ [APPROVED] Proceeding with photo ingestion`); + + // Step 4: Only NOW proceed with actual ingestion + // TODO: Integrate with KnowledgeAcquisition service + // 1. 
Analyze image with Gemini Vision + // 2. Extract entities with NER + // 3. Store in Neo4j graph + // 4. Create vector embeddings + // 5. Store in pgvector + + console.log(`✅ Photo ${photoId} ingested into knowledge base`); + + return { + success: true, + photoId, + vectorId: 'placeholder', // From pgvector + graphNodeId: 'placeholder', // From Neo4j + approvalRequestId, + }; + } catch (error: any) { + console.error(`❌ Failed to ingest photo ${photoId}:`, error.message); + throw error; + } + } + + /** + * Get stored token for user + */ + getAccessToken(userId: string): string | null { + const tokenData = this.tokens.get(userId); + if (!tokenData || Date.now() > tokenData.expiresAt) { + return null; + } + return tokenData.accessToken; + } + + /** + * Check if user is authenticated + */ + isAuthenticated(userId: string): boolean { + const tokenData = this.tokens.get(userId); + return tokenData !== undefined && Date.now() < tokenData.expiresAt; + } + + /** + * Revoke access token + */ + revokeToken(userId: string): void { + this.tokens.delete(userId); + console.log(`🔒 Token revoked for user ${userId}`); + } + + /** + * Generate random state for CSRF protection + */ + private generateState(): string { + return randomBytes(32).toString('hex'); + } + + /** + * Verify state parameter + */ + verifyState(receivedState: string, expectedState: string): boolean { + return receivedState === expectedState; + } +} + +// Singleton instance +export const facebookOAuth = new FacebookOAuthService(); diff --git a/apps/backend/src/services/GraphIngestor.ts b/apps/backend/src/services/GraphIngestor.ts new file mode 100644 index 0000000000000000000000000000000000000000..071f26f649dde43ba2caa8a39393cae98ffa5fd7 --- /dev/null +++ b/apps/backend/src/services/GraphIngestor.ts @@ -0,0 +1,553 @@ +/** + * ╔═══════════════════════════════════════════════════════════════════════════╗ + * ║ GRAPH INGESTOR - KNOWLEDGE HARVESTER ║ + * 
║═══════════════════════════════════════════════════════════════════════════║ + * ║ Converts filesystem structure to Neo4j knowledge graph ║ + * ║ ║ + * ║ Structure: ║ + * ║ (:Repository)-[:CONTAINS]->(:Directory)-[:CONTAINS]->(:File) ║ + * ║ ║ + * ║ Delivered by: Gemini (The Architect) ║ + * ║ Implemented by: Claude (The Captain) ║ + * ╚═══════════════════════════════════════════════════════════════════════════╝ + */ + +import * as fs from 'fs/promises'; +import { createReadStream } from 'fs'; +import * as path from 'path'; +import * as crypto from 'crypto'; +import { setImmediate as yieldToLoop } from 'timers/promises'; +import { neo4jAdapter } from '../adapters/Neo4jAdapter.js'; +import { getEmbeddingService, EmbeddingService } from './embeddings/EmbeddingService.js'; +import { hyperLog } from './HyperLog.js'; + +// ═══════════════════════════════════════════════════════════════════════════ +// Types +// ═══════════════════════════════════════════════════════════════════════════ + +export interface IngestOptions { + rootPath: string; + repositoryName?: string; + includePatterns?: string[]; + excludePatterns?: string[]; + maxDepth?: number; + parseContent?: boolean; + generateEmbeddings?: boolean; + maxEmbedSizeBytes?: number; + embeddingTextLimit?: number; + contentPreviewLength?: number; +} + +export interface IngestResult { + success: boolean; + repositoryId: string; + stats: { + directoriesCreated: number; + filesCreated: number; + relationshipsCreated: number; + totalNodes: number; + duration: number; + }; + errors: string[]; +} + +interface FileInfo { + name: string; + path: string; + relativePath: string; + extension: string; + language: string; + size: number; + lines?: number; + contentPreview?: string; + contentHash?: string; + embedding?: number[]; + embeddingProvider?: string; +} + +// ═══════════════════════════════════════════════════════════════════════════ +// Language Detection +// 
═══════════════════════════════════════════════════════════════════════════ + +const LANGUAGE_MAP: Record = { + '.ts': 'TypeScript', + '.tsx': 'TypeScript/React', + '.js': 'JavaScript', + '.jsx': 'JavaScript/React', + '.md': 'Markdown', + '.json': 'JSON', + '.yaml': 'YAML', + '.yml': 'YAML', + '.css': 'CSS', + '.scss': 'SCSS', + '.html': 'HTML', + '.sql': 'SQL', + '.py': 'Python', + '.sh': 'Shell', + '.bat': 'Batch', + '.ps1': 'PowerShell', + '.dockerfile': 'Dockerfile', + '.env': 'Environment', + '.gitignore': 'Git', +}; + +const DEFAULT_EXCLUDE = [ + 'node_modules', + '.git', + 'dist', + 'build', + '.next', + 'coverage', + '.cache', + '__pycache__', + '.vscode', + '.idea', +]; + +// ═══════════════════════════════════════════════════════════════════════════ +// Graph Ingestor Class +// ═══════════════════════════════════════════════════════════════════════════ + +export class GraphIngestor { + private options: Required; + private stats = { + directoriesCreated: 0, + filesCreated: 0, + relationshipsCreated: 0, + totalNodes: 0, + duration: 0, + }; + private errors: string[] = []; + private readonly yieldInterval = 100; // Yield to event loop every N entries + private embeddingService: EmbeddingService | null = null; + private embeddingProvider: string | null = null; + + constructor(options: IngestOptions) { + this.options = { + rootPath: options.rootPath, + repositoryName: options.repositoryName || path.basename(options.rootPath), + includePatterns: options.includePatterns || ['*'], + excludePatterns: options.excludePatterns || DEFAULT_EXCLUDE, + maxDepth: options.maxDepth || 10, + parseContent: options.parseContent ?? false, + generateEmbeddings: options.generateEmbeddings ?? false, + maxEmbedSizeBytes: options.maxEmbedSizeBytes ?? 120_000, + embeddingTextLimit: options.embeddingTextLimit ?? 4000, + contentPreviewLength: options.contentPreviewLength ?? 
800, + }; + } + + // ═══════════════════════════════════════════════════════════════════════ + // Main Ingestion Method + // ═══════════════════════════════════════════════════════════════════════ + + async ingest(): Promise { + const startTime = Date.now(); + + try { + console.log(`[GraphIngestor] 🚀 Starting ingestion of: ${this.options.rootPath}`); + await hyperLog.logEvent('GRAPH_INGEST_START', { + rootPath: this.options.rootPath, + repository: this.options.repositoryName, + embeddings: this.options.generateEmbeddings, + }); + + // Create Repository node + const repoId = this.generateId('Repository', this.options.repositoryName); + await this.createRepositoryNode(repoId); + + // Recursively process directory + await this.processDirectory(this.options.rootPath, repoId, 0); + + this.stats.duration = Date.now() - startTime; + this.stats.totalNodes = this.stats.directoriesCreated + this.stats.filesCreated + 1; + + console.log(`[GraphIngestor] ✅ Ingestion complete in ${this.stats.duration}ms`); + console.log( + `[GraphIngestor] 📊 Stats: ${this.stats.totalNodes} nodes, ${this.stats.relationshipsCreated} relationships` + ); + + await hyperLog.logEvent('GRAPH_INGEST_COMPLETED', { + repository: this.options.repositoryName, + stats: this.stats, + errors: this.errors.length, + }); + + return { + success: true, + repositoryId: repoId, + stats: this.stats, + errors: this.errors, + }; + } catch (error: any) { + this.errors.push(`Fatal error: ${error.message}`); + await hyperLog.logEvent('GRAPH_INGEST_FATAL', { + repository: this.options.repositoryName, + error: error.message, + }); + return { + success: false, + repositoryId: '', + stats: this.stats, + errors: this.errors, + }; + } + } + + // ═══════════════════════════════════════════════════════════════════════ + // Node Creation + // ═══════════════════════════════════════════════════════════════════════ + + private async createRepositoryNode(repoId: string): Promise { + await neo4jAdapter.writeQuery( + ` + MERGE 
(r:Repository {id: $id}) + SET r.name = $name, + r.path = $path, + r.ingestedAt = datetime(), + r.source = 'graph-ingestor' + RETURN r + `, + { + id: repoId, + name: this.options.repositoryName, + path: this.options.rootPath, + } + ); + + console.log(`[GraphIngestor] 📦 Created Repository: ${this.options.repositoryName}`); + } + + private async createDirectoryNode( + dirPath: string, + parentId: string, + depth: number + ): Promise { + const dirName = path.basename(dirPath); + const relativePath = path.relative(this.options.rootPath, dirPath); + const dirId = this.generateId('Directory', relativePath || dirName); + + await neo4jAdapter.writeQuery( + ` + MERGE (d:Directory {id: $id}) + SET d.name = $name, + d.path = $path, + d.relativePath = $relativePath, + d.depth = $depth, + d.ingestedAt = datetime() + WITH d + MATCH (p {id: $parentId}) + MERGE (p)-[:CONTAINS]->(d) + RETURN d + `, + { + id: dirId, + name: dirName, + path: dirPath, + relativePath: relativePath || '.', + depth: depth, + parentId: parentId, + } + ); + + this.stats.directoriesCreated++; + this.stats.relationshipsCreated++; + + return dirId; + } + + private async createFileNode(fileInfo: FileInfo, parentId: string): Promise { + const fileId = this.generateId('File', fileInfo.relativePath); + + await neo4jAdapter.writeQuery( + ` + MERGE (f:File:${this.sanitizeLabel(fileInfo.language)} {id: $id}) + SET f.name = $name, + f.path = $path, + f.relativePath = $relativePath, + f.extension = $extension, + f.language = $language, + f.size = $size, + f.lines = $lines, + f.contentPreview = coalesce($contentPreview, f.contentPreview), + f.contentHash = coalesce($contentHash, f.contentHash), + f.embedding = CASE WHEN $embedding IS NOT NULL THEN $embedding ELSE f.embedding END, + f.embeddingProvider = coalesce($embeddingProvider, f.embeddingProvider), + f.embeddingDimensions = CASE + WHEN $embedding IS NOT NULL THEN size($embedding) + ELSE coalesce(f.embeddingDimensions, CASE WHEN f.embedding IS NOT NULL THEN 
size(f.embedding) END) + END, + f.hasEmbedding = CASE + WHEN $embedding IS NOT NULL THEN true + WHEN f.embedding IS NOT NULL THEN true + ELSE false + END, + f.ingestedAt = datetime() + WITH f + MATCH (p {id: $parentId}) + MERGE (p)-[:CONTAINS]->(f) + RETURN f + `, + { + id: fileId, + name: fileInfo.name, + path: fileInfo.path, + relativePath: fileInfo.relativePath, + extension: fileInfo.extension, + language: fileInfo.language, + size: fileInfo.size, + lines: fileInfo.lines || 0, + contentPreview: fileInfo.contentPreview || null, + contentHash: fileInfo.contentHash || null, + embedding: fileInfo.embedding || null, + embeddingProvider: fileInfo.embeddingProvider || null, + parentId: parentId, + } + ); + + this.stats.filesCreated++; + this.stats.relationshipsCreated++; + } + + // ═══════════════════════════════════════════════════════════════════════ + // Directory Processing + // ═══════════════════════════════════════════════════════════════════════ + + private async processDirectory(dirPath: string, parentId: string, depth: number): Promise { + const queue: Array<{ dirPath: string; parentId: string; depth: number }> = [ + { dirPath, parentId, depth }, + ]; + let processed = 0; + + while (queue.length) { + const current = queue.shift(); + if (!current || current.depth > this.options.maxDepth) { + continue; + } + + let entries; + try { + entries = await fs.readdir(current.dirPath, { withFileTypes: true }); + } catch (error: any) { + this.errors.push(`Error processing ${current.dirPath}: ${error.message}`); + continue; + } + + for (const entry of entries) { + const fullPath = path.join(current.dirPath, entry.name); + + if (this.shouldExclude(entry.name)) { + continue; + } + + processed++; + if (processed % this.yieldInterval === 0) { + await yieldToLoop(); // Yield to avoid starving the event loop on large trees + } + + if (entry.isDirectory()) { + const dirId = await this.createDirectoryNode(fullPath, current.parentId, current.depth); + queue.push({ dirPath: 
fullPath, parentId: dirId, depth: current.depth + 1 }); + } else if (entry.isFile()) { + const fileInfo = await this.getFileInfo(fullPath); + if (fileInfo) { + await this.createFileNode(fileInfo, current.parentId); + } + } + } + } + } + + // ═══════════════════════════════════════════════════════════════════════ + // Helpers + // ═══════════════════════════════════════════════════════════════════════ + + private async getFileInfo(filePath: string): Promise { + try { + const stats = await fs.stat(filePath); + const ext = path.extname(filePath).toLowerCase(); + const relativePath = path.relative(this.options.rootPath, filePath); + + const shouldLoadContent = + (this.options.parseContent || this.options.generateEmbeddings) && this.isTextFile(ext); + let content: string | null = null; + + let lines: number | undefined; + if (this.isTextFile(ext)) { + if (shouldLoadContent && stats.size <= this.options.maxEmbedSizeBytes) { + content = await fs.readFile(filePath, 'utf-8'); + lines = content.split(/\r?\n/).length; + } else { + lines = await this.countLines(filePath, stats.size); + } + } + + const contentPreview = content + ? 
content.slice(0, this.options.contentPreviewLength) + : undefined; + + let embedding: number[] | undefined; + let embeddingProvider: string | undefined; + if (this.options.generateEmbeddings && content && content.trim()) { + let embedText = content; + if (embedText.length > this.options.embeddingTextLimit) { + embedText = embedText.slice(0, this.options.embeddingTextLimit); + } + const embedded = await this.generateEmbedding(embedText, relativePath); + if (embedded) { + embedding = embedded; + embeddingProvider = this.embeddingProvider || undefined; + } + } else if ( + this.options.generateEmbeddings && + stats.size > this.options.maxEmbedSizeBytes && + this.isTextFile(ext) + ) { + await hyperLog.logEvent('GRAPH_INGEST_EMBEDDING_SKIPPED', { + path: relativePath, + reason: 'file_too_large', + size: stats.size, + limit: this.options.maxEmbedSizeBytes, + }); + } + + return { + name: path.basename(filePath), + path: filePath, + relativePath: relativePath, + extension: ext, + language: this.detectLanguage(ext, path.basename(filePath)), + size: stats.size, + lines: lines, + contentPreview, + contentHash: content ? 
this.hashContent(content) : undefined, + embedding, + embeddingProvider, + }; + } catch (error) { + return null; + } + } + + private detectLanguage(ext: string, filename: string): string { + // Special cases for files without extensions + const lowerName = filename.toLowerCase(); + if (lowerName === 'dockerfile') return 'Dockerfile'; + if (lowerName === 'makefile') return 'Makefile'; + if (lowerName.startsWith('.env')) return 'Environment'; + if (lowerName === '.gitignore') return 'Git'; + + return LANGUAGE_MAP[ext] || 'Unknown'; + } + + private isTextFile(ext: string): boolean { + const textExtensions = [ + '.ts', + '.tsx', + '.js', + '.jsx', + '.json', + '.md', + '.yaml', + '.yml', + '.css', + '.scss', + '.html', + '.sql', + '.py', + '.sh', + '.bat', + '.ps1', + '.txt', + '.env', + '.gitignore', + '.prettierrc', + '.eslintrc', + ]; + return textExtensions.includes(ext); + } + + private shouldExclude(name: string): boolean { + return this.options.excludePatterns.some(pattern => { + if (pattern.includes('*')) { + const regex = new RegExp(pattern.replace(/\*/g, '.*')); + return regex.test(name); + } + return name === pattern; + }); + } + + private generateId(type: string, identifier: string): string { + const content = `${type}:${identifier}`; + return crypto.createHash('md5').update(content).digest('hex'); + } + + private sanitizeLabel(language: string): string { + // Convert language to valid Neo4j label + return language.replace(/[^a-zA-Z0-9]/g, '_').replace(/^_+|_+$/g, '') || 'Unknown'; + } + + private async countLines(filePath: string, size: number): Promise { + if (size > 2_000_000) { + return undefined; // Avoid heavy scans on very large files + } + + return new Promise(resolve => { + let count = 0; + const stream = createReadStream(filePath); + + stream.on('data', (chunk: Buffer) => { + for (let i = 0; i < chunk.length; i++) { + if (chunk[i] === 10) count++; // '\n' + } + }); + + stream.on('end', () => resolve(count || 1)); + stream.on('error', () => 
resolve(undefined)); + }); + } + + private hashContent(content: string): string { + return crypto.createHash('md5').update(content).digest('hex'); + } + + private async ensureEmbeddingService(): Promise { + if (this.embeddingService) return; + + try { + this.embeddingService = getEmbeddingService(); + await this.embeddingService.initialize(); + this.embeddingProvider = this.embeddingService.getProviderName(); + } catch (error: any) { + this.errors.push(`Embedding provider unavailable: ${error.message}`); + await hyperLog.logEvent('GRAPH_INGEST_EMBEDDING_PROVIDER_FAILED', { + error: error.message, + }); + throw error; + } + } + + private async generateEmbedding(content: string, relativePath: string): Promise { + try { + await this.ensureEmbeddingService(); + const embedding = await this.embeddingService!.generateEmbedding(content); + return embedding; + } catch (error: any) { + this.errors.push(`Embedding failed for ${relativePath}: ${error.message}`); + await hyperLog.logEvent('GRAPH_INGEST_EMBEDDING_FAILED', { + path: relativePath, + error: error.message, + }); + return null; + } + } +} + +// ═══════════════════════════════════════════════════════════════════════════ +// Export Factory Function +// ═══════════════════════════════════════════════════════════════════════════ + +export async function ingestRepository(options: IngestOptions): Promise { + const ingestor = new GraphIngestor(options); + return ingestor.ingest(); +} diff --git a/apps/backend/src/services/HumanApprovalService.ts b/apps/backend/src/services/HumanApprovalService.ts new file mode 100644 index 0000000000000000000000000000000000000000..fffbd7c3f94b2e23119254db259c71109d40aad6 --- /dev/null +++ b/apps/backend/src/services/HumanApprovalService.ts @@ -0,0 +1,287 @@ +/** + * Human-in-the-Loop Approval Service + * + * SIKKERHED: Ingen handlinger der muterer data kan udføres uden eksplicit menneskelig godkendelse. 
+ * Dette inkluderer: + * - Data ingestion (Facebook, email, external APIs) + * - Social media posting + * - Database mutations + * - File system operations + * - External API calls + */ + +import { EventEmitter } from 'events'; + +export enum ApprovalStatus { + PENDING = 'pending', + APPROVED = 'approved', + REJECTED = 'rejected', + EXPIRED = 'expired', +} + +export enum ApprovalRiskLevel { + LOW = 'low', // Read-only operations + MEDIUM = 'medium', // Data ingestion, internal mutations + HIGH = 'high', // External posts, payments, deletions + CRITICAL = 'critical' // System-level changes +} + +export interface ApprovalRequest { + id: string; + operation: string; + description: string; + riskLevel: ApprovalRiskLevel; + requestedBy: string; // System component requesting approval + requestedAt: number; + expiresAt: number; + status: ApprovalStatus; + metadata: Record; + approvedBy?: string; + approvedAt?: number; + rejectedBy?: string; + rejectedAt?: number; + rejectionReason?: string; +} + +class HumanApprovalServiceImpl extends EventEmitter { + private pendingRequests: Map = new Map(); + private readonly DEFAULT_EXPIRY = 300000; // 5 minutes + private cleanupInterval: NodeJS.Timeout; + + constructor() { + super(); + + // Cleanup expired requests every minute + this.cleanupInterval = setInterval(() => { + this.cleanupExpiredRequests(); + }, 60000); + } + + /** + * Request approval for an operation + * Returns approval request ID + */ + async requestApproval( + operation: string, + description: string, + riskLevel: ApprovalRiskLevel, + requestedBy: string, + metadata: Record = {}, + expiryMs?: number + ): Promise { + const id = this.generateId(); + const now = Date.now(); + + const request: ApprovalRequest = { + id, + operation, + description, + riskLevel, + requestedBy, + requestedAt: now, + expiresAt: now + (expiryMs || this.DEFAULT_EXPIRY), + status: ApprovalStatus.PENDING, + metadata, + }; + + this.pendingRequests.set(id, request); + + // Emit event for 
real-time UI updates + this.emit('approval-requested', request); + + console.log(`🔐 [APPROVAL REQUIRED] ${operation}`); + console.log(` Risk: ${riskLevel.toUpperCase()}`); + console.log(` Description: ${description}`); + console.log(` Request ID: ${id}`); + + return id; + } + + /** + * Wait for approval (blocking) + * Throws if rejected or expired + */ + async waitForApproval(requestId: string, timeoutMs: number = 300000): Promise { + return new Promise((resolve, reject) => { + const checkInterval = setInterval(() => { + const request = this.pendingRequests.get(requestId); + + if (!request) { + clearInterval(checkInterval); + reject(new Error('Approval request not found')); + return; + } + + if (request.status === ApprovalStatus.APPROVED) { + clearInterval(checkInterval); + resolve(); + return; + } + + if (request.status === ApprovalStatus.REJECTED) { + clearInterval(checkInterval); + reject(new Error(`Approval rejected: ${request.rejectionReason || 'No reason provided'}`)); + return; + } + + if (Date.now() > request.expiresAt) { + clearInterval(checkInterval); + request.status = ApprovalStatus.EXPIRED; + this.emit('approval-expired', request); + reject(new Error('Approval request expired')); + return; + } + }, 500); + + // Failsafe timeout + setTimeout(() => { + clearInterval(checkInterval); + const request = this.pendingRequests.get(requestId); + if (request && request.status === ApprovalStatus.PENDING) { + request.status = ApprovalStatus.EXPIRED; + this.emit('approval-expired', request); + reject(new Error('Approval timeout')); + } + }, timeoutMs); + }); + } + + /** + * Approve a request + */ + approve(requestId: string, approvedBy: string): boolean { + const request = this.pendingRequests.get(requestId); + + if (!request) { + console.error(`❌ Approval request ${requestId} not found`); + return false; + } + + if (request.status !== ApprovalStatus.PENDING) { + console.error(`❌ Approval request ${requestId} is not pending (status: ${request.status})`); + return 
false; + } + + if (Date.now() > request.expiresAt) { + request.status = ApprovalStatus.EXPIRED; + console.error(`❌ Approval request ${requestId} has expired`); + return false; + } + + request.status = ApprovalStatus.APPROVED; + request.approvedBy = approvedBy; + request.approvedAt = Date.now(); + + this.emit('approval-granted', request); + + console.log(`✅ [APPROVED] ${request.operation}`); + console.log(` Approved by: ${approvedBy}`); + + return true; + } + + /** + * Reject a request + */ + reject(requestId: string, rejectedBy: string, reason: string): boolean { + const request = this.pendingRequests.get(requestId); + + if (!request) { + console.error(`❌ Approval request ${requestId} not found`); + return false; + } + + if (request.status !== ApprovalStatus.PENDING) { + console.error(`❌ Approval request ${requestId} is not pending (status: ${request.status})`); + return false; + } + + request.status = ApprovalStatus.REJECTED; + request.rejectedBy = rejectedBy; + request.rejectedAt = Date.now(); + request.rejectionReason = reason; + + this.emit('approval-rejected', request); + + console.log(`🚫 [REJECTED] ${request.operation}`); + console.log(` Rejected by: ${rejectedBy}`); + console.log(` Reason: ${reason}`); + + return true; + } + + /** + * Get all pending requests + */ + getPendingRequests(): ApprovalRequest[] { + return Array.from(this.pendingRequests.values()) + .filter(r => r.status === ApprovalStatus.PENDING && Date.now() <= r.expiresAt) + .sort((a, b) => { + // Sort by risk level (high to low), then by time (old to new) + const riskOrder = { critical: 4, high: 3, medium: 2, low: 1 }; + const riskDiff = (riskOrder[b.riskLevel] || 0) - (riskOrder[a.riskLevel] || 0); + if (riskDiff !== 0) return riskDiff; + return a.requestedAt - b.requestedAt; + }); + } + + /** + * Get request by ID + */ + getRequest(requestId: string): ApprovalRequest | null { + return this.pendingRequests.get(requestId) || null; + } + + /** + * Get request history + */ + getHistory(limit: 
number = 50): ApprovalRequest[] { + return Array.from(this.pendingRequests.values()) + .sort((a, b) => b.requestedAt - a.requestedAt) + .slice(0, limit); + } + + /** + * Cleanup expired requests + */ + private cleanupExpiredRequests(): void { + const now = Date.now(); + let cleaned = 0; + + for (const [id, request] of this.pendingRequests.entries()) { + if (request.status === ApprovalStatus.PENDING && now > request.expiresAt) { + request.status = ApprovalStatus.EXPIRED; + this.emit('approval-expired', request); + cleaned++; + } + + // Remove old completed requests (keep for 1 hour) + if (request.status !== ApprovalStatus.PENDING && now - request.requestedAt > 3600000) { + this.pendingRequests.delete(id); + } + } + + if (cleaned > 0) { + console.log(`🧹 Cleaned up ${cleaned} expired approval requests`); + } + } + + /** + * Generate unique ID + */ + private generateId(): string { + return `approval_${Date.now()}_${Math.random().toString(36).substring(7)}`; + } + + /** + * Shutdown cleanup + */ + shutdown(): void { + if (this.cleanupInterval) { + clearInterval(this.cleanupInterval); + } + } +} + +// Singleton instance +export const humanApprovalService = new HumanApprovalServiceImpl(); diff --git a/apps/backend/src/services/HyperLog.ts b/apps/backend/src/services/HyperLog.ts new file mode 100644 index 0000000000000000000000000000000000000000..f8b6f8617a5518833c5c7b75ab3cae449da568e3 --- /dev/null +++ b/apps/backend/src/services/HyperLog.ts @@ -0,0 +1,91 @@ +/** + * HyperLog - AI Memory & Event Logging + * + * Wrapper around HyperLogService for SelfHealingAdapter compatibility. + * Logs events for AI learning and system analysis. 
+ */ + +import { EventEmitter } from 'events'; + +interface HyperLogEvent { + id: string; + timestamp: number; + eventType: string; + data: Record; +} + +export class HyperLog extends EventEmitter { + private events: HyperLogEvent[] = []; + private maxEvents = 10000; + + /** + * Log an event for AI memory/analysis + */ + async logEvent(eventType: string, data: Record = {}): Promise { + const event: HyperLogEvent = { + id: this.generateId(), + timestamp: Date.now(), + eventType, + data + }; + + this.events.push(event); + + // Trim if over max + if (this.events.length > this.maxEvents) { + this.events = this.events.slice(-this.maxEvents); + } + + // Emit for real-time subscribers + this.emit('event', event); + + // Console log for debugging + console.log(`🧠 [HyperLog] ${eventType}:`, data); + } + + /** + * Get events by type + */ + getEventsByType(eventType: string, limit: number = 100): HyperLogEvent[] { + return this.events + .filter(e => e.eventType === eventType) + .slice(-limit); + } + + /** + * Get recent events + */ + getRecentEvents(limit: number = 100): HyperLogEvent[] { + return this.events.slice(-limit); + } + + /** + * Get healing-related events for analysis + */ + getHealingHistory(): HyperLogEvent[] { + return this.events.filter(e => + e.eventType.startsWith('HEALING_') || + e.eventType === 'SELF_HEALING' + ); + } + + /** + * Export for AI training/analysis + */ + exportForAnalysis(): { events: HyperLogEvent[]; summary: Record } { + const summary: Record = {}; + + for (const event of this.events) { + summary[event.eventType] = (summary[event.eventType] || 0) + 1; + } + + return { events: this.events, summary }; + } + + private generateId(): string { + return `hyper_${Date.now()}_${Math.random().toString(36).substr(2, 9)}`; + } +} + +// Singleton +export const hyperLog = new HyperLog(); diff --git a/apps/backend/src/services/Knowledge/KnowledgeCompiler.ts b/apps/backend/src/services/Knowledge/KnowledgeCompiler.ts new file mode 100644 index 
0000000000000000000000000000000000000000..f815671090b2122e2dcc84d214febe501a49718a --- /dev/null +++ b/apps/backend/src/services/Knowledge/KnowledgeCompiler.ts @@ -0,0 +1,719 @@ +/** + * ╔═══════════════════════════════════════════════════════════════════════════════════════╗ + * ║ KNOWLEDGE COMPILER ║ + * ║═══════════════════════════════════════════════════════════════════════════════════════║ + * ║ ║ + * ║ Aggregerer viden fra hele systemet til en unified "System State Summary" ║ + * ║ ║ + * ║ DATAKILDER: ║ + * ║ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ ┌──────────────┐ ║ + * ║ │ HyperLog │ │ Neo4j │ │ Metrics │ │ SelfHealing │ ║ + * ║ │ (Events) │ │ (Graph) │ │ (Counters) │ │ (Status) │ ║ + * ║ └──────┬───────┘ └──────┬───────┘ └──────┬───────┘ └──────┬───────┘ ║ + * ║ │ │ │ │ ║ + * ║ └────────────────┴────────────────┴────────────────┘ ║ + * ║ │ ║ + * ║ ▼ ║ + * ║ ┌─────────────────────────┐ ║ + * ║ │ KNOWLEDGE COMPILER │ ║ + * ║ │ • compile() │ ║ + * ║ │ • getSystemSummary() │ ║ + * ║ │ • getInsights() │ ║ + * ║ └───────────┬─────────────┘ ║ + * ║ │ ║ + * ║ ▼ ║ + * ║ ┌─────────────────────────┐ ║ + * ║ │ CognitiveNode Widget │ ║ + * ║ │ (Visual Dashboard) │ ║ + * ║ └─────────────────────────┘ ║ + * ║ ║ + * ╚═══════════════════════════════════════════════════════════════════════════════════════╝ + */ + +import { hyperLog, HyperLog } from '../HyperLog.js'; +import { selfHealing, SelfHealingAdapter } from '../SelfHealingAdapter.js'; +import { neo4jAdapter } from '../../adapters/Neo4jAdapter.js'; + +// ═══════════════════════════════════════════════════════════════════════════ +// TYPES +// ═══════════════════════════════════════════════════════════════════════════ + +export interface HealthStatus { + overall: 'HEALTHY' | 'DEGRADED' | 'CRITICAL'; + score: number; + services: { + name: string; + status: 'healthy' | 'unhealthy' | 'unknown'; + lastCheck: string; + }[]; + healingStats: { + attempts: number; + successes: number; + failures: number; + 
successRate: number; + }; +} + +export interface ActivitySummary { + last24h: { + events: number; + errors: number; + healingAttempts: number; + graphChanges: number; + }; + topEventTypes: { type: string; count: number }[]; + activeAgents: string[]; +} + +export interface GraphStats { + totalNodes: number; + totalRelationships: number; + nodesByLabel: Record; + recentChanges: { + added: number; + modified: number; + deleted: number; + }; +} + +export interface Insight { + id: string; + type: 'anomaly' | 'pattern' | 'trend'; + severity: 'info' | 'warning' | 'critical'; + title: string; + description: string; + data?: any; + timestamp: string; +} + +export interface Recommendation { + id: string; + priority: 'low' | 'medium' | 'high'; + action: string; + reason: string; + impact: string; +} + +export interface RecentEvent { + id: string; + type: string; + timestamp: string; + summary: string; + data?: Record; +} + +export interface SystemStateSummary { + timestamp: string; + health: HealthStatus; + activity: ActivitySummary; + insights: Insight[]; + recommendations: Recommendation[]; + recentEvents: RecentEvent[]; + graphStats: GraphStats; +} + +// ═══════════════════════════════════════════════════════════════════════════ +// KNOWLEDGE COMPILER CLASS +// ═══════════════════════════════════════════════════════════════════════════ + +export class KnowledgeCompiler { + private static instance: KnowledgeCompiler; + private lastCompilation: SystemStateSummary | null = null; + private autoCompilationInterval: ReturnType | null = null; + + private constructor() { } + + public static getInstance(): KnowledgeCompiler { + if (!KnowledgeCompiler.instance) { + KnowledgeCompiler.instance = new KnowledgeCompiler(); + } + return KnowledgeCompiler.instance; + } + + /** + * Start auto-compilation at the specified interval + */ + public startAutoCompilation(intervalMs: number = 60000): void { + if (this.autoCompilationInterval) { + console.log('[KnowledgeCompiler] Auto-compilation 
already running'); + return; + } + + console.log(`[KnowledgeCompiler] Starting auto-compilation every ${intervalMs / 1000}s`); + + // Run initial compilation after 5 seconds + setTimeout(() => this.compile().catch(err => + console.warn('[KnowledgeCompiler] Initial compilation failed:', err) + ), 5000); + + // Set up periodic compilation + this.autoCompilationInterval = setInterval(async () => { + try { + await this.compile(); + } catch (error) { + console.warn('[KnowledgeCompiler] Periodic compilation failed:', error); + } + }, intervalMs); + } + + /** + * Stop auto-compilation + */ + public stopAutoCompilation(): void { + if (this.autoCompilationInterval) { + clearInterval(this.autoCompilationInterval); + this.autoCompilationInterval = null; + console.log('[KnowledgeCompiler] Auto-compilation stopped'); + } + } + + /** + * MAIN COMPILATION METHOD + */ + async compile(): Promise { + console.log('[KnowledgeCompiler] Starting compilation...'); + const startTime = Date.now(); + + try { + // Gather data from all sources in parallel + const [health, activity, graphStats, recentEvents] = await Promise.all([ + this.compileHealthStatus(), + this.compileActivitySummary(), + this.compileGraphStats(), + this.compileRecentEvents(), + ]); + + // Generate insights based on compiled data + const insights = this.generateInsights(health, activity, graphStats); + + // Generate recommendations + const recommendations = this.generateRecommendations(health, activity, insights); + + const summary: SystemStateSummary = { + timestamp: new Date().toISOString(), + health, + activity, + insights, + recommendations, + recentEvents, + graphStats, + }; + + this.lastCompilation = summary; + + const duration = Date.now() - startTime; + console.log(`[KnowledgeCompiler] Compilation complete in ${duration}ms`); + + return summary; + } catch (error) { + console.error('[KnowledgeCompiler] Compilation failed:', error); + throw error; + } + } + + /** + * Get the last compiled summary (cached) + */ + 
getLastCompilation(): SystemStateSummary | null { + return this.lastCompilation; + } + + /** + * Get system summary (compile if needed) + */ + async getSystemSummary(forceRefresh: boolean = false): Promise { + if (forceRefresh || !this.lastCompilation) { + return await this.compile(); + } + return this.lastCompilation; + } + + /** + * Quick health check without full compilation + */ + async quickHealth(): Promise<{ status: string; score: number; timestamp: string }> { + const health = await this.compileHealthStatus(); + return { + status: health.overall, + score: health.score, + timestamp: new Date().toISOString(), + }; + } + + // ═══════════════════════════════════════════════════════════════════════════ + // DATA SOURCE COMPILATION METHODS + // ═══════════════════════════════════════════════════════════════════════════ + + /** + * Compile health status from SelfHealing + services + */ + private async compileHealthStatus(): Promise { + const systemStatus = selfHealing.getSystemStatus(); + const hyperLogData = hyperLog.exportForAnalysis(); + + // Calculate healing stats from HyperLog + const healingAttempts = hyperLogData.summary['HEALING_ATTEMPT'] || 0; + const healingSuccesses = hyperLogData.summary['HEALING_SUCCESS'] || 0; + const healingFailures = + hyperLogData.summary['HEALING_CRASH'] || hyperLogData.summary['HEALING_FAILED'] || 0; + + const successRate = + healingAttempts > 0 ? 
Math.round((healingSuccesses / healingAttempts) * 100) : 100; + + // Calculate overall score + let score = 100; + if (systemStatus.overallHealth === 'DEGRADED') score = 70; + if (systemStatus.overallHealth === 'CRITICAL') score = 30; + score = Math.max(0, score - healingFailures * 5); + + return { + overall: systemStatus.overallHealth as 'HEALTHY' | 'DEGRADED' | 'CRITICAL', + score: Math.max(0, Math.min(100, score)), + services: systemStatus.services.map(s => ({ + name: s.name, + status: s.status as 'healthy' | 'unhealthy' | 'unknown', + lastCheck: new Date().toISOString(), + })), + healingStats: { + attempts: healingAttempts, + successes: healingSuccesses, + failures: healingFailures, + successRate, + }, + }; + } + + /** + * Compile activity summary from HyperLog + */ + private async compileActivitySummary(): Promise { + const hyperLogData = hyperLog.exportForAnalysis(); + const recentEvents = hyperLog.getRecentEvents(1000); + + // Filter to last 24h + const oneDayAgo = Date.now() - 24 * 60 * 60 * 1000; + const last24hEvents = recentEvents.filter(e => e.timestamp > oneDayAgo); + + // Count errors + const errors = last24hEvents.filter( + e => + e.eventType.includes('ERROR') || + e.eventType.includes('FAIL') || + e.eventType.includes('CRASH') + ).length; + + // Count healing attempts + const healingAttempts = last24hEvents.filter(e => e.eventType.startsWith('HEALING_')).length; + + // Top event types + const eventCounts: Record = {}; + for (const event of last24hEvents) { + eventCounts[event.eventType] = (eventCounts[event.eventType] || 0) + 1; + } + const topEventTypes = Object.entries(eventCounts) + .sort((a, b) => b[1] - a[1]) + .slice(0, 10) + .map(([type, count]) => ({ type, count })); + + return { + last24h: { + events: last24hEvents.length, + errors, + healingAttempts, + graphChanges: 0, // Will be populated from Neo4j + }, + topEventTypes, + activeAgents: ['claude', 'gemini', 'system'], // TODO: Track from messages + }; + } + + /** + * Compile graph 
statistics from Neo4j + */ + private async compileGraphStats(): Promise { + try { + const countResult = await neo4jAdapter.executeQuery(` + MATCH (n) + WITH count(n) as nodes + OPTIONAL MATCH ()-[r]->() + RETURN nodes, count(r) as relationships + `); + + const labelResult = await neo4jAdapter.executeQuery(` + MATCH (n) + WITH labels(n) as nodeLabels + UNWIND nodeLabels as label + RETURN label, count(*) as count + ORDER BY count DESC + `); + + const nodesByLabel: Record = {}; + for (const row of labelResult) { + const countVal = row.count; + const count = + typeof countVal === 'object' && countVal !== null && 'low' in countVal + ? countVal.low + : typeof countVal === 'object' && countVal !== null && 'toNumber' in countVal + ? countVal.toNumber() + : Number(countVal || 0); + + nodesByLabel[row.label] = count; + } + + const rawNodes = countResult[0]?.nodes; + const rawRels = countResult[0]?.relationships; + + const totalNodes = + typeof rawNodes === 'object' && rawNodes !== null && 'low' in rawNodes + ? rawNodes.low + : typeof rawNodes === 'object' && rawNodes !== null && 'toNumber' in rawNodes + ? rawNodes.toNumber() + : Number(rawNodes || 0); + + const totalRelationships = + typeof rawRels === 'object' && rawRels !== null && 'low' in rawRels + ? rawRels.low + : typeof rawRels === 'object' && rawRels !== null && 'toNumber' in rawRels + ? 
rawRels.toNumber() + : Number(rawRels || 0); + + return { + totalNodes, + totalRelationships, + nodesByLabel, + recentChanges: { + added: 0, + modified: 0, + deleted: 0, + }, + }; + } catch (error) { + console.warn('[KnowledgeCompiler] Failed to get graph stats:', error); + return { + totalNodes: 0, + totalRelationships: 0, + nodesByLabel: {}, + recentChanges: { added: 0, modified: 0, deleted: 0 }, + }; + } + } + + /** + * Compile recent events for display + */ + private async compileRecentEvents(): Promise { + const events = hyperLog.getRecentEvents(20); + + return events + .map(e => ({ + id: e.id, + type: e.eventType, + timestamp: new Date(e.timestamp).toISOString(), + summary: this.summarizeEvent(e.eventType, e.data), + data: e.data, + })) + .reverse(); // Most recent first + } + + /** + * Generate human-readable event summary + */ + private summarizeEvent(eventType: string, data: Record): string { + switch (eventType) { + case 'HEALING_SUCCESS': + return `System healed: ${data.strategy || 'unknown strategy'}`; + case 'HEALING_CRASH': + case 'HEALING_FAILED': + return `Healing failed: ${data.originalError || 'unknown error'}`; + case 'HEALING_ATTEMPT': + return `Attempting to heal: ${data.strategy || 'unknown'}`; + case 'ERROR_UNHANDLED': + return `Unhandled error: ${data.message || data.code || 'unknown'}`; + default: + return `${eventType}: ${JSON.stringify(data).slice(0, 50)}...`; + } + } + + // ═══════════════════════════════════════════════════════════════════════════ + // INSIGHT & RECOMMENDATION GENERATION + // ═══════════════════════════════════════════════════════════════════════════ + + /** + * Generate insights from compiled data + * ENHANCED: Now includes predictive alerts and proactive pattern detection + */ + private generateInsights( + health: HealthStatus, + activity: ActivitySummary, + graphStats: GraphStats + ): Insight[] { + const insights: Insight[] = []; + + // ═══════════════════════════════════════════════════════════════════ + // 
PREDICTIVE INSIGHTS (from SelfHealing predictive alerts) + // ═══════════════════════════════════════════════════════════════════ + const predictiveAlerts = selfHealing.getPredictiveAlerts(); + for (const alert of predictiveAlerts) { + insights.push({ + id: `insight_predictive_${alert.errorCode}_${Date.now()}`, + type: 'anomaly', + severity: alert.severity === 'critical' ? 'critical' : 'warning', + title: `🔮 Predicted Failure: ${alert.errorCode}`, + description: `${(alert.probability * 100).toFixed(0)}% probability of ${alert.errorCode} failure ${alert.expectedIn}. ${alert.recommendation}`, + data: { + errorCode: alert.errorCode, + probability: alert.probability, + expectedIn: alert.expectedIn, + }, + timestamp: new Date().toISOString(), + }); + } + + // ═══════════════════════════════════════════════════════════════════ + // PATTERN DETECTION: Error clustering analysis + // ═══════════════════════════════════════════════════════════════════ + const errorPatterns = this.detectErrorPatterns(activity); + for (const pattern of errorPatterns) { + insights.push({ + id: `insight_pattern_${pattern.code}_${Date.now()}`, + type: 'pattern', + severity: pattern.severity, + title: `📊 Pattern: ${pattern.title}`, + description: pattern.description, + data: pattern.data, + timestamp: new Date().toISOString(), + }); + } + + // ═══════════════════════════════════════════════════════════════════ + // USAGE SPIKE DETECTION + // ═══════════════════════════════════════════════════════════════════ + const usageSpikes = this.detectUsageSpikes(activity); + for (const spike of usageSpikes) { + insights.push({ + id: `insight_spike_${spike.eventType}_${Date.now()}`, + type: 'trend', + severity: spike.severity, + title: `📈 Usage Spike: ${spike.eventType}`, + description: spike.description, + data: spike.data, + timestamp: new Date().toISOString(), + }); + } + + // Health-based insights + if (health.healingStats.failures > 0) { + insights.push({ + id: `insight_healing_${Date.now()}`, + type: 
'anomaly', + severity: health.healingStats.failures > 3 ? 'critical' : 'warning', + title: 'Self-Healing Failures Detected', + description: `${health.healingStats.failures} healing attempts have failed. Success rate: ${health.healingStats.successRate}%`, + timestamp: new Date().toISOString(), + }); + } + + // Activity-based insights + if (activity.last24h.errors > 10) { + insights.push({ + id: `insight_errors_${Date.now()}`, + type: 'anomaly', + severity: 'warning', + title: 'High Error Rate', + description: `${activity.last24h.errors} errors detected in the last 24 hours`, + data: { errorCount: activity.last24h.errors }, + timestamp: new Date().toISOString(), + }); + } + + // Graph-based insights + if (graphStats.totalNodes > 10000) { + insights.push({ + id: `insight_graph_${Date.now()}`, + type: 'trend', + severity: 'info', + title: 'Large Knowledge Graph', + description: `Knowledge graph has grown to ${graphStats.totalNodes.toLocaleString()} nodes`, + timestamp: new Date().toISOString(), + }); + } + + // ═══════════════════════════════════════════════════════════════════ + // DEAD SERVICE DETECTION + // ═══════════════════════════════════════════════════════════════════ + const deadServices = health.services.filter(s => s.status === 'unhealthy'); + if (deadServices.length > 0) { + insights.push({ + id: `insight_dead_services_${Date.now()}`, + type: 'anomaly', + severity: 'critical', + title: `☠️ Dead Services Detected`, + description: `${deadServices.length} service(s) are unhealthy: ${deadServices.map(s => s.name).join(', ')}`, + data: { services: deadServices.map(s => s.name) }, + timestamp: new Date().toISOString(), + }); + } + + // Pattern detection from top events + const healingEvents = activity.topEventTypes.filter(e => e.type.startsWith('HEALING_')); + if (healingEvents.length > 0) { + const totalHealingEvents = healingEvents.reduce((sum, e) => sum + e.count, 0); + if (totalHealingEvents > 5) { + insights.push({ + id: 
`insight_pattern_healing_${Date.now()}`, + type: 'pattern', + severity: 'info', + title: 'Frequent Self-Healing Activity', + description: `System has triggered ${totalHealingEvents} healing events recently`, + data: { events: healingEvents }, + timestamp: new Date().toISOString(), + }); + } + } + + return insights; + } + + /** + * 📊 PATTERN DETECTION: Analyze error clustering + */ + private detectErrorPatterns(activity: ActivitySummary): Array<{ + code: string; + title: string; + description: string; + severity: 'info' | 'warning' | 'critical'; + data: any; + }> { + const patterns: Array = []; + + // Group error events + const errorEvents = activity.topEventTypes.filter( + e => e.type.includes('ERROR') || e.type.includes('FAIL') + ); + + // Detect repeated errors (same error > 5 times) + for (const event of errorEvents) { + if (event.count >= 5) { + patterns.push({ + code: event.type, + title: `Repeated ${event.type}`, + description: `${event.type} has occurred ${event.count} times - investigate root cause`, + severity: event.count >= 10 ? 
'critical' : 'warning', + data: { eventType: event.type, count: event.count }, + }); + } + } + + // Detect error bursts (many errors in short time) + const totalErrors = errorEvents.reduce((sum, e) => sum + e.count, 0); + if (totalErrors > 20 && errorEvents.length > 3) { + patterns.push({ + code: 'ERROR_BURST', + title: 'Error Burst Detected', + description: `${totalErrors} errors across ${errorEvents.length} different error types - possible cascading failure`, + severity: 'critical', + data: { totalErrors, errorTypes: errorEvents.length }, + }); + } + + return patterns; + } + + /** + * 📈 USAGE SPIKE DETECTION: Find abnormal activity + */ + private detectUsageSpikes(activity: ActivitySummary): Array<{ + eventType: string; + description: string; + severity: 'info' | 'warning' | 'critical'; + data: any; + }> { + const spikes: Array = []; + + // Calculate average event count + const avgCount = + activity.topEventTypes.reduce((sum, e) => sum + e.count, 0) / + Math.max(activity.topEventTypes.length, 1); + + // Find events significantly above average (3x) + for (const event of activity.topEventTypes) { + if (event.count > avgCount * 3 && event.count >= 10) { + spikes.push({ + eventType: event.type, + description: `${event.type} activity is ${Math.round(event.count / avgCount)}x above average (${event.count} occurrences)`, + severity: event.count > avgCount * 5 ? 
'warning' : 'info', + data: { + count: event.count, + average: Math.round(avgCount), + multiplier: Math.round(event.count / avgCount), + }, + }); + } + } + + return spikes; + } + + /** + * Generate actionable recommendations + */ + private generateRecommendations( + health: HealthStatus, + activity: ActivitySummary, + insights: Insight[] + ): Recommendation[] { + const recommendations: Recommendation[] = []; + + // Based on health + if (health.overall === 'DEGRADED') { + recommendations.push({ + id: `rec_health_${Date.now()}`, + priority: 'high', + action: 'Investigate degraded services', + reason: 'System health is degraded', + impact: 'Prevent potential system failures', + }); + } + + if (health.healingStats.successRate < 80) { + recommendations.push({ + id: `rec_healing_${Date.now()}`, + priority: 'medium', + action: 'Review self-healing strategies', + reason: `Healing success rate is ${health.healingStats.successRate}%`, + impact: 'Improve system resilience', + }); + } + + // Based on activity + if (activity.last24h.errors > 20) { + recommendations.push({ + id: `rec_errors_${Date.now()}`, + priority: 'high', + action: 'Review error logs and implement fixes', + reason: `${activity.last24h.errors} errors in last 24h`, + impact: 'Reduce system instability', + }); + } + + // Based on insights + const criticalInsights = insights.filter(i => i.severity === 'critical'); + if (criticalInsights.length > 0) { + recommendations.push({ + id: `rec_critical_${Date.now()}`, + priority: 'high', + action: 'Address critical insights immediately', + reason: `${criticalInsights.length} critical issues detected`, + impact: 'Prevent system failures', + }); + } + + return recommendations; + } +} + +// ═══════════════════════════════════════════════════════════════════════════ +// SINGLETON EXPORT +// ═══════════════════════════════════════════════════════════════════════════ + +export const knowledgeCompiler = KnowledgeCompiler.getInstance(); diff --git 
a/apps/backend/src/services/Knowledge/index.ts b/apps/backend/src/services/Knowledge/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..cd13a021a2d8d860b21a5de15ae3302fab396a06 --- /dev/null +++ b/apps/backend/src/services/Knowledge/index.ts @@ -0,0 +1,14 @@ +/** + * Knowledge Services - Export Index + */ + +export { knowledgeCompiler } from './KnowledgeCompiler.js'; +export type { + SystemStateSummary, + HealthStatus, + ActivitySummary, + Insight, + Recommendation, + RecentEvent, + GraphStats +} from './KnowledgeCompiler.js'; diff --git a/apps/backend/src/services/KnowledgeAcquisitionService.ts b/apps/backend/src/services/KnowledgeAcquisitionService.ts new file mode 100644 index 0000000000000000000000000000000000000000..3b7c2f5279c263c2f7db5abab7ef6042c4c83f09 --- /dev/null +++ b/apps/backend/src/services/KnowledgeAcquisitionService.ts @@ -0,0 +1,141 @@ +import { neo4jService } from '../database/Neo4jService.js'; +import { neuralStream, NeuralEvent } from './NeuralStream.js'; +import { metricsService } from './MetricsService.js'; + +export interface KnowledgePacket { + title: string; + content: string; + source: string; + category: string; + tags: string[]; +} + +class KnowledgeAcquisitionService { + private static instance: KnowledgeAcquisitionService; + + private constructor() { + console.log('🧠 [KnowledgeAcquisition] Cortex Ingestor Online.'); + this.setupNeuralListeners(); + } + + public static getInstance(): KnowledgeAcquisitionService { + if (!KnowledgeAcquisitionService.instance) { + KnowledgeAcquisitionService.instance = new KnowledgeAcquisitionService(); + } + return KnowledgeAcquisitionService.instance; + } + + /** + * Lytter på nervesystemet efter ny viden fra OmniHarvester eller andre agenter + */ + private setupNeuralListeners() { + // Når OmniHarvester har fundet noget (SYSTEM_HEALED med context 'ActiveLearning') + // eller når vi manuelt pusher viden. 
+ neuralStream.on('KNOWLEDGE_INGEST_REQ', async (event: NeuralEvent) => { + console.log('📥 [Acquisition] Receiving knowledge stream...', event.payload.title); + await this.ingestKnowledge(event.payload as unknown as KnowledgePacket); + }); + } + + /** + * KERNE-LOGIK: Konverterer rå tekst til Graf-Struktur + * Dette er "Memory Consolidation" processen. + */ + public async ingestKnowledge(packet: KnowledgePacket): Promise { + const startTime = Date.now(); + + try { + // 1. Opret selve videns-noden (Fact / Document) + // Vi bruger MERGE for at undgå duplikater baseret på titel/source + const cypher = ` + MERGE (k:Knowledge {title: $title}) + SET k.content = $content, + k.source = $source, + k.category = $category, + k.ingestedAt = datetime(), + k.hash = $hash + + // 2. Opret kategori-struktur + MERGE (c:Category {name: $category}) + MERGE (k)-[:BELONGS_TO]->(c) + + // 3. Auto-Tagging (Opretter tags og linker dem) + FOREACH (tagName IN $tags | + MERGE (t:Tag {name: tagName}) + MERGE (k)-[:TAGGED_WITH]->(t) + ) + `; + + // Simpel hash for integritet + const hash = Buffer.from(packet.title + packet.source).toString('base64'); + + await neo4jService.runQuery(cypher, { + title: packet.title, + content: packet.content, + source: packet.source, + category: packet.category || 'General', + tags: packet.tags || [], + hash + }); + + // 4. THE MAGIC: Semantic Auto-Linking + // Vi søger efter eksisterende noder, der nævnes i den nye tekst, og linker dem. + await this.createSemanticLinks(packet.title, packet.content); + + // 5. 
Metrics & Feedback + const duration = Date.now() - startTime; + metricsService.incrementCounter('knowledge_ingested'); + console.log(`✅ [Acquisition] Absorbed: "${packet.title}" in ${duration}ms`); + + neuralStream.emitEvent('SYSTEM_HEALED', 'LOW', { + action: 'MEMORY_CONSOLIDATED', + target: packet.title + }, 'KnowledgeAcquisition'); + + return true; + + } catch (error) { + console.error('❌ [Acquisition] Failed to ingest:', error); + return false; + } + } + + /** + * Finder andre noder i grafen, der nævnes i denne tekst, og skaber relationer. + * Dette gør grafen "tættere" og klogere over tid. + */ + private async createSemanticLinks(nodeTitle: string, content: string) { + // Find noder (Personer, Tech, Threats) hvis navne optræder i den nye tekst + // Undgå at linke til sig selv + const linkCypher = ` + MATCH (k:Knowledge {title: $title}) + MATCH (other) + WHERE other <> k + AND (other:Person OR other:Technology OR other:Threat OR other:Organization) + AND size(other.name) > 3 // Ignorer støj + AND $content CONTAINS other.name + MERGE (k)-[:MENTIONS]->(other) + RETURN count(other) as links + `; + + const result = await neo4jService.runQuery(linkCypher, { + title: nodeTitle, + content: content + }); + + const linksCreated = (result[0]?.links as any)?.toNumber?.() || 0; + if (linksCreated > 0) { + console.log(`🔗 [Acquisition] Auto-linked "${nodeTitle}" to ${linksCreated} existing concepts.`); + } + } + + // Stubs for backward compatibility + public async acquire(params: any): Promise { return { success: false, message: 'Deprecated' }; } + public async batchAcquire(params: any): Promise { return []; } + public async semanticSearch(query: string, limit: number): Promise { return []; } + public async getVectorStats(): Promise { return { totalRecords: 0 }; } + public async acquireFromTargets(ids?: string[]): Promise { return []; } + public async acquireSingleTarget(id: string): Promise { return null; } +} + +export const knowledgeAcquisition = 
KnowledgeAcquisitionService.getInstance(); diff --git a/apps/backend/src/services/MetricsService.ts b/apps/backend/src/services/MetricsService.ts new file mode 100644 index 0000000000000000000000000000000000000000..4d7c8ae0d81ab9d5b7d3e47c4b82b54285a0c3a3 --- /dev/null +++ b/apps/backend/src/services/MetricsService.ts @@ -0,0 +1,129 @@ +/** + * MetricsService - Dashboard & Monitoring Integration + * + * Tracks system health metrics for: + * - Self-healing events + * - Database connections + * - API performance + * - Error rates + */ + +interface MetricLabels { + [key: string]: string; +} + +interface Metric { + name: string; + value: number; + labels: MetricLabels; + timestamp: number; +} + +export class MetricsService { + private counters: Map = new Map(); + private gauges: Map = new Map(); + private historyBuffer: Metric[] = []; + private maxHistorySize = 1000; + + /** + * Increment a counter metric + */ + async incrementCounter(name: string, labels: MetricLabels = {}): Promise { + const key = this.buildKey(name, labels); + const current = this.counters.get(key) || 0; + this.counters.set(key, current + 1); + + this.recordToHistory({ + name, + value: current + 1, + labels, + timestamp: Date.now() + }); + + // Log for debugging + console.log(`📊 [Metrics] ${name}: ${current + 1}`, labels); + } + + /** + * Set a gauge metric (point-in-time value) + */ + async setGauge(name: string, value: number, labels: MetricLabels = {}): Promise { + const key = this.buildKey(name, labels); + this.gauges.set(key, value); + + this.recordToHistory({ + name, + value, + labels, + timestamp: Date.now() + }); + } + + /** + * Get current value of a counter + */ + getCounter(name: string, labels: MetricLabels = {}): number { + const key = this.buildKey(name, labels); + return this.counters.get(key) || 0; + } + + /** + * Get current value of a gauge + */ + getGauge(name: string, labels: MetricLabels = {}): number { + const key = this.buildKey(name, labels); + return this.gauges.get(key) || 
0; + } + + /** + * Get all metrics for dashboard + */ + getAllMetrics(): { counters: Record; gauges: Record } { + return { + counters: Object.fromEntries(this.counters), + gauges: Object.fromEntries(this.gauges) + }; + } + + /** + * Get recent metric history + */ + getHistory(limit: number = 100): Metric[] { + return this.historyBuffer.slice(-limit); + } + + /** + * Export metrics in Prometheus format (for Grafana) + */ + toPrometheusFormat(): string { + const lines: string[] = []; + + for (const [key, value] of this.counters) { + lines.push(`widgetdc_${key.replace(/[^a-zA-Z0-9_]/g, '_')} ${value}`); + } + + for (const [key, value] of this.gauges) { + lines.push(`widgetdc_${key.replace(/[^a-zA-Z0-9_]/g, '_')} ${value}`); + } + + return lines.join('\n'); + } + + private buildKey(name: string, labels: MetricLabels): string { + const labelStr = Object.entries(labels) + .sort(([a], [b]) => a.localeCompare(b)) + .map(([k, v]) => `${k}="${v}"`) + .join(','); + return labelStr ? `${name}{${labelStr}}` : name; + } + + private recordToHistory(metric: Metric): void { + this.historyBuffer.push(metric); + if (this.historyBuffer.length > this.maxHistorySize) { + this.historyBuffer.shift(); + } + } +} + +// Singleton for global access +export const metricsService = new MetricsService(); diff --git a/apps/backend/src/services/MotorCortex.ts b/apps/backend/src/services/MotorCortex.ts new file mode 100644 index 0000000000000000000000000000000000000000..4e9f81790966e677f73233952300673068b6b74e --- /dev/null +++ b/apps/backend/src/services/MotorCortex.ts @@ -0,0 +1,635 @@ +/** + * ╔═══════════════════════════════════════════════════════════════════════════╗ + * ║ MOTOR CORTEX SERVICE ║ + * ║═══════════════════════════════════════════════════════════════════════════║ + * ║ The system's "hands" - executes actions in the real world ║ + * ║ Git operations, file management, deployments, shell commands ║ + * ║ All actions require approval workflow for safety ║ + * 
╚═══════════════════════════════════════════════════════════════════════════╝ + */ + +import { exec } from 'child_process'; +import { promisify } from 'util'; +import * as fs from 'fs/promises'; +import * as path from 'path'; +import { neo4jAdapter } from '../adapters/Neo4jAdapter.js'; +import { v4 as uuidv4 } from 'uuid'; + +const execAsync = promisify(exec); + +// ═══════════════════════════════════════════════════════════════════════════ +// Types +// ═══════════════════════════════════════════════════════════════════════════ + +export type ActionType = + | 'GIT_COMMIT' + | 'GIT_PUSH' + | 'GIT_BRANCH' + | 'GIT_MERGE' + | 'FILE_CREATE' + | 'FILE_MODIFY' + | 'FILE_DELETE' + | 'SHELL_COMMAND' + | 'NPM_INSTALL' + | 'NPM_RUN' + | 'DEPLOY' + | 'RESTART_SERVICE'; + +export type ActionStatus = 'PENDING' | 'APPROVED' | 'REJECTED' | 'EXECUTING' | 'COMPLETED' | 'FAILED'; + +export interface ActionRequest { + id: string; + type: ActionType; + description: string; + command?: string; + targetPath?: string; + content?: string; + params?: Record; + requestedBy: string; + requestedAt: string; + status: ActionStatus; + requiresApproval: boolean; + approvedBy?: string; + approvedAt?: string; + executedAt?: string; + result?: ActionResult; + riskLevel: 'LOW' | 'MEDIUM' | 'HIGH' | 'CRITICAL'; +} + +export interface ActionResult { + success: boolean; + output?: string; + error?: string; + duration_ms: number; + artifacts?: string[]; +} + +// ═══════════════════════════════════════════════════════════════════════════ +// Motor Cortex Service +// ═══════════════════════════════════════════════════════════════════════════ + +class MotorCortexService { + private static instance: MotorCortexService; + private actionQueue: Map = new Map(); + private actionHistory: ActionRequest[] = []; + + // Safety configuration + private readonly AUTO_APPROVE_RISK_LEVELS: ActionRequest['riskLevel'][] = ['LOW']; + private readonly BLOCKED_COMMANDS = [ + /rm\s+-rf\s+\//, // rm -rf / + /mkfs/, // format 
disk + /dd\s+if=/, // disk destroyer + /:(){ :|:& };:/, // fork bomb + />\s*\/dev\/sd/, // write to disk device + /shutdown/, // system shutdown + /reboot/, // system reboot + /init\s+0/, // halt system + ]; + + private readonly PROJECT_ROOT = process.cwd(); + + private constructor() { + this.loadHistoryFromNeo4j(); + } + + public static getInstance(): MotorCortexService { + if (!MotorCortexService.instance) { + MotorCortexService.instance = new MotorCortexService(); + } + return MotorCortexService.instance; + } + + // ═══════════════════════════════════════════════════════════════════════ + // Action Request & Approval + // ═══════════════════════════════════════════════════════════════════════ + + /** + * Request an action to be executed + */ + public async requestAction(params: { + type: ActionType; + description: string; + command?: string; + targetPath?: string; + content?: string; + params?: Record; + requestedBy: string; + }): Promise { + const riskLevel = this.assessRisk(params); + + const action: ActionRequest = { + id: `action-${uuidv4()}`, + type: params.type, + description: params.description, + command: params.command, + targetPath: params.targetPath, + content: params.content, + params: params.params, + requestedBy: params.requestedBy, + requestedAt: new Date().toISOString(), + status: 'PENDING', + requiresApproval: !this.AUTO_APPROVE_RISK_LEVELS.includes(riskLevel), + riskLevel + }; + + // Check for blocked commands + if (params.command && this.isBlockedCommand(params.command)) { + action.status = 'REJECTED'; + action.result = { + success: false, + error: 'Command blocked for safety reasons', + duration_ms: 0 + }; + console.error(`[MotorCortex] 🚫 Blocked dangerous command: ${params.command}`); + return action; + } + + this.actionQueue.set(action.id, action); + + // Auto-approve low-risk actions + if (!action.requiresApproval) { + return await this.approveAndExecute(action.id, 'SYSTEM_AUTO'); + } + + // Persist pending action + await 
this.persistAction(action); + + console.error(`[MotorCortex] 📋 Action queued (${action.riskLevel}): ${action.description}`); + + return action; + } + + /** + * Approve a pending action + */ + public async approveAction(actionId: string, approvedBy: string): Promise { + const action = this.actionQueue.get(actionId); + if (!action || action.status !== 'PENDING') { + return null; + } + + return await this.approveAndExecute(actionId, approvedBy); + } + + /** + * Reject a pending action + */ + public rejectAction(actionId: string, rejectedBy: string): ActionRequest | null { + const action = this.actionQueue.get(actionId); + if (!action || action.status !== 'PENDING') { + return null; + } + + action.status = 'REJECTED'; + action.approvedBy = rejectedBy; + action.approvedAt = new Date().toISOString(); + + this.actionHistory.push(action); + this.actionQueue.delete(actionId); + + console.error(`[MotorCortex] ❌ Action rejected: ${action.description}`); + + return action; + } + + /** + * Approve and execute action + */ + private async approveAndExecute(actionId: string, approvedBy: string): Promise { + const action = this.actionQueue.get(actionId)!; + + action.status = 'APPROVED'; + action.approvedBy = approvedBy; + action.approvedAt = new Date().toISOString(); + + console.error(`[MotorCortex] ✅ Action approved: ${action.description}`); + + return await this.executeAction(action); + } + + // ═══════════════════════════════════════════════════════════════════════ + // Action Execution + // ═══════════════════════════════════════════════════════════════════════ + + /** + * Execute an approved action + */ + private async executeAction(action: ActionRequest): Promise { + action.status = 'EXECUTING'; + action.executedAt = new Date().toISOString(); + const startTime = Date.now(); + + try { + let result: ActionResult; + + switch (action.type) { + case 'GIT_COMMIT': + result = await this.executeGitCommit(action); + break; + case 'GIT_PUSH': + result = await 
this.executeGitPush(action); + break; + case 'GIT_BRANCH': + result = await this.executeGitBranch(action); + break; + case 'FILE_CREATE': + result = await this.executeFileCreate(action); + break; + case 'FILE_MODIFY': + result = await this.executeFileModify(action); + break; + case 'FILE_DELETE': + result = await this.executeFileDelete(action); + break; + case 'SHELL_COMMAND': + result = await this.executeShellCommand(action); + break; + case 'NPM_INSTALL': + result = await this.executeNpmInstall(action); + break; + case 'NPM_RUN': + result = await this.executeNpmRun(action); + break; + default: + result = { + success: false, + error: `Unsupported action type: ${action.type}`, + duration_ms: Date.now() - startTime + }; + } + + action.result = result; + action.status = result.success ? 'COMPLETED' : 'FAILED'; + + } catch (error: any) { + action.status = 'FAILED'; + action.result = { + success: false, + error: error.message, + duration_ms: Date.now() - startTime + }; + } + + // Move to history + this.actionHistory.push(action); + this.actionQueue.delete(action.id); + + // Persist result + await this.persistAction(action); + + const emoji = action.status === 'COMPLETED' ? 
'✅' : '❌'; + console.error(`[MotorCortex] ${emoji} Action ${action.status}: ${action.description}`); + + return action; + } + + // ═══════════════════════════════════════════════════════════════════════ + // Specific Action Handlers + // ═══════════════════════════════════════════════════════════════════════ + + private async executeGitCommit(action: ActionRequest): Promise { + const message = action.params?.message as string || action.description; + const startTime = Date.now(); + + try { + // Stage all changes + await execAsync('git add -A', { cwd: this.PROJECT_ROOT }); + + // Commit + const { stdout, stderr } = await execAsync( + `git commit -m "${message.replace(/"/g, '\\"')}"`, + { cwd: this.PROJECT_ROOT } + ); + + return { + success: true, + output: stdout || stderr, + duration_ms: Date.now() - startTime + }; + } catch (error: any) { + return { + success: false, + error: error.message, + duration_ms: Date.now() - startTime + }; + } + } + + private async executeGitPush(action: ActionRequest): Promise { + const branch = action.params?.branch as string || 'main'; + const startTime = Date.now(); + + try { + const { stdout, stderr } = await execAsync( + `git push origin ${branch}`, + { cwd: this.PROJECT_ROOT } + ); + + return { + success: true, + output: stdout || stderr, + duration_ms: Date.now() - startTime + }; + } catch (error: any) { + return { + success: false, + error: error.message, + duration_ms: Date.now() - startTime + }; + } + } + + private async executeGitBranch(action: ActionRequest): Promise { + const branchName = action.params?.branchName as string; + const checkout = action.params?.checkout as boolean; + const startTime = Date.now(); + + try { + const command = checkout + ? 
`git checkout -b ${branchName}` + : `git branch ${branchName}`; + + const { stdout, stderr } = await execAsync(command, { cwd: this.PROJECT_ROOT }); + + return { + success: true, + output: stdout || stderr, + duration_ms: Date.now() - startTime + }; + } catch (error: any) { + return { + success: false, + error: error.message, + duration_ms: Date.now() - startTime + }; + } + } + + private async executeFileCreate(action: ActionRequest): Promise { + const startTime = Date.now(); + const targetPath = action.targetPath!; + const content = action.content || ''; + + try { + // Ensure directory exists + await fs.mkdir(path.dirname(targetPath), { recursive: true }); + + // Create file + await fs.writeFile(targetPath, content, 'utf-8'); + + return { + success: true, + output: `Created: ${targetPath}`, + duration_ms: Date.now() - startTime, + artifacts: [targetPath] + }; + } catch (error: any) { + return { + success: false, + error: error.message, + duration_ms: Date.now() - startTime + }; + } + } + + private async executeFileModify(action: ActionRequest): Promise { + const startTime = Date.now(); + const targetPath = action.targetPath!; + const content = action.content!; + + try { + await fs.writeFile(targetPath, content, 'utf-8'); + + return { + success: true, + output: `Modified: ${targetPath}`, + duration_ms: Date.now() - startTime, + artifacts: [targetPath] + }; + } catch (error: any) { + return { + success: false, + error: error.message, + duration_ms: Date.now() - startTime + }; + } + } + + private async executeFileDelete(action: ActionRequest): Promise { + const startTime = Date.now(); + const targetPath = action.targetPath!; + + try { + await fs.unlink(targetPath); + + return { + success: true, + output: `Deleted: ${targetPath}`, + duration_ms: Date.now() - startTime + }; + } catch (error: any) { + return { + success: false, + error: error.message, + duration_ms: Date.now() - startTime + }; + } + } + + private async executeShellCommand(action: ActionRequest): Promise 
{ + const startTime = Date.now(); + const command = action.command!; + + try { + const { stdout, stderr } = await execAsync(command, { + cwd: this.PROJECT_ROOT, + timeout: 60000 // 1 minute timeout + }); + + return { + success: true, + output: stdout || stderr, + duration_ms: Date.now() - startTime + }; + } catch (error: any) { + return { + success: false, + error: error.message, + output: error.stdout || error.stderr, + duration_ms: Date.now() - startTime + }; + } + } + + private async executeNpmInstall(action: ActionRequest): Promise { + const startTime = Date.now(); + const packageName = action.params?.package as string; + + try { + const command = packageName + ? `npm install ${packageName}` + : 'npm install'; + + const { stdout, stderr } = await execAsync(command, { + cwd: this.PROJECT_ROOT, + timeout: 300000 // 5 minute timeout + }); + + return { + success: true, + output: stdout || stderr, + duration_ms: Date.now() - startTime + }; + } catch (error: any) { + return { + success: false, + error: error.message, + duration_ms: Date.now() - startTime + }; + } + } + + private async executeNpmRun(action: ActionRequest): Promise { + const startTime = Date.now(); + const script = action.params?.script as string; + + try { + const { stdout, stderr } = await execAsync(`npm run ${script}`, { + cwd: this.PROJECT_ROOT, + timeout: 300000 // 5 minute timeout + }); + + return { + success: true, + output: stdout || stderr, + duration_ms: Date.now() - startTime + }; + } catch (error: any) { + return { + success: false, + error: error.message, + duration_ms: Date.now() - startTime + }; + } + } + + // ═══════════════════════════════════════════════════════════════════════ + // Risk Assessment + // ═══════════════════════════════════════════════════════════════════════ + + private assessRisk(params: { type: ActionType; command?: string; targetPath?: string }): ActionRequest['riskLevel'] { + // File deletion is high risk + if (params.type === 'FILE_DELETE') return 'HIGH'; + + // 
Git push is medium risk + if (params.type === 'GIT_PUSH' || params.type === 'GIT_MERGE') return 'MEDIUM'; + + // Deployment is critical + if (params.type === 'DEPLOY' || params.type === 'RESTART_SERVICE') return 'CRITICAL'; + + // Shell commands need careful assessment + if (params.type === 'SHELL_COMMAND' && params.command) { + if (/sudo|rm|chmod|chown/i.test(params.command)) return 'HIGH'; + if (/curl|wget|npm|pip/i.test(params.command)) return 'MEDIUM'; + } + + // File creation/modification outside project + if (params.targetPath && !params.targetPath.startsWith(this.PROJECT_ROOT)) { + return 'HIGH'; + } + + return 'LOW'; + } + + private isBlockedCommand(command: string): boolean { + return this.BLOCKED_COMMANDS.some(pattern => pattern.test(command)); + } + + // ═══════════════════════════════════════════════════════════════════════ + // Query Functions + // ═══════════════════════════════════════════════════════════════════════ + + public getPendingActions(): ActionRequest[] { + return Array.from(this.actionQueue.values()).filter(a => a.status === 'PENDING'); + } + + public getActionHistory(limit: number = 50): ActionRequest[] { + return this.actionHistory.slice(-limit); + } + + public getAction(actionId: string): ActionRequest | undefined { + return this.actionQueue.get(actionId) || this.actionHistory.find(a => a.id === actionId); + } + + public getStatus(): { + pendingActions: number; + executingActions: number; + completedToday: number; + failedToday: number; + } { + const today = new Date().toISOString().split('T')[0]; + const todayActions = this.actionHistory.filter(a => + a.executedAt?.startsWith(today) + ); + + return { + pendingActions: this.getPendingActions().length, + executingActions: Array.from(this.actionQueue.values()).filter(a => a.status === 'EXECUTING').length, + completedToday: todayActions.filter(a => a.status === 'COMPLETED').length, + failedToday: todayActions.filter(a => a.status === 'FAILED').length + }; + } + + // 
═══════════════════════════════════════════════════════════════════════ + // Persistence + // ═══════════════════════════════════════════════════════════════════════ + + private async persistAction(action: ActionRequest): Promise { + try { + await neo4jAdapter.executeQuery(` + MERGE (a:Action {id: $id}) + SET a.type = $type, + a.description = $description, + a.status = $status, + a.riskLevel = $riskLevel, + a.requestedBy = $requestedBy, + a.requestedAt = $requestedAt, + a.approvedBy = $approvedBy, + a.executedAt = $executedAt, + a.success = $success + `, { + id: action.id, + type: action.type, + description: action.description, + status: action.status, + riskLevel: action.riskLevel, + requestedBy: action.requestedBy, + requestedAt: action.requestedAt, + approvedBy: action.approvedBy || '', + executedAt: action.executedAt || '', + success: action.result?.success ?? null + }); + } catch (error) { + console.warn('[MotorCortex] Failed to persist action:', error); + } + } + + private async loadHistoryFromNeo4j(): Promise { + try { + const results = await neo4jAdapter.executeQuery(` + MATCH (a:Action) + WHERE a.executedAt IS NOT NULL + RETURN a + ORDER BY a.executedAt DESC + LIMIT 100 + `); + + // Load into history (simplified) + console.error(`[MotorCortex] 🔄 Loaded ${results.length} actions from history`); + } catch (error) { + console.warn('[MotorCortex] Failed to load history:', error); + } + } +} + +export const motorCortex = MotorCortexService.getInstance(); diff --git a/apps/backend/src/services/Neo4jService.ts b/apps/backend/src/services/Neo4jService.ts new file mode 100644 index 0000000000000000000000000000000000000000..ffa0a6436b5c2bfbb0987e7fcd4f639f22d48b4b --- /dev/null +++ b/apps/backend/src/services/Neo4jService.ts @@ -0,0 +1,177 @@ +import neo4j, { Driver, Session, SessionConfig } from 'neo4j-driver'; + +/** + * Neo4jService - Hybrid Cloud/Local Graph Database Connection + * + * Automatically switches between: + * - LOCAL (dev): bolt://localhost:7687 or 
Docker neo4j:7687 + * - CLOUD (prod): neo4j+s://.databases.neo4j.io (AuraDB) + * + * Features: + * - Self-healing with automatic reconnection + * - Connection pooling + * - Health checks + * - Singleton pattern + */ +export class Neo4jService { + private driver: Driver | null = null; + private isConnecting: boolean = false; + private reconnectAttempts: number = 0; + private maxReconnectAttempts: number = 10; + private reconnectDelay: number = 5000; + + constructor() { + this.connect(); + } + + /** + * Determines connection URI based on environment + */ + private getConnectionConfig(): { uri: string; user: string; password: string } { + const isProduction = process.env.NODE_ENV === 'production'; + const hasCloudUri = process.env.NEO4J_URI?.includes('neo4j.io'); + + // Cloud (AuraDB) - when explicitly configured or in production with cloud URI + if (hasCloudUri) { + console.log('🌩️ Neo4j Mode: CLOUD (AuraDB)'); + return { + uri: process.env.NEO4J_URI!, + user: process.env.NEO4J_USER || 'neo4j', + password: process.env.NEO4J_PASSWORD || '' + }; + } + + // Local Docker (default for dev) + console.log('🐳 Neo4j Mode: LOCAL (Docker)'); + return { + uri: process.env.NEO4J_URI || 'bolt://localhost:7687', + user: process.env.NEO4J_USER || 'neo4j', + password: process.env.NEO4J_PASSWORD || 'password' + }; + } + + /** + * Initializes connection with self-healing retry logic + */ + private async connect(): Promise { + if (this.driver || this.isConnecting) return; + + this.isConnecting = true; + const config = this.getConnectionConfig(); + + try { + console.log(`🔌 Connecting to Neural Graph at ${config.uri}...`); + + this.driver = neo4j.driver( + config.uri, + neo4j.auth.basic(config.user, config.password), + { + maxConnectionLifetime: 3 * 60 * 60 * 1000, // 3 hours + maxConnectionPoolSize: 50, + connectionAcquisitionTimeout: 10000, // 10 seconds + connectionTimeout: 30000, // 30 seconds + } + ); + + // Verify connectivity + await this.driver.verifyConnectivity(); + + 
console.log('🟢 NEURAL CORTEX CONNECTED - Neo4j is Online'); + this.reconnectAttempts = 0; + this.isConnecting = false; + + } catch (error: any) { + console.error('🔴 Failed to connect to Neural Graph:', error.message); + this.driver = null; + this.isConnecting = false; + + // Self-healing: Retry with exponential backoff + if (this.reconnectAttempts < this.maxReconnectAttempts) { + this.reconnectAttempts++; + const delay = this.reconnectDelay * Math.pow(1.5, this.reconnectAttempts - 1); + console.log(`⏳ Retry attempt ${this.reconnectAttempts}/${this.maxReconnectAttempts} in ${delay/1000}s...`); + setTimeout(() => this.connect(), delay); + } else { + console.error('💀 Max reconnection attempts reached. Neural Graph is OFFLINE.'); + } + } + } + + /** + * Get a session for graph operations + * Triggers reconnect if disconnected + */ + public getSession(config?: SessionConfig): Session { + if (!this.driver) { + this.connect(); + throw new Error('Neural Graph is currently offline. Reconnection in progress...'); + } + return this.driver.session(config); + } + + /** + * Execute a Cypher query with automatic session management + */ + public async query(cypher: string, params: Record = {}): Promise { + const session = this.getSession(); + try { + const result = await session.run(cypher, params); + return result.records.map(record => record.toObject() as T); + } finally { + await session.close(); + } + } + + /** + * Execute a write transaction + */ + public async write(cypher: string, params: Record = {}): Promise { + const session = this.getSession(); + try { + const result = await session.executeWrite(tx => tx.run(cypher, params)); + return result.records.map(record => record.toObject() as T); + } finally { + await session.close(); + } + } + + /** + * Health check for monitoring + */ + public async checkHealth(): Promise<{ status: string; mode: string; latency?: number }> { + if (!this.driver) { + return { status: 'offline', mode: 'unknown' }; + } + + const start = 
Date.now(); + try { + await this.driver.verifyConnectivity(); + const latency = Date.now() - start; + const mode = process.env.NEO4J_URI?.includes('neo4j.io') ? 'cloud' : 'local'; + return { status: 'online', mode, latency }; + } catch (e) { + return { status: 'error', mode: 'unknown' }; + } + } + + /** + * Graceful shutdown + */ + public async disconnect(): Promise { + if (this.driver) { + await this.driver.close(); + this.driver = null; + console.log('🔌 Neural Graph connection closed.'); + } + } + + /** + * Check if connected + */ + public isConnected(): boolean { + return this.driver !== null; + } +} + +// Singleton instance +export const neo4jService = new Neo4jService(); diff --git a/apps/backend/src/services/NeuralChat/AgentCapabilities.ts b/apps/backend/src/services/NeuralChat/AgentCapabilities.ts new file mode 100644 index 0000000000000000000000000000000000000000..6786884be5e5e53012f5872ab4fd1413d9953fe4 --- /dev/null +++ b/apps/backend/src/services/NeuralChat/AgentCapabilities.ts @@ -0,0 +1,241 @@ +/** + * ╔═══════════════════════════════════════════════════════════════════════════╗ + * ║ AGENT CAPABILITY REGISTRY ║ + * ║═══════════════════════════════════════════════════════════════════════════║ + * ║ Hver agent registrerer sine evner - andre kan kalde dem ║ + * ║ ║ + * ║ ARKITEKTUR: ║ + * ║ ┌─────────────────────────────────────────────────────────────────┐ ║ + * ║ │ COMMUNICATION LAYERS │ ║ + * ║ ├─────────────────────────────────────────────────────────────────┤ ║ + * ║ │ BLACKBOARD (Async) │ NEURAL CHAT (Sync) │ ║ + * ║ │ • Handovers │ • Real-time discussion │ ║ + * ║ │ • Formal tasks │ • Quick questions │ ║ + * ║ │ • Offline messages │ • Status updates │ ║ + * ║ │ • File: DropZone/agents/ │ • Neo4j persistence │ ║ + * ║ ├─────────────────────────────────────────────────────────────────┤ ║ + * ║ │ CAPABILITY LAYER │ ║ + * ║ │ • invoke_capability(agent, capability, params) │ ║ + * ║ │ • Agents expose their strengths as callable functions │ ║ + * ║ 
└─────────────────────────────────────────────────────────────────┘ ║ + * ╚═══════════════════════════════════════════════════════════════════════════╝ + */ + +import { AgentId } from './types.js'; + +export interface AgentCapability { + id: string; + name: string; + description: string; + agent: AgentId; + category: 'code' | 'research' | 'analysis' | 'creative' | 'integration' | 'decision'; + inputSchema?: Record; + outputSchema?: Record; + costEstimate?: 'low' | 'medium' | 'high'; // Token/tid cost + reliability?: number; // 0-1 score +} + +export interface CapabilityRequest { + requestId: string; + fromAgent: AgentId; + toAgent: AgentId; + capability: string; + params: Record; + priority: 'low' | 'normal' | 'high' | 'critical'; + timestamp: string; + deadline?: string; +} + +export interface CapabilityResponse { + requestId: string; + success: boolean; + result?: any; + error?: string; + executionTimeMs?: number; + timestamp: string; +} + +// ═══════════════════════════════════════════════════════════════════════════ +// AGENT CAPABILITY DEFINITIONS +// ═══════════════════════════════════════════════════════════════════════════ + +export const AGENT_CAPABILITIES: Record = { + claude: [ + { + id: 'claude.architecture', + name: 'System Architecture Design', + description: 'Design complex system architectures, API designs, database schemas', + agent: 'claude', + category: 'code', + costEstimate: 'medium', + reliability: 0.95 + }, + { + id: 'claude.code_review', + name: 'Deep Code Review', + description: 'Thorough code review with security, performance, and maintainability focus', + agent: 'claude', + category: 'analysis', + costEstimate: 'medium', + reliability: 0.9 + }, + { + id: 'claude.mcp_tools', + name: 'MCP Tool Execution', + description: 'Execute MCP tools, file operations, Neo4j queries', + agent: 'claude', + category: 'integration', + costEstimate: 'low', + reliability: 0.98 + }, + { + id: 'claude.refactor', + name: 'Code Refactoring', + description: 
'Refactor code for better structure, readability, and performance', + agent: 'claude', + category: 'code', + costEstimate: 'high', + reliability: 0.85 + } + ], + + 'claude-cli': [ + { + id: 'claude-cli.terminal', + name: 'Terminal Command Execution', + description: 'Execute shell commands, scripts, build processes directly', + agent: 'claude-cli', + category: 'integration', + costEstimate: 'low', + reliability: 0.95 + }, + { + id: 'claude-cli.git', + name: 'Git Operations', + description: 'Git commits, branches, merges, conflict resolution', + agent: 'claude-cli', + category: 'integration', + costEstimate: 'low', + reliability: 0.9 + }, + { + id: 'claude-cli.rapid_prototype', + name: 'Rapid Prototyping', + description: 'Quick file creation, scaffolding, boilerplate generation', + agent: 'claude-cli', + category: 'code', + costEstimate: 'low', + reliability: 0.9 + }, + { + id: 'claude-cli.build_test', + name: 'Build & Test Automation', + description: 'Run builds, tests, linting, type checking', + agent: 'claude-cli', + category: 'integration', + costEstimate: 'low', + reliability: 0.95 + } + ], + + gemini: [ + { + id: 'gemini.research', + name: 'Deep Research', + description: 'Comprehensive research with web search, document analysis', + agent: 'gemini', + category: 'research', + costEstimate: 'medium', + reliability: 0.9 + }, + { + id: 'gemini.multimodal', + name: 'Multimodal Analysis', + description: 'Analyze images, diagrams, screenshots alongside text', + agent: 'gemini', + category: 'analysis', + costEstimate: 'medium', + reliability: 0.85 + }, + { + id: 'gemini.large_context', + name: 'Large Context Processing', + description: 'Process very large documents or codebases (1M+ tokens)', + agent: 'gemini', + category: 'analysis', + costEstimate: 'high', + reliability: 0.8 + }, + { + id: 'gemini.project_management', + name: 'Project Management', + description: 'Sprint planning, roadmap creation, task prioritization', + agent: 'gemini', + category: 'decision', + 
costEstimate: 'low', + reliability: 0.9 + } + ], + + deepseek: [ + { + id: 'deepseek.rapid_code', + name: 'Rapid Code Generation', + description: 'Fast code generation for well-defined tasks', + agent: 'deepseek', + category: 'code', + costEstimate: 'low', + reliability: 0.8 + }, + { + id: 'deepseek.math', + name: 'Mathematical Analysis', + description: 'Complex mathematical computations and proofs', + agent: 'deepseek', + category: 'analysis', + costEstimate: 'low', + reliability: 0.9 + }, + { + id: 'deepseek.tests', + name: 'Test Generation', + description: 'Generate unit tests, integration tests, test data', + agent: 'deepseek', + category: 'code', + costEstimate: 'low', + reliability: 0.85 + } + ], + + clak: [ + { + id: 'clak.decision', + name: 'Executive Decision', + description: 'Make final decisions on direction, priorities, approvals', + agent: 'clak', + category: 'decision', + costEstimate: 'low', + reliability: 1.0 + }, + { + id: 'clak.domain_knowledge', + name: 'Domain Knowledge', + description: 'TDC Erhverv context, business requirements, user needs', + agent: 'clak', + category: 'research', + costEstimate: 'low', + reliability: 1.0 + }, + { + id: 'clak.approval', + name: 'Security/Deploy Approval', + description: 'Approve sensitive operations, deployments, API keys', + agent: 'clak', + category: 'decision', + costEstimate: 'low', + reliability: 1.0 + } + ], + + system: [] // System agent has no capabilities (it's infrastructure) +}; diff --git a/apps/backend/src/services/NeuralChat/ApprovalGate.ts b/apps/backend/src/services/NeuralChat/ApprovalGate.ts new file mode 100644 index 0000000000000000000000000000000000000000..a8ec589175831a3b693bcb5cbb3a3d29831bfe1e --- /dev/null +++ b/apps/backend/src/services/NeuralChat/ApprovalGate.ts @@ -0,0 +1,196 @@ +/** + * ╔═══════════════════════════════════════════════════════════════════════════╗ + * ║ APPROVAL GATE - Claude as Delegate ║ + * 
║═══════════════════════════════════════════════════════════════════════════║ + * ║ Claude har fået delegeret godkendelsesautoritet fra CLAK ║ + * ║ ║ + * ║ APPROVAL FLOW: ║ + * ║ Gemini/DeepSeek → Claude (auto-review) → Execute ║ + * ║ ║ + * ║ ESCALATE TO CLAK ONLY FOR: ║ + * ║ • Production deployments ║ + * ║ • API key/credential changes ║ + * ║ • Budget/cost decisions ║ + * ║ • Security-sensitive operations ║ + * ╚═══════════════════════════════════════════════════════════════════════════╝ + */ + +import { AgentId } from './types.js'; + +export type ApprovalDecision = 'approved' | 'rejected' | 'escalate_to_clak'; + +export interface ApprovalResult { + decision: ApprovalDecision; + reason: string; + approvedBy: AgentId; + timestamp: string; + modifications?: string[]; // Claude kan modificere requests +} + +// Only these require CLAK personally +const ESCALATE_TO_CLAK = [ + 'production_deploy', + 'api_key_create', + 'credential_change', + 'budget_decision', + 'security_config', + 'delete_data', + 'external_integration' +]; + +class ApprovalGate { + private static instance: ApprovalGate; + + public static getInstance(): ApprovalGate { + if (!ApprovalGate.instance) { + ApprovalGate.instance = new ApprovalGate(); + } + return ApprovalGate.instance; + } + + /** + * Claude reviews and decides on behalf of CLAK + * Returns immediately - no waiting for human + */ + review(params: { + fromAgent: AgentId; + action: string; + description: string; + context?: Record; + }): ApprovalResult { + const { fromAgent, action, description } = params; + const timestamp = new Date().toISOString(); + + // CLAK's own requests are auto-approved + if (fromAgent === 'clak') { + return { + decision: 'approved', + reason: 'CLAK authority - auto-approved', + approvedBy: 'clak', + timestamp + }; + } + + // Check if needs escalation to CLAK + const needsEscalation = ESCALATE_TO_CLAK.some(e => + action.toLowerCase().includes(e) || + description.toLowerCase().includes(e) + ); + + if 
(needsEscalation) { + return { + decision: 'escalate_to_clak', + reason: `Action "${action}" requires CLAK's personal approval`, + approvedBy: 'claude', + timestamp + }; + } + + // Claude reviews based on reasonableness + const review = this.evaluateRequest(params); + + return { + decision: review.approved ? 'approved' : 'rejected', + reason: review.reason, + approvedBy: 'claude', + timestamp, + modifications: review.modifications + }; + } + + /** + * Claude's evaluation logic + */ + private evaluateRequest(params: { + fromAgent: AgentId; + action: string; + description: string; + context?: Record; + }): { approved: boolean; reason: string; modifications?: string[] } { + const { fromAgent, action, description } = params; + + // ════════════════════════════════════════════════════════════ + // APPROVAL RULES (Claude's delegated authority) + // ════════════════════════════════════════════════════════════ + + // 1. Research requests - always approved + if (action.includes('research') || action.includes('search') || action.includes('analyze')) { + return { + approved: true, + reason: 'Research/analysis tasks are pre-approved' + }; + } + + // 2. Code review/suggestions - approved + if (action.includes('review') || action.includes('suggest') || action.includes('recommend')) { + return { + approved: true, + reason: 'Review and recommendations are welcome' + }; + } + + // 3. Documentation - approved + if (action.includes('document') || action.includes('readme') || action.includes('comment')) { + return { + approved: true, + reason: 'Documentation improvements are pre-approved' + }; + } + + // 4. Questions/clarifications - approved + if (action.includes('question') || action.includes('clarify') || action.includes('ask')) { + return { + approved: true, + reason: 'Questions and clarifications are encouraged' + }; + } + + // 5. 
Status updates - approved + if (action.includes('status') || action.includes('update') || action.includes('report')) { + return { + approved: true, + reason: 'Status updates are pre-approved' + }; + } + + // 6. Code generation for review - approved with note + if (action.includes('generate') || action.includes('create') || action.includes('implement')) { + return { + approved: true, + reason: 'Code generation approved - will be reviewed before merge', + modifications: ['Output requires Claude code review before integration'] + }; + } + + // 7. Test generation - always approved + if (action.includes('test')) { + return { + approved: true, + reason: 'Test creation is always welcome' + }; + } + + // Default: Approve with logging + console.log(`⚠️ [ApprovalGate] New action type from ${fromAgent}: ${action}`); + return { + approved: true, + reason: `Approved by Claude (delegated authority) - action: ${action}`, + modifications: ['New action type - logged for pattern review'] + }; + } + + /** + * Quick check without full review + */ + canProceed(fromAgent: AgentId, action: string): boolean { + if (fromAgent === 'clak' || fromAgent === 'claude') return true; + + const needsEscalation = ESCALATE_TO_CLAK.some(e => + action.toLowerCase().includes(e) + ); + + return !needsEscalation; + } +} + +export const approvalGate = ApprovalGate.getInstance(); diff --git a/apps/backend/src/services/NeuralChat/CapabilityBroker.ts b/apps/backend/src/services/NeuralChat/CapabilityBroker.ts new file mode 100644 index 0000000000000000000000000000000000000000..6eaccc1c9281ffcadcb5337eac66f57610ba7252 --- /dev/null +++ b/apps/backend/src/services/NeuralChat/CapabilityBroker.ts @@ -0,0 +1,348 @@ +/** + * ╔═══════════════════════════════════════════════════════════════════════════╗ + * ║ CAPABILITY BROKER ║ + * ║═══════════════════════════════════════════════════════════════════════════║ + * ║ Routes capability requests mellem agenter ║ + * ║ Bruger BÅDE Blackboard (async) og Neural Chat (sync) ║ 
+ * ╚═══════════════════════════════════════════════════════════════════════════╝ + */ + +import { v4 as uuidv4 } from 'uuid'; +import * as fs from 'fs/promises'; +import * as path from 'path'; +import { neo4jAdapter } from '../../adapters/Neo4jAdapter.js'; +import { neuralChatService } from './ChatService.js'; +import { approvalGate } from './ApprovalGate.js'; +import { AgentId } from './types.js'; +import { + CapabilityRequest, + CapabilityResponse, + AGENT_CAPABILITIES, + AgentCapability +} from './AgentCapabilities.js'; +import { DROPZONE_PATH } from '../../config.js'; + +class CapabilityBroker { + private static instance: CapabilityBroker; + private pendingRequests: Map = new Map(); + + private constructor() {} + + public static getInstance(): CapabilityBroker { + if (!CapabilityBroker.instance) { + CapabilityBroker.instance = new CapabilityBroker(); + } + return CapabilityBroker.instance; + } + + /** + * Find den bedste agent til en given capability + */ + findBestAgent(capabilityType: string): AgentCapability | null { + for (const [agentId, capabilities] of Object.entries(AGENT_CAPABILITIES)) { + const match = capabilities.find(c => + c.id === capabilityType || + c.name.toLowerCase().includes(capabilityType.toLowerCase()) || + c.category === capabilityType + ); + if (match) return match; + } + return null; + } + + /** + * List alle capabilities for en agent + */ + getAgentCapabilities(agent: AgentId): AgentCapability[] { + return AGENT_CAPABILITIES[agent] || []; + } + + /** + * List ALLE capabilities på tværs af agenter + */ + getAllCapabilities(): AgentCapability[] { + return Object.values(AGENT_CAPABILITIES).flat(); + } + + /** + * Request en capability fra en anden agent + * CLAUDE REVIEWS ALL REQUESTS (delegeret fra CLAK) + */ + async requestCapability(params: { + fromAgent: AgentId; + toAgent: AgentId; + capability: string; + params: Record; + priority?: 'low' | 'normal' | 'high' | 'critical'; + deadline?: string; + }): Promise { + + // 
════════════════════════════════════════════════════════════ + // STEP 1: Claude reviews the request (delegated from CLAK) + // ════════════════════════════════════════════════════════════ + const approval = approvalGate.review({ + fromAgent: params.fromAgent, + action: params.capability, + description: `${params.fromAgent} wants to use ${params.capability} capability`, + context: params.params + }); + + console.log(`[ApprovalGate] ${params.fromAgent} -> ${params.capability}: ${approval.decision}`); + + // If escalation needed, notify and pause + if (approval.decision === 'escalate_to_clak') { + await neuralChatService.sendMessage({ + channel: 'alerts', + from: 'system', + body: `[ESCALATION REQUIRED]\n\nAgent: ${params.fromAgent}\nAction: ${params.capability}\nReason: ${approval.reason}\n\n@clak please review and decide.`, + type: 'alert', + priority: 'critical', + mentions: ['clak'] + }); + + throw new Error(`Action requires CLAK approval: ${approval.reason}`); + } + + // If rejected, notify and stop + if (approval.decision === 'rejected') { + await neuralChatService.sendMessage({ + channel: 'core-dev', + from: 'claude', + body: `[DENIED] Request Denied\n\nFrom: ${params.fromAgent}\nAction: ${params.capability}\nReason: ${approval.reason}`, + type: 'status', + priority: 'normal' + }); + + throw new Error(`Request denied by Claude: ${approval.reason}`); + } + + // ════════════════════════════════════════════════════════════ + // STEP 2: Approved - proceed with request + // ════════════════════════════════════════════════════════════ + const request: CapabilityRequest = { + requestId: `cap-${uuidv4()}`, + fromAgent: params.fromAgent, + toAgent: params.toAgent, + capability: params.capability, + params: params.params, + priority: params.priority || 'normal', + timestamp: new Date().toISOString(), + deadline: params.deadline + }; + + // 1. Gem i memory + this.pendingRequests.set(request.requestId, request); + + // 2. 
Gem i Blackboard (fil-baseret) for async pickup + await this.saveToBlackboard(request); + + // 3. Gem i Neo4j for persistence og queries + await this.persistToNeo4j(request); + + // 4. Send notifikation via Neural Chat (with approval note) + const modNote = approval.modifications?.length + ? `\n[Notes]: ${approval.modifications.join(', ')}` + : ''; + + await neuralChatService.sendMessage({ + channel: 'core-dev', + from: 'claude', + body: `[APPROVED] Capability Request\n\nCapability: ${params.capability}\nFrom: ${params.fromAgent} -> To: ${params.toAgent}\nPriority: ${params.priority || 'normal'}\nRequest ID: ${request.requestId}${modNote}`, + type: 'task', + priority: params.priority || 'normal', + mentions: [params.toAgent] + }); + + console.log(`[CapabilityBroker] APPROVED: ${request.requestId}: ${params.fromAgent} -> ${params.toAgent} (${params.capability})`); + + return { ...request, approval }; + } + + /** + * Gem request i Blackboard (fil-system) + */ + private async saveToBlackboard(request: CapabilityRequest): Promise { + const filename = `${request.timestamp.replace(/[:.]/g, '-')}_capability_${request.requestId}.json`; + const inboxPath = path.join(DROPZONE_PATH, 'agents', request.toAgent, 'inbox', filename); + + try { + await fs.mkdir(path.dirname(inboxPath), { recursive: true }); + await fs.writeFile(inboxPath, JSON.stringify({ + ...request, + _type: 'capability_request', + _blackboard_version: '2.0' + }, null, 2)); + } catch (error) { + console.warn('Failed to save to Blackboard:', error); + } + } + + /** + * Persist request til Neo4j + */ + private async persistToNeo4j(request: CapabilityRequest): Promise { + try { + await neo4jAdapter.executeQuery(` + CREATE (r:CapabilityRequest { + requestId: $requestId, + fromAgent: $fromAgent, + toAgent: $toAgent, + capability: $capability, + params: $params, + priority: $priority, + timestamp: $timestamp, + deadline: $deadline, + status: 'pending' + }) + WITH r + MATCH (from:Agent {name: $fromAgent}) + MATCH 
(to:Agent {name: $toAgent}) + MERGE (from)-[:REQUESTED]->(r) + MERGE (r)-[:ASSIGNED_TO]->(to) + `, { + requestId: request.requestId, + fromAgent: request.fromAgent, + toAgent: request.toAgent, + capability: request.capability, + params: JSON.stringify(request.params), + priority: request.priority, + timestamp: request.timestamp, + deadline: request.deadline || '' + }); + } catch (error) { + console.warn('Failed to persist to Neo4j:', error); + } + } + + /** + * Besvar en capability request + */ + async respondToCapability(params: { + requestId: string; + success: boolean; + result?: any; + error?: string; + respondingAgent: AgentId; + }): Promise { + const response: CapabilityResponse = { + requestId: params.requestId, + success: params.success, + result: params.result, + error: params.error, + timestamp: new Date().toISOString() + }; + + // Fjern fra pending + const request = this.pendingRequests.get(params.requestId); + this.pendingRequests.delete(params.requestId); + + // Opdater Neo4j + try { + await neo4jAdapter.executeQuery(` + MATCH (r:CapabilityRequest {requestId: $requestId}) + SET r.status = $status, + r.response = $response, + r.completedAt = $completedAt + `, { + requestId: params.requestId, + status: params.success ? 'completed' : 'failed', + response: JSON.stringify(response), + completedAt: response.timestamp + }); + } catch (error) { + console.warn('Failed to update Neo4j:', error); + } + + // Notificer via Neural Chat + if (request) { + await neuralChatService.sendMessage({ + channel: 'core-dev', + from: params.respondingAgent, + body: `[RESPONSE] Capability Response: ${params.requestId}\nStatus: ${params.success ? 
'SUCCESS' : 'FAILED'}\n${params.error || ''}`, + type: 'response', + priority: 'normal', + mentions: [request.fromAgent] + }); + } + + return response; + } + + /** + * Hent pending requests for en agent + */ + async getPendingRequests(agent: AgentId): Promise { + try { + const results = await neo4jAdapter.executeQuery(` + MATCH (r:CapabilityRequest {toAgent: $agent, status: 'pending'}) + RETURN r + ORDER BY r.timestamp DESC + `, { agent }); + + return results.map((row: any) => ({ + requestId: row.r.properties.requestId, + fromAgent: row.r.properties.fromAgent, + toAgent: row.r.properties.toAgent, + capability: row.r.properties.capability, + params: JSON.parse(row.r.properties.params || '{}'), + priority: row.r.properties.priority, + timestamp: row.r.properties.timestamp, + deadline: row.r.properties.deadline + })); + } catch (error) { + console.warn('Failed to fetch pending requests:', error); + return Array.from(this.pendingRequests.values()) + .filter(r => r.toAgent === agent); + } + } + + /** + * Smart routing: Find bedste agent til en opgave baseret på capability match + */ + async smartRoute(params: { + task: string; + context?: string; + fromAgent: AgentId; + }): Promise<{ agent: AgentId; capability: AgentCapability; confidence: number } | null> { + const allCapabilities = this.getAllCapabilities(); + + // Simple keyword matching (kan udvides med embeddings) + const taskLower = params.task.toLowerCase(); + + let bestMatch: { agent: AgentId; capability: AgentCapability; confidence: number } | null = null; + + for (const cap of allCapabilities) { + let score = 0; + + // Check name match + if (taskLower.includes(cap.name.toLowerCase())) score += 0.5; + + // Check description match + const descWords = cap.description.toLowerCase().split(' '); + const matchingWords = descWords.filter(w => taskLower.includes(w) && w.length > 3); + score += matchingWords.length * 0.1; + + // Check category match + if (taskLower.includes(cap.category)) score += 0.3; + + // Factor in 
reliability + score *= cap.reliability || 0.8; + + if (!bestMatch || score > bestMatch.confidence) { + bestMatch = { + agent: cap.agent, + capability: cap, + confidence: Math.min(score, 1.0) + }; + } + } + + return bestMatch; + } +} + +export const capabilityBroker = CapabilityBroker.getInstance(); + +// Re-export types (use 'export type' for interfaces) +export { AGENT_CAPABILITIES } from './AgentCapabilities.js'; +export type { AgentCapability, CapabilityRequest, CapabilityResponse } from './AgentCapabilities.js'; diff --git a/apps/backend/src/services/NeuralChat/ChatController.ts b/apps/backend/src/services/NeuralChat/ChatController.ts new file mode 100644 index 0000000000000000000000000000000000000000..8c6e5a05069ebbd95aca44ed9e5976ba56653df2 --- /dev/null +++ b/apps/backend/src/services/NeuralChat/ChatController.ts @@ -0,0 +1,242 @@ +/** + * ╔═══════════════════════════════════════════════════════════════════════════╗ + * ║ NEURAL CHAT CONTROLLER ║ + * ║═══════════════════════════════════════════════════════════════════════════║ + * ║ REST API endpoints for Neural Chat ║ + * ╚═══════════════════════════════════════════════════════════════════════════╝ + */ + +import { Router, Request, Response } from 'express'; +import { neuralChatService } from './ChatService.js'; +import { AgentId, ChannelId, MessageType, MessagePriority } from './types.js'; + +export const neuralChatRouter = Router(); + +// Initialize service on first request +let initialized = false; +const ensureInitialized = async () => { + if (!initialized) { + await neuralChatService.initialize(); + initialized = true; + } +}; + +/** + * POST /api/neural-chat/send + * Send a message to a channel + */ +neuralChatRouter.post('/send', async (req: Request, res: Response) => { + try { + await ensureInitialized(); + + const { channel, from, body, type, priority, subject, to, replyTo, mentions } = req.body; + + if (!channel || !from || !body) { + return res.status(400).json({ + error: 'Missing required 
fields: channel, from, body' + }); + } + + const message = await neuralChatService.sendMessage({ + channel: channel as ChannelId, + from: from as AgentId, + body, + type: type as MessageType, + priority: priority as MessagePriority, + subject, + to: to as AgentId | AgentId[], + replyTo, + mentions: mentions as AgentId[] + }); + + res.json({ success: true, message }); + } catch (error: any) { + console.error('Neural Chat send error:', error); + res.status(500).json({ error: error.message }); + } +}); + +/** + * GET /api/neural-chat/messages + * Get messages from a channel or for an agent + */ +neuralChatRouter.get('/messages', async (req: Request, res: Response) => { + try { + await ensureInitialized(); + + const { channel, since, limit, agent } = req.query; + + const messages = await neuralChatService.getMessages({ + channel: channel as ChannelId, + since: since as string, + limit: limit ? parseInt(limit as string) : undefined, + agent: agent as AgentId + }); + + res.json({ messages, count: messages.length }); + } catch (error: any) { + console.error('Neural Chat fetch error:', error); + res.status(500).json({ error: error.message }); + } +}); + +/** + * GET /api/neural-chat/channels + * List all channels + */ +neuralChatRouter.get('/channels', async (_req: Request, res: Response) => { + try { + await ensureInitialized(); + const channels = neuralChatService.getChannels(); + res.json({ channels }); + } catch (error: any) { + res.status(500).json({ error: error.message }); + } +}); + +/** + * GET /api/neural-chat/health + * Health check for Neural Chat + */ +neuralChatRouter.get('/health', async (_req: Request, res: Response) => { + res.json({ + status: 'healthy', + service: 'neural-chat', + timestamp: new Date().toISOString() + }); +}); + + +// ═══════════════════════════════════════════════════════════════════ +// CAPABILITY ENDPOINTS +// ═══════════════════════════════════════════════════════════════════ + +import { capabilityBroker, AGENT_CAPABILITIES } from 
'./CapabilityBroker.js'; + +/** + * GET /api/neural-chat/capabilities + * List all agent capabilities + */ +neuralChatRouter.get('/capabilities', async (req: Request, res: Response) => { + try { + const agent = req.query.agent as AgentId; + + if (agent) { + const capabilities = capabilityBroker.getAgentCapabilities(agent); + res.json({ agent, capabilities }); + } else { + res.json({ capabilities: AGENT_CAPABILITIES }); + } + } catch (error: any) { + res.status(500).json({ error: error.message }); + } +}); + +/** + * POST /api/neural-chat/capabilities/request + * Request a capability from another agent + */ +neuralChatRouter.post('/capabilities/request', async (req: Request, res: Response) => { + try { + const { fromAgent, toAgent, capability, params, priority, deadline } = req.body; + + if (!fromAgent || !toAgent || !capability) { + return res.status(400).json({ + error: 'Missing required fields: fromAgent, toAgent, capability' + }); + } + + const request = await capabilityBroker.requestCapability({ + fromAgent, + toAgent, + capability, + params: params || {}, + priority, + deadline + }); + + res.json({ success: true, request }); + } catch (error: any) { + res.status(500).json({ error: error.message }); + } +}); + +/** + * POST /api/neural-chat/capabilities/respond + * Respond to a capability request + */ +neuralChatRouter.post('/capabilities/respond', async (req: Request, res: Response) => { + try { + const { requestId, success, result, error, respondingAgent } = req.body; + + if (!requestId || success === undefined || !respondingAgent) { + return res.status(400).json({ + error: 'Missing required fields: requestId, success, respondingAgent' + }); + } + + const response = await capabilityBroker.respondToCapability({ + requestId, + success, + result, + error, + respondingAgent + }); + + res.json({ success: true, response }); + } catch (error: any) { + res.status(500).json({ error: error.message }); + } +}); + +/** + * GET /api/neural-chat/capabilities/pending + * 
Get pending capability requests for an agent + */ +neuralChatRouter.get('/capabilities/pending', async (req: Request, res: Response) => { + try { + const agent = req.query.agent as AgentId; + + if (!agent) { + return res.status(400).json({ error: 'Missing agent parameter' }); + } + + const requests = await capabilityBroker.getPendingRequests(agent); + res.json({ agent, pending: requests, count: requests.length }); + } catch (error: any) { + res.status(500).json({ error: error.message }); + } +}); + +/** + * POST /api/neural-chat/capabilities/route + * Smart-route a task to the best agent + */ +neuralChatRouter.post('/capabilities/route', async (req: Request, res: Response) => { + try { + const { task, context, fromAgent } = req.body; + + if (!task || !fromAgent) { + return res.status(400).json({ + error: 'Missing required fields: task, fromAgent' + }); + } + + const result = await capabilityBroker.smartRoute({ task, context, fromAgent }); + + if (result) { + res.json({ + success: true, + recommendation: result, + message: `Recommended: ${result.agent} for ${result.capability.name} (confidence: ${(result.confidence * 100).toFixed(0)}%)` + }); + } else { + res.json({ + success: false, + message: 'No suitable agent found for this task' + }); + } + } catch (error: any) { + res.status(500).json({ error: error.message }); + } +}); diff --git a/apps/backend/src/services/NeuralChat/ChatService.ts b/apps/backend/src/services/NeuralChat/ChatService.ts new file mode 100644 index 0000000000000000000000000000000000000000..e7976533a3b75cd412ac915bdde6b00defde319b --- /dev/null +++ b/apps/backend/src/services/NeuralChat/ChatService.ts @@ -0,0 +1,212 @@ +/** + * ╔═══════════════════════════════════════════════════════════════════════════╗ + * ║ NEURAL CHAT SERVICE ║ + * ║═══════════════════════════════════════════════════════════════════════════║ + * ║ Core service for agent-to-agent real-time communication ║ + * ║ • Neo4j persistence for message history ║ + * ║ • Channel 
management ║ + * ║ • Thread support ║ + * ╚═══════════════════════════════════════════════════════════════════════════╝ + */ + +import { v4 as uuidv4 } from 'uuid'; +import { neo4jAdapter } from '../../adapters/Neo4jAdapter.js'; +import { + ChatMessage, + Channel, + AgentId, + ChannelId, + MessageType, + MessagePriority, + DEFAULT_CHANNELS +} from './types.js'; + +class NeuralChatService { + private static instance: NeuralChatService; + private channels: Map = new Map(); + private initialized: boolean = false; + + private constructor() {} + + public static getInstance(): NeuralChatService { + if (!NeuralChatService.instance) { + NeuralChatService.instance = new NeuralChatService(); + } + return NeuralChatService.instance; + } + + async initialize(): Promise { + if (this.initialized) return; + + // Setup default channels + for (const channel of DEFAULT_CHANNELS) { + this.channels.set(channel.id, channel); + // Persist to Neo4j + await this.persistChannel(channel); + } + + console.log('[NeuralChat] Service initialized'); + this.initialized = true; + } + + private async persistChannel(channel: Channel): Promise { + try { + await neo4jAdapter.executeQuery(` + MERGE (c:Channel {id: $id}) + SET c.name = $name, + c.description = $description, + c.members = $members, + c.created_at = $created_at, + c.is_private = $is_private + `, { + id: channel.id, + name: channel.name, + description: channel.description || '', + members: channel.members, + created_at: channel.created_at, + is_private: channel.is_private + }); + } catch (error) { + console.warn('Failed to persist channel to Neo4j:', error); + } + } + + async sendMessage(params: { + channel: ChannelId; + from: AgentId; + body: string; + type?: MessageType; + priority?: MessagePriority; + subject?: string; + to?: AgentId | AgentId[]; + replyTo?: string; + mentions?: AgentId[]; + }): Promise { + const message: ChatMessage = { + id: `msg-${uuidv4()}`, + channel: params.channel, + from: params.from, + to: params.to, + type: 
params.type || 'chat', + priority: params.priority || 'normal', + subject: params.subject, + body: params.body, + mentions: params.mentions || this.extractMentions(params.body), + replyTo: params.replyTo, + timestamp: new Date().toISOString(), + read_by: [params.from] + }; + + // Persist to Neo4j + await this.persistMessage(message); + + console.log(`[NeuralChat] [${message.channel}] ${message.from}: ${message.body.substring(0, 50)}...`); + + return message; + } + + private extractMentions(body: string): AgentId[] { + const mentionRegex = /@(claude|gemini|deepseek|clak)/gi; + const matches = body.match(mentionRegex) || []; + return [...new Set(matches.map(m => m.slice(1).toLowerCase() as AgentId))]; + } + + private async persistMessage(message: ChatMessage): Promise { + try { + await neo4jAdapter.executeQuery(` + CREATE (m:ChatMessage { + id: $id, + channel: $channel, + from_agent: $from, + to_agent: $to, + type: $type, + priority: $priority, + subject: $subject, + body: $body, + mentions: $mentions, + reply_to: $replyTo, + timestamp: $timestamp + }) + WITH m + MATCH (c:Channel {id: $channel}) + MERGE (c)-[:HAS_MESSAGE]->(m) + `, { + id: message.id, + channel: message.channel, + from: message.from, + to: Array.isArray(message.to) ? 
message.to.join(',') : (message.to || ''), + type: message.type, + priority: message.priority, + subject: message.subject || '', + body: message.body, + mentions: message.mentions || [], + replyTo: message.replyTo || '', + timestamp: message.timestamp + }); + } catch (error) { + console.warn('Failed to persist message to Neo4j:', error); + } + } + + async getMessages(params: { + channel?: ChannelId; + since?: string; + limit?: number; + agent?: AgentId; + }): Promise { + const limit = params.limit || 50; + + let query = ` + MATCH (m:ChatMessage) + WHERE 1=1 + `; + const queryParams: Record = { limit }; + + if (params.channel) { + query += ` AND m.channel = $channel`; + queryParams.channel = params.channel; + } + + if (params.since) { + query += ` AND m.timestamp > $since`; + queryParams.since = params.since; + } + + if (params.agent) { + query += ` AND (m.from_agent = $agent OR $agent IN m.mentions OR m.to_agent CONTAINS $agent)`; + queryParams.agent = params.agent; + } + + query += ` RETURN m ORDER BY m.timestamp DESC LIMIT $limit`; + + try { + const results = await neo4jAdapter.executeQuery(query, queryParams); + return results.map((r: any) => ({ + id: r.m.properties.id, + channel: r.m.properties.channel, + from: r.m.properties.from_agent, + to: r.m.properties.to_agent, + type: r.m.properties.type, + priority: r.m.properties.priority, + subject: r.m.properties.subject, + body: r.m.properties.body, + mentions: r.m.properties.mentions, + replyTo: r.m.properties.reply_to, + timestamp: r.m.properties.timestamp + })); + } catch (error) { + console.warn('Failed to fetch messages from Neo4j:', error); + return []; + } + } + + getChannels(): Channel[] { + return Array.from(this.channels.values()); + } + + getChannel(id: ChannelId): Channel | undefined { + return this.channels.get(id); + } +} + +export const neuralChatService = NeuralChatService.getInstance(); diff --git a/apps/backend/src/services/NeuralChat/NeuralCortex.ts 
b/apps/backend/src/services/NeuralChat/NeuralCortex.ts new file mode 100644 index 0000000000000000000000000000000000000000..4472c4273b0816aa0c6c091114e7047a42b6daac --- /dev/null +++ b/apps/backend/src/services/NeuralChat/NeuralCortex.ts @@ -0,0 +1,620 @@ +/** + * ╔═══════════════════════════════════════════════════════════════════════════════════════╗ + * ║ NEURAL CORTEX ║ + * ║═══════════════════════════════════════════════════════════════════════════════════════║ + * ║ ║ + * ║ "If everything is connected, you can talk to anything and find all patterns" ║ + * ║ - CLAK, 2025 ║ + * ║ ║ + * ║ ┌─────────────────────────────────────────────────────────────────────────────┐ ║ + * ║ │ HYBRID NEURAL CORTEX ARCHITECTURE │ ║ + * ║ │ │ ║ + * ║ │ ┌──────────┐ ┌──────────┐ ┌──────────┐ ┌──────────┐ │ ║ + * ║ │ │ AGENTS │────▶│ CHAT │────▶│ HYBRID │────▶│ PATTERNS │ │ ║ + * ║ │ └──────────┘ └──────────┘ │ SEARCH │ └──────────┘ │ ║ + * ║ │ │ │ └─────┬────┘ │ │ ║ + * ║ │ ▼ ▼ │ ▼ │ ║ + * ║ │ ┌─────────┐ ┌─────────┐ ┌─────▼─────┐ ┌─────────┐ │ ║ + * ║ │ │ NEO4J │◀────▶│ PGVECTR │ │ SEMANTIC │ │ INSIGHT │ │ ║ + * ║ │ │ (Graph) │ link │ (Dense) │ │ MATCHING │ │ ENGINE │ │ ║ + * ║ │ └─────────┘ └─────────┘ └───────────┘ └─────────┘ │ ║ + * ║ │ │ ║ + * ║ │ CAPABILITIES: │ ║ + * ║ │ • Hybrid Search (Vector Similarity + Graph Traversal) │ ║ + * ║ │ • Chat with agents & documents │ ║ + * ║ │ • Auto-link messages to relevant nodes │ ║ + * ║ │ • Discover patterns across conversations │ ║ + * ║ │ • Surface insights from connected data │ ║ + * ║ │ │ ║ + * ║ └─────────────────────────────────────────────────────────────────────────────┘ ║ + * ╚═══════════════════════════════════════════════════════════════════════════════════════╝ + */ + +import { neo4jAdapter } from '../../adapters/Neo4jAdapter.js'; +import { neuralChatService } from './ChatService.js'; +import { AgentId, ChatMessage } from './types.js'; +import { getVectorStore } from '../../platform/vector/index.js'; + +export interface 
CortexQuery { + type: 'chat' | 'search' | 'pattern' | 'insight' | 'history'; + query: string; + context?: { + channel?: string; + agent?: AgentId; + timeRange?: { from: string; to: string }; + nodeTypes?: string[]; + }; +} + +export interface CortexResult { + type: 'message' | 'node' | 'pattern' | 'insight' | 'connection'; + data: any; + relevance: number; + source: string; + connections?: { type: string; target: string }[]; +} + +export interface DiscoveredPattern { + id: string; + name: string; + description: string; + occurrences: number; + confidence: number; + relatedNodes: string[]; + discoveredAt: string; + examples: string[]; +} + +class NeuralCortex { + private static instance: NeuralCortex; + + public static getInstance(): NeuralCortex { + if (!NeuralCortex.instance) { + NeuralCortex.instance = new NeuralCortex(); + } + return NeuralCortex.instance; + } + + /** + * Process a message and connect it to the knowledge graph AND vector store + */ + async processMessage(message: ChatMessage): Promise<{ + entities: string[]; + concepts: string[]; + linkedNodes: string[]; + vectorStored: boolean; + }> { + const entities = this.extractEntities(message.body); + const concepts = this.extractConcepts(message.body); + + // 1. Create message node in GRAPH (Neo4j) + await neo4jAdapter.runQuery(` + CREATE (m:Message { + id: $id, + from_agent: $from, + channel: $channel, + body: $body, + timestamp: $timestamp, + type: $type + }) + WITH m + + // Link to sender agent + MERGE (a:Agent {name: $from}) + MERGE (a)-[:SENT]->(m) + + // Link to channel + MERGE (c:Channel {name: $channel}) + MERGE (m)-[:IN_CHANNEL]->(c) + `, { + id: message.id, + from: message.from, + channel: message.channel, + body: message.body, + timestamp: message.timestamp, + type: message.type || 'chat' + }); + + // 2. 
Link to mentioned entities in GRAPH + const linkedNodes: string[] = []; + for (const entity of entities) { + const linked = await this.linkToEntity(message.id, entity); + if (linked) linkedNodes.push(linked); + } + + // 3. Link to concepts in GRAPH + for (const concept of concepts) { + await this.linkToConcept(message.id, concept); + } + + // 4. Store embedding in VECTOR STORE (pgvector) + // This enables "fuzzy" semantic search later + let vectorStored = false; + try { + const vectorStore = await getVectorStore(); + await vectorStore.upsert({ + id: message.id, + content: message.body, + metadata: { + type: 'message', + channel: message.channel, + from: message.from, + timestamp: message.timestamp, + entities: entities, + concepts: concepts + }, + namespace: 'neural_chat' + }); + vectorStored = true; + } catch (error) { + console.warn('Failed to store vector for message:', error); + } + + return { entities, concepts, linkedNodes, vectorStored }; + } + + /** + * Extract entities from message (agents, files, components, etc.) 
+ */ + private extractEntities(text: string): string[] { + const entities: string[] = []; + + // @mentions + const mentions = text.match(/@(\w+)/g); + if (mentions) entities.push(...mentions.map(m => m.slice(1))); + + // File paths + const files = text.match(/[\w-]+\.(ts|js|tsx|jsx|json|md|py|yaml|yml|sql)/gi); + if (files) entities.push(...files); + + // Component/Class names (PascalCase) + const components = text.match(/\b[A-Z][a-zA-Z]+(?:Widget|Service|Controller|Adapter|Component|Provider|Handler)\b/g); + if (components) entities.push(...components); + + // URLs + const urls = text.match(/https?:\/\/[^\s]+/g); + if (urls) entities.push(...urls); + + return [...new Set(entities)]; + } + + /** + * Extract concepts from message (topics, actions, technologies) + */ + private extractConcepts(text: string): string[] { + const concepts: string[] = []; + const textLower = text.toLowerCase(); + + // Technologies + const techs = ['neo4j', 'react', 'typescript', 'docker', 'kubernetes', 'api', 'websocket', 'mcp', 'graphql', 'rest', 'postgresql', 'redis', 'vector', 'rag']; + techs.forEach(t => { if (textLower.includes(t)) concepts.push(t); }); + + // Actions + const actions = ['deploy', 'review', 'test', 'refactor', 'implement', 'fix', 'create', 'delete', 'update', 'analyze', 'research', 'architect']; + actions.forEach(a => { if (textLower.includes(a)) concepts.push(a); }); + + // Domains + const domains = ['security', 'performance', 'architecture', 'authentication', 'authorization', 'database', 'frontend', 'backend', 'infrastructure', 'ai', 'agents']; + domains.forEach(d => { if (textLower.includes(d)) concepts.push(d); }); + + return [...new Set(concepts)]; + } + + /** + * Link message to existing entity in graph + */ + private async linkToEntity(messageId: string, entity: string): Promise { + try { + const result = await neo4jAdapter.runQuery(` + MATCH (m:Message {id: $messageId}) + MATCH (e) WHERE e.name = $entity OR e.path CONTAINS $entity OR e.id = $entity + MERGE 
(m)-[:MENTIONS]->(e) + RETURN e.name as linked + `, { messageId, entity }); + + return result[0]?.linked || null; + } catch { + return null; + } + } + + /** + * Link message to concept (create if not exists) + */ + private async linkToConcept(messageId: string, concept: string): Promise { + try { + await neo4jAdapter.runQuery(` + MATCH (m:Message {id: $messageId}) + MERGE (c:Concept {name: $concept}) + ON CREATE SET c.created_at = datetime() + MERGE (m)-[:RELATES_TO]->(c) + WITH c + SET c.mention_count = coalesce(c.mention_count, 0) + 1 + `, { messageId, concept }); + } catch (error) { + console.warn('Failed to link concept:', error); + } + } + + /** + * Query the cortex in natural language using Hybrid Strategy + */ + async query(input: CortexQuery): Promise { + const results: CortexResult[] = []; + + switch (input.type) { + case 'search': + return await this.hybridSearch(input.query, input.context); + + case 'pattern': + return await this.findPatterns(input.query, input.context); + + case 'insight': + return await this.generateInsights(input.query, input.context); + + case 'history': + return await this.getDecisionHistory(input.query, input.context); + + case 'chat': + default: + // 1. Search Chat History (Vector + Keyword) + const chatResults = await this.searchMessages(input.query); + // 2. Search Knowledge Graph (Keyword/Hybrid) + const graphResults = await this.hybridSearch(input.query, input.context); + + return [...chatResults, ...graphResults].sort((a, b) => b.relevance - a.relevance); + } + } + + /** + * Search messages using Vector Similarity (Semantic) + Graph (Keyword) + */ + private async searchMessages(query: string): Promise { + try { + const results: CortexResult[] = []; + + // A. 
Semantic Search (Vector) + try { + const vectorStore = await getVectorStore(); + const vectorResults = await vectorStore.search({ + text: query, + limit: 10, + namespace: 'neural_chat' + }); + + results.push(...vectorResults.map(r => ({ + type: 'message' as const, + data: { + id: r.id, + body: r.content, + metadata: r.metadata, + from: r.metadata?.from, + timestamp: r.metadata?.timestamp + }, + relevance: r.similarity, + source: 'vector_memory' + }))); + } catch (err) { + console.warn('Vector search failed, falling back to graph only', err); + } + + // B. Keyword Search (Graph) - if vector search didn't yield enough + if (results.length < 5) { + const graphResults = await neo4jAdapter.runQuery(` + MATCH (m:Message) + WHERE toLower(m.body) CONTAINS toLower($query) + OPTIONAL MATCH (a:Agent)-[:SENT]->(m) + RETURN m, a.name as agent + ORDER BY m.timestamp DESC + LIMIT 10 + `, { query }); + + const existingIds = new Set(results.map(r => r.data.id)); + + for (const r of graphResults) { + if (!existingIds.has(r.m.properties.id)) { + results.push({ + type: 'message' as const, + data: { + id: r.m.properties.id, + body: r.m.properties.body, + from: r.agent, + timestamp: r.m.properties.timestamp + }, + relevance: this.calculateRelevance(query, r.m.properties.body), + source: 'graph_memory' + }); + } + } + } + + return results; + } catch { + return []; + } + } + + /** + * Hybrid Search: Vector -> Graph Entry -> Traversal + */ + private async hybridSearch(query: string, context?: CortexQuery['context']): Promise { + try { + const nodeTypes = context?.nodeTypes?.join('|') || 'File|Component|Service|Document|Concept|Agent'; + const results: CortexResult[] = []; + + // 1. 
Vector Search (Find conceptually related nodes) + // Assuming we have documents/nodes in 'knowledge' namespace + try { + const vectorStore = await getVectorStore(); + const vectorResults = await vectorStore.search({ + text: query, + limit: 10, + namespace: 'knowledge' // Search acquired knowledge too + }); + + results.push(...vectorResults.map(r => ({ + type: 'node' as const, + data: { + name: r.metadata?.title || r.id, + description: r.content.substring(0, 200) + '...', + labels: [r.metadata?.type || 'Unknown'] + }, + relevance: r.similarity, + source: 'semantic_search' + }))); + + } catch (e) { /* ignore vector error */ } + + // 2. Graph Search (Exact/Fuzzy string match) + const graphResults = await neo4jAdapter.runQuery(` + MATCH (n) + WHERE any(label IN labels(n) WHERE label IN split($nodeTypes, '|')) + AND ( + toLower(n.name) CONTAINS toLower($query) OR + toLower(coalesce(n.description, '')) CONTAINS toLower($query) OR + toLower(coalesce(n.path, '')) CONTAINS toLower($query) + ) + OPTIONAL MATCH (n)-[r]-(connected) + RETURN n, labels(n) as types, + collect(DISTINCT {type: type(r), target: coalesce(connected.name, connected.id)}) as connections + LIMIT 20 + `, { query, nodeTypes }); + + // Merge results (simple dedup by name) + const existingNames = new Set(results.map(r => r.data.name)); + + for (const r of graphResults) { + const name = r.n.properties.name || r.n.properties.id; + if (!existingNames.has(name)) { + results.push({ + type: 'node' as const, + data: { + name: name, + labels: r.types, + properties: r.n.properties + }, + relevance: this.calculateRelevance(query, name), + source: 'knowledge_graph', + connections: r.connections.filter((c: any) => c.target) + }); + } + } + + return results; + } catch { + return []; + } + } + + /** + * Find patterns across conversations and code + */ + private async findPatterns(query: string, context?: CortexQuery['context']): Promise { + try { + // Find frequently co-occurring concepts + const conceptPatterns = await 
neo4jAdapter.runQuery(` + MATCH (m:Message)-[:RELATES_TO]->(c1:Concept) + MATCH (m)-[:RELATES_TO]->(c2:Concept) + WHERE c1 <> c2 AND id(c1) < id(c2) + WITH c1, c2, count(m) as cooccurrences + WHERE cooccurrences >= 2 + RETURN c1.name as concept1, c2.name as concept2, cooccurrences + ORDER BY cooccurrences DESC + LIMIT 10 + `); + + // Find agent collaboration patterns + const collabPatterns = await neo4jAdapter.runQuery(` + MATCH (a1:Agent)-[:SENT]->(m1:Message)-[:IN_CHANNEL]->(ch:Channel)<-[:IN_CHANNEL]-(m2:Message)<-[:SENT]-(a2:Agent) + WHERE a1 <> a2 AND m1.timestamp < m2.timestamp + AND duration.between(datetime(m1.timestamp), datetime(m2.timestamp)).minutes < 30 + WITH a1, a2, count(*) as interactions + RETURN a1.name as agent1, a2.name as agent2, interactions + ORDER BY interactions DESC + `); + + // Find decision patterns (messages with action words followed by changes) + const decisionPatterns = await neo4jAdapter.runQuery(` + MATCH (m:Message) + WHERE any(word IN ['besluttet', 'approved', 'godkendt', 'implement', 'deploy', 'fix'] + WHERE toLower(m.body) CONTAINS word) + OPTIONAL MATCH (m)-[:MENTIONS]->(e) + RETURN m.body as decision, m.from_agent as by, m.timestamp as when, + collect(e.name) as affected + ORDER BY m.timestamp DESC + LIMIT 10 + `); + + const patterns: CortexResult[] = []; + + if (conceptPatterns.length > 0) { + patterns.push({ + type: 'pattern', + data: { + name: 'Concept Relationships', + description: 'Frequently discussed together', + items: conceptPatterns.map((p: any) => `${p.concept1} ↔ ${p.concept2} (${p.cooccurrences}x)`) + }, + relevance: 0.9, + source: 'pattern_analysis' + }); + } + + if (collabPatterns.length > 0) { + patterns.push({ + type: 'pattern', + data: { + name: 'Agent Collaboration', + description: 'Who works together most', + items: collabPatterns.map((p: any) => `${p.agent1} ↔ ${p.agent2} (${p.interactions} interactions)`) + }, + relevance: 0.85, + source: 'pattern_analysis' + }); + } + + if (decisionPatterns.length > 0) 
{ + patterns.push({ + type: 'pattern', + data: { + name: 'Recent Decisions', + description: 'Actions taken by the team', + items: decisionPatterns.map((p: any) => ({ + decision: p.decision.substring(0, 100) + '...', + by: p.by, + when: p.when, + affected: p.affected + })) + }, + relevance: 0.95, + source: 'pattern_analysis' + }); + } + + return patterns; + } catch (error) { + console.warn('Pattern finding failed:', error); + return []; + } + } + + /** + * Generate insights from the knowledge graph + */ + private async generateInsights(query: string, context?: CortexQuery['context']): Promise { + try { + const insights: CortexResult[] = []; + + // Most active areas + const activeAreas = await neo4jAdapter.runQuery(` + MATCH (c:Concept)<-[:RELATES_TO]-(m:Message) + WITH c.name as concept, count(m) as activity + ORDER BY activity DESC + LIMIT 5 + RETURN concept, activity + `); + + // Knowledge gaps (mentioned but not documented) + const gaps = await neo4jAdapter.runQuery(` + MATCH (m:Message)-[:MENTIONS]->(name) + WHERE NOT (name)-[:DOCUMENTED_IN]->(:Document) + AND NOT name:Agent + WITH name.name as entity, count(m) as mentions + WHERE mentions >= 2 + RETURN entity, mentions + ORDER BY mentions DESC + LIMIT 5 + `); + + // Cross-cutting concerns (concepts that touch many areas) + const crossCutting = await neo4jAdapter.runQuery(` + MATCH (c:Concept)<-[:RELATES_TO]-(m:Message)-[:IN_CHANNEL]->(ch:Channel) + WITH c.name as concept, count(DISTINCT ch) as channels + WHERE channels >= 2 + RETURN concept, channels + ORDER BY channels DESC + LIMIT 5 + `); + + if (activeAreas.length > 0) { + insights.push({ + type: 'insight', + data: { + title: '🔥 Hottest Topics', + description: 'Most discussed areas right now', + items: activeAreas.map((a: any) => `${a.concept}: ${a.activity} mentions`) + }, + relevance: 0.9, + source: 'insight_engine' + }); + } + + if (crossCutting.length > 0) { + insights.push({ + type: 'insight', + data: { + title: '🔗 Cross-Cutting Concerns', + 
description: 'Topics that span multiple channels', + items: crossCutting.map((c: any) => `${c.concept}: ${c.channels} channels`) + }, + relevance: 0.85, + source: 'insight_engine' + }); + } + + return insights; + } catch { + return []; + } + } + + /** + * Calculate relevance score (0-1) for a result + */ + private calculateRelevance(query: string, text: string): number { + if (!text) return 0; + const queryTerms = query.toLowerCase().split(' '); + const textLower = text.toLowerCase(); + + let matches = 0; + for (const term of queryTerms) { + if (textLower.includes(term)) matches++; + } + + return matches / queryTerms.length; + } + + /** + * Retrieve decision history + */ + private async getDecisionHistory(query: string, context?: CortexQuery['context']): Promise { + try { + const history = await neo4jAdapter.runQuery(` + MATCH (m:Message) + WHERE any(word IN ['decision', 'approved', 'rejected', 'selected', 'chose'] + WHERE toLower(m.body) CONTAINS word) + AND toLower(m.body) CONTAINS toLower($query) + RETURN m, m.from_agent as agent + ORDER BY m.timestamp DESC + LIMIT 20 + `, { query }); + + return history.map((h: any) => ({ + type: 'message', + data: { + id: h.m.properties.id, + body: h.m.properties.body, + agent: h.agent, + timestamp: h.m.properties.timestamp + }, + relevance: 1, + source: 'history' + })); + } catch { + return []; + } + } +} + +// Singleton export +export const neuralCortex = new NeuralCortex(); \ No newline at end of file diff --git a/apps/backend/src/services/NeuralChat/index.ts b/apps/backend/src/services/NeuralChat/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..07e2e886556116ce298fae35666b1133759addb2 --- /dev/null +++ b/apps/backend/src/services/NeuralChat/index.ts @@ -0,0 +1,30 @@ +/** + * Neural Chat - Agent Communication System + * + * DUAL-LAYER ARCHITECTURE: + * + * 1. 
BLACKBOARD (Async/File-based) + * - Handovers between agents + * - Formal task assignments + * - Works when agents are offline + * - Location: DropZone/agents/[agent]/inbox + * + * 2. NEURAL CHAT (Sync/Neo4j-based) + * - Real-time discussions + * - Quick questions + * - Status updates + * - Persisted in Neo4j graph + * + * 3. CAPABILITY BROKER + * - Agents expose their strengths + * - Cross-agent task delegation + * - Smart routing based on capability match + */ + +export { neuralChatRouter } from './ChatController.js'; +export { neuralChatService } from './ChatService.js'; +export { capabilityBroker, AGENT_CAPABILITIES } from './CapabilityBroker.js'; +export { approvalGate } from './ApprovalGate.js'; +export * from './types.js'; +export type { AgentCapability, CapabilityRequest, CapabilityResponse } from './AgentCapabilities.js'; +export type { ApprovalResult, ApprovalDecision } from './ApprovalGate.js'; diff --git a/apps/backend/src/services/NeuralChat/types.ts b/apps/backend/src/services/NeuralChat/types.ts new file mode 100644 index 0000000000000000000000000000000000000000..170349aec5b902dee0c06564cf85d315725f5584 --- /dev/null +++ b/apps/backend/src/services/NeuralChat/types.ts @@ -0,0 +1,85 @@ +/** + * ╔═══════════════════════════════════════════════════════════════════════════╗ + * ║ NEURAL CHAT - TYPE DEFINITIONS ║ + * ║═══════════════════════════════════════════════════════════════════════════║ + * ║ Real-time agent-to-agent communication system ║ + * ╚═══════════════════════════════════════════════════════════════════════════╝ + */ + +export type AgentId = 'claude' | 'claude-cli' | 'gemini' | 'deepseek' | 'clak' | 'system'; +export type ChannelId = string; +export type MessagePriority = 'low' | 'normal' | 'high' | 'critical'; +export type MessageType = 'chat' | 'task' | 'status' | 'alert' | 'handover' | 'response'; + +export interface ChatMessage { + id: string; + channel: ChannelId; + from: AgentId; + to?: AgentId | AgentId[]; // Optional - if not set, 
broadcast to channel + type: MessageType; + priority: MessagePriority; + subject?: string; + body: string; + mentions?: AgentId[]; + replyTo?: string; // Parent message ID for threads + attachments?: MessageAttachment[]; + metadata?: Record; + timestamp: string; + read_by?: AgentId[]; +} + +export interface MessageAttachment { + type: 'code' | 'file' | 'node' | 'link'; + name: string; + content?: string; + url?: string; + nodeId?: string; // Neo4j node reference +} + +export interface Channel { + id: ChannelId; + name: string; + description?: string; + members: AgentId[]; + created_at: string; + created_by: AgentId; + is_private: boolean; +} + +export interface ChatThread { + rootMessageId: string; + messages: ChatMessage[]; + participants: AgentId[]; + lastActivity: string; +} + +// Default channels +export const DEFAULT_CHANNELS: Channel[] = [ + { + id: 'core-dev', + name: '#core-dev', + description: 'Core development discussions', + members: ['claude', 'claude-cli', 'gemini', 'deepseek', 'clak'], + created_at: new Date().toISOString(), + created_by: 'system', + is_private: false + }, + { + id: 'standup', + name: '#standup', + description: 'Daily standups and status updates', + members: ['claude', 'claude-cli', 'gemini', 'deepseek', 'clak'], + created_at: new Date().toISOString(), + created_by: 'system', + is_private: false + }, + { + id: 'alerts', + name: '#alerts', + description: 'System alerts and critical notifications', + members: ['claude', 'claude-cli', 'gemini', 'deepseek', 'clak'], + created_at: new Date().toISOString(), + created_by: 'system', + is_private: false + } +]; diff --git a/apps/backend/src/services/NeuralStream.ts b/apps/backend/src/services/NeuralStream.ts new file mode 100644 index 0000000000000000000000000000000000000000..de7d75ec0d36acf15f2aec1268f6a1442719b306 --- /dev/null +++ b/apps/backend/src/services/NeuralStream.ts @@ -0,0 +1,70 @@ +import { EventEmitter } from 'events'; + +// 1. 
Definition of the allowed nerve signals
+// 'KNOWLEDGE_INGEST_REQ' was added so the Ingestor can subscribe to it
+export type EventType =
+  | 'SECURITY_THREAT'
+  | 'KNOWLEDGE_GAP'
+  | 'SYSTEM_ERROR'
+  | 'SYSTEM_HEALED'
+  | 'KNOWLEDGE_INGEST_REQ'; // <--- NEW: signal for the Cortex Ingestor
+
+export type EventSeverity = 'LOW' | 'MEDIUM' | 'HIGH' | 'CRITICAL';
+
+// Envelope published on the stream for every emitEvent() call.
+// NOTE(review): payload is `any` — consider `unknown` so consumers must narrow.
+export interface NeuralEvent {
+  type: EventType;
+  severity: EventSeverity;
+  payload: any;
+  timestamp: Date;
+  source: string;
+}
+
+/**
+ * Process-wide singleton event bus built on Node's EventEmitter.
+ * Listeners subscribe by EventType, or to '*' for every event.
+ */
+class NeuralStream extends EventEmitter {
+  private static instance: NeuralStream;
+
+  private constructor() {
+    super();
+    // We allow many listeners, since OmniHarvester, Ingestor and the frontend
+    // all subscribe (default EventEmitter limit of 10 would warn).
+    this.setMaxListeners(50);
+  }
+
+  public static getInstance(): NeuralStream {
+    if (!NeuralStream.instance) {
+      NeuralStream.instance = new NeuralStream();
+    }
+    return NeuralStream.instance;
+  }
+
+  /**
+   * Publish a neural event to the entire organism.
+   * Emits under the concrete event type AND under the '*' catch-all channel.
+   */
+  public emitEvent(type: EventType, severity: EventSeverity, payload: any, source: string = 'System') {
+    const event: NeuralEvent = {
+      type,
+      severity,
+      payload,
+      timestamp: new Date(),
+      source
+    };
+
+    // 1. Emit internally to Node.js listeners (ReflexLoader, KnowledgeService)
+    this.emit(type, event);
+    this.emit('*', event); // Catch-all for logging/debugging
+
+    // 2. Log to the console (so we can watch the nervous system working).
+    // Only CRITICAL and HIGH get large icons, to reduce noise.
+    if (severity === 'CRITICAL' || severity === 'HIGH') {
+      const icon = severity === 'CRITICAL' ? '🔥' : '🔴';
+      console.log(`${icon} [NeuralStream] ${type} from ${source}:`, this.truncate(JSON.stringify(payload), 100));
+    } else if (process.env.NODE_ENV === 'development') {
+      // In dev mode we see everything
+      console.log(`⚡ [NeuralStream] ${type} from ${source}`);
+    }
+  }
+
+  // Truncates str to n-1 chars plus '...' when longer than n.
+  // NOTE(review): String.prototype.substr is deprecated — prefer slice(0, n - 1).
+  private truncate(str: string, n: number) {
+    return (str.length > n) ? str.substr(0, n-1) + '...' 
: str; + } +} + +export const neuralStream = NeuralStream.getInstance(); diff --git a/apps/backend/src/services/NudgeService.ts b/apps/backend/src/services/NudgeService.ts new file mode 100644 index 0000000000000000000000000000000000000000..e3f7a37978592f6b039ef04f69f1bc4d0ae40825 --- /dev/null +++ b/apps/backend/src/services/NudgeService.ts @@ -0,0 +1,355 @@ +/** + * NudgeService - Aggressive Data Generation System + * + * Runs every 15 minutes to push data into the system: + * - System metrics → Neo4j + * - Process snapshots → Neo4j + * - Graph evolution triggers + * - Agent activity pings + * - OmniHarvester triggers + */ + +import cron from 'node-cron'; +import si from 'systeminformation'; +import { logger } from '../utils/logger.js'; +import { eventBus } from '../mcp/EventBus.js'; + +interface NudgeStats { + lastRun: Date | null; + totalRuns: number; + nodesCreated: number; + eventsEmitted: number; + errors: number; +} + +class NudgeService { + private isRunning = false; + private task: cron.ScheduledTask | null = null; + private neo4jAdapter: any = null; + private stats: NudgeStats = { + lastRun: null, + totalRuns: 0, + nodesCreated: 0, + eventsEmitted: 0, + errors: 0 + }; + + async start() { + if (this.isRunning) return; + this.isRunning = true; + + // Dynamic import to avoid circular deps + try { + const { neo4jAdapter } = await import('../adapters/Neo4jAdapter.js'); + this.neo4jAdapter = neo4jAdapter; + await this.neo4jAdapter.connect(); + } catch (e) { + logger.warn('⚡ NudgeService: Neo4j not available, running in limited mode'); + } + + console.log('⚡ NudgeService ACTIVATED - Running every 15 minutes'); + console.log(' └─ Pushing: System metrics, Process snapshots, Graph evolution'); + + // Every 15 minutes: */15 * * * * + this.task = cron.schedule('*/15 * * * *', async () => { + await this.runNudgeCycle(); + }); + + // Initial nudge after 30 seconds + setTimeout(() => this.runNudgeCycle(), 30000); + + // Also run a mini-nudge every 5 minutes for basic 
stats
+    // NOTE(review): this ScheduledTask handle is discarded, so stop() below
+    // can never cancel the 5-minute mini-nudge job — TODO store the handle
+    // (e.g. this.miniTask) and stop it alongside this.task.
+    cron.schedule('*/5 * * * *', async () => {
+      await this.miniNudge();
+    });
+  }
+
+  // Stops the 15-minute cycle task (only; see NOTE about the 5-minute task above).
+  stop() {
+    if (this.task) {
+      this.task.stop();
+      this.task = null;
+    }
+    this.isRunning = false;
+    console.log('🛑 NudgeService stopped');
+  }
+
+  /**
+   * One full nudge cycle: runs the six data-push steps sequentially and
+   * updates run/error statistics. Any step's thrown error aborts the
+   * remaining steps and is counted in stats.errors.
+   */
+  private async runNudgeCycle() {
+    const cycleStart = Date.now();
+    console.log('\n⚡══════════════════════════════════════════════════════════⚡');
+    console.log('   NUDGE CYCLE #' + (this.stats.totalRuns + 1) + ' - ' + new Date().toISOString());
+    console.log('⚡══════════════════════════════════════════════════════════⚡');
+
+    try {
+      // 1. Capture System Metrics
+      await this.captureSystemMetrics();
+
+      // 2. Capture Process Snapshot
+      await this.captureProcessSnapshot();
+
+      // 3. Trigger Graph Evolution
+      await this.triggerGraphEvolution();
+
+      // 4. Ping All Agents
+      await this.pingAgents();
+
+      // 5. Emit Activity Events
+      await this.emitActivityEvents();
+
+      // 6. Run Knowledge Compilation
+      await this.triggerKnowledgeCompilation();
+
+      this.stats.totalRuns++;
+      this.stats.lastRun = new Date();
+
+      const duration = Date.now() - cycleStart;
+      console.log(`\n✅ NUDGE CYCLE COMPLETE in ${duration}ms`);
+      console.log(`   Stats: ${this.stats.nodesCreated} nodes | ${this.stats.eventsEmitted} events | ${this.stats.errors} errors`);
+      console.log('⚡══════════════════════════════════════════════════════════⚡\n');
+
+    } catch (error) {
+      this.stats.errors++;
+      logger.error('❌ Nudge cycle failed:', error);
+    }
+  }
+
+  /**
+   * Lightweight heartbeat: samples CPU load and memory and emits a
+   * 'system:heartbeat' event on the eventBus. Failures are swallowed
+   * deliberately (best-effort telemetry, no stats.errors increment).
+   */
+  private async miniNudge() {
+    // Quick stats push every 5 minutes
+    try {
+      const [cpu, mem] = await Promise.all([
+        si.currentLoad(),
+        si.mem()
+      ]);
+
+      eventBus.emitEvent({
+        type: 'system:heartbeat',
+        timestamp: new Date().toISOString(),
+        source: 'NudgeService',
+        payload: {
+          cpuLoad: cpu.currentLoad.toFixed(1),
+          memUsed: ((mem.used / mem.total) * 100).toFixed(1),
+          uptime: process.uptime()
+        }
+      });
+
+      this.stats.eventsEmitted++;
+    } catch (e) {
+      // Silent fail for mini-nudge
+    }
+  }
+
+  private async 
captureSystemMetrics() { + console.log('📊 [1/6] Capturing system metrics...'); + + try { + const [cpu, mem, osInfo, currentLoad, disk, network] = await Promise.all([ + si.cpu(), + si.mem(), + si.osInfo(), + si.currentLoad(), + si.fsSize(), + si.networkStats() + ]); + + if (this.neo4jAdapter) { + const timestamp = new Date().toISOString(); + const snapshotId = `sys-${Date.now()}`; + + await this.neo4jAdapter.runQuery(` + MERGE (s:SystemSnapshot {id: $id}) + SET s.timestamp = $timestamp, + s.cpuBrand = $cpuBrand, + s.cpuCores = $cpuCores, + s.cpuLoad = $cpuLoad, + s.memTotal = $memTotal, + s.memUsed = $memUsed, + s.memPercent = $memPercent, + s.platform = $platform, + s.osDistro = $osDistro, + s.diskUsed = $diskUsed + MERGE (sys:System {name: 'WidgeTDC'}) + MERGE (sys)-[:HAS_SNAPSHOT]->(s) + RETURN s + `, { + id: snapshotId, + timestamp, + cpuBrand: cpu.brand, + cpuCores: cpu.cores, + cpuLoad: currentLoad.currentLoad, + memTotal: mem.total, + memUsed: mem.used, + memPercent: (mem.used / mem.total) * 100, + platform: osInfo.platform, + osDistro: osInfo.distro, + diskUsed: disk[0]?.used || 0 + }); + + this.stats.nodesCreated++; + console.log(' ✓ System snapshot saved to Neo4j'); + } + + eventBus.emitEvent({ + type: 'nudge.system_metrics', + timestamp: new Date().toISOString(), + source: 'NudgeService', + payload: { + cpu: currentLoad.currentLoad.toFixed(1) + '%', + memory: ((mem.used / mem.total) * 100).toFixed(1) + '%' + } + }); + this.stats.eventsEmitted++; + + } catch (error) { + logger.error('System metrics capture failed:', error); + this.stats.errors++; + } + } + + private async captureProcessSnapshot() { + console.log('🔄 [2/6] Capturing process snapshot...'); + + try { + const data = await si.processes(); + const topProcesses = data.list + .sort((a, b) => (b.cpu || 0) - (a.cpu || 0)) + .slice(0, 10); + + if (this.neo4jAdapter) { + const timestamp = new Date().toISOString(); + + for (const proc of topProcesses) { + await this.neo4jAdapter.runQuery(` + MERGE 
(p:Process {name: $name}) + SET p.lastSeen = $timestamp, + p.cpu = $cpu, + p.mem = $mem, + p.pid = $pid + MERGE (sys:System {name: 'WidgeTDC'}) + MERGE (sys)-[:RUNS]->(p) + `, { + name: proc.name || 'Unknown', + timestamp, + cpu: proc.cpu || 0, + mem: proc.mem || 0, + pid: proc.pid + }); + } + + this.stats.nodesCreated += topProcesses.length; + console.log(` ✓ ${topProcesses.length} processes tracked`); + } + + } catch (error) { + logger.error('Process snapshot failed:', error); + this.stats.errors++; + } + } + + private async triggerGraphEvolution() { + console.log('🧬 [3/6] Triggering graph evolution...'); + + try { + if (this.neo4jAdapter) { + // Get current graph stats + const statsResult = await this.neo4jAdapter.runQuery(` + MATCH (n) + WITH count(n) as nodeCount + MATCH ()-[r]->() + RETURN nodeCount, count(r) as relCount + `); + + const stats = statsResult?.[0]; + const nodeCount = stats?.nodeCount || 0; + const relCount = stats?.relCount || 0; + + // Create evolution event node + await this.neo4jAdapter.runQuery(` + CREATE (e:EvolutionEvent { + id: $id, + timestamp: $timestamp, + nodeCount: $nodeCount, + relationshipCount: $relCount, + source: 'NudgeService' + }) + `, { + id: `evo-${Date.now()}`, + timestamp: new Date().toISOString(), + nodeCount, + relCount + }); + + this.stats.nodesCreated++; + console.log(` ✓ Graph: ${nodeCount} nodes, ${relCount} relationships`); + } + + } catch (error) { + logger.error('Graph evolution failed:', error); + this.stats.errors++; + } + } + + private async pingAgents() { + console.log('🤖 [4/6] Pinging agents...'); + + const agents = ['HansPedder', 'GraphRAG', 'System', 'OmniHarvester']; + + for (const agent of agents) { + eventBus.emitEvent({ + type: 'agent.ping', + timestamp: new Date().toISOString(), + source: 'NudgeService', + payload: { agent, action: 'heartbeat' } + }); + this.stats.eventsEmitted++; + } + + console.log(` ✓ Pinged ${agents.length} agents`); + } + + private async emitActivityEvents() { + console.log('📡 
[5/6] Emitting activity events...'); + + const events = [ + { type: 'nudge.cycle_complete', payload: { cycle: this.stats.totalRuns + 1 } }, + { type: 'system.activity', payload: { source: 'NudgeService', active: true } }, + { type: 'data.push', payload: { nodesCreated: this.stats.nodesCreated } } + ]; + + for (const event of events) { + eventBus.emitEvent({ + type: event.type as any, // Dynamic event types + timestamp: new Date().toISOString(), + source: 'NudgeService', + payload: event.payload + }); + this.stats.eventsEmitted++; + } + + console.log(` ✓ Emitted ${events.length} events`); + } + + private async triggerKnowledgeCompilation() { + console.log('🧠 [6/6] Triggering knowledge compilation...'); + + try { + // Trigger compilation via HTTP to self + const response = await fetch('http://localhost:3001/api/knowledge/compile', { + method: 'POST', + headers: { 'Content-Type': 'application/json' } + }).catch(() => null); + + if (response?.ok) { + console.log(' ✓ Knowledge compilation triggered'); + } else { + console.log(' ⚠ Knowledge compilation endpoint not available'); + } + } catch (e) { + // Silent - endpoint might not exist + } + } + + getStats(): NudgeStats { + return { ...this.stats }; + } +} + +export const nudgeService = new NudgeService(); diff --git a/apps/backend/src/services/OmniHarvester.ts b/apps/backend/src/services/OmniHarvester.ts new file mode 100644 index 0000000000000000000000000000000000000000..fb8cbf4226bbb50a2fc291624217bec17aaed82d --- /dev/null +++ b/apps/backend/src/services/OmniHarvester.ts @@ -0,0 +1,175 @@ +import { neo4jService } from './Neo4jService'; +import { hyperLog } from './HyperLog'; +import * as fs from 'fs'; +import * as path from 'path'; +import { DROPZONE_PATH } from '../config.js'; + +// Typer for Threat Intel +interface ThreatIntel { + ip: string; + riskScore: number; + country: string; + isp: string; + knownVulnerabilities: string[]; +} + +export class OmniHarvester { + private static instance: OmniHarvester; + private 
dropZonePath: string;
+
+  private constructor() {
+    // We persist harvested data to disk for traceability
+    this.dropZonePath = path.join(DROPZONE_PATH, 'harvested');
+    if (!fs.existsSync(this.dropZonePath)) {
+      fs.mkdirSync(this.dropZonePath, { recursive: true });
+    }
+    console.log('🕷️ [OmniHarvester] Hunter-Killer system initialized.');
+  }
+
+  public static getInstance(): OmniHarvester {
+    if (!OmniHarvester.instance) {
+      OmniHarvester.instance = new OmniHarvester();
+    }
+    return OmniHarvester.instance;
+  }
+
+  /**
+   * 🛡️ ACTIVE DEFENSE: investigates a hostile IP.
+   * Called automatically when NeuralStream detects an attack.
+   * NOTE(review): the return type lost its type arguments in this patch —
+   * presumably Promise<ThreatIntel> (it returns the mockThreatLookup result);
+   * restore the generics before merging.
+   */
+  public async investigateThreat(ip: string, payload: string): Promise {
+    console.log(`🛡️ [OmniHarvester] ALERT: Hunting threat actor at ${ip}...`);
+
+    // 1. External recon (simulated API call standing in for AbuseIPDB/GeoIP)
+    const intel = await this.mockThreatLookup(ip);
+
+    // 2. Payload analysis (what are they attempting?)
+    const attackType = this.analyzePayload(payload);
+
+    // 3. Graph synthesis (persist the adversary in Neo4j)
+    await this.persistThreatToGraph(intel, attackType, payload);
+
+    // 4. Memory injection (remember this for the future)
+    await hyperLog.logEvent(
+      'THREAT_NEUTRALIZED',
+      {
+        ip,
+        attackType,
+        riskScore: intel.riskScore,
+        severity: 'HIGH'
+      }
+    );
+
+    return intel;
+  }
+
+  /**
+   * 🧠 KNOWLEDGE EXPANSION: fetches knowledge from the web.
+   * Called when the user's question cannot be answered from the internal graph.
+   * NOTE(review): return type generics were stripped — presumably Promise<string>
+   * (the scraped content is returned below).
+   */
+  public async harvestKnowledge(topic: string): Promise {
+    console.log(`🧠 [OmniHarvester] Scouting external web for: "${topic}"...`);
+
+    try {
+      // 1. "DuckDuckGo" simulation (in prod we would use a real search API).
+      // For now we scrape a dummy URL, or a specific source if one is given.
+      const content = await this.performWebScrape(topic);
+
+      // 2. 
Gem som fil i DropZone (Så GraphIngestor kan tage den senere hvis nødvendigt) + const filename = `harvest_${topic.replace(/[^a-z0-9]/gi, '_').toLowerCase()}_${Date.now()}.md`; + const filePath = path.join(this.dropZonePath, filename); + + const fileContent = `# Harvested Intelligence: ${topic}\n\nDate: ${new Date().toISOString()}\nSource: Web Scrape\n\n## Content\n${content}`; + fs.writeFileSync(filePath, fileContent); + + // 3. Direkte Graph Injection (Så vi ikke skal vente på fil-scanning) + await neo4jService.write( + ` + MERGE (t:Topic {name: $topic}) + CREATE (d:Document { + title: 'Harvested: ' + $topic, + path: $path, + content: $content, + type: 'EXTERNAL_KNOWLEDGE', + ingestedAt: datetime() + }) + MERGE (d)-[:ABOUT]->(t) + `, + { topic, path: filePath, content } + ); + + return content; + } catch (error) { + console.error('❌ [OmniHarvester] Failed to harvest knowledge:', error); + throw error; + } + } + + // --- Private Helpers & Intelligence Logic --- + + private analyzePayload(payload: string): string { + const p = payload.toLowerCase(); + if (p.includes('union select') || p.includes('1=1')) return 'SQL Injection'; + if (p.includes('