ZAIDX11 committed on
Commit
b657fcc
·
verified ·
1 Parent(s): 0eac5ef

Add files using upload-large-folder tool

Browse files
Axiovorax/.github/copilot-instructions.md ADDED
File without changes
Axiovorax/.gitignore ADDED
File without changes
Axiovorax/README.md ADDED
File without changes
Axiovorax/axiovorax/__init__.py ADDED
File without changes
Axiovorax/axiovorax/genesis.py ADDED
@@ -0,0 +1,46 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Genesis module for mathematical universe generation.
3
+ """
4
+ import logging
5
+ from typing import List, Any
6
+
7
+ class MathematicalGenesis:
8
+ """
9
+ Handles the creation and evolution of mathematical universes from axioms.
10
+ """
11
+ def __init__(self, axioms: List[str]):
12
+ """
13
+ Initialize with a set of axioms.
14
+ Args:
15
+ axioms (List[str]): List of initial axioms.
16
+ """
17
+ self.axioms = axioms
18
+ self.universe = None
19
+ logging.info("MathematicalGenesis initialized with axioms: %s", axioms)
20
+
21
+ def generate_universe(self) -> Any:
22
+ """
23
+ Generate a new mathematical universe from the current axioms.
24
+ Returns:
25
+ Any: A representation of the generated universe.
26
+ """
27
+ logging.info("Generating mathematical universe from axioms.")
28
+ # Minimal working logic: create a graph of axioms
29
+ import networkx as nx
30
+ G = nx.Graph()
31
+ for ax in self.axioms:
32
+ G.add_node(ax) # type: ignore
33
+
34
+ self.universe = G
35
+ logging.info("Universe generated with %d nodes.", len(G.nodes))
36
+ return G
37
+
38
+ def evolve_axioms(self):
39
+ """
40
+ Evolve the set of axioms (mock logic).
41
+ """
42
+ logging.info("Evolving axioms.")
43
+ # Example: add a new axiom
44
+ new_axiom = f"Axiom {len(self.axioms)+1}"
45
+ self.axioms.append(new_axiom)
46
+ logging.info("Added new axiom: %s", new_axiom)
Axiovorax/axiovorax/meta_cognitive_core.py ADDED
File without changes
Axiovorax/axiovorax/quantum_layer.py ADDED
File without changes
Axiovorax/config.yaml ADDED
File without changes
Axiovorax/data/.gitkeep ADDED
File without changes
Axiovorax/docs/.gitkeep ADDED
File without changes
Axiovorax/modules/__init__.py ADDED
File without changes
Axiovorax/modules/utils.py ADDED
File without changes
Axiovorax/modules/visualizer.py ADDED
File without changes
Axiovorax/notebooks/exploration.ipynb ADDED
File without changes
Axiovorax/requirements.txt ADDED
File without changes
Axiovorax/tests/test_genesis.py ADDED
File without changes
Axiovorax/tests/test_quantum.py ADDED
File without changes
archive/Axiovorax/modules/__init__.py ADDED
File without changes
archive/Axiovorax/notebooks/exploration.ipynb ADDED
@@ -0,0 +1,90 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "markdown",
5
+ "id": "25cda4ee",
6
+ "metadata": {},
7
+ "source": [
8
+ "# Axiovorax System Exploration\n",
9
+ "An AI-driven, quantum-inspired engine for evolving mathematical universes and generating new theorems.\n",
10
+ "\n",
11
+ "This notebook demonstrates the core features of the Axiovorax research platform."
12
+ ]
13
+ },
14
+ {
15
+ "cell_type": "code",
16
+ "execution_count": null,
17
+ "id": "8a58dd39",
18
+ "metadata": {},
19
+ "outputs": [],
20
+ "source": [
21
+ "# Import core Axiovorax classes and set up logging\n",
22
+ "from axiovorax.genesis import MathematicalGenesis\n",
23
+ "from axiovorax.meta_cognitive_core import MetaCognitiveCore\n",
24
+ "from axiovorax.quantum_layer import QuantumOrchestrator\n",
25
+ "from modules.utils import setup_logging\n",
26
+ "setup_logging()"
27
+ ]
28
+ },
29
+ {
30
+ "cell_type": "code",
31
+ "execution_count": null,
32
+ "id": "bb1d87e4",
33
+ "metadata": {},
34
+ "outputs": [],
35
+ "source": [
36
+ "# Create a MathematicalGenesis instance and generate a universe\n",
37
+ "axioms = [\"Axiom 1: Existence\", \"Axiom 2: Uniqueness\"]\n",
38
+ "mg = MathematicalGenesis(axioms)\n",
39
+ "universe = mg.generate_universe()\n",
40
+ "print(f\"Generated universe nodes: {list(universe.nodes)}\")"
41
+ ]
42
+ },
43
+ {
44
+ "cell_type": "code",
45
+ "execution_count": null,
46
+ "id": "352408e6",
47
+ "metadata": {},
48
+ "outputs": [],
49
+ "source": [
50
+ "# Evolve axioms and show updated universe\n",
51
+ "mg.evolve_axioms()\n",
52
+ "universe2 = mg.generate_universe()\n",
53
+ "print(f\"Evolved universe nodes: {list(universe2.nodes)}\")"
54
+ ]
55
+ },
56
+ {
57
+ "cell_type": "code",
58
+ "execution_count": null,
59
+ "id": "9969aaab",
60
+ "metadata": {},
61
+ "outputs": [],
62
+ "source": [
63
+ "# Run a hybrid quantum-classical algorithm\n",
64
+ "qo = QuantumOrchestrator()\n",
65
+ "result = qo.run_hybrid_algorithm({\"universe\": list(universe2.nodes)})\n",
66
+ "print(f\"Hybrid algorithm result: {result}\")"
67
+ ]
68
+ },
69
+ {
70
+ "cell_type": "code",
71
+ "execution_count": null,
72
+ "id": "c13e2583",
73
+ "metadata": {},
74
+ "outputs": [],
75
+ "source": [
76
+ "# Use MetaCognitiveCore for proof storage and retrieval\n",
77
+ "mcc = MetaCognitiveCore()\n",
78
+ "mcc.store_proof(\"Proof of Axiom 1 implies Axiom 2\")\n",
79
+ "print(\"Past proofs:\", mcc.retrieve_past_results())"
80
+ ]
81
+ }
82
+ ],
83
+ "metadata": {
84
+ "language_info": {
85
+ "name": "python"
86
+ }
87
+ },
88
+ "nbformat": 4,
89
+ "nbformat_minor": 5
90
+ }
archive/Axiovorax/tests/test_genesis.py ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Test for MathematicalGenesis universe generation.
3
+ """
4
+ import pytest
5
+ from axiovorax.genesis import MathematicalGenesis
6
+
7
+ def test_generate_universe():
8
+ axioms = ["Axiom 1", "Axiom 2"]
9
+ mg = MathematicalGenesis(axioms)
10
+ universe = mg.generate_universe()
11
+ assert universe is not None
12
+ assert len(universe.nodes) > 0
13
+
archive/Axiovorax/tests/test_quantum.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+
2
+ """
3
+ Test for QuantumOrchestrator output format.
4
+ """
5
+ from axiovorax.quantum_layer import QuantumOrchestrator
6
+
7
+ def test_run_hybrid_algorithm():
8
+ qo = QuantumOrchestrator()
9
+ data = {"input": 1}
10
+ result = qo.run_search(data) # type: ignore
11
+ assert isinstance(result, dict)
12
+ assert "score" in result
13
+ assert "info" in result
14
+
archive/modules/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ """Utility modules for Axiovorax."""
archive/modules/open_problems.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ OPEN_PROBLEMS = [
2
+ "Riemann Hypothesis",
3
+ "P vs NP",
4
+ "Goldbach Conjecture",
5
+ "Hodge Conjecture"
6
+ ]
7
+
8
+ class OpenProblemSolver:
9
+ def __init__(self, genesis_engine):
10
+ self.genesis = genesis_engine
11
+
12
+ def attempt_proof(self, problem):
13
+ axioms = self.genesis.axioms
14
+ return {"problem": problem, "attempt": f"Using axioms {axioms} to attempt solving {problem}...", "status": "In Progress"}
archive/modules/storage.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ import json
2
+
3
+ def save_to_file(filename, data):
4
+ with open(filename, "w") as f:
5
+ json.dump(data, f)
6
+
7
+ def load_from_file(filename):
8
+ with open(filename, "r") as f:
9
+ return json.load(f)
archive/modules/utils.py ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ def format_response(data):
2
+ return {"status": "success", "data": data}
archive/notebooks/exploration.ipynb ADDED
@@ -0,0 +1,22 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cells": [
3
+ {
4
+ "cell_type": "code",
5
+ "execution_count": null,
6
+ "metadata": {},
7
+ "outputs": [],
8
+ "source": [
9
+ "from axiovorax.genesis import MathematicalGenesis\n",
10
+ "mg = MathematicalGenesis(axioms=['A1','A2'], seed=1)\n",
11
+ "g = mg.generate_universe()\n",
12
+ "mg.summary()\n"
13
+ ]
14
+ }
15
+ ],
16
+ "metadata": {
17
+ "kernelspec": {"name": "python3", "display_name": "Python 3"},
18
+ "language_info": {"name": "python"}
19
+ },
20
+ "nbformat": 4,
21
+ "nbformat_minor": 2
22
+ }
archive/tests/test_api.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ from fastapi.testclient import TestClient
2
+ from app import app
3
+
4
+ client = TestClient(app)
5
+
6
+ def test_generate_universe():
7
+ response = client.get("/generate_universe")
8
+ assert response.status_code == 200
archive/tests/test_genesis.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ from genesis import GenesisEngine
2
+
3
+ def test_generate_universe():
4
+ g = GenesisEngine()
5
+ universe = g.generate_universe()
6
+ assert "axioms" in universe
archive/tests/test_quantum.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ import sys
2
+ import os
3
+ sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))
4
+ from axiovorax.quantum_layer import QuantumOrchestrator
5
+ def test_quantum_run(): q = QuantumOrchestrator(); out = q.run_search({'statement': 'example'}); assert 'score' in out
6
+
backend/.dockerignore ADDED
File without changes
backend/.env.example ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Example environment variables
2
+
3
+ # Database URL
4
+ DB_URL=sqlite:///:memory:
5
+
6
+ # Other environment variables can be added here as needed. # Example environment variables for the backend
7
+ # Copy this file to .env and fill in secrets before running in development
8
+
9
+ # Database URL - use sqlite for local development or a Postgres URL in production
10
+ # Examples:
11
+ # sqlite:///./dev.db
12
+ # postgresql+psycopg2://user:password@localhost:5432/dbname
13
+ DB_URL=sqlite:///:memory:
14
+
15
+ # Application secret key (used for signing tokens)
16
+ SECRET_KEY=replace-me-with-a-secret
17
+
18
+ # Sentry DSN (optional)
19
+ SENTRY_DSN=
20
+
21
+ # Other optional flags
22
+ #LOG_LEVEL=INFODB_URL=sqlite:///./backend/db/projectv1.db
23
+ SECRET_KEY=your_secret_key_here
backend/.gitignore ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Ignore Python cache and environment
2
+ __pycache__/
3
+ *.pyc
4
+ venv/
5
+ .env
6
+
7
+ # Ignore SQLite DB
8
+ backend/db/*.db
9
+
10
+ # Ignore test results
11
+ *.log
12
+ .coverage
backend/Dockerfile ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Project V1 Backend Dockerfile
2
+ FROM python:3.10-slim
3
+
4
+ WORKDIR /app
5
+
6
+ COPY requirements.txt ./
7
+ RUN pip install --upgrade pip && pip install -r requirements.txt
8
+
9
+ COPY backend ./backend
10
+ COPY .env ./backend/.env
11
+
12
+ EXPOSE 8000
13
+
14
+ CMD ["uvicorn", "backend.app:app", "--host", "0.0.0.0", "--port", "8000"]
backend/api.py ADDED
@@ -0,0 +1,67 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from pydantic import BaseModel
2
+ from .universe import UniverseManager
3
+
4
+
5
+ class AxiomsRequest(BaseModel):
6
+ axioms: list[str]
7
+
8
+
9
+ class ProveRequest(BaseModel):
10
+ theorem: str
11
+ universe_id: str
12
+
13
+
14
+ class PipelineRequest(BaseModel):
15
+ axioms: list[str]
16
+ theorem: str
17
+
18
+
19
+ def create_app():
20
+ """Create and return the FastAPI app. This is lazy to avoid import-time
21
+ collisions with a repo-local `fastapi/` directory when running tests in
22
+ this workspace.
23
+ """
24
+ from fastapi import FastAPI, HTTPException
25
+ from .auth import require_token
26
+ from .pipeline import Pipeline
27
+
28
+ app = FastAPI(title="AlphaGeometry Demo API")
29
+ manager = UniverseManager()
30
+ pipeline = Pipeline(manager)
31
+
32
+ @app.post("/generateUniverse")
33
+ def generate_universe(req: AxiomsRequest):
34
+ uid = manager.create_universe(req.axioms)
35
+ return {"universe_id": uid}
36
+
37
+ @app.post("/prove")
38
+ def prove(req: ProveRequest):
39
+ try:
40
+ result = manager.prove(req.universe_id, req.theorem)
41
+ except KeyError:
42
+ raise HTTPException(status_code=404, detail="Universe not found")
43
+ return {"result": result}
44
+
45
+ @app.post("/pipeline/generate_and_prove")
46
+ def pipeline_generate_and_prove(req: PipelineRequest, x_api_key: str | None = None):
47
+ # basic auth header check
48
+ require_token(x_api_key)
49
+ res = pipeline.run_generate_and_prove(req.axioms, req.theorem)
50
+ return res
51
+
52
+ @app.get("/compare/{a}/{b}")
53
+ def compare(a: str, b: str):
54
+ try:
55
+ score = manager.compare_universes(a, b)
56
+ except KeyError:
57
+ raise HTTPException(status_code=404, detail="Universe not found")
58
+ return {"similarity": score}
59
+
60
+ @app.get("/health")
61
+ def health():
62
+ return {"status": "ok"}
63
+
64
+ return app
65
+
66
+
67
+ __all__ = ["create_app"]
backend/app.py ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import FastAPI
2
+ from backend.db.session import engine
3
+ from backend.db.models import Base
4
+ from backend.api.routes import router
5
+ from backend.api.neuro_symbolic_routes import router as neuro_router
6
+ from backend.api.quantum_routes import router as quantum_router
7
+ from backend.api.analysis_routes import router as analysis_router
8
+ from backend.api.visualization_routes import router as visualization_router
9
+ from backend.api.query_routes import router as query_router
10
+ from backend.api.vector_routes import router as vector_router
11
+ from backend.api.vector_routes import router as vector_router
12
+
13
+ app = FastAPI()
14
+
15
+ @app.on_event("startup")
16
+ def startup():
17
+ Base.metadata.create_all(bind=engine)
18
+
19
+ @app.get("/health")
20
+ def health():
21
+ return {"status": "ok"}
22
+
23
+ app.include_router(router)
24
+ app.include_router(neuro_router)
25
+ app.include_router(quantum_router)
26
+ app.include_router(analysis_router)
27
+ app.include_router(visualization_router)
28
+ app.include_router(query_router)
29
+ app.include_router(vector_router)
30
+ # duplicate app.include_router(vector_router) removed (already registered above)
backend/auth.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from .config import get_env
2
+
3
+
4
+ def require_token(x_api_key: str | None = None):
5
+ """Check the API token. Does not import FastAPI at module import time so
6
+ tests can run when a repo-local `fastapi/` folder is present. If FastAPI is
7
+ available at runtime, raise an HTTPException to produce a proper 401 for
8
+ API endpoints; otherwise raise PermissionError for programmatic callers.
9
+ """
10
+ expected = get_env("DEMO_API_TOKEN", "demo-token")
11
+ if x_api_key != expected:
12
+ try:
13
+ # import lazily so tests without FastAPI don't fail at import time
14
+ from fastapi import HTTPException
15
+ except Exception:
16
+ raise PermissionError("Invalid API token")
17
+ raise HTTPException(status_code=401, detail="Invalid API token")
backend/config.py ADDED
@@ -0,0 +1,29 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Configuration helpers."""
2
+ import os
3
+ from typing import Optional
4
+
5
+ try:
6
+ from dotenv import load_dotenv
7
+ load_dotenv()
8
+ except Exception:
9
+ pass
10
+
11
+
12
+ def get_env(key: str, default: Optional[str] = None) -> Optional[str]:
13
+ return os.environ.get(key, default)
14
+ import os
15
+ try:
16
+ from dotenv import load_dotenv
17
+ load_dotenv()
18
+ except Exception:
19
+ # If python-dotenv isn't installed, continue with environment variables or defaults
20
+ try:
21
+ # If there's a local shim, call it
22
+ from dotenv import load_dotenv as _ld
23
+ _ld()
24
+ except Exception:
25
+ pass
26
+
27
+ # Default to an in-memory SQLite DB for tests/local development when DB_URL is not set
28
+ DB_URL = os.getenv('DB_URL') or 'sqlite:///:memory:'
29
+ SECRET_KEY = os.getenv('SECRET_KEY') or 'dev-secret'
backend/embeddings.py ADDED
@@ -0,0 +1,51 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Pluggable embedding interface. Provides simple char-histogram fallback and
2
+ an optional sentence-transformers adapter if available.
3
+ """
4
+ from typing import List
5
+ import math
6
+
7
+ try:
8
+ from sentence_transformers import SentenceTransformer
9
+ SBER_AVAILABLE = True
10
+ except Exception:
11
+ SBER_AVAILABLE = False
12
+
13
+
14
+ class EmbeddingBackend:
15
+ def embed(self, texts: List[str]) -> List[List[float]]:
16
+ raise NotImplementedError()
17
+
18
+
19
+ class CharHistogramEmbedding(EmbeddingBackend):
20
+ def __init__(self, dim: int = 32):
21
+ self.dim = dim
22
+
23
+ def embed(self, texts: List[str]) -> List[List[float]]:
24
+ def _embed(text: str):
25
+ vec = [0.0] * self.dim
26
+ for ch in text[:4096]:
27
+ vec[ord(ch) % self.dim] += 1.0
28
+ norm = math.sqrt(sum(v * v for v in vec)) or 1.0
29
+ return [v / norm for v in vec]
30
+
31
+ return [_embed(t) for t in texts]
32
+
33
+
34
+ class SBERTEmbedding(EmbeddingBackend):
35
+ def __init__(self, model_name: str = "sentence-transformers/all-MiniLM-L6-v2"):
36
+ if not SBER_AVAILABLE:
37
+ raise RuntimeError("sentence-transformers not installed")
38
+ self.model = SentenceTransformer(model_name)
39
+
40
+ def embed(self, texts: List[str]) -> List[List[float]]:
41
+ arr = self.model.encode(texts)
42
+ return [list(map(float, vec)) for vec in arr]
43
+
44
+
45
+ def make_default_backend() -> EmbeddingBackend:
46
+ if SBER_AVAILABLE:
47
+ try:
48
+ return SBERTEmbedding()
49
+ except Exception:
50
+ pass
51
+ return CharHistogramEmbedding()
backend/evolution.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Evolutionary algorithms for evolving axioms.
2
+
3
+ Provides a small GeneticAlgorithm class that mutates and recombines axioms (strings)
4
+ to produce candidate new axioms. Pure-Python fallback implementation so the
5
+ project can run without external evolutionary frameworks.
6
+ """
7
+ import random
8
+ from typing import List, Callable
9
+
10
+
11
+ class GeneticAlgorithm:
12
+ def __init__(self, population: List[str], fitness_fn: Callable[[str], float],
13
+ mutate_rate: float = 0.2, crossover_rate: float = 0.5):
14
+ self.population = list(population)
15
+ self.fitness_fn = fitness_fn
16
+ self.mutate_rate = mutate_rate
17
+ self.crossover_rate = crossover_rate
18
+
19
+ def _mutate(self, axiom: str) -> str:
20
+ if random.random() > self.mutate_rate:
21
+ return axiom
22
+ # simple mutation: insert/delete/replace a character or word
23
+ ops = [self._insert_char, self._delete_char, self._replace_char, self._swap_words]
24
+ op = random.choice(ops)
25
+ return op(axiom)
26
+
27
+ def _insert_char(self, s: str) -> str:
28
+ pos = random.randint(0, len(s))
29
+ ch = random.choice('abcdefghijklmnopqrstuvwxyz ')
30
+ return s[:pos] + ch + s[pos:]
31
+
32
+ def _delete_char(self, s: str) -> str:
33
+ if not s:
34
+ return s
35
+ pos = random.randint(0, len(s)-1)
36
+ return s[:pos] + s[pos+1:]
37
+
38
+ def _replace_char(self, s: str) -> str:
39
+ if not s:
40
+ return s
41
+ pos = random.randint(0, len(s)-1)
42
+ ch = random.choice('abcdefghijklmnopqrstuvwxyz ')
43
+ return s[:pos] + ch + s[pos+1:]
44
+
45
+ def _swap_words(self, s: str) -> str:
46
+ parts = s.split()
47
+ if len(parts) < 2:
48
+ return s
49
+ i, j = random.sample(range(len(parts)), 2)
50
+ parts[i], parts[j] = parts[j], parts[i]
51
+ return ' '.join(parts)
52
+
53
+ def _crossover(self, a: str, b: str) -> str:
54
+ # single-point crossover at word boundary
55
+ wa = a.split()
56
+ wb = b.split()
57
+ if not wa or not wb:
58
+ return a
59
+ pa = random.randint(0, len(wa))
60
+ pb = random.randint(0, len(wb))
61
+ child = ' '.join(wa[:pa] + wb[pb:])
62
+ return child
63
+
64
+ def step(self, k: int = 10) -> List[str]:
65
+ """Run one generation, return top-k candidates."""
66
+ scored = [(p, self.fitness_fn(p)) for p in self.population]
67
+ scored.sort(key=lambda x: x[1], reverse=True)
68
+ survivors = [p for p, _ in scored[: max(2, len(scored)//2)]]
69
+
70
+ # produce new population
71
+ new_pop = list(survivors)
72
+ while len(new_pop) < len(self.population):
73
+ if random.random() < self.crossover_rate:
74
+ a, b = random.sample(survivors, 2)
75
+ child = self._crossover(a, b)
76
+ else:
77
+ child = random.choice(survivors)
78
+ child = self._mutate(child)
79
+ new_pop.append(child)
80
+
81
+ self.population = new_pop
82
+ result = [(p, self.fitness_fn(p)) for p in self.population]
83
+ result.sort(key=lambda x: x[1], reverse=True)
84
+ return [p for p, _ in result[:k]]
backend/geometry.py ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ """Lightweight placeholder for geometry module used by legacy tests.
2
+
3
+ This file provides minimal stubs so unit tests that import `geometry` can run
4
+ in the demo environment. Replace with the project's real geometry module as needed.
5
+ """
6
+ def distance(a, b):
7
+ return 0.0
8
+
9
+ def area_of_triangle(a, b, c):
10
+ return 0.0
backend/grpc_server.py ADDED
@@ -0,0 +1,34 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """gRPC server skeleton for AlphaGeometry using proto/alphageometry.proto.
2
+
3
+ This is a lightweight skeleton; to run it install grpcio and grpcio-tools and
4
+ generate Python stubs from the proto. The server handlers delegate to the
5
+ existing Pipeline and UniverseManager implementations.
6
+ """
7
+ import os
8
+ try:
9
+ import grpc
10
+ # import generated modules when available
11
+ from proto import alphageometry_pb2_grpc as pb2_grpc
12
+ from proto import alphageometry_pb2 as pb2
13
+ except Exception:
14
+ grpc = None
15
+
16
+
17
+ class AlphaGeometryServicer:
18
+ # placeholder methods if proto generated modules available
19
+ pass
20
+
21
+
22
+ def serve(port: int = 50051):
23
+ if grpc is None:
24
+ raise RuntimeError("grpc not installed or generated stubs are missing")
25
+ from concurrent import futures  # grpc.thread_pool_executor does not exist in grpcio
26
+ server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
27
+ # grpc.server takes the executor directly; no extra close paren needed
28
+ pb2_grpc.add_AlphaGeometryServicer_to_server(AlphaGeometryServicer(), server)
29
+ server.add_insecure_port(f"[::]:{port}")
30
+ server.start()
31
+ try:
32
+ server.wait_for_termination()
33
+ except KeyboardInterrupt:
34
+ server.stop(0)
backend/integration_test.py ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Integration test script: attempts to connect to Neo4j and Faiss index.
2
+ This script is safe: it catches missing libraries and reports status.
3
+ """
4
+ import sys
5
+ import traceback
6
+
7
+ def try_neo4j():
8
+ try:
9
+ from backend.adapters.graph_adapter import Neo4jAdapter
10
+ print("Neo4jAdapter loaded")
11
+ a = Neo4jAdapter()
12
+ if not a.is_available():
13
+ print("Neo4j driver not available or connection not established")
14
+ return
15
+ # create test node
16
+ res = a.run("CREATE (n:Test {name:$name}) RETURN id(n) as id", name="integration_test")
17
+ print("Neo4j create node result:", res)
18
+ a.close()
19
+ except Exception as e:
20
+ print("Neo4j test failed:")
21
+ traceback.print_exc()
22
+
23
+
24
+ def try_faiss():
25
+ try:
26
+ from backend.adapters.vector_adapter_full import FaissIndex
27
+ print("FaissIndex available")
28
+ dim = 32
29
+ idx = FaissIndex(dim)
30
+ vec = [float(i) for i in range(dim)]
31
+ idx.upsert("u1", vec)
32
+ results = idx.search(vec, top_k=1)
33
+ print("Faiss search results:", results)
34
+ except Exception as e:
35
+ print("Faiss test failed or faiss not installed:")
36
+ traceback.print_exc()
37
+
38
+
39
+ def main():
40
+ print("Running integration tests...")
41
+ try_neo4j()
42
+ try_faiss()
43
+
44
+ if __name__ == '__main__':
45
+ main()
backend/memory.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """Cross-universe memory layer: embeddings, vector DB, knowledge graph, object store stubs."""
2
+ from typing import Dict, Any, List
3
+ from .adapters.vector_adapter import InMemoryVectorIndex
4
+ from .adapters.graph_adapter import NetworkXGraph, Neo4jAdapter
5
+ try:
6
+ from .adapters.vector_adapter_full import FaissIndex
7
+ except Exception:
8
+ FaissIndex = None
9
+ import json
10
+ import os
11
+ import os as _os
12
+
13
+
14
+ class MemoryManager:
15
+ def __init__(self, storage_dir: str = "./memory_store"):
16
+ # optionally use FAISS if requested
17
+ if _os.environ.get("USE_FAISS", "0") == "1" and FaissIndex is not None:
18
+ try:
19
+ dim = int(_os.environ.get("FAISS_DIM", "32"))
20
+ self.vindex = FaissIndex(dim)
21
+ except Exception:
22
+ self.vindex = InMemoryVectorIndex()
23
+ else:
24
+ self.vindex = InMemoryVectorIndex()
25
+ self.kg = NetworkXGraph()
26
+ self.storage_dir = storage_dir
27
+ os.makedirs(self.storage_dir, exist_ok=True)
28
+ # optionally try to connect to Neo4j if requested via env
29
+ self.neo4j = None
30
+ try:
31
+ if _os.environ.get("USE_NEO4J", "0") == "1":
32
+ self.neo4j = Neo4jAdapter(uri=_os.environ.get("NEO4J_URI"), user=_os.environ.get("NEO4J_USER"), password=_os.environ.get("NEO4J_PASSWORD"))
33
+ if not self.neo4j.is_available():
34
+ self.neo4j = None
35
+ except Exception:
36
+ self.neo4j = None
37
+
38
+ def index_universe(self, universe_id: str, embedding: List[float], metadata: Dict[str, Any] = None):
39
+ self.vindex.upsert(universe_id, embedding)
40
+ self.kg.add_node(universe_id, **(metadata or {}))
41
+ if self.neo4j:
42
+ try:
43
+ props = {"id": universe_id}
44
+ if metadata:
45
+ props.update(metadata)
46
+ self.neo4j.create_node(labels=["Universe"], props=props)
47
+ except Exception:
48
+ pass
49
+
50
+ def snapshot_universe(self, universe_id: str, universe_obj: Any):
51
+ path = os.path.join(self.storage_dir, f"{universe_id}.json")
52
+ with open(path, "w", encoding="utf-8") as f:
53
+ json.dump(universe_obj, f, default=str)
54
+ return path
55
+
56
+ def query_similar(self, embedding: List[float], top_k: int = 10):
57
+ return self.vindex.search(embedding, top_k=top_k)