# omega-memory-mcp / Dockerfile
# Author: Jason Sosa
# Deploy OMEGA MCP server (Streamable HTTP)
# Source commit: 34cb740
# Slim Debian-based Python image; tag pinned to a specific minor version.
FROM python:3.12-slim

# HF Spaces runs containers as uid 1000; create a matching non-root user.
# -m creates /home/user owned by that user.
RUN useradd -m -u 1000 user

WORKDIR /app

# System dependencies: curl is needed below to fetch the embedding model.
# update + install share one layer (avoids the stale apt-cache bug), and the
# package lists are removed in the same layer so they never reach the image.
RUN apt-get update \
    && apt-get install -y --no-install-recommends \
        curl \
    && rm -rf /var/lib/apt/lists/*
# Copy the dependency manifest first so later layers cache until it changes.
COPY pyproject.toml ./
# The pyproject.toml build expects a README.md in the build root (HF Space
# convention). COPY renames on the fly, which drops the extra `RUN cp` layer
# and the duplicate PROJECT_README.md file the old approach left in the image.
COPY PROJECT_README.md ./README.md
COPY src/ src/
# --no-cache-dir keeps pip's wheel cache out of the layer.
RUN pip install --no-cache-dir .
# Download the ONNX embedding model at build time (~130MB) and create the
# omega data dir. Ownership is fixed in the SAME layer: a separate
# `RUN chown -R` would copy-up every downloaded file into a new ~130MB
# layer (overlayfs), roughly doubling this portion of the image.
# NOTE(review): downloads are not checksum-verified — consider pinning the
# HF revision (resolve/<commit>/...) or verifying sha256 for reproducibility.
RUN set -eux; \
    model_dir=/home/user/.cache/omega/models/bge-small-en-v1.5-onnx; \
    base_url=https://huggingface.co/BAAI/bge-small-en-v1.5/resolve/main; \
    mkdir -p "$model_dir" /home/user/.omega; \
    curl -fSL -o "$model_dir/model.onnx" "$base_url/onnx/model.onnx"; \
    curl -fSL -o "$model_dir/tokenizer.json" "$base_url/tokenizer.json"; \
    curl -fSL -o "$model_dir/config.json" "$base_url/config.json"; \
    curl -fSL -o "$model_dir/tokenizer_config.json" "$base_url/tokenizer_config.json"; \
    chown -R user:user /home/user
# Drop privileges for runtime; every root-only step (apt, downloads, chown)
# is complete above this point.
USER user
# Documentation only (does not publish the port); 7860 is the HF Spaces default.
EXPOSE 7860
# Run HTTP server on port 7860 (HF Spaces default), no auth for public access.
# Exec-form CMD: python is PID 1 and receives SIGTERM from `docker stop`.
# NOTE(review): the third argument None presumably disables authentication —
# confirm against omega.server.http_server.run_http's signature.
CMD ["python", "-c", "import asyncio; from omega.server.http_server import run_http; asyncio.run(run_http('0.0.0.0', 7860, None))"]