File size: 1,267 Bytes
b28041c
 
 
 
 
 
 
 
 
 
 
1748b43
b28041c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
567f582
b28041c
73465e6
 
567f582
 
b28041c
 
 
 
567f582
e679ce0
 
567f582
b28041c
 
567f582
b28041c
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
# ---- Stage 1: build the React frontend ----
# Artifacts from this stage (web_client/dist) are copied into the runtime
# image below; node and node_modules never reach the final image.
FROM node:20 AS build-frontend

WORKDIR /app/web_client

# Copy lockfiles first so the `npm ci` layer stays cached until the
# dependency manifests actually change.
COPY web_client/package.json web_client/package-lock.json ./
RUN npm ci

# Copy the rest of the frontend source and produce the production bundle.
COPY web_client/ ./
RUN npm run build

# ---- Stage 2: Python backend & runtime ----
FROM python:3.11-slim

# Toolchain required to compile llama-cpp-python (and native extensions)
# from source during `pip install`.
# NOTE(review): these packages remain in the final image; a dedicated
# compile stage would shrink it further — confirm nothing needs them at runtime.
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    cmake \
    git \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Hugging Face Spaces runs the container as UID 1000; create a matching
# non-root user and give it the model-download directory, instead of the
# world-writable `chmod 777` workaround.
RUN useradd --create-home --uid 1000 appuser \
    && mkdir -p /app/models \
    && chown appuser:appuser /app/models

# Copy the requirements manifest alone so the dependency layer is cached
# until requirements.txt changes.
COPY requirements.txt .

# Upgrade pip, then install the pinned Python dependencies.
RUN pip install --no-cache-dir --upgrade pip && \
    pip install --no-cache-dir -r requirements.txt

# Build llama-cpp-python from source, CPU-only.
# NOTE(review): upstream renamed LLAMA_CUBLAS -> GGML_CUDA; on 0.2.90 the
# old flag is ignored with a CMake warning, and CPU-only is the default,
# so "off" here is belt-and-braces documentation of intent.
RUN CMAKE_ARGS="-DGGML_CUDA=off" pip install --no-cache-dir llama-cpp-python==0.2.90

# Copy the rest of the application, owned by the runtime user.
# NOTE(review): ensure a .dockerignore excludes .git, node_modules, and any
# local model files so they don't bloat the context or leak into the image.
COPY --chown=appuser:appuser . .

# Install the backend package defined by pyproject.toml.
RUN pip install --no-cache-dir .

# Pull in the built frontend bundle from the build stage.
COPY --from=build-frontend --chown=appuser:appuser /app/web_client/dist /app/web_client/dist

# Drop root for runtime; UID 1000 matches what Hugging Face Spaces expects.
USER appuser

# Hugging Face Spaces serves the app on port 7860 by convention.
ENV PORT=7860
EXPOSE 7860

CMD ["python", "server.py"]