NOT-OMEGA committed on
Commit
a563c79
·
verified ·
1 Parent(s): d78660f

Create Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +35 -0
Dockerfile ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# syntax=docker/dockerfile:1

# Use a slim Python image
FROM python:3.10-slim

# Install g++ and OpenMP runtime for the C++ code.
# (g++ -fopenmp links libgomp, which ships with gcc; libomp-dev is the LLVM
# OpenMP package and is kept only to match the original image contents.)
RUN apt-get update && apt-get install -y --no-install-recommends \
        g++ \
        libomp-dev \
    && rm -rf /var/lib/apt/lists/*

# Set the working directory
WORKDIR /app

# Install Python requirements first, so this layer stays cached until
# requirements.txt itself changes (source edits no longer reinstall deps).
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy the application source into the container
COPY . /app

# Compile the C++ inference engine.
# IMPORTANT: -march=native tunes for the *build* machine's CPU and can emit
# AVX2/FMA instructions that crash (SIGILL) on the cloud CPU that actually
# runs the container — the very problem removing -mavx2 was meant to avoid.
# Target the portable x86-64 baseline instead (and drop -mfma for the same
# reason). The output is a Linux binary called 'inference'; g++ already
# marks it executable, so no chmod step is needed.
RUN g++ -O3 -march=x86-64 -fopenmp -std=c++17 inference.cpp -o inference -lm

# Create a non-root user to run the app (HF requirement for some spaces)
RUN useradd -m appuser && chown -R appuser:appuser /app
USER appuser

# Expose port 7860 (Hugging Face standard; documentation only — the platform
# publishes the port)
EXPOSE 7860

# Command to run the FastAPI app. Exec form keeps uvicorn as PID 1 so it
# receives SIGTERM directly on container stop.
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]