# syntax=docker/dockerfile:1
# Use a small Python image (-slim keeps the attack surface and size down)
FROM python:3.11-slim

# Container-friendly Python defaults:
# - don't write .pyc files into image layers
# - flush stdout/stderr immediately so logs reach `docker logs` in real time
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

# Set working folder (WORKDIR creates it if missing)
WORKDIR /app

# Copy only deps first for better caching — editing source code
# won't invalidate the installed-dependencies layer
COPY requirements.txt .

# Install FastAPI, uvicorn, meta-ai-api, openai
RUN pip install --no-cache-dir -r requirements.txt

# Copy rest of code
COPY . .

# Don't run as root: create an unprivileged system user and hand it the app dir
RUN groupadd --system app \
    && useradd --system --gid app --home /app app \
    && chown -R app:app /app
USER app

# Let FastAPI know which model to use
# You can override this when you run the container (-e MODEL_NAME=...)
ENV MODEL_NAME=llama-3-70b

# Expose port (documentation only — publish with `-p 7860:7860`;
# 7860 is unprivileged, so the non-root user can bind it)
EXPOSE 7860

# Start the app (exec form: uvicorn is PID 1 and receives SIGTERM on stop)
CMD ["uvicorn", "main:app", "--host", "0.0.0.0", "--port", "7860"]