Varun6299 committed on
Commit
ab703ea
·
verified ·
1 Parent(s): 55b8a27

Upload folder using huggingface_hub

Browse files
Files changed (1) hide show
  1. Dockerfile +9 -57
Dockerfile CHANGED
@@ -1,64 +1,16 @@
1
- # Use a base image with Python and ideally CUDA support for GPU acceleration if available
2
- # Adjust the base image based on your needs and the environment of your deployment target (Hugging Face Spaces offers GPU options)
3
- # For CPU-only:
4
- # FROM python:3.9-slim
5
 
6
- # For CUDA support (example, check specific requirements for your CUDA version and OS):
7
- # FROM nvidia/cuda:11.6.2-cudnn8-runtime-ubuntu20.04
8
- # ENV PYTHON_VERSION=3.9
9
- # RUN apt-get update && apt-get install -y python${PYTHON_VERSION} python3-pip && rm -rf /var/lib/apt/lists/*
10
- # RUN ln -s /usr/bin/python3 /usr/bin/python
11
 
12
- # Let's use a general Python image for demonstration. If you plan to use GPU,
13
- # you'll need a base image with CUDA drivers and potentially install additional libs.
14
- FROM python:3.9
15
-
16
- # Set the working directory in the container
17
  WORKDIR /app
18
 
19
- # Copy the backend directory, including the requirements.txt and app.py
20
- COPY backend_files/ /app/backend_files/
21
-
22
- # Copy the serialized vector database directory
23
- # Ensure 'merck_manuals' matches the directory name you used to persist the Chroma DB
24
- # The path is relative to the directory containing the Dockerfile, which is now backend_files
25
- COPY ../merck_manuals/ /app/merck_manuals/
26
-
27
-
28
- # Install Python dependencies
29
- # Use --no-cache-dir to prevent caching, good for smaller image size
30
- # Use --upgrade pip to ensure you have the latest pip
31
- RUN pip install --upgrade pip
32
- RUN pip install --no-cache-dir -r /app/backend_files/requirements.txt
33
-
34
- # Copy the frontend directory (we will add this later)
35
- # If your frontend is a separate Gradio app calling the backend,
36
- # COPY front_end_files/ /app/front_end_files/
37
- # If your frontend is integrated into the backend (e.g., a single Gradio app), adjust paths
38
-
39
- # Expose the port the Flask application runs on
40
- # Hugging Face Spaces automatically exposes port 7860 for Gradio or 5000 for Flask/Streamlit
41
- # If using Gradio as the main app, it typically runs on 7860
42
- # If running Flask as the main app, it typically runs on 5000
43
- # EXPOSE 5000 # For Flask backend
44
 
45
- # Command to run the application
46
- # This depends on how you run your application.
47
- # If using Gunicorn to serve the Flask app:
48
- # CMD ["gunicorn", "--bind", "0.0.0.0:5000", "backend_files.app:app"]
49
 
50
- # If using Gradio as the main application entry point (recommended for HF Spaces with UI):
51
- # You would typically have a single app.py that launches Gradio,
52
- # and the Gradio app makes calls to the Flask backend (potentially running on the same Space internally or separately).
53
- # For a setup where Gradio runs and calls the Flask backend within the same container:
54
- # You might need a script to start both or configure Gunicorn/another server to handle both.
55
- # A simpler approach for HF Spaces is often to have one main app (e.g., Gradio) that handles everything or calls an external backend.
56
- # Assuming a setup where Gradio is the main entry point and lives in front_end_files:
57
- # CMD ["python", "-u", "/app/front_end_files/app.py"]
58
- # Or if the Gradio app is directly in the root of the container:
59
- # CMD ["python", "-u", "app.py"]
60
 
61
- # Let's assume for now your main entry point will be in backend_files/app.py
62
- # and you will run it with Gunicorn as is common for Flask production deployment.
63
- # If you integrate Gradio directly into app.py, adjust this command.
64
- CMD ["gunicorn", "--bind", "0.0.0.0:5000", "backend_files.app:app"]
 
 
 
 
 
1
 
2
# Use a lightweight Python image as the base
FROM python:3.9-slim

# Set the working directory (created automatically if missing)
WORKDIR /app

# Copy only the dependency manifest first so the pip layer stays cached
# until requirements.txt itself changes. Copying all sources before the
# install (COPY . .) would reinstall every dependency on any code edit.
COPY requirements.txt .

# Install Python dependencies listed in requirements.txt
# --no-cache-dir keeps pip's download cache out of the image layer
RUN pip install --no-cache-dir -r requirements.txt

# Copy the rest of the application source into /app
# NOTE(review): add a .dockerignore (.git, __pycache__, .env, etc.) so this
# blanket copy does not pull junk or secrets into the image.
COPY . .

# Run as a non-root user; UID 1000 is the Hugging Face Spaces convention
RUN useradd -m -u 1000 appuser && chown -R appuser:appuser /app
USER appuser

# Document the port Gunicorn binds to (Hugging Face Spaces expects 7860)
EXPOSE 7860

# Command to run the application using Gunicorn.
# Exec (JSON-array) form: Gunicorn runs as PID 1 and receives SIGTERM
# directly when the container is stopped.
CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:app"]