benkada commited on
Commit
182e396
·
verified ·
1 Parent(s): 8ff2dfe

Upload 3 files

Browse files
Files changed (3) hide show
  1. Dockerfile +28 -0
  2. app.py +14 -0
  3. requirements.txt +5 -0
Dockerfile ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Use Python 3.12 for better compatibility with latest packages
FROM python:3.12-slim

# Unbuffered stdout/stderr so container logs appear immediately;
# skip .pyc generation to keep the image layer smaller.
ENV PYTHONUNBUFFERED=1 \
    PYTHONDONTWRITEBYTECODE=1

# Set the working directory
WORKDIR /app

# Install system build tools (needed to compile wheels that ship no binary);
# --no-install-recommends and the list cleanup keep the layer small.
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential \
    && rm -rf /var/lib/apt/lists/*

# Copy requirements first to leverage Docker layer caching
COPY requirements.txt .

# Install Python dependencies
RUN pip install --no-cache-dir -r requirements.txt

# Copy the rest of the application
COPY . .

# Create uploads directory
RUN mkdir -p uploads

# Expose the port the app runs on
EXPOSE 8000

# Command to run the application
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8000"]
app.py ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""Minimal FastAPI service exposing a Qwen2.5-VL image-text-to-text pipeline."""

from functools import lru_cache

from fastapi import FastAPI
from transformers import pipeline

# ASGI application object; served by uvicorn as "app:app" (see Dockerfile CMD).
app = FastAPI()

# Demo prompt for the vision-language model.
messages = [
    {"role": "user", "content": "Who are you?"},
]


@lru_cache(maxsize=1)
def get_pipe():
    """Build the pipeline once, on first use, and cache it.

    The original module ran ``pipeline(...)`` and an inference at import
    time: that downloads the 7B-parameter model and blocks server startup,
    and the inference result was discarded. Loading lazily keeps startup
    fast while preserving a single shared pipeline instance.
    """
    return pipeline("image-text-to-text", model="Qwen/Qwen2.5-VL-7B-Instruct")


def pipe(*args, **kwargs):
    """Backward-compatible callable standing in for the old ``pipe`` object."""
    return get_pipe()(*args, **kwargs)


@app.get("/")
def home():
    """Health-check endpoint confirming the server is up."""
    return {"message": "FastAPI is running!"}
requirements.txt ADDED
@@ -0,0 +1,5 @@
 
 
 
 
 
 
1
# Web framework providing the FastAPI app defined in app.py.
fastapi==0.110.0
# Supplies the image-text-to-text pipeline used with Qwen2.5-VL.
transformers==4.50.0
# Tensor backend required by the transformers pipeline.
torch==2.6.0
# ASGI server launched by the Dockerfile CMD.
uvicorn==0.27.1
# Form/multipart parsing for FastAPI — presumably for file uploads
# (the Dockerfile creates an uploads/ dir); confirm against the handlers.
python-multipart==0.0.9