guydffdsdsfd committed on
Commit
631c6d3
·
verified ·
1 Parent(s): ea898e8

Update Dockerfile

Browse files
Files changed (1) hide show
  1. Dockerfile +32 -51
Dockerfile CHANGED
@@ -1,58 +1,39 @@
1
- # 1. Use a more stable Python image
2
- FROM python:3.10-slim
3
 
4
- # 2. System dependencies
5
- RUN apt-get update && apt-get install -y \
6
- libgl1-mesa-glx libglib2.0-0 procps curl && \
7
- rm -rf /var/lib/apt/lists/*
 
 
 
8
 
9
- # 3. Python dependencies
10
- RUN pip3 install --no-cache-dir \
11
- flask flask-cors requests diffusers transformers accelerate torch \
12
- --extra-index-url https://download.pytorch.org/whl/cpu
 
 
13
 
14
- # 4. Set up non-root user (Required for HF)
15
  RUN useradd -m -u 1000 user
16
  USER user
17
- ENV HOME=/home/user
18
- ENV PATH=/home/user/.local/bin:$PATH
19
- WORKDIR $HOME
20
-
21
- # 5. Create the app file directly via a more robust method
22
- # Using printf instead of cat <<EOF to avoid shell/indentation errors
23
- RUN printf 'from flask import Flask, request, jsonify, send_file\n\
24
- import torch, os, datetime, io\n\
25
- from diffusers import StableDiffusionPipeline\n\
26
- from flask_cors import CORS\n\
27
- \n\
28
- app = Flask(__name__)\n\
29
- CORS(app)\n\
30
- \n\
31
- print("Loading SD Model on CPU...")\n\
32
- pipe = StableDiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5", torch_dtype=torch.float32)\n\
33
- pipe.to("cpu")\n\
34
- \n\
35
- @app.route("/api/generate", methods=["POST"])\n\
36
- def proxy():\n\
37
- user_key = request.headers.get("x-api-key", "")\n\
38
- # Simple check for your key\n\
39
- if user_key != "sk-sd-user-1" and user_key != "sk-ess4l0ri37":\n\
40
- return jsonify({"error": "Unauthorized"}), 401\n\
41
- \n\
42
- data = request.json\n\
43
- prompt = data.get("prompt", "a sunset")\n\
44
- image = pipe(prompt, num_inference_steps=15).images[0]\n\
45
- \n\
46
- img_io = io.BytesIO()\n\
47
- image.save(img_io, "PNG")\n\
48
- img_io.seek(0)\n\
49
- return send_file(img_io, mimetype="image/png")\n\
50
- \n\
51
- if __name__ == "__main__":\n\
52
- app.run(host="0.0.0.0", port=7860)\n' > $HOME/app.py
53
-
54
- # 6. Expose port 7860
55
  EXPOSE 7860
56
 
57
- # 7. Start the app
58
- CMD ["python3", "app.py"]
 
# syntax=docker/dockerfile:1

# Use an official PyTorch image with CUDA support (pinned tag for reproducibility)
FROM pytorch/pytorch:2.1.0-cuda12.1-cudnn8-runtime

# Set environment variables.
# PATH must include ~/.local/bin: pip runs below as the non-root user, so
# console scripts (gradio, accelerate, ...) are installed into /home/user/.local/bin
# and would otherwise not be resolvable at runtime.
ENV PYTHONUNBUFFERED=1 \
    GRADIO_ALLOW_FLAGGING=never \
    GRADIO_NUM_PORTS=1 \
    GRADIO_SERVER_NAME="0.0.0.0" \
    GRADIO_THEME=huggingface \
    HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

# Install system dependencies.
# update + install in one layer (avoids stale apt cache), no recommended
# packages, and drop the apt lists so they don't persist in the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
        git \
        libgl1-mesa-glx \
        libglib2.0-0 \
    && rm -rf /var/lib/apt/lists/*

# Create a non-root user (UID 1000 is required by HF Spaces) and drop privileges
RUN useradd -m -u 1000 user
USER user
WORKDIR $HOME/app

# Copy the dependency manifest first so the pip layer stays cached until
# requirements.txt itself changes (source edits won't reinstall deps).
# (requirements.txt is expected to list diffusers, transformers, accelerate, gradio)
COPY --chown=user requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Copy the rest of the application
COPY --chown=user . .

# Make the startup script executable
# (kept as a separate RUN: COPY --chmod would apply to every copied file)
RUN chmod +x start.sh

# Expose the port Gradio runs on (documentation only; HF Spaces expects 7860)
EXPOSE 7860

# Run the startup script (exec form: the script is PID 1 and receives SIGTERM)
CMD ["./start.sh"]