Nehal721 commited on
Commit
ff5def3
·
verified ·
1 Parent(s): b456bb8

Upload 3 files

Browse files
Files changed (3) hide show
  1. Dockerfile +20 -0
  2. app.py +37 -0
  3. requirements.txt +4 -0
Dockerfile ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Base image: slim Python 3.9 keeps the image small.
FROM python:3.9-slim

# Set the working directory inside the container.
WORKDIR /code

# Copy the requirements file first so the dependency layer is cached
# independently of app-code changes.
COPY ./requirements.txt /code/requirements.txt

# Install dependencies (no pip cache to keep the image small).
RUN pip install --no-cache-dir --upgrade -r /code/requirements.txt

# Copy the application code.
COPY ./app.py /code/app.py

# Expose the port (Hugging Face Spaces expects 7860).
EXPOSE 7860

# Run the app under gunicorn ("app:app" = module app.py, Flask object `app`).
CMD ["gunicorn", "--bind", "0.0.0.0:7860", "app:app"]
app.py ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
from flask import Flask, request, jsonify
from huggingface_hub import InferenceClient
import base64
import io

# Flask application object; gunicorn targets this as "app:app".
app = Flask(__name__)
# Hugging Face Inference API client. Reads the token from the HF_TOKEN
# environment variable; if unset, api_key is None and authenticated
# calls will fail at request time — ensure HF_TOKEN is set in deployment.
client = InferenceClient(api_key=os.environ.get("HF_TOKEN"))
9
+
10
@app.route('/', methods=['GET'])
def home():
    """Health-check endpoint: returns a plain-text liveness message with 200."""
    return "Server is running!", 200
13
+
14
@app.route('/generate-image', methods=['POST'])
def generate_image():
    """Generate an image from a text prompt via the HF Inference API.

    Expects a JSON body: {"prompt": "<text>"}.
    Returns JSON {"image": "<base64-encoded PNG>"} on success,
    {"error": "..."} with HTTP 400 (bad/missing prompt) or 500
    (inference/serialization failure) otherwise.
    """
    # get_json(silent=True) returns None instead of raising when the body
    # is missing or is not valid JSON, so malformed requests get a clean
    # 400 "Prompt is missing" response. The original `request.json.get(...)`
    # fails with an unhandled error on a non-JSON body.
    payload = request.get_json(silent=True) or {}
    prompt = payload.get('prompt')
    if not prompt:
        return jsonify({"error": "Prompt is missing"}), 400

    try:
        # text_to_image returns a PIL.Image object.
        image = client.text_to_image(
            prompt,
            model="stabilityai/stable-diffusion-xl-base-1.0",
        )

        # Serialize the image to PNG in memory, then base64-encode it so it
        # can travel inside a JSON response.
        buffered = io.BytesIO()
        image.save(buffered, format="PNG")
        img_str = base64.b64encode(buffered.getvalue()).decode('utf-8')

        return jsonify({"image": img_str})

    except Exception as e:
        # Broad catch is deliberate at this handler boundary: any inference
        # or serialization failure becomes a JSON 500 instead of an
        # unhandled exception.
        print(f"An error occurred: {e}")
        return jsonify({"error": str(e)}), 500
35
+
36
if __name__ == '__main__':
    # Dev-server entry point only — in the container, gunicorn serves the app.
    # Host/port match the Dockerfile (EXPOSE 7860 / Hugging Face convention).
    # debug=True hard-coded (as originally written) exposes the Werkzeug
    # debugger — a remote-code-execution risk if the dev server is reachable —
    # so it is now opt-in via the FLASK_DEBUG environment variable.
    app.run(
        host="0.0.0.0",
        port=7860,
        debug=os.environ.get("FLASK_DEBUG") == "1",
    )
requirements.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ flask
2
+ huggingface_hub
3
+ pillow
4
+ gunicorn