"""Minimal Flask server that proxies text-to-image requests to the
Hugging Face Inference API and returns the generated image as base64 PNG."""

import base64
import io
import logging
import os

from flask import Flask, jsonify, request
from huggingface_hub import InferenceClient

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = Flask(__name__)

# Fail loudly (but not fatally) when the token is missing so the operator
# sees why downstream Hugging Face calls are rejected.
_hf_token = os.environ.get("HF_TOKEN")
if not _hf_token:
    logger.warning("HF_TOKEN is not set; Hugging Face requests may fail.")
client = InferenceClient(api_key=_hf_token)

# Model is a module constant so it can be found/changed in one place.
DEFAULT_MODEL = "stabilityai/stable-diffusion-xl-base-1.0"


@app.route('/', methods=['GET'])
def home():
    """Health-check endpoint: confirms the server is up."""
    return "Server is running!", 200


@app.route('/generate-image', methods=['POST'])
def generate_image():
    """Generate an image from a JSON body ``{"prompt": "..."}``.

    Returns:
        200 with ``{"image": "<base64-encoded PNG>"}`` on success.
        400 with ``{"error": "Prompt is missing"}`` when no prompt is given
            (or the body is not valid JSON).
        500 with ``{"error": "<message>"}`` when generation fails upstream.
    """
    # BUG FIX: ``request.json`` raises/aborts with an unhandled 400/415 when
    # the request lacks a JSON content type or has a malformed body, bypassing
    # our own error response. ``get_json(silent=True)`` returns None instead,
    # which folds into the existing "Prompt is missing" 400 path.
    payload = request.get_json(silent=True) or {}
    prompt = payload.get('prompt')
    if not prompt:
        return jsonify({"error": "Prompt is missing"}), 400

    try:
        # text_to_image returns a PIL image object; re-encode it as PNG bytes
        # so it can travel inside a JSON payload.
        image = client.text_to_image(
            prompt,
            model=DEFAULT_MODEL,
        )
        buffered = io.BytesIO()
        image.save(buffered, format="PNG")
        img_str = base64.b64encode(buffered.getvalue()).decode('utf-8')
        return jsonify({"image": img_str})
    except Exception as e:
        # Route-boundary handler: log the full traceback (was a bare print)
        # and surface the failure to the caller as a 500.
        # NOTE(review): str(e) may leak upstream details to clients — consider
        # a generic message in production.
        logger.exception("Image generation failed")
        return jsonify({"error": str(e)}), 500


if __name__ == '__main__':
    # Default preserves the original debug=True behavior; set FLASK_DEBUG=0 in
    # production — the Werkzeug debugger allows arbitrary code execution.
    app.run(debug=os.environ.get("FLASK_DEBUG", "1") == "1")