# NOTE(review): the following three lines were Hugging Face Spaces page residue
# ("Spaces: / Sleeping / Sleeping") captured during export — not part of the program.
from fastapi import FastAPI, HTTPException
import requests
import io
from PIL import Image
from fastapi.responses import StreamingResponse
import os
from dotenv import load_dotenv

# Load variables from a local .env file into the process environment
# (no-op if the file is absent).
load_dotenv()

# Initialize the FastAPI app
app = FastAPI()

# Hugging Face Inference API endpoint for the FLUX.1-dev text-to-image model.
API_URL = "https://api-inference.huggingface.co/models/black-forest-labs/FLUX.1-dev"
# Get the Hugging Face API key from environment variables.
# NOTE(review): the variable is named "huf_token" (not the conventional "hf_token") —
# confirm this matches the secret name configured in the deployment.
HUGGINGFACE_TOKEN = os.getenv("huf_token")
# Fail fast at import time rather than on the first request.
if not HUGGINGFACE_TOKEN:
    raise ValueError("Hugging Face API token not found. Please set it in the environment variables.")
# Bearer-token header attached to every inference request.
headers = {"Authorization": f"Bearer {HUGGINGFACE_TOKEN}"}
# Function to query Hugging Face model API
def query(payload, timeout=60):
    """POST *payload* to the Hugging Face inference endpoint and return the raw bytes.

    Args:
        payload: JSON-serializable request body, e.g. ``{"inputs": "<prompt>"}``.
        timeout: Seconds to wait for the upstream API before giving up.
            Without a timeout, a stalled upstream would hang the worker forever.

    Returns:
        The raw response body (image bytes on success).

    Raises:
        HTTPException: with the upstream status code if the API did not return 200.
        requests.exceptions.RequestException: on network failure or timeout.
    """
    response = requests.post(API_URL, headers=headers, json=payload, timeout=timeout)
    if response.status_code != 200:
        raise HTTPException(status_code=response.status_code, detail="Failed to fetch image from Hugging Face API")
    return response.content
# FastAPI route to return the image
# NOTE(review): the original paste had no route decorator, yet the comment above and
# the root message both advertise /generate-image — restoring the registration here.
@app.get("/generate-image")
def generate_image():
    """Generate an image for a fixed prompt and stream it back as PNG.

    Sends the hard-coded prompt "cat with dog" to the Hugging Face model,
    re-encodes the result as PNG via Pillow, and returns it as a streaming
    response.

    Raises:
        HTTPException: upstream errors keep their original status code;
            any other failure is reported as a 500.
    """
    try:
        # Send a request to the Hugging Face model
        image_bytes = query({"inputs": "cat with dog"})
        # Decode the returned bytes to validate/normalize the image.
        image = Image.open(io.BytesIO(image_bytes))
        # Re-encode as PNG into an in-memory buffer for streaming.
        img_byte_arr = io.BytesIO()
        image.save(img_byte_arr, format='PNG')
        img_byte_arr.seek(0)
        return StreamingResponse(img_byte_arr, media_type="image/png")
    except HTTPException:
        # Propagate upstream API errors with their original status codes
        # instead of masking them as a generic 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
# Define a root route to handle requests to "/"
# NOTE(review): the original paste had no route decorator despite the comment
# above describing this as the root route — restoring the registration here.
@app.get("/")
def read_root(logs: str = None):
    """Root endpoint: welcome message, with a stub response for ``?logs=container``.

    Args:
        logs: Optional query parameter; only the value "container" is recognized.

    Returns:
        A dict with either a stub "logs" entry or a welcome "message".
    """
    if logs == "container":
        return {"logs": "Container logs not available."}
    return {"message": "Welcome to the API! Use /generate-image to generate images."}