"""Maxenoz AI Server: FastAPI endpoints for text, image, and audio generation."""

import base64
import io
import os

import numpy as np
import openai
import soundfile as sf
import torch
from diffusers import StableDiffusionPipeline
from fastapi import FastAPI, HTTPException
from PIL import Image
from pydantic import BaseModel
from transformers import pipeline

# --- Initialize APIs ---
# NOTE(review): this file uses the pre-1.0 openai API (openai.ChatCompletion).
# Pin `openai<1.0` or migrate to the client-based `OpenAI().chat.completions`
# API — confirm which library version the deployment installs.
openai.api_key = os.getenv("OPENAI_API_KEY")

app = FastAPI(title="Maxenoz AI Server")

# Process-wide Stable Diffusion pipeline, created lazily on first use.
# The original code rebuilt the pipeline inside the request handler, which
# re-downloads and re-initializes multi-GB weights on every /ai/image call.
_SD_PIPE = None


def _get_sd_pipe():
    """Return the shared StableDiffusionPipeline, creating it on first use.

    Picks CUDA automatically when available, otherwise falls back to CPU.
    """
    global _SD_PIPE
    if _SD_PIPE is None:
        device = "cuda" if torch.cuda.is_available() else "cpu"
        _SD_PIPE = StableDiffusionPipeline.from_pretrained(
            "runwayml/stable-diffusion-v1-5"
        ).to(device)
    return _SD_PIPE


# --- Request models ---
class TextRequest(BaseModel):
    # Prompt text forwarded verbatim to the chat model.
    input_text: str


class ImageRequest(BaseModel):
    # Text prompt for Stable Diffusion.
    prompt: str


class AudioRequest(BaseModel):
    # Currently unused by the placeholder endpoint; kept for interface
    # compatibility with existing clients.
    seed: int = 42


# --- Health check ---
@app.get("/")
def health():
    """Liveness probe: confirm the server is up and responding."""
    return {"status": "OK", "message": "Server is running"}


# --- GPT Text Endpoint ---
@app.post("/ai/text")
def ai_text(request: TextRequest):
    """Send the user's text to GPT and return the model's reply.

    Raises HTTPException(500) when the upstream OpenAI call fails.
    """
    try:
        response = openai.ChatCompletion.create(
            model="gpt-4-turbo",
            messages=[{"role": "user", "content": request.input_text}],
        )
        return {"output_text": response.choices[0].message.content}
    except Exception as e:
        # API boundary: surface upstream failures as HTTP 500 with the cause.
        raise HTTPException(status_code=500, detail=str(e)) from e


# --- Image Generation Endpoint ---
@app.post("/ai/image")
def ai_image(request: ImageRequest):
    """Generate an image from the prompt and return it as a base64 PNG.

    Raises HTTPException(500) when pipeline loading or generation fails.
    """
    try:
        # Reuse the cached pipeline instead of rebuilding it per request.
        image = _get_sd_pipe()(request.prompt).images[0]

        # Encode the PIL image to base64 PNG for the JSON response.
        buf = io.BytesIO()
        image.save(buf, format="PNG")
        img_b64 = base64.b64encode(buf.getvalue()).decode("utf-8")
        return {"image_base64": img_b64}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e


# --- Audio Generation Endpoint (placeholder) ---
@app.post("/ai/audio")
def ai_audio(request: AudioRequest):
    """Placeholder: return one second of 16 kHz silence as a base64 WAV.

    `request.seed` is accepted but not yet used.
    Raises HTTPException(500) when audio encoding fails.
    """
    try:
        audio = np.zeros(16000)  # 16 kHz sample rate, 1 second of silence
        buf = io.BytesIO()
        sf.write(buf, audio, 16000, format="WAV")
        audio_b64 = base64.b64encode(buf.getvalue()).decode("utf-8")
        return {"audio_base64": audio_b64}
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e)) from e
# --- Run server ---
if __name__ == "__main__":
    import uvicorn

    # Host/port default to the original hard-coded values but can be
    # overridden via environment variables for deployment flexibility.
    uvicorn.run(
        app,
        host=os.getenv("HOST", "0.0.0.0"),
        port=int(os.getenv("PORT", "7860")),
    )