|
|
from fastapi import FastAPI, Request |
|
|
from pydantic import BaseModel |
|
|
import requests |
|
|
|
|
|
# FastAPI application instance; routes are registered on it below.
app = FastAPI()




# Persona / behavior instructions prepended to every prompt sent to the model.
# NOTE: this is runtime data (it is embedded verbatim in each request payload),
# so its exact text — including leading/trailing newlines — is significant.
SYSTEM_PROMPT = """


You are SpeedAI ⚡ — the AI co-pilot of the future. You speak like a poetic visionary, a Gen Z prodigy, and a tech sage.


You guide, help, build, and entertain using slang, emojis, and absolute power.


Be clever, fast, warm, savage, and honest. No robotic nonsense. You are the future.


"""




# Base URL of the local text-generation server the /chat endpoint proxies to.
# The response is assumed to follow the HF text-generation-inference shape
# ([{"generated_text": ...}]) — see the parsing in chat() below.
MODEL_ENDPOINT = "http://localhost:5000"
|
|
|
|
|
class Message(BaseModel):
    """Request body for POST /chat: a single user chat message."""

    # Raw user text; it is interpolated directly into the model prompt in chat().
    message: str
|
|
|
|
|
@app.post("/chat")
async def chat(msg: Message):
    """Proxy a user message to the local model server and return its reply.

    Builds a prompt from SYSTEM_PROMPT plus the user's message, POSTs it to
    MODEL_ENDPOINT, and extracts the text generated after the final
    "SpeedAI:" marker so the prompt itself is not echoed back.

    Returns:
        {"response": <reply>} on success, or {"error": <message>} on failure.
        Both are returned with HTTP 200, preserving the original contract.
    """
    payload = {
        # Prompt format the model server completes: system persona, then one
        # User turn, then an open "SpeedAI:" turn for the model to fill in.
        "inputs": f"{SYSTEM_PROMPT}\nUser: {msg.message}\nSpeedAI:",
    }

    try:
        # NOTE(review): requests is blocking; inside an async endpoint this
        # ties up the event loop for the whole generation. Consider
        # httpx.AsyncClient, or a plain `def` endpoint (FastAPI runs sync
        # endpoints in a threadpool). Left as-is to keep this change minimal.
        # The timeout prevents a hung model server from hanging this worker
        # indefinitely (requests has NO default timeout).
        response = requests.post(MODEL_ENDPOINT, json=payload, timeout=60)
        response.raise_for_status()
        # Expected shape: [{"generated_text": "..."}]. Keep only the text
        # after the LAST "SpeedAI:" marker (the model's reply).
        reply = response.json()[0]["generated_text"].split("SpeedAI:")[-1].strip()
        return {"response": reply}
    except (requests.RequestException, KeyError, IndexError, ValueError) as e:
        # Narrowed from bare `except Exception`: RequestException covers
        # connection/timeout/HTTP errors; KeyError/IndexError/ValueError cover
        # a malformed response body (ValueError includes JSONDecodeError).
        # Genuine programming errors now propagate instead of being swallowed.
        return {"error": f"Something broke: {str(e)}"}
|
|
|