# testing_ / app.py — Hugging Face Space by legends810 (commit 0fd0c7b, "Update app.py")
import json
import os
import time
import uuid

import requests
import uvicorn
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
# Configuration
# Optional upstream bearer token; the Authorization header is only attached
# when this is set (see chat_completions below).
TOKEN = os.getenv("POLLINATIONS_TOKEN")
# OpenAI-compatible endpoint of the Pollinations text service.
POLLINATIONS_URL = "https://text.pollinations.ai/openai"

app = FastAPI(title="Pollinations OpenAI API")
# CORS: allow any origin, method, and header so browser-based clients can
# call this API directly. (Credentials are not enabled, so "*" is safe here.)
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)
@app.get("/")
async def root():
    """Landing/health endpoint that advertises the available routes."""
    available = {"models": "/models", "chat": "/chat/completions"}
    return {
        "message": "Pollinations OpenAI Compatible API",
        "status": "running",
        "endpoints": available,
    }
@app.get("/models")
async def get_models():
    """OpenAI-compatible model listing (the endpoint that was missing before)."""
    model_ids = ("openai", "llama", "mistral", "gemini", "deepseek")
    return {
        "object": "list",
        "data": [
            {"id": model_id, "object": "model", "owned_by": "pollinations"}
            for model_id in model_ids
        ],
    }
@app.post("/chat/completions")
async def chat_completions(data: dict):
    """OpenAI-compatible chat completion endpoint.

    Forwards the request body to the Pollinations upstream and re-wraps the
    first choice in an OpenAI-style response envelope.

    Expected keys in *data* (all optional): "model" (default "openai"),
    "messages" (default []), "temperature" (default 0.7).

    On any upstream/parsing failure, returns ``{"error": "<message>"}`` with
    HTTP 200 — this preserves the original error contract of the endpoint.
    """
    model = data.get("model", "openai")
    messages = data.get("messages", [])
    temperature = data.get("temperature", 0.7)

    payload = {
        "model": model,
        "messages": messages,
        "temperature": temperature,
        "referrer": "hf-api",
    }

    headers = {"Content-Type": "application/json"}
    if TOKEN:
        headers["Authorization"] = f"Bearer {TOKEN}"

    try:
        response = requests.post(
            POLLINATIONS_URL, json=payload, headers=headers, timeout=60
        )
        # Surface upstream HTTP errors explicitly instead of trying to parse
        # an error body as a success and failing with an opaque KeyError.
        response.raise_for_status()
        result = response.json()
        content = result["choices"][0]["message"]["content"]
        return {
            # Unique per-response id (the original hard-coded "chatcmpl-123").
            "id": f"chatcmpl-{uuid.uuid4().hex}",
            "object": "chat.completion",
            "created": int(time.time()),
            "model": model,
            "choices": [{
                "index": 0,
                "message": {"role": "assistant", "content": content},
                "finish_reason": "stop",
            }],
        }
    except (requests.RequestException, KeyError, IndexError, TypeError, ValueError) as e:
        # Narrowed from a bare `except Exception`: network/HTTP failures,
        # non-JSON bodies (ValueError), and unexpected response shapes
        # (KeyError/IndexError/TypeError) are the failures this code can hit.
        return {"error": str(e)}
if __name__ == "__main__":
    # Bind on all interfaces; 7860 is the Hugging Face Spaces default port.
    uvicorn.run(app, host="0.0.0.0", port=7860)