File size: 2,281 Bytes
9c9e799 2fc7270 97fff9b 68ddb56 97fff9b 68ddb56 27e74be 97fff9b c785864 2fc7270 97fff9b 02d8b38 c785864 2fc7270 02d8b38 97fff9b 2fc7270 c785864 97fff9b c785864 62ad9cd 97fff9b 49c8ad8 2fc7270 62ad9cd 2fc7270 49c8ad8 97fff9b 1203a0f 97fff9b 1203a0f 97fff9b c785864 27e74be c785864 27e74be c785864 2773685 2fc7270 c785864 2fc7270 2773685 2fc7270 c785864 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 |
import gradio as gr
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse
from fastapi.middleware.cors import CORSMiddleware
import os
from huggingface_hub import InferenceClient
app = FastAPI()
# Allow any origin/method/header: this API is meant to be called from an
# externally hosted HTML frontend, so CORS is fully open.
# NOTE(review): allow_credentials=True combined with allow_origins=["*"] is
# rejected by browsers for credentialed requests — confirm credentials are
# actually needed here.
app.add_middleware(
CORSMiddleware,
allow_origins=["*"],
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Hugging Face inference client — token is read from the HF_TOKEN
# environment variable (None if unset, i.e. anonymous access).
client = InferenceClient(token=os.getenv("HF_TOKEN"))
@app.post("/api/chat")
async def chat_api(request: Request):
    """Chat endpoint backing the HTML frontend.

    Expects a JSON body of the form
    ``{"message": str, "history": [[user, assistant], ...]}`` and returns
    ``{"response": str}``. Errors are reported in-band (always HTTP 200)
    so the frontend only ever has to parse one response shape.
    """
    try:
        data = await request.json()
        user_msg = data.get("message", "")
        history = data.get("history", [])
        # Persona prompt (Turkish) — runtime text, kept verbatim.
        SYSTEM_PROMPT = """Sen ZenkaMind 1.1'sin. Tek Yaratıcın: Berkay Adıgül.
TÜRKÇE konuş. Mahallenin ağır abisi karakterindesin."""
        messages = [{"role": "system", "content": SYSTEM_PROMPT}]
        # History arrives as [user, assistant] pairs; skip malformed entries.
        for msg in history:
            if len(msg) >= 2:
                messages.append({"role": "user", "content": str(msg[0])})
                messages.append({"role": "assistant", "content": str(msg[1])})
        messages.append({"role": "user", "content": user_msg})
        stream = client.chat_completion(
            model="Qwen/Qwen2.5-7B-Instruct",
            messages=messages,
            max_tokens=500,
            stream=True,
            temperature=0.7,
        )
        # Collect streamed deltas in a list and join once — avoids the
        # quadratic cost of repeated string concatenation.
        parts = []
        for chunk in stream:
            if chunk.choices and chunk.choices[0].delta.content:
                parts.append(chunk.choices[0].delta.content)
        return JSONResponse({"response": "".join(parts)})
    except Exception as e:
        # Best-effort: surface the failure to the frontend as a normal
        # reply ("System busy: ...") instead of a 500.
        return JSONResponse({"response": f"⚠️ Sistem meşgul: {str(e)}"})
# Minimal Gradio chat handler — the real UI lives in the HTML frontend.
def simple_chat(message, history):
    """Placeholder handler that directs users to the /api/chat endpoint."""
    reply = "HTML arayüzü için /api/chat endpoint'ini kullanın"
    return reply
# Placeholder Gradio UI (the production frontend is external HTML).
demo = gr.ChatInterface(
simple_chat,
title="ZenkaMind",
description="Ana uygulama için HTML frontend kullanın"
)
# Mount the Gradio app at the root path of the FastAPI app.
# NOTE(review): mounting at "/" is registered before the later @app.get("/")
# route, so that route is likely unreachable — verify routing order.
app = gr.mount_gradio_app(app, demo, path="/")
@app.get("/")
async def home():
    """Health-check style root handler pointing at the chat endpoint.

    NOTE(review): the Gradio app is mounted at "/" above, which likely
    shadows this route — confirm it is ever reached.
    """
    status_payload = {
        "status": "ZenkaMind API çalışıyor",
        "endpoint": "/api/chat",
    }
    return status_payload
if __name__ == "__main__":
    # Local import: uvicorn is only needed when running as a script.
    import uvicorn

    # Bind all interfaces on port 7860 (the Hugging Face Spaces default).
    uvicorn.run(app, host="0.0.0.0", port=7860)