Zenkad commited on
Commit
62da079
·
verified ·
1 Parent(s): f772d3d

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +87 -12
app.py CHANGED
@@ -1,21 +1,96 @@
 
 
 
 
 
1
  from fastapi import FastAPI
 
2
  from pydantic import BaseModel
 
 
 
 
 
 
 
 
 
 
 
 
3
 
 
 
 
4
  app = FastAPI()
5
 
6
- class Chat(BaseModel):
 
 
 
 
 
 
 
 
 
 
7
  message: str
 
8
 
9
- @app.get("/")
10
- def root():
11
- return {"status": "ZenkaMind test API çalışıyor"}
 
 
 
 
 
 
 
 
12
 
13
- @app.get("/health")
14
- def health():
15
- return {"ok": True}
 
16
 
17
- @app.post("/api/chat")
18
- def chat(data: Chat):
19
- return {
20
- "response": f"Mesaj alındı: {data.message}"
21
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import os
import time
from typing import List, Dict, Any

import gradio as gr
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel
from huggingface_hub import InferenceClient
# NOTE(review): `time` and `Dict` are not used anywhere in this file — candidates
# for removal if nothing outside this view needs them.

# =============================
# MODEL
# =============================
# Chat model served remotely through the HF Inference API (no local weights).
REPO_ID = "Qwen/Qwen2.5-7B-Instruct"
# Read the access token from the environment; if unset this is None and the
# client falls back to anonymous access (rate limits / gated-model failures
# then surface at call time, not here).
HF_TOKEN = os.getenv("HF_TOKEN")

# Module-level client, shared by the API endpoint and the Gradio handler.
client = InferenceClient(
    model=REPO_ID,
    token=HF_TOKEN
)
21
 
22
# =============================
# FASTAPI
# =============================
app = FastAPI()

# Allow any browser origin to call the JSON API.
# NOTE(review): wildcard CORS is acceptable for a test server; tighten
# allow_origins before any production deployment.
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_methods=["*"],
    allow_headers=["*"],
)
33
+
34
# =============================
# REQUEST MODEL
# =============================
class ChatRequest(BaseModel):
    # The user's current message (required).
    message: str
    # Optional prior turns; the /api/chat handler expects each entry to be a
    # two-element [user, assistant] pair and silently skips anything else.
    # NOTE(review): the mutable [] default is copied per instance by Pydantic,
    # so the shared-mutable-default pitfall should not apply — confirm for the
    # pinned pydantic version.
    history: List[Any] | None = []
40
 
41
# =============================
# API ENDPOINT
# =============================
# NOTE: plain `def` (not `async def`) is deliberate — client.chat_completion
# is a blocking HTTP call, and FastAPI executes sync endpoints in a worker
# threadpool, so the event loop is never stalled while the model responds.
@app.post("/api/chat")
def chat(req: ChatRequest):
    """Answer one chat message with the hosted model.

    Builds a system + history + current-message prompt, queries the
    Inference API, and returns {"response": <text>}. Model failures are
    reported in the same JSON shape instead of raising, so the frontend
    always receives a well-formed body.
    """
    messages = [
        {
            "role": "system",
            "content": "Sen ZenkaMind isimli bir yapay zekasın. Sadece Türkçe konuş."
        }
    ]

    # Replay prior turns. Accept tuples as well as lists: Gradio-style
    # histories arrive as (user, assistant) tuples, JSON ones as lists;
    # anything malformed is skipped rather than rejected.
    for h in req.history or []:
        if isinstance(h, (list, tuple)) and len(h) == 2:
            messages.append({"role": "user", "content": h[0]})
            messages.append({"role": "assistant", "content": h[1]})

    messages.append({"role": "user", "content": req.message})

    try:
        out = client.chat_completion(
            messages=messages,
            max_tokens=300,
            temperature=0.7,
        )
        reply = out.choices[0].message.content
        return {"response": reply}
    except Exception as e:
        # Surface the failure to the caller instead of a bare 500.
        return {"response": f"Model hatası: {str(e)}"}
72
+
73
# =============================
# GRADIO UI (required for HF Spaces)
# =============================
def demo_chat(msg, history):
    """Gradio submit handler: send *msg* plus prior turns to the model.

    Returns the updated history twice because the UI wires this handler to
    two outputs. Inference errors are shown inline as the assistant reply
    instead of crashing the handler.
    """
    history = history or []

    # Fix: previously only the current message was sent, so the model had
    # no context even though the chatbot displayed a full conversation.
    messages = [{"role": "system", "content": "Türkçe konuş."}]
    for user_turn, bot_turn in history:
        messages.append({"role": "user", "content": user_turn})
        messages.append({"role": "assistant", "content": bot_turn})
    messages.append({"role": "user", "content": msg})

    try:
        r = client.chat_completion(
            messages=messages,
            max_tokens=200,
        )
        answer = r.choices[0].message.content
    except Exception as e:
        # Same error shape the JSON endpoint uses, shown as the reply.
        answer = f"Model hatası: {str(e)}"

    history.append((msg, answer))
    return history, history
88
+
89
with gr.Blocks() as demo:
    gr.Markdown("# 🤖 ZenkaMind – Test Sunucusu")
    chatbot = gr.Chatbot()
    inp = gr.Textbox(placeholder="Mesaj yaz...")
    # demo_chat returns (history, history); both outputs target the same
    # Chatbot, so the second assignment is redundant but harmless.
    # NOTE(review): the textbox is not cleared after submit — add `inp` to
    # the outputs with an empty value if that behavior is wanted.
    inp.submit(demo_chat, [inp, chatbot], [chatbot, chatbot])

# ⚠️ Hugging Face Spaces serves this mounted app. Mounting Gradio at "/"
# rebinds `app`; the JSON routes registered above (e.g. /api/chat) are still
# served by the same ASGI application.
app = gr.mount_gradio_app(app, demo, path="/")