Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -31,15 +31,13 @@ client = InferenceClient(token=HF_TOKEN)
|
|
| 31 |
# Model/agent wiring: a Qwen 2.5 Coder model served through the HF
# Inference API drives a smolagents CodeAgent with no extra tools.
model = HfApiModel(model="Qwen/Qwen2.5-Coder-32B-Instruct")
agent = CodeAgent(tools=[], model=model)
|
| 33 |
|
| 34 |
-
# Add the user's message to the history (user role only)
|
| 35 |
def add_user_message(prompt, history):
    """Append the user's prompt to the conversation history.

    The same (mutated) history list is returned twice: once for the
    chatbot display output and once for the gr.State that carries the
    conversation between events.
    """
    entry = {"role": "user", "content": prompt}
    history.append(entry)
    return history, history
|
| 38 |
|
| 39 |
-
# Fetch the bot's reply and append it as a separate step (avoids an empty chat bubble)
|
| 40 |
def get_response(history):
    """Run the agent on the latest user message and append its reply.

    Reads the last history entry's content as the prompt, queries the
    module-level agent, appends the answer as an assistant message, and
    returns the updated history for the chatbot display.
    """
    prompt = history[-1]["content"]
    # agent.run() can return non-string objects (e.g. rich result types);
    # coerce to str so the messages-format chatbot can always render it.
    response = str(agent.run(prompt))
    history.append({"role": "assistant", "content": response})
    return history
|
| 45 |
|
|
@@ -54,12 +52,10 @@ with gr.Blocks() as demo:
|
|
| 54 |
with gr.Row():
|
| 55 |
txt = gr.Textbox(placeholder="Ask something...", show_label=False)
|
| 56 |
|
| 57 |
-
# User message → response
|
| 58 |
txt.submit(add_user_message, [txt, state], [chatbot, state], queue=False).then(
|
| 59 |
get_response, [state], [chatbot]
|
| 60 |
)
|
| 61 |
|
| 62 |
-
# Clear the input box
|
| 63 |
txt.submit(lambda: "", None, txt, queue=False)
|
| 64 |
|
| 65 |
if __name__ == "__main__":
|
|
|
|
| 31 |
# Build the inference backend once at import time: a Qwen 2.5 Coder
# model on the HF Inference API, wrapped in a tool-less CodeAgent.
model = HfApiModel(model="Qwen/Qwen2.5-Coder-32B-Instruct")
agent = CodeAgent(tools=[], model=model)
|
| 33 |
|
|
|
|
| 34 |
def add_user_message(prompt, history):
    """Record the user's turn in the chat history.

    Mutates *history* in place and returns it twice — one copy feeds
    the chatbot component, the other the gr.State conversation store.
    """
    history.append({"role": "user", "content": prompt})
    return history, history
|
| 37 |
|
|
|
|
| 38 |
def get_response(history):
    """Answer the newest user message and extend the history with it.

    The last entry's content is used as the prompt; the agent's result
    is stringified (agent.run may return non-string objects) and stored
    as an assistant message before the history is returned.
    """
    latest = history[-1]["content"]
    answer = str(agent.run(latest))
    history.append({"role": "assistant", "content": answer})
    return history
|
| 43 |
|
|
|
|
| 52 |
with gr.Row():
|
| 53 |
txt = gr.Textbox(placeholder="Ask something...", show_label=False)
|
| 54 |
|
|
|
|
| 55 |
txt.submit(add_user_message, [txt, state], [chatbot, state], queue=False).then(
|
| 56 |
get_response, [state], [chatbot]
|
| 57 |
)
|
| 58 |
|
|
|
|
| 59 |
txt.submit(lambda: "", None, txt, queue=False)
|
| 60 |
|
| 61 |
if __name__ == "__main__":
|