File size: 3,693 Bytes
3256847 2cf546e 3256847 2cf546e 3256847 2cf546e 3256847 2cf546e 3256847 2cf546e 3256847 2cf546e 3256847 2cf546e 3256847 2cf546e 3256847 2cf546e 3256847 2cf546e 3256847 2cf546e 3256847 2cf546e 3256847 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 |
from __future__ import annotations
import gradio as gr
from agent import ExamAgent
# Gradio has moved/renamed its error classes across major versions; fall back
# to the base Exception so the alias is always defined and importable.
# NOTE(review): GradioComponentError is never referenced in this file — it may
# exist for external importers, or be dead code; confirm before removing.
try:
    from gradio.exceptions import Error as GradioComponentError
except Exception:
    GradioComponentError = Exception
def new_agent() -> ExamAgent:
    """Return a freshly constructed ExamAgent for a new exam session."""
    agent = ExamAgent()
    return agent
# Chat-history format flag ("tuples" or "messages"). This default is
# overwritten at UI build time by detect_chat_mode(); it is read as a module
# global by init_chat() and on_user_message().
CHAT_MODE = "tuples"
def detect_chat_mode(chatbot: gr.Chatbot) -> str:
    """Determine which history format this gr.Chatbot build expects.

    Returns:
        "messages" — list of {"role": ..., "content": ...} dicts, or
        "tuples"   — list of (user, bot) pairs.
        Falls back to "tuples" when the format cannot be determined.
    """
    validator = getattr(chatbot, "_check_format", None)
    if callable(validator):
        # Probe the private validator with a sample of each format; the
        # first probe it accepts wins.
        probes = (
            ([{"role": "assistant", "content": "hi"}], "messages"),
            ([(None, "hi")], "tuples"),
        )
        for sample, mode in probes:
            try:
                validator(sample)
            except Exception:
                continue
            return mode
    # Older/newer builds expose the configured format directly.
    declared = getattr(chatbot, "type", None)
    return declared if declared in ("messages", "tuples") else "tuples"
def init_chat(first_assistant_text: str):
    """Build the initial chat history holding a single assistant message.

    The shape depends on the module-level CHAT_MODE global.
    """
    if CHAT_MODE != "messages":
        # Legacy tuple format: (user_text, bot_text); no user turn yet.
        return [(None, first_assistant_text)]
    return [{"role": "assistant", "content": first_assistant_text}]
def on_load():
    """Start a new exam session: fresh agent plus its opening message.

    Returns (agent, chat_history) for the agent State and Chatbot outputs.
    """
    agent = new_agent()
    return agent, init_chat(agent.initial_message())
def on_reset():
    """Reset handler — behaves exactly like a fresh page load."""
    return on_load()
def on_user_message(
    agent: ExamAgent,
    chat,
    user_text: str,
    api_key: str,
    model: str,
    base_url: str,
):
    """Process one user turn: record it, query the agent, record the reply.

    Returns (agent, chat, "") — the trailing empty string clears the input
    textbox on the UI side.
    """
    # Lazily (re)create state that Gradio may hand us as None.
    if agent is None:
        agent = new_agent()
    if chat is None:
        chat = []

    message = (user_text or "").strip()
    if not message:
        # Nothing to send; keep state untouched and just clear the textbox.
        return agent, chat, ""

    if CHAT_MODE == "messages":
        chat.append({"role": "user", "content": message})

    try:
        reply = agent.step(message, api_key=api_key, model=model, base_url=base_url)
    except Exception as exc:
        # UI boundary: surface the failure in chat instead of crashing.
        reply = f"Сталася помилка: {exc}"

    if CHAT_MODE == "messages":
        chat.append({"role": "assistant", "content": reply})
    else:
        # Tuple format carries both sides of the exchange in one entry.
        chat.append((message, reply))
    return agent, chat, ""
# ---------------------------------------------------------------------------
# UI definition. Module-level side effect: builds the Gradio app in `demo`.
# ---------------------------------------------------------------------------
with gr.Blocks(title="AI Examiner Agent") as demo:
    gr.Markdown(
        "# AI Examiner Agent\n"
        "Сервіс проводить міні-іспит: питає ім’я та email, обирає 2–3 теми, "
        "ставить питання, оцінює відповіді та зберігає результат у файли."
    )
    with gr.Row():
        # LLM connection settings; forwarded to agent.step() on every turn.
        api_key = gr.Textbox(label="LLM API Key", type="password", placeholder="Встав ключ тут")
        model = gr.Textbox(label="Model", value="gpt-4o-mini")
        # NOTE(review): OpenAI-compatible clients often expect a ".../v1"
        # base URL — confirm against how ExamAgent.step builds its client.
        base_url = gr.Textbox(label="Base URL", value="https://api.openai.com")
    chatbot = gr.Chatbot(label="Exam Chat", height=420)
    # Rebind the module-level CHAT_MODE now that a real Chatbot exists;
    # init_chat() and on_user_message() read this global at call time.
    CHAT_MODE = detect_chat_mode(chatbot)
    print(f"[AI Examiner Agent] Chatbot mode detected: {CHAT_MODE}")
    with gr.Row():
        user_in = gr.Textbox(label="Твоє повідомлення", placeholder="Напиши відповідь…", scale=4)
        send = gr.Button("Send", scale=1)
        reset = gr.Button("Reset", scale=1)
    # Per-session agent instance kept server-side between turns.
    agent_state = gr.State()
    # Page load / Reset both start a brand-new exam session.
    demo.load(on_load, outputs=[agent_state, chatbot])
    reset.click(on_reset, outputs=[agent_state, chatbot])
    # Send button and Enter key trigger the same handler; api_name=False
    # keeps these endpoints out of the public API docs.
    send.click(
        on_user_message,
        inputs=[agent_state, chatbot, user_in, api_key, model, base_url],
        outputs=[agent_state, chatbot, user_in],
        api_name=False,
    )
    user_in.submit(
        on_user_message,
        inputs=[agent_state, chatbot, user_in, api_key, model, base_url],
        outputs=[agent_state, chatbot, user_in],
        api_name=False,
    )
if __name__ == "__main__":
    demo.launch()