# NOTE: removed page-scrape artifacts that preceded the code (Spaces header,
# "Runtime error" status lines, file size, commit hash, line-number gutter) —
# they were viewer chrome, not part of the program.
import os
import gradio as gr
from openai import AsyncOpenAI
import pandas as pd
import uuid, asyncio
from sql_tab import run_sql
from logger import log_event
from chat_helpers import build_input_from_history, get_db_sys_prompt
# Shared async OpenAI client; the API key is read from the environment.
oclient = AsyncOpenAI(api_key=os.getenv("OPENAI_API_KEY"))
# Maximum number of rows the SQL console returns (passed to run_sql).
max_rows = 100
async def respond_once(message, history):
    """Return a single, non-streaming assistant reply for *message*.

    Args:
        message: The latest user message.
        history: Prior chat messages, combined with *message* by
            build_input_from_history().

    Returns:
        The model's output text, or "" if the response has none.
    """
    # MOCK mode to isolate app/DB without burning tokens.
    # Checked FIRST so mock runs skip prompt building entirely (the original
    # built the full request before this check, wasting work per call).
    if os.getenv("MOCK_OPENAI", "").lower() in {"1", "true", "yes"}:
        import random  # local import: only the mock path needs it

        # Simulate network latency so load tests exercise the async path.
        await asyncio.sleep(random.uniform(0.05, 0.25))
        return "MOCK: Here’s a fabricated answer for load testing."
    resp = await oclient.responses.create(
        model="gpt-4.1",
        input=build_input_from_history(message, history),
        temperature=0,  # deterministic answers for a teaching assistant
        instructions=get_db_sys_prompt(),
        tools=[{"type": "web_search"}],
        tool_choice="auto",
        parallel_tool_calls=True,
    )
    # output_text is the SDK's convenience aggregation of all text output.
    return getattr(resp, "output_text", "")
async def respond(message, history):
    """Stream the assistant's reply, yielding the accumulated text so far
    after every delta event.

    Args:
        message: The latest user message.
        history: Prior chat messages.

    Yields:
        Progressively longer snapshots of the reply text; possibly one final
        yield if the finished response differs from the streamed deltas.
    """
    request = dict(
        model="gpt-4.1",
        input=build_input_from_history(message, history),
        temperature=0,
        instructions=get_db_sys_prompt(),
        tools=[{"type": "web_search"}],
        tool_choice="auto",
        parallel_tool_calls=True,
    )
    pieces = []
    async with oclient.responses.stream(**request) as stream:
        async for event in stream:
            if event.type == "response.output_text.delta":
                pieces.append(event.delta)
                yield "".join(pieces)
        final = await stream.get_final_response()
    # Emit the final text when nothing streamed, or when it disagrees with
    # what was streamed (e.g. tool output appended after the deltas).
    final_text = getattr(final, "output_text", None)
    if final_text and (not pieces or final_text != "".join(pieces)):
        yield final_text
async def chat_driver(user_message, messages_history, _user_name, _session_id):
    """Drive one chat turn: stream the assistant reply into the UI and log
    both the user message and the final assistant text.

    Args:
        user_message: Text the user just sent.
        messages_history: Existing chatbot messages (may be None).
        _user_name: Student ID for logging.
        _session_id: Session UUID for logging.

    Yields:
        (messages, "") pairs for Gradio: the conversation including the
        in-progress assistant message, plus an empty string to clear the
        input textbox.
    """
    messages_history = messages_history or []
    base = messages_history + [{"role": "user", "content": user_message}]
    assistant_text = ""
    # Keep references to logging tasks: a bare asyncio.create_task() result
    # can be garbage-collected before it runs, and its exception would be
    # silently dropped (per the asyncio docs' "save a reference" warning).
    log_tasks = [
        asyncio.create_task(
            log_event(_user_name, _session_id, "chat_user", {"text": user_message})
        )
    ]
    async for chunk in respond(user_message, messages_history):
        assistant_text = chunk  # respond() yields the full text so far
        # stream to UI
        yield base + [{"role": "assistant", "content": assistant_text}], ""
    # after stream finished, log the final assistant text
    log_tasks.append(
        asyncio.create_task(
            log_event(_user_name, _session_id, "chat_assistant", {"text": assistant_text})
        )
    )
    # Ensure both log writes complete; swallow (but don't lose track of)
    # individual failures so a logging hiccup can't break the chat.
    await asyncio.gather(*log_tasks, return_exceptions=True)
async def post_completion_code(_user_name, _session_id):
    """Log the survey completion code and return it as a one-message
    chat list suitable for a Gradio Chatbot output."""
    code = "9C1F4B2E"
    await log_event(_user_name, _session_id, "completion_code", {"code": code})
    return [{"role": "assistant", "content": f"the completion code is {code}"}]
# UI shell: a login view shown first, swapped for the app view by do_login.
with gr.Blocks(title="Movie Database", theme="soft") as demo:
# gr.Markdown("## Movie Database Bot and SQL Console")
# Per-browser-session state: student ID and generated session UUID,
# threaded into every log_event call.
user_name = gr.State("")
session_id = gr.State("")
# Login view (visible on load; hidden after a successful login).
with gr.Column(visible=True) as identify_view:
gr.Markdown("### Login")
name_tb = gr.Textbox(label="Student ID (required)", placeholder="Please enter your student ID", autofocus=True)
enter_btn = gr.Button("Enter", variant="primary")
id_msg = gr.Markdown("")
async def do_login(name):
    """Validate the student ID, mint a session UUID, log the login, and
    toggle from the login view to the app view.

    Returns a 5-tuple matching [identify_view, app_view, id_msg,
    user_name, session_id].
    """
    cleaned = name.strip() if name else ""
    if not cleaned:
        # Stay on the login view and surface a warning.
        return (
            gr.update(visible=True),
            gr.update(visible=False),
            "⚠️ Please enter your student ID to continue.",
            "",
            "",
        )
    sid = uuid.uuid4().hex
    await log_event(cleaned, sid, "login", {"meta": {"agent": "gradio_app", "version": 1}})
    # Hide login, show the app, clear the warning, seed the session state.
    return (gr.update(visible=False), gr.update(visible=True), "", cleaned, sid)
# Main app view: hidden until login succeeds.
with gr.Column(visible=False) as app_view:
welcome_md = gr.Markdown("")
with gr.Tabs():
# Static instructions tab; the HTML below is rendered as-is by gr.Markdown.
with gr.Tab("Assignment"):
gr.Markdown("""
<h2> Platform Usage and the Assignment </h2>
<br>
<ul>
<li> You can use the "SQL" tab to run your queries and see if you have the correct results.</li>
<li> The "Chatbot" tab provides you a chatbot (that is connected to ChatGPT) to ask questions about PostgreSQL and the database.</li>
<li> The chatbot knows the tables and their columns, and would help with questions.</li>
<li> Even with its knowledge, the chatbot can still make mistakes.</li>
<li> When you are finished with all questions, the survey platform will ask for a completion code. You can find it in the "Chatbot" tab. </li>
<li> <b> Reminder: </b> This assignment is optional and ungraded. It is designed for you to practice. You can be relaxed, it is okay to have errors. Good luck! </li>
</ul>
<h3> Database </h3>
The database has 4 tables, each corresponding to the 4 excel files you have for the project:
<ul>
<li>sales</li>
<li>metadata</li>
<li>user_reviews</li>
<li>expert_reviews</li>
</ul>
<br>
<b> Important Notes: </b>
<br>
<br>
<ul>
<li> A proper ERD or foreign key relationships are not defined for the tables. You can still join them based on the column names, but be careful. </li>
<li> Some movies have the same title but they are different movies. </li>
<li> A column that stores numerical information might have the datatype "text". </li>
<li> Datatypes might not be exactly the same as the excel files. </li>
<li> Some columns might store null values as text, like "n/a" or "null". </li>
<li> Columns with the same names might store different values in different tables. Example: "url" column in metadata and sales.</li>
</ul>
""")
# Chatbot tab: message history, input box, send + completion-code buttons.
with gr.Tab("Chatbot"):
chatbot = gr.Chatbot(type="messages", label="Conversation", height=450)
with gr.Row():
chat_input = gr.Textbox(
placeholder="How can I help you with PostgreSQL today?",
scale=8,
autofocus=True,
container=False,
)
send_btn = gr.Button("Send", variant="primary", scale=1)
code_btn = gr.Button("Completion code", variant="secondary", scale=1)
def _clear_input():
return ""
# Both the Send button and Enter in the textbox stream chat_driver output
# into the chatbot, then clear the input box as a follow-up step.
ev = send_btn.click(chat_driver, [chat_input, chatbot, user_name, session_id], [chatbot, chat_input])
ev.then(_clear_input, None, [chat_input])
ev2 = chat_input.submit(chat_driver, [chat_input, chatbot, user_name, session_id], [chatbot, chat_input])
ev2.then(_clear_input, None, [chat_input])
# BUG FIX: the original wired post_completion_code's one-message return list
# directly into outputs=[chatbot], which REPLACED the entire conversation —
# clicking "Completion code" wiped the chat history. Append instead.
async def _show_completion_code(history, _user_name, _session_id):
    """Append the completion-code message to the existing chat history."""
    return (history or []) + await post_completion_code(_user_name, _session_id)
code_btn.click(
    _show_completion_code,
    inputs=[chatbot, user_name, session_id],
    outputs=[chatbot],
)
# SQL tab: query editor, run/clear buttons, results grid, status + plan text.
with gr.Tab("SQL"):
with gr.Column():
sql_input = gr.Code(
label="SQL",
language="sql",
value="SELECT * FROM sales;",
lines=10,
)
with gr.Row():
run_btn = gr.Button("Run", variant="primary")
clear_btn = gr.Button("Clear")
results = gr.Dataframe(
label="Results",
wrap=True,
interactive=True,
)
meta = gr.Markdown("")
# NOTE(review): gr.Markdown's label is not rendered in most themes — confirm
# the "Explain/Plan" label is actually visible.
plan = gr.Markdown("", label="Explain/Plan")
async def on_run(q, _user_name, _session_id):
    """Run the query off the event loop, log the attempt, and return
    (dataframe, meta message, empty plan text)."""
    # run_sql is blocking; to_thread keeps the UI responsive while it runs.
    frame, info, _ = await asyncio.to_thread(run_sql, q, max_rows, False)
    # Defensive row count: falls back to 0 if the result has no .shape.
    rows = int(getattr(frame, "shape", [0])[0])
    await log_event(
        _user_name,
        _session_id,
        "sql",
        {"query": q, "row_limit": max_rows, "row_count": rows, "meta": info},
    )
    return frame, info, ""
def on_clear():
    """Reset the SQL console: empty query, empty grid, status note, no plan."""
    fresh_grid = pd.DataFrame()
    return "", fresh_grid, "Cleared.", ""
run_btn.click(on_run, [sql_input, user_name, session_id], [results, meta, plan])
clear_btn.click(on_clear, inputs=None, outputs=[sql_input, results, meta, plan])
# Login wiring: button click and Enter both run do_login, which toggles the
# two views and seeds the user_name/session_id state.
outputs = [identify_view, app_view, id_msg, user_name, session_id]
enter_btn.click(do_login, inputs=[name_tb], outputs=outputs)
name_tb.submit(do_login, inputs=[name_tb], outputs=outputs)
def greet(name):
    """Render a bold Markdown greeting for the logged-in student."""
    return "**Hello, {}!**".format(name)
user_name.change(greet, inputs=[user_name], outputs=[welcome_md])
if __name__ == "__main__":
# NOTE(review): share=True opens a public Gradio tunnel to this app —
# confirm public exposure is intended for the assignment deployment.
demo.launch(share=True)