PreethiCarmelBosco committed on
Commit
29c67fa
·
verified ·
1 Parent(s): 4a7bb4c
Files changed (1) hide show
  1. app.py +13 -9
app.py CHANGED
@@ -23,18 +23,21 @@ client = OpenAI(
23
  )
24
 
25
  # --- 3. Define the Gradio Function ---
26
- def get_sql_response(message, history):
 
27
  system_prompt = "You are an expert AI that converts natural language questions into SQL queries. You only respond with the SQL query."
28
 
29
- messages_for_api = [{"role": "system", "content": system_prompt}]
30
- for turn in history:
31
- messages_for_api.append({"role": "user", "content": turn[0]})
32
- messages_for_api.append({"role": "assistant", "content": turn[1]})
33
- messages_for_api.append({"role": "user", "content": message})
 
 
34
 
35
  try:
36
  response_stream = client.chat.completions.create(
37
- model="prem-1b-sql", # Placeholder
38
  messages=messages_for_api,
39
  stream=True
40
  )
@@ -53,10 +56,11 @@ def get_sql_response(message, history):
53
  print("Launching Gradio interface...")
54
  gr.ChatInterface(
55
  get_sql_response,
56
- title="TEXT-SQL Playground (UI)",
57
  description="Ask a natural language question. I will convert it to a SQL query.",
58
  examples=["Show me the average salary for all employees in the 'Engineering' department.",
59
- "List all users who signed up in the last 30 days."]
 
60
  ).launch(
61
  server_name="0.0.0.0",
62
  server_port=7860,
 
23
  )
24
 
25
  # --- 3. Define the Gradio Function ---
26
+ # This function now expects 'history' to be in the new "messages" format
27
+ def get_sql_response(message: str, history: list[dict[str, str]]):
28
  system_prompt = "You are an expert AI that converts natural language questions into SQL queries. You only respond with the SQL query."
29
 
30
+ # The history is already in the correct OpenAI format
31
+ # We just add the system prompt and the new user message
32
+ messages_for_api = (
33
+ [{"role": "system", "content": system_prompt}] +
34
+ history +
35
+ [{"role": "user", "content": message}]
36
+ )
37
 
38
  try:
39
  response_stream = client.chat.completions.create(
40
+ model="prem-sql-api", # This should match the name in your Ollama Modelfile
41
  messages=messages_for_api,
42
  stream=True
43
  )
 
56
  print("Launching Gradio interface...")
57
  gr.ChatInterface(
58
  get_sql_response,
59
+ title="Prem-1B-SQL Playground (UI)",
60
  description="Ask a natural language question. I will convert it to a SQL query.",
61
  examples=["Show me the average salary for all employees in the 'Engineering' department.",
62
+ "List all users who signed up in the last 30 days."],
63
+ type="messages" # This fixes the UserWarning
64
  ).launch(
65
  server_name="0.0.0.0",
66
  server_port=7860,