Ganesh Chintalapati committed on
Commit
f45d8b2
·
1 Parent(s): 231773b

Multimodel chat with user login and historical dropdown

Browse files
Files changed (4) hide show
  1. api.py +60 -12
  2. app.py +234 -79
  3. chat_history.db +0 -0
  4. core.py +98 -28
api.py CHANGED
@@ -67,35 +67,75 @@ async def ask_anthropic(query: str, history: List[Dict[str, str]]) -> AsyncGener
67
  yield "Error: Anthropic API key not provided."
68
  return
69
 
70
- messages = []
 
 
 
 
 
71
  for msg in history:
72
- if msg.get("role") == "user":
73
- messages.append({"role": "user", "content": msg["content"]})
74
- elif msg.get("role") == "assistant":
75
- messages.append({"role": "assistant", "content": msg["content"]})
76
- messages.append({"role": "user", "content": query})
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
77
 
78
  headers = {
79
  "x-api-key": anthropic_api_key,
80
- "anthropic-version": "2023-06-01",
81
  "Content-Type": "application/json"
82
  }
83
 
84
  payload = {
85
- "model": "claude-3-5-sonnet-20241022",
86
- "max_tokens": 1024,
87
- "messages": messages,
88
  "stream": True
89
  }
90
 
91
  try:
92
  async with httpx.AsyncClient() as client:
93
  async with client.stream("POST", "https://api.anthropic.com/v1/messages", headers=headers, json=payload) as response:
94
- response.raise_for_status()
95
  buffer = ""
96
  async for chunk in response.aiter_text():
97
  if chunk:
98
  buffer += chunk
 
 
99
  while "\n" in buffer:
100
  line, buffer = buffer.split("\n", 1)
101
  if line.startswith("data: "):
@@ -106,11 +146,19 @@ async def ask_anthropic(query: str, history: List[Dict[str, str]]) -> AsyncGener
106
  continue
107
  try:
108
  json_data = json.loads(data)
 
109
  if json_data.get("type") == "content_block_delta" and "delta" in json_data:
110
  yield json_data["delta"].get("text", "")
 
 
 
 
111
  except Exception as e:
112
  logger.error(f"Anthropic parse error: {e}")
113
- yield f"[Anthropic Error]: {e}"
 
 
 
114
  except Exception as e:
115
  logger.error(f"Anthropic API error: {e}")
116
  yield f"[Anthropic Error]: {e}"
 
67
  yield "Error: Anthropic API key not provided."
68
  return
69
 
70
+ # --- Start: Message Cleaning for Anthropic ---
71
+ # Anthropic requires messages to alternate roles, starting with 'user'.
72
+ # Clean the history to ensure this format.
73
+ cleaned_messages = []
74
+ last_role = None
75
+
76
  for msg in history:
77
+ role = msg.get("role")
78
+ content = msg.get("content")
79
+
80
+ if not role or not content:
81
+ continue # Skip invalid messages
82
+
83
+ # If the last message was the same role, skip this one or combine (combining is more complex)
84
+ if role == last_role:
85
+ logger.warning(f"Skipping consecutive message with role: {role}")
86
+ continue
87
+
88
+ # If the first message is 'assistant', skip it
89
+ if not cleaned_messages and role == "assistant":
90
+ logger.warning("Skipping initial assistant message in history for Anthropic.")
91
+ continue
92
+
93
+ cleaned_messages.append({"role": role, "content": content})
94
+ last_role = role
95
+
96
+ # Ensure the last message in history is 'assistant' before adding the new user query
97
+ # If the history ends with 'user', we might have an issue or the model didn't respond last turn.
98
+ # For simplicity, we'll just append the new user query. The API will validate the full list.
99
+ # A more robust approach might require padding with an empty assistant message if history ends with user.
100
+ # However, the core.py logic should ensure history alternates correctly.
101
+ # The main cleaning needed is handling initial assistant messages and consecutive roles.
102
+
103
+ # Append the current user query
104
+ cleaned_messages.append({"role": "user", "content": query})
105
+
106
+ # Final check: Ensure the list starts with 'user' and alternates.
107
+ # If after cleaning and adding the new query, the list is empty or starts with 'assistant', something is wrong.
108
+ if not cleaned_messages or cleaned_messages[0].get("role") != "user":
109
+ logger.error("Anthropic message cleaning resulted in invalid format.")
110
+ yield "Error: Internal message formatting issue for Anthropic."
111
+ return
112
+
113
+ # --- End: Message Cleaning ---
114
+
115
 
116
  headers = {
117
  "x-api-key": anthropic_api_key,
118
+ "anthropic-version": "2023-06-01", # Use a valid API version
119
  "Content-Type": "application/json"
120
  }
121
 
122
  payload = {
123
+ "model": "claude-3-5-sonnet-20241022", # Ensure you are using a valid model name
124
+ "max_tokens": 4096, # Increased max_tokens for potentially longer responses
125
+ "messages": cleaned_messages, # Use the cleaned messages
126
  "stream": True
127
  }
128
 
129
  try:
130
  async with httpx.AsyncClient() as client:
131
  async with client.stream("POST", "https://api.anthropic.com/v1/messages", headers=headers, json=payload) as response:
132
+ response.raise_for_status() # Raise HTTPError for bad responses (like 400)
133
  buffer = ""
134
  async for chunk in response.aiter_text():
135
  if chunk:
136
  buffer += chunk
137
+ # Anthropic streaming sends JSON objects separated by newlines
138
+ # Sometimes multiple objects are in one chunk
139
  while "\n" in buffer:
140
  line, buffer = buffer.split("\n", 1)
141
  if line.startswith("data: "):
 
146
  continue
147
  try:
148
  json_data = json.loads(data)
149
+ # Check the type of event
150
  if json_data.get("type") == "content_block_delta" and "delta" in json_data:
151
  yield json_data["delta"].get("text", "")
152
+ # Handle other event types if necessary (e.g., message_start, message_delta, message_stop)
153
+ except json.JSONDecodeError:
154
+ # If it's not a complete JSON line, keep buffering
155
+ buffer = line + "\n" + buffer # Put the line back in buffer
156
  except Exception as e:
157
  logger.error(f"Anthropic parse error: {e}")
158
+ yield f"[Anthropic Parse Error]: {e}"
159
+ except httpx.HTTPStatusError as e:
160
+ logger.error(f"Anthropic API HTTP error: {e.response.status_code} - {e.response.text}")
161
+ yield f"[Anthropic API Error {e.response.status_code}]: {e.response.text}"
162
  except Exception as e:
163
  logger.error(f"Anthropic API error: {e}")
164
  yield f"[Anthropic Error]: {e}"
app.py CHANGED
@@ -3,6 +3,7 @@ import sqlite3
3
  import os
4
  import asyncio
5
  from core import submit_query
 
6
 
7
  DB_PATH = "chat_history.db"
8
 
@@ -15,14 +16,23 @@ def init_db():
15
  password TEXT NOT NULL
16
  )
17
  """)
 
 
 
 
 
 
 
18
  c.execute("""
19
  CREATE TABLE IF NOT EXISTS history (
 
20
  username TEXT,
21
  providers TEXT,
22
  message TEXT,
23
  openai_response TEXT,
24
  anthropic_response TEXT,
25
- gemini_response TEXT
 
26
  )
27
  """)
28
  conn.commit()
@@ -48,121 +58,266 @@ def login_user(username, password):
48
  conn.close()
49
  return result is not None
50
 
51
- def get_chat_history(username):
 
 
 
 
 
 
 
 
 
52
  conn = sqlite3.connect(DB_PATH)
53
  c = conn.cursor()
54
- c.execute("SELECT message, openai_response, anthropic_response, gemini_response FROM history WHERE username=?", (username,))
 
 
 
 
 
 
 
 
55
  rows = c.fetchall()
56
  conn.close()
57
- history = []
 
 
 
 
58
  for m, o, a, g in rows:
59
- history.append({"role": "user", "content": m})
60
- responses = []
61
- if o: responses.append(f"OpenAI: {o}")
62
- if a: responses.append(f"Anthropic: {a}")
63
- if g: responses.append(f"Gemini: {g}")
64
- if responses:
65
- history.append({"role": "assistant", "content": "\n".join(responses)})
66
- return history
67
-
68
- def save_chat_history(username, providers, message, openai_resp, anthropic_resp, gemini_resp):
 
 
 
 
69
  conn = sqlite3.connect(DB_PATH)
70
  c = conn.cursor()
71
  c.execute(
72
- "INSERT INTO history (username, providers, message, openai_response, anthropic_response, gemini_response) VALUES (?, ?, ?, ?, ?, ?)",
73
- (username, ",".join(providers), message, openai_resp, anthropic_resp, gemini_resp)
74
  )
75
  conn.commit()
76
  conn.close()
77
 
78
- async def chatbot_fn(message, history, username, providers):
79
- if not username:
80
- return "", history or []
81
  if not providers:
82
- return "", (history or []) + [
83
- {"role": "user", "content": message},
84
- {"role": "assistant", "content": "Please select at least one provider."}
85
- ]
 
 
86
 
 
87
  final_result = None
88
- async for _, openai_msgs, anthropic_msgs, gemini_msgs, updated_history in submit_query(message, providers, history, username):
89
- final_result = (openai_msgs, anthropic_msgs, gemini_msgs, updated_history)
 
 
 
 
90
 
91
  if not final_result:
92
- return "", (history or []) + [
93
- {"role": "user", "content": message},
94
- {"role": "assistant", "content": "Error: No response received."}
95
- ]
96
 
97
- openai_resp, anthropic_resp, gemini_resp, updated_history = final_result
98
 
99
- def get_last_response(msgs):
100
- if isinstance(msgs, list) and msgs:
101
- for m in reversed(msgs):
102
  if isinstance(m, dict) and m.get("role") == "assistant" and m.get("content"):
103
  return m.get("content", "")
104
  return ""
105
 
106
- openai_text = get_last_response(openai_resp)
107
- anthropic_text = get_last_response(anthropic_resp)
108
- gemini_text = get_last_response(gemini_resp)
109
- save_chat_history(username, providers, message, openai_text, anthropic_text, gemini_text)
110
 
111
- responses = []
112
- if openai_text: responses.append(f"OpenAI: {openai_text}")
113
- if anthropic_text: responses.append(f"Anthropic: {anthropic_text}")
114
- if gemini_text: responses.append(f"Gemini: {gemini_text}")
115
 
116
- history = history or []
117
- history.append({"role": "user", "content": message})
118
- if responses:
119
- history.append({"role": "assistant", "content": "\n".join(responses)})
120
- return "", history
121
 
122
- def load_history(username):
123
- if not username:
124
- return "", []
125
- return "", get_chat_history(username)
126
 
127
  with gr.Blocks() as demo:
128
- gr.Markdown("# Multi-Model Chat with Login")
129
- with gr.Tabs():
130
- with gr.Tab("Login / Register"):
131
- username = gr.Textbox(label="Username")
132
- password = gr.Textbox(label="Password", type="password")
133
- login_btn = gr.Button("Login")
134
- register_btn = gr.Button("Register")
135
- login_status = gr.Markdown("")
136
-
137
- with gr.Tab("Chat"):
138
- providers = gr.CheckboxGroup(
139
- choices=["OpenAI", "Anthropic", "Gemini"],
140
- label="Select Providers",
141
- value=["OpenAI"]
142
- )
143
- chatbox = gr.Chatbot(type="messages")
144
- msg = gr.Textbox(label="Message")
145
- send_btn = gr.Button("Send")
146
- clear_btn = gr.Button("Clear History")
147
- hidden_username = gr.State("")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
148
 
 
149
  def do_login(u, p):
150
  if login_user(u, p):
151
- return f"Welcome, {u}!", gr.update(visible=True), u
 
 
 
 
 
 
 
 
 
 
 
 
 
 
152
  else:
153
- return "Login failed.", gr.update(visible=False), ""
154
 
155
  def do_register(u, p):
156
  if register_user(u, p):
157
- return "Registration successful! Please login.", gr.update(visible=False), ""
158
  else:
159
- return "Username already exists.", gr.update(visible=False), ""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
160
 
161
- login_btn.click(do_login, [username, password], [login_status, chatbox, hidden_username])
162
- register_btn.click(do_register, [username, password], [login_status, chatbox, hidden_username])
163
- send_btn.click(chatbot_fn, [msg, chatbox, hidden_username, providers], [msg, chatbox])
164
- clear_btn.click(lambda u: ("", []), [hidden_username], [msg, chatbox])
165
- login_btn.click(load_history, [username], [msg, chatbox])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
166
 
167
  if __name__ == "__main__":
168
  init_db()
 
3
  import os
4
  import asyncio
5
  from core import submit_query
6
+ from config import logger
7
 
8
  DB_PATH = "chat_history.db"
9
 
 
16
  password TEXT NOT NULL
17
  )
18
  """)
19
+ c.execute("""
20
+ CREATE TABLE IF NOT EXISTS sessions (
21
+ session_id INTEGER PRIMARY KEY AUTOINCREMENT,
22
+ username TEXT,
23
+ created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
24
+ )
25
+ """)
26
  c.execute("""
27
  CREATE TABLE IF NOT EXISTS history (
28
+ session_id INTEGER,
29
  username TEXT,
30
  providers TEXT,
31
  message TEXT,
32
  openai_response TEXT,
33
  anthropic_response TEXT,
34
+ gemini_response TEXT,
35
+ FOREIGN KEY(session_id) REFERENCES sessions(session_id)
36
  )
37
  """)
38
  conn.commit()
 
58
  conn.close()
59
  return result is not None
60
 
61
def create_new_session(username):
    """Insert a fresh session row for *username* and return its new session id."""
    connection = sqlite3.connect(DB_PATH)
    cursor = connection.cursor()
    cursor.execute("INSERT INTO sessions (username) VALUES (?)", (username,))
    new_session_id = cursor.lastrowid
    connection.commit()
    connection.close()
    return new_session_id
69
+
70
def list_sessions(username):
    """Return (session_id, created_at) rows for *username*, newest first."""
    connection = sqlite3.connect(DB_PATH)
    rows = connection.execute(
        "SELECT session_id, created_at FROM sessions WHERE username=? ORDER BY created_at DESC",
        (username,),
    ).fetchall()
    connection.close()
    return rows
77
+
78
def get_chat_history(session_id):
    """Rebuild the three per-model message histories for one session.

    Returns (openai_history, anthropic_history, gemini_history), each a list
    of {"role": ..., "content": ...} dicts. Every stored turn contributes its
    user message to all three lists; the assistant reply is appended only to
    the model(s) that actually answered that turn.
    """
    conn = sqlite3.connect(DB_PATH)
    c = conn.cursor()
    c.execute("SELECT message, openai_response, anthropic_response, gemini_response FROM history WHERE session_id=? ORDER BY rowid", (session_id,))
    rows = c.fetchall()
    conn.close()

    openai_history = []
    anthropic_history = []
    gemini_history = []

    for m, o, a, g in rows:
        # BUG FIX: build a separate user-message dict for each list. The
        # previous code appended ONE shared dict instance to all three lists,
        # so an in-place edit to one history silently mutated the other two.
        openai_history.append({"role": "user", "content": m})
        anthropic_history.append({"role": "user", "content": m})
        gemini_history.append({"role": "user", "content": m})
        if o:
            openai_history.append({"role": "assistant", "content": o})
        if a:
            anthropic_history.append({"role": "assistant", "content": a})
        if g:
            gemini_history.append({"role": "assistant", "content": g})

    return openai_history, anthropic_history, gemini_history
102
+
103
def save_chat_history(session_id, username, providers, message, openai_resp, anthropic_resp, gemini_resp):
    """Persist one chat turn (question plus each provider's reply) to the history table."""
    row = (session_id, username, ",".join(providers), message, openai_resp, anthropic_resp, gemini_resp)
    connection = sqlite3.connect(DB_PATH)
    connection.execute(
        "INSERT INTO history (session_id, username, providers, message, openai_response, anthropic_response, gemini_response) VALUES (?, ?, ?, ?, ?, ?, ?)",
        row,
    )
    connection.commit()
    connection.close()
112
 
113
async def chatbot_fn(message, openai_history, anthropic_history, gemini_history, username, providers, session_id):
    """Run one chat turn: query the selected providers, persist the turn, return updated UI state.

    Returns a 7-tuple in wiring order: (message-box text, three chat
    displays, three history states). Displays and states receive the same
    list objects. The OpenAI history doubles as the shared context passed to
    submit_query.
    """
    if not username or not session_id:
        # Not logged in / no active session: echo the current state unchanged.
        unchanged = (openai_history or [], anthropic_history or [], gemini_history or [])
        return ("",) + unchanged + unchanged
    if not providers:
        turn = [
            {"role": "user", "content": message},
            {"role": "assistant", "content": "Please select at least one provider."},
        ]
        openai_history = (openai_history or []) + turn
        anthropic_history = (anthropic_history or []) + turn
        gemini_history = (gemini_history or []) + turn
        updated = (openai_history, anthropic_history, gemini_history)
        return ("",) + updated + updated

    latest = None
    err_text = ""
    async for err, oa_msgs, an_msgs, ge_msgs, _ctx in submit_query(
        message, providers, openai_history, username
    ):
        latest = (oa_msgs, an_msgs, ge_msgs)
        err_text = err

    if latest is None:
        logger.error("submit_query returned no result")
        failed = (openai_history, anthropic_history, gemini_history)
        return ("Error: submit_query failed.",) + failed + failed

    oa_new, an_new, ge_new = latest

    def last_assistant_text(msgs):
        # Walk backwards to the most recent non-empty assistant message.
        if isinstance(msgs, list) and msgs:
            for entry in reversed(msgs):
                if isinstance(entry, dict) and entry.get("role") == "assistant" and entry.get("content"):
                    return entry.get("content", "")
        return ""

    save_chat_history(
        session_id,
        username,
        providers,
        message,
        last_assistant_text(oa_new),
        last_assistant_text(an_new),
        last_assistant_text(ge_new),
    )

    fresh = (oa_new, an_new, ge_new)
    return (err_text,) + fresh + fresh
 
 
 
 
153
 
154
def session_label(session):
    """Format a (session_id, created_at) row as its dropdown label."""
    identifier, timestamp = session
    return "{} ({})".format(identifier, timestamp)
 
157
 
158
  with gr.Blocks() as demo:
159
+ gr.Markdown("# Multi-Model Chat")
160
+
161
+ # Login/Register group
162
+ login_group = gr.Group(visible=True)
163
+ with login_group:
164
+ username = gr.Textbox(label="Username")
165
+ password = gr.Textbox(label="Password", type="password")
166
+ login_btn = gr.Button("Login")
167
+ register_btn = gr.Button("Register")
168
+ login_status = gr.Markdown("")
169
+
170
+ # Chat group (hidden by default)
171
+ chat_group = gr.Group(visible=False)
172
+ with chat_group:
173
+ with gr.Row():
174
+ session_selector = gr.Dropdown(choices=[], label="Select Chat Session", interactive=True)
175
+ new_chat_btn = gr.Button("New Chat")
176
+ user_display = gr.Markdown("", elem_id="user_display")
177
+ logout_btn = gr.Button("Logout")
178
+ providers = gr.CheckboxGroup(
179
+ choices=["OpenAI", "Anthropic", "Gemini"],
180
+ label="Select Providers",
181
+ value=["OpenAI"]
182
+ )
183
+ with gr.Row():
184
+ openai_chat = gr.Chatbot(label="OpenAI", type="messages", height=400)
185
+ anthropic_chat = gr.Chatbot(label="Anthropic", type="messages", height=400)
186
+ gemini_chat = gr.Chatbot(label="Gemini", type="messages", height=400)
187
+ msg = gr.Textbox(label="Message")
188
+ send_btn = gr.Button("Send")
189
+ clear_btn = gr.Button("Clear History")
190
+ hidden_username = gr.State("")
191
+ openai_history_state = gr.State([])
192
+ anthropic_history_state = gr.State([])
193
+ gemini_history_state = gr.State([])
194
+ session_id_state = gr.State("")
195
 
196
+ # --- Function Definitions ---
197
  def do_login(u, p):
198
  if login_user(u, p):
199
+ session_id = create_new_session(u)
200
+ sessions = list_sessions(u)
201
+ session_choices = [session_label(s) for s in sessions]
202
+ session_value = session_label(sessions[0]) if sessions else ""
203
+ return (
204
+ f"Welcome, {u}!",
205
+ gr.update(visible=False), # Hide login group
206
+ gr.update(visible=True), # Show chat group
207
+ [], [], [], # Empty chats for new session
208
+ [], [], [],
209
+ u,
210
+ f"Logged in as: **{u}**",
211
+ session_id,
212
+ gr.update(choices=session_choices, value=session_value)
213
+ )
214
  else:
215
+ return "Login failed.", gr.update(), gr.update(), [], [], [], [], [], [], "", "", gr.update(choices=[], value="")
216
 
217
  def do_register(u, p):
218
  if register_user(u, p):
219
+ return "Registration successful! Please login.", gr.update(), gr.update(), [], [], [], [], [], [], "", "", gr.update(choices=[], value="")
220
  else:
221
+ return "Username already exists.", gr.update(), gr.update(), [], [], [], [], [], [], "", "", gr.update(choices=[], value="")
222
+
223
+ def do_logout():
224
+ return (
225
+ "", # login_status
226
+ gr.update(visible=True), # Show login group
227
+ gr.update(visible=False), # Hide chat group
228
+ [], [], [], [], [], [], "", "", gr.update(choices=[], value="")
229
+ )
230
+
231
+ def do_new_chat(username):
232
+ if not username:
233
+ return "", [], [], [], gr.update(choices=[], value="")
234
+ session_id = create_new_session(username)
235
+ sessions = list_sessions(username)
236
+ session_choices = [session_label(s) for s in sessions]
237
+ session_value = session_label(sessions[0]) if sessions else ""
238
+ return session_id, [], [], [], gr.update(choices=session_choices, value=session_value)
239
+
240
+ def do_select_session(session_label_str, username):
241
+ if not session_label_str or not username:
242
+ return [], [], [], [], [], [], ""
243
+ session_id = int(session_label_str.split(" ")[0])
244
+ openai_hist, anthropic_hist, gemini_hist = get_chat_history(session_id)
245
+ return (
246
+ openai_hist, anthropic_hist, gemini_hist, # update chat displays
247
+ openai_hist, anthropic_hist, gemini_hist, # update state variables
248
+ session_id # update session_id_state
249
+ )
250
+
251
+ # --- Gradio Wiring ---
252
+ login_btn.click(
253
+ do_login,
254
+ [username, password],
255
+ [
256
+ login_status, login_group, chat_group,
257
+ openai_chat, anthropic_chat, gemini_chat,
258
+ openai_history_state, anthropic_history_state, gemini_history_state,
259
+ hidden_username,
260
+ user_display,
261
+ session_id_state,
262
+ session_selector # Only one output for dropdown!
263
+ ]
264
+ )
265
+
266
+ register_btn.click(
267
+ do_register,
268
+ [username, password],
269
+ [
270
+ login_status, login_group, chat_group,
271
+ openai_chat, anthropic_chat, gemini_chat,
272
+ openai_history_state, anthropic_history_state, gemini_history_state,
273
+ hidden_username,
274
+ user_display,
275
+ session_id_state,
276
+ session_selector
277
+ ]
278
+ )
279
 
280
+ send_btn.click(
281
+ chatbot_fn,
282
+ [msg, openai_history_state, anthropic_history_state, gemini_history_state, hidden_username, providers, session_id_state],
283
+ [msg, openai_chat, anthropic_chat, gemini_chat, openai_history_state, anthropic_history_state, gemini_history_state]
284
+ )
285
+
286
+ clear_btn.click(
287
+ lambda: ("", [], [], [], [], [], []),
288
+ [],
289
+ [msg, openai_chat, anthropic_chat, gemini_chat, openai_history_state, anthropic_history_state, gemini_history_state]
290
+ )
291
+
292
+ logout_btn.click(
293
+ do_logout,
294
+ [],
295
+ [
296
+ login_status, login_group, chat_group,
297
+ openai_chat, anthropic_chat, gemini_chat,
298
+ openai_history_state, anthropic_history_state, gemini_history_state,
299
+ hidden_username,
300
+ user_display,
301
+ session_id_state,
302
+ session_selector
303
+ ]
304
+ )
305
+
306
+ new_chat_btn.click(
307
+ do_new_chat,
308
+ [hidden_username],
309
+ [session_id_state, openai_chat, anthropic_chat, gemini_chat, session_selector]
310
+ )
311
+
312
+ session_selector.change(
313
+ do_select_session,
314
+ [session_selector, hidden_username],
315
+ [
316
+ openai_chat, anthropic_chat, gemini_chat, # chat displays
317
+ openai_history_state, anthropic_history_state, gemini_history_state, # state variables
318
+ session_id_state # session id state
319
+ ]
320
+ )
321
 
322
  if __name__ == "__main__":
323
  init_db()
chat_history.db CHANGED
Binary files a/chat_history.db and b/chat_history.db differ
 
core.py CHANGED
@@ -1,13 +1,18 @@
1
  import asyncio
2
  from typing import AsyncGenerator, List, Dict, Tuple
3
- from config import logger
4
- from api import ask_openai, ask_anthropic, ask_gemini
5
- from database import Database
6
  import json
7
 
 
 
 
8
  db = Database()
9
  db.connect()
10
 
 
 
11
  def register(username, password, message=None):
12
  if db.add_user(username, password):
13
  return "Registration successful"
@@ -17,8 +22,11 @@ def register(username, password, message=None):
17
  def login(username, password, message=None):
18
  user_id = db.get_user(username, password)
19
  if user_id:
 
 
 
20
  conversation = db.get_conversations(user_id)
21
- history = json.loads(conversation) if conversation else []
22
  return "Login successful", user_id, history
23
  else:
24
  return "Invalid credentials", None, []
@@ -26,45 +34,107 @@ def login(username, password, message=None):
26
  def logout():
27
  return "Logout successful"
28
 
 
29
  def clear_history(user_id):
30
  db.clear_conversation(user_id)
31
- return [], [], [], []
 
32
 
33
  async def submit_query(query, providers, history, user_id):
34
- async for _, openai_msgs, anthropic_msgs, gemini_msgs, updated_history in query_model(query, providers, history):
35
- db.add_conversation(user_id, json.dumps(updated_history))
36
- yield "", openai_msgs, anthropic_msgs, gemini_msgs, updated_history
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
37
 
38
  async def query_model(query: str, providers: List[str], history: List[Dict[str, str]]) -> AsyncGenerator[Tuple[str, List[Dict[str, str]], List[Dict[str, str]], List[Dict[str, str]], List[Dict[str, str]]], None]:
39
- try:
40
- openai_msgs = history.copy()
41
- anthropic_msgs = history.copy()
42
- gemini_msgs = history.copy()
43
-
44
- openai_response = ""
45
- if "OpenAI" in providers:
46
- async for chunk in ask_openai(query, history):
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47
  openai_response += chunk
48
  if openai_response:
 
49
  openai_msgs.append({"role": "assistant", "content": openai_response.strip()})
 
 
 
 
 
 
 
50
 
51
- anthropic_response = ""
52
- if "Anthropic" in providers:
53
- async for chunk in ask_anthropic(query, history):
 
 
 
54
  anthropic_response += chunk
55
  if anthropic_response:
 
56
  anthropic_msgs.append({"role": "assistant", "content": anthropic_response.strip()})
 
 
 
 
 
 
57
 
58
- gemini_response = ""
59
- if "Gemini" in providers:
60
- async for chunk in ask_gemini(query, history):
 
 
 
 
61
  gemini_response += chunk
62
  if gemini_response:
 
63
  gemini_msgs.append({"role": "assistant", "content": gemini_response.strip()})
 
 
 
 
 
 
 
64
 
65
- updated_history = history.copy()
66
- updated_history.append({"role": "user", "content": query})
67
- yield "", openai_msgs, anthropic_msgs, gemini_msgs, updated_history
68
- except Exception as e:
69
- logger.error(f"Error in query_model: {e}")
70
- yield f"Error: An unexpected error occurred. {e}", [], [], [], history
 
1
  import asyncio
2
  from typing import AsyncGenerator, List, Dict, Tuple
3
+ from config import logger # Make sure config.py exists with a logger
4
+ from api import ask_openai, ask_anthropic, ask_gemini # Make sure api.py exists with these functions
5
+ from database import Database # Make sure database.py exists with a Database class
6
  import json
7
 
8
+ # Assuming database.py has a Database class with connect, add_user, get_user,
9
+ # get_conversations, add_conversation, clear_conversation methods.
10
+ # If not, you'll need to implement these or adjust.
11
  db = Database()
12
  db.connect()
13
 
14
+ # These functions might be redundant if login/register is handled fully in app.py
15
+ # but keeping them here based on previous code structure.
16
  def register(username, password, message=None):
17
  if db.add_user(username, password):
18
  return "Registration successful"
 
22
  def login(username, password, message=None):
23
  user_id = db.get_user(username, password)
24
  if user_id:
25
+ # Note: This loads a single conversation string, not separate histories.
26
+ # If you need separate histories loaded here, database schema needs adjustment.
27
+ # For now, app.py's get_chat_history handles loading separate histories from the DB.
28
  conversation = db.get_conversations(user_id)
29
+ history = json.loads(conversation) if conversation else [] # Assuming conversation is JSON string of history
30
  return "Login successful", user_id, history
31
  else:
32
  return "Invalid credentials", None, []
 
34
  def logout():
35
  return "Logout successful"
36
 
37
+ # This clear_history might be redundant if app.py handles clearing via Gradio states
38
  def clear_history(user_id):
39
  db.clear_conversation(user_id)
40
+ return [], [], [], [] # Assuming this returns empty histories for 3 models + context
41
+
42
 
43
async def submit_query(query, providers, history, user_id):
    """Relay query_model's streamed 5-tuples unchanged.

    Persistence is handled by the caller (app.py's save_chat_history); this
    wrapper exists so the app layer has a stable entry point. *user_id* is
    currently unused here and kept only for interface compatibility.
    """
    async for result in query_model(query, providers, history):
        # result = (error_msg, openai_msgs, anthropic_msgs, gemini_msgs,
        #           updated_context_history)
        yield result
61
+
62
 
63
async def query_model(query: str, providers: List[str], history: List[Dict[str, str]]) -> AsyncGenerator[Tuple[str, List[Dict[str, str]], List[Dict[str, str]], List[Dict[str, str]], List[Dict[str, str]]], None]:
    """Query each selected provider and yield the per-model histories.

    Yields a single 5-tuple: (error_text, openai_msgs, anthropic_msgs,
    gemini_msgs, context_history). The OpenAI history doubles as the shared
    context history. *history* (the caller's context) is never mutated.
    """
    # Each model gets its own copy of the context plus its OWN user-message
    # dict. BUG FIX: previously one dict instance was shared by all three
    # lists, so a later in-place edit to one history leaked into the others.
    openai_msgs = history.copy() + [{"role": "user", "content": query}]
    anthropic_msgs = history.copy() + [{"role": "user", "content": query}]
    gemini_msgs = history.copy() + [{"role": "user", "content": query}]

    errors = []

    async def _run(name, ask, msgs):
        # Stream one provider's reply; append it (or an error entry) to msgs.
        # The history passed to the model already includes the current user
        # message.
        response = ""
        try:
            async for chunk in ask(query, msgs):
                response += chunk
            if response:
                msgs.append({"role": "assistant", "content": response.strip()})
            # If no response, msgs keeps just the user message for this turn.
        except Exception as e:
            logger.error(f"Error calling {name}: {e}")
            errors.append(f"{name} Error: {e}")
            msgs.append({"role": "assistant", "content": f"Error: {e}"})

    if "OpenAI" in providers:
        await _run("OpenAI", ask_openai, openai_msgs)
    if "Anthropic" in providers:
        await _run("Anthropic", ask_anthropic, anthropic_msgs)
    if "Gemini" in providers:
        await _run("Gemini", ask_gemini, gemini_msgs)

    # First element carries accumulated error text ("" when all succeeded);
    # openai_msgs is repeated as the updated context history.
    yield "\n".join(errors), openai_msgs, anthropic_msgs, gemini_msgs, openai_msgs