Ganesh Chintalapati commited on
Commit
40966a6
·
1 Parent(s): d05cbb8
Files changed (5) hide show
  1. app.py +44 -40
  2. chatapp.db +0 -0
  3. core.py +39 -150
  4. database.py +57 -0
  5. requirements.txt +2 -1
app.py CHANGED
@@ -1,50 +1,54 @@
1
  import gradio as gr
2
- from core import submit_query, clear_history
3
-
4
- # Define Gradio interface
5
- with gr.Blocks(theme=gr.themes.Soft(), css=".full-height { height: 100%; display: flex; align-items: stretch; min-height: 40px; } .full-height button { height: 100%; padding: 8px 16px; } .providers-row { height: 100%; display: flex; align-items: stretch; min-height: 40px; } .providers-row .checkbox-group { height: 100%; display: flex; flex-direction: row; align-items: center; gap: 10px; }") as demo:
6
- gr.Markdown("# Multi-Model Chat")
7
- gr.Markdown("Chat with OpenAI, Anthropic, or Gemini. Select providers and compare responses side by side!")
8
-
9
- with gr.Row(elem_classes="providers-row"):
10
- providers = gr.CheckboxGroup(choices=["OpenAI", "Anthropic", "Gemini"], label="Select Providers", value=["OpenAI"], elem_classes="checkbox-group")
11
-
12
- with gr.Row(elem_classes="full-height"):
13
- query = gr.Textbox(
14
- label="Enter your query",
15
- placeholder="e.g., What is the capital of the United States?",
16
- scale=4,
17
- autofocus=True # This will focus the textbox on load
18
- )
19
- submit_button = gr.Button("Submit", scale=1)
20
-
21
- with gr.Row():
22
  clear_button = gr.Button("Clear History")
 
 
 
 
 
23
 
24
- with gr.Row():
25
- openai_chatbot = gr.Chatbot(label="OpenAI", type="messages", scale=1)
26
- anthropic_chatbot = gr.Chatbot(label="Anthropic", type="messages", scale=1)
27
- gemini_chatbot = gr.Chatbot(label="Gemini", type="messages", scale=1)
 
28
 
29
- chat_history = gr.State([])
 
 
 
 
 
 
 
 
30
 
31
- # Handle both button click and Enter key
32
- query.submit(
33
- fn=submit_query,
34
- inputs=[query, providers, chat_history],
35
- outputs=[query, openai_chatbot, anthropic_chatbot, gemini_chatbot, chat_history]
36
- )
37
-
38
  submit_button.click(
39
- fn=submit_query,
40
- inputs=[query, providers, chat_history],
41
- outputs=[query, openai_chatbot, anthropic_chatbot, gemini_chatbot, chat_history]
42
  )
43
-
44
  clear_button.click(
45
- fn=clear_history,
46
- inputs=[],
47
- outputs=[openai_chatbot, anthropic_chatbot, gemini_chatbot, chat_history]
48
  )
49
 
50
- demo.launch()
 
1
import gradio as gr
from core import submit_query, clear_history, login, register, logout

# Gradio UI: a login/register tab plus a chat tab that fans one query out
# to up to three model providers and shows their answers side by side.
with gr.Blocks() as demo:
    gr.Markdown("# Multi-Model Chat with Login")
    # Per-session state: the logged-in user's DB id (None until login
    # succeeds, "" after a failed login/logout) and the shared chat history.
    user_id = gr.State(None)
    chat_history = gr.State([])

    with gr.Tab("Login/Register"):
        username = gr.Textbox(label="Username")
        password = gr.Textbox(label="Password", type="password")
        login_btn = gr.Button("Login")
        register_btn = gr.Button("Register")
        login_status = gr.Textbox(label="Status", interactive=False)

    with gr.Tab("Chat"):
        providers = gr.CheckboxGroup(choices=["OpenAI", "Anthropic", "Gemini"], label="Select Providers", value=["OpenAI"])
        query = gr.Textbox(label="Enter your query")
        submit_button = gr.Button("Submit")
        clear_button = gr.Button("Clear History")
        openai_chatbot = gr.Chatbot(label="OpenAI", type="messages")
        anthropic_chatbot = gr.Chatbot(label="Anthropic", type="messages")
        gemini_chatbot = gr.Chatbot(label="Gemini", type="messages")
        logout_btn = gr.Button("Logout")
        chat_status = gr.Textbox(label="Chat Status", interactive=False)

    def login_fn(username, password):
        # core.login returns (message, user_id_or_None, saved_history);
        # a failed login's None id is mapped to "" so the State stays falsy
        # without Gradio treating it as "no update".
        msg, uid, history = login(username, password)
        if uid is None:
            uid = ""
        return msg, uid, history

    def register_fn(username, password):
        # Thin wrapper so the click handler has the (username, password)
        # signature Gradio expects; returns the status string.
        return register(username, password)

    def logout_fn():
        # Reset user id and history alongside the status message.
        return logout(), "", []

    login_btn.click(login_fn, [username, password], [login_status, user_id, chat_history])
    register_btn.click(register_fn, [username, password], login_status)
    logout_btn.click(logout_fn, [], [chat_status, user_id, chat_history])

    # NOTE(review): the previous version also wired query.submit(...) so
    # pressing Enter submitted the query; this commit dropped that binding —
    # confirm whether Enter-to-submit should be restored.
    submit_button.click(
        submit_query,
        [query, providers, chat_history, user_id],
        [query, openai_chatbot, anthropic_chatbot, gemini_chatbot, chat_history]
    )

    clear_button.click(
        clear_history,
        [user_id],
        [openai_chatbot, anthropic_chatbot, gemini_chatbot, chat_history]
    )

# share=True publishes a temporary public Gradio link in addition to the
# local server.
demo.launch(share=True)
chatapp.db ADDED
Binary file (20.5 kB). View file
 
core.py CHANGED
@@ -2,156 +2,45 @@ import asyncio
2
  from typing import AsyncGenerator, List, Dict, Tuple
3
  from config import logger
4
  from api import ask_openai, ask_anthropic, ask_gemini
5
-
6
- async def query_model(
7
- query: str,
8
- providers: List[str],
9
- history: List[Dict[str, str]]
10
- ) -> AsyncGenerator[
11
- Tuple[str, List[Dict[str, str]], List[Dict[str, str]], List[Dict[str, str]], List[Dict[str, str]]],
12
- None
13
- ]:
14
- logger.info(f"Processing query with providers: {providers}")
15
- openai_response = ""
16
- anthropic_response = ""
17
- gemini_response = ""
18
-
19
- openai_messages = []
20
- anthropic_messages = []
21
- gemini_messages = []
22
-
23
- # Build message history for each provider
24
- for msg in history:
25
- if "user" in msg:
26
- openai_messages.append({"role": "user", "content": msg["user"]})
27
- anthropic_messages.append({"role": "user", "content": msg["user"]})
28
- gemini_messages.append({"role": "user", "content": msg["user"]})
29
- if msg.get("openai"):
30
- openai_messages.append({"role": "assistant", "content": msg["openai"]})
31
- if msg.get("anthropic"):
32
- anthropic_messages.append({"role": "assistant", "content": msg["anthropic"]})
33
- if msg.get("gemini"):
34
- gemini_messages.append({"role": "assistant", "content": msg["gemini"]})
35
-
36
- # Append the user query and prepare for assistant response
37
- if "OpenAI" in providers:
38
- openai_messages.append({"role": "user", "content": query})
39
- openai_messages.append({"role": "assistant", "content": ""})
40
- if "Anthropic" in providers:
41
- anthropic_messages.append({"role": "user", "content": query})
42
- anthropic_messages.append({"role": "assistant", "content": ""})
43
- if "Gemini" in providers:
44
- gemini_messages.append({"role": "user", "content": query})
45
- gemini_messages.append({"role": "assistant", "content": ""})
46
-
47
- # Yield initial state with user query
48
- logger.info(f"Yielding initial state with user query: {query}")
49
- yield "", openai_messages, anthropic_messages, gemini_messages, history
50
-
51
- tasks = []
52
- if "OpenAI" in providers:
53
- tasks.append(("OpenAI", ask_openai(query, history), openai_response, openai_messages))
54
- if "Anthropic" in providers:
55
- tasks.append(("Anthropic", ask_anthropic(query, history), anthropic_response, anthropic_messages))
56
- if "Gemini" in providers:
57
- tasks.append(("Gemini", ask_gemini(query, history), gemini_response, gemini_messages))
58
-
59
- async def collect_chunks(
60
- provider: str,
61
- generator: AsyncGenerator[str, None],
62
- response: str,
63
- messages: List[Dict[str, str]]
64
- ) -> AsyncGenerator[Tuple[str, str, List[Dict[str, str]]], None]:
65
- async for chunk in generator:
66
- response += chunk
67
- messages[-1] = {"role": "assistant", "content": response}
68
- yield provider, response, messages
69
-
70
- generator_states = [(provider, collect_chunks(provider, gen, resp, msgs), None) for provider, gen, resp, msgs in tasks]
71
- active_generators = generator_states[:]
72
-
73
- while active_generators:
74
- tasks_to_wait = []
75
- new_generator_states = []
76
-
77
- for provider, gen, active_task in active_generators:
78
- if active_task is None or active_task.done():
79
- try:
80
- task = asyncio.create_task(gen.__anext__())
81
- new_generator_states.append((provider, gen, task))
82
- tasks_to_wait.append(task)
83
- logger.debug(f"Created task for {provider}")
84
- except StopAsyncIteration:
85
- logger.info(f"Generator for {provider} completed")
86
- continue
87
- else:
88
- new_generator_states.append((provider, gen, active_task))
89
- tasks_to_wait.append(active_task)
90
-
91
- if not tasks_to_wait:
92
- break
93
-
94
- done, _ = await asyncio.wait(tasks_to_wait, return_when=asyncio.FIRST_COMPLETED)
95
-
96
- for provider, gen, task in new_generator_states:
97
- if task in done:
98
- try:
99
- provider, response, messages = task.result()
100
- if provider == "OpenAI":
101
- openai_response = response
102
- openai_messages = messages
103
- elif provider == "Anthropic":
104
- anthropic_response = response
105
- anthropic_messages = messages
106
- elif provider == "Gemini":
107
- gemini_response = response
108
- gemini_messages = messages
109
- logger.info(f"Yielding update for {provider}: {response[:50]}...")
110
- yield "", openai_messages, anthropic_messages, gemini_messages, history
111
- new_generator_states[new_generator_states.index((provider, gen, task))] = (provider, gen, None)
112
- except StopAsyncIteration:
113
- logger.info(f"Generator for {provider} completed")
114
- new_generator_states.remove((provider, gen, task))
115
- else:
116
- if (provider, gen, task) not in new_generator_states:
117
- new_generator_states.append((provider, gen, task))
118
-
119
- active_generators = new_generator_states
120
-
121
- updated_history = history + [{
122
- "user": query,
123
- "openai": openai_response.strip() if openai_response else "",
124
- "anthropic": anthropic_response.strip() if anthropic_response else "",
125
- "gemini": gemini_response.strip() if gemini_response else ""
126
- }]
127
-
128
- logger.info(f"Updated history: {updated_history}")
129
- yield "", openai_messages, anthropic_messages, gemini_messages, updated_history
130
-
131
- async def submit_query(
132
- query: str,
133
- providers: List[str],
134
- history: List[Dict[str, str]]
135
- ) -> AsyncGenerator[
136
- Tuple[str, List[Dict[str, str]], List[Dict[str, str]], List[Dict[str, str]], List[Dict[str, str]]],
137
- None
138
- ]:
139
- if not query.strip():
140
- msg = {"role": "assistant", "content": "Please enter a query."}
141
- yield "", [msg], [msg], [msg], history
142
- return
143
-
144
- if not providers:
145
- msg = {"role": "assistant", "content": "Please select at least one provider."}
146
- yield "", [msg], [msg], [msg], history
147
- return
148
-
149
  async for _, openai_msgs, anthropic_msgs, gemini_msgs, updated_history in query_model(query, providers, history):
150
- logger.info(f"Submitting update to UI: OpenAI: {openai_msgs[-1]['content'][:50] if openai_msgs else ''}, "
151
- f"Anthropic: {anthropic_msgs[-1]['content'][:50] if anthropic_msgs else ''}, "
152
- f"Gemini: {gemini_msgs[-1]['content'][:50] if gemini_msgs else ''}")
153
  yield "", openai_msgs, anthropic_msgs, gemini_msgs, updated_history
154
 
155
- def clear_history():
156
- logger.info("Clearing history")
157
- return [], [], [], []
 
 
 
 
 
2
  from typing import AsyncGenerator, List, Dict, Tuple
3
  from config import logger
4
  from api import ask_openai, ask_anthropic, ask_gemini
5
+ from database import Database
6
+ import json
7
+
8
+ db = Database()
9
+ db.connect()
10
+
11
def register(username, password, message=None):
    """Create a new account and return a human-readable status string.

    The *message* parameter is unused; it is kept for interface
    compatibility with existing callers.
    """
    created = db.add_user(username, password)
    return "Registration successful" if created else "Username already exists"
16
+
17
def login(username, password, message=None):
    """Authenticate a user and restore their most recent conversation.

    Returns a ``(status_message, user_id_or_None, history_list)`` tuple;
    on failure the id is None and the history is empty. The *message*
    parameter is unused and kept for interface compatibility.
    """
    user_id = db.get_user(username, password)
    if not user_id:
        return "Invalid credentials", None, []
    # Restore the latest saved conversation, if any, from its JSON form.
    saved = db.get_conversations(user_id)
    history = json.loads(saved) if saved else []
    return "Login successful", user_id, history
26
+
27
def logout():
    """Return the status message shown in the UI after logging out."""
    return "Logout successful"
29
+
30
def clear_history(user_id):
    """Delete the stored conversation for *user_id* and reset the UI.

    Returns empty lists for the three chatbot panes and the history state.
    """
    db.clear_conversation(user_id)
    empty = [], [], [], []
    return empty
33
+
34
async def submit_query(query, providers, history, user_id):
    """Stream model responses to the UI and persist the final history.

    Relays every ``(query_box, openai_msgs, anthropic_msgs, gemini_msgs,
    history)`` update yielded by ``query_model`` to Gradio, then saves the
    conversation once.

    Args:
        query: the user's question.
        providers: selected provider names ("OpenAI", "Anthropic", "Gemini").
        history: prior conversation as a list of per-turn dicts.
        user_id: DB id of the logged-in user, or None/"" when not logged in.
    """
    final_history = history
    async for _, openai_msgs, anthropic_msgs, gemini_msgs, updated_history in query_model(query, providers, history):
        final_history = updated_history
        yield "", openai_msgs, anthropic_msgs, gemini_msgs, updated_history
    # Persist exactly once, after streaming completes. The previous version
    # inserted a new conversations row on every streamed chunk, flooding the
    # table with intermediate snapshots, and saved rows even for anonymous
    # (not-logged-in) sessions.
    if user_id:
        db.add_conversation(user_id, json.dumps(final_history))
39
 
40
async def query_model(query: str, providers: List[str], history: List[Dict[str, str]]) -> AsyncGenerator[Tuple[str, List[Dict[str, str]], List[Dict[str, str]], List[Dict[str, str]], List[Dict[str, str]]], None]:
    """Stream responses from the selected providers concurrently.

    Yields ``(query_box_value, openai_msgs, anthropic_msgs, gemini_msgs,
    history)`` after every received chunk; the final yield carries the
    history extended with this exchange. On an unexpected error, yields an
    error message in the first slot with empty panes.

    This restores the streaming implementation that this commit replaced
    with a bare ``pass`` placeholder (which made the chat produce no
    output at all).
    """
    try:
        logger.info(f"Processing query with providers: {providers}")
        # Accumulated plain-text response and Gradio "messages"-format
        # chat list, per provider.
        responses = {"OpenAI": "", "Anthropic": "", "Gemini": ""}
        messages = {"OpenAI": [], "Anthropic": [], "Gemini": []}

        # Rebuild each provider's message list from the shared history:
        # the user turn goes to all panes, each assistant turn only to its
        # own provider's pane.
        for msg in history:
            if "user" in msg:
                for name in messages:
                    messages[name].append({"role": "user", "content": msg["user"]})
            for name, field in (("OpenAI", "openai"), ("Anthropic", "anthropic"), ("Gemini", "gemini")):
                if msg.get(field):
                    messages[name].append({"role": "assistant", "content": msg[field]})

        # Append the new user turn plus an empty assistant slot for each
        # selected provider; the slot is overwritten as chunks stream in.
        ask = {"OpenAI": ask_openai, "Anthropic": ask_anthropic, "Gemini": ask_gemini}
        active = [p for p in providers if p in ask]
        for name in active:
            messages[name].append({"role": "user", "content": query})
            messages[name].append({"role": "assistant", "content": ""})

        # Show the user's query immediately, before any model responds.
        yield "", messages["OpenAI"], messages["Anthropic"], messages["Gemini"], history

        async def collect(name):
            # Fold streamed chunks into the provider's trailing assistant
            # message; yields once per chunk so the caller can update the UI.
            async for chunk in ask[name](query, history):
                responses[name] += chunk
                messages[name][-1] = {"role": "assistant", "content": responses[name]}
                yield name

        # Drive all provider generators concurrently; re-yield a UI update
        # whenever any of them produces a chunk.
        gens = {name: collect(name) for name in active}
        pending = {asyncio.ensure_future(g.__anext__()): (name, g) for name, g in gens.items()}
        while pending:
            done, _ = await asyncio.wait(pending.keys(), return_when=asyncio.FIRST_COMPLETED)
            for task in done:
                name, gen = pending.pop(task)
                try:
                    task.result()
                except StopAsyncIteration:
                    logger.info(f"Generator for {name} completed")
                    continue
                yield "", messages["OpenAI"], messages["Anthropic"], messages["Gemini"], history
                pending[asyncio.ensure_future(gen.__anext__())] = (name, gen)

        # Record the completed exchange in the shared history format.
        updated_history = history + [{
            "user": query,
            "openai": responses["OpenAI"].strip(),
            "anthropic": responses["Anthropic"].strip(),
            "gemini": responses["Gemini"].strip(),
        }]
        logger.info(f"Updated history: {updated_history}")
        yield "", messages["OpenAI"], messages["Anthropic"], messages["Gemini"], updated_history
    except Exception as e:
        logger.error(f"Error in query_model: {e}")
        yield f"Error: An unexpected error occurred. {e}", [], [], [], history
database.py ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import sqlite3
2
+ from typing import Optional
3
+
4
class Database:
    """SQLite-backed persistence for users and their chat conversations.

    Call :meth:`connect` before using any other method; it opens the
    connection and creates the schema if missing.

    SECURITY NOTE(review): passwords are stored and compared in plain
    text. This should be replaced with salted hashing (e.g. hashlib
    scrypt/pbkdf2) — not changed here because it would invalidate every
    account in the shipped chatapp.db; migrate deliberately.
    """

    def __init__(self, db_path="chatapp.db"):
        # Path to the SQLite file; ":memory:" yields an in-memory DB.
        self.db_path = db_path
        self.conn = None  # set by connect()

    def connect(self):
        """Open the SQLite connection and ensure the schema exists.

        check_same_thread=False because the web server may invoke these
        methods from worker threads; sqlite3 serializes writes itself.
        """
        self.conn = sqlite3.connect(self.db_path, check_same_thread=False)
        self.create_tables()

    def close(self):
        """Close the connection if open (safe to call repeatedly)."""
        if self.conn is not None:
            self.conn.close()
            self.conn = None

    def create_tables(self):
        """Create the users and conversations tables if absent."""
        c = self.conn.cursor()
        c.execute('''CREATE TABLE IF NOT EXISTS users (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            username TEXT UNIQUE,
            password TEXT
        )''')
        c.execute('''CREATE TABLE IF NOT EXISTS conversations (
            id INTEGER PRIMARY KEY AUTOINCREMENT,
            user_id INTEGER,
            conversation TEXT,
            FOREIGN KEY(user_id) REFERENCES users(id)
        )''')
        self.conn.commit()

    def add_user(self, username, password):
        """Insert a new user; return True on success, False if the
        username is already taken (UNIQUE constraint violation)."""
        c = self.conn.cursor()
        try:
            c.execute("INSERT INTO users (username, password) VALUES (?, ?)", (username, password))
            self.conn.commit()
            return True
        except sqlite3.IntegrityError:
            return False

    def get_user(self, username, password) -> Optional[int]:
        """Return the user's id when the credentials match, else None."""
        c = self.conn.cursor()
        c.execute("SELECT id FROM users WHERE username=? AND password=?", (username, password))
        row = c.fetchone()
        return row[0] if row else None

    def add_conversation(self, user_id, conversation):
        """Append a serialized conversation snapshot for *user_id*.

        Each call inserts a new row; readers only consume the newest one
        (see get_conversations).
        """
        c = self.conn.cursor()
        c.execute("INSERT INTO conversations (user_id, conversation) VALUES (?, ?)", (user_id, conversation))
        self.conn.commit()

    def get_conversations(self, user_id):
        """Return the most recently saved conversation JSON string for
        *user_id*, or None if the user has no saved conversation."""
        c = self.conn.cursor()
        c.execute("SELECT conversation FROM conversations WHERE user_id=? ORDER BY id DESC LIMIT 1", (user_id,))
        row = c.fetchone()
        return row[0] if row else None

    def clear_conversation(self, user_id):
        """Delete every saved conversation row for *user_id*."""
        c = self.conn.cursor()
        c.execute("DELETE FROM conversations WHERE user_id=?", (user_id,))
        self.conn.commit()
requirements.txt CHANGED
@@ -1,3 +1,4 @@
1
  gradio==4.44.0
2
  httpx==0.27.2
3
- python-dotenv==1.0.1
 
 
1
  gradio==4.44.0
2
  httpx==0.27.2
3
+ python-dotenv==1.0.1
4
+ SQLAlchemy==2.0.24  # NOTE: not imported anywhere yet — database.py uses the stdlib sqlite3 module directly; adopt SQLAlchemy or drop this pin before release