Ganesh Chintalapati committed on
Commit
9c644da
·
1 Parent(s): f45bc4f

modular code refactor

Browse files
Files changed (2) hide show
  1. app.py +2 -3
  2. core.py +24 -24
app.py CHANGED
@@ -1,13 +1,12 @@
1
-
2
  import gradio as gr
3
  from core import submit_query, clear_history
4
 
5
- with gr.Blocks(theme=gr.themes.Soft(), css="""
6
  .full-height { height: 100%; display: flex; align-items: stretch; min-height: 40px; }
7
  .full-height button { height: 100%; padding: 8px 16px; }
8
  .providers-row { height: 100%; display: flex; align-items: stretch; min-height: 40px; }
9
  .providers-row .checkbox-group { height: 100%; display: flex; flex-direction: row; align-items: center; gap: 10px; }
10
- """) as demo:
11
  gr.Markdown("# Multi-Model Chat")
12
  gr.Markdown("Chat with OpenAI, Anthropic, or Gemini. Select providers and compare responses side by side!")
13
 
 
 
1
  import gradio as gr
2
  from core import submit_query, clear_history
3
 
4
+ with gr.Blocks(theme=gr.themes.Soft(), css="""
5
  .full-height { height: 100%; display: flex; align-items: stretch; min-height: 40px; }
6
  .full-height button { height: 100%; padding: 8px 16px; }
7
  .providers-row { height: 100%; display: flex; align-items: stretch; min-height: 40px; }
8
  .providers-row .checkbox-group { height: 100%; display: flex; flex-direction: row; align-items: center; gap: 10px; }
9
+ """) as demo:
10
  gr.Markdown("# Multi-Model Chat")
11
  gr.Markdown("Chat with OpenAI, Anthropic, or Gemini. Select providers and compare responses side by side!")
12
 
core.py CHANGED
@@ -1,4 +1,3 @@
1
-
2
  import asyncio
3
  from typing import AsyncGenerator, List, Dict, Tuple
4
  from config import logger
@@ -18,12 +17,15 @@ async def query_model(query: str, providers: List[str], history: List[Dict[str,
18
  openai_messages.append({"role": "user", "content": msg["user"]})
19
  anthropic_messages.append({"role": "user", "content": msg["user"]})
20
  gemini_messages.append({"role": "user", "content": msg["user"]})
21
- if "openai" in msg and msg["openai"]:
22
- openai_messages.append({"role": "assistant", "content": msg["openai"]})
23
- if "anthropic" in msg and msg["anthropic"]:
24
- anthropic_messages.append({"role": "assistant", "content": msg["anthropic"]})
25
- if "gemini" in msg and msg["gemini"]:
26
- gemini_messages.append({"role": "assistant", "content": msg["gemini"]})
 
 
 
27
 
28
  if "OpenAI" in providers:
29
  openai_messages.append({"role": "user", "content": query})
@@ -100,19 +102,22 @@ async def query_model(query: str, providers: List[str], history: List[Dict[str,
100
 
101
  active_generators = new_generator_states
102
 
103
- updated_entry = {
104
- "user": query,
105
- "openai": openai_response if openai_response.strip() and not openai_response.startswith("Error:") else "",
106
- "anthropic": anthropic_response if anthropic_response.strip() and not anthropic_response.startswith("Error:") else "",
107
- "gemini": gemini_response if gemini_response.strip() and not gemini_response.startswith("Error:") else ""
108
- }
 
109
 
110
- updated_history = history + [updated_entry]
111
- logger.info(f"Updated structured history: {updated_history}")
 
112
 
113
- yield "", openai_messages, anthropic_messages, gemini_messages
114
 
115
- async def submit_query(query: str, providers: List[str], history: List[Dict[str, str]]) -> AsyncGenerator[Tuple[str, List[Dict[str, str]], List[Dict[str, str]], List[Dict[str, str]], List[Dict[str, str]]], None]:
 
116
 
117
  if not query.strip():
118
  msg = {"role": "assistant", "content": "Please enter a query."}
@@ -125,13 +130,8 @@ async def submit_query(query: str, providers: List[str], history: List[Dict[str,
125
  return
126
 
127
  final_history = history.copy()
128
- async for _, openai_messages, anthropic_messages, gemini_messages in query_model(query, providers, history):
129
- final_history = history + [{
130
- "user": query,
131
- "openai": openai_messages[-1]["content"] if openai_messages else "",
132
- "anthropic": anthropic_messages[-1]["content"] if anthropic_messages else "",
133
- "gemini": gemini_messages[-1]["content"] if gemini_messages else ""
134
- }]
135
  yield "", openai_messages, anthropic_messages, gemini_messages, final_history
136
 
137
  yield "", openai_messages, anthropic_messages, gemini_messages, final_history
 
 
1
  import asyncio
2
  from typing import AsyncGenerator, List, Dict, Tuple
3
  from config import logger
 
17
  openai_messages.append({"role": "user", "content": msg["user"]})
18
  anthropic_messages.append({"role": "user", "content": msg["user"]})
19
  gemini_messages.append({"role": "user", "content": msg["user"]})
20
+ if msg["bot"]:
21
+ lines = msg["bot"].split("\n\n")
22
+ for line in lines:
23
+ if line.startswith("[OpenAI]:"):
24
+ openai_messages.append({"role": "assistant", "content": line[len("[OpenAI]:"):].strip()})
25
+ elif line.startswith("[Anthropic]:"):
26
+ anthropic_messages.append({"role": "assistant", "content": line[len("[Anthropic]:"):].strip()})
27
+ elif line.startswith("[Gemini]:"):
28
+ gemini_messages.append({"role": "assistant", "content": line[len("[Gemini]:"):].strip()})
29
 
30
  if "OpenAI" in providers:
31
  openai_messages.append({"role": "user", "content": query})
 
102
 
103
  active_generators = new_generator_states
104
 
105
+ responses = []
106
+ if openai_response.strip() and not openai_response.startswith("Error:"):
107
+ responses.append(f"[OpenAI]: {openai_response}")
108
+ if anthropic_response.strip() and not anthropic_response.startswith("Error:"):
109
+ responses.append(f"[Anthropic]: {anthropic_response}")
110
+ if gemini_response.strip() and not gemini_response.startswith("Error:"):
111
+ responses.append(f"[Gemini]: {gemini_response}")
112
 
113
+ combined_response = "\n\n".join(responses) if responses else "No valid responses received."
114
+ updated_history = history + [{"user": query, "bot": combined_response}]
115
+ logger.info(f"Updated history: {updated_history}")
116
 
117
+ yield combined_response, openai_messages, anthropic_messages, gemini_messages
118
 
119
+ async def submit_query(query: str, providers: List[str], history: List[Dict[str, str]]) \
120
+ -> AsyncGenerator[Tuple[str, List[Dict[str, str]], List[Dict[str, str]], List[Dict[str, str]], List[Dict[str, str]]], None]:
121
 
122
  if not query.strip():
123
  msg = {"role": "assistant", "content": "Please enter a query."}
 
130
  return
131
 
132
  final_history = history.copy()
133
+ async for response_chunk, openai_messages, anthropic_messages, gemini_messages in query_model(query, providers, history):
134
+ final_history = history + [{"user": query, "bot": response_chunk}]
 
 
 
 
 
135
  yield "", openai_messages, anthropic_messages, gemini_messages, final_history
136
 
137
  yield "", openai_messages, anthropic_messages, gemini_messages, final_history