WeVi committed on
Commit
9736fd1
·
verified ·
1 Parent(s): ccc722b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +21 -83
app.py CHANGED
@@ -1,66 +1,11 @@
1
- # File: README.md
2
-
3
- ## Side‑by‑Side AI — One prompt, answers from many models
4
-
5
- This Space shows Grok 4, ChatGPT (OpenAI), Gemini 2.5 Pro, DeepSeek, Claude Sonnet 4, and Perplexity Sonar Pro side‑by‑side in a single chat window. No tab‑hopping. You enter one prompt and compare answers instantly.
6
-
7
- ### Live features
8
- - One prompt → parallel calls to selected providers
9
- - Models shown in columns, updated as responses arrive
10
- - Optional system prompt, temperature, and per‑provider model names
11
- - Works with API keys set as **Space Secrets** (recommended) or typed at runtime
12
- - Clean, mobile‑friendly Gradio UI
13
-
14
- ### Supported providers
15
- - **OpenAI** (e.g., `gpt-4o`, `o4-mini`, etc.)
16
- - **Anthropic** (Claude Sonnet 3.7/4, etc.)
17
- - **Google Gemini** (e.g., `gemini-2.0-pro`, `gemini-2.0-flash`)
18
- - **xAI Grok** (via OpenAI‑compatible API, e.g., `grok-2-latest`)
19
- - **DeepSeek** (`deepseek-chat`) via OpenAI‑compatible API
20
- - **Perplexity** (`sonar-pro`) via OpenAI‑compatible API
21
-
22
- > You can enable or disable any provider from the sidebar.
23
-
24
- ### How to run on Hugging Face Spaces
25
- 1. Create a **Gradio** Space.
26
- 2. Add these files (`app.py`, `requirements.txt`, optional `Procfile`).
27
- 3. In **Settings → Secrets**, add any keys you have (you don’t need all of them):
28
- - `OPENAI_API_KEY`
29
- - `ANTHROPIC_API_KEY`
30
- - `GEMINI_API_KEY`
31
- - `XAI_API_KEY` (Grok)
32
- - `DEEPSEEK_API_KEY`
33
- - `PPLX_API_KEY` (Perplexity)
34
- 4. Deploy. The app auto‑detects which providers have keys.
35
-
36
- ### Notes
37
- - Some providers have usage limits and content rules. Your account is responsible for API costs.
38
- - If a provider is missing a key, its column will show a helpful message instead of failing.
39
- - Perplexity supports retrieval/web by default; this app sends plain prompts. You can add custom params in code if you need web search.
40
-
41
- ---
42
-
43
- # File: requirements.txt
44
-
45
- gradio>=4.40.0
46
- openai>=1.40.0
47
- anthropic>=0.34.2
48
- google-generativeai>=0.7.2
49
- aiohttp>=3.9.5
50
- pydantic>=2.8.2
51
- uvicorn==0.30.6
52
-
53
- ---
54
-
55
- # File: app.py
56
-
57
  import os
58
  import asyncio
59
  from typing import Dict, Any, Optional
60
 
61
  import gradio as gr
62
 
63
- # OpenAIcompatible SDK (also used for Grok, DeepSeek, Perplexity)
64
  from openai import OpenAI
65
 
66
  # Anthropic SDK
@@ -89,7 +34,9 @@ async def call_openai(prompt: str, system: str, model: str, api_key: Optional[st
89
  except Exception as e:
90
  return f"❌ OpenAI error: {e}"
91
 
92
- async def call_openai_compatible(prompt: str, system: str, model: str, base_url: str, key_env: str, fallback_name: str, api_key_override: Optional[str] = None) -> str:
 
 
93
  key = api_key_override or os.getenv(key_env)
94
  if not key:
95
  return f"⚠️ {fallback_name} key not set. Add {key_env} in Secrets or enter in sidebar."
@@ -116,7 +63,7 @@ async def call_anthropic(prompt: str, system: str, model: str, api_key: Optional
116
  max_tokens=1200,
117
  messages=[{"role": "user", "content": prompt}],
118
  )
119
- return "".join([b.text for b in msg.content if getattr(b, 'type', '') == 'text'])
120
  except Exception as e:
121
  return f"❌ Anthropic error: {e}"
122
 
@@ -133,16 +80,18 @@ async def call_gemini(prompt: str, system: str, model: str, api_key: Optional[st
133
  except Exception as e:
134
  return f"❌ Gemini error: {e}"
135
 
136
- # Wrappers for specific OpenAIcompatible providers
137
  async def call_grok(prompt: str, system: str, model: str, api_key: Optional[str]) -> str:
138
- return await call_openai_compatible(prompt, system, model or "grok-2-latest", "https://api.x.ai/v1", "XAI_API_KEY", "Grok", api_key)
 
139
 
140
  async def call_deepseek(prompt: str, system: str, model: str, api_key: Optional[str]) -> str:
141
- return await call_openai_compatible(prompt, system, model or "deepseek-chat", "https://api.deepseek.com", "DEEPSEEK_API_KEY", "DeepSeek", api_key)
 
142
 
143
  async def call_perplexity(prompt: str, system: str, model: str, api_key: Optional[str]) -> str:
144
- # Perplexity uses OpenAI‑compatible API; models: sonar, sonar-pro, sonar-reasoning, etc.
145
- return await call_openai_compatible(prompt, system, model or "sonar-pro", "https://api.perplexity.ai", "PPLX_API_KEY", "Perplexity", api_key)
146
 
147
  PROVIDERS = {
148
  "OpenAI": call_openai,
@@ -184,10 +133,8 @@ async def run_all(prompt: str,
184
  if not tasks:
185
  return {"status": "Select at least one provider in the sidebar."}
186
 
187
- # Run all providers in parallel
188
  responses = await asyncio.gather(*tasks)
189
 
190
- # Map results back to names in order
191
  i = 0
192
  for name in list(results_map.keys()):
193
  results_map[name] = responses[i]
@@ -199,21 +146,18 @@ async def run_all(prompt: str,
199
 
200
  def build_ui():
201
  with gr.Blocks(fill_height=True, theme=gr.themes.Soft()) as demo:
202
- gr.Markdown("""
203
- # Side‑by‑Side AI
204
- Compare answers from Grok 4, ChatGPT, Gemini 2.5 Pro, DeepSeek, Claude Sonnet 4, and Perplexity Sonar Pro — in one chat window.
205
- """)
206
 
207
  with gr.Row():
208
  with gr.Column(scale=1):
209
  gr.Markdown("### Settings")
210
  system = gr.Textbox(value=DEFAULT_SYSTEM_PROMPT, label="System prompt", lines=3)
211
- temp = gr.Slider(0.0, 1.0, value=0.7, step=0.05, label="Temperature (some SDKs ignore this)")
212
- gr.Markdown("**Providers** (enable and set model names as you like)")
213
 
 
214
  use_openai = gr.Checkbox(value=True, label="OpenAI (ChatGPT)")
215
  openai_model = gr.Textbox(value="gpt-4o-mini", label="OpenAI model")
216
- openai_key = gr.Textbox(value="", label="OpenAI API key (optional — uses env if empty)", type="password")
217
 
218
  use_claude = gr.Checkbox(value=True, label="Anthropic Claude")
219
  claude_model = gr.Textbox(value="claude-3-5-sonnet-latest", label="Claude model")
@@ -236,7 +180,7 @@ def build_ui():
236
  pplx_key = gr.Textbox(value="", label="Perplexity API key (optional)", type="password")
237
 
238
  with gr.Column(scale=2):
239
- prompt = gr.Textbox(placeholder="Ask anything once. All providers will answer side‑by‑side.", label="Your prompt", lines=6)
240
  ask = gr.Button("Ask all")
241
  status = gr.Markdown(visible=False)
242
 
@@ -254,7 +198,6 @@ def build_ui():
254
  use_openai, use_claude, use_gemini, use_grok, use_deepseek, use_perplexity,
255
  openai_model, claude_model, gemini_model, grok_model, deepseek_model, pplx_model,
256
  openai_key, claude_key, gemini_key, grok_key, deepseek_key, pplx_key):
257
- # Run async in sync handler
258
  results = asyncio.run(run_all(prompt, system, temp,
259
  use_openai, use_claude, use_gemini, use_grok, use_deepseek, use_perplexity,
260
  openai_model, claude_model, gemini_model, grok_model, deepseek_model, pplx_model,
@@ -263,7 +206,7 @@ def build_ui():
263
  if "status" in results:
264
  return results["status"], "", "", "", "", "", ""
265
 
266
- return ("", # status
267
  results.get("OpenAI", ""),
268
  results.get("Claude", ""),
269
  results.get("Gemini", ""),
@@ -278,14 +221,9 @@ def build_ui():
278
  openai_key, claude_key, gemini_key, grok_key, deepseek_key, pplx_key],
279
  [status, out_openai, out_claude, out_gemini, out_grok, out_deepseek, out_pplx])
280
 
281
- gr.Markdown("Built with ❤️ using Gradio. Keys are stored only in your Space runtime.")
282
 
283
  return demo
284
 
285
  if __name__ == "__main__":
286
- build_ui().queue(concurrency_count=8).launch()
287
-
288
- ---
289
-
290
- # (Optional) File: Procfile
291
- web: python app.py
 
1
+ # app.py
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
2
  import os
3
  import asyncio
4
  from typing import Dict, Any, Optional
5
 
6
  import gradio as gr
7
 
8
+ # OpenAI-compatible SDK (also used for Grok, DeepSeek, Perplexity)
9
  from openai import OpenAI
10
 
11
  # Anthropic SDK
 
34
  except Exception as e:
35
  return f"❌ OpenAI error: {e}"
36
 
37
+ async def call_openai_compatible(prompt: str, system: str, model: str,
38
+ base_url: str, key_env: str, fallback_name: str,
39
+ api_key_override: Optional[str] = None) -> str:
40
  key = api_key_override or os.getenv(key_env)
41
  if not key:
42
  return f"⚠️ {fallback_name} key not set. Add {key_env} in Secrets or enter in sidebar."
 
63
  max_tokens=1200,
64
  messages=[{"role": "user", "content": prompt}],
65
  )
66
+ return "".join([b.text for b in msg.content if getattr(b, "type", "") == "text"])
67
  except Exception as e:
68
  return f"❌ Anthropic error: {e}"
69
 
 
80
  except Exception as e:
81
  return f"❌ Gemini error: {e}"
82
 
83
+ # Wrappers for specific OpenAI-compatible providers
84
  async def call_grok(prompt: str, system: str, model: str, api_key: Optional[str]) -> str:
85
+ return await call_openai_compatible(prompt, system, model or "grok-2-latest",
86
+ "https://api.x.ai/v1", "XAI_API_KEY", "Grok", api_key)
87
 
88
  async def call_deepseek(prompt: str, system: str, model: str, api_key: Optional[str]) -> str:
89
+ return await call_openai_compatible(prompt, system, model or "deepseek-chat",
90
+ "https://api.deepseek.com", "DEEPSEEK_API_KEY", "DeepSeek", api_key)
91
 
92
  async def call_perplexity(prompt: str, system: str, model: str, api_key: Optional[str]) -> str:
93
+ return await call_openai_compatible(prompt, system, model or "sonar-pro",
94
+ "https://api.perplexity.ai", "PPLX_API_KEY", "Perplexity", api_key)
95
 
96
  PROVIDERS = {
97
  "OpenAI": call_openai,
 
133
  if not tasks:
134
  return {"status": "Select at least one provider in the sidebar."}
135
 
 
136
  responses = await asyncio.gather(*tasks)
137
 
 
138
  i = 0
139
  for name in list(results_map.keys()):
140
  results_map[name] = responses[i]
 
146
 
147
  def build_ui():
148
  with gr.Blocks(fill_height=True, theme=gr.themes.Soft()) as demo:
149
+ gr.Markdown("# Side-by-Side AI\nCompare Grok, ChatGPT, Gemini, DeepSeek, Claude, and Perplexity — in one chat window.")
 
 
 
150
 
151
  with gr.Row():
152
  with gr.Column(scale=1):
153
  gr.Markdown("### Settings")
154
  system = gr.Textbox(value=DEFAULT_SYSTEM_PROMPT, label="System prompt", lines=3)
155
+ temp = gr.Slider(0.0, 1.0, value=0.7, step=0.05, label="Temperature")
 
156
 
157
+ # Provider toggles
158
  use_openai = gr.Checkbox(value=True, label="OpenAI (ChatGPT)")
159
  openai_model = gr.Textbox(value="gpt-4o-mini", label="OpenAI model")
160
+ openai_key = gr.Textbox(value="", label="OpenAI API key (optional)", type="password")
161
 
162
  use_claude = gr.Checkbox(value=True, label="Anthropic Claude")
163
  claude_model = gr.Textbox(value="claude-3-5-sonnet-latest", label="Claude model")
 
180
  pplx_key = gr.Textbox(value="", label="Perplexity API key (optional)", type="password")
181
 
182
  with gr.Column(scale=2):
183
+ prompt = gr.Textbox(placeholder="Ask once, see all providers' answers.", label="Your prompt", lines=6)
184
  ask = gr.Button("Ask all")
185
  status = gr.Markdown(visible=False)
186
 
 
198
  use_openai, use_claude, use_gemini, use_grok, use_deepseek, use_perplexity,
199
  openai_model, claude_model, gemini_model, grok_model, deepseek_model, pplx_model,
200
  openai_key, claude_key, gemini_key, grok_key, deepseek_key, pplx_key):
 
201
  results = asyncio.run(run_all(prompt, system, temp,
202
  use_openai, use_claude, use_gemini, use_grok, use_deepseek, use_perplexity,
203
  openai_model, claude_model, gemini_model, grok_model, deepseek_model, pplx_model,
 
206
  if "status" in results:
207
  return results["status"], "", "", "", "", "", ""
208
 
209
+ return ("",
210
  results.get("OpenAI", ""),
211
  results.get("Claude", ""),
212
  results.get("Gemini", ""),
 
221
  openai_key, claude_key, gemini_key, grok_key, deepseek_key, pplx_key],
222
  [status, out_openai, out_claude, out_gemini, out_grok, out_deepseek, out_pplx])
223
 
224
+ gr.Markdown("Built with ❤️ using Gradio. Keys stay inside this Space only.")
225
 
226
  return demo
227
 
228
  if __name__ == "__main__":
229
+ build_ui().queue(concurrency_count=8).launch()