Spaces:
Sleeping
Sleeping
| # app.py | |
| import os | |
| import asyncio | |
| from typing import Dict, Any, Optional | |
| import gradio as gr | |
| # OpenAI-compatible SDK (also used for Grok, DeepSeek, Perplexity) | |
| from openai import OpenAI | |
| # Anthropic SDK | |
| import anthropic | |
| # Google Gemini SDK | |
| import google.generativeai as genai | |
| DEFAULT_SYSTEM_PROMPT = "You are a helpful, concise assistant." | |
| # -------- Provider adapters -------- # | |
async def call_openai(prompt: str, system: str, model: str, api_key: Optional[str],
                      temperature: float = 0.7) -> str:
    """Query OpenAI chat completions and return the reply text.

    Args:
        prompt: The user message.
        system: System prompt; falls back to DEFAULT_SYSTEM_PROMPT when empty.
        model: Model name; falls back to "gpt-4o-mini" when empty.
        api_key: Explicit key; falls back to the OPENAI_API_KEY env var.
        temperature: Sampling temperature. New keyword with a default equal to
            the previously hard-coded 0.7, so existing callers are unaffected.

    Returns:
        The assistant reply, or a human-readable warning/error string
        (this function never raises — errors are reported as text).
    """
    key = api_key or os.getenv("OPENAI_API_KEY")
    if not key:
        return "⚠️ OpenAI key not set. Add OPENAI_API_KEY in Secrets or enter in sidebar."
    client = OpenAI(api_key=key)
    try:
        # The OpenAI SDK call is synchronous/blocking; run it in a worker
        # thread so asyncio.gather() in run_all() actually overlaps provider
        # requests instead of serializing them on the event loop.
        resp = await asyncio.to_thread(
            client.chat.completions.create,
            model=model or "gpt-4o-mini",
            messages=[{"role": "system", "content": system or DEFAULT_SYSTEM_PROMPT},
                      {"role": "user", "content": prompt}],
            temperature=temperature,
        )
        return resp.choices[0].message.content
    except Exception as e:
        return f"❌ OpenAI error: {e}"
async def call_openai_compatible(prompt: str, system: str, model: str,
                                 base_url: str, key_env: str, fallback_name: str,
                                 api_key_override: Optional[str] = None) -> str:
    """Query an OpenAI-compatible endpoint (Grok, DeepSeek, Perplexity).

    Args:
        prompt: The user message.
        system: System prompt; falls back to DEFAULT_SYSTEM_PROMPT when empty.
        model: Model name passed through verbatim (callers supply defaults).
        base_url: Provider's OpenAI-compatible API base URL.
        key_env: Environment variable holding the provider key.
        fallback_name: Human-readable provider name for messages.
        api_key_override: Explicit key; falls back to os.getenv(key_env).

    Returns:
        The assistant reply, or a warning/error string (never raises).
    """
    key = api_key_override or os.getenv(key_env)
    if not key:
        return f"⚠️ {fallback_name} key not set. Add {key_env} in Secrets or enter in sidebar."
    client = OpenAI(api_key=key, base_url=base_url)
    try:
        # Blocking SDK call → worker thread, so concurrent providers in
        # run_all() truly run in parallel rather than blocking the loop.
        resp = await asyncio.to_thread(
            client.chat.completions.create,
            model=model,
            messages=[{"role": "system", "content": system or DEFAULT_SYSTEM_PROMPT},
                      {"role": "user", "content": prompt}],
        )
        return resp.choices[0].message.content
    except Exception as e:
        return f"❌ {fallback_name} error: {e}"
async def call_anthropic(prompt: str, system: str, model: str, api_key: Optional[str]) -> str:
    """Query Anthropic's Messages API and return the concatenated text blocks.

    Args:
        prompt: The user message.
        system: System prompt; falls back to DEFAULT_SYSTEM_PROMPT when empty.
        model: Model name; falls back to "claude-3-5-sonnet-latest" when empty.
        api_key: Explicit key; falls back to the ANTHROPIC_API_KEY env var.

    Returns:
        The reply text (only blocks of type "text" are kept), or a
        warning/error string (never raises).
    """
    key = api_key or os.getenv("ANTHROPIC_API_KEY")
    if not key:
        return "⚠️ Anthropic key not set. Add ANTHROPIC_API_KEY in Secrets or enter in sidebar."
    client = anthropic.Anthropic(api_key=key)
    try:
        # The Anthropic SDK call blocks; offload to a thread so that
        # asyncio.gather() in run_all() overlaps provider requests.
        msg = await asyncio.to_thread(
            client.messages.create,
            model=model or "claude-3-5-sonnet-latest",
            system=system or DEFAULT_SYSTEM_PROMPT,
            max_tokens=1200,
            messages=[{"role": "user", "content": prompt}],
        )
        return "".join([b.text for b in msg.content if getattr(b, "type", "") == "text"])
    except Exception as e:
        return f"❌ Anthropic error: {e}"
async def call_gemini(prompt: str, system: str, model: str, api_key: Optional[str]) -> str:
    """Query Google Gemini via google-generativeai and return the reply text.

    Args:
        prompt: The user message.
        system: System instruction; falls back to DEFAULT_SYSTEM_PROMPT when empty.
        model: Model name; falls back to "gemini-2.0-pro" when empty
            (NOTE(review): verify this model id is still valid upstream).
        api_key: Explicit key; falls back to the GEMINI_API_KEY env var.

    Returns:
        The reply text, or a warning/error string (never raises).
    """
    key = api_key or os.getenv("GEMINI_API_KEY")
    if not key:
        return "⚠️ Gemini key not set. Add GEMINI_API_KEY in Secrets or enter in sidebar."
    try:
        genai.configure(api_key=key)
        mname = model or "gemini-2.0-pro"
        model_obj = genai.GenerativeModel(mname, system_instruction=(system or DEFAULT_SYSTEM_PROMPT))
        # generate_content() is a blocking network call; run it in a worker
        # thread so asyncio.gather() in run_all() can overlap providers.
        resp = await asyncio.to_thread(model_obj.generate_content, prompt)
        return resp.text
    except Exception as e:
        return f"❌ Gemini error: {e}"
| # Wrappers for specific OpenAI-compatible providers | |
async def call_grok(prompt: str, system: str, model: str, api_key: Optional[str]) -> str:
    """Ask xAI Grok through its OpenAI-compatible endpoint."""
    chosen = model or "grok-2-latest"
    return await call_openai_compatible(
        prompt, system, chosen,
        "https://api.x.ai/v1", "XAI_API_KEY", "Grok", api_key,
    )
async def call_deepseek(prompt: str, system: str, model: str, api_key: Optional[str]) -> str:
    """Ask DeepSeek through its OpenAI-compatible endpoint."""
    chosen = model or "deepseek-chat"
    return await call_openai_compatible(
        prompt, system, chosen,
        "https://api.deepseek.com", "DEEPSEEK_API_KEY", "DeepSeek", api_key,
    )
async def call_perplexity(prompt: str, system: str, model: str, api_key: Optional[str]) -> str:
    """Ask Perplexity Sonar through its OpenAI-compatible endpoint."""
    chosen = model or "sonar-pro"
    return await call_openai_compatible(
        prompt, system, chosen,
        "https://api.perplexity.ai", "PPLX_API_KEY", "Perplexity", api_key,
    )
# Dispatch table: UI display name -> async provider adapter.
# NOTE(review): nothing else in this file reads PROVIDERS — run_all()
# references the adapter functions directly — so this mapping appears
# unused here; presumably kept for external import or future refactoring.
PROVIDERS = {
    "OpenAI": call_openai,
    "Claude": call_anthropic,
    "Gemini": call_gemini,
    "Grok": call_grok,
    "DeepSeek": call_deepseek,
    "Perplexity": call_perplexity,
}
| # -------- App logic -------- # | |
async def run_all(prompt: str,
                  system: str,
                  temperature: float,
                  use_openai: bool, use_claude: bool, use_gemini: bool, use_grok: bool, use_deepseek: bool, use_perplexity: bool,
                  openai_model: str, claude_model: str, gemini_model: str, grok_model: str, deepseek_model: str, pplx_model: str,
                  openai_key: str, claude_key: str, gemini_key: str, grok_key: str, deepseek_key: str, pplx_key: str
                  ) -> Dict[str, Any]:
    """Fan a single prompt out to every enabled provider concurrently.

    Returns:
        A mapping of provider display name -> reply string, in the fixed
        provider order below; or {"status": <message>} when the prompt is
        empty or no provider is enabled.

    NOTE(review): `temperature` is accepted but never forwarded to any
    provider adapter, so the UI slider currently has no effect.
    TODO: wire it through once the adapters accept it.
    """
    if not prompt or not prompt.strip():
        return {"status": "Please enter a prompt."}

    # One (enabled?, display name, adapter, model, key) row per provider.
    specs = [
        (use_openai, "OpenAI", call_openai, openai_model, openai_key),
        (use_claude, "Claude", call_anthropic, claude_model, claude_key),
        (use_gemini, "Gemini", call_gemini, gemini_model, gemini_key),
        (use_grok, "Grok", call_grok, grok_model, grok_key),
        (use_deepseek, "DeepSeek", call_deepseek, deepseek_model, deepseek_key),
        (use_perplexity, "Perplexity", call_perplexity, pplx_model, pplx_key),
    ]
    names = [name for enabled, name, _, _, _ in specs if enabled]
    # Empty-string keys become None so adapters fall back to env vars.
    coros = [func(prompt, system, model, key or None)
             for enabled, _, func, model, key in specs if enabled]
    if not coros:
        return {"status": "Select at least one provider in the sidebar."}
    # Adapters catch their own exceptions and return strings, so gather()
    # yields one text per provider in the same order as `names`.
    responses = await asyncio.gather(*coros)
    return dict(zip(names, responses))
| # -------- UI -------- # | |
def build_ui():
    """Construct the Gradio Blocks app: a settings sidebar (per-provider
    toggle, model name, and optional API key), a shared prompt box, and one
    Markdown output panel per provider.

    Bug fix: `status` used to be created with visible=False and nothing ever
    toggled its visibility, so the "enter a prompt" / "select a provider"
    messages returned by the click handler could never be seen. It is now
    always visible; an empty string simply renders nothing.

    Returns:
        The assembled gr.Blocks demo (caller queues and launches it).
    """
    with gr.Blocks(fill_height=True, theme=gr.themes.Soft()) as demo:
        gr.Markdown("# Side-by-Side AI\nCompare Grok, ChatGPT, Gemini, DeepSeek, Claude, and Perplexity — in one chat window.")
        with gr.Row():
            with gr.Column(scale=1):
                gr.Markdown("### Settings")
                system = gr.Textbox(value=DEFAULT_SYSTEM_PROMPT, label="System prompt", lines=3)
                temp = gr.Slider(0.0, 1.0, value=0.7, step=0.05, label="Temperature")
                # Provider toggles
                use_openai = gr.Checkbox(value=True, label="OpenAI (ChatGPT)")
                openai_model = gr.Textbox(value="gpt-4o-mini", label="OpenAI model")
                openai_key = gr.Textbox(value="", label="OpenAI API key (optional)", type="password")
                use_claude = gr.Checkbox(value=True, label="Anthropic Claude")
                claude_model = gr.Textbox(value="claude-3-5-sonnet-latest", label="Claude model")
                claude_key = gr.Textbox(value="", label="Anthropic API key (optional)", type="password")
                use_gemini = gr.Checkbox(value=True, label="Google Gemini")
                gemini_model = gr.Textbox(value="gemini-2.0-pro", label="Gemini model")
                gemini_key = gr.Textbox(value="", label="Gemini API key (optional)", type="password")
                use_grok = gr.Checkbox(value=True, label="xAI Grok")
                grok_model = gr.Textbox(value="grok-2-latest", label="Grok model")
                grok_key = gr.Textbox(value="", label="xAI API key (optional)", type="password")
                use_deepseek = gr.Checkbox(value=True, label="DeepSeek")
                deepseek_model = gr.Textbox(value="deepseek-chat", label="DeepSeek model")
                deepseek_key = gr.Textbox(value="", label="DeepSeek API key (optional)", type="password")
                use_perplexity = gr.Checkbox(value=True, label="Perplexity Sonar")
                pplx_model = gr.Textbox(value="sonar-pro", label="Perplexity model")
                pplx_key = gr.Textbox(value="", label="Perplexity API key (optional)", type="password")
            with gr.Column(scale=2):
                prompt = gr.Textbox(placeholder="Ask once, see all providers' answers.", label="Your prompt", lines=6)
                ask = gr.Button("Ask all")
                # Fix: was gr.Markdown(visible=False), which hid every status
                # message forever (the handler only updates the value, never
                # the visibility). Visible + empty string shows nothing.
                status = gr.Markdown()
                with gr.Row():
                    out_openai = gr.Markdown(label="OpenAI")
                    out_claude = gr.Markdown(label="Claude")
                    out_gemini = gr.Markdown(label="Gemini")
                with gr.Row():
                    out_grok = gr.Markdown(label="Grok")
                    out_deepseek = gr.Markdown(label="DeepSeek")
                    out_pplx = gr.Markdown(label="Perplexity")

        def on_click(prompt, system, temp,
                     use_openai, use_claude, use_gemini, use_grok, use_deepseek, use_perplexity,
                     openai_model, claude_model, gemini_model, grok_model, deepseek_model, pplx_model,
                     openai_key, claude_key, gemini_key, grok_key, deepseek_key, pplx_key):
            # Gradio runs sync handlers off the main event loop, so
            # asyncio.run() is safe here.
            results = asyncio.run(run_all(prompt, system, temp,
                                          use_openai, use_claude, use_gemini, use_grok, use_deepseek, use_perplexity,
                                          openai_model, claude_model, gemini_model, grok_model, deepseek_model, pplx_model,
                                          openai_key, claude_key, gemini_key, grok_key, deepseek_key, pplx_key))
            if "status" in results:
                # Nothing ran: show the message and clear all output panels.
                return results["status"], "", "", "", "", "", ""
            return ("",
                    results.get("OpenAI", ""),
                    results.get("Claude", ""),
                    results.get("Gemini", ""),
                    results.get("Grok", ""),
                    results.get("DeepSeek", ""),
                    results.get("Perplexity", ""))

        ask.click(on_click,
                  [prompt, system, temp,
                   use_openai, use_claude, use_gemini, use_grok, use_deepseek, use_perplexity,
                   openai_model, claude_model, gemini_model, grok_model, deepseek_model, pplx_model,
                   openai_key, claude_key, gemini_key, grok_key, deepseek_key, pplx_key],
                  [status, out_openai, out_claude, out_gemini, out_grok, out_deepseek, out_pplx])
        gr.Markdown("Built with ❤️ using Gradio. Keys stay inside this Space only.")
    return demo
if __name__ == "__main__":
    # Entry point for local runs / the hosted Space: build the UI,
    # enable request queuing, and start the server.
    app = build_ui()
    app.queue().launch()