# NOTE: the lines above this file's header ("Spaces / Sleeping") were
# Hugging Face Spaces page chrome captured by the scrape, not source code.
# yellowflash_with_perplexity.py
# TEST ONLY: hardcoded keys included (do NOT publish)
import os
import time
import traceback

import gradio as gr
import requests
# ---------------------------
# API configuration (TESTING)
# ---------------------------
# SECURITY: the literal keys below are committed to source and must be
# treated as compromised — rotate them before any release. They remain
# only as a test-time fallback; real deployments should provide the
# GEMINI_KEY / GROQ_KEY environment variables instead.
GEMINI_KEY = os.environ.get("GEMINI_KEY", "AIzaSyAPfDiu2V_aD6un00qHt5bkISm6C0Pkx7o")
GEMINI_URL = "https://generativelanguage.googleapis.com/v1beta/models/gemini-2.0-flash:generateContent"
GROQ_KEY = os.environ.get("GROQ_KEY", "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")
GROQ_URL = "https://api.groq.com/openai/v1/chat/completions"
GROQ_MODEL = "meta-llama/llama-4-scout-17b-16e-instruct"
# ---------------------------
# Helpers
# ---------------------------
def post_with_retries(url, headers, payload, timeout=18, max_retries=2):
    """POST `payload` as JSON to `url`, retrying transient HTTP failures.

    Args:
        url: Target endpoint URL.
        headers: HTTP headers dict.
        payload: JSON-serializable request body.
        timeout: Per-attempt timeout in seconds.
        max_retries: Total number of attempts (the name is historical;
            it is the attempt count, not the count of extra retries).

    Returns:
        The successful `requests.Response`.

    Raises:
        requests.RequestException: the last error, if every attempt fails.
        RuntimeError: if max_retries < 1 (loop body never runs).
    """
    for attempt in range(max_retries):
        try:
            resp = requests.post(url, headers=headers, json=payload, timeout=timeout)
            resp.raise_for_status()
            return resp
        # Only retry request-level failures (timeouts, connection errors,
        # HTTP error statuses); the original bare `except Exception` also
        # retried programming errors, which can never succeed.
        except requests.RequestException:
            if attempt == max_retries - 1:
                raise
            # Linear backoff: 0.5s, 1.5s, ...
            time.sleep(0.5 + attempt)
    # Reached only when max_retries < 1 so the loop never executed.
    raise RuntimeError("max_retries must be >= 1")
# ---------------------------
# Model callers
# ---------------------------
def call_gemini(api_key, message, history):
    """Send the conversation plus `message` to Gemini generateContent.

    Args:
        api_key: Google API key, sent via the x-goog-api-key header.
        message: Latest user message.
        history: List of (user_text, assistant_text) pairs.

    Returns:
        The first candidate's first text part, or "" when the response
        carries no usable candidate. (The original expression
        `data.get("candidates",[{}])[0]` raised IndexError on an empty
        "candidates" list, because .get's default only applies when the
        key is *missing*, not when it maps to [].)
    """
    headers = {"Content-Type": "application/json", "x-goog-api-key": api_key}
    contents = []
    for user_text, model_text in history:
        contents.append({"role": "user", "parts": [{"text": user_text}]})
        contents.append({"role": "model", "parts": [{"text": model_text}]})
    contents.append({"role": "user", "parts": [{"text": message}]})
    payload = {"contents": contents}
    r = post_with_retries(GEMINI_URL, headers, payload)
    data = r.json()
    # `or [{}]` also covers the empty-list case, unlike a .get default.
    candidates = data.get("candidates") or [{}]
    parts = candidates[0].get("content", {}).get("parts") or [{}]
    return parts[0].get("text", "")
def call_llama_via_groq(api_key, model, message, history):
    """Send the conversation to Groq's OpenAI-compatible chat endpoint.

    Args:
        api_key: Groq API key, sent as a Bearer token.
        model: Model identifier to request.
        message: Latest user message.
        history: List of (user_text, assistant_text) pairs.

    Returns:
        The assistant reply text; falls back to the stringified raw
        response when no usable "choices" entry is present.
    """
    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json",
    }
    msgs = []
    for user_turn, assistant_turn in history:
        msgs.append({"role": "user", "content": user_turn})
        msgs.append({"role": "assistant", "content": assistant_turn})
    msgs.append({"role": "user", "content": message})
    response = post_with_retries(GROQ_URL, headers, {"model": model, "messages": msgs})
    data = response.json()
    choices = data.get("choices")
    if not choices:
        # Unexpected payload shape — surface it verbatim for debugging.
        return str(data)
    first = choices[0]
    reply = first.get("message")
    if isinstance(reply, dict):
        return reply.get("content", "")
    return first.get("text", "")
# ---------------------------
# Chat function
# ---------------------------
def chat_fn(message, history, model_choice):
    """Gradio ChatInterface callback: route `message` to the chosen model.

    Returns the model's reply string, or an error string containing the
    full traceback (fine for this dev build, but NOTE(review): it leaks
    internals to end users in production).
    """
    dispatch = {
        "Google Gemini 2.0 Flash": lambda: call_gemini(GEMINI_KEY, message, history),
        "Meta LLaMA 4": lambda: call_llama_via_groq(GROQ_KEY, GROQ_MODEL, message, history),
    }
    try:
        handler = dispatch.get(model_choice)
        if handler is None:
            return f"Unknown model: {model_choice}"
        return handler()
    except Exception as e:
        return f"Error: {e}\n{traceback.format_exc()}"
# ---------------------------
# Dark Mode CSS (your original)
# ---------------------------
# Injected into gr.Blocks below: dark topbar, flat compact dropdown,
# taller chat area, rounded send button. Selectors target Gradio's
# generated class names (.gr-dropdown, .gr-button, .chatbot) — these may
# break across Gradio versions; verify against the installed release.
css = """
/* topbar layout */
#topbar { display:flex; justify-content:space-between; align-items:center;
padding:18px 28px; background:#0f0f0f; border-bottom:1px solid #1f1f1f; }
#title { font-weight:800; color:#ffcc33; font-size:20px; }
/* compact, flat dropdown look */
#model_dropdown .gr-dropdown { background:#1a1a1a !important; border:1px solid #2b2b2b !important;
color:#ddd !important; padding:10px 12px !important; border-radius:8px !important;
width:260px !important; box-shadow:none !important; }
/* make ChatInterface chat area taller */
.gradio-container .chat-interface .chatbot { min-height: calc(100vh - 220px); background:#111; color:#eee; }
/* style send button */
.gr-button { border-radius:10px !important; background:#2c2c3f !important; color:#fff !important; }
"""
# ---------------------------
# Build UI
# ---------------------------
# Top-level Gradio app: a topbar row holding the model selector, plus a
# ChatInterface wired to chat_fn. The dropdown's value is passed to
# chat_fn as its third argument via additional_inputs.
with gr.Blocks(css=css, title="⚡ YellowFlash.ai") as app:
    with gr.Row(elem_id="topbar"):
        model_dropdown = gr.Dropdown(
            choices=["Google Gemini 2.0 Flash", "Meta LLaMA 4"],
            value="Google Gemini 2.0 Flash",
            show_label=False,
            elem_id="model_dropdown"
        )
    gr.ChatInterface(
        fn=chat_fn,
        title="⚡ YellowFlash.ai",
        description="under development",
        # Extra inputs are appended after (message, history) in chat_fn.
        additional_inputs=[model_dropdown],
    )
# share=True exposes the app on a public gradio.live URL — testing only;
# combined with the hardcoded keys above this makes them world-usable.
app.launch(share=True)