# HuggingFace Space page header ("Spaces: Sleeping") — scraped page chrome,
# converted to a comment so this file is valid Python.
| import os | |
| import requests | |
| import gradio as gr | |
| import sqlite3 | |
| from openai import OpenAI | |
# 🔑 API keys — read from the environment; never echo secret values.
openai_api_key = os.getenv("OPENAI_API_KEY")
gemini_api_key = os.getenv("GEMINI_API_KEY")

# Security fix: the original printed the raw key values to stdout, leaking
# secrets into logs. Report only whether each key is configured.
print("OPENAI_API_KEY set:", openai_api_key is not None)
print("GEMINI_API_KEY set:", gemini_api_key is not None)

# OpenAI key is mandatory; Gemini is optional (gemini_response guards it).
if openai_api_key is None:
    raise ValueError("⚠️ OPENAI_API_KEY тохируулна уу!")

client = OpenAI(api_key=openai_api_key)
# SQLite setup — persistent store for fused chat turns.
# check_same_thread=False: Gradio event handlers may fire on worker threads
# other than the one that opened the connection.
conn = sqlite3.connect("chat_history.db", check_same_thread=False)
c = conn.cursor()

_HISTORY_SCHEMA = """
CREATE TABLE IF NOT EXISTS history (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    user_input TEXT,
    fusion_output TEXT
)
"""
c.execute(_HISTORY_SCHEMA)
conn.commit()
# ChatGPT (gpt-5)
def chatgpt_response(prompt):
    """Ask the gpt-5 model for a reply via the OpenAI Responses API.

    Returns the model's text output on success; on any failure returns an
    error string of the form "⚠️ ChatGPT error: ..." instead of raising,
    so the caller can always concatenate the result.
    """
    try:
        result = client.responses.create(model="gpt-5", input=prompt)
    except Exception as exc:
        return f"⚠️ ChatGPT error: {exc}"
    return result.output_text
| # Gemini | |
# Gemini
def gemini_response(prompt):
    """Query Gemini 2.0 Flash through the Generative Language REST API.

    Parameters
    ----------
    prompt : str
        User prompt, sent as a single text part.

    Returns
    -------
    str
        The first candidate's text, or a "⚠️ ..." error string when the
        API key is missing or the request fails (never raises).
    """
    if gemini_api_key is None:
        return "⚠️ GEMINI_API_KEY тохируулаагүй байна!"
    try:
        url = (
            "https://generativelanguage.googleapis.com/v1beta/models/"
            f"gemini-2.0-flash:generateContent?key={gemini_api_key}"
        )
        headers = {"Content-Type": "application/json"}
        data = {"contents": [{"parts": [{"text": prompt}]}]}
        # Fix: the original call had no timeout, so a stalled connection
        # would hang the whole app indefinitely; also surface non-2xx HTTP
        # responses as errors instead of failing later on a KeyError.
        res = requests.post(url, headers=headers, json=data, timeout=30)
        res.raise_for_status()
        res_json = res.json()
        return res_json["candidates"][0]["content"]["parts"][0]["text"]
    except Exception as e:
        return f"⚠️ Gemini error: {e}"
# Fusion + summary
def fusion_ai(prompt, chat_history=None):
    """Fuse ChatGPT and Gemini replies into one chat turn and persist it.

    Parameters
    ----------
    prompt : str
        The user's question.
    chat_history : list[tuple[str, str]] | None
        Existing (user, bot) pairs from the Gradio Chatbot. Fix: the
        original used a mutable default argument (``=[]``), which is
        shared across every call that omits it; use None as the sentinel.

    Returns
    -------
    tuple
        ``(chat_history, chat_history)`` — duplicated because the Gradio
        submit handler binds the same value to two outputs.
    """
    if chat_history is None:
        chat_history = []

    gpt_reply = chatgpt_response(prompt)
    gemini_reply = gemini_response(prompt)

    # Fix: the original tested '"ChatGPT error" in gpt_reply.lower()',
    # which can never be True (a lowercased string contains no uppercase
    # letters), so the Gemini fallback branch was unreachable.
    if "chatgpt error" in gpt_reply.lower():
        combined = f"⚠️ ChatGPT quota дууссан тул Gemini хариу:\n\n{gemini_reply}"
    else:
        # Fusion summary: урт хариултаас сонгох
        combined = f"ChatGPT 🧠: {gpt_reply}\nGemini 🌐: {gemini_reply}"

    # Persist the turn so history survives app restarts (parameterized query).
    c.execute(
        "INSERT INTO history (user_input, fusion_output) VALUES (?, ?)",
        (prompt, combined),
    )
    conn.commit()

    chat_history.append((prompt, combined))
    return chat_history, chat_history
# Gradio interface — dark theme overrides for the default layout.
css = """
.gradio-container {background-color: #0d1117 !important; color: #e6edf3;}
h1, h3, h4, label {color: #00d9ff !important;}
textarea, input {background-color:#161b22 !important; color:#fff !important; border-radius:10px;}
button {background-color:#00d9ff !important; color:#000 !important; font-weight:bold; border-radius:10px;}
"""

with gr.Blocks(css=css, theme=gr.themes.Soft()) as app:
    gr.Markdown("## 🚀 ZeppFusion AI — Persistent Fusion Chat")

    chat_display = gr.Chatbot()
    user_box = gr.Textbox(placeholder="Таны асуулт...")
    clear_btn = gr.Button("🗑️ Clear History")

    # Submitting a message runs the fusion pipeline and refreshes the chat.
    user_box.submit(fusion_ai, [user_box, chat_display], [chat_display, chat_display])

    def _wipe_history():
        """Delete all persisted rows and empty the on-screen chat."""
        c.execute("DELETE FROM history")
        conn.commit()
        return []

    clear_btn.click(_wipe_history, None, chat_display)

if __name__ == "__main__":
    app.launch()