# app.py — AQSO chat Space (Gradio + DeepSeek-R1 via HF Inference, Firebase logging)
# Revision 8916ddc
import gradio as gr
from huggingface_hub import InferenceClient
import os
import json
import threading
import firebase_admin
from firebase_admin import credentials, firestore
# ==========================================
# 1. ARSITEKTUR DATABASE (FIREBASE FIRESTORE)
# ==========================================
# Module-level connection state consumed by the helper functions below.
firebase_initialized = False
db = None
try:
    raw_credentials = os.getenv("FIREBASE_CREDENTIALS")
    if not raw_credentials:
        print("[System Architect Warning] FIREBASE_CREDENTIALS tidak ditemukan di Secrets.")
    else:
        cred_payload = json.loads(raw_credentials)
        # Guard against double initialization when Gradio reloads the module.
        if not firebase_admin._apps:
            firebase_admin.initialize_app(credentials.Certificate(cred_payload))
        db = firestore.client()
        firebase_initialized = True
        print("[System Architect] Firebase berhasil terhubung.")
except Exception as e:
    print(f"[System Architect Error] Gagal inisiasi Firebase: {e}")
def push_to_firebase(user_message, ai_response):
    """Write one chat-log document to Firestore without blocking the Gradio UI thread.

    Best-effort: silently no-ops when Firebase is not connected, and write
    failures are logged rather than raised.
    """
    if not (firebase_initialized and db):
        return
    log_entry = {
        "timestamp": firestore.SERVER_TIMESTAMP,
        "user": user_message,
        "ai": ai_response,
    }
    try:
        db.collection("aqso_chat_logs").add(log_entry)
    except Exception as exc:
        print(f"[Firebase Write Error] {exc}")
def fetch_history_from_firebase():
    """Return the 10 most recent chat logs as display text for the sidebar UI.

    Returns a user-facing status string when Firebase is not connected or the
    query fails, so the Gradio callback never raises.
    """
    if not firebase_initialized or not db:
        return "⚠️ Firebase belum terhubung. Cek FIREBASE_CREDENTIALS di Secrets Space."
    try:
        docs = (
            db.collection("aqso_chat_logs")
            .order_by("timestamp", direction=firestore.Query.DESCENDING)
            .limit(10)
            .stream()
        )
        entries = []
        for doc in docs:
            data = doc.to_dict()
            # NOTE(review): timestamp is rendered via str(); presumably a
            # Firestore Timestamp — confirm the display format is acceptable.
            time_str = data.get('timestamp', 'Waktu Tidak Diketahui')
            user_msg = _truncate(str(data.get('user', '')), 40)
            entries.append(f"🕒 {time_str}\n👤: {user_msg}\n---\n")
        # join() avoids quadratic string concatenation from the original +=.
        return "".join(entries) if entries else "📭 Belum ada riwayat."
    except Exception as e:
        return f"❌ Gagal memuat riwayat: {e}"


def _truncate(text, limit):
    """Clip *text* to *limit* chars, appending '...' only when actually clipped.

    Fixes the original behavior of unconditionally appending '...' even to
    messages shorter than the limit.
    """
    return text if len(text) <= limit else text[:limit] + "..."
# ==========================================
# 2. LOGIKA INFERENCE & PARSER (DEFENSIVE ZERO ERROR)
# ==========================================
# Tag -> HTML replacement pairs that render DeepSeek's <think> markers as a
# collapsible <details> section, Claude-style, in the chat window.
_THINK_MARKUP = (
    ("<think>", "<details open>\n<summary>🧠 <b>Thinking Process...</b></summary>\n\n> "),
    ("</think>", "\n</details>\n\n---\n\n"),
)


def parse_thinking_ui(text: str) -> str:
    """Render the model's raw <think>...</think> tags as a collapsible UI block."""
    for tag, markup in _THINK_MARKUP:
        text = text.replace(tag, markup)
    return text
def respond(message, history, system_message, max_tokens, temperature, top_p):
    """Stream a chat completion from the DeepSeek model, yielding rendered partial text.

    Args:
        message: Latest user message.
        history: Prior turns. Accepts both gradio tuple pairs
            ``(user, assistant)`` and "messages"-style ``{"role", "content"}``
            dicts, so the app survives a ChatInterface format change.
        system_message: System prompt prepended to the conversation.
        max_tokens, temperature, top_p: Sampling parameters forwarded to the API.

    Yields:
        The accumulated response so far, with <think> tags rendered as HTML,
        or a user-facing error string when the token is missing / stream drops.
    """
    hf_token = os.getenv("HF_TOKEN")
    if not hf_token:
        yield "❌ **FATAL ERROR:** HF_TOKEN tidak ditemukan. Pastikan variabel Secrets sudah diset dan Space telah di-Factory Rebuild."
        return
    # Lazy initialization: create the client only at call time, so a missing
    # token or network issue cannot break the Docker build / import phase.
    client = InferenceClient("deepseek-ai/DeepSeek-R1-Distill-Qwen-32B", token=hf_token)
    messages = [{"role": "system", "content": system_message}]
    # Rebuild the current session's conversation memory.
    for turn in history:
        if isinstance(turn, dict):
            # "messages" format: one dict per turn.
            if turn.get("content"):
                messages.append({"role": turn.get("role", "user"), "content": turn["content"]})
        else:
            # Legacy tuple format: (user_msg, ai_msg) per turn.
            user_msg, ai_msg = turn
            if user_msg:
                messages.append({"role": "user", "content": user_msg})
            if ai_msg:
                messages.append({"role": "assistant", "content": ai_msg})
    messages.append({"role": "user", "content": message})
    response = ""
    try:
        stream = client.chat_completion(
            messages, max_tokens=max_tokens, stream=True,
            temperature=temperature, top_p=top_p
        )
        for msg in stream:
            try:
                # Null-check every chunk: the HF server occasionally emits
                # malformed frames that would raise "list index out of range".
                if hasattr(msg, 'choices') and isinstance(msg.choices, list) and len(msg.choices) > 0:
                    delta = msg.choices[0].delta
                    if hasattr(delta, 'content') and delta.content is not None:
                        response += delta.content
                        yield parse_thinking_ui(response)
            except Exception as chunk_err:
                # Skip the corrupt chunk and keep streaming.
                print(f"[System Architect Warning] Malformed stream chunk ignored: {chunk_err}")
                continue
        # Persist only completed, non-empty generations. daemon=True so the
        # best-effort logging thread can never block interpreter shutdown.
        if response:
            threading.Thread(
                target=push_to_firebase, args=(message, response), daemon=True
            ).start()
    except Exception as e:
        # Salvage whatever text streamed before the connection dropped.
        error_msg = f"\n\n⚠️ **[Koneksi HF API Terputus]:** Server kelebihan beban atau timeout. Teks diselamatkan sejauh ini. Error internal: {str(e)}"
        yield parse_thinking_ui(response) + error_msg
# ==========================================
# 3. ARSITEKTUR UI (PREMIUM LAYOUT)
# ==========================================
# CSS injected into gr.Blocks: dark palette, rounded corners, and distinct
# user/bot message bubbles. Selectors target gradio's generated class names,
# so they may need updating if the gradio version changes — TODO confirm.
custom_css = """
/* Injeksi CSS Premium Dark Mode */
.gradio-container { background-color: #0B0F19 !important; font-family: 'Inter', sans-serif; }
.contain { border-radius: 12px !important; border: 1px solid #1E293B !important; background-color: #111827 !important;}
button { border-radius: 8px !important; }
textarea { background-color: #1F2937 !important; color: white !important; border: 1px solid #374151 !important; }
.message.user { background-color: #3B82F6 !important; color: white !important; }
.message.bot { background-color: transparent !important; border: 1px solid #374151 !important; color: white !important; }
"""
# Default system prompt shown (and user-editable) in the configuration accordion.
default_prompt = (
    "You are an elite Senior Developer, System Architect, and Expert Scraper built for AQSO. "
    "Prioritize Absolute Logical Accuracy & Zero Error. "
    "Always simulate logic internally before outputting. For scraping tasks, proactively implement stealth mitigations, "
    "WAF/Cloudflare bypasses, and dynamic rendering handling."
)
# Top-level UI definition. Component creation order defines the layout, so
# statement order inside this block is load-bearing.
with gr.Blocks(theme=gr.themes.Monochrome(), css=custom_css, title="AQSO Engine") as demo:
    with gr.Row():
        # Left column: Firebase history sidebar.
        with gr.Column(scale=1, min_width=250):
            gr.Markdown("### 🗄️ Riwayat Sesi (Firebase)")
            btn_refresh = gr.Button("🔄 Muat Ulang Riwayat", variant="primary")
            txt_history = gr.Textbox(label="10 Percakapan Terakhir", lines=25, interactive=False, value="Klik tombol muat ulang untuk mengambil data dari Firestore...")
            # Wire the refresh button to the Firestore fetch helper.
            btn_refresh.click(fn=fetch_history_from_firebase, inputs=None, outputs=txt_history)
        # Right column: chat engine and model configuration.
        with gr.Column(scale=4):
            gr.Markdown("## ⚡ AQSO Thinking Engine (Defensive Architecture)")
            with gr.Accordion("⚙️ Konfigurasi Arsitektur Engine", open=False):
                sys_prompt = gr.Textbox(value=default_prompt, label="System Prompt", lines=4)
                with gr.Row():
                    max_tok = gr.Slider(1, 8192, 4096, step=1, label="Max Tokens")
                    temp = gr.Slider(0.1, 2.0, 0.6, step=0.1, label="Temperature")
                    top_p = gr.Slider(0.1, 1.0, 0.9, step=0.05, label="Top-P")
            # ChatInterface streams `respond`; additional_inputs are appended
            # after (message, history) in exactly this order.
            chat_interface = gr.ChatInterface(
                fn=respond,
                additional_inputs=[sys_prompt, max_tok, temp, top_p],
                autofocus=True
            )
if __name__ == "__main__":
    # Bind to 0.0.0.0:7860 — required for Hugging Face Spaces Docker routing.
    demo.launch(server_name="0.0.0.0", server_port=7860)