import gradio as gr
from huggingface_hub import InferenceClient
import os
import json
import threading
import firebase_admin
from firebase_admin import credentials, firestore
# ==========================================
# 1. ARSITEKTUR DATABASE (FIREBASE FIRESTORE)
# ==========================================
firebase_initialized = False
db = None

def _connect_firebase():
    """Attempt a one-time Firestore connection from the FIREBASE_CREDENTIALS secret.

    Returns a ``(client, connected)`` tuple; ``(None, False)`` when the
    secret is absent. Raises on malformed credentials so the caller's
    try/except can log the failure.
    """
    raw_credentials = os.getenv("FIREBASE_CREDENTIALS")
    if not raw_credentials:
        print("[System Architect Warning] FIREBASE_CREDENTIALS tidak ditemukan di Secrets.")
        return None, False
    parsed = json.loads(raw_credentials)
    # Guard against double initialization in Gradio's reload environment.
    if not firebase_admin._apps:
        firebase_admin.initialize_app(credentials.Certificate(parsed))
    client = firestore.client()
    print("[System Architect] Firebase berhasil terhubung.")
    return client, True

try:
    db, firebase_initialized = _connect_firebase()
except Exception as e:
    print(f"[System Architect Error] Gagal inisiasi Firebase: {e}")
def push_to_firebase(user_message, ai_response):
    """Log one chat turn to Firestore; silent no-op when Firebase is offline.

    Intended to run on a worker thread so the Gradio UI thread never blocks
    on network I/O. Write failures are printed, never raised.
    """
    if not (firebase_initialized and db):
        return
    payload = {
        "timestamp": firestore.SERVER_TIMESTAMP,
        "user": user_message,
        "ai": ai_response,
    }
    try:
        db.collection("aqso_chat_logs").add(payload)
    except Exception as e:
        print(f"[Firebase Write Error] {e}")
def fetch_history_from_firebase():
    """Mengambil 10 riwayat terakhir untuk ditampilkan di Sidebar UI.

    Returns a display-ready string: the formatted history, a "no history"
    placeholder, or an offline/error notice. Never raises — all failures
    are folded into the returned text for the sidebar.
    """
    if not firebase_initialized or not db:
        return "β οΈ Firebase belum terhubung. Cek FIREBASE_CREDENTIALS di Secrets Space."
    try:
        docs = (
            db.collection("aqso_chat_logs")
            .order_by("timestamp", direction=firestore.Query.DESCENDING)
            .limit(10)
            .stream()
        )
        entries = []
        for doc in docs:
            data = doc.to_dict()
            time_str = data.get('timestamp', 'Waktu Tidak Diketahui')
            user_msg = str(data.get('user', ''))
            # Only append an ellipsis when the preview actually truncates
            # (the old code added "..." even to short messages).
            if len(user_msg) > 40:
                user_msg = user_msg[:40] + "..."
            entries.append(f"π {time_str}\nπ€: {user_msg}\n---\n")
        # join instead of += accumulation; also gives the empty-history check.
        return "".join(entries) if entries else "π Belum ada riwayat."
    except Exception as e:
        return f"β Gagal memuat riwayat: {e}"
# ==========================================
# 2. LOGIKA INFERENCE & PARSER (DEFENSIVE ZERO ERROR)
# ==========================================
def parse_thinking_ui(text: str) -> str:
    """Mengubah tag <think> bawaan model menjadi UI Collapsible ala Claude."""
    # Map each raw model tag to the HTML fragment that renders it as a
    # collapsible "thinking" panel inside the chat bubble.
    tag_map = (
        ("<think>", "<details open>\n<summary>π§ <b>Thinking Process...</b></summary>\n\n> "),
        ("</think>", "\n</details>\n\n---\n\n"),
    )
    for raw_tag, html_fragment in tag_map:
        text = text.replace(raw_tag, html_fragment)
    return text
def respond(message, history, system_message, max_tokens, temperature, top_p):
    """Stream a chat completion from the DeepSeek-R1 HF Inference endpoint.

    Args:
        message: Latest user message.
        history: Session history as (user, assistant) pairs from Gradio.
        system_message: System prompt prepended to the conversation.
        max_tokens: Generation cap forwarded to the API.
        temperature: Sampling temperature forwarded to the API.
        top_p: Nucleus-sampling parameter forwarded to the API.

    Yields:
        Progressively longer response text, with <think> tags rendered as
        collapsible HTML via parse_thinking_ui. On API failure, yields the
        text streamed so far plus an error notice instead of raising.
    """
    hf_token = os.getenv("HF_TOKEN")
    if not hf_token:
        yield "β **FATAL ERROR:** HF_TOKEN tidak ditemukan. Pastikan variabel Secrets sudah diset dan Space telah di-Factory Rebuild."
        return
    # Lazy initialization: only build the client at call time, so a missing
    # token or network issue cannot break the Docker build / module import.
    client = InferenceClient("deepseek-ai/DeepSeek-R1-Distill-Qwen-32B", token=hf_token)
    messages = [{"role": "system", "content": system_message}]
    # Reconstruct the current session's conversational memory.
    # NOTE(review): assumes tuple-style history ("tuples" chat format) — verify
    # against the installed Gradio version's ChatInterface default.
    for user_msg, ai_msg in history:
        if user_msg:
            messages.append({"role": "user", "content": user_msg})
        if ai_msg:
            messages.append({"role": "assistant", "content": ai_msg})
    messages.append({"role": "user", "content": message})
    response = ""
    try:
        stream = client.chat_completion(
            messages, max_tokens=max_tokens, stream=True,
            temperature=temperature, top_p=top_p
        )
        for msg in stream:
            try:
                # Null-check every level so a malformed chunk cannot raise
                # "list index out of range" and kill the whole stream.
                if hasattr(msg, 'choices') and isinstance(msg.choices, list) and len(msg.choices) > 0:
                    delta = msg.choices[0].delta
                    if hasattr(delta, 'content') and delta.content is not None:
                        response += delta.content
                        yield parse_thinking_ui(response)
            except Exception as chunk_err:
                # The HF server may emit a corrupt chunk; skip it and continue.
                print(f"[System Architect Warning] Malformed stream chunk ignored: {chunk_err}")
                continue
        # Persist the finished exchange without blocking the UI thread.
        # daemon=True (fix): a stuck Firestore write must never prevent
        # process shutdown on HF Spaces.
        if response:
            threading.Thread(
                target=push_to_firebase, args=(message, response), daemon=True
            ).start()
    except Exception as e:
        # Salvage whatever text already streamed before the connection dropped.
        error_msg = f"\n\nβ οΈ **[Koneksi HF API Terputus]:** Server kelebihan beban atau timeout. Teks diselamatkan sejauh ini. Error internal: {str(e)}"
        yield parse_thinking_ui(response) + error_msg
# ==========================================
# 3. ARSITEKTUR UI (PREMIUM LAYOUT)
# ==========================================
# Dark-mode CSS injected into the Gradio app: restyles the root container,
# panels, buttons, textboxes, and the user/bot chat bubbles.
custom_css = """
/* Injeksi CSS Premium Dark Mode */
.gradio-container { background-color: #0B0F19 !important; font-family: 'Inter', sans-serif; }
.contain { border-radius: 12px !important; border: 1px solid #1E293B !important; background-color: #111827 !important;}
button { border-radius: 8px !important; }
textarea { background-color: #1F2937 !important; color: white !important; border: 1px solid #374151 !important; }
.message.user { background-color: #3B82F6 !important; color: white !important; }
.message.bot { background-color: transparent !important; border: 1px solid #374151 !important; color: white !important; }
"""
# Default system prompt shown (and editable) in the configuration accordion.
default_prompt = (
    "You are an elite Senior Developer, System Architect, and Expert Scraper built for AQSO. "
    "Prioritize Absolute Logical Accuracy & Zero Error. "
    "Always simulate logic internally before outputting. For scraping tasks, proactively implement stealth mitigations, "
    "WAF/Cloudflare bypasses, and dynamic rendering handling."
)
# Top-level UI layout: a narrow Firebase-history sidebar on the left and the
# chat engine plus its sampling configuration on the right.
with gr.Blocks(theme=gr.themes.Monochrome(), css=custom_css, title="AQSO Engine") as demo:
    with gr.Row():
        # Left column: Firebase history sidebar.
        with gr.Column(scale=1, min_width=250):
            gr.Markdown("### ποΈ Riwayat Sesi (Firebase)")
            btn_refresh = gr.Button("π Muat Ulang Riwayat", variant="primary")
            txt_history = gr.Textbox(label="10 Percakapan Terakhir", lines=25, interactive=False, value="Klik tombol muat ulang untuk mengambil data dari Firestore...")
            # Wire the refresh button to the Firestore fetch function.
            btn_refresh.click(fn=fetch_history_from_firebase, inputs=None, outputs=txt_history)
        # Right column: chat engine and configuration.
        with gr.Column(scale=4):
            gr.Markdown("## β‘ AQSO Thinking Engine (Defensive Architecture)")
            with gr.Accordion("βοΈ Konfigurasi Arsitektur Engine", open=False):
                sys_prompt = gr.Textbox(value=default_prompt, label="System Prompt", lines=4)
                with gr.Row():
                    # Sampling controls passed to respond() as additional inputs.
                    max_tok = gr.Slider(1, 8192, 4096, step=1, label="Max Tokens")
                    temp = gr.Slider(0.1, 2.0, 0.6, step=0.1, label="Temperature")
                    top_p = gr.Slider(0.1, 1.0, 0.9, step=0.05, label="Top-P")
            chat_interface = gr.ChatInterface(
                fn=respond,
                additional_inputs=[sys_prompt, max_tok, temp, top_p],
                autofocus=True
            )
# Entry-point guard: importing this module must not auto-launch the server.
if __name__ == "__main__":
    # Binding to 0.0.0.0:7860 is required for HF Spaces Docker deployments.
    demo.launch(server_name="0.0.0.0", server_port=7860)
|