yewint committed on
Commit
96fac4e
·
unverified ·
1 Parent(s): 6e835a8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +52 -26
app.py CHANGED
@@ -6,9 +6,14 @@ import gradio as gr
6
  from dotenv import load_dotenv
7
  from groq import Groq
8
 
9
- # 🔱 CORE INITIALIZATION
10
  load_dotenv()
11
- client = Groq(api_key=os.getenv("GROQ_API_KEY"))
 
 
 
 
 
12
 
13
  class HydraEngine:
14
  @staticmethod
@@ -18,35 +23,48 @@ class HydraEngine:
18
  except:
19
  return str(c)
20
 
21
- def sync_matrix():
 
22
  try:
23
- conn = psycopg2.connect(os.getenv("DATABASE_URL"), connect_timeout=3)
 
24
  cur = conn.cursor()
25
- cur.execute("SELECT message FROM neurons ORDER BY id DESC LIMIT 1;")
26
- res = cur.fetchone()
 
27
  cur.close(); conn.close()
28
- return HydraEngine.decompress(res[0]) if res else "Natural Order Active"
29
- except:
30
- return "Standby Mode"
 
 
 
 
 
 
31
 
32
- # 🔱 CHAT LOGIC (Using OpenAI-style Message Format for Gradio 6.0)
33
  def stream_logic(msg, hist):
34
- ctx = sync_matrix()
35
- sys_msg = f"System Context: {ctx[:300]}. You are TelefoxX. Reply in Burmese."
36
 
37
- messages = [{"role": "system", "content": sys_msg}]
38
- # Gradio messages format ကို အမှန်ကန်ဆုံး ပြောင်းလဲခြင်း
39
- for h in hist:
40
- messages.append({"role": "user", "content": h["content"] if isinstance(h, dict) else h[0]})
41
- messages.append({"role": "assistant", "content": h["content"] if isinstance(h, dict) else h[1]})
 
 
42
 
 
 
 
43
  messages.append({"role": "user", "content": msg})
44
 
45
  try:
46
  completion = client.chat.completions.create(
47
  model="llama-3.1-8b-instant",
48
  messages=messages,
49
- temperature=0.2,
50
  stream=True
51
  )
52
  ans = ""
@@ -55,20 +73,17 @@ def stream_logic(msg, hist):
55
  ans += chunk.choices[0].delta.content
56
  yield ans
57
  except Exception as e:
58
- yield f"🔱 Matrix Link Interrupted: {str(e)}"
59
 
60
- # 🔱 UI SETUP (Resolved all Depreciation and UserWarnings)
61
  with gr.Blocks(theme="monochrome") as demo:
62
- gr.Markdown("# 🔱 TELEFOXX CONTROL CENTER")
63
-
64
- # type="messages" သတ်မှတ်ခြင်းဖြင့် Tuples warning ကို ရှင်းလိုက်ပြီ
65
- chatbot = gr.Chatbot(label="Neural Stream", type="messages", allow_tags=False)
66
  msg_input = gr.Textbox(placeholder="အမိန့်ပေးပါ Commander...")
67
 
68
  def respond(message, chat_history):
69
  chat_history.append({"role": "user", "content": message})
70
  chat_history.append({"role": "assistant", "content": ""})
71
- # stream bot response
72
  for r in stream_logic(message, chat_history[:-1]):
73
  chat_history[-1]["content"] = r
74
  yield "", chat_history
@@ -76,4 +91,15 @@ with gr.Blocks(theme="monochrome") as demo:
76
  msg_input.submit(respond, [msg_input, chatbot], [msg_input, chatbot])
77
 
78
  if __name__ == "__main__":
79
- demo.launch(server_name="0.0.0.0", server_port=7860)
 
 
 
 
 
 
 
 
 
 
 
 
6
from dotenv import load_dotenv
from groq import Groq

# 🔱 LOAD TRINITY KEYS
# Populate os.environ from a local .env file BEFORE any os.getenv() call below.
load_dotenv()
# Postgres DSN for Neon: prefer DATABASE_URL, fall back to NEON_KEY.
NEON_URL = os.getenv("DATABASE_URL") or os.getenv("NEON_KEY")
FIREBASE_KEY = os.getenv("FIREBASE_KEY")  # NOTE(review): unused in this file — confirm needed elsewhere
SUPABASE_URL = os.getenv("SUPABASE_URL")  # NOTE(review): unused in this file — confirm needed elsewhere
GROQ_API_KEY = os.getenv("GROQ_API_KEY")

# NOTE(review): the Groq client is built at import time, so a missing
# GROQ_API_KEY surfaces here rather than at the first request — confirm intended.
client = Groq(api_key=GROQ_API_KEY)
 
18
  class HydraEngine:
19
  @staticmethod
 
23
  except:
24
  return str(c)
25
 
26
# 🔱 SYNC WITH NEON DATABASE (CRITICAL CORE)
def fetch_neon_context():
    """Fetch the latest rows from the `neurons` table as one context string.

    Returns:
        str: up to the 3 most recent rows joined as "user_id: message | ...",
        "Initial Order Active" when the table is empty, or "Matrix Standby"
        when the database is unreachable or the query fails (best-effort).
    """
    conn = None
    try:
        # Connect to the Neon DB; short timeout keeps the chat responsive.
        conn = psycopg2.connect(NEON_URL, connect_timeout=5)
        cur = conn.cursor()
        # Pull the 3 most recent neural-data rows.
        cur.execute("SELECT user_id, message FROM neurons ORDER BY id DESC LIMIT 3;")
        rows = cur.fetchall()
        cur.close()
        if rows:
            # Decompress each stored message and fold the rows into one line
            # that Groq can read as grounding context.
            return " | ".join(f"{r[0]}: {HydraEngine.decompress(r[1])}" for r in rows)
        return "Initial Order Active"
    except Exception as e:
        print(f"🔱 DB SYNC ERROR: {str(e)}")
        return "Matrix Standby"
    finally:
        # FIX: the old code only closed the connection on the success path,
        # leaking it whenever execute()/fetchall() raised.
        if conn is not None:
            conn.close()
45
 
 
46
def stream_logic(msg, hist):
    """Stream a Groq completion for *msg*, grounded in the Neon DB context.

    Args:
        msg: the new user message (str).
        hist: prior turns as Gradio "messages" dicts with "role"/"content".

    Yields:
        The growing partial answer string; on API failure, a single
        error string instead.
    """
    # Pull the real context from the database on every turn.
    real_data = fetch_neon_context()

    # Ground Groq strictly in the DB context (directive kept in Burmese:
    # "you are TelefoxX Overseer; answer in Burmese using only the context").
    system_message = (
        f"MASTER CONTEXT FROM NEON DB: {real_data}\n\n"
        "DIRECTIVE: မင်းဟာ TelefoxX Overseer ဖြစ်တယ်။ "
        "အထက်ပါ CONTEXT ထဲမှာပါတဲ့ အချက်အလက်ကိုပဲ သုံးပြီး မြန်မာလိုဖြေပါ။ "
        "Context ထဲမှာ မပါတဲ့အရာတွေကို ကိုယ့်ဘာသာမထည့်ပါနဲ့။"
    )

    messages = [{"role": "system", "content": system_message}]
    # Keep the last 3 turns of chat memory.
    # FIX: preserve each turn's actual role — the previous code appended
    # every history entry as "user", misattributing assistant replies.
    for h in hist[-3:]:
        messages.append({"role": h.get("role", "user"), "content": h["content"]})
    messages.append({"role": "user", "content": msg})

    try:
        completion = client.chat.completions.create(
            model="llama-3.1-8b-instant",
            messages=messages,
            temperature=0.1,  # minimal temperature to curb hallucination
            stream=True,
        )
        ans = ""
        for chunk in completion:
            # delta.content can be None on some chunks (e.g. the final one);
            # only append and re-yield when there is actual text.
            if chunk.choices[0].delta.content:
                ans += chunk.choices[0].delta.content
                yield ans
    except Exception as e:
        yield f"🔱 Matrix Link Lost: {str(e)}"
77
 
78
# 🔱 UI SETUP
with gr.Blocks(theme="monochrome") as demo:
    # Header reflects whether a database DSN was found at startup.
    gr.Markdown(f"# 🔱 TELEFOXX OMNI-SYNC\n**Status:** {'Connected' if NEON_URL else 'Key Missing'}")
    chatbot = gr.Chatbot(type="messages", allow_tags=False)
    msg_input = gr.Textbox(placeholder="အမိန့်ပေးပါ Commander...")

    def respond(message, chat_history):
        """Submit handler: append the turn, then stream the reply into it."""
        chat_history.append({"role": "user", "content": message})
        chat_history.append({"role": "assistant", "content": ""})
        # FIX: pass history WITHOUT the two entries just appended —
        # stream_logic adds the current user message itself, so including
        # it here ([:-1]) sent the same message to the model twice.
        for r in stream_logic(message, chat_history[:-2]):
            chat_history[-1]["content"] = r
            yield "", chat_history

    msg_input.submit(respond, [msg_input, chatbot], [msg_input, chatbot])
92
 
93
if __name__ == "__main__":
    # HEADLESS_MODE="true" signals a CI run (GitHub Actions): launch without
    # blocking the main thread, then keep the process alive briefly to sync.
    headless = os.getenv("HEADLESS_MODE") == "true"
    demo.launch(server_name="0.0.0.0", server_port=7860, prevent_thread_lock=headless)
    if headless:
        import time
        print("🔱 SYNCHRONIZING TRINITY MATRIX...")
        time.sleep(15)  # give the background sync time to run before exit