Update app.py
Browse files
app.py
CHANGED
|
@@ -18,7 +18,7 @@ from playwright.async_api import async_playwright
|
|
| 18 |
from apscheduler.schedulers.background import BackgroundScheduler
|
| 19 |
from upstash_redis import Redis as UpstashRedis
|
| 20 |
|
| 21 |
-
# إ
|
| 22 |
os.environ["TZ"] = "UTC"
|
| 23 |
nest_asyncio.apply()
|
| 24 |
|
|
@@ -55,7 +55,7 @@ LIMITS = {
|
|
| 55 |
}
|
| 56 |
|
| 57 |
# ==========================================
|
| 58 |
-
# 2. مدير التخزين (
|
| 59 |
# ==========================================
|
| 60 |
|
| 61 |
class LogManager:
|
|
@@ -66,28 +66,48 @@ class LogManager:
|
|
| 66 |
self.redis = UpstashRedis(url=UPSTASH_REDIS_REST_URL, token=UPSTASH_REDIS_REST_TOKEN)
|
| 67 |
self.redis.ping()
|
| 68 |
self.enabled = True
|
| 69 |
-
print("✅ Connected to Upstash Redis")
|
| 70 |
except Exception as e:
|
| 71 |
print(f"❌ Upstash Connection Error: {e}")
|
| 72 |
else:
|
| 73 |
-
print("⚠️ Upstash Credentials missing.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 74 |
|
| 75 |
def load_logs(self):
|
| 76 |
-
"""جلب آخر 100 سجل"""
|
| 77 |
if not self.enabled: return []
|
| 78 |
try:
|
| 79 |
raw_logs = self.redis.lrange("nexus_logs", 0, 99)
|
| 80 |
parsed_logs = []
|
| 81 |
for log in raw_logs:
|
| 82 |
if isinstance(log, str):
|
| 83 |
-
try:
|
| 84 |
-
parsed_logs.append(json.loads(log))
|
| 85 |
except: pass
|
| 86 |
-
else:
|
| 87 |
-
parsed_logs.append(log)
|
| 88 |
return parsed_logs
|
| 89 |
-
except
|
| 90 |
-
return []
|
| 91 |
|
| 92 |
def save_log_entry(self, platform, title, url, original_text, our_reply):
|
| 93 |
entry = {
|
|
@@ -102,12 +122,11 @@ class LogManager:
|
|
| 102 |
LOCAL_LOGS.insert(0, entry)
|
| 103 |
if len(LOCAL_LOGS) > 100: LOCAL_LOGS.pop()
|
| 104 |
|
| 105 |
-
if
|
| 106 |
-
|
| 107 |
-
|
| 108 |
-
|
| 109 |
-
|
| 110 |
-
except: pass
|
| 111 |
return entry
|
| 112 |
|
| 113 |
log_manager = LogManager()
|
|
@@ -186,7 +205,7 @@ def send_smtp_email(to_email, subject, body):
|
|
| 186 |
except Exception as e: return False, str(e)
|
| 187 |
|
| 188 |
# ==========================================
|
| 189 |
-
# 5. الحلقات (The Loops)
|
| 190 |
# ==========================================
|
| 191 |
|
| 192 |
# --- GitHub Loop ---
|
|
@@ -200,14 +219,16 @@ def github_loop():
|
|
| 200 |
resp = requests.get(f"https://api.github.com/search/issues?q={kw}+state:open&sort=updated", headers=headers)
|
| 201 |
if resp.status_code == 200:
|
| 202 |
for item in resp.json().get("items", [])[:3]:
|
| 203 |
-
|
|
|
|
| 204 |
|
| 205 |
reply = generate_localized_reply(item['title'] + "\n" + (item['body'] or ""), "GitHub")
|
| 206 |
if reply:
|
| 207 |
post_resp = requests.post(item['url']+"/comments", headers=headers, json={"body": reply})
|
| 208 |
if post_resp.status_code == 201:
|
| 209 |
LOCAL_STATS["github"] += 1
|
| 210 |
-
|
|
|
|
| 211 |
log_manager.save_log_entry("GitHub", item['title'], item['html_url'], item['body'], reply)
|
| 212 |
print(f"✅ GitHub Replied: {item['title'][:20]}")
|
| 213 |
time.sleep(60)
|
|
@@ -241,7 +262,8 @@ async def email_hunter_process():
|
|
| 241 |
|
| 242 |
for url, title in extracted:
|
| 243 |
if LOCAL_STATS["email"] >= LIMITS["email_daily_send"]: break
|
| 244 |
-
|
|
|
|
| 245 |
|
| 246 |
try:
|
| 247 |
await page.goto(url, timeout=20000)
|
|
@@ -257,7 +279,8 @@ async def email_hunter_process():
|
|
| 257 |
ok, msg = send_smtp_email(target_email, data['subject'], data['body'])
|
| 258 |
if ok:
|
| 259 |
LOCAL_STATS["email"] += 1
|
| 260 |
-
|
|
|
|
| 261 |
log_manager.save_log_entry("Email", title, url, f"Target: {target_email}", data['body'])
|
| 262 |
print(f"✅ Email Sent: {title}")
|
| 263 |
await asyncio.sleep(20)
|
|
@@ -287,12 +310,15 @@ def reddit_job():
|
|
| 287 |
username=REDDIT_USERNAME, password=REDDIT_PASSWORD, user_agent="NexusBot")
|
| 288 |
kw = random.choice(generate_polyglot_keywords())
|
| 289 |
for post in reddit.subreddit("all").search(kw, limit=3, sort="new"):
|
| 290 |
-
|
|
|
|
|
|
|
| 291 |
reply = generate_localized_reply(post.title + "\n" + post.selftext, "Reddit")
|
| 292 |
if reply:
|
| 293 |
post.reply(reply)
|
| 294 |
LOCAL_STATS["reddit"] += 1
|
| 295 |
-
|
|
|
|
| 296 |
log_manager.save_log_entry("Reddit", post.title, post.url, post.selftext or "Image", reply)
|
| 297 |
print(f"✅ Reddit Replied: {post.title[:20]}")
|
| 298 |
break
|
|
@@ -308,14 +334,13 @@ def start_background_tasks():
|
|
| 308 |
sched = BackgroundScheduler()
|
| 309 |
sched.add_job(reddit_job, 'interval', minutes=20)
|
| 310 |
sched.start()
|
| 311 |
-
print("🚀 All Nexus Systems Operational")
|
| 312 |
|
| 313 |
start_background_tasks()
|
| 314 |
|
| 315 |
def get_dashboard_stats():
|
| 316 |
return LOCAL_STATS["reddit"], LOCAL_STATS["github"], LOCAL_STATS["email"]
|
| 317 |
|
| 318 |
-
# --- التعديل الحاسم هنا ---
|
| 319 |
def get_chat_history():
|
| 320 |
try:
|
| 321 |
logs = log_manager.load_logs()
|
|
@@ -324,16 +349,10 @@ def get_chat_history():
|
|
| 324 |
logs = LOCAL_LOGS
|
| 325 |
|
| 326 |
formatted_chat = []
|
| 327 |
-
|
| 328 |
-
# تحويل البيانات إلى "قائمة من القواميس" (List of Dictionaries)
|
| 329 |
-
# هذا يرضي رسالة الخطأ: "Each message should be a dictionary with 'role' and 'content'"
|
| 330 |
for log in logs:
|
| 331 |
-
# رسالة العميل (المنشور الأصلي)
|
| 332 |
user_content = f"**[{log['platform']}] {log['title']}**\n\n{log.get('original_text','')}\n\n🔗 [Link]({log['url']})"
|
| 333 |
-
formatted_chat.append({"role": "user", "content": user_content})
|
| 334 |
-
|
| 335 |
-
# ردنا (البوت)
|
| 336 |
bot_content = f"**🗓️ {log['timestamp']}**\n\n{log['our_reply']}"
|
|
|
|
| 337 |
formatted_chat.append({"role": "assistant", "content": bot_content})
|
| 338 |
|
| 339 |
return formatted_chat
|
|
@@ -350,7 +369,6 @@ with gr.Blocks(title="Nexus Ultimate") as demo:
|
|
| 350 |
refresh_btn = gr.Button("🔄 Refresh Data & Logs")
|
| 351 |
|
| 352 |
gr.Markdown("### 💬 Live Interaction Logs")
|
| 353 |
-
# إزالة type نهائياً لتجنب TypeError، مع الاعتماد على تنسيق البيانات في الدالة أعلاه
|
| 354 |
chatbot_view = gr.Chatbot(label="History Log", height=600)
|
| 355 |
|
| 356 |
refresh_btn.click(get_dashboard_stats, outputs=[r, g, e])
|
|
@@ -360,4 +378,4 @@ with gr.Blocks(title="Nexus Ultimate") as demo:
|
|
| 360 |
demo.load(get_chat_history, outputs=[chatbot_view])
|
| 361 |
|
| 362 |
if __name__ == "__main__":
|
| 363 |
-
demo.launch(server_name="0.0.0.0", server_port=7860, theme=gr.themes.Base())
|
|
|
|
| 18 |
from apscheduler.schedulers.background import BackgroundScheduler
|
| 19 |
from upstash_redis import Redis as UpstashRedis
|
| 20 |
|
| 21 |
+
# إعدادات النظام
|
| 22 |
os.environ["TZ"] = "UTC"
|
| 23 |
nest_asyncio.apply()
|
| 24 |
|
|
|
|
| 55 |
}
|
| 56 |
|
| 57 |
# ==========================================
|
| 58 |
+
# 2. مدير التخزين والذاكرة (Iron Memory Manager)
|
| 59 |
# ==========================================
|
| 60 |
|
| 61 |
class LogManager:
|
|
|
|
| 66 |
self.redis = UpstashRedis(url=UPSTASH_REDIS_REST_URL, token=UPSTASH_REDIS_REST_TOKEN)
|
| 67 |
self.redis.ping()
|
| 68 |
self.enabled = True
|
| 69 |
+
print("✅ Connected to Upstash Redis (Iron Memory Active)")
|
| 70 |
except Exception as e:
|
| 71 |
print(f"❌ Upstash Connection Error: {e}")
|
| 72 |
else:
|
| 73 |
+
print("⚠️ Upstash Credentials missing. Memory will be temporary only!")
|
| 74 |
+
|
| 75 |
+
def is_processed(self, url):
    """Return True if *url* has already been handled.

    Checks the fast in-process list first, then the durable Upstash set
    ("nexus_processed_set"), which survives restarts. Any Redis error is
    treated as "not processed" so the posting loops keep running.
    """
    # Fast path: URLs handled during this process lifetime.
    if url in LOCAL_STATS['processed_urls']:
        return True

    # Durable path: check the cloud set (safe after a restart).
    if self.enabled:
        try:
            # SISMEMBER returns 1 when the member exists in the set.
            return self.redis.sismember("nexus_processed_set", url) == 1
        except Exception:
            # Best-effort: on a Redis failure fall back to "unknown" -> False.
            return False
    return False
|
| 89 |
+
|
| 90 |
+
def mark_processed(self, url):
    """Record *url* as handled, locally and in the durable Upstash set."""
    # Local memory: makes is_processed() fast for this process lifetime.
    LOCAL_STATS['processed_urls'].append(url)
    if self.enabled:
        try:
            # Persist to the cloud set so the URL is remembered after restarts.
            self.redis.sadd("nexus_processed_set", url)
        except Exception:
            # Best-effort: a Redis hiccup must not crash the posting loop.
            pass
|
| 98 |
|
| 99 |
def load_logs(self):
|
|
|
|
| 100 |
if not self.enabled: return []
|
| 101 |
try:
|
| 102 |
raw_logs = self.redis.lrange("nexus_logs", 0, 99)
|
| 103 |
parsed_logs = []
|
| 104 |
for log in raw_logs:
|
| 105 |
if isinstance(log, str):
|
| 106 |
+
try: parsed_logs.append(json.loads(log))
|
|
|
|
| 107 |
except: pass
|
| 108 |
+
else: parsed_logs.append(log)
|
|
|
|
| 109 |
return parsed_logs
|
| 110 |
+
except: return []
|
|
|
|
| 111 |
|
| 112 |
def save_log_entry(self, platform, title, url, original_text, our_reply):
|
| 113 |
entry = {
|
|
|
|
| 122 |
LOCAL_LOGS.insert(0, entry)
|
| 123 |
if len(LOCAL_LOGS) > 100: LOCAL_LOGS.pop()
|
| 124 |
|
| 125 |
+
if self.enabled:
|
| 126 |
+
try:
|
| 127 |
+
self.redis.lpush("nexus_logs", json.dumps(entry))
|
| 128 |
+
self.redis.ltrim("nexus_logs", 0, 499)
|
| 129 |
+
except: pass
|
|
|
|
| 130 |
return entry
|
| 131 |
|
| 132 |
log_manager = LogManager()
|
|
|
|
| 205 |
except Exception as e: return False, str(e)
|
| 206 |
|
| 207 |
# ==========================================
|
| 208 |
+
# 5. الحلقات (The Loops) - Protected
|
| 209 |
# ==========================================
|
| 210 |
|
| 211 |
# --- GitHub Loop ---
|
|
|
|
| 219 |
resp = requests.get(f"https://api.github.com/search/issues?q={kw}+state:open&sort=updated", headers=headers)
|
| 220 |
if resp.status_code == 200:
|
| 221 |
for item in resp.json().get("items", [])[:3]:
|
| 222 |
+
# 🛡️ الحماية الحديدية: فحص السحابة قبل العمل
|
| 223 |
+
if log_manager.is_processed(item['html_url']): continue
|
| 224 |
|
| 225 |
reply = generate_localized_reply(item['title'] + "\n" + (item['body'] or ""), "GitHub")
|
| 226 |
if reply:
|
| 227 |
post_resp = requests.post(item['url']+"/comments", headers=headers, json={"body": reply})
|
| 228 |
if post_resp.status_code == 201:
|
| 229 |
LOCAL_STATS["github"] += 1
|
| 230 |
+
# 🛡️ تسجيل الرابط في السحابة
|
| 231 |
+
log_manager.mark_processed(item['html_url'])
|
| 232 |
log_manager.save_log_entry("GitHub", item['title'], item['html_url'], item['body'], reply)
|
| 233 |
print(f"✅ GitHub Replied: {item['title'][:20]}")
|
| 234 |
time.sleep(60)
|
|
|
|
| 262 |
|
| 263 |
for url, title in extracted:
|
| 264 |
if LOCAL_STATS["email"] >= LIMITS["email_daily_send"]: break
|
| 265 |
+
# 🛡️ الحماية الحديدية
|
| 266 |
+
if log_manager.is_processed(url): continue
|
| 267 |
|
| 268 |
try:
|
| 269 |
await page.goto(url, timeout=20000)
|
|
|
|
| 279 |
ok, msg = send_smtp_email(target_email, data['subject'], data['body'])
|
| 280 |
if ok:
|
| 281 |
LOCAL_STATS["email"] += 1
|
| 282 |
+
# 🛡️ تسجيل الرابط
|
| 283 |
+
log_manager.mark_processed(url)
|
| 284 |
log_manager.save_log_entry("Email", title, url, f"Target: {target_email}", data['body'])
|
| 285 |
print(f"✅ Email Sent: {title}")
|
| 286 |
await asyncio.sleep(20)
|
|
|
|
| 310 |
username=REDDIT_USERNAME, password=REDDIT_PASSWORD, user_agent="NexusBot")
|
| 311 |
kw = random.choice(generate_polyglot_keywords())
|
| 312 |
for post in reddit.subreddit("all").search(kw, limit=3, sort="new"):
|
| 313 |
+
# 🛡️ الحماية الحديدية
|
| 314 |
+
if log_manager.is_processed(post.url): continue
|
| 315 |
+
|
| 316 |
reply = generate_localized_reply(post.title + "\n" + post.selftext, "Reddit")
|
| 317 |
if reply:
|
| 318 |
post.reply(reply)
|
| 319 |
LOCAL_STATS["reddit"] += 1
|
| 320 |
+
# 🛡️ تسجيل الرابط
|
| 321 |
+
log_manager.mark_processed(post.url)
|
| 322 |
log_manager.save_log_entry("Reddit", post.title, post.url, post.selftext or "Image", reply)
|
| 323 |
print(f"✅ Reddit Replied: {post.title[:20]}")
|
| 324 |
break
|
|
|
|
| 334 |
sched = BackgroundScheduler()
|
| 335 |
sched.add_job(reddit_job, 'interval', minutes=20)
|
| 336 |
sched.start()
|
| 337 |
+
print("🚀 All Nexus Systems Operational (With Iron Memory Protection)")
|
| 338 |
|
| 339 |
start_background_tasks()
|
| 340 |
|
| 341 |
def get_dashboard_stats():
    """Expose the current per-platform reply counters for the dashboard."""
    counters = LOCAL_STATS
    return tuple(counters[channel] for channel in ("reddit", "github", "email"))
|
| 343 |
|
|
|
|
| 344 |
def get_chat_history():
|
| 345 |
try:
|
| 346 |
logs = log_manager.load_logs()
|
|
|
|
| 349 |
logs = LOCAL_LOGS
|
| 350 |
|
| 351 |
formatted_chat = []
|
|
|
|
|
|
|
|
|
|
| 352 |
for log in logs:
|
|
|
|
| 353 |
user_content = f"**[{log['platform']}] {log['title']}**\n\n{log.get('original_text','')}\n\n🔗 [Link]({log['url']})"
|
|
|
|
|
|
|
|
|
|
| 354 |
bot_content = f"**🗓️ {log['timestamp']}**\n\n{log['our_reply']}"
|
| 355 |
+
formatted_chat.append({"role": "user", "content": user_content})
|
| 356 |
formatted_chat.append({"role": "assistant", "content": bot_content})
|
| 357 |
|
| 358 |
return formatted_chat
|
|
|
|
| 369 |
refresh_btn = gr.Button("🔄 Refresh Data & Logs")
|
| 370 |
|
| 371 |
gr.Markdown("### 💬 Live Interaction Logs")
|
|
|
|
| 372 |
chatbot_view = gr.Chatbot(label="History Log", height=600)
|
| 373 |
|
| 374 |
refresh_btn.click(get_dashboard_stats, outputs=[r, g, e])
|
|
|
|
| 378 |
demo.load(get_chat_history, outputs=[chatbot_view])
|
| 379 |
|
| 380 |
if __name__ == "__main__":
|
| 381 |
+
demo.launch(server_name="0.0.0.0", server_port=7860, theme=gr.themes.Base())
|