Update app.py
Browse files
app.py
CHANGED
|
@@ -12,10 +12,11 @@ UPLOAD_DIR = "static/uploads"
|
|
| 12 |
os.makedirs(UPLOAD_DIR, exist_ok=True)
|
| 13 |
app.mount("/static", StaticFiles(directory="static"), name="static")
|
| 14 |
|
|
|
|
| 15 |
HF_TOKEN = os.environ.get("HF_TOKEN")
|
|
|
|
| 16 |
client = InferenceClient(api_key=HF_TOKEN)
|
| 17 |
|
| 18 |
-
# دالة استخراج المحتوى من مختلف أنواع الملفات
|
| 19 |
def extract_file_content(file_id):
|
| 20 |
filepath = os.path.join(UPLOAD_DIR, file_id)
|
| 21 |
if not os.path.exists(filepath): return ""
|
|
@@ -31,16 +32,16 @@ def extract_file_content(file_id):
|
|
| 31 |
elif ext == ".zip":
|
| 32 |
with zipfile.ZipFile(filepath, 'r') as z:
|
| 33 |
names = z.namelist()
|
| 34 |
-
content = f"ملف
|
| 35 |
-
for name in names[:3]:
|
| 36 |
if name.endswith(('.txt', '.py', '.html', '.json')):
|
| 37 |
with z.open(name) as f:
|
| 38 |
-
content += f"\n--
|
| 39 |
elif ext in [".txt", ".py", ".js", ".html", ".json"]:
|
| 40 |
with open(filepath, "r", encoding="utf-8") as f:
|
| 41 |
content = f.read()
|
| 42 |
except Exception as e:
|
| 43 |
-
return f"خطأ
|
| 44 |
return content[:15000]
|
| 45 |
|
| 46 |
@app.get("/")
|
|
@@ -50,7 +51,6 @@ async def read_index():
|
|
| 50 |
|
| 51 |
@app.post("/api/upload")
|
| 52 |
async def upload_file(file: UploadFile = File(...)):
|
| 53 |
-
# حفظ الملف بامتداده الأصلي
|
| 54 |
ext = os.path.splitext(file.filename)[1]
|
| 55 |
file_id = f"{uuid.uuid4()}{ext}"
|
| 56 |
filepath = os.path.join(UPLOAD_DIR, file_id)
|
|
@@ -64,26 +64,35 @@ async def chat_stream(request: Request):
|
|
| 64 |
message = data.get("message", "")
|
| 65 |
file_id = data.get("file_id")
|
| 66 |
|
|
|
|
|
|
|
|
|
|
| 67 |
context = ""
|
| 68 |
if file_id:
|
| 69 |
file_text = extract_file_content(file_id)
|
| 70 |
-
context += f"\n[محتوى الملف
|
| 71 |
|
| 72 |
messages = [
|
| 73 |
-
{"role": "system", "content": "أنت مساعد
|
| 74 |
-
{"role": "user", "content": f"{context}\n
|
| 75 |
]
|
| 76 |
|
| 77 |
async def gen():
|
| 78 |
try:
|
|
|
|
| 79 |
stream = client.chat.completions.create(
|
| 80 |
model="huihui-ai/Qwen2.5-72B-Instruct-abliterated",
|
| 81 |
-
messages=messages,
|
|
|
|
|
|
|
|
|
|
| 82 |
)
|
| 83 |
for chunk in stream:
|
| 84 |
if chunk.choices[0].delta.content:
|
| 85 |
yield f"data: {json.dumps({'token': chunk.choices[0].delta.content})}\n\n"
|
| 86 |
except Exception as e:
|
| 87 |
-
|
|
|
|
|
|
|
| 88 |
|
| 89 |
return StreamingResponse(gen(), media_type="text/event-stream")
|
|
|
|
| 12 |
# Module-level setup: upload directory, static file serving, and the
# Hugging Face inference client used by the chat endpoint.
os.makedirs(UPLOAD_DIR, exist_ok=True)
app.mount("/static", StaticFiles(directory="static"), name="static")

# Make sure the token is fetched from the environment (set in Space Settings);
# may be None here — the chat endpoint checks for a missing token before use.
HF_TOKEN = os.environ.get("HF_TOKEN")
# Use the client with the token directly
client = InferenceClient(api_key=HF_TOKEN)
|
| 19 |
|
|
|
|
| 20 |
def extract_file_content(file_id):
|
| 21 |
filepath = os.path.join(UPLOAD_DIR, file_id)
|
| 22 |
if not os.path.exists(filepath): return ""
|
|
|
|
| 32 |
elif ext == ".zip":
|
| 33 |
with zipfile.ZipFile(filepath, 'r') as z:
|
| 34 |
names = z.namelist()
|
| 35 |
+
content = f"ملف ZIP يحتوي على: {', '.join(names)}\n"
|
| 36 |
+
for name in names[:3]:
|
| 37 |
if name.endswith(('.txt', '.py', '.html', '.json')):
|
| 38 |
with z.open(name) as f:
|
| 39 |
+
content += f"\n-- {name} --\n{f.read().decode('utf-8')[:1000]}"
|
| 40 |
elif ext in [".txt", ".py", ".js", ".html", ".json"]:
|
| 41 |
with open(filepath, "r", encoding="utf-8") as f:
|
| 42 |
content = f.read()
|
| 43 |
except Exception as e:
|
| 44 |
+
return f"خطأ قراءة: {str(e)}"
|
| 45 |
return content[:15000]
|
| 46 |
|
| 47 |
@app.get("/")
|
|
|
|
| 51 |
|
| 52 |
@app.post("/api/upload")
|
| 53 |
async def upload_file(file: UploadFile = File(...)):
|
|
|
|
| 54 |
ext = os.path.splitext(file.filename)[1]
|
| 55 |
file_id = f"{uuid.uuid4()}{ext}"
|
| 56 |
filepath = os.path.join(UPLOAD_DIR, file_id)
|
|
|
|
| 64 |
message = data.get("message", "")
|
| 65 |
file_id = data.get("file_id")
|
| 66 |
|
| 67 |
+
if not HF_TOKEN:
|
| 68 |
+
return JSONResponse({"error": "HF_TOKEN missing in Settings!"}, status_code=500)
|
| 69 |
+
|
| 70 |
context = ""
|
| 71 |
if file_id:
|
| 72 |
file_text = extract_file_content(file_id)
|
| 73 |
+
context += f"\n[محتوى الملف]:\n{file_text}\n"
|
| 74 |
|
| 75 |
messages = [
|
| 76 |
+
{"role": "system", "content": "أنت مساعد ذكي. حلل الملفات وأجب بالعربية."},
|
| 77 |
+
{"role": "user", "content": f"{context}\n{message}"}
|
| 78 |
]
|
| 79 |
|
| 80 |
async def gen():
    """Stream chat-completion tokens to the browser as SSE ``data:`` events.

    Yields one ``{"token": ...}`` JSON payload per generated token.  On any
    failure (bad token, rate limit, model error) it yields a single
    ``{"error": ...}`` payload instead of raising, so the HTTP stream always
    terminates cleanly on the client side.
    """
    try:
        stream = client.chat.completions.create(
            model="huihui-ai/Qwen2.5-72B-Instruct-abliterated",
            messages=messages,
            stream=True,
            max_tokens=2048,
            temperature=0.7,
        )
        for chunk in stream:
            # Guard: stream chunks may carry an empty `choices` list or a
            # delta whose `content` is None (role-only / final chunks).
            # Indexing `chunk.choices[0]` unconditionally raised IndexError
            # on such chunks and killed the stream mid-response.
            if not chunk.choices:
                continue
            token = chunk.choices[0].delta.content
            if token:
                yield f"data: {json.dumps({'token': token})}\n\n"
    except Exception as e:
        # Surface the real error to the UI so the cause is visible
        # (e.g. invalid token or server overload).
        yield f"data: {json.dumps({'error': str(e)})}\n\n"
|
| 97 |
|
| 98 |
return StreamingResponse(gen(), media_type="text/event-stream")
|