File size: 3,376 Bytes
c71b1a4
7205d56
c71b1a4
 
 
 
19af1c8
c71b1a4
 
7205d56
c71b1a4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
280cb4f
7205d56
c71b1a4
 
7205d56
280cb4f
c71b1a4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
import gradio as gr
from openai import OpenAI
import zipfile
import os
import fitz  # PyMuPDF
import docx

# Fetch the Moonshot API key from the environment (Hugging Face Spaces Secrets).
API_KEY = os.environ.get("MOONSHOT_API_KEY")

def extract_text_from_file(file_path):
    """Extract a text preview of a file for inclusion in the LLM prompt.

    Supports PDF (via PyMuPDF), Word .docx, and ZIP archives (index of
    contents only); anything else is read as UTF-8 text. Output is truncated
    to keep the prompt within the model's context window.

    Args:
        file_path: Path to the uploaded file on disk.

    Returns:
        A string starting with a header naming the file, followed by its
        (truncated) text content, or an inline error note if reading failed.
    """
    try:
        filename = os.path.basename(file_path)
        # splitext is safer than split('.')[-1]: a dotless name yields ''
        # instead of the whole filename.
        ext = os.path.splitext(filename)[1].lstrip('.').lower()
        if ext == 'pdf':
            with fitz.open(file_path) as doc:
                text = "".join(page.get_text() for page in doc)
                # Fix: interpolate the actual filename (was a literal placeholder).
                return f"\n[PDF: {filename}]\n" + text[:30000]
        elif ext == 'docx':
            doc_obj = docx.Document(file_path)
            text = "\n".join(p.text for p in doc_obj.paragraphs)
            return f"\n[Word: {filename}]\n" + text[:30000]
        elif ext == 'zip':
            # Archives are not extracted; only a listing of up to 50 entries
            # is reported.
            report = f"\n[ZIP: {filename}]\n"
            with zipfile.ZipFile(file_path, 'r') as z:
                files = z.namelist()
                report += f"يحتوي المجلد على {len(files)} ملف. فهرس أول 50 ملف:\n" + "\n".join(files[:50])
            return report
        else:
            # Fallback: treat as plain text, ignoring undecodable bytes.
            with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
                return f"\n[File: {filename}]\n" + f.read()[:20000]
    except Exception as e:
        # Never crash the chat flow on a bad file; report the error inline.
        return f"\n⚠️ خطأ في قراءة {os.path.basename(file_path)}: {str(e)}\n"

def respond(message, history, system_message, max_tokens, temperature):
    """Stream a chat reply from the Moonshot (Kimi) API.

    Generator callback for a Gradio ChatInterface configured with
    multimodal=True and type="messages".

    Args:
        message: dict with "text" (the user's prompt) and "files"
            (paths of uploaded files).
        history: prior turns as {"role", "content"} dicts. Entries whose
            content is not a plain string (e.g. file attachments echoed by
            Gradio) are skipped, since the OpenAI-style API expects text.
        system_message: system prompt text.
        max_tokens: completion token cap.
        temperature: sampling temperature.

    Yields:
        Progressively longer partial responses (Gradio streaming contract),
        or a single error message on failure.
    """
    if not API_KEY:
        yield "⚠️ خطأ: يرجى إضافة MOONSHOT_API_KEY في Secrets."
        return

    # Moonshot exposes an OpenAI-compatible endpoint.
    client = OpenAI(api_key=API_KEY, base_url="https://api.moonshot.cn/v1")

    user_text = message.get("text", "")
    files = message.get("files", [])

    context = ""
    if files:
        yield "⏳ جاري تحليل ملفاتك بواسطة Kimi-K2..."
        for f in files:
            # Gradio may deliver plain path strings or file objects (.name).
            path = f if isinstance(f, str) else f.name
            context += extract_text_from_file(path)

    messages = [{"role": "system", "content": system_message}]
    for msg in history:
        # Fix: forward only plain-text turns. Multimodal histories can
        # contain file-content entries that would be rejected by the API.
        if isinstance(msg, dict) and isinstance(msg.get("content"), str):
            messages.append({"role": msg["role"], "content": msg["content"]})

    # Fix: only add the context/question framing when file context exists;
    # otherwise send the user's text unmodified.
    full_user_msg = f"{context}\n\nالسؤال: {user_text}" if context else user_text
    messages.append({"role": "user", "content": full_user_msg})

    try:
        completion = client.chat.completions.create(
            model="moonshot-v1-128k",
            messages=messages,
            temperature=temperature,
            max_tokens=max_tokens,
            stream=True,
        )

        # Accumulate streamed deltas and re-yield the running text: Gradio
        # replaces the displayed message with each yielded value.
        response = ""
        for chunk in completion:
            content = chunk.choices[0].delta.content
            if content:
                response += content
                yield response
    except Exception as e:
        # Surface API errors in the chat rather than crashing the app.
        yield f"⚠️ خطأ من سيرفر Moonshot: {str(e)}"

# User interface: Gradio Blocks wrapping a multimodal chat.
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🤖 Kimi-K2 (Moonshot AI) File Analyzer")
    gr.ChatInterface(
        respond,
        multimodal=True,   # accept file uploads alongside text
        type="messages",   # history passed as {"role", "content"} dicts
        additional_inputs=[
            gr.Textbox(value="أنت مساعد ذكي يعتمد على نموذج Kimi-K2.", label="نظام المساعد"),
            gr.Slider(512, 16384, value=4096, label="Max Tokens"),
            gr.Slider(0.1, 1.5, value=0.7, label="Temperature"),
        ]
    )

# Bind to 0.0.0.0:7860 so the app is reachable inside Hugging Face Spaces.
if __name__ == "__main__":
    demo.queue().launch(server_name="0.0.0.0", server_port=7860)