| | import gradio as gr |
| | from openai import OpenAI |
| | import zipfile |
| | import os |
| | import fitz |
| | import docx |
| |
|
| | |
# Moonshot AI API key, injected via environment (e.g. HF Spaces "Secrets").
# May be None; respond() checks this and reports a friendly error.
API_KEY = os.environ.get("MOONSHOT_API_KEY")
| |
|
def extract_text_from_file(file_path):
    """Extract a labelled text preview from a PDF, Word, ZIP, or plain-text file.

    Returns a snippet prefixed with a header naming the file (truncated to
    keep the LLM prompt bounded), or an Arabic error note if reading fails.
    """
    filename = os.path.basename(file_path)
    try:
        # splitext is robust for extensionless names ('' -> plain-text branch),
        # unlike split('.')[-1] which returns the whole name.
        ext = os.path.splitext(filename)[1].lstrip('.').lower()
        if ext == 'pdf':
            with fitz.open(file_path) as doc:
                text = "".join(page.get_text() for page in doc)
            return f"\n[PDF: {filename}]\n" + text[:30000]
        elif ext == 'docx':
            doc_obj = docx.Document(file_path)
            text = "\n".join(p.text for p in doc_obj.paragraphs)
            return f"\n[Word: {filename}]\n" + text[:30000]
        elif ext == 'zip':
            # Don't extract archive members — just report a capped index.
            with zipfile.ZipFile(file_path, 'r') as z:
                names = z.namelist()
            listing = "\n".join(names[:50])
            return (f"\n[ZIP: {filename}]\n"
                    f"يحتوي المجلد على {len(names)} ملف. فهرس أول 50 ملف:\n{listing}")
        else:
            # Fallback: treat anything else as UTF-8 text, ignoring bad bytes.
            with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
                return f"\n[File: {filename}]\n" + f.read()[:20000]
    except Exception as e:
        # Best-effort: a bad file becomes a note in the prompt, not a crash.
        return f"\n⚠️ خطأ في قراءة {filename}: {str(e)}\n"
| |
|
def respond(message, history, system_message, max_tokens, temperature):
    """Stream a chat reply from Moonshot's Kimi model.

    *message* is a multimodal Gradio payload ({"text": ..., "files": [...]});
    any uploaded files are converted to text and prepended to the user
    prompt. Yields the growing response string as streamed chunks arrive,
    or a single error string if the API key is missing or the call fails.
    """
    if not API_KEY:
        yield "⚠️ خطأ: يرجى إضافة MOONSHOT_API_KEY في Secrets."
        return

    client = OpenAI(api_key=API_KEY, base_url="https://api.moonshot.cn/v1")

    user_text = message.get("text", "")
    uploads = message.get("files", [])

    context = ""
    if uploads:
        # Give immediate feedback while (potentially slow) extraction runs.
        yield "⏳ جاري تحليل ملفاتك بواسطة Kimi-K2..."
        paths = [item if isinstance(item, str) else item.name for item in uploads]
        context = "".join(extract_text_from_file(p) for p in paths)

    # System prompt first, then prior turns, then the new user message.
    messages = [{"role": "system", "content": system_message}, *history]
    messages.append({"role": "user", "content": f"{context}\n\nالسؤال: {user_text}"})

    try:
        stream = client.chat.completions.create(
            model="moonshot-v1-128k",
            messages=messages,
            temperature=temperature,
            max_tokens=max_tokens,
            stream=True,
        )

        partial = ""
        for chunk in stream:
            delta = chunk.choices[0].delta.content
            if delta:
                partial += delta
                yield partial
    except Exception as e:
        yield f"⚠️ خطأ من سيرفر Moonshot: {str(e)}"
| |
|
| | |
# Build the Gradio UI: a multimodal chat interface wired to respond().
with gr.Blocks(theme=gr.themes.Soft()) as demo:
    gr.Markdown("# 🤖 Kimi-K2 (Moonshot AI) File Analyzer")
    gr.ChatInterface(
        respond,
        multimodal=True,          # enables file uploads alongside text
        type="messages",          # history passed as OpenAI-style dicts
        additional_inputs=[
            # Extra controls forwarded as respond()'s trailing parameters.
            gr.Textbox(value="أنت مساعد ذكي يعتمد على نموذج Kimi-K2.", label="نظام المساعد"),
            gr.Slider(512, 16384, value=4096, label="Max Tokens"),
            gr.Slider(0.1, 1.5, value=0.7, label="Temperature"),
        ]
    )
| |
|
| | |
if __name__ == "__main__":
    # queue() is required for streaming (generator) handlers; bind to all
    # interfaces on port 7860 (the Hugging Face Spaces convention).
    demo.queue().launch(server_name="0.0.0.0", server_port=7860)
|