| import requests | |
| from telegram import Update | |
| from telegram.ext import ApplicationBuilder, MessageHandler, ContextTypes, filters | |
# Telegram bot token (obtain from @BotFather). Left empty here on purpose;
# the bot cannot authenticate until it is filled in.
BOT_TOKEN = ""

# llama.cpp HTTP server completion endpoint (llama-server's default port 8080).
LLAMA_API_URL = "http://127.0.0.1:8080/completion"
# Get a reply from llama.cpp
def get_llama_response(prompt: str) -> str:
    """Query the local llama.cpp completion endpoint and return its reply.

    Parameters:
        prompt: raw user text to send to the model.

    Returns:
        The model's generated text (stripped), or a Persian error message
        when the HTTP request fails, times out, or returns a non-OK status.
    """
    # Simple single-turn chat template; generation stops at "</s>" or when
    # the model starts a new "User:" turn.
    payload = {
        "prompt": f"User: {prompt}\nAssistant:",
        "max_tokens": 64,
        "temperature": 0.7,
        "stop": ["</s>", "User:"],
    }
    try:
        # Timeout keeps the bot responsive if the llama.cpp server is down
        # or generation stalls; without it, requests.post can block forever.
        response = requests.post(LLAMA_API_URL, json=payload, timeout=60)
    except requests.RequestException:
        # Connection refused / timeout etc. — report instead of crashing
        # the Telegram update handler.
        return "خطا در ارتباط با مدل زبان."
    if response.ok:
        # .get guards against a response body without a "content" key.
        return response.json().get("content", "").strip()
    return "خطا در ارتباط با مدل زبان."
# Handle messages that start with "/"
async def handle_command(update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
    """Forward a "/gemma ..." command's text to the LLM and reply with its answer.

    Parameters:
        update: incoming Telegram update carrying the user's message.
        context: handler context supplied by python-telegram-bot (unused).
    """
    message = update.message
    # BUG FIX: the original used message.text.lstrip('/gemma'), but lstrip
    # removes ANY of the characters {'/', 'g', 'e', 'm', 'a'} from the left,
    # so input like "/gemma am I right" lost the start of the actual text.
    # removeprefix drops exactly the command token; strip() trims whitespace.
    user_input = message.text.removeprefix('/gemma').strip()
    reply = get_llama_response(user_input)
    await message.reply_text(reply)
# Bot startup
if __name__ == "__main__":
    # Guard keeps importing this module side-effect free; running it as a
    # script behaves exactly as before.
    app = ApplicationBuilder().token(BOT_TOKEN).build()
    # Route every command-style message ("/...") to handle_command.
    app.add_handler(MessageHandler(filters.COMMAND, handle_command))
    # Blocks here, long-polling Telegram until interrupted.
    app.run_polling()