Spaces:
Running
Running
| #!/usr/bin/env python3 | |
| """ | |
| githubbot_v2.py โ GitHub Bot + Groq AI Assistant | |
| pip install python-telegram-bot httpx groq | |
| """ | |
| import asyncio | |
| import io | |
| import os, json, base64, logging, httpx, threading, time, traceback | |
| from logging.handlers import RotatingFileHandler | |
| from pathlib import Path | |
| from functools import wraps | |
| from datetime import datetime | |
| from groq import Groq | |
| from openai import OpenAI | |
| from huggingface_hub import InferenceClient | |
| from telegram import Update, InlineKeyboardButton, InlineKeyboardMarkup | |
| from telegram.ext import (Application, CommandHandler, CallbackQueryHandler, | |
| MessageHandler, filters, ContextTypes, ConversationHandler) | |
| # โโ ูุฑุงุกุฉ .env โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ | |
def _load_env():
    """Load KEY=VALUE pairs from githubbot.env or .env (first file found wins).

    Pre-existing environment variables are preserved (``setdefault``).
    Blank lines and ``#`` comments are skipped; matching surrounding
    quotes on values are stripped so ``KEY="value"`` behaves like
    ``KEY=value`` (common .env convention).
    """
    for name in ["githubbot.env", ".env"]:
        f = Path(__file__).parent / name
        if not f.exists():
            continue
        # Explicit utf-8: default locale encoding breaks non-ASCII values on Windows.
        for line in f.read_text(encoding="utf-8").splitlines():
            line = line.strip()
            if not line or line.startswith("#") or "=" not in line:
                continue
            k, v = line.split("=", 1)
            v = v.strip()
            # Strip one pair of matching surrounding quotes, if present.
            if len(v) >= 2 and v[0] == v[-1] and v[0] in "\"'":
                v = v[1:-1]
            os.environ.setdefault(k.strip(), v)
        return  # only the first existing file is read
_load_env()
def _persist_env(key: str, value: str) -> None:
    """Save (or replace) ``key=value`` in githubbot.env next to this script.

    Best-effort: failures (e.g. read-only filesystem) are logged, never raised.
    """
    path = Path(__file__).parent / "githubbot.env"
    try:
        # Explicit utf-8 on both read and write: tokens/labels may be non-ASCII
        # and the platform default encoding (e.g. cp1252) would corrupt them.
        lines = path.read_text(encoding="utf-8").splitlines() if path.exists() else []
        # Drop any previous assignment of this key, then append the new one.
        lines = [l for l in lines if not l.startswith(f"{key}=")]
        lines.append(f"{key}={value}")
        path.write_text("\n".join(lines) + "\n", encoding="utf-8")
    except Exception as e:
        log.warning("_persist_env failed (read-only FS?): %s", e)
# โโ Configuration from the environment (after .env was loaded) โโโโโโโโ
BOT_TOKEN = os.getenv("BOT_TOKEN", "")        # Telegram bot token
ADMIN_ID = int(os.getenv("ADMIN_ID", "0"))    # Telegram user id allowed past admin_only
GH_TOKEN = os.getenv("GH_TOKEN", "")          # GitHub personal access token (PAT)
GH_USER = os.getenv("GH_USER", "FAJU85")      # default GitHub account
GH_REPO = os.getenv("GH_REPO", "ORC_Dash_Last")  # default repository
GROQ_KEY = os.getenv("GROQ_KEY", "")          # Groq API key (chat + Whisper transcription)
DEEPSEEK_KEY = os.getenv("DEEPSEEK_KEY", "")  # DeepSeek API key
HF_TOKEN = os.getenv("HF_TOKEN", "")          # HuggingFace inference token
MISTRAL_KEY = os.getenv("MISTRAL_KEY", "")    # Mistral API key
# Console logging for the whole process
logging.basicConfig(format="%(asctime)s | %(levelname)s | %(message)s", level=logging.INFO)
log = logging.getLogger(__name__)
# โโ File logger (rotating, 5 MB × 3 backups) โโโโโโโโโโโ
_log_path = Path(__file__).parent / "githubbot.log"
_fh = RotatingFileHandler(str(_log_path), maxBytes=5*1024*1024, backupCount=3, encoding="utf-8")
_fh.setFormatter(logging.Formatter(
    "%(asctime)s | %(levelname)-8s | %(name)s | %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S"
))
logging.getLogger().addHandler(_fh)  # attach to root so all loggers write to file
| # โโ States โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ | |
# One ConversationHandler state per "waiting for user input" step.
(WAIT_PAT, WAIT_GROQ_KEY, WAIT_REPO_NAME, WAIT_COMMIT_MSG,
 WAIT_FILE_CONTENT, WAIT_FILE_PATH, WAIT_BRANCH_NAME,
 WAIT_ISSUE_TITLE, WAIT_ISSUE_BODY, WAIT_RELEASE_TAG,
 WAIT_MULTI_FILES, WAIT_PR_TITLE, WAIT_PR_BODY, WAIT_AI_KEY,
 ) = range(14)
| # โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ | |
| # GitHub API | |
| # โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ | |
class GitHub:
    """Minimal synchronous wrapper over the GitHub REST API v3.

    A single persistent ``httpx.Client`` is shared by all calls
    (connection pooling); every request funnels through :meth:`_req`,
    which adds logging and a small retry policy.
    """
    BASE = "https://api.github.com"

    def __init__(self, token, user):
        self.token = token
        self.user = user
        self.h = {"Authorization": f"Bearer {token}",
                  "Accept": "application/vnd.github+json",
                  "X-GitHub-Api-Version": "2022-11-28"}
        # Persistent client โ reused across all requests (connection pooling)
        self._session = httpx.Client(headers=self.h, timeout=15)

    def _c(self):
        # Legacy helper kept for backward compatibility: returns a NEW
        # client per call (callers must close it).  Prefer self._session.
        return httpx.Client(headers=self.h, timeout=15)

    def close(self):
        """Close the pooled HTTP session (call on shutdown)."""
        self._session.close()

    def _req(self, method: str, url: str, **kwargs):
        """Logged HTTP request with retry (3 attempts, exponential backoff).

        Retries on 429/5xx responses and on timeout / connection errors.
        Returns the final ``httpx.Response``; re-raises the last network
        error when every attempt failed.
        """
        last_exc: Exception | None = None
        for attempt in range(3):
            try:
                t0 = time.monotonic()
                r = getattr(self._session, method)(url, **kwargs)
                elapsed = (time.monotonic() - t0) * 1000
                log.debug("GH %s %s โ %d (%.0f ms)", method.upper(),
                          url.split("api.github.com")[-1], r.status_code, elapsed)
                if r.status_code >= 400:
                    log.warning("GH API error %d on %s %s | body: %s",
                                r.status_code, method.upper(),
                                url.split("api.github.com")[-1], r.text[:200])
                    # Retry on server errors and rate limiting
                    if r.status_code in (429, 500, 502, 503, 504) and attempt < 2:
                        wait = 2 ** attempt
                        log.warning("GH retry %d/2 in %ds (status=%d)", attempt + 1, wait, r.status_code)
                        time.sleep(wait)
                        continue
                return r
            except (httpx.TimeoutException, httpx.ConnectError) as e:
                last_exc = e
                if attempt < 2:
                    wait = 2 ** attempt
                    log.warning("GH network error (attempt %d/3): %s โ retrying in %ds",
                                attempt + 1, e, wait)
                    time.sleep(wait)
        # Reaching here means every attempt raised (all success paths
        # return inside the loop).  The previous `return r` fall-through
        # was dead code that could NameError on an unbound `r`.
        if last_exc is not None:
            raise last_exc
        raise RuntimeError("GitHub._req: retry loop exited without a response")

    # โโ Repos โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    def list_repos(self):
        """Latest 20 repos of the authenticated user ([] on error)."""
        r = self._req("get", f"{self.BASE}/user/repos?sort=updated&per_page=20&type=all")
        return r.json() if r.status_code == 200 else []

    def get_repo(self, repo):
        """Repo metadata dict, or None when missing / inaccessible."""
        r = self._req("get", f"{self.BASE}/repos/{self.user}/{repo}")
        return r.json() if r.status_code == 200 else None

    def create_repo(self, name, private=False, desc=""):
        """Create a repo (auto-initialised). Returns (response_json, status_code)."""
        r = self._req("post", f"{self.BASE}/user/repos",
                      json={"name": name, "private": private,
                            "description": desc, "auto_init": True})
        return r.json(), r.status_code

    def delete_repo(self, repo):
        """True when the repo was deleted (HTTP 204)."""
        r = self._req("delete", f"{self.BASE}/repos/{self.user}/{repo}")
        return r.status_code == 204

    # โโ Files โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    def list_files(self, repo, path="", branch="main"):
        """Directory listing at *path* on *branch* ([] on error)."""
        r = self._req("get", f"{self.BASE}/repos/{self.user}/{repo}/contents/{path}",
                      params={"ref": branch})
        return r.json() if r.status_code == 200 else []

    def get_file(self, repo, path, branch="main"):
        """Return (decoded_text, blob_sha) or (None, None) when missing."""
        r = self._req("get", f"{self.BASE}/repos/{self.user}/{repo}/contents/{path}",
                      params={"ref": branch})
        if r.status_code == 200:
            d = r.json()
            # Contents API returns base64; replace undecodable bytes rather than raise.
            content = base64.b64decode(d.get("content", "")).decode("utf-8", errors="replace")
            return content, d.get("sha", "")
        return None, None

    def create_or_update_file(self, repo, path, content, message, branch="main", sha=None):
        """Create (sha=None) or update (sha given) a file. Returns (ok, response_json)."""
        payload = {"message": message,
                   "content": base64.b64encode(content.encode()).decode(),
                   "branch": branch}
        if sha:
            payload["sha"] = sha
        r = self._req("put", f"{self.BASE}/repos/{self.user}/{repo}/contents/{path}",
                      json=payload)
        return r.status_code in (200, 201), r.json()

    def delete_file(self, repo, path, message, sha, branch="main"):
        """True when the file was deleted (requires its current blob *sha*)."""
        r = self._req("delete", f"{self.BASE}/repos/{self.user}/{repo}/contents/{path}",
                      json={"message": message, "sha": sha, "branch": branch})
        return r.status_code == 200

    def upload_multiple(self, repo, files: list[dict], branch="main"):
        """Upload several files; files = [{"path": "...", "content": "..."}, ...].

        Returns [{"path": ..., "ok": bool}, ...] in input order.
        """
        results = []
        for f in files:
            # Fetch the current sha (if any) so existing files are updated, not rejected.
            _, sha = self.get_file(repo, f["path"], branch)
            ok, _ = self.create_or_update_file(
                repo, f["path"], f["content"],
                f.get("message", f"add: {f['path']}"), branch, sha)
            results.append({"path": f["path"], "ok": ok})
        return results

    # โโ Commits โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    def list_commits(self, repo, branch="main", per_page=10):
        """Most recent commits on *branch* ([] on error)."""
        r = self._req("get", f"{self.BASE}/repos/{self.user}/{repo}/commits",
                      params={"sha": branch, "per_page": per_page})
        return r.json() if r.status_code == 200 else []

    # โโ Branches โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    def list_branches(self, repo):
        """All branches of *repo* ([] on error)."""
        r = self._req("get", f"{self.BASE}/repos/{self.user}/{repo}/branches")
        return r.json() if r.status_code == 200 else []

    def create_branch(self, repo, name, from_branch="main"):
        """Branch *name* off *from_branch*. Returns (ok, json_or_error_str)."""
        r = self._req("get", f"{self.BASE}/repos/{self.user}/{repo}/git/refs/heads/{from_branch}")
        if r.status_code != 200:
            return False, "ูุดู ุงูุญุตูู ุนูู SHA"
        sha = r.json()["object"]["sha"]
        r2 = self._req("post", f"{self.BASE}/repos/{self.user}/{repo}/git/refs",
                       json={"ref": f"refs/heads/{name}", "sha": sha})
        return r2.status_code == 201, r2.json()

    def delete_branch(self, repo, branch):
        """True when the branch ref was deleted (HTTP 204)."""
        r = self._req("delete",
                      f"{self.BASE}/repos/{self.user}/{repo}/git/refs/heads/{branch}")
        return r.status_code == 204

    # โโ Pull Requests โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    def list_prs(self, repo, state="open"):
        """Up to 10 PRs in the given state ([] on error)."""
        r = self._req("get", f"{self.BASE}/repos/{self.user}/{repo}/pulls",
                      params={"state": state, "per_page": 10})
        return r.json() if r.status_code == 200 else []

    def create_pr(self, repo, title, head, base="main", body=""):
        """Open a PR from *head* into *base*. Returns (ok, response_json)."""
        r = self._req("post", f"{self.BASE}/repos/{self.user}/{repo}/pulls",
                      json={"title": title, "head": head, "base": base, "body": body})
        return r.status_code == 201, r.json()

    def merge_pr(self, repo, pr_number, message=""):
        """Merge PR #*pr_number* (merge commit). Returns (ok, response_json)."""
        r = self._req("put", f"{self.BASE}/repos/{self.user}/{repo}/pulls/{pr_number}/merge",
                      json={"merge_method": "merge",
                            "commit_message": message or f"Merge PR #{pr_number}"})
        return r.status_code == 200, r.json()

    # โโ Issues โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    def list_issues(self, repo, state="open"):
        """Up to 10 issues in the given state ([] on error).

        NOTE(review): GitHub's issues endpoint also returns PRs; callers
        appear to tolerate this โ confirm before filtering.
        """
        r = self._req("get", f"{self.BASE}/repos/{self.user}/{repo}/issues",
                      params={"state": state, "per_page": 10})
        return r.json() if r.status_code == 200 else []

    def create_issue(self, repo, title, body=""):
        """Open an issue. Returns (ok, response_json)."""
        r = self._req("post", f"{self.BASE}/repos/{self.user}/{repo}/issues",
                      json={"title": title, "body": body})
        return r.status_code == 201, r.json()

    def close_issue(self, repo, number):
        """True when issue #*number* was closed."""
        r = self._req("patch", f"{self.BASE}/repos/{self.user}/{repo}/issues/{number}",
                      json={"state": "closed"})
        return r.status_code == 200

    # โโ Releases โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    def list_releases(self, repo):
        """Latest 5 releases ([] on error)."""
        r = self._req("get", f"{self.BASE}/repos/{self.user}/{repo}/releases?per_page=5")
        return r.json() if r.status_code == 200 else []

    def create_release(self, repo, tag, name="", body=""):
        """Publish a release for *tag*. Returns (ok, response_json)."""
        r = self._req("post", f"{self.BASE}/repos/{self.user}/{repo}/releases",
                      json={"tag_name": tag, "name": name or tag, "body": body})
        return r.status_code == 201, r.json()

    # โโ Stats โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    def get_profile(self):
        """Authenticated user's profile ({} on error)."""
        r = self._req("get", f"{self.BASE}/user")
        return r.json() if r.status_code == 200 else {}

    def get_traffic(self, repo):
        """(views_json, clones_json) for the last 14 days; {} on error each."""
        v = self._req("get", f"{self.BASE}/repos/{self.user}/{repo}/traffic/views")
        cl = self._req("get", f"{self.BASE}/repos/{self.user}/{repo}/traffic/clones")
        return (v.json() if v.status_code == 200 else {},
                cl.json() if cl.status_code == 200 else {})
| # โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ | |
| # Groq AI | |
| # โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ | |
# OpenAI-style tool schemas advertised to the chat models.  Descriptions
# are in Arabic (the bot's user-facing language); the "name" fields map
# 1:1 onto the branches of _execute_tool below.
TOOLS = [
    {"type":"function","function":{"name":"list_repos",
        "description":"ุนุฑุถ ูุงุฆู ุฉ ุงูุฑูุจููุงุช","parameters":{"type":"object","properties":{}}}},
    {"type":"function","function":{"name":"get_repo_info",
        "description":"ู ุนููู ุงุช ูุฅุญุตุงุฆูุงุช ุฑูุจู",
        "parameters":{"type":"object","properties":{
            "repo":{"type":"string","description":"ุงุณู ุงูุฑูุจู"}},"required":["repo"]}}},
    {"type":"function","function":{"name":"list_commits",
        "description":"ุนุฑุถ ุขุฎุฑ commits",
        "parameters":{"type":"object","properties":{
            "repo":{"type":"string"},"branch":{"type":"string","default":"main"}},"required":["repo"]}}},
    {"type":"function","function":{"name":"list_files",
        "description":"ุนุฑุถ ู ููุงุช ูู ุฌูุฏุงุช",
        "parameters":{"type":"object","properties":{
            "repo":{"type":"string"},"path":{"type":"string","default":""}},"required":["repo"]}}},
    {"type":"function","function":{"name":"list_branches",
        "description":"ุนุฑุถ ุงูู branches",
        "parameters":{"type":"object","properties":{
            "repo":{"type":"string"}},"required":["repo"]}}},
    {"type":"function","function":{"name":"list_issues",
        "description":"ุนุฑุถ ุงูู issues",
        "parameters":{"type":"object","properties":{
            "repo":{"type":"string"},"state":{"type":"string","default":"open"}},"required":["repo"]}}},
    {"type":"function","function":{"name":"list_prs",
        "description":"ุนุฑุถ Pull Requests",
        "parameters":{"type":"object","properties":{
            "repo":{"type":"string"},"state":{"type":"string","default":"open"}},"required":["repo"]}}},
    {"type":"function","function":{"name":"create_branch",
        "description":"ุฅูุดุงุก branch ุฌุฏูุฏ",
        "parameters":{"type":"object","properties":{
            "repo":{"type":"string"},"name":{"type":"string"},
            "from_branch":{"type":"string","default":"main"}},"required":["repo","name"]}}},
    {"type":"function","function":{"name":"create_issue",
        "description":"ุฅูุดุงุก issue ุฌุฏูุฏ",
        "parameters":{"type":"object","properties":{
            "repo":{"type":"string"},"title":{"type":"string"},
            "body":{"type":"string","default":""}},"required":["repo","title"]}}},
    {"type":"function","function":{"name":"merge_pr",
        "description":"ุฏู ุฌ Pull Request",
        "parameters":{"type":"object","properties":{
            "repo":{"type":"string"},"pr_number":{"type":"integer"}},"required":["repo","pr_number"]}}},
    {"type":"function","function":{"name":"create_release",
        "description":"ุฅูุดุงุก release ุฌุฏูุฏ",
        "parameters":{"type":"object","properties":{
            "repo":{"type":"string"},"tag":{"type":"string"},
            "name":{"type":"string","default":""},"body":{"type":"string","default":""}},"required":["repo","tag"]}}},
]
# Arabic system prompt shared by every provider; the default GitHub
# user/repo are interpolated once at import time.
SYSTEM_PROMPT = f"""ุฃูุช ู ุณุงุนุฏ ุฐูู ู ุชุฎุตุต ูู ุฅุฏุงุฑุฉ GitHub ููู ุทูุฑ {GH_USER}.
ุชุชุญุฏุซ ุจุงูุนุฑุจูุฉ ุฏุงุฆู ุงู ูุชููู ุงูุฃูุงู ุฑ ุงูุนุฑุจูุฉ ูุงูุฅูุฌููุฒูุฉ.
ุงูุฑูุจู ุงูุงูุชุฑุงุถู ูู {GH_REPO}.
ุนูุฏ ุงูุญุงุฌุฉ ูู ุนููู ุงุช ู ู GitHubุ ุงุณุชุฎุฏู ุงูุฃุฏูุงุช ุงูู ุชุงุญุฉ.
ูุฏูู ุงูู ุนููู ุงุช ุจุดูู ู ูุธู ููุงุถุญ.
ุฃุฎุจุฑ ุงูู ุณุชุฎุฏู ุจุงูุชุทูุฑุงุช ูุงูุชุบููุฑุงุช ุงูู ูู ุฉ.
ุฅุฐุง ุทููุจ ู ูู ุฅูุดุงุก ุดูุก ุฃู ุชุนุฏููุ ุงุณุชุฎุฏู ุงูุฃุฏุงุฉ ุงูู ูุงุณุจุฉ."""
# One Groq client instance per API key.
_groq_cache: dict[str, Groq] = {}
def _groq_client(key: str) -> Groq:
    """Return a cached Groq client (30 s timeout), creating it on first use."""
    try:
        return _groq_cache[key]
    except KeyError:
        client = Groq(api_key=key, timeout=30.0)
        _groq_cache[key] = client
        return client
| # โโ Multi-provider AI registry โโโโโโโโโโโโโโโโโโโโโโโโโ | |
# Registry of supported AI back-ends.
#   label          : human-readable name shown on keyboards
#   models         : selectable model ids (first entry is the default)
#   supports_tools : whether the provider accepts OpenAI-style tool calls
#   env_key        : environment variable holding a global API key
#   user_key       : ctx.user_data field holding a per-user key
PROVIDERS: dict[str, dict] = {
    "groq": {
        "label": "Groq",
        "models": ["llama-3.3-70b-versatile", "mixtral-8x7b-32768", "gemma2-9b-it"],
        "supports_tools": True,
        "env_key": "GROQ_KEY",
        "user_key": "groq_key",
    },
    "deepseek": {
        "label": "DeepSeek",
        "models": ["deepseek-chat", "deepseek-reasoner"],
        "supports_tools": True,
        "env_key": "DEEPSEEK_KEY",
        "user_key": "deepseek_key",
    },
    "huggingface": {
        "label": "HuggingFace",
        "models": [
            "humain-ai/ALLaM-7B-Instruct-preview",
            "Qwen/Qwen2.5-72B-Instruct",
            "google/gemma-3-27b-it",
            "mistralai/Mixtral-8x7B-Instruct-v0.1",
            "deepseek-ai/DeepSeek-R1",
        ],
        "supports_tools": False,  # plain chat only โ routed to _call_hf
        "env_key": "HF_TOKEN",
        "user_key": "hf_token",
    },
    "mistral": {
        "label": "Mistral",
        "models": ["mistral-large-latest", "mistral-small-latest", "codestral-latest"],
        "supports_tools": True,
        "env_key": "MISTRAL_KEY",
        "user_key": "mistral_key",
    },
}
# Client caches: one OpenAI-compatible client per (base_url, key) pair,
# one HuggingFace InferenceClient per token.
_openai_cache: dict[str, OpenAI] = {}
_hf_cache_dict: dict[str, InferenceClient] = {}
def _openai_client(base_url: str, key: str) -> OpenAI:
    """Cached OpenAI-compatible client for the given endpoint + key."""
    ck = f"{base_url}:{key}"
    try:
        return _openai_cache[ck]
    except KeyError:
        client = OpenAI(api_key=key, base_url=base_url, timeout=30.0)
        _openai_cache[ck] = client
        return client
def _hf_client(key: str) -> InferenceClient:
    """Cached HuggingFace inference client for the given token."""
    try:
        return _hf_cache_dict[key]
    except KeyError:
        client = InferenceClient(api_key=key, timeout=30.0)
        _hf_cache_dict[key] = client
        return client
def _get_ai_key(provider: str, ctx) -> str:
    """Resolve the API key for *provider*: environment value first, then the
    per-user key stored in ``ctx.user_data`` ("" when neither exists)."""
    from_env = {
        "groq": GROQ_KEY, "deepseek": DEEPSEEK_KEY,
        "huggingface": HF_TOKEN, "mistral": MISTRAL_KEY,
    }.get(provider, "")
    if from_env:
        return from_env
    if not ctx:
        return ""
    field = PROVIDERS.get(provider, {}).get("user_key", "")
    return ctx.user_data.get(field, "")
def _execute_tool(gh: "GitHub", name: str, args: dict) -> str:
    """Run one model-requested GitHub tool call; always returns a JSON string."""
    target = args.get("repo", GH_REPO)  # default repo when the model omits it

    def _repos():
        repos = gh.list_repos()
        return json.dumps([{"name": r["name"], "stars": r.get("stargazers_count", 0),
                            "private": r.get("private", False),
                            "updated": r.get("updated_at", "")[:10],
                            "language": r.get("language", "")} for r in repos[:10]])

    def _repo_info():
        info = gh.get_repo(target)
        if not info:
            return json.dumps({"error": f"repo {target} not found"})
        views, clones = gh.get_traffic(target)
        return json.dumps({"name": target, "stars": info.get("stargazers_count", 0),
                           "forks": info.get("forks_count", 0),
                           "issues": info.get("open_issues_count", 0),
                           "language": info.get("language", ""),
                           "size_kb": info.get("size", 0),
                           "private": info.get("private", False),
                           "views_14d": views.get("count", 0),
                           "clones_14d": clones.get("count", 0)})

    def _commits():
        commits = gh.list_commits(target, args.get("branch", "main"))
        return json.dumps([{"sha": c["sha"][:7],
                            "message": c["commit"]["message"].splitlines()[0],
                            "author": c["commit"]["author"]["name"],
                            "date": c["commit"]["author"]["date"][:10]}
                           for c in commits[:10]])

    def _files():
        listing = gh.list_files(target, args.get("path", ""))
        if not isinstance(listing, list):
            # contents API returns a dict for single files / errors
            return json.dumps({"error": str(listing)})
        return json.dumps([{"name": i["name"], "type": i["type"],
                            "size": i.get("size", 0)} for i in listing])

    def _branches():
        return json.dumps([b["name"] for b in gh.list_branches(target)])

    def _issues():
        found = gh.list_issues(target, args.get("state", "open"))
        return json.dumps([{"number": i["number"], "title": i["title"],
                            "state": i["state"], "author": i["user"]["login"]}
                           for i in found[:10]])

    def _prs():
        found = gh.list_prs(target, args.get("state", "open"))
        return json.dumps([{"number": p["number"], "title": p["title"],
                            "head": p["head"]["ref"], "base": p["base"]["ref"]}
                           for p in found[:10]])

    def _new_branch():
        ok, _ = gh.create_branch(target, args["name"], args.get("from_branch", "main"))
        return json.dumps({"success": ok, "branch": args["name"]})

    def _new_issue():
        ok, res = gh.create_issue(target, args["title"], args.get("body", ""))
        return json.dumps({"success": ok, "number": res.get("number", ""), "title": args["title"]})

    def _merge():
        ok, _ = gh.merge_pr(target, args["pr_number"])
        return json.dumps({"success": ok, "pr": args["pr_number"]})

    def _release():
        ok, res = gh.create_release(target, args["tag"], args.get("name", ""), args.get("body", ""))
        return json.dumps({"success": ok, "tag": args["tag"], "url": res.get("html_url", "")})

    dispatch = {
        "list_repos": _repos, "get_repo_info": _repo_info,
        "list_commits": _commits, "list_files": _files,
        "list_branches": _branches, "list_issues": _issues,
        "list_prs": _prs, "create_branch": _new_branch,
        "create_issue": _new_issue, "merge_pr": _merge,
        "create_release": _release,
    }
    handler = dispatch.get(name)
    if handler is None:
        log.warning("Unknown tool: %s", name)
        return json.dumps({"error": f"ุฃุฏุงุฉ ุบูุฑ ู ุนุฑููุฉ: {name}"})
    return handler()
def call_groq_with_tools(gh: GitHub, user_message: str, history: list, groq_key: str = "") -> str:
    """Chat with Groq (llama-3.3-70b) with GitHub tool-calling enabled.

    history: prior chat turns; only the last 10 are sent.
    Returns the assistant's text, or an Arabic error string โ never raises.
    """
    key = groq_key or GROQ_KEY
    if not key:
        log.warning("call_groq_with_tools: no GROQ_KEY available")
        return "โ GROQ_KEY ุบูุฑ ู ูุฌูุฏ โ ุฃุถูู ู ู ูุงุฆู ุฉ ุงูุฅุนุฏุงุฏุงุช"
    log.info("Groq call | msg=%.80s | history_len=%d", user_message, len(history))
    t0 = time.monotonic()
    try:
        client = _groq_client(key)
        # Bound the prompt: system prompt + last 10 turns + new user message.
        messages = [{"role":"system","content":SYSTEM_PROMPT}] + history[-10:] + \
                   [{"role":"user","content":user_message}]
        response = client.chat.completions.create(
            model="llama-3.3-70b-versatile",
            messages=messages,
            tools=TOOLS,
            tool_choice="auto",
            max_tokens=2048,
        )
        msg = response.choices[0].message
        if not msg.tool_calls:
            # Model answered directly โ no tool round-trip needed.
            elapsed = (time.monotonic() - t0) * 1000
            log.info("Groq response (no tools) in %.0f ms | len=%d", elapsed, len(msg.content or ""))
            return msg.content or ""
        log.info("Groq requested %d tool(s): %s", len(msg.tool_calls),
                 [tc.function.name for tc in msg.tool_calls])
        # Echo the assistant turn (with its tool_calls) back into the
        # transcript so the follow-up request is a valid tool-use dialog.
        messages.append({"role":"assistant","content":msg.content or "",
                         "tool_calls":[{"id":tc.id,"type":"function",
                                        "function":{"name":tc.function.name,
                                                    "arguments":tc.function.arguments}}
                                       for tc in msg.tool_calls]})
        for tc in msg.tool_calls:
            name = tc.function.name
            try:
                args = json.loads(tc.function.arguments)
            except json.JSONDecodeError as e:
                # Model emitted malformed JSON args โ run the tool with defaults.
                log.warning("Failed to parse tool args for %s: %s | raw=%s",
                            name, e, tc.function.arguments[:200])
                args = {}
            result = _execute_tool(gh, name, args)
            log.debug("Tool %s โ result_len=%d", name, len(result))
            messages.append({"role":"tool","tool_call_id":tc.id,"content":result})
        # Second round-trip: let the model phrase a final answer from the tool output.
        final = client.chat.completions.create(
            model="llama-3.3-70b-versatile",
            messages=messages,
            max_tokens=2048,
        )
        elapsed = (time.monotonic() - t0) * 1000
        content = final.choices[0].message.content or ""
        log.info("Groq final response in %.0f ms | len=%d", elapsed, len(content))
        return content
    except Exception as e:
        # Runs inside a Telegram handler: report the error as chat text, don't crash.
        log.error("call_groq_with_tools exception: %s\n%s", e, traceback.format_exc())
        return f"โ ุฎุทุฃ ูู ุงูุงุชุตุงู ุจู Groq: {e}"
def _call_openai_compat(gh: "GitHub", user_message: str, history: list,
                        client: OpenAI, model: str, supports_tools: bool = True) -> str:
    """Chat via any OpenAI-compatible endpoint (DeepSeek, Mistral, ...).

    Mirrors ``call_groq_with_tools``: one optional tool round-trip through
    ``_execute_tool``.  Returns assistant text or an error string โ never raises.
    """
    log.info("AI call | model=%s | msg=%.80s", model, user_message)
    t0 = time.monotonic()
    try:
        # System prompt + last 10 history turns + the new user message.
        messages = [{"role": "system", "content": SYSTEM_PROMPT}] + history[-10:] + \
                   [{"role": "user", "content": user_message}]
        kwargs: dict = {"model": model, "messages": messages, "max_tokens": 2048}
        if supports_tools:
            kwargs["tools"] = TOOLS
            kwargs["tool_choice"] = "auto"
        response = client.chat.completions.create(**kwargs)
        msg = response.choices[0].message
        if not supports_tools or not msg.tool_calls:
            elapsed = (time.monotonic() - t0) * 1000
            log.info("AI response in %.0f ms", elapsed)
            return msg.content or ""
        log.info("AI requested %d tool(s)", len(msg.tool_calls))
        # Replay the assistant turn with its tool_calls, then append each result.
        messages.append({"role": "assistant", "content": msg.content or "",
                         "tool_calls": [{"id": tc.id, "type": "function",
                                         "function": {"name": tc.function.name,
                                                      "arguments": tc.function.arguments}}
                                        for tc in msg.tool_calls]})
        for tc in msg.tool_calls:
            try:
                args = json.loads(tc.function.arguments)
            except json.JSONDecodeError:
                args = {}  # malformed tool args โ fall back to defaults
            result = _execute_tool(gh, tc.function.name, args)
            log.debug("Tool %s โ len=%d", tc.function.name, len(result))
            messages.append({"role": "tool", "tool_call_id": tc.id, "content": result})
        # Final round-trip for the natural-language answer.
        final = client.chat.completions.create(model=model, messages=messages, max_tokens=2048)
        elapsed = (time.monotonic() - t0) * 1000
        content = final.choices[0].message.content or ""
        log.info("AI final response in %.0f ms | len=%d", elapsed, len(content))
        return content
    except Exception as e:
        # Handler context: surface errors as chat text instead of raising.
        log.error("AI call exception: %s\n%s", e, traceback.format_exc())
        return f"โ ุฎุทุฃ ูู ุงูุงุชุตุงู ุจู AI: {e}"
def _call_hf(user_message: str, history: list, client: InferenceClient, model: str) -> str:
    """Single-turn chat completion via HuggingFace Inference (no tool support)."""
    log.info("HF call | model=%s | msg=%.80s", model, user_message)
    started = time.monotonic()
    try:
        convo = [{"role": "system", "content": SYSTEM_PROMPT}]
        convo.extend(history[-10:])
        convo.append({"role": "user", "content": user_message})
        reply = client.chat.completions.create(model=model, messages=convo, max_tokens=2048)
        text = reply.choices[0].message.content or ""
        log.info("HF response in %.0f ms | len=%d",
                 (time.monotonic() - started) * 1000, len(text))
        return text
    except Exception as e:
        log.error("HF call exception: %s\n%s", e, traceback.format_exc())
        return f"โ ุฎุทุฃ ูู ุงูุงุชุตุงู ุจู HuggingFace: {e}"
def call_ai_with_tools(gh: "GitHub", user_message: str, history: list,
                       provider: str, model: str, key: str) -> str:
    """Unified AI router: dispatch the request to the selected provider."""
    if not key:
        prov_label = PROVIDERS.get(provider, {}).get("label", provider)
        return f"โ {prov_label} Key ุบูุฑ ู ูุฌูุฏ โ ุฃุถูู ู ู โ๏ธ ุงูุฅุนุฏุงุฏุงุช"
    if provider == "groq":
        return call_groq_with_tools(gh, user_message, history, key)
    if provider == "huggingface":
        return _call_hf(user_message, history, _hf_client(key), model)
    # Remaining providers speak the OpenAI wire protocol at their own base URL.
    openai_bases = {"deepseek": "https://api.deepseek.com",
                    "mistral": "https://api.mistral.ai/v1"}
    base = openai_bases.get(provider)
    if base is not None:
        return _call_openai_compat(gh, user_message, history,
                                   _openai_client(base, key), model, True)
    log.warning("Unknown provider: %s", provider)
    return "โ ู ุฒูุฏ AI ุบูุฑ ู ุนุฑูู"
| # โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ | |
| # Helpers | |
| # โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ | |
# One GitHub wrapper per token (the empty token shares one entry too).
_gh_cache: dict[str, GitHub] = {}
def gh(ctx=None) -> GitHub:
    """Return a cached GitHub client for the effective token
    (global GH_TOKEN first, then the user's stored PAT)."""
    token = GH_TOKEN
    if not token and ctx:
        token = ctx.user_data.get("gh_token", "")
    client = _gh_cache.get(token)
    if client is None:
        client = GitHub(token, GH_USER)
        _gh_cache[token] = client
    return client
def repo(ctx) -> str:
    """Currently selected repository for this user (default: GH_REPO)."""
    data = ctx.user_data
    if "current_repo" in data:
        return data["current_repo"]
    return GH_REPO
def fmt_date(s):
    """Format an ISO-8601 timestamp (GitHub style, e.g. ``2024-05-01T12:00:00Z``)
    as ``YYYY-MM-DD``.

    On parse failure returns the first 10 characters of *s*; falsy input
    (None, "") yields "".
    """
    try:
        return datetime.fromisoformat(s.replace("Z", "+00:00")).strftime("%Y-%m-%d")
    except Exception:  # narrowed from bare except: keep SystemExit/KeyboardInterrupt fatal
        return s[:10] if s else ""
def admin_only(func):
    """Decorator: restrict a handler to the configured ADMIN_ID.

    Non-admin users get an Arabic "not authorized" reply and the handler
    body never runs.  Uses ``functools.wraps`` (already imported but
    previously unused) so the wrapped handler keeps its name/docstring
    for logging and debugging.
    """
    @wraps(func)
    async def wrapper(update: "Update", ctx: "ContextTypes.DEFAULT_TYPE"):
        if update.effective_user.id != ADMIN_ID:
            await update.effective_message.reply_text("โ ุบูุฑ ู ุตุฑุญ.")
            return
        return await func(update, ctx)
    return wrapper
| # โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ | |
| # Keyboards | |
| # โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ | |
def main_kb(r=""):
    """Main-menu inline keyboard; *r* is the active repo name (default GH_REPO)."""
    active = r or GH_REPO
    layout = [
        [(f"๐ {active}", "repos"), ("๐ ุชุจุฏูู", "switch_repo")],
        [("๐ ุงูู ููุงุช", "files"), ("๐ Commits", "commits")],
        [("๐ฟ Branches", "branches"), ("๐ Pull Requests", "prs")],
        [("๐ Issues", "issues"), ("๐ Releases", "releases")],
        [("๐ ุฅุญุตุงุฆูุงุช", "stats"), ("๐ค ุฑูุน ู ููุงุช", "upload_files")],
        [("โ ู ูู ุฌุฏูุฏ", "new_file"), ("๐ฟ Branch ุฌุฏูุฏ", "new_branch")],
        [("๐ Issue ุฌุฏูุฏ", "new_issue"), ("๐ Release ุฌุฏูุฏ", "new_release")],
        [("๐ค ู ุณุงุนุฏ AI", "ai_chat"), ("โ๏ธ ุฅุนุฏุงุฏุงุช", "settings")],
    ]
    return InlineKeyboardMarkup(
        [[InlineKeyboardButton(label, callback_data=data) for label, data in row]
         for row in layout])
def settings_kb(gh_ok=False, ai_ok=False, ai_provider="groq"):
    """Settings keyboard; check-marks reflect which credentials are set."""
    def status(flag):
        return 'โ ' if flag else 'โ'
    provider_info = PROVIDERS.get(ai_provider, PROVIDERS["groq"])
    rows = [
        [InlineKeyboardButton(f"๐ GitHub PAT {status(gh_ok)}", callback_data="set_pat"),
         InlineKeyboardButton(f"๐ค AI Key {status(ai_ok)}", callback_data="set_ai_key")],
        [InlineKeyboardButton(f"๐ AI: {provider_info['label']}", callback_data="set_ai_provider")],
        [InlineKeyboardButton("๐ฆ ุฑูุจู ุฌุฏูุฏ", callback_data="new_repo")],
        [InlineKeyboardButton("โ๏ธ ุงูุฑุฆูุณูุฉ", callback_data="menu")],
    ]
    return InlineKeyboardMarkup(rows)
def ai_provider_kb(current="groq"):
    """Provider picker; the active provider is prefixed with a check-mark."""
    buttons = []
    for pid, pinfo in PROVIDERS.items():
        prefix = "โ " if pid == current else ""
        buttons.append([InlineKeyboardButton(f"{prefix}{pinfo['label']}",
                                             callback_data=f"set_provider_{pid}")])
    buttons.append([InlineKeyboardButton("โ๏ธ ุงูุฑุฆูุณูุฉ", callback_data="menu")])
    return InlineKeyboardMarkup(buttons)
def ai_model_kb(provider: str, current_model: str = ""):
    """Model picker for *provider*; callback_data carries the model index."""
    choices = PROVIDERS.get(provider, PROVIDERS["groq"])["models"]
    buttons = []
    for idx, model_id in enumerate(choices):
        short = model_id.split("/")[-1]  # drop any "org/" prefix for display
        prefix = "โ " if model_id == current_model else ""
        buttons.append([InlineKeyboardButton(f"{prefix}{short}",
                                             callback_data=f"set_model_{idx}")])
    buttons.append([InlineKeyboardButton("โ๏ธ ุงูุฑุฆูุณูุฉ", callback_data="menu")])
    return InlineKeyboardMarkup(buttons)
def back_kb():
    """Single-button keyboard: back to the main menu."""
    home = InlineKeyboardButton("โ๏ธ ุงูุฑุฆูุณูุฉ", callback_data="menu")
    return InlineKeyboardMarkup([[home]])
def cancel_kb():
    """Single-button keyboard: abort the current conversation step."""
    abort = InlineKeyboardButton("โ ุฅูุบุงุก", callback_data="menu")
    return InlineKeyboardMarkup([[abort]])
def ai_kb():
    """Quick-action shortcuts for the AI assistant."""
    shortcuts = [
        ("๐ ุงุทูุนูู ุนูู ุขุฎุฑ ุงูุชุทูุฑุงุช", "ai_updates"),
        ("๐ ู ูุฎุต ุงูุฑูุจู", "ai_summary"),
        ("๐ Issues ุงูู ูุชูุญุฉ", "ai_issues"),
        ("๐ PRs ุงูู ุนููุฉ", "ai_prs"),
        ("โ๏ธ ุงูุฑุฆูุณูุฉ", "menu"),
    ]
    return InlineKeyboardMarkup(
        [[InlineKeyboardButton(label, callback_data=data)] for label, data in shortcuts])
| # โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ | |
| # Handlers | |
| # โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ | |
async def start(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """/start handler: greet the user with profile stats and the main menu."""
    user_id = update.effective_user.id
    username = update.effective_user.username or str(user_id)
    log.info("start | user=%s (id=%d)", username, user_id)
    g = gh(ctx)
    # Profile fetch is blocking httpx I/O โ run it off the event loop.
    profile = await asyncio.to_thread(g.get_profile)
    name = profile.get("name") or profile.get("login", GH_USER)
    repos_c = profile.get("public_repos", 0)
    r = repo(ctx)
    provider = ctx.user_data.get("ai_provider", "groq")
    model = ctx.user_data.get("ai_model", PROVIDERS[provider]["models"][0])
    ai_ok = bool(_get_ai_key(provider, ctx))
    ai_status = "โ " if ai_ok else "โ"
    gh_status = "โ " if GH_TOKEN or ctx.user_data.get("gh_token") else "โ"
    prov_label = PROVIDERS[provider]["label"]
    text = (f"๐ ู ุฑุญุจุงู *{name}*!\n\n"
            f"๐ฆ Repos: {repos_c}\n"
            f"๐ ุงูุฑูุจู ุงูุญุงูู: `{r}`\n\n"
            f"GitHub PAT: {gh_status} | AI ({prov_label}): {ai_status}\n"
            f"๐ค ุงููู ูุฐุฌ: `{model.split('/')[-1]}`\n\n"
            f"๐ฌ ูู ููู ุงููุชุงุจุฉ ู ุจุงุดุฑุฉ ููู ุณุงุนุฏ AI")
    try:
        await update.message.reply_text(text, parse_mode="Markdown",
                                        reply_markup=main_kb(r))
    except Exception:
        # Markdown parsing can fail on odd repo/user names โ resend as plain text.
        await update.message.reply_text(text, reply_markup=main_kb(r))
async def handle_text(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Free-text messages: route through the AI assistant with tool access."""
    msg = update.message.text.strip()
    uid = update.effective_user.id
    uname = update.effective_user.username or str(uid)
    log.info("handle_text | user=%s | msg=%.100s", uname, msg)
    client = gh(ctx)
    cur = repo(ctx)
    history = ctx.user_data.setdefault("ai_history", [])
    history.append({"role": "user", "content": msg})
    await update.message.reply_text("๐ค ุฌุงุฑู ุงูู ุนุงูุฌุฉ...")
    provider = ctx.user_data.get("ai_provider", "groq")
    model = ctx.user_data.get("ai_model", PROVIDERS[provider]["models"][0])
    ai_key = _get_ai_key(provider, ctx)
    try:
        # Blocking AI SDK call runs in a worker thread so the loop stays free.
        reply = await asyncio.to_thread(
            call_ai_with_tools, client, msg, history[:-1], provider, model, ai_key
        )
    except Exception as e:
        log.error("handle_text ai error for user=%s: %s\n%s", uname, e, traceback.format_exc())
        reply = f"โ ุฎุทุฃ: {e}"
    history.append({"role": "assistant", "content": reply})
    # Bound the conversation memory to the 20 most recent turns.
    if len(history) > 20:
        ctx.user_data["ai_history"] = history[-20:]
    try:
        await update.message.reply_text(reply[:4000], parse_mode="Markdown",
                                        reply_markup=main_kb(cur))
    except Exception:
        await update.message.reply_text(reply[:4000], reply_markup=main_kb(cur))
async def handle_voice(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Transcribe incoming voice message via Groq Whisper then reply with text + TTS audio.

    Pipeline: download OGG from Telegram -> Whisper transcription ->
    route the text through the active AI provider (with tool access) ->
    text reply, then a best-effort TTS audio reply.
    Requires a Groq key even when another AI provider is selected,
    because both transcription and TTS go through Groq.
    """
    username = update.effective_user.username or str(update.effective_user.id)
    log.info("handle_voice | user=%s | duration=%ds",
             username, update.message.voice.duration)
    groq_key = GROQ_KEY or ctx.user_data.get("groq_key", "")
    if not groq_key:
        await update.message.reply_text("โ GROQ_KEY ู ุทููุจ ููุชูุฑูุบ ุงูุตูุชู โ ุฃุถูู ู ู โ๏ธ ุงูุฅุนุฏุงุฏุงุช")
        return
    await update.message.reply_chat_action("typing")
    # Download Telegram voice file (OGG/Opus) into memory
    tg_file = await ctx.bot.get_file(update.message.voice.file_id)
    ogg_buf = io.BytesIO()
    await tg_file.download_to_memory(ogg_buf)
    ogg_buf.seek(0)  # rewind so the Groq SDK reads from the start
    # Transcribe with Groq Whisper
    try:
        client = _groq_client(groq_key)
        # Blocking SDK call -> worker thread keeps the event loop responsive.
        transcription = await asyncio.to_thread(
            lambda: client.audio.transcriptions.create(
                file=("voice.ogg", ogg_buf, "audio/ogg"),
                model="whisper-large-v3-turbo",
            )
        )
        text = transcription.text.strip()
    except Exception as e:
        log.error("Whisper error: %s\n%s", e, traceback.format_exc())
        await update.message.reply_text(f"โ ุฎุทุฃ ูู ุงูุชูุฑูุบ ุงูุตูุชู: {e}")
        return
    if not text:
        await update.message.reply_text("โ ูู ุฃุชู ูู ู ู ููู ุงูุฑุณุงูุฉ ุงูุตูุชูุฉ")
        return
    # Echo the transcription back so the user can verify what was heard.
    await update.message.reply_text(f"๐ค _{text}_", parse_mode="Markdown")
    # Route transcribed text through the active AI provider
    history = ctx.user_data.setdefault("ai_history", [])
    history.append({"role": "user", "content": text})
    provider = ctx.user_data.get("ai_provider", "groq")
    model = ctx.user_data.get("ai_model", PROVIDERS[provider]["models"][0])
    ai_key = _get_ai_key(provider, ctx)
    g = gh(ctx)
    r = repo(ctx)
    await update.message.reply_chat_action("typing")
    try:
        result = await asyncio.to_thread(
            call_ai_with_tools, g, text, history[:-1], provider, model, ai_key
        )
    except Exception as e:
        log.error("handle_voice ai error: %s\n%s", e, traceback.format_exc())
        result = f"โ ุฎุทุฃ: {e}"
    history.append({"role": "assistant", "content": result})
    ctx.user_data["ai_history"] = history[-20:]
    # Text reply
    try:
        await update.message.reply_text(result[:4000], parse_mode="Markdown",
                                        reply_markup=main_kb(r))
    except Exception:
        await update.message.reply_text(result[:4000], reply_markup=main_kb(r))
    # Voice reply via Groq PlayAI TTS (best-effort โ non-critical)
    await update.message.reply_chat_action("record_voice")
    try:
        # NOTE: reuses the Groq `client` created in the transcription block above.
        tts_resp = await asyncio.to_thread(
            lambda: client.audio.speech.create(
                model="playai-tts-arabic",
                voice="Ahmad",
                input=result[:1000],  # TTS is best kept concise
                response_format="mp3",
            )
        )
        mp3_buf = io.BytesIO(tts_resp.read())
        mp3_buf.name = "reply.mp3"
        await update.message.reply_audio(mp3_buf, title="ุฑุฏ ุตูุชู")
    except Exception as e:
        log.warning("TTS skipped (non-critical): %s", e)
async def button(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Central callback-query dispatcher for every inline-keyboard button.

    Dispatches on `q.data`. Branches that start a conversation
    (PAT/key entry, new file/branch/issue/release/repo, PR, bulk upload)
    return a WAIT_* state consumed by the ConversationHandler; all other
    branches edit the current message in place and return None.
    """
    q = update.callback_query
    await q.answer()
    data = q.data
    user_id = update.effective_user.id
    username = update.effective_user.username or str(user_id)
    log.info("button | user=%s | action=%s | repo=%s", username, data, ctx.user_data.get("current_repo", GH_REPO))
    g = gh(ctx)
    r = repo(ctx)
    # Edit the originating message; retry without Markdown if parsing fails.
    async def edit(text, kb=None, md=True):
        try:
            await q.message.edit_text(text[:4000],
                                      parse_mode="Markdown" if md else None,
                                      reply_markup=kb or back_kb())
        except Exception as e:
            log.warning("edit Markdown failed (%s), retrying plain", e)
            try:
                await q.message.edit_text(text[:4000], reply_markup=kb or back_kb())
            except Exception as e2:
                log.error("edit plain also failed: %s", e2)
    # โโ Menu โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    if data == "menu":
        profile = await asyncio.to_thread(g.get_profile)
        name = profile.get("name") or GH_USER
        await edit(f"๐ {name}\n๐ `{r}`", main_kb(r))
    # โโ AI Chat โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    elif data == "ai_chat":
        await edit(
            "๐ค *ู ุณุงุนุฏ AI*\n\nูู ููู ุงููุชุงุจุฉ ู ุจุงุดุฑุฉ ุฃู ุงุฎุชูุงุฑ ู ู ุงูุฃุณูู:",
            ai_kb()
        )
    elif data == "ai_updates":
        await edit("โณ ุฌุงุฑู ุฌูุจ ุขุฎุฑ ุงูุชุทูุฑุงุช...")
        provider = ctx.user_data.get("ai_provider", "groq")
        model = ctx.user_data.get("ai_model", PROVIDERS[provider]["models"][0])
        ai_key = _get_ai_key(provider, ctx)
        try:
            result = await asyncio.to_thread(
                call_ai_with_tools, g,
                f"ุงุทูุนูู ุนูู ุขุฎุฑ ุงูุชุทูุฑุงุช ูู ุฑูุจู {r}: ุขุฎุฑ commitsุ issues ู ูุชูุญุฉุ PRs ู ุนููุฉ",
                [], provider, model, ai_key
            )
        except Exception as e:
            log.error("ai_updates error: %s\n%s", e, traceback.format_exc())
            result = f"โ ุฎุทุฃ: {e}"
        await edit(result[:4000] or "ูุง ุชูุฌุฏ ุชุทูุฑุงุช", ai_kb())
    elif data == "ai_summary":
        await edit("โณ ุฌุงุฑู ุฅุนุฏุงุฏ ุงูู ูุฎุต...")
        provider = ctx.user_data.get("ai_provider", "groq")
        model = ctx.user_data.get("ai_model", PROVIDERS[provider]["models"][0])
        ai_key = _get_ai_key(provider, ctx)
        try:
            result = await asyncio.to_thread(
                call_ai_with_tools, g,
                f"ูุฏูู ู ูุฎุตุงู ุดุงู ูุงู ูุฑูุจู {r}: ุงูุฅุญุตุงุฆูุงุชุ ุงููุบุฉุ ุงูุญุฌู ุ ุขุฎุฑ ุชุญุฏูุซ",
                [], provider, model, ai_key
            )
        except Exception as e:
            log.error("ai_summary error: %s\n%s", e, traceback.format_exc())
            result = f"โ ุฎุทุฃ: {e}"
        await edit(result[:4000] or "โ ุชุนุฐุฑ ุฌูุจ ุงูู ุนููู ุงุช", ai_kb())
    elif data == "ai_issues":
        await edit("โณ ุฌุงุฑู ุฌูุจ Issues...")
        provider = ctx.user_data.get("ai_provider", "groq")
        model = ctx.user_data.get("ai_model", PROVIDERS[provider]["models"][0])
        ai_key = _get_ai_key(provider, ctx)
        try:
            result = await asyncio.to_thread(
                call_ai_with_tools, g,
                f"ุงุนุฑุถ ูุญููู Issues ุงูู ูุชูุญุฉ ูู {r} ูุฃุนุทูู ุฑุฃูู ูููุง",
                [], provider, model, ai_key
            )
        except Exception as e:
            log.error("ai_issues error: %s\n%s", e, traceback.format_exc())
            result = f"โ ุฎุทุฃ: {e}"
        await edit(result[:4000] or "โ ูุง ุชูุฌุฏ issues", ai_kb())
    elif data == "ai_prs":
        await edit("โณ ุฌุงุฑู ุฌูุจ PRs...")
        provider = ctx.user_data.get("ai_provider", "groq")
        model = ctx.user_data.get("ai_model", PROVIDERS[provider]["models"][0])
        ai_key = _get_ai_key(provider, ctx)
        try:
            result = await asyncio.to_thread(
                call_ai_with_tools, g,
                f"ุงุนุฑุถ Pull Requests ุงูู ูุชูุญุฉ ูู {r}",
                [], provider, model, ai_key
            )
        except Exception as e:
            log.error("ai_prs error: %s\n%s", e, traceback.format_exc())
            result = f"โ ุฎุทุฃ: {e}"
        await edit(result[:4000] or "โ ูุง ุชูุฌุฏ PRs", ai_kb())
    # โโ Settings โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    elif data == "settings":
        gh_ok = bool(GH_TOKEN or ctx.user_data.get("gh_token"))
        provider = ctx.user_data.get("ai_provider", "groq")
        ai_ok = bool(_get_ai_key(provider, ctx))
        await edit("โ๏ธ *ุงูุฅุนุฏุงุฏุงุช*", settings_kb(gh_ok, ai_ok, provider))
    elif data == "set_pat":
        await edit("๐ ุฃุฑุณู GitHub PAT:\n\n"
                   "github.com โ Settings โ Developer Settings\n"
                   "โ Personal Access Tokens โ Generate\n"
                   "โ Scope: `repo`", cancel_kb())
        return WAIT_PAT
    elif data == "set_groq":
        await edit("๐ค ุฃุฑุณู Groq API Key:\n\nconsole.groq.com โ API Keys โ Create", cancel_kb())
        return WAIT_GROQ_KEY
    elif data == "set_ai_key":
        provider = ctx.user_data.get("ai_provider", "groq")
        prov = PROVIDERS.get(provider, PROVIDERS["groq"])
        # Console URL hints per provider, shown under the prompt.
        urls = {
            "groq": "console.groq.com โ API Keys",
            "deepseek": "platform.deepseek.com โ API Keys",
            "huggingface": "huggingface.co/settings/tokens",
            "mistral": "console.mistral.ai โ API Keys",
        }
        await edit(f"๐ ุฃุฑุณู API Key ูู *{prov['label']}*:\n\n{urls.get(provider, '')}", cancel_kb())
        return WAIT_AI_KEY
    elif data == "set_ai_provider":
        provider = ctx.user_data.get("ai_provider", "groq")
        await edit("๐ค *ุงุฎุชุฑ ู ุฒูุฏ AI:*", ai_provider_kb(provider))
    elif data.startswith("set_provider_"):
        pid = data[len("set_provider_"):]
        if pid in PROVIDERS:
            # Switching provider resets the model to that provider's default.
            ctx.user_data["ai_provider"] = pid
            ctx.user_data["ai_model"] = PROVIDERS[pid]["models"][0]
            prov = PROVIDERS[pid]
            current_model = PROVIDERS[pid]["models"][0]
            await edit(f"โ ุชู ุงุฎุชูุงุฑ *{prov['label']}*\n\nุงุฎุชุฑ ุงููู ูุฐุฌ:", ai_model_kb(pid, current_model))
    elif data == "set_ai_model":
        provider = ctx.user_data.get("ai_provider", "groq")
        current_model = ctx.user_data.get("ai_model", PROVIDERS[provider]["models"][0])
        await edit("๐ค *ุงุฎุชุฑ ุงููู ูุฐุฌ:*", ai_model_kb(provider, current_model))
    elif data.startswith("set_model_"):
        provider = ctx.user_data.get("ai_provider", "groq")
        # Callback carries the model's index into the provider's list.
        try:
            idx = int(data[len("set_model_"):])
            models = PROVIDERS[provider]["models"]
            if 0 <= idx < len(models):
                ctx.user_data["ai_model"] = models[idx]
                await edit(f"โ ุชู ุงุฎุชูุงุฑ ุงููู ูุฐุฌ: `{models[idx].split('/')[-1]}`", back_kb())
        except (ValueError, IndexError):
            pass
    # โโ Repos โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    elif data == "repos":
        repos = await asyncio.to_thread(g.list_repos)
        if not repos:
            await edit("โ ูุง ุชูุฌุฏ ุฑูุจููุงุช ุฃู ุฎุทุฃ ูู ุงูู PAT"); return
        lines = []
        for rp in repos[:15]:
            vis = "๐" if rp.get("private") else "๐"
            stars = rp.get("stargazers_count",0)
            lang = rp.get("language") or "โ"
            updated = fmt_date(rp.get("updated_at",""))
            lines.append(f"{vis} `{rp['name']}` โญ{stars} {lang}\n    ๐ {updated}")
        total = len(repos)
        kb = InlineKeyboardMarkup([
            [InlineKeyboardButton("โ ุฑูุจู ุฌุฏูุฏ", callback_data="new_repo")],
            [InlineKeyboardButton("โ๏ธ ุงูุฑุฆูุณูุฉ", callback_data="menu")],
        ])
        suffix = f"\n_ุนุฑุถ {min(15,total)} ู ู {total}_" if total > 15 else ""
        await edit(f"๐ฆ *ุฑูุจููุงุชู ({total}):*\n\n" + "\n\n".join(lines) + suffix, kb)
    elif data == "switch_repo":
        repos = await asyncio.to_thread(g.list_repos)
        btns = [[InlineKeyboardButton(f"๐ {rp['name']}",
                 callback_data=f"use_{rp['name']}")] for rp in repos[:10]]
        btns.append([InlineKeyboardButton("โ๏ธ ุงูุฑุฆูุณูุฉ", callback_data="menu")])
        await edit("ุงุฎุชุฑ ุงูุฑูุจู:", InlineKeyboardMarkup(btns))
    elif data.startswith("use_"):
        nr = data[4:]
        ctx.user_data["current_repo"] = nr
        await edit(f"โ ุชู ุงูุชุจุฏูู ูู `{nr}`", main_kb(nr))
    # โโ Files โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    elif data == "files" or data.startswith("ls_"):
        path = data[3:] if data.startswith("ls_") else ""
        items = await asyncio.to_thread(g.list_files, r, path)
        # A dict (rather than a list) signals a GitHub API error payload.
        if isinstance(items, dict):
            await edit(f"โ {items.get('message','ุฎุทุฃ')}"); return
        dirs_ = [i for i in items if i.get("type")=="dir"]
        files_ = [i for i in items if i.get("type")=="file"]
        btns = []
        for d in dirs_[:8]:
            np = f"{path}/{d['name']}" if path else d["name"]
            btns.append([InlineKeyboardButton(f"๐ {d['name']}/", callback_data=f"ls_{np}")])
        for f_ in files_[:12]:
            fp = f"{path}/{f_['name']}" if path else f_["name"]
            sz = f_["size"]
            s = f"{sz}B" if sz < 1024 else f"{sz/1024:.1f}KB"
            btns.append([InlineKeyboardButton(f"๐ {f_['name']} ({s})",
                         callback_data=f"view_{fp}")])
        if path:
            parent = "/".join(path.split("/")[:-1])
            btns.append([InlineKeyboardButton(f"๐ .. ({parent or r})",
                         callback_data=f"ls_{parent}" if parent else "files")])
        btns.append([InlineKeyboardButton("โ๏ธ ุงูุฑุฆูุณูุฉ", callback_data="menu")])
        loc = f"`{r}/{path}`" if path else f"`{r}`"
        await edit(f"๐ {loc}\n{len(dirs_)} ู ุฌูุฏุ {len(files_)} ู ูู",
                   InlineKeyboardMarkup(btns))
    elif data.startswith("view_"):
        path = data[5:]
        content, sha = await asyncio.to_thread(g.get_file, r, path)
        if content is None:
            await edit("โ ุชุนุฐุฑ ูุฑุงุกุฉ ุงูู ูู"); return
        # Remember path + blob sha so edit/delete actions can reuse them.
        ctx.user_data["edit_path"] = path
        ctx.user_data["edit_sha"] = sha
        lines = content.splitlines()
        preview = "\n".join(lines[:30])
        if len(lines) > 30: preview += f"\n... ({len(lines)-30} ุณุทุฑ ุฅุถุงูู)"
        kb = InlineKeyboardMarkup([
            [InlineKeyboardButton("โ๏ธ ุชุนุฏูู", callback_data="edit_file"),
             InlineKeyboardButton("๐๏ธ ุญุฐู", callback_data=f"confirm_del_{path}")],
            [InlineKeyboardButton("โ๏ธ ุงูู ููุงุช", callback_data="files")],
        ])
        await edit(f"๐ `{path}`\n```\n{preview[:3000]}\n```", kb)
    elif data == "edit_file":
        path = ctx.user_data.get("edit_path","")
        await edit(f"โ๏ธ ุฃุฑุณู ุงูู ุญุชูู ุงูุฌุฏูุฏ ูู `{path}`:", cancel_kb())
        return WAIT_FILE_CONTENT
    elif data.startswith("confirm_del_"):
        path = data[12:]
        kb = InlineKeyboardMarkup([
            [InlineKeyboardButton("โ ูุนู ุ ุงุญุฐู", callback_data="delete_file"),
             InlineKeyboardButton("โ ุฅูุบุงุก", callback_data=f"view_{path}")],
        ])
        await edit(f"๐๏ธ ุชุฃููุฏ ุญุฐู `{path}`ุ\nูุฐุง ุงูุฅุฌุฑุงุก ูุง ูู ูู ุงูุชุฑุงุฌุน ุนูู.", kb)
    elif data == "delete_file":
        path = ctx.user_data.get("edit_path","")
        sha = ctx.user_data.get("edit_sha","")
        log.info("delete_file | user=%s | path=%s | repo=%s", username, path, r)
        ok = await asyncio.to_thread(g.delete_file, r, path, f"delete: {path}", sha)
        await edit(f"{'โ ุชู ุญุฐู' if ok else 'โ ูุดู ุญุฐู'} `{path}`", back_kb())
    # โโ Commits โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    elif data == "commits":
        commits = await asyncio.to_thread(g.list_commits, r)
        if not commits:
            await edit("โ ูุง ุชูุฌุฏ commits"); return
        lines = []
        for c in commits[:10]:
            msg_ = c["commit"]["message"].splitlines()[0][:50]
            author = c["commit"]["author"]["name"][:15]
            date_ = fmt_date(c["commit"]["author"]["date"])
            sha_ = c["sha"][:7]
            lines.append(f"`{sha_}` {msg_}\n   ๐ค {author} ๐ {date_}")
        await edit(f"๐ *Commits ูู `{r}`:*\n\n" + "\n\n".join(lines))
    # โโ Branches โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    elif data == "branches":
        branches = await asyncio.to_thread(g.list_branches, r)
        if not branches:
            await edit("โ ูุง ุชูุฌุฏ branches"); return
        lines = [f"๐ฟ `{b['name']}`" for b in branches]
        kb = InlineKeyboardMarkup([
            [InlineKeyboardButton("โ Branch ุฌุฏูุฏ", callback_data="new_branch")],
            [InlineKeyboardButton("โ๏ธ ุงูุฑุฆูุณูุฉ", callback_data="menu")],
        ])
        await edit(f"๐ฟ *Branches ูู `{r}`:*\n\n" + "\n".join(lines), kb)
    # โโ Pull Requests โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    elif data == "prs":
        prs = await asyncio.to_thread(g.list_prs, r)
        if not prs:
            await edit("โ ูุง ุชูุฌุฏ Pull Requests ู ูุชูุญุฉ"); return
        btns = []
        lines = []
        for p in prs[:10]:
            lines.append(f"#{p['number']} `{p['title'][:45]}`\n"
                         f"   `{p['head']['ref']}` โ `{p['base']['ref']}`")
            btns.append([InlineKeyboardButton(
                f"๐ Merge #{p['number']}", callback_data=f"merge_{p['number']}")])
        btns.append([InlineKeyboardButton("โ PR ุฌุฏูุฏ", callback_data="new_pr")])
        btns.append([InlineKeyboardButton("โ๏ธ ุงูุฑุฆูุณูุฉ", callback_data="menu")])
        await edit(f"๐ *Pull Requests ูู `{r}`:*\n\n" + "\n\n".join(lines),
                   InlineKeyboardMarkup(btns))
    elif data.startswith("merge_"):
        pr_num = int(data[6:])
        await edit(f"โณ ุฌุงุฑู ุฏู ุฌ PR #{pr_num}...")
        ok, res = await asyncio.to_thread(g.merge_pr, r, pr_num)
        if ok:
            await edit(f"โ ุชู ุฏู ุฌ PR #{pr_num} ุจูุฌุงุญ! ๐", main_kb(r))
        else:
            await edit(f"โ ูุดู: {res.get('message','')}")
    elif data == "new_pr":
        await edit("๐ ุฃุฑุณู ุนููุงู ุงูู PR\n_ุงูู ุซุงู: feat: add new feature_", cancel_kb())
        return WAIT_PR_TITLE
    # โโ Issues โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    elif data == "issues":
        issues = await asyncio.to_thread(g.list_issues, r)
        if not issues:
            await edit("โ ูุง ุชูุฌุฏ issues ู ูุชูุญุฉ"); return
        btns = []
        lines = []
        for i in issues[:10]:
            lines.append(f"#{i['number']} `{i['title'][:50]}`")
            btns.append([InlineKeyboardButton(
                f"โ ุฅุบูุงู #{i['number']}", callback_data=f"confirm_close_{i['number']}")])
        btns.append([InlineKeyboardButton("โ Issue ุฌุฏูุฏ", callback_data="new_issue")])
        btns.append([InlineKeyboardButton("โ๏ธ ุงูุฑุฆูุณูุฉ", callback_data="menu")])
        await edit(f"๐ *Issues ูู `{r}`:*\n\n" + "\n".join(lines),
                   InlineKeyboardMarkup(btns))
    elif data.startswith("confirm_close_"):
        num = int(data[14:])
        kb = InlineKeyboardMarkup([
            [InlineKeyboardButton("โ ูุนู ุ ุฃุบูู", callback_data=f"close_issue_{num}"),
             InlineKeyboardButton("โ ุฅูุบุงุก", callback_data="issues")],
        ])
        await edit(f"๐ ุชุฃููุฏ ุฅุบูุงู Issue #{num}ุ", kb)
    elif data.startswith("close_issue_"):
        num = int(data[12:])
        log.info("close_issue | user=%s | issue=%d | repo=%s", username, num, r)
        ok = await asyncio.to_thread(g.close_issue, r, num)
        await edit(f"{'โ ุชู ุฅุบูุงู' if ok else 'โ ูุดู ุฅุบูุงู'} Issue #{num}", back_kb())
    # โโ Releases โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    elif data == "releases":
        releases = await asyncio.to_thread(g.list_releases, r)
        if not releases:
            await edit("๐ฆ ูุง ุชูุฌุฏ releases ุจุนุฏ"); return
        lines = []
        for rel in releases:
            tag = rel["tag_name"]
            date_ = fmt_date(rel.get("published_at",""))
            lines.append(f"๐ท๏ธ `{tag}` ๐ {date_}")
        kb = InlineKeyboardMarkup([
            [InlineKeyboardButton("โ Release ุฌุฏูุฏ", callback_data="new_release")],
            [InlineKeyboardButton("โ๏ธ ุงูุฑุฆูุณูุฉ", callback_data="menu")],
        ])
        await edit(f"๐ *Releases ูู `{r}`:*\n\n" + "\n".join(lines), kb)
    # โโ Stats โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    elif data == "stats":
        await edit("โณ ุฌุงุฑู ุฌูุจ ุงูุฅุญุตุงุฆูุงุช...")
        # Fetch repo metadata and traffic concurrently.
        info, (v, cl) = await asyncio.gather(
            asyncio.to_thread(g.get_repo, r),
            asyncio.to_thread(g.get_traffic, r),
        )
        if not info:
            await edit("โ ุชุนุฐุฑ ุฌูุจ ุงูู ุนููู ุงุช"); return
        text = (f"๐ *{r}*\n\n"
                f"โญ Stars: {info.get('stargazers_count',0)}\n"
                f"๐ด Forks: {info.get('forks_count',0)}\n"
                f"๐๏ธ Watchers: {info.get('watchers_count',0)}\n"
                f"๐ Issues: {info.get('open_issues_count',0)}\n"
                f"๐ค Language: {info.get('language','โ')}\n"
                f"๐ฆ Size: {info.get('size',0)} KB\n"
                f"๐ Private: {'ูุนู ' if info.get('private') else 'ูุง'}\n"
                f"๐ Created: {fmt_date(info.get('created_at',''))}\n"
                f"๐ Updated: {fmt_date(info.get('updated_at',''))}\n\n"
                f"๐ Views (14d): {v.get('count',0)}\n"
                f"๐ฅ Clones (14d): {cl.get('count',0)}")
        await edit(text)
    # โโ Upload Multiple Files โโโโโโโโโโโโโโโโโโโโโโโโโโ
    elif data == "upload_files":
        await edit(
            "๐ค *ุฑูุน ู ููุงุช ู ุชุนุฏุฏุฉ (JSON)*\n\n"
            "ุฃุฑุณู JSON ุจูุฐุง ุงูุดูู:\n"
            '```\n[{"path":"src/file.py","content":"..."},\n'
            ' {"path":"README.md","content":"..."}]\n```\n\n'
            "ุงุถุบุท ุฅูุบุงุก ููุฑุฌูุน:",
            cancel_kb()
        )
        return WAIT_MULTI_FILES
    # โโ New actions โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
    elif data == "new_file":
        await edit("๐ ุฃุฑุณู ู ุณุงุฑ ุงูู ูู:\n_ู ุซุงู: src/utils.py_", cancel_kb())
        return WAIT_FILE_PATH
    elif data == "new_branch":
        await edit("๐ฟ ุฃุฑุณู ุงุณู ุงูู branch ุงูุฌุฏูุฏ:", cancel_kb())
        return WAIT_BRANCH_NAME
    elif data == "new_issue":
        await edit("๐ ุฃุฑุณู ุนููุงู ุงูู Issue:", cancel_kb())
        return WAIT_ISSUE_TITLE
    elif data == "new_release":
        await edit("๐ ุฃุฑุณู ุฑูู ุงูุฅุตุฏุงุฑ:\n_ู ุซุงู: v1.0.0_", cancel_kb())
        return WAIT_RELEASE_TAG
    elif data == "new_repo":
        await edit("๐ฆ ุฃุฑุณู ุงุณู ุงูุฑูุจู ุงูุฌุฏูุฏ:", cancel_kb())
        return WAIT_REPO_NAME
| # โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ | |
| # Conversation Receivers | |
| # โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ | |
async def recv_pat(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Store the GitHub PAT sent by the user and wipe it from the chat."""
    token = update.message.text.strip()
    # Delete the user's message so the secret does not linger in history.
    await update.message.delete()
    ctx.user_data["gh_token"] = token
    _persist_env("GH_TOKEN", token)
    await update.message.reply_text("โ ุชู ุญูุธ GitHub PAT ๐", reply_markup=main_kb(repo(ctx)))
    return ConversationHandler.END
async def recv_groq_key(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Store the Groq API key and wipe it from the chat."""
    api_key = update.message.text.strip()
    # Remove the secret from chat history before acknowledging.
    await update.message.delete()
    ctx.user_data["groq_key"] = api_key
    _persist_env("GROQ_KEY", api_key)
    await update.message.reply_text("โ ุชู ุญูุธ Groq Key ๐ค", reply_markup=main_kb(repo(ctx)))
    return ConversationHandler.END
async def recv_ai_key(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Store the API key for the currently selected AI provider.

    Fix: delete the user's message after reading it — consistent with
    recv_pat/recv_groq_key — so the secret is not left sitting in the
    chat history.
    """
    key = update.message.text.strip()
    # Wipe the secret from the chat, like the other key receivers do.
    await update.message.delete()
    provider = ctx.user_data.get("ai_provider", "groq")
    prov = PROVIDERS.get(provider, PROVIDERS["groq"])
    # Each provider declares its per-user storage key and env-file key.
    ctx.user_data[prov["user_key"]] = key
    _persist_env(prov["env_key"], key)
    log.info("recv_ai_key | provider=%s | user=%s",
             provider, update.effective_user.username or update.effective_user.id)
    await update.message.reply_text(
        f"โ ุชู ุญูุธ ู ูุชุงุญ *{prov['label']}*",
        parse_mode="Markdown", reply_markup=back_kb()
    )
    return ConversationHandler.END
async def recv_repo_name(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Create a new repository named after the user's message (spaces -> dashes)."""
    repo_name = update.message.text.strip().replace(" ", "-")
    client = gh(ctx)
    _, status = await asyncio.to_thread(client.create_repo, repo_name)
    if status != 201:
        # 201 Created is the only success status for repo creation.
        await update.message.reply_text("โ ูุดู โ ุงูุงุณู ู ุฃุฎูุฐ ุฃู ุฎุทุฃ ูู ุงูู PAT",
                                        reply_markup=back_kb())
        return ConversationHandler.END
    ctx.user_data["current_repo"] = repo_name
    log.info("create_repo | user=%s | repo=%s", update.effective_user.username, repo_name)
    try:
        await update.message.reply_text(f"โ ุชู ุฅูุดุงุก `{repo_name}`!",
                                        parse_mode="Markdown", reply_markup=main_kb(repo_name))
    except Exception:
        await update.message.reply_text(f"โ ุชู ุฅูุดุงุก {repo_name}!", reply_markup=main_kb(repo_name))
    return ConversationHandler.END
async def recv_file_content(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Stash the new file body, then prompt for a commit message."""
    ctx.user_data["new_content"] = update.message.text
    prompt = "โ๏ธ ุฃุฑุณู ุฑุณุงูุฉ ุงูู commit:\n_ู ุซุงู: add: utils module_"
    await update.message.reply_text(prompt, parse_mode="Markdown")
    return WAIT_COMMIT_MSG
async def recv_commit_msg(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Commit the previously stashed content to the repo with this message."""
    commit_msg = update.message.text.strip()
    client, cur = gh(ctx), repo(ctx)
    # Path/content/sha were stashed by the view/edit flow; sha is None for new files.
    target = ctx.user_data.get("edit_path", "")
    body = ctx.user_data.get("new_content", "")
    blob_sha = ctx.user_data.get("edit_sha")
    log.info("commit | user=%s | path=%s | repo=%s | msg=%.60s",
             update.effective_user.username, target, cur, commit_msg)
    ok, res = await asyncio.to_thread(client.create_or_update_file, cur, target,
                                      body, commit_msg, sha=blob_sha)
    if not ok:
        await update.message.reply_text(f"โ ูุดู: {res.get('message', '')}",
                                        reply_markup=back_kb())
        return ConversationHandler.END
    try:
        await update.message.reply_text(f"โ ุชู ุญูุธ `{target}`!",
                                        parse_mode="Markdown", reply_markup=main_kb(cur))
    except Exception:
        await update.message.reply_text(f"โ ุชู ุญูุธ {target}!", reply_markup=main_kb(cur))
    return ConversationHandler.END
async def recv_file_path(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Remember the path for a brand-new file (no sha), then ask for its content."""
    ctx.user_data["edit_path"] = update.message.text.strip()
    # None sha tells the commit step this is a create, not an update.
    ctx.user_data["edit_sha"] = None
    await update.message.reply_text("๐ ุฃุฑุณู ู ุญุชูู ุงูู ูู:")
    return WAIT_FILE_CONTENT
async def recv_branch_name(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Create a branch from the user-supplied name (spaces become dashes)."""
    branch = update.message.text.strip().replace(" ", "-")
    client, cur = gh(ctx), repo(ctx)
    ok, res = await asyncio.to_thread(client.create_branch, cur, branch)
    if not ok:
        # The helper may return either a plain string or an API error dict.
        err = res if isinstance(res, str) else res.get("message", "")
        await update.message.reply_text(f"โ ูุดู: {err}", reply_markup=back_kb())
        return ConversationHandler.END
    log.info("create_branch | user=%s | branch=%s | repo=%s",
             update.effective_user.username, branch, cur)
    try:
        await update.message.reply_text(f"โ Branch `{branch}` ุฃููุดุฆ!",
                                        parse_mode="Markdown", reply_markup=main_kb(cur))
    except Exception:
        await update.message.reply_text(f"โ Branch {branch} ุฃููุดุฆ!", reply_markup=main_kb(cur))
    return ConversationHandler.END
async def recv_issue_title(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Stash the issue title, then prompt for the body ('-' skips it)."""
    ctx.user_data["issue_title"] = update.message.text.strip()
    prompt = "๐ ุฃุฑุณู ุชูุงุตูู ุงูู Issue\n_ุฃุฑุณู `-` ููุชุฎุทู_"
    await update.message.reply_text(prompt, parse_mode="Markdown")
    return WAIT_ISSUE_BODY
async def recv_issue_body(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Create the issue using the stashed title and this body ('-' = empty)."""
    details = update.message.text.strip()
    title = ctx.user_data.pop("issue_title", "")
    client, cur = gh(ctx), repo(ctx)
    issue_body = "" if details == "-" else details
    ok, res = await asyncio.to_thread(client.create_issue, cur, title, issue_body)
    if not ok:
        await update.message.reply_text(f"โ ูุดู: {res.get('message', '')}",
                                        reply_markup=back_kb())
        return ConversationHandler.END
    log.info("create_issue | user=%s | #%d | repo=%s",
             update.effective_user.username, res.get("number", 0), cur)
    try:
        await update.message.reply_text(f"โ Issue #{res['number']} ุฃููุดุฆ!\n`{title}`",
                                        parse_mode="Markdown", reply_markup=main_kb(cur))
    except Exception:
        await update.message.reply_text(f"โ Issue #{res.get('number')} ุฃููุดุฆ!",
                                        reply_markup=main_kb(cur))
    return ConversationHandler.END
async def recv_release_tag(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Cut a new release tagged with the user-supplied version string."""
    version_tag = update.message.text.strip()
    client, cur = gh(ctx), repo(ctx)
    ok, res = await asyncio.to_thread(client.create_release, cur, version_tag)
    if not ok:
        await update.message.reply_text(f"โ ูุดู: {res.get('message', '')}",
                                        reply_markup=back_kb())
        return ConversationHandler.END
    log.info("create_release | user=%s | tag=%s | repo=%s",
             update.effective_user.username, version_tag, cur)
    url = res.get("html_url", "")
    try:
        await update.message.reply_text(f"๐ Release `{version_tag}` ุฃููุดุฆ!\n๐ {url}",
                                        parse_mode="Markdown", reply_markup=main_kb(cur))
    except Exception:
        await update.message.reply_text(f"๐ Release {version_tag} ุฃููุดุฆ!\n๐ {url}",
                                        reply_markup=main_kb(cur))
    return ConversationHandler.END
async def recv_multi_files(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Bulk-upload files described by a JSON array of {"path","content"} objects.

    Robustness fix: each entry is now validated to be an object with string
    "path" and "content" keys BEFORE calling the API. Previously a
    well-formed JSON array of the wrong shape (e.g. a list of strings)
    slipped past the parser and crashed with a KeyError while building the
    result report.
    """
    text = update.message.text.strip()
    g = gh(ctx); r = repo(ctx)
    try:
        files = json.loads(text)
        if not isinstance(files, list):
            raise ValueError("expected a JSON array")
        # Shape-check every entry so a malformed item fails fast and politely.
        for item in files:
            if not (isinstance(item, dict)
                    and isinstance(item.get("path"), str)
                    and isinstance(item.get("content"), str)):
                raise ValueError("each item needs string 'path' and 'content'")
    except (json.JSONDecodeError, ValueError) as e:
        log.warning("recv_multi_files: invalid JSON from user=%s: %s",
                    update.effective_user.username, e)
        await update.message.reply_text(
            "โ ุตูุบุฉ JSON ุบูุฑ ุตุญูุญุฉ\n"
            'ุงูู ุซุงู:\n`[{"path":"file.py","content":"..."}]`',
            parse_mode="Markdown")
        # Stay in this state so the user can retry with corrected JSON.
        return WAIT_MULTI_FILES
    await update.message.reply_text(f"โณ ุฑูุน {len(files)} ู ูู...")
    log.info("upload_multiple | user=%s | count=%d | repo=%s",
             update.effective_user.username, len(files), r)
    results = await asyncio.to_thread(g.upload_multiple, r, files)
    ok_count = sum(1 for res in results if res["ok"])
    lines = [f"{'โ ' if res['ok'] else 'โ'} `{res['path']}`" for res in results]
    try:
        await update.message.reply_text(
            f"๐ค *ุงููุชูุฌุฉ: {ok_count}/{len(files)} ูุฌุญ*\n\n" + "\n".join(lines),
            parse_mode="Markdown", reply_markup=main_kb(r))
    except Exception:
        await update.message.reply_text(
            f"๐ค ุงููุชูุฌุฉ: {ok_count}/{len(files)} ูุฌุญ", reply_markup=main_kb(r))
    return ConversationHandler.END
async def recv_pr_title(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Stash the PR title, then prompt for the description ('-' skips it)."""
    ctx.user_data["pr_title"] = update.message.text.strip()
    prompt = "๐ ุฃุฑุณู ุชูุงุตูู ุงูู PR\n_ุฃุฑุณู `-` ููุชุฎุทู_"
    await update.message.reply_text(prompt, parse_mode="Markdown")
    return WAIT_PR_BODY
async def recv_pr_body(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Open a PR from the first non-default branch into the default branch.

    Fix: the base branch was hard-coded to "main", so repos whose default
    branch is e.g. "master" could never create a PR. The actual default
    branch is now read from the repository metadata (same g.get_repo
    accessor the stats view uses), falling back to "main".
    """
    body = update.message.text.strip()
    title = ctx.user_data.pop("pr_title", "")
    g = gh(ctx); r = repo(ctx)
    # Resolve the repo's real default branch instead of assuming "main".
    info = await asyncio.to_thread(g.get_repo, r)
    base = (info or {}).get("default_branch", "main")
    branches = await asyncio.to_thread(g.list_branches, r)
    names = [b["name"] for b in branches if b["name"] != base]
    if not names:
        await update.message.reply_text(
            "โ ูุง ุชูุฌุฏ branches ุบูุฑ main โ ุฃูุดุฆ branch ุฃููุงู",
            reply_markup=back_kb())
        return ConversationHandler.END
    # Simple heuristic: use the first non-default branch as the PR head.
    head = names[0]
    ok, res = await asyncio.to_thread(g.create_pr, r, title, head, base,
                                      "" if body == "-" else body)
    if ok:
        log.info("create_pr | user=%s | #%d | %sโ%s | repo=%s",
                 update.effective_user.username, res.get("number", 0), head, base, r)
        url = res.get("html_url", "")
        try:
            await update.message.reply_text(f"โ PR #{res['number']} ุฃููุดุฆ!\n๐ {url}",
                                            parse_mode="Markdown", reply_markup=main_kb(r))
        except Exception:
            await update.message.reply_text(f"โ PR ุฃููุดุฆ!\n๐ {url}", reply_markup=main_kb(r))
    else:
        await update.message.reply_text(f"โ ูุดู: {res.get('message', '')}",
                                        reply_markup=back_kb())
    return ConversationHandler.END
async def cancel(update: Update, ctx: ContextTypes.DEFAULT_TYPE):
    """Abort the current conversation and return to the back keyboard."""
    await update.message.reply_text("โ ุฅูุบุงุก", reply_markup=back_kb())
    return ConversationHandler.END
# โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
# HTTP Health Server (required for Hugging Face Spaces)
# โ main() entry point follows further below
# โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ
def _start_health_server() -> None:
    """HF Spaces keeps Docker containers alive only if they serve HTTP.
    This starts a minimal HTTP server (default port 7860, overridable via
    the PORT env var) in a background daemon thread.

    Uses ThreadingHTTPServer so one slow or stalled client cannot block
    subsequent health probes (plain HTTPServer is single-threaded), and
    answers HEAD as well as GET since some health checkers probe with HEAD
    (BaseHTTPRequestHandler would otherwise answer 501).
    """
    import http.server

    class _Handler(http.server.BaseHTTPRequestHandler):
        def _reply_ok(self, include_body: bool) -> None:
            # Any path counts as a health check โ always answer 200.
            payload = "GitHub Bot is running โ ".encode()
            self.send_response(200)
            self.send_header("Content-Type", "text/plain; charset=utf-8")
            self.send_header("Content-Length", str(len(payload)))
            self.end_headers()
            if include_body:
                self.wfile.write(payload)

        def do_GET(self):
            self._reply_ok(include_body=True)

        def do_HEAD(self):
            self._reply_ok(include_body=False)

        def log_message(self, *args):
            pass  # suppress access logs

    port = int(os.getenv("PORT", "7860"))
    httpd = http.server.ThreadingHTTPServer(("0.0.0.0", port), _Handler)
    threading.Thread(target=httpd.serve_forever, daemon=True).start()
    log.info("Health server on port %s", port)
async def _error_handler(update: object, ctx: ContextTypes.DEFAULT_TYPE) -> None:
    """Log any unhandled handler exception with its traceback, then make a
    best-effort attempt to tell the user something went wrong."""
    err = ctx.error
    tb_text = "".join(
        traceback.format_exception(type(err), err, err.__traceback__))
    log.error("Unhandled exception | update=%s\n%s", update, tb_text)
    if isinstance(update, Update) and update.effective_message:
        try:
            await update.effective_message.reply_text("โ ุญุฏุซ ุฎุทุฃ ุบูุฑ ู ุชููุน. ุชู ุชุณุฌููู.")
        except Exception:
            pass  # notifying the user is best-effort only
def main():
    """Validate required configuration, wire up all handlers, and start
    long-polling.

    BOT_TOKEN and ADMIN_ID are hard requirements (startup aborts without
    them); GH_TOKEN and GROQ_KEY only produce warnings because they can be
    supplied later via the bot's settings menu.
    """
    # --- required config: refuse to start without them -------------------
    if not BOT_TOKEN:
        log.critical("BOT_TOKEN not set in githubbot.env โ cannot start")
        print("โ BOT_TOKEN ุบูุฑ ู ูุฌูุฏ ูู githubbot.env"); return
    if ADMIN_ID == 0:
        log.critical("ADMIN_ID not set โ cannot start")
        print("โ ADMIN_ID ุบูุฑ ู ูุฌูุฏ"); return
    # --- optional config: warn only, can be set from inside the bot ------
    if not GH_TOKEN:
        log.warning("GH_TOKEN not set โ must be added via bot settings")
        print("โ ๏ธ GH_TOKEN ุบูุฑ ู ูุฌูุฏ โ ุฃุถูู ู ู ุฏุงุฎู ุงูุจูุช")
    if not GROQ_KEY:
        log.warning("GROQ_KEY not set โ must be added via bot settings")
        print("โ ๏ธ GROQ_KEY ุบูุฑ ู ูุฌูุฏ โ ุฃุถูู ู ู ุฏุงุฎู ุงูุจูุช")
    log.info("Starting bot | GH_USER=%s | GH_REPO=%s | ADMIN_ID=%d | log=%s",
             GH_USER, GH_REPO, ADMIN_ID, _log_path)
    app = Application.builder().token(BOT_TOKEN).build()
    # One conversation covers every multi-step flow; each WAIT_* state maps
    # plain-text (non-command) messages to the matching recv_* handler.
    conv = ConversationHandler(
        entry_points=[CallbackQueryHandler(button)],
        states={
            WAIT_PAT: [MessageHandler(filters.TEXT & ~filters.COMMAND, recv_pat)],
            WAIT_GROQ_KEY: [MessageHandler(filters.TEXT & ~filters.COMMAND, recv_groq_key)],
            WAIT_REPO_NAME: [MessageHandler(filters.TEXT & ~filters.COMMAND, recv_repo_name)],
            WAIT_FILE_CONTENT: [MessageHandler(filters.TEXT & ~filters.COMMAND, recv_file_content)],
            WAIT_FILE_PATH: [MessageHandler(filters.TEXT & ~filters.COMMAND, recv_file_path)],
            WAIT_COMMIT_MSG: [MessageHandler(filters.TEXT & ~filters.COMMAND, recv_commit_msg)],
            WAIT_BRANCH_NAME: [MessageHandler(filters.TEXT & ~filters.COMMAND, recv_branch_name)],
            WAIT_ISSUE_TITLE: [MessageHandler(filters.TEXT & ~filters.COMMAND, recv_issue_title)],
            WAIT_ISSUE_BODY: [MessageHandler(filters.TEXT & ~filters.COMMAND, recv_issue_body)],
            WAIT_RELEASE_TAG: [MessageHandler(filters.TEXT & ~filters.COMMAND, recv_release_tag)],
            WAIT_MULTI_FILES: [MessageHandler(filters.TEXT & ~filters.COMMAND, recv_multi_files)],
            WAIT_PR_TITLE: [MessageHandler(filters.TEXT & ~filters.COMMAND, recv_pr_title)],
            WAIT_PR_BODY: [MessageHandler(filters.TEXT & ~filters.COMMAND, recv_pr_body)],
            WAIT_AI_KEY: [MessageHandler(filters.TEXT & ~filters.COMMAND, recv_ai_key)],
        },
        fallbacks=[CommandHandler("cancel", cancel)],
        # NOTE(review): per_message=False with a CallbackQueryHandler entry
        # point triggers a PTB warning but appears intentional here โ confirm.
        per_message=False,
    )
    # Handler registration order matters in PTB: the conversation must come
    # before the catch-all text handler so active flows receive text first.
    app.add_handler(CommandHandler("start", start))
    app.add_handler(conv)
    app.add_handler(MessageHandler(filters.VOICE, handle_voice))
    # Free-text outside conversation states โ AI
    app.add_handler(MessageHandler(filters.TEXT & ~filters.COMMAND, handle_text))
    app.add_error_handler(_error_handler)
    _start_health_server()  # Keep HF Space alive
    log.info("Bot polling started")
    print(f"๐ค GitHub Bot ูุดุชุบู | {GH_USER}")
    # Blocks until the process is stopped; stale updates are discarded.
    app.run_polling(drop_pending_updates=True)
if __name__ == "__main__":  # standard script entry point
    main()