Commit ·
db78256
0
Parent(s):
Initial commit: Push project to Hugging Face
Browse filesThis view is limited to 50 files because it contains too many changes. See raw diff
- .github/ISSUE_TEMPLATE/bug_report.md +26 -0
- .github/ISSUE_TEMPLATE/feature_request.md +20 -0
- .github/workflows/ruff_format.yml +45 -0
- .gitignore +18 -0
- Dockerfile +13 -0
- LICENSE +661 -0
- README.md +189 -0
- bot/__init__.py +105 -0
- bot/__main__.py +141 -0
- bot/core/config_manager.py +258 -0
- bot/core/handlers.py +423 -0
- bot/core/jdownloader_booter.py +94 -0
- bot/core/plugin_manager.py +260 -0
- bot/core/startup.py +349 -0
- bot/core/tg_client.py +123 -0
- bot/core/torrent_manager.py +166 -0
- bot/helper/__init__.py +1 -0
- bot/helper/common.py +1162 -0
- bot/helper/ext_utils/__init__.py +1 -0
- bot/helper/ext_utils/bot_utils.py +308 -0
- bot/helper/ext_utils/bulk_links.py +35 -0
- bot/helper/ext_utils/db_handler.py +235 -0
- bot/helper/ext_utils/error_handler.py +223 -0
- bot/helper/ext_utils/exceptions.py +22 -0
- bot/helper/ext_utils/files_utils.py +453 -0
- bot/helper/ext_utils/help_messages.py +551 -0
- bot/helper/ext_utils/hyperdl_utils.py +509 -0
- bot/helper/ext_utils/links_utils.py +72 -0
- bot/helper/ext_utils/media_utils.py +852 -0
- bot/helper/ext_utils/metadata_utils.py +168 -0
- bot/helper/ext_utils/shortener_utils.py +84 -0
- bot/helper/ext_utils/status_utils.py +308 -0
- bot/helper/ext_utils/task_manager.py +296 -0
- bot/helper/ext_utils/telegraph_helper.py +85 -0
- bot/helper/languages/__init__.py +43 -0
- bot/helper/languages/bn.py +6 -0
- bot/helper/languages/en.py +6 -0
- bot/helper/listeners/__init__.py +1 -0
- bot/helper/listeners/aria2_listener.py +199 -0
- bot/helper/listeners/direct_listener.py +86 -0
- bot/helper/listeners/jdownloader_listener.py +91 -0
- bot/helper/listeners/mega_listener.py +282 -0
- bot/helper/listeners/nzb_listener.py +117 -0
- bot/helper/listeners/qbit_listener.py +222 -0
- bot/helper/listeners/task_listener.py +674 -0
- bot/helper/mirror_leech_utils/__init__.py +1 -0
- bot/helper/mirror_leech_utils/download_utils/__init__.py +1 -0
- bot/helper/mirror_leech_utils/download_utils/aria2_download.py +105 -0
- bot/helper/mirror_leech_utils/download_utils/direct_downloader.py +68 -0
- bot/helper/mirror_leech_utils/download_utils/direct_link_generator.py +2048 -0
.github/ISSUE_TEMPLATE/bug_report.md
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: Bug report
|
| 3 |
+
about: Create a report to help us improve
|
| 4 |
+
title: ''
|
| 5 |
+
labels: bug
|
| 6 |
+
assignees: ''
|
| 7 |
+
|
| 8 |
+
---
|
| 9 |
+
|
| 10 |
+
**Describe the bug**
|
| 11 |
+
A clear and concise description of what the bug is.
|
| 12 |
+
|
| 13 |
+
**To Reproduce**
|
| 14 |
+
Steps to reproduce the behavior:
|
| 15 |
+
1. Used this Command with this link or file.
|
| 16 |
+
2. Before/after/while Upload ..
|
| 17 |
+
3. Check logs
|
| 18 |
+
|
| 19 |
+
**Expected behavior**
|
| 20 |
+
A clear and concise description of what you expected to happen.
|
| 21 |
+
|
| 22 |
+
**Screenshots**
|
| 23 |
+
If applicable, add screenshots to help explain your problem.
|
| 24 |
+
|
| 25 |
+
**Additional context**
|
| 26 |
+
Add any other context about the problem here.
|
.github/ISSUE_TEMPLATE/feature_request.md
ADDED
|
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
name: Feature request
|
| 3 |
+
about: Suggest an idea for this project
|
| 4 |
+
title: ''
|
| 5 |
+
labels: enhancement
|
| 6 |
+
assignees: ''
|
| 7 |
+
|
| 8 |
+
---
|
| 9 |
+
|
| 10 |
+
**Is your feature request related to a problem? Please describe.**
|
| 11 |
+
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
| 12 |
+
|
| 13 |
+
**Describe the solution you'd like**
|
| 14 |
+
A clear and concise description of what you want to happen.
|
| 15 |
+
|
| 16 |
+
**Describe alternatives you've considered**
|
| 17 |
+
A clear and concise description of any alternative solutions or features you've considered.
|
| 18 |
+
|
| 19 |
+
**Additional context**
|
| 20 |
+
Add any other context or screenshots about the feature request here.
|
.github/workflows/ruff_format.yml
ADDED
|
@@ -0,0 +1,45 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
name: Format Code via Ruff
|
| 2 |
+
|
| 3 |
+
on:
|
| 4 |
+
workflow_dispatch:
|
| 5 |
+
inputs:
|
| 6 |
+
branch:
|
| 7 |
+
description: 'Branch to format'
|
| 8 |
+
required: true
|
| 9 |
+
default: 'wzv3'
|
| 10 |
+
type: string
|
| 11 |
+
|
| 12 |
+
jobs:
|
| 13 |
+
code-format:
|
| 14 |
+
permissions:
|
| 15 |
+
contents: write
|
| 16 |
+
runs-on: ubuntu-latest
|
| 17 |
+
|
| 18 |
+
steps:
|
| 19 |
+
- name: Checkout Repo
|
| 20 |
+
uses: actions/checkout@v4
|
| 21 |
+
with:
|
| 22 |
+
ref: ${{ github.event.inputs.branch }}
|
| 23 |
+
|
| 24 |
+
- name: Set up Python3
|
| 25 |
+
uses: actions/setup-python@v4
|
| 26 |
+
|
| 27 |
+
- name: Install ruff Linter
|
| 28 |
+
run: pip install ruff
|
| 29 |
+
|
| 30 |
+
- name: Run ruff to format code
|
| 31 |
+
run: |
|
| 32 |
+
ruff check . --exit-zero
|
| 33 |
+
ruff format .
|
| 34 |
+
git add -u
|
| 35 |
+
|
| 36 |
+
- name: Commit and Push Changes
|
| 37 |
+
run: |
|
| 38 |
+
git config --global user.name "github-actions[bot]"
|
| 39 |
+
git config --global user.email "41898282+github-actions[bot]@users.noreply.github.com"
|
| 40 |
+
if git diff-index --quiet HEAD --; then
|
| 41 |
+
echo "No changes to commit."
|
| 42 |
+
else
|
| 43 |
+
git commit -m "style: Auto Ruff Formatter"
|
| 44 |
+
git push origin ${{ github.ref }}
|
| 45 |
+
fi
|
.gitignore
ADDED
|
@@ -0,0 +1,18 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
.venv/*
|
| 2 |
+
config.py
|
| 3 |
+
token.pickle
|
| 4 |
+
rclone.conf
|
| 5 |
+
.netrc
|
| 6 |
+
log.txt
|
| 7 |
+
cfg.zip
|
| 8 |
+
accounts/*
|
| 9 |
+
thumbnails/*
|
| 10 |
+
rclone/*
|
| 11 |
+
tokens/*
|
| 12 |
+
cookies/*
|
| 13 |
+
mediainfo/*
|
| 14 |
+
sabnzbd/*
|
| 15 |
+
list_drives.txt
|
| 16 |
+
shortener.txt
|
| 17 |
+
cookies.txt
|
| 18 |
+
downloads/*
|
Dockerfile
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
FROM mysterysd/wzmlx:v3
|
| 2 |
+
|
| 3 |
+
WORKDIR /usr/src/app
|
| 4 |
+
|
| 5 |
+
RUN chmod 777 /usr/src/app
|
| 6 |
+
RUN uv venv --system-site-packages
|
| 7 |
+
|
| 8 |
+
COPY requirements.txt .
|
| 9 |
+
RUN uv pip install --no-cache-dir -r requirements.txt
|
| 10 |
+
|
| 11 |
+
COPY . .
|
| 12 |
+
|
| 13 |
+
CMD ["bash", "start.sh"]
|
LICENSE
ADDED
|
@@ -0,0 +1,661 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
GNU AFFERO GENERAL PUBLIC LICENSE
|
| 2 |
+
Version 3, 19 November 2007
|
| 3 |
+
|
| 4 |
+
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
| 5 |
+
Everyone is permitted to copy and distribute verbatim copies
|
| 6 |
+
of this license document, but changing it is not allowed.
|
| 7 |
+
|
| 8 |
+
Preamble
|
| 9 |
+
|
| 10 |
+
The GNU Affero General Public License is a free, copyleft license for
|
| 11 |
+
software and other kinds of works, specifically designed to ensure
|
| 12 |
+
cooperation with the community in the case of network server software.
|
| 13 |
+
|
| 14 |
+
The licenses for most software and other practical works are designed
|
| 15 |
+
to take away your freedom to share and change the works. By contrast,
|
| 16 |
+
our General Public Licenses are intended to guarantee your freedom to
|
| 17 |
+
share and change all versions of a program--to make sure it remains free
|
| 18 |
+
software for all its users.
|
| 19 |
+
|
| 20 |
+
When we speak of free software, we are referring to freedom, not
|
| 21 |
+
price. Our General Public Licenses are designed to make sure that you
|
| 22 |
+
have the freedom to distribute copies of free software (and charge for
|
| 23 |
+
them if you wish), that you receive source code or can get it if you
|
| 24 |
+
want it, that you can change the software or use pieces of it in new
|
| 25 |
+
free programs, and that you know you can do these things.
|
| 26 |
+
|
| 27 |
+
Developers that use our General Public Licenses protect your rights
|
| 28 |
+
with two steps: (1) assert copyright on the software, and (2) offer
|
| 29 |
+
you this License which gives you legal permission to copy, distribute
|
| 30 |
+
and/or modify the software.
|
| 31 |
+
|
| 32 |
+
A secondary benefit of defending all users' freedom is that
|
| 33 |
+
improvements made in alternate versions of the program, if they
|
| 34 |
+
receive widespread use, become available for other developers to
|
| 35 |
+
incorporate. Many developers of free software are heartened and
|
| 36 |
+
encouraged by the resulting cooperation. However, in the case of
|
| 37 |
+
software used on network servers, this result may fail to come about.
|
| 38 |
+
The GNU General Public License permits making a modified version and
|
| 39 |
+
letting the public access it on a server without ever releasing its
|
| 40 |
+
source code to the public.
|
| 41 |
+
|
| 42 |
+
The GNU Affero General Public License is designed specifically to
|
| 43 |
+
ensure that, in such cases, the modified source code becomes available
|
| 44 |
+
to the community. It requires the operator of a network server to
|
| 45 |
+
provide the source code of the modified version running there to the
|
| 46 |
+
users of that server. Therefore, public use of a modified version, on
|
| 47 |
+
a publicly accessible server, gives the public access to the source
|
| 48 |
+
code of the modified version.
|
| 49 |
+
|
| 50 |
+
An older license, called the Affero General Public License and
|
| 51 |
+
published by Affero, was designed to accomplish similar goals. This is
|
| 52 |
+
a different license, not a version of the Affero GPL, but Affero has
|
| 53 |
+
released a new version of the Affero GPL which permits relicensing under
|
| 54 |
+
this license.
|
| 55 |
+
|
| 56 |
+
The precise terms and conditions for copying, distribution and
|
| 57 |
+
modification follow.
|
| 58 |
+
|
| 59 |
+
TERMS AND CONDITIONS
|
| 60 |
+
|
| 61 |
+
0. Definitions.
|
| 62 |
+
|
| 63 |
+
"This License" refers to version 3 of the GNU Affero General Public License.
|
| 64 |
+
|
| 65 |
+
"Copyright" also means copyright-like laws that apply to other kinds of
|
| 66 |
+
works, such as semiconductor masks.
|
| 67 |
+
|
| 68 |
+
"The Program" refers to any copyrightable work licensed under this
|
| 69 |
+
License. Each licensee is addressed as "you". "Licensees" and
|
| 70 |
+
"recipients" may be individuals or organizations.
|
| 71 |
+
|
| 72 |
+
To "modify" a work means to copy from or adapt all or part of the work
|
| 73 |
+
in a fashion requiring copyright permission, other than the making of an
|
| 74 |
+
exact copy. The resulting work is called a "modified version" of the
|
| 75 |
+
earlier work or a work "based on" the earlier work.
|
| 76 |
+
|
| 77 |
+
A "covered work" means either the unmodified Program or a work based
|
| 78 |
+
on the Program.
|
| 79 |
+
|
| 80 |
+
To "propagate" a work means to do anything with it that, without
|
| 81 |
+
permission, would make you directly or secondarily liable for
|
| 82 |
+
infringement under applicable copyright law, except executing it on a
|
| 83 |
+
computer or modifying a private copy. Propagation includes copying,
|
| 84 |
+
distribution (with or without modification), making available to the
|
| 85 |
+
public, and in some countries other activities as well.
|
| 86 |
+
|
| 87 |
+
To "convey" a work means any kind of propagation that enables other
|
| 88 |
+
parties to make or receive copies. Mere interaction with a user through
|
| 89 |
+
a computer network, with no transfer of a copy, is not conveying.
|
| 90 |
+
|
| 91 |
+
An interactive user interface displays "Appropriate Legal Notices"
|
| 92 |
+
to the extent that it includes a convenient and prominently visible
|
| 93 |
+
feature that (1) displays an appropriate copyright notice, and (2)
|
| 94 |
+
tells the user that there is no warranty for the work (except to the
|
| 95 |
+
extent that warranties are provided), that licensees may convey the
|
| 96 |
+
work under this License, and how to view a copy of this License. If
|
| 97 |
+
the interface presents a list of user commands or options, such as a
|
| 98 |
+
menu, a prominent item in the list meets this criterion.
|
| 99 |
+
|
| 100 |
+
1. Source Code.
|
| 101 |
+
|
| 102 |
+
The "source code" for a work means the preferred form of the work
|
| 103 |
+
for making modifications to it. "Object code" means any non-source
|
| 104 |
+
form of a work.
|
| 105 |
+
|
| 106 |
+
A "Standard Interface" means an interface that either is an official
|
| 107 |
+
standard defined by a recognized standards body, or, in the case of
|
| 108 |
+
interfaces specified for a particular programming language, one that
|
| 109 |
+
is widely used among developers working in that language.
|
| 110 |
+
|
| 111 |
+
The "System Libraries" of an executable work include anything, other
|
| 112 |
+
than the work as a whole, that (a) is included in the normal form of
|
| 113 |
+
packaging a Major Component, but which is not part of that Major
|
| 114 |
+
Component, and (b) serves only to enable use of the work with that
|
| 115 |
+
Major Component, or to implement a Standard Interface for which an
|
| 116 |
+
implementation is available to the public in source code form. A
|
| 117 |
+
"Major Component", in this context, means a major essential component
|
| 118 |
+
(kernel, window system, and so on) of the specific operating system
|
| 119 |
+
(if any) on which the executable work runs, or a compiler used to
|
| 120 |
+
produce the work, or an object code interpreter used to run it.
|
| 121 |
+
|
| 122 |
+
The "Corresponding Source" for a work in object code form means all
|
| 123 |
+
the source code needed to generate, install, and (for an executable
|
| 124 |
+
work) run the object code and to modify the work, including scripts to
|
| 125 |
+
control those activities. However, it does not include the work's
|
| 126 |
+
System Libraries, or general-purpose tools or generally available free
|
| 127 |
+
programs which are used unmodified in performing those activities but
|
| 128 |
+
which are not part of the work. For example, Corresponding Source
|
| 129 |
+
includes interface definition files associated with source files for
|
| 130 |
+
the work, and the source code for shared libraries and dynamically
|
| 131 |
+
linked subprograms that the work is specifically designed to require,
|
| 132 |
+
such as by intimate data communication or control flow between those
|
| 133 |
+
subprograms and other parts of the work.
|
| 134 |
+
|
| 135 |
+
The Corresponding Source need not include anything that users
|
| 136 |
+
can regenerate automatically from other parts of the Corresponding
|
| 137 |
+
Source.
|
| 138 |
+
|
| 139 |
+
The Corresponding Source for a work in source code form is that
|
| 140 |
+
same work.
|
| 141 |
+
|
| 142 |
+
2. Basic Permissions.
|
| 143 |
+
|
| 144 |
+
All rights granted under this License are granted for the term of
|
| 145 |
+
copyright on the Program, and are irrevocable provided the stated
|
| 146 |
+
conditions are met. This License explicitly affirms your unlimited
|
| 147 |
+
permission to run the unmodified Program. The output from running a
|
| 148 |
+
covered work is covered by this License only if the output, given its
|
| 149 |
+
content, constitutes a covered work. This License acknowledges your
|
| 150 |
+
rights of fair use or other equivalent, as provided by copyright law.
|
| 151 |
+
|
| 152 |
+
You may make, run and propagate covered works that you do not
|
| 153 |
+
convey, without conditions so long as your license otherwise remains
|
| 154 |
+
in force. You may convey covered works to others for the sole purpose
|
| 155 |
+
of having them make modifications exclusively for you, or provide you
|
| 156 |
+
with facilities for running those works, provided that you comply with
|
| 157 |
+
the terms of this License in conveying all material for which you do
|
| 158 |
+
not control copyright. Those thus making or running the covered works
|
| 159 |
+
for you must do so exclusively on your behalf, under your direction
|
| 160 |
+
and control, on terms that prohibit them from making any copies of
|
| 161 |
+
your copyrighted material outside their relationship with you.
|
| 162 |
+
|
| 163 |
+
Conveying under any other circumstances is permitted solely under
|
| 164 |
+
the conditions stated below. Sublicensing is not allowed; section 10
|
| 165 |
+
makes it unnecessary.
|
| 166 |
+
|
| 167 |
+
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
| 168 |
+
|
| 169 |
+
No covered work shall be deemed part of an effective technological
|
| 170 |
+
measure under any applicable law fulfilling obligations under article
|
| 171 |
+
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
| 172 |
+
similar laws prohibiting or restricting circumvention of such
|
| 173 |
+
measures.
|
| 174 |
+
|
| 175 |
+
When you convey a covered work, you waive any legal power to forbid
|
| 176 |
+
circumvention of technological measures to the extent such circumvention
|
| 177 |
+
is effected by exercising rights under this License with respect to
|
| 178 |
+
the covered work, and you disclaim any intention to limit operation or
|
| 179 |
+
modification of the work as a means of enforcing, against the work's
|
| 180 |
+
users, your or third parties' legal rights to forbid circumvention of
|
| 181 |
+
technological measures.
|
| 182 |
+
|
| 183 |
+
4. Conveying Verbatim Copies.
|
| 184 |
+
|
| 185 |
+
You may convey verbatim copies of the Program's source code as you
|
| 186 |
+
receive it, in any medium, provided that you conspicuously and
|
| 187 |
+
appropriately publish on each copy an appropriate copyright notice;
|
| 188 |
+
keep intact all notices stating that this License and any
|
| 189 |
+
non-permissive terms added in accord with section 7 apply to the code;
|
| 190 |
+
keep intact all notices of the absence of any warranty; and give all
|
| 191 |
+
recipients a copy of this License along with the Program.
|
| 192 |
+
|
| 193 |
+
You may charge any price or no price for each copy that you convey,
|
| 194 |
+
and you may offer support or warranty protection for a fee.
|
| 195 |
+
|
| 196 |
+
5. Conveying Modified Source Versions.
|
| 197 |
+
|
| 198 |
+
You may convey a work based on the Program, or the modifications to
|
| 199 |
+
produce it from the Program, in the form of source code under the
|
| 200 |
+
terms of section 4, provided that you also meet all of these conditions:
|
| 201 |
+
|
| 202 |
+
a) The work must carry prominent notices stating that you modified
|
| 203 |
+
it, and giving a relevant date.
|
| 204 |
+
|
| 205 |
+
b) The work must carry prominent notices stating that it is
|
| 206 |
+
released under this License and any conditions added under section
|
| 207 |
+
7. This requirement modifies the requirement in section 4 to
|
| 208 |
+
"keep intact all notices".
|
| 209 |
+
|
| 210 |
+
c) You must license the entire work, as a whole, under this
|
| 211 |
+
License to anyone who comes into possession of a copy. This
|
| 212 |
+
License will therefore apply, along with any applicable section 7
|
| 213 |
+
additional terms, to the whole of the work, and all its parts,
|
| 214 |
+
regardless of how they are packaged. This License gives no
|
| 215 |
+
permission to license the work in any other way, but it does not
|
| 216 |
+
invalidate such permission if you have separately received it.
|
| 217 |
+
|
| 218 |
+
d) If the work has interactive user interfaces, each must display
|
| 219 |
+
Appropriate Legal Notices; however, if the Program has interactive
|
| 220 |
+
interfaces that do not display Appropriate Legal Notices, your
|
| 221 |
+
work need not make them do so.
|
| 222 |
+
|
| 223 |
+
A compilation of a covered work with other separate and independent
|
| 224 |
+
works, which are not by their nature extensions of the covered work,
|
| 225 |
+
and which are not combined with it such as to form a larger program,
|
| 226 |
+
in or on a volume of a storage or distribution medium, is called an
|
| 227 |
+
"aggregate" if the compilation and its resulting copyright are not
|
| 228 |
+
used to limit the access or legal rights of the compilation's users
|
| 229 |
+
beyond what the individual works permit. Inclusion of a covered work
|
| 230 |
+
in an aggregate does not cause this License to apply to the other
|
| 231 |
+
parts of the aggregate.
|
| 232 |
+
|
| 233 |
+
6. Conveying Non-Source Forms.
|
| 234 |
+
|
| 235 |
+
You may convey a covered work in object code form under the terms
|
| 236 |
+
of sections 4 and 5, provided that you also convey the
|
| 237 |
+
machine-readable Corresponding Source under the terms of this License,
|
| 238 |
+
in one of these ways:
|
| 239 |
+
|
| 240 |
+
a) Convey the object code in, or embodied in, a physical product
|
| 241 |
+
(including a physical distribution medium), accompanied by the
|
| 242 |
+
Corresponding Source fixed on a durable physical medium
|
| 243 |
+
customarily used for software interchange.
|
| 244 |
+
|
| 245 |
+
b) Convey the object code in, or embodied in, a physical product
|
| 246 |
+
(including a physical distribution medium), accompanied by a
|
| 247 |
+
written offer, valid for at least three years and valid for as
|
| 248 |
+
long as you offer spare parts or customer support for that product
|
| 249 |
+
model, to give anyone who possesses the object code either (1) a
|
| 250 |
+
copy of the Corresponding Source for all the software in the
|
| 251 |
+
product that is covered by this License, on a durable physical
|
| 252 |
+
medium customarily used for software interchange, for a price no
|
| 253 |
+
more than your reasonable cost of physically performing this
|
| 254 |
+
conveying of source, or (2) access to copy the
|
| 255 |
+
Corresponding Source from a network server at no charge.
|
| 256 |
+
|
| 257 |
+
c) Convey individual copies of the object code with a copy of the
|
| 258 |
+
written offer to provide the Corresponding Source. This
|
| 259 |
+
alternative is allowed only occasionally and noncommercially, and
|
| 260 |
+
only if you received the object code with such an offer, in accord
|
| 261 |
+
with subsection 6b.
|
| 262 |
+
|
| 263 |
+
d) Convey the object code by offering access from a designated
|
| 264 |
+
place (gratis or for a charge), and offer equivalent access to the
|
| 265 |
+
Corresponding Source in the same way through the same place at no
|
| 266 |
+
further charge. You need not require recipients to copy the
|
| 267 |
+
Corresponding Source along with the object code. If the place to
|
| 268 |
+
copy the object code is a network server, the Corresponding Source
|
| 269 |
+
may be on a different server (operated by you or a third party)
|
| 270 |
+
that supports equivalent copying facilities, provided you maintain
|
| 271 |
+
clear directions next to the object code saying where to find the
|
| 272 |
+
Corresponding Source. Regardless of what server hosts the
|
| 273 |
+
Corresponding Source, you remain obligated to ensure that it is
|
| 274 |
+
available for as long as needed to satisfy these requirements.
|
| 275 |
+
|
| 276 |
+
e) Convey the object code using peer-to-peer transmission, provided
|
| 277 |
+
you inform other peers where the object code and Corresponding
|
| 278 |
+
Source of the work are being offered to the general public at no
|
| 279 |
+
charge under subsection 6d.
|
| 280 |
+
|
| 281 |
+
A separable portion of the object code, whose source code is excluded
|
| 282 |
+
from the Corresponding Source as a System Library, need not be
|
| 283 |
+
included in conveying the object code work.
|
| 284 |
+
|
| 285 |
+
A "User Product" is either (1) a "consumer product", which means any
|
| 286 |
+
tangible personal property which is normally used for personal, family,
|
| 287 |
+
or household purposes, or (2) anything designed or sold for incorporation
|
| 288 |
+
into a dwelling. In determining whether a product is a consumer product,
|
| 289 |
+
doubtful cases shall be resolved in favor of coverage. For a particular
|
| 290 |
+
product received by a particular user, "normally used" refers to a
|
| 291 |
+
typical or common use of that class of product, regardless of the status
|
| 292 |
+
of the particular user or of the way in which the particular user
|
| 293 |
+
actually uses, or expects or is expected to use, the product. A product
|
| 294 |
+
is a consumer product regardless of whether the product has substantial
|
| 295 |
+
commercial, industrial or non-consumer uses, unless such uses represent
|
| 296 |
+
the only significant mode of use of the product.
|
| 297 |
+
|
| 298 |
+
"Installation Information" for a User Product means any methods,
|
| 299 |
+
procedures, authorization keys, or other information required to install
|
| 300 |
+
and execute modified versions of a covered work in that User Product from
|
| 301 |
+
a modified version of its Corresponding Source. The information must
|
| 302 |
+
suffice to ensure that the continued functioning of the modified object
|
| 303 |
+
code is in no case prevented or interfered with solely because
|
| 304 |
+
modification has been made.
|
| 305 |
+
|
| 306 |
+
If you convey an object code work under this section in, or with, or
|
| 307 |
+
specifically for use in, a User Product, and the conveying occurs as
|
| 308 |
+
part of a transaction in which the right of possession and use of the
|
| 309 |
+
User Product is transferred to the recipient in perpetuity or for a
|
| 310 |
+
fixed term (regardless of how the transaction is characterized), the
|
| 311 |
+
Corresponding Source conveyed under this section must be accompanied
|
| 312 |
+
by the Installation Information. But this requirement does not apply
|
| 313 |
+
if neither you nor any third party retains the ability to install
|
| 314 |
+
modified object code on the User Product (for example, the work has
|
| 315 |
+
been installed in ROM).
|
| 316 |
+
|
| 317 |
+
The requirement to provide Installation Information does not include a
|
| 318 |
+
requirement to continue to provide support service, warranty, or updates
|
| 319 |
+
for a work that has been modified or installed by the recipient, or for
|
| 320 |
+
the User Product in which it has been modified or installed. Access to a
|
| 321 |
+
network may be denied when the modification itself materially and
|
| 322 |
+
adversely affects the operation of the network or violates the rules and
|
| 323 |
+
protocols for communication across the network.
|
| 324 |
+
|
| 325 |
+
Corresponding Source conveyed, and Installation Information provided,
|
| 326 |
+
in accord with this section must be in a format that is publicly
|
| 327 |
+
documented (and with an implementation available to the public in
|
| 328 |
+
source code form), and must require no special password or key for
|
| 329 |
+
unpacking, reading or copying.
|
| 330 |
+
|
| 331 |
+
7. Additional Terms.
|
| 332 |
+
|
| 333 |
+
"Additional permissions" are terms that supplement the terms of this
|
| 334 |
+
License by making exceptions from one or more of its conditions.
|
| 335 |
+
Additional permissions that are applicable to the entire Program shall
|
| 336 |
+
be treated as though they were included in this License, to the extent
|
| 337 |
+
that they are valid under applicable law. If additional permissions
|
| 338 |
+
apply only to part of the Program, that part may be used separately
|
| 339 |
+
under those permissions, but the entire Program remains governed by
|
| 340 |
+
this License without regard to the additional permissions.
|
| 341 |
+
|
| 342 |
+
When you convey a copy of a covered work, you may at your option
|
| 343 |
+
remove any additional permissions from that copy, or from any part of
|
| 344 |
+
it. (Additional permissions may be written to require their own
|
| 345 |
+
removal in certain cases when you modify the work.) You may place
|
| 346 |
+
additional permissions on material, added by you to a covered work,
|
| 347 |
+
for which you have or can give appropriate copyright permission.
|
| 348 |
+
|
| 349 |
+
Notwithstanding any other provision of this License, for material you
|
| 350 |
+
add to a covered work, you may (if authorized by the copyright holders of
|
| 351 |
+
that material) supplement the terms of this License with terms:
|
| 352 |
+
|
| 353 |
+
a) Disclaiming warranty or limiting liability differently from the
|
| 354 |
+
terms of sections 15 and 16 of this License; or
|
| 355 |
+
|
| 356 |
+
b) Requiring preservation of specified reasonable legal notices or
|
| 357 |
+
author attributions in that material or in the Appropriate Legal
|
| 358 |
+
Notices displayed by works containing it; or
|
| 359 |
+
|
| 360 |
+
c) Prohibiting misrepresentation of the origin of that material, or
|
| 361 |
+
requiring that modified versions of such material be marked in
|
| 362 |
+
reasonable ways as different from the original version; or
|
| 363 |
+
|
| 364 |
+
d) Limiting the use for publicity purposes of names of licensors or
|
| 365 |
+
authors of the material; or
|
| 366 |
+
|
| 367 |
+
e) Declining to grant rights under trademark law for use of some
|
| 368 |
+
trade names, trademarks, or service marks; or
|
| 369 |
+
|
| 370 |
+
f) Requiring indemnification of licensors and authors of that
|
| 371 |
+
material by anyone who conveys the material (or modified versions of
|
| 372 |
+
it) with contractual assumptions of liability to the recipient, for
|
| 373 |
+
any liability that these contractual assumptions directly impose on
|
| 374 |
+
those licensors and authors.
|
| 375 |
+
|
| 376 |
+
All other non-permissive additional terms are considered "further
|
| 377 |
+
restrictions" within the meaning of section 10. If the Program as you
|
| 378 |
+
received it, or any part of it, contains a notice stating that it is
|
| 379 |
+
governed by this License along with a term that is a further
|
| 380 |
+
restriction, you may remove that term. If a license document contains
|
| 381 |
+
a further restriction but permits relicensing or conveying under this
|
| 382 |
+
License, you may add to a covered work material governed by the terms
|
| 383 |
+
of that license document, provided that the further restriction does
|
| 384 |
+
not survive such relicensing or conveying.
|
| 385 |
+
|
| 386 |
+
If you add terms to a covered work in accord with this section, you
|
| 387 |
+
must place, in the relevant source files, a statement of the
|
| 388 |
+
additional terms that apply to those files, or a notice indicating
|
| 389 |
+
where to find the applicable terms.
|
| 390 |
+
|
| 391 |
+
Additional terms, permissive or non-permissive, may be stated in the
|
| 392 |
+
form of a separately written license, or stated as exceptions;
|
| 393 |
+
the above requirements apply either way.
|
| 394 |
+
|
| 395 |
+
8. Termination.
|
| 396 |
+
|
| 397 |
+
You may not propagate or modify a covered work except as expressly
|
| 398 |
+
provided under this License. Any attempt otherwise to propagate or
|
| 399 |
+
modify it is void, and will automatically terminate your rights under
|
| 400 |
+
this License (including any patent licenses granted under the third
|
| 401 |
+
paragraph of section 11).
|
| 402 |
+
|
| 403 |
+
However, if you cease all violation of this License, then your
|
| 404 |
+
license from a particular copyright holder is reinstated (a)
|
| 405 |
+
provisionally, unless and until the copyright holder explicitly and
|
| 406 |
+
finally terminates your license, and (b) permanently, if the copyright
|
| 407 |
+
holder fails to notify you of the violation by some reasonable means
|
| 408 |
+
prior to 60 days after the cessation.
|
| 409 |
+
|
| 410 |
+
Moreover, your license from a particular copyright holder is
|
| 411 |
+
reinstated permanently if the copyright holder notifies you of the
|
| 412 |
+
violation by some reasonable means, this is the first time you have
|
| 413 |
+
received notice of violation of this License (for any work) from that
|
| 414 |
+
copyright holder, and you cure the violation prior to 30 days after
|
| 415 |
+
your receipt of the notice.
|
| 416 |
+
|
| 417 |
+
Termination of your rights under this section does not terminate the
|
| 418 |
+
licenses of parties who have received copies or rights from you under
|
| 419 |
+
this License. If your rights have been terminated and not permanently
|
| 420 |
+
reinstated, you do not qualify to receive new licenses for the same
|
| 421 |
+
material under section 10.
|
| 422 |
+
|
| 423 |
+
9. Acceptance Not Required for Having Copies.
|
| 424 |
+
|
| 425 |
+
You are not required to accept this License in order to receive or
|
| 426 |
+
run a copy of the Program. Ancillary propagation of a covered work
|
| 427 |
+
occurring solely as a consequence of using peer-to-peer transmission
|
| 428 |
+
to receive a copy likewise does not require acceptance. However,
|
| 429 |
+
nothing other than this License grants you permission to propagate or
|
| 430 |
+
modify any covered work. These actions infringe copyright if you do
|
| 431 |
+
not accept this License. Therefore, by modifying or propagating a
|
| 432 |
+
covered work, you indicate your acceptance of this License to do so.
|
| 433 |
+
|
| 434 |
+
10. Automatic Licensing of Downstream Recipients.
|
| 435 |
+
|
| 436 |
+
Each time you convey a covered work, the recipient automatically
|
| 437 |
+
receives a license from the original licensors, to run, modify and
|
| 438 |
+
propagate that work, subject to this License. You are not responsible
|
| 439 |
+
for enforcing compliance by third parties with this License.
|
| 440 |
+
|
| 441 |
+
An "entity transaction" is a transaction transferring control of an
|
| 442 |
+
organization, or substantially all assets of one, or subdividing an
|
| 443 |
+
organization, or merging organizations. If propagation of a covered
|
| 444 |
+
work results from an entity transaction, each party to that
|
| 445 |
+
transaction who receives a copy of the work also receives whatever
|
| 446 |
+
licenses to the work the party's predecessor in interest had or could
|
| 447 |
+
give under the previous paragraph, plus a right to possession of the
|
| 448 |
+
Corresponding Source of the work from the predecessor in interest, if
|
| 449 |
+
the predecessor has it or can get it with reasonable efforts.
|
| 450 |
+
|
| 451 |
+
You may not impose any further restrictions on the exercise of the
|
| 452 |
+
rights granted or affirmed under this License. For example, you may
|
| 453 |
+
not impose a license fee, royalty, or other charge for exercise of
|
| 454 |
+
rights granted under this License, and you may not initiate litigation
|
| 455 |
+
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
| 456 |
+
any patent claim is infringed by making, using, selling, offering for
|
| 457 |
+
sale, or importing the Program or any portion of it.
|
| 458 |
+
|
| 459 |
+
11. Patents.
|
| 460 |
+
|
| 461 |
+
A "contributor" is a copyright holder who authorizes use under this
|
| 462 |
+
License of the Program or a work on which the Program is based. The
|
| 463 |
+
work thus licensed is called the contributor's "contributor version".
|
| 464 |
+
|
| 465 |
+
A contributor's "essential patent claims" are all patent claims
|
| 466 |
+
owned or controlled by the contributor, whether already acquired or
|
| 467 |
+
hereafter acquired, that would be infringed by some manner, permitted
|
| 468 |
+
by this License, of making, using, or selling its contributor version,
|
| 469 |
+
but do not include claims that would be infringed only as a
|
| 470 |
+
consequence of further modification of the contributor version. For
|
| 471 |
+
purposes of this definition, "control" includes the right to grant
|
| 472 |
+
patent sublicenses in a manner consistent with the requirements of
|
| 473 |
+
this License.
|
| 474 |
+
|
| 475 |
+
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
| 476 |
+
patent license under the contributor's essential patent claims, to
|
| 477 |
+
make, use, sell, offer for sale, import and otherwise run, modify and
|
| 478 |
+
propagate the contents of its contributor version.
|
| 479 |
+
|
| 480 |
+
In the following three paragraphs, a "patent license" is any express
|
| 481 |
+
agreement or commitment, however denominated, not to enforce a patent
|
| 482 |
+
(such as an express permission to practice a patent or covenant not to
|
| 483 |
+
sue for patent infringement). To "grant" such a patent license to a
|
| 484 |
+
party means to make such an agreement or commitment not to enforce a
|
| 485 |
+
patent against the party.
|
| 486 |
+
|
| 487 |
+
If you convey a covered work, knowingly relying on a patent license,
|
| 488 |
+
and the Corresponding Source of the work is not available for anyone
|
| 489 |
+
to copy, free of charge and under the terms of this License, through a
|
| 490 |
+
publicly available network server or other readily accessible means,
|
| 491 |
+
then you must either (1) cause the Corresponding Source to be so
|
| 492 |
+
available, or (2) arrange to deprive yourself of the benefit of the
|
| 493 |
+
patent license for this particular work, or (3) arrange, in a manner
|
| 494 |
+
consistent with the requirements of this License, to extend the patent
|
| 495 |
+
license to downstream recipients. "Knowingly relying" means you have
|
| 496 |
+
actual knowledge that, but for the patent license, your conveying the
|
| 497 |
+
covered work in a country, or your recipient's use of the covered work
|
| 498 |
+
in a country, would infringe one or more identifiable patents in that
|
| 499 |
+
country that you have reason to believe are valid.
|
| 500 |
+
|
| 501 |
+
If, pursuant to or in connection with a single transaction or
|
| 502 |
+
arrangement, you convey, or propagate by procuring conveyance of, a
|
| 503 |
+
covered work, and grant a patent license to some of the parties
|
| 504 |
+
receiving the covered work authorizing them to use, propagate, modify
|
| 505 |
+
or convey a specific copy of the covered work, then the patent license
|
| 506 |
+
you grant is automatically extended to all recipients of the covered
|
| 507 |
+
work and works based on it.
|
| 508 |
+
|
| 509 |
+
A patent license is "discriminatory" if it does not include within
|
| 510 |
+
the scope of its coverage, prohibits the exercise of, or is
|
| 511 |
+
conditioned on the non-exercise of one or more of the rights that are
|
| 512 |
+
specifically granted under this License. You may not convey a covered
|
| 513 |
+
work if you are a party to an arrangement with a third party that is
|
| 514 |
+
in the business of distributing software, under which you make payment
|
| 515 |
+
to the third party based on the extent of your activity of conveying
|
| 516 |
+
the work, and under which the third party grants, to any of the
|
| 517 |
+
parties who would receive the covered work from you, a discriminatory
|
| 518 |
+
patent license (a) in connection with copies of the covered work
|
| 519 |
+
conveyed by you (or copies made from those copies), or (b) primarily
|
| 520 |
+
for and in connection with specific products or compilations that
|
| 521 |
+
contain the covered work, unless you entered into that arrangement,
|
| 522 |
+
or that patent license was granted, prior to 28 March 2007.
|
| 523 |
+
|
| 524 |
+
Nothing in this License shall be construed as excluding or limiting
|
| 525 |
+
any implied license or other defenses to infringement that may
|
| 526 |
+
otherwise be available to you under applicable patent law.
|
| 527 |
+
|
| 528 |
+
12. No Surrender of Others' Freedom.
|
| 529 |
+
|
| 530 |
+
If conditions are imposed on you (whether by court order, agreement or
|
| 531 |
+
otherwise) that contradict the conditions of this License, they do not
|
| 532 |
+
excuse you from the conditions of this License. If you cannot convey a
|
| 533 |
+
covered work so as to satisfy simultaneously your obligations under this
|
| 534 |
+
License and any other pertinent obligations, then as a consequence you may
|
| 535 |
+
not convey it at all. For example, if you agree to terms that obligate you
|
| 536 |
+
to collect a royalty for further conveying from those to whom you convey
|
| 537 |
+
the Program, the only way you could satisfy both those terms and this
|
| 538 |
+
License would be to refrain entirely from conveying the Program.
|
| 539 |
+
|
| 540 |
+
13. Remote Network Interaction; Use with the GNU General Public License.
|
| 541 |
+
|
| 542 |
+
Notwithstanding any other provision of this License, if you modify the
|
| 543 |
+
Program, your modified version must prominently offer all users
|
| 544 |
+
interacting with it remotely through a computer network (if your version
|
| 545 |
+
supports such interaction) an opportunity to receive the Corresponding
|
| 546 |
+
Source of your version by providing access to the Corresponding Source
|
| 547 |
+
from a network server at no charge, through some standard or customary
|
| 548 |
+
means of facilitating copying of software. This Corresponding Source
|
| 549 |
+
shall include the Corresponding Source for any work covered by version 3
|
| 550 |
+
of the GNU General Public License that is incorporated pursuant to the
|
| 551 |
+
following paragraph.
|
| 552 |
+
|
| 553 |
+
Notwithstanding any other provision of this License, you have
|
| 554 |
+
permission to link or combine any covered work with a work licensed
|
| 555 |
+
under version 3 of the GNU General Public License into a single
|
| 556 |
+
combined work, and to convey the resulting work. The terms of this
|
| 557 |
+
License will continue to apply to the part which is the covered work,
|
| 558 |
+
but the work with which it is combined will remain governed by version
|
| 559 |
+
3 of the GNU General Public License.
|
| 560 |
+
|
| 561 |
+
14. Revised Versions of this License.
|
| 562 |
+
|
| 563 |
+
The Free Software Foundation may publish revised and/or new versions of
|
| 564 |
+
the GNU Affero General Public License from time to time. Such new versions
|
| 565 |
+
will be similar in spirit to the present version, but may differ in detail to
|
| 566 |
+
address new problems or concerns.
|
| 567 |
+
|
| 568 |
+
Each version is given a distinguishing version number. If the
|
| 569 |
+
Program specifies that a certain numbered version of the GNU Affero General
|
| 570 |
+
Public License "or any later version" applies to it, you have the
|
| 571 |
+
option of following the terms and conditions either of that numbered
|
| 572 |
+
version or of any later version published by the Free Software
|
| 573 |
+
Foundation. If the Program does not specify a version number of the
|
| 574 |
+
GNU Affero General Public License, you may choose any version ever published
|
| 575 |
+
by the Free Software Foundation.
|
| 576 |
+
|
| 577 |
+
If the Program specifies that a proxy can decide which future
|
| 578 |
+
versions of the GNU Affero General Public License can be used, that proxy's
|
| 579 |
+
public statement of acceptance of a version permanently authorizes you
|
| 580 |
+
to choose that version for the Program.
|
| 581 |
+
|
| 582 |
+
Later license versions may give you additional or different
|
| 583 |
+
permissions. However, no additional obligations are imposed on any
|
| 584 |
+
author or copyright holder as a result of your choosing to follow a
|
| 585 |
+
later version.
|
| 586 |
+
|
| 587 |
+
15. Disclaimer of Warranty.
|
| 588 |
+
|
| 589 |
+
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
| 590 |
+
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
| 591 |
+
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
| 592 |
+
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
| 593 |
+
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
| 594 |
+
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
| 595 |
+
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
| 596 |
+
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
| 597 |
+
|
| 598 |
+
16. Limitation of Liability.
|
| 599 |
+
|
| 600 |
+
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
| 601 |
+
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
| 602 |
+
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
| 603 |
+
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
| 604 |
+
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
| 605 |
+
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
| 606 |
+
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
| 607 |
+
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
| 608 |
+
SUCH DAMAGES.
|
| 609 |
+
|
| 610 |
+
17. Interpretation of Sections 15 and 16.
|
| 611 |
+
|
| 612 |
+
If the disclaimer of warranty and limitation of liability provided
|
| 613 |
+
above cannot be given local legal effect according to their terms,
|
| 614 |
+
reviewing courts shall apply local law that most closely approximates
|
| 615 |
+
an absolute waiver of all civil liability in connection with the
|
| 616 |
+
Program, unless a warranty or assumption of liability accompanies a
|
| 617 |
+
copy of the Program in return for a fee.
|
| 618 |
+
|
| 619 |
+
END OF TERMS AND CONDITIONS
|
| 620 |
+
|
| 621 |
+
How to Apply These Terms to Your New Programs
|
| 622 |
+
|
| 623 |
+
If you develop a new program, and you want it to be of the greatest
|
| 624 |
+
possible use to the public, the best way to achieve this is to make it
|
| 625 |
+
free software which everyone can redistribute and change under these terms.
|
| 626 |
+
|
| 627 |
+
To do so, attach the following notices to the program. It is safest
|
| 628 |
+
to attach them to the start of each source file to most effectively
|
| 629 |
+
state the exclusion of warranty; and each file should have at least
|
| 630 |
+
the "copyright" line and a pointer to where the full notice is found.
|
| 631 |
+
|
| 632 |
+
<one line to give the program's name and a brief idea of what it does.>
|
| 633 |
+
Copyright (C) <year> <name of author>
|
| 634 |
+
|
| 635 |
+
This program is free software: you can redistribute it and/or modify
|
| 636 |
+
it under the terms of the GNU Affero General Public License as published
|
| 637 |
+
by the Free Software Foundation, either version 3 of the License, or
|
| 638 |
+
(at your option) any later version.
|
| 639 |
+
|
| 640 |
+
This program is distributed in the hope that it will be useful,
|
| 641 |
+
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
| 642 |
+
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
| 643 |
+
GNU Affero General Public License for more details.
|
| 644 |
+
|
| 645 |
+
You should have received a copy of the GNU Affero General Public License
|
| 646 |
+
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
| 647 |
+
|
| 648 |
+
Also add information on how to contact you by electronic and paper mail.
|
| 649 |
+
|
| 650 |
+
If your software can interact with users remotely through a computer
|
| 651 |
+
network, you should also make sure that it provides a way for users to
|
| 652 |
+
get its source. For example, if your program is a web application, its
|
| 653 |
+
interface could display a "Source" link that leads users to an archive
|
| 654 |
+
of the code. There are many ways you could offer source, and different
|
| 655 |
+
solutions will be better for different programs; see section 13 for the
|
| 656 |
+
specific requirements.
|
| 657 |
+
|
| 658 |
+
You should also get your employer (if you work as a programmer) or school,
|
| 659 |
+
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
| 660 |
+
For more information on this, and how to apply and follow the GNU AGPL, see
|
| 661 |
+
<https://www.gnu.org/licenses/>.
|
README.md
ADDED
|
@@ -0,0 +1,189 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
<p align="center">
|
| 2 |
+
<a href="https://github.com/SilentDemonSD/WZML-X">
|
| 3 |
+
<kbd>
|
| 4 |
+
<img width="250" src="https://graph.org/file/639fe4239b78e5862b302.jpg" alt="WZML-X Logo">
|
| 5 |
+
</kbd>
|
| 6 |
+
</a>
|
| 7 |
+
|
| 8 |
+
<i>This repository is a feature-enhanced version of the [mirror-leech-telegram-bot](https://github.com/anasty17/mirror-leech-telegram-bot). It integrates various improvements from multiple sources, expanding functionality while maintaining efficiency. Unlike the base repository, this version is fully deployable on Heroku.</i>
|
| 9 |
+
|
| 10 |
+
</p>
|
| 11 |
+
|
| 12 |
+
<div align=center>
|
| 13 |
+
|
| 14 |
+
[](#) [](#) [](#)|[](#) [](#) [](#) [](#)
|
| 15 |
+
:---:|:---:|
|
| 16 |
+
[](#) [](#) [](#) [](#)|[](#) [](#) [](#)
|
| 17 |
+
[](https://t.me/WZML_X) |[](https://t.me/WZML_Support) |
|
| 18 |
+
|
| 19 |
+
</div>
|
| 20 |
+
|
| 21 |
+
---
|
| 22 |
+
Below is a refined version that preserves all the important details while enhancing readability and design:
|
| 23 |
+
|
| 24 |
+
---
|
| 25 |
+
|
| 26 |
+
# Deployment Guide (VPS)
|
| 27 |
+
|
| 28 |
+
<details>
|
| 29 |
+
<summary><strong>View All Steps <kbd>Click Here</kbd></strong></summary>
|
| 30 |
+
|
| 31 |
+
---
|
| 32 |
+
|
| 33 |
+
## 1. Prerequisites
|
| 34 |
+
|
| 35 |
+
- **Tutorial Video from A to Z (Latest Video)**
|
| 36 |
+
- Special thanks to [Wiszky](https://github.com/vishnoe115)
|
| 37 |
+
|
| 38 |
+
[](https://youtu.be/xzLOLyKYl54)
|
| 39 |
+
|
| 40 |
+
---
|
| 41 |
+
|
| 42 |
+
## 2. Installing Requirements
|
| 43 |
+
|
| 44 |
+
Clone this repository:
|
| 45 |
+
|
| 46 |
+
```bash
|
| 47 |
+
git clone https://github.com/SilentDemonSD/WZML-X mirrorbot/ && cd mirrorbot
|
| 48 |
+
```
|
| 49 |
+
|
| 50 |
+
---
|
| 51 |
+
|
| 52 |
+
## 3. Build and Run the Docker Image
|
| 53 |
+
|
| 54 |
+
*Make sure you mount the app folder and install Docker following the official documentation.*
|
| 55 |
+
|
| 56 |
+
There are two methods to build and run the Docker image:
|
| 57 |
+
|
| 58 |
+
### 3.1 Using Official Docker Commands
|
| 59 |
+
|
| 60 |
+
- **Start Docker daemon** (skip if already running):
|
| 61 |
+
|
| 62 |
+
```bash
|
| 63 |
+
sudo dockerd
|
| 64 |
+
```
|
| 65 |
+
|
| 66 |
+
- **Build the Docker image:**
|
| 67 |
+
|
| 68 |
+
```bash
|
| 69 |
+
sudo docker build . -t wzmlx
|
| 70 |
+
```
|
| 71 |
+
|
| 72 |
+
- **Run the image:**
|
| 73 |
+
|
| 74 |
+
```bash
|
| 75 |
+
sudo docker run -p 80:80 -p 8080:8080 wzmlx
|
| 76 |
+
```
|
| 77 |
+
|
| 78 |
+
- **To stop the running image:**
|
| 79 |
+
|
| 80 |
+
First, list running containers:
|
| 81 |
+
|
| 82 |
+
```bash
|
| 83 |
+
sudo docker ps
|
| 84 |
+
```
|
| 85 |
+
|
| 86 |
+
Then, stop the container using its ID:
|
| 87 |
+
|
| 88 |
+
```bash
|
| 89 |
+
sudo docker stop <container_id>
|
| 90 |
+
```
|
| 91 |
+
|
| 92 |
+
---
|
| 93 |
+
|
| 94 |
+
### 3.2 Using docker-compose (Recommended)
|
| 95 |
+
|
| 96 |
+
**Note:** If you want to use ports other than 80 and 8080 for torrent file selection and rclone serve respectively, update them in [docker-compose.yml](https://github.com/weebzone/WZML-X/blob/master/docker-compose.yml).
|
| 97 |
+
|
| 98 |
+
- **Install docker-compose:**
|
| 99 |
+
|
| 100 |
+
```bash
|
| 101 |
+
sudo apt install docker-compose
|
| 102 |
+
```
|
| 103 |
+
|
| 104 |
+
- **Build and run the Docker image (or view the current running image):**
|
| 105 |
+
|
| 106 |
+
```bash
|
| 107 |
+
sudo docker-compose up
|
| 108 |
+
```
|
| 109 |
+
|
| 110 |
+
- **After editing files (e.g., using nano to edit start.sh), rebuild:**
|
| 111 |
+
|
| 112 |
+
```bash
|
| 113 |
+
sudo docker-compose up --build
|
| 114 |
+
```
|
| 115 |
+
|
| 116 |
+
- **To stop the running image:**
|
| 117 |
+
|
| 118 |
+
```bash
|
| 119 |
+
sudo docker-compose stop
|
| 120 |
+
```
|
| 121 |
+
|
| 122 |
+
- **To restart the image:**
|
| 123 |
+
|
| 124 |
+
```bash
|
| 125 |
+
sudo docker-compose start
|
| 126 |
+
```
|
| 127 |
+
|
| 128 |
+
- **To view the latest logs from the running container (after mounting the folder):**
|
| 129 |
+
|
| 130 |
+
```bash
|
| 131 |
+
sudo docker-compose up
|
| 132 |
+
```
|
| 133 |
+
|
| 134 |
+
- **Tutorial Video for docker-compose and checking ports:**
|
| 135 |
+
|
| 136 |
+
[](https://youtu.be/c8_TU1sPK08)
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
------
|
| 140 |
+
|
| 141 |
+
#### Docker Notes
|
| 142 |
+
|
| 143 |
+
**IMPORTANT NOTES**:
|
| 144 |
+
|
| 145 |
+
1. Set `BASE_URL_PORT` and `RCLONE_SERVE_PORT` variables to any port you want to use. Default is `80` and `8080` respectively.
|
| 146 |
+
2. You should stop the running image before deleting the container and you should delete the container before the image.
|
| 147 |
+
3. To delete the container (this will not affect the image):
|
| 148 |
+
|
| 149 |
+
```
|
| 150 |
+
sudo docker container prune
|
| 151 |
+
```
|
| 152 |
+
|
| 153 |
+
4. To delete the images:
|
| 154 |
+
|
| 155 |
+
```
|
| 156 |
+
sudo docker image prune -a
|
| 157 |
+
```
|
| 158 |
+
|
| 159 |
+
5. Check the number of processing units of your machine with the `nproc` command and multiply it by 4, then edit `AsyncIOThreadsCount` in qBittorrent.conf.
|
| 160 |
+
|
| 161 |
+
</details>
|
| 162 |
+
|
| 163 |
+
|
| 164 |
+
------
|
| 165 |
+
|
| 166 |
+
# Deployment Guide (Heroku)
|
| 167 |
+
|
| 168 |
+
<details>
|
| 169 |
+
<summary><strong>View All Steps <kbd>Click Here</kbd></strong></summary>
|
| 170 |
+
|
| 171 |
+
---
|
| 172 |
+
|
| 173 |
+
**Check the Docs Here :** [Click Here](https://github.com/SilentDemonSD/WZ-Deploy/tree/main?tab=readme-ov-file#2%EF%B8%8F⃣-method-2-github-workflow-guide)
|
| 174 |
+
|
| 175 |
+
---
|
| 176 |
+
|
| 177 |
+
</details>
|
| 178 |
+
|
| 179 |
+
## 🏅 **Bot Authors**
|
| 180 |
+
<details>
|
| 181 |
+
<summary><b>Click Here For Description</b></summary>
|
| 182 |
+
|
| 183 |
+
|<img width="80" src="https://avatars.githubusercontent.com/u/105407900?v=4">|<img width="80" src="https://avatars.githubusercontent.com/u/93116400?v=4">|<img width="80" src="https://avatars.githubusercontent.com/u/113664541?v=4">|<img width="80" src="https://avatars.githubusercontent.com/u/84721324?v=4">|
|
| 184 |
+
|:---:|:---:|:---:|:---:|
|
| 185 |
+
|[`SilentDemonSD`](https://github.com/SilentDemonSD)|[`RjRiajul`](https://github.com/rjriajul)|[`CodeWithWeeb`](https://github.com/weebzone)|[`Maverick`](https://github.com/MajnuRangeela)|
|
| 186 |
+
|Author and DDL, UI Design, More Customs..|Co-Author & Maintainer|Author and Wraps Up Features|Co-Author & Bug Tester|
|
| 187 |
+
|
| 188 |
+
</details>
|
| 189 |
+
|
bot/__init__.py
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ruff: noqa: E402
# Module-level bootstrap: uvloop must be installed before anything creates an
# event loop, hence the early import/install and the E402 suppression.

from uvloop import install

install()

from subprocess import run as srun
from os import getcwd
from asyncio import Lock, new_event_loop, set_event_loop
from logging import (
    ERROR,
    INFO,
    WARNING,
    FileHandler,
    StreamHandler,
    basicConfig,
    getLogger,
)
from os import cpu_count
from time import time

from apscheduler.schedulers.asyncio import AsyncIOScheduler

from .core.config_manager import BinConfig
from sabnzbdapi import SabnzbdClient

# Quieten chatty third-party loggers.
getLogger("requests").setLevel(WARNING)
getLogger("urllib3").setLevel(WARNING)
getLogger("pyrogram").setLevel(ERROR)
getLogger("apscheduler").setLevel(ERROR)
getLogger("httpx").setLevel(WARNING)
getLogger("pymongo").setLevel(WARNING)
# (fix) "aiohttp" was previously set to ERROR and then immediately to WARNING;
# only the final (effective) WARNING call is kept.
getLogger("aiohttp").setLevel(WARNING)


bot_start_time = time()

bot_loop = new_event_loop()
set_event_loop(bot_loop)

basicConfig(
    format="[%(asctime)s] [%(levelname)s] - %(message)s",  # [%(filename)s:%(lineno)d]
    datefmt="%d-%b-%y %I:%M:%S %p",
    handlers=[FileHandler("log.txt"), StreamHandler()],
    level=INFO,
)

LOGGER = getLogger(__name__)
# (fix) cpu_count() can return None on exotic platforms; fall back to 1 so the
# integer arithmetic below cannot raise TypeError.
cpu_no = cpu_count() or 1
threads = max(1, cpu_no // 2)
cores = ",".join(str(i) for i in range(threads))

# Shared mutable state used across listeners/modules.
bot_cache = {}
DOWNLOAD_DIR = "/usr/src/app/downloads/"
intervals = {"status": {}, "qb": "", "jd": "", "nzb": "", "stopAll": False}
qb_torrents = {}
jd_downloads = {}
nzb_jobs = {}
user_data = {}
aria2_options = {}
qbit_options = {}
nzb_options = {}
queued_dl = {}
queued_up = {}
status_dict = {}
task_dict = {}
rss_dict = {}
shortener_dict = {}
# Keys that are configured via deployment variables (see config manager).
var_list = [
    "BOT_TOKEN",
    "TELEGRAM_API",
    "TELEGRAM_HASH",
    "OWNER_ID",
    "DATABASE_URL",
    "BASE_URL",
    "UPSTREAM_REPO",
    "UPSTREAM_BRANCH",
    "UPDATE_PKGS",
]
auth_chats = {}
excluded_extensions = ["aria2", "!qB"]
drives_names = []
drives_ids = []
index_urls = []
sudo_users = []
non_queued_dl = set()
non_queued_up = set()
multi_tags = set()
# Async locks guarding the dicts/sets above.
task_dict_lock = Lock()
queue_dict_lock = Lock()
qb_listener_lock = Lock()
nzb_listener_lock = Lock()
jd_listener_lock = Lock()
cpu_eater_lock = Lock()
same_directory_lock = Lock()

sabnzbd_client = SabnzbdClient(
    host="http://localhost",
    api_key="admin",
    port="8070",
)
# Launch the (renamed) qBittorrent binary as a daemon with a local profile dir.
srun([BinConfig.QBIT_NAME, "-d", f"--profile={getcwd()}"], check=False)

scheduler = AsyncIOScheduler(event_loop=bot_loop)
|
bot/__main__.py
ADDED
|
@@ -0,0 +1,141 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ruff: noqa: E402
# Entry point. Config is loaded before importing anything that reads it at
# import time, hence the deliberate import ordering (E402 suppressed).

from .core.config_manager import Config

Config.load()

from datetime import datetime
from logging import Formatter
from time import localtime

from pytz import timezone

from . import LOGGER, bot_loop
from .core.tg_client import TgClient


async def main():
    """Boot sequence: load settings, set up timezone-aware log timestamps,
    start Telegram clients, then initialise all download/upload services."""
    from asyncio import gather

    from .core.startup import (
        load_configurations,
        load_settings,
        save_settings,
        update_aria2_options,
        update_nzb_options,
        update_qb_options,
        update_variables,
    )

    await load_settings()

    # Fall back to UTC when the configured timezone name is invalid.
    try:
        tz = timezone(Config.TIMEZONE)
    except Exception:
        from pytz import utc

        tz = utc

    def changetz(*args):
        # logging.Formatter time converter; falls back to local time on error.
        try:
            return datetime.now(tz).timetuple()
        except Exception:
            return localtime()

    Formatter.converter = changetz

    await gather(
        TgClient.start_bot(), TgClient.start_user(), TgClient.start_helper_bots()
    )
    await gather(load_configurations(), update_variables())

    from .core.torrent_manager import TorrentManager

    await TorrentManager.initiate()
    await gather(
        update_qb_options(),
        update_aria2_options(),
        update_nzb_options(),
    )
    from .core.jdownloader_booter import jdownloader
    from .helper.ext_utils.files_utils import clean_all
    from .helper.ext_utils.telegraph_helper import telegraph
    from .helper.mirror_leech_utils.rclone_utils.serve import rclone_serve_booter
    from .modules import (
        get_packages_version,
        initiate_search_tools,
        restart_notification,
    )

    await gather(
        save_settings(),
        jdownloader.boot(),
        clean_all(),
        initiate_search_tools(),
        get_packages_version(),
        restart_notification(),
        telegraph.create_account(),
        rclone_serve_booter(),
    )


bot_loop.run_until_complete(main())

from .core.handlers import add_handlers
from .helper.ext_utils.bot_utils import create_help_buttons
from .helper.listeners.aria2_listener import add_aria2_callbacks

add_aria2_callbacks()
create_help_buttons()
add_handlers()

from .core.plugin_manager import get_plugin_manager
from .modules.plugin_manager import register_plugin_commands

plugin_manager = get_plugin_manager()
plugin_manager.bot = TgClient.bot
register_plugin_commands()

from pyrogram.filters import regex
from pyrogram.handlers import CallbackQueryHandler

# (fix) `add_handlers` was imported a second time here; the duplicate import
# has been removed — it is already in scope from the import above.
from .helper.ext_utils.bot_utils import new_task
from .helper.telegram_helper.filters import CustomFilters
from .helper.telegram_helper.message_utils import (
    delete_message,
    edit_message,
    send_message,
)


@new_task
async def restart_sessions_confirm(_, query):
    """Handle the session-restart confirmation callback (confirm/cancel)."""
    data = query.data.split()
    message = query.message
    if data[1] == "confirm":
        reply_to = message.reply_to_message
        restart_message = await send_message(reply_to, "Restarting Session(s)...")
        await delete_message(message)
        await TgClient.reload()
        # Re-register all handlers (including this one) on the fresh client.
        add_handlers()
        TgClient.bot.add_handler(
            CallbackQueryHandler(
                restart_sessions_confirm,
                filters=regex("^sessionrestart") & CustomFilters.sudo,
            )
        )
        await edit_message(restart_message, "Session(s) Restarted Successfully!")
    else:
        await delete_message(message)


TgClient.bot.add_handler(
    CallbackQueryHandler(
        restart_sessions_confirm,
        filters=regex("^sessionrestart") & CustomFilters.sudo,
    )
)

LOGGER.info("WZ Client(s) & Services Started !")
bot_loop.run_forever()
|
bot/core/config_manager.py
ADDED
|
@@ -0,0 +1,258 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from importlib import import_module
|
| 2 |
+
from os import getenv
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
class Config:
    """Central bot configuration.

    Every option is a class attribute whose default value doubles as its type
    schema: `load()` applies an optional local `config.py` module first and
    then environment variables, coercing each incoming value to the type of
    the corresponding default via `_convert_env_type`.
    """

    AS_DOCUMENT = False
    AUTHORIZED_CHATS = ""
    BASE_URL = ""
    BASE_URL_PORT = 80
    BOT_TOKEN = ""
    HELPER_TOKENS = ""
    BOT_MAX_TASKS = 0
    BOT_PM = False
    CMD_SUFFIX = ""
    DEFAULT_LANG = "en"
    DATABASE_URL = ""
    DEFAULT_UPLOAD = "rc"
    DELETE_LINKS = False
    DEBRID_LINK_API = ""
    DISABLE_TORRENTS = False
    DISABLE_LEECH = False
    DISABLE_BULK = False
    DISABLE_MULTI = False
    DISABLE_SEED = False
    DISABLE_FF_MODE = False
    EQUAL_SPLITS = False
    EXCLUDED_EXTENSIONS = ""
    FFMPEG_CMDS = {}
    FILELION_API = ""
    MEDIA_STORE = True
    FORCE_SUB_IDS = ""
    GOFILE_API = ""
    GOFILE_FOLDER_ID = ""
    PIXELDRAIN_KEY = ""
    PROTECTED_API = ""
    BUZZHEAVIER_API = ""
    GDRIVE_ID = ""
    GD_DESP = "Uploaded with WZ Bot"
    AUTHOR_NAME = "WZML-X"
    AUTHOR_URL = "https://t.me/WZML_X"
    INSTADL_API = ""
    IMDB_TEMPLATE = ""
    INCOMPLETE_TASK_NOTIFIER = False
    INDEX_URL = ""
    IS_TEAM_DRIVE = False
    JD_EMAIL = ""
    JD_PASS = ""
    MEGA_EMAIL = ""
    MEGA_PASSWORD = ""
    DIRECT_LIMIT = 0
    MEGA_LIMIT = 0
    TORRENT_LIMIT = 0
    GD_DL_LIMIT = 0
    RC_DL_LIMIT = 0
    CLONE_LIMIT = 0
    JD_LIMIT = 0
    NZB_LIMIT = 0
    YTDLP_LIMIT = 0
    PLAYLIST_LIMIT = 0
    LEECH_LIMIT = 0
    EXTRACT_LIMIT = 0
    ARCHIVE_LIMIT = 0
    STORAGE_LIMIT = 0
    LEECH_DUMP_CHAT = ""
    LINKS_LOG_ID = ""
    MIRROR_LOG_ID = ""
    CLEAN_LOG_MSG = False
    LEECH_PREFIX = ""
    LEECH_CAPTION = ""
    LEECH_SUFFIX = ""
    LEECH_FONT = ""
    LEECH_SPLIT_SIZE = 2097152000
    MEDIA_GROUP = False
    HYBRID_LEECH = True
    HYPER_THREADS = 0
    HYDRA_IP = ""
    HYDRA_API_KEY = ""
    NAME_SWAP = ""
    OWNER_ID = 0
    QUEUE_ALL = 0
    QUEUE_DOWNLOAD = 0
    QUEUE_UPLOAD = 0
    RCLONE_FLAGS = ""
    RCLONE_PATH = ""
    RCLONE_SERVE_URL = ""
    SHOW_CLOUD_LINK = True
    RCLONE_SERVE_USER = ""
    RCLONE_SERVE_PASS = ""
    RCLONE_SERVE_PORT = 8080
    RSS_CHAT = ""
    RSS_DELAY = 600
    RSS_SIZE_LIMIT = 0
    SEARCH_API_LINK = ""
    SEARCH_LIMIT = 0
    SEARCH_PLUGINS = []
    SET_COMMANDS = True
    STATUS_LIMIT = 10
    STATUS_UPDATE_INTERVAL = 15
    STOP_DUPLICATE = False
    STREAMWISH_API = ""
    SUDO_USERS = ""
    TELEGRAM_API = 0
    TELEGRAM_HASH = ""
    TG_PROXY = None
    THUMBNAIL_LAYOUT = ""
    VERIFY_TIMEOUT = 0
    LOGIN_PASS = ""
    TORRENT_TIMEOUT = 0
    TIMEZONE = "Asia/Kolkata"
    USER_MAX_TASKS = 0
    USER_TIME_INTERVAL = 0
    UPLOAD_PATHS = {}
    UPSTREAM_REPO = ""
    UPSTREAM_BRANCH = "master"
    UPDATE_PKGS = True
    USENET_SERVERS = []
    USER_SESSION_STRING = ""
    USER_TRANSMISSION = True
    USE_SERVICE_ACCOUNTS = False
    WEB_PINCODE = True
    YT_DLP_OPTIONS = {}
    YT_DESP = "Uploaded with WZML-X bot"
    YT_TAGS = ["telegram", "bot", "youtube"]
    YT_CATEGORY_ID = 22
    YT_PRIVACY_STATUS = "unlisted"

    @classmethod
    def get(cls, key):
        """Return the value for `key`, or None when the key is unknown."""
        # (fix) single getattr with a default instead of hasattr()+getattr().
        return getattr(cls, key, None)

    @classmethod
    def set(cls, key, value):
        """Assign a known configuration key, coercing `value` to the type of
        the attribute's default.

        Raises:
            KeyError: if `key` is not a declared configuration attribute.
        """
        if hasattr(cls, key):
            value = cls._convert_env_type(key, value)
            setattr(cls, key, value)
        else:
            raise KeyError(f"{key} is not a valid configuration key.")

    @classmethod
    def get_all(cls):
        """Return a dict of every configuration key -> current value
        (dunders and classmethods excluded)."""
        return {
            key: getattr(cls, key)
            for key in cls.__dict__
            if not key.startswith("__") and not callable(getattr(cls, key))
        }

    @classmethod
    def load(cls):
        """Load configuration: local config.py first, then env overrides."""
        cls.load_config()
        cls.load_env()

    @classmethod
    def load_config(cls):
        """Apply a local `config.py` module (if present) and validate that
        the mandatory credentials are set.

        Raises:
            ValueError: when a required credential is missing/blank.
        """
        try:
            settings = import_module("config")
        except ModuleNotFoundError:
            return
        for attr in dir(settings):
            # (fix) skip dunders: without this, module attributes such as
            # __name__/__doc__ would be copied onto the class (e.g. renaming
            # the class to "config").
            if attr.startswith("__"):
                continue
            if hasattr(cls, attr):
                value = getattr(settings, attr)
                if not value:
                    continue
                if isinstance(value, str):
                    value = value.strip()
                if attr == "DEFAULT_UPLOAD" and value != "gd":
                    value = "rc"
                elif attr in [
                    "BASE_URL",
                    "RCLONE_SERVE_URL",
                    "INDEX_URL",
                    "SEARCH_API_LINK",
                ]:
                    if value:
                        value = value.strip("/")
                elif attr == "USENET_SERVERS":
                    # Ignore server lists whose first entry has no host.
                    try:
                        if not value[0].get("host"):
                            continue
                    except Exception:
                        continue
                setattr(cls, attr, value)
        for key in ["BOT_TOKEN", "OWNER_ID", "TELEGRAM_API", "TELEGRAM_HASH"]:
            value = getattr(cls, key)
            if isinstance(value, str):
                value = value.strip()
            if not value:
                raise ValueError(f"{key} variable is missing!")

    @classmethod
    def load_env(cls):
        """Override declared keys from same-named environment variables."""
        for key in cls.get_all():
            env_value = getenv(key)
            if env_value is not None:
                # (fix) set() already coerces via _convert_env_type; the
                # previous extra conversion here was redundant.
                cls.set(key, env_value)

    @classmethod
    def _convert_env_type(cls, key, value):
        """Coerce `value` to the type of `key`'s default.

        Bool defaults accept "true"/"1"/"yes" (case-insensitive); numeric
        defaults fall back to the current value when conversion fails; other
        defaults (str, list, dict, None) pass the value through unchanged.
        """
        original_value = getattr(cls, key, None)
        if original_value is None:
            return value
        elif isinstance(original_value, bool):
            if isinstance(value, bool):
                return value
            return str(value).lower() in ("true", "1", "yes")
        elif isinstance(original_value, int):
            if isinstance(value, int):
                return value
            try:
                return int(value)
            except (ValueError, TypeError):
                return original_value
        elif isinstance(original_value, float):
            if isinstance(value, float):
                return value
            try:
                return float(value)
            except (ValueError, TypeError):
                return original_value
        return value

    @classmethod
    def load_dict(cls, config_dict):
        """Apply a dict of settings (e.g. from the database), with the same
        normalisation rules as load_config().

        Raises:
            ValueError: when a required credential ends up missing/blank.
        """
        for key, value in config_dict.items():
            if hasattr(cls, key):
                if key == "DEFAULT_UPLOAD" and value != "gd":
                    value = "rc"
                elif key in [
                    "BASE_URL",
                    "RCLONE_SERVE_URL",
                    "INDEX_URL",
                    "SEARCH_API_LINK",
                ]:
                    if value:
                        value = value.strip("/")
                elif key == "USENET_SERVERS":
                    try:
                        if not value[0].get("host"):
                            value = []
                    except Exception:
                        value = []
                value = cls._convert_env_type(key, value)
                setattr(cls, key, value)
        for key in ["BOT_TOKEN", "OWNER_ID", "TELEGRAM_API", "TELEGRAM_HASH"]:
            value = getattr(cls, key)
            if isinstance(value, str):
                value = value.strip()
            if not value:
                raise ValueError(f"{key} variable is missing!")
|
| 251 |
+
|
| 252 |
+
|
| 253 |
+
class BinConfig:
    # Names of the bundled CLI binaries as installed on disk.  bot/__init__.py
    # launches QBIT_NAME with qBittorrent-nox-style "-d --profile=" flags, so
    # these appear to be renamed copies of the stock tools (aria2c,
    # qbittorrent-nox, ffmpeg, rclone, SABnzbd) — TODO confirm against the
    # Dockerfile/build scripts.
    ARIA2_NAME = "blitzfetcher"
    QBIT_NAME = "stormtorrent"
    FFMPEG_NAME = "mediaforge"
    RCLONE_NAME = "ghostdrive"
    SABNZBD_NAME = "newsripper"
|
bot/core/handlers.py
ADDED
|
@@ -0,0 +1,423 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ruff: noqa: F403, F405
|
| 2 |
+
|
| 3 |
+
from pyrogram.filters import command, regex
|
| 4 |
+
from pyrogram.handlers import CallbackQueryHandler, EditedMessageHandler, MessageHandler
|
| 5 |
+
from pyrogram.types import BotCommand
|
| 6 |
+
|
| 7 |
+
from ..core.config_manager import Config
|
| 8 |
+
from ..helper.ext_utils.help_messages import BOT_COMMANDS
|
| 9 |
+
from ..helper.telegram_helper.bot_commands import BotCommands
|
| 10 |
+
from ..helper.telegram_helper.filters import CustomFilters
|
| 11 |
+
from ..modules import *
|
| 12 |
+
from .tg_client import TgClient
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
def add_handlers():
    """Register every Telegram handler on the main bot client.

    The original registration order is preserved (pyrogram dispatches
    same-group handlers in registration order).  When Config.SET_COMMANDS is
    enabled, the visible bot command menu is also rebuilt and published.
    """
    bot = TgClient.bot
    sudo = CustomFilters.sudo
    authorized = CustomFilters.authorized

    def _cmd(func, cmd_name, flt=None):
        # MessageHandler for the BotCommands.<cmd_name> command, optionally
        # AND-combined with an authorization filter.
        cmd_filter = command(getattr(BotCommands, cmd_name), case_sensitive=True)
        return MessageHandler(
            func, filters=cmd_filter if flt is None else cmd_filter & flt
        )

    def _cb(func, pattern, flt=None):
        # CallbackQueryHandler matching callback data against `pattern`.
        cb_filter = regex(pattern)
        return CallbackQueryHandler(
            func, filters=cb_filter if flt is None else cb_filter & flt
        )

    handlers = [
        _cmd(authorize, "AuthorizeCommand", sudo),
        _cmd(unauthorize, "UnAuthorizeCommand", sudo),
        _cmd(add_sudo, "AddSudoCommand", sudo),
        _cmd(remove_sudo, "RmSudoCommand", sudo),
        _cmd(send_bot_settings, "BotSetCommand", sudo),
        _cmd(broadcast, "BroadcastCommand", sudo),
        _cb(edit_bot_settings, "^botset", sudo),
        # /cancel_<gid>-style invocations are matched by regex, not command().
        MessageHandler(
            cancel,
            filters=regex(rf"^/{BotCommands.CancelTaskCommand[1]}?(?:_\w+).*$")
            & authorized,
        ),
        _cmd(cancel_all_buttons, "CancelAllCommand", authorized),
        _cb(cancel_all_update, "^canall"),
        _cb(cancel_multi, "^stopm"),
        _cmd(clone_node, "CloneCommand", authorized),
        _cmd(aioexecute, "AExecCommand", sudo),
        _cmd(execute, "ExecCommand", sudo),
        _cmd(clear, "ClearLocalsCommand", sudo),
        _cmd(select, "SelectCommand", authorized),
        _cb(confirm_selection, "^sel"),
        _cmd(remove_from_queue, "ForceStartCommand", authorized),
        _cmd(count_node, "CountCommand", authorized),
        _cmd(delete_file, "DeleteCommand", authorized),
        _cmd(gdrive_search, "ListCommand", authorized),
        _cb(select_type, "^list_types"),
        _cb(arg_usage, "^help"),
        _cmd(mirror, "MirrorCommand", authorized),
        _cmd(qb_mirror, "QbMirrorCommand", authorized),
        _cmd(jd_mirror, "JdMirrorCommand", authorized),
        _cmd(nzb_mirror, "NzbMirrorCommand", authorized),
        _cmd(leech, "LeechCommand", authorized),
        _cmd(qb_leech, "QbLeechCommand", authorized),
        _cmd(jd_leech, "JdLeechCommand", authorized),
        _cmd(nzb_leech, "NzbLeechCommand", authorized),
        _cmd(uphoster, "UpHosterCommand", authorized),
        _cmd(get_rss_menu, "RssCommand", authorized),
        _cb(rss_listener, "^rss"),
        _cmd(run_shell, "ShellCommand", sudo),
        # Editing a /shell message re-runs it, but only for the owner.
        EditedMessageHandler(
            run_shell,
            filters=command(BotCommands.ShellCommand, case_sensitive=True)
            & CustomFilters.owner,
        ),
        _cmd(start, "StartCommand"),
        _cmd(login, "LoginCommand"),
        _cmd(log, "LogCommand", sudo),
        _cmd(restart_bot, "RestartCommand", sudo),
        _cb(confirm_restart, "^botrestart", sudo),
        _cmd(restart_sessions, "RestartSessionsCommand", sudo),
        _cmd(imdb_search, "IMDBCommand", authorized),
        _cb(imdb_callback, "^imdb"),
        _cmd(ping, "PingCommand", authorized),
        _cmd(bot_help, "HelpCommand", authorized),
        _cmd(mediainfo, "MediaInfoCommand", authorized),
        _cmd(bot_stats, "StatsCommand", authorized),
        _cmd(task_status, "StatusCommand", authorized),
        _cb(status_pages, "^status"),
        _cb(stats_pages, "^stats"),
        _cb(log_cb, "^log"),
        _cb(start_cb, "^start"),
        _cmd(torrent_search, "SearchCommand", authorized),
        _cb(torrent_search_update, "^torser"),
        _cmd(get_users_settings, "UsersCommand", sudo),
        _cmd(send_user_settings, "UserSetCommand", CustomFilters.authorized_uset),
        _cb(edit_user_settings, "^userset"),
        _cmd(ytdl, "YtdlCommand", authorized),
        _cmd(ytdl_leech, "YtdlLeechCommand", authorized),
        _cmd(hydra_search, "NzbSearchCommand", authorized),
    ]
    for handler in handlers:
        bot.add_handler(handler)

    if Config.SET_COMMANDS:
        # Rebinds this module's imported BOT_COMMANDS name only; the source
        # dict in help_messages is not mutated.
        global BOT_COMMANDS

        def insert_at(d, k, v, i):
            # Return a new dict with (k, v) inserted at position i.
            return dict(list(d.items())[:i] + [(k, v)] + list(d.items())[i:])

        if Config.JD_EMAIL and Config.JD_PASS:
            BOT_COMMANDS = insert_at(
                BOT_COMMANDS,
                "JdMirror",
                "[link/file] Mirror to Upload Destination using JDownloader",
                2,
            )
            BOT_COMMANDS = insert_at(
                BOT_COMMANDS,
                "JdLeech",
                "[link/file] Leech files to Upload to Telegram using JDownloader",
                6,
            )

        if len(Config.USENET_SERVERS) != 0:
            BOT_COMMANDS = insert_at(
                BOT_COMMANDS,
                "NzbMirror",
                "[nzb] Mirror to Upload Destination using Sabnzbd",
                2,
            )
            BOT_COMMANDS = insert_at(
                BOT_COMMANDS,
                "NzbLeech",
                "[nzb] Leech files to Upload to Telegram using Sabnzbd",
                6,
            )

        if Config.LOGIN_PASS:
            BOT_COMMANDS = insert_at(
                BOT_COMMANDS, "Login", "[password] Login to Bot", 14
            )

        # NOTE(review): pyrogram's Client.set_bot_commands is a coroutine; if
        # TgClient.bot is a stock pyrogram Client this call is never awaited —
        # confirm that TgClient wraps it synchronously.
        TgClient.bot.set_bot_commands(
            [
                BotCommand(
                    cmds[0] if isinstance(cmds, list) else cmds,
                    description,
                )
                for cmd, description in BOT_COMMANDS.items()
                for cmds in [getattr(BotCommands, f"{cmd}Command", None)]
                if cmds is not None
            ]
        )
|
bot/core/jdownloader_booter.py
ADDED
|
@@ -0,0 +1,94 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from json import dumps
|
| 2 |
+
from random import randint
|
| 3 |
+
from re import match
|
| 4 |
+
|
| 5 |
+
from aiofiles import open as aiopen
|
| 6 |
+
from aiofiles.os import listdir, makedirs, path, rename
|
| 7 |
+
from aioshutil import rmtree
|
| 8 |
+
|
| 9 |
+
from myjd import MyJdApi
|
| 10 |
+
|
| 11 |
+
from .. import LOGGER
|
| 12 |
+
from ..helper.ext_utils.bot_utils import cmd_exec, new_task
|
| 13 |
+
from .config_manager import Config
|
| 14 |
+
from .tg_client import TgClient
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class JDownloader(MyJdApi):
    """Wrapper around MyJdApi that configures, boots and supervises a
    local JDownloader process set up for My.JDownloader remote control.
    """

    def __init__(self):
        super().__init__()
        self._username = ""
        self._password = ""
        self._device_name = ""
        # True only while the local JDownloader process is running.
        self.is_connected = False
        # Human-readable status shown to users while not connected.
        self.error = "JDownloader Credentials not provided!"

    async def _write_config(self, path, data):
        """Serialize *data* as JSON into the config file at *path*."""
        async with aiopen(path, "w") as f:
            await f.write(dumps(data))

    @new_task
    async def boot(self):
        """(Re)start the local JDownloader process.

        Kills any running java process, writes the My.JDownloader and
        remote-API config files, restores JDownloader.jar from a backup
        copy when the jar is missing, then launches it under cpulimit.
        Re-boots itself unless the process exited with -9 (killed via
        the pkill above, i.e. an intentional shutdown).
        """
        await cmd_exec(["pkill", "-9", "-f", "java"])
        if not Config.JD_EMAIL or not Config.JD_PASS:
            self.is_connected = False
            self.error = "JDownloader Credentials not provided!"
            return
        # Fix: typo "agin" -> "again" in the user-facing status message.
        self.error = "Connecting... Try again after couple of seconds"
        # Random prefix keeps the device name unique across restarts.
        self._device_name = f"{randint(0, 1000)}@{TgClient.BNAME}"
        if await path.exists("/JDownloader/logs"):
            LOGGER.info(
                "Starting JDownloader... This might take up to 10 sec and might restart once if update available!"
            )
        else:
            LOGGER.info(
                "Starting JDownloader... This might take up to 8 sec and might restart once after build!"
            )
        jdata = {
            "autoconnectenabledv2": True,
            "password": Config.JD_PASS,
            "devicename": f"{self._device_name}",
            "email": Config.JD_EMAIL,
        }
        remote_data = {
            "localapiserverheaderaccesscontrollalloworigin": "",
            "deprecatedapiport": 3128,
            "localapiserverheaderxcontenttypeoptions": "nosniff",
            "localapiserverheaderxframeoptions": "DENY",
            "externinterfaceenabled": True,
            "deprecatedapilocalhostonly": True,
            "localapiserverheaderreferrerpolicy": "no-referrer",
            "deprecatedapienabled": True,
            "localapiserverheadercontentsecuritypolicy": "default-src 'self'",
            "jdanywhereapienabled": True,
            "externinterfacelocalhostonly": False,
            "localapiserverheaderxxssprotection": "1; mode=block",
        }
        await makedirs("/JDownloader/cfg", exist_ok=True)
        await self._write_config(
            "/JDownloader/cfg/org.jdownloader.api.myjdownloader.MyJDownloaderSettings.json",
            jdata,
        )
        await self._write_config(
            "/JDownloader/cfg/org.jdownloader.api.RemoteAPIConfig.json",
            remote_data,
        )
        if not await path.exists("/JDownloader/JDownloader.jar"):
            pattern = r"JDownloader\.jar\.backup.\d$"
            for filename in await listdir("/JDownloader"):
                if match(pattern, filename):
                    # Fix: rename the matched backup file itself; the original
                    # code renamed a hard-coded, non-existent path instead of
                    # using the backup found by the loop.
                    await rename(
                        f"/JDownloader/{filename}", "/JDownloader/JDownloader.jar"
                    )
                    break
            await rmtree("/JDownloader/update")
            await rmtree("/JDownloader/tmp")
        cmd = "cpulimit -l 20 -- java -Xms256m -Xmx500m -Dsun.jnu.encoding=UTF-8 -Dfile.encoding=UTF-8 -Djava.awt.headless=true -jar /JDownloader/JDownloader.jar"
        self.is_connected = True
        _, __, code = await cmd_exec(cmd, shell=True)
        self.is_connected = False
        if code != -9:
            await self.boot()
|
| 92 |
+
|
| 93 |
+
|
| 94 |
+
# Shared module-level JDownloader instance imported by the rest of the bot.
jdownloader = JDownloader()
|
bot/core/plugin_manager.py
ADDED
|
@@ -0,0 +1,260 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import importlib
|
| 2 |
+
import importlib.util
|
| 3 |
+
import sys
|
| 4 |
+
from dataclasses import dataclass, field
|
| 5 |
+
from pathlib import Path
|
| 6 |
+
from typing import Any, Dict, List, Optional
|
| 7 |
+
|
| 8 |
+
from pyrogram import Client
|
| 9 |
+
from pyrogram.handlers import CallbackQueryHandler, MessageHandler
|
| 10 |
+
|
| 11 |
+
from .. import LOGGER
|
| 12 |
+
from ..helper.telegram_helper.filters import CustomFilters
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
@dataclass
class PluginInfo:
    """Metadata and runtime bookkeeping for one loadable plugin."""

    name: str  # plugin identifier; load_plugin() keys modules by the file stem
    version: str
    author: str
    description: str
    enabled: bool = True  # toggled by PluginManager.enable/disable_plugin
    handlers: List[Any] = field(default_factory=list)  # handlers added to the bot for this plugin
    commands: List[str] = field(default_factory=list)  # command names auto-registered from <name>_command funcs
    dependencies: List[str] = field(default_factory=list)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class PluginBase:
    """Base class every plugin must subclass.

    Lifecycle hooks return True on success; PluginManager aborts the
    corresponding transition when a hook returns False.
    """

    # Must be set by the concrete plugin; read by PluginManager.load_plugin().
    PLUGIN_INFO: PluginInfo

    async def on_load(self) -> bool:
        """Called once after the plugin module has been imported."""
        return True

    async def on_unload(self) -> bool:
        """Called before the plugin's handlers are removed."""
        return True

    async def on_enable(self) -> bool:
        """Called when the plugin is switched on at runtime."""
        return True

    async def on_disable(self) -> bool:
        """Called when the plugin is switched off at runtime."""
        return True

    def register_command(self, command: str, handler_func, filters=None):
        """Build a MessageHandler for /<command>.

        Fix: the command string must be wrapped in pyrogram's command()
        filter before being combined — the original `command & filters`
        attempted `str & Filter`, which raises TypeError.
        """
        from pyrogram.filters import command as command_filter

        if filters is None:
            filters = CustomFilters.authorized
        return MessageHandler(handler_func, filters=command_filter(command) & filters)

    def register_callback(self, pattern: str, callback_func, filters=None):
        """Build a CallbackQueryHandler for callback data matching *pattern*.

        Fix: the pattern string must be wrapped in pyrogram's regex()
        filter — the original `pattern & filters` attempted `str & Filter`,
        which raises TypeError.
        """
        from pyrogram.filters import regex

        if filters is None:
            filters = CustomFilters.authorized
        return CallbackQueryHandler(callback_func, filters=regex(pattern) & filters)
|
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class PluginManager:
    """Discovers, loads, unloads and toggles plugins from the plugins/ dir.

    Each plugin is a single .py file containing one PluginBase subclass
    with a PLUGIN_INFO attribute.  Loaded modules are registered into
    sys.modules under the plugin name.
    """

    def __init__(self, bot: Client):
        # Pyrogram client used for add_handler/remove_handler.
        self.bot = bot
        self.plugins: Dict[str, PluginInfo] = {}
        self.loaded_modules: Dict[str, Any] = {}
        self.plugins_dir = Path("plugins")
        self.plugins_dir.mkdir(exist_ok=True)

    def discover_plugins(self) -> List[str]:
        """Return the stems of all candidate plugin files (skips dunders)."""
        plugin_files = []
        for file in self.plugins_dir.glob("*.py"):
            if file.name.startswith("__"):
                continue
            plugin_files.append(file.stem)
        return plugin_files

    def _refresh_commands(self):
        """Rebuild the bot command list and help text after plugin changes."""
        try:
            from ..helper.telegram_helper.bot_commands import BotCommands
            from ..helper.ext_utils.help_messages import (
                get_bot_commands,
                get_help_string,
            )
            import importlib

            BotCommands.refresh_commands()

            # Reload so the module-level command/help tables are rebuilt.
            importlib.reload(sys.modules["bot.helper.ext_utils.help_messages"])
            # NOTE(review): BOT_COMMANDS and help_string imported here are
            # never used below — presumably dead; confirm before removing.
            from ..helper.ext_utils.help_messages import BOT_COMMANDS, help_string

            globals()["BOT_COMMANDS"] = get_bot_commands()
            globals()["help_string"] = get_help_string()

            LOGGER.info("Bot commands and help refreshed")
        except Exception as e:
            LOGGER.error(f"Error refreshing commands: {e}", exc_info=True)

    async def load_plugin(self, plugin_name: str) -> bool:
        """Import plugins/<plugin_name>.py, run on_load() and register handlers.

        Returns True on success, False on any failure (already loaded,
        missing file, no PluginBase subclass, missing PLUGIN_INFO, or
        on_load() returning False).
        """
        try:
            if plugin_name in self.loaded_modules:
                LOGGER.warning(f"Plugin {plugin_name} already loaded")
                return False

            plugin_path = self.plugins_dir / f"{plugin_name}.py"
            if not plugin_path.exists():
                LOGGER.error(f"Plugin file {plugin_name}.py not found")
                return False

            # Import the file as a standalone module registered in sys.modules.
            spec = importlib.util.spec_from_file_location(plugin_name, plugin_path)
            module = importlib.util.module_from_spec(spec)
            sys.modules[plugin_name] = module
            spec.loader.exec_module(module)

            # Find the first PluginBase subclass defined in the module.
            plugin_class = None
            for attr_name in dir(module):
                attr = getattr(module, attr_name)
                if (
                    isinstance(attr, type)
                    and issubclass(attr, PluginBase)
                    and attr != PluginBase
                ):
                    plugin_class = attr
                    break

            if not plugin_class:
                LOGGER.error(f"No valid plugin class found in {plugin_name}")
                return False

            plugin_instance = plugin_class()
            if not hasattr(plugin_instance, "PLUGIN_INFO"):
                LOGGER.error(f"Plugin {plugin_name} missing PLUGIN_INFO")
                return False

            plugin_info = plugin_instance.PLUGIN_INFO
            if await plugin_instance.on_load():
                self.plugins[plugin_name] = plugin_info
                self.loaded_modules[plugin_name] = plugin_instance
                self._register_handlers(plugin_instance, plugin_info)
                self._refresh_commands()
                LOGGER.info(f"Plugin {plugin_name} loaded successfully")
                return True
            else:
                LOGGER.error(f"Plugin {plugin_name} failed to load")
                return False

        except Exception as e:
            LOGGER.error(f"Error loading plugin {plugin_name}: {e}", exc_info=True)
            return False

    async def unload_plugin(self, plugin_name: str) -> bool:
        """Run on_unload(), remove handlers and drop the module from sys.modules."""
        try:
            if plugin_name not in self.loaded_modules:
                LOGGER.error(f"Plugin {plugin_name} not loaded")
                return False

            plugin_instance = self.loaded_modules[plugin_name]
            plugin_info = self.plugins[plugin_name]

            if await plugin_instance.on_unload():
                self._unregister_handlers(plugin_info)
                del self.loaded_modules[plugin_name]
                del self.plugins[plugin_name]
                if plugin_name in sys.modules:
                    del sys.modules[plugin_name]
                self._refresh_commands()
                LOGGER.info(f"Plugin {plugin_name} unloaded successfully")
                return True
            else:
                LOGGER.error(f"Plugin {plugin_name} failed to unload")
                return False

        except Exception as e:
            LOGGER.error(f"Error unloading plugin {plugin_name}: {e}", exc_info=True)
            return False

    async def reload_plugin(self, plugin_name: str) -> bool:
        """Unload then load the plugin again; returns the load result."""
        try:
            await self.unload_plugin(plugin_name)
            return await self.load_plugin(plugin_name)
        except Exception as e:
            LOGGER.error(f"Error reloading plugin {plugin_name}: {e}", exc_info=True)
            return False

    async def enable_plugin(self, plugin_name: str) -> bool:
        """Mark a loaded plugin as enabled (runs its on_enable hook)."""
        try:
            if plugin_name not in self.plugins:
                LOGGER.error(f"Plugin {plugin_name} not found")
                return False

            plugin_instance = self.loaded_modules[plugin_name]
            if await plugin_instance.on_enable():
                self.plugins[plugin_name].enabled = True
                self._refresh_commands()
                LOGGER.info(f"Plugin {plugin_name} enabled")
                return True
            else:
                LOGGER.error(f"Plugin {plugin_name} failed to enable")
                return False

        except Exception as e:
            LOGGER.error(f"Error enabling plugin {plugin_name}: {e}", exc_info=True)
            return False

    async def disable_plugin(self, plugin_name: str) -> bool:
        """Mark a loaded plugin as disabled (runs its on_disable hook)."""
        try:
            if plugin_name not in self.plugins:
                LOGGER.error(f"Plugin {plugin_name} not found")
                return False

            plugin_instance = self.loaded_modules[plugin_name]
            if await plugin_instance.on_disable():
                self.plugins[plugin_name].enabled = False
                self._refresh_commands()
                LOGGER.info(f"Plugin {plugin_name} disabled")
                return True
            else:
                LOGGER.error(f"Plugin {plugin_name} failed to disable")
                return False

        except Exception as e:
            LOGGER.error(f"Error disabling plugin {plugin_name}: {e}", exc_info=True)
            return False

    def list_plugins(self) -> List[PluginInfo]:
        """Return PluginInfo for every currently tracked plugin."""
        return list(self.plugins.values())

    def get_plugin_info(self, plugin_name: str) -> Optional[PluginInfo]:
        """Return the PluginInfo for *plugin_name*, or None if unknown."""
        return self.plugins.get(plugin_name)

    def _register_handlers(self, plugin_instance: PluginBase, plugin_info: PluginInfo):
        """Attach the plugin's handlers plus any <cmd>_command functions."""
        from ..helper.telegram_helper.filters import CustomFilters
        from pyrogram.filters import command
        from pyrogram.handlers import MessageHandler

        for handler in plugin_info.handlers:
            self.bot.add_handler(handler)

        # NOTE(review): load_plugin registers the module under the file stem,
        # but this looks it up by plugin_info.name — if a plugin declares a
        # name different from its file stem, no commands are found. Confirm.
        module = sys.modules.get(plugin_info.name)
        if module:
            for attr_name in dir(module):
                attr = getattr(module, attr_name)
                if callable(attr) and attr_name.endswith("_command"):
                    cmd_name = attr_name.replace("_command", "")
                    if cmd_name in plugin_info.commands:
                        handler = MessageHandler(
                            attr,
                            filters=command(cmd_name, case_sensitive=True)
                            & CustomFilters.authorized,
                        )
                        # Track the handler so _unregister_handlers can remove it.
                        plugin_info.handlers.append(handler)
                        self.bot.add_handler(handler)
                        LOGGER.info(
                            f"Registered command /{cmd_name} for plugin {plugin_info.name}"
                        )

    def _unregister_handlers(self, plugin_info: PluginInfo):
        """Detach every handler tracked for the plugin; failures only warn."""
        for handler in plugin_info.handlers:
            try:
                self.bot.remove_handler(handler)
            except Exception as e:
                LOGGER.warning(f"Error removing handler: {e}")
|
| 254 |
+
|
| 255 |
+
|
| 256 |
+
# Module-level singleton. NOTE(review): constructed with bot=None, so handler
# (de)registration will fail until plugin_manager.bot is assigned a real
# Client — confirm where the caller sets it.
plugin_manager = PluginManager(None)
|
| 257 |
+
|
| 258 |
+
|
| 259 |
+
def get_plugin_manager() -> PluginManager:
    """Return the module-level PluginManager singleton."""
    return plugin_manager
|
bot/core/startup.py
ADDED
|
@@ -0,0 +1,349 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from asyncio import create_subprocess_exec, create_subprocess_shell, sleep
|
| 2 |
+
from importlib import import_module
|
| 3 |
+
from os import environ, getenv, path as ospath
|
| 4 |
+
|
| 5 |
+
from aiofiles import open as aiopen
|
| 6 |
+
from aiofiles.os import makedirs, remove, path as aiopath
|
| 7 |
+
from aioshutil import rmtree
|
| 8 |
+
|
| 9 |
+
from sabnzbdapi.exception import APIResponseError
|
| 10 |
+
|
| 11 |
+
from .. import (
|
| 12 |
+
LOGGER,
|
| 13 |
+
aria2_options,
|
| 14 |
+
auth_chats,
|
| 15 |
+
drives_ids,
|
| 16 |
+
drives_names,
|
| 17 |
+
index_urls,
|
| 18 |
+
shortener_dict,
|
| 19 |
+
var_list,
|
| 20 |
+
user_data,
|
| 21 |
+
excluded_extensions,
|
| 22 |
+
nzb_options,
|
| 23 |
+
qbit_options,
|
| 24 |
+
rss_dict,
|
| 25 |
+
sabnzbd_client,
|
| 26 |
+
sudo_users,
|
| 27 |
+
)
|
| 28 |
+
from ..helper.ext_utils.db_handler import database
|
| 29 |
+
from .config_manager import Config, BinConfig
|
| 30 |
+
from .tg_client import TgClient
|
| 31 |
+
from .torrent_manager import TorrentManager
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
async def update_qb_options():
    """Synchronise the shared qbit_options cache with the qBittorrent client.

    First run (empty cache): pull the client's preferences, strip the
    client-managed listen_port and all rss* keys, and force the web UI
    password.  Later runs: push the cached options back to the client.
    """
    LOGGER.info("Get qBittorrent options from server")
    # Fix: guard BOTH branches — the original only guarded the pull path,
    # so the push path raised AttributeError when qBittorrent was disabled.
    if not TorrentManager.qbittorrent:
        LOGGER.warning(
            "qBittorrent is not initialized. Skipping qBittorrent options update."
        )
        return
    if not qbit_options:
        opt = await TorrentManager.qbittorrent.app.preferences()
        qbit_options.update(opt)
        # Fix: pop() instead of del — don't KeyError if the client
        # didn't report a listen_port.
        qbit_options.pop("listen_port", None)
        for k in list(qbit_options.keys()):
            if k.startswith("rss"):
                del qbit_options[k]
        qbit_options["web_ui_password"] = "admin"
        await TorrentManager.qbittorrent.app.set_preferences(
            {"web_ui_password": "admin"}
        )
    else:
        await TorrentManager.qbittorrent.app.set_preferences(qbit_options)
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
async def update_aria2_options():
    """Synchronise the shared aria2_options cache with the aria2 daemon.

    Empty cache: pull the daemon's current global options into it.
    Populated cache: push the cached options to the daemon.
    """
    LOGGER.info("Get aria2 options from server")
    if aria2_options:
        # Cache already populated — apply it to the daemon.
        await TorrentManager.aria2.changeGlobalOption(aria2_options)
    else:
        # First run — seed the cache from the daemon.
        current = await TorrentManager.aria2.getGlobalOption()
        aria2_options.update(current)
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
async def update_nzb_options():
    """Populate the shared nzb_options cache from SABnzbd.

    Retries every 0.5s until the SABnzbd API answers (it may still be
    starting up).  No-op when no usenet servers are configured.
    """
    if not Config.USENET_SERVERS:
        return
    LOGGER.info("Get SABnzbd options from server")
    while True:
        try:
            misc_cfg = (await sabnzbd_client.get_config())["config"]["misc"]
            nzb_options.update(misc_cfg)
        except Exception:
            # SABnzbd not ready yet — back off briefly and retry.
            await sleep(0.5)
        else:
            break
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
async def load_settings():
    """Load persisted settings, files, user data and RSS data from MongoDB.

    No-op when DATABASE_URL is unset.  Wipes locally cached per-user
    directories first, then restores config, private files, aria2 /
    qBittorrent / SABnzbd options, user data (with attached files written
    back to disk) and RSS subscriptions.
    """
    if not Config.DATABASE_URL:
        return
    # Drop stale per-user caches; they are re-created from DB contents below.
    for p in ["thumbnails", "tokens", "rclone"]:
        if await aiopath.exists(p):
            await rmtree(p, ignore_errors=True)
    await database.connect()
    if database.db is not None:
        BOT_ID = Config.BOT_TOKEN.split(":", 1)[0]
        # Build the deploy-time config from config.py (if present) ...
        try:
            settings = import_module("config")
            config_file = {
                key: value.strip() if isinstance(value, str) else value
                for key, value in vars(settings).items()
                if not key.startswith("__")
            }
        except ModuleNotFoundError:
            config_file = {}
        # ... overridden by environment variables known to the bot.
        config_file.update(
            {
                key: value.strip() if isinstance(value, str) else value
                for key, value in environ.items()
                if key in var_list
            }
        )

        old_config = await database.db.settings.deployConfig.find_one(
            {"_id": BOT_ID}, {"_id": 0}
        )
        if old_config is None:
            # First deploy: persist the deploy config as-is.
            await database.db.settings.deployConfig.replace_one(
                {"_id": BOT_ID}, config_file, upsert=True
            )
        if old_config and old_config != config_file:
            # Deploy config changed: store it and merge over saved config.
            LOGGER.info("Saving.. Deploy Config imported from Bot")
            await database.db.settings.deployConfig.replace_one(
                {"_id": BOT_ID}, config_file, upsert=True
            )
            config_dict = (
                await database.db.settings.config.find_one({"_id": BOT_ID}, {"_id": 0})
                or {}
            )
            config_dict.update(config_file)
            if config_dict:
                Config.load_dict(config_dict)
        else:
            # Deploy config unchanged: load the saved config from MongoDB.
            LOGGER.info("Updating.. Saved Config imported from MongoDB")
            config_dict = await database.db.settings.config.find_one(
                {"_id": BOT_ID}, {"_id": 0}
            )
            if config_dict:
                Config.load_dict(config_dict)

        # Restore private files; keys encode filenames with "__" for ".".
        if pf_dict := await database.db.settings.files.find_one(
            {"_id": BOT_ID}, {"_id": 0}
        ):
            for key, value in pf_dict.items():
                if value:
                    file_ = key.replace("__", ".")
                    async with aiopen(file_, "wb+") as f:
                        await f.write(value)

        if a2c_options := await database.db.settings.aria2c.find_one(
            {"_id": BOT_ID}, {"_id": 0}
        ):
            aria2_options.update(a2c_options)

        if not Config.DISABLE_TORRENTS:
            if qbit_opt := await database.db.settings.qbittorrent.find_one(
                {"_id": BOT_ID}, {"_id": 0}
            ):
                qbit_options.update(qbit_opt)

        # SABnzbd config doc holds exactly one key/value pair (the ini file).
        if nzb_opt := await database.db.settings.nzb.find_one(
            {"_id": BOT_ID}, {"_id": 0}
        ):
            if await aiopath.exists("sabnzbd/SABnzbd.ini.bak"):
                await remove("sabnzbd/SABnzbd.ini.bak")
            ((key, value),) = nzb_opt.items()
            file_ = key.replace("__", ".")
            async with aiopen(f"sabnzbd/{file_}", "wb+") as f:
                await f.write(value)
            LOGGER.info("Loaded.. Sabnzbd Data from MongoDB")

        # Restore per-user data; embedded file blobs are written to disk and
        # the document value is replaced by the on-disk path.
        if await database.db.users[BOT_ID].find_one():
            rows = database.db.users[BOT_ID].find({})
            async for row in rows:
                uid = row["_id"]
                del row["_id"]
                paths = {
                    "THUMBNAIL": f"thumbnails/{uid}.jpg",
                    "RCLONE_CONFIG": f"rclone/{uid}.conf",
                    "TOKEN_PICKLE": f"tokens/{uid}.pickle",
                    "USER_COOKIE_FILE": f"cookies/{uid}/cookies.txt",
                }

                # Re-defined each iteration; writes one blob to disk,
                # creating the parent directory on demand.
                async def save_file(file_path, content):
                    dir_path = ospath.dirname(file_path)
                    if not await aiopath.exists(dir_path):
                        await makedirs(dir_path)
                    if file_path.startswith("cookies/") and file_path.endswith(".txt"):
                        async with aiopen(file_path, "wb") as f:
                            if isinstance(content, str):
                                content = content.encode("utf-8")
                            await f.write(content)
                    else:
                        async with aiopen(file_path, "wb+") as f:
                            if isinstance(content, str):
                                content = content.encode("utf-8")
                            await f.write(content)

                for key, path in paths.items():
                    if row.get(key):
                        await save_file(path, row[key])
                        row[key] = path
                user_data[uid] = row
            LOGGER.info("Users Data has been imported from MongoDB")

        if await database.db.rss[BOT_ID].find_one():
            rows = database.db.rss[BOT_ID].find({})
            async for row in rows:
                user_id = row["_id"]
                del row["_id"]
                rss_dict[user_id] = row
            LOGGER.info("RSS data has been imported from MongoDB")
|
| 203 |
+
|
| 204 |
+
|
| 205 |
+
async def save_settings():
    """Seed MongoDB with the current runtime settings.

    The bot config document is always overwritten; aria2 / qBittorrent /
    SABnzbd documents are written only when absent, so user edits stored
    in the DB are not clobbered.
    """
    if database.db is None:
        return
    config_file = Config.get_all()
    await database.db.settings.config.update_one(
        {"_id": TgClient.ID}, {"$set": config_file}, upsert=True
    )
    if await database.db.settings.aria2c.find_one({"_id": TgClient.ID}) is None:
        await database.db.settings.aria2c.update_one(
            {"_id": TgClient.ID}, {"$set": aria2_options}, upsert=True
        )
    if await database.db.settings.qbittorrent.find_one({"_id": TgClient.ID}) is None:
        await database.save_qbit_settings()
    if await database.db.settings.nzb.find_one({"_id": TgClient.ID}) is None:
        # Fix: guard against a missing ini so one absent component can't
        # abort the whole save with FileNotFoundError; open read-only
        # ("rb", not "rb+") since the file is never written here.
        if await aiopath.exists("sabnzbd/SABnzbd.ini"):
            async with aiopen("sabnzbd/SABnzbd.ini", "rb") as pf:
                nzb_conf = await pf.read()
            await database.db.settings.nzb.update_one(
                {"_id": TgClient.ID}, {"$set": {"SABnzbd__ini": nzb_conf}}, upsert=True
            )
|
| 224 |
+
|
| 225 |
+
|
| 226 |
+
async def update_variables():
    """Derive runtime state from the loaded Config values.

    Normalises the leech split size, premium-only flags, authorized chats,
    sudo users, excluded extensions, drive lists and shortener table.
    """
    if (
        Config.LEECH_SPLIT_SIZE > TgClient.MAX_SPLIT_SIZE
        or Config.LEECH_SPLIT_SIZE == 2097152000
        or not Config.LEECH_SPLIT_SIZE
    ):
        Config.LEECH_SPLIT_SIZE = TgClient.MAX_SPLIT_SIZE

    # These features require a premium user session.
    Config.HYBRID_LEECH = bool(Config.HYBRID_LEECH and TgClient.IS_PREMIUM_USER)
    Config.USER_TRANSMISSION = bool(
        Config.USER_TRANSMISSION and TgClient.IS_PREMIUM_USER
    )

    if Config.AUTHORIZED_CHATS:
        # Format per entry: "<chat_id>" or "<chat_id>|<thread_id>|<thread_id>..."
        aid = Config.AUTHORIZED_CHATS.split()
        for id_ in aid:
            chat_id, *thread_ids = id_.split("|")
            chat_id = int(chat_id.strip())
            if thread_ids:
                auth_chats[chat_id] = [int(t.strip()) for t in thread_ids]
            else:
                auth_chats[chat_id] = []

    if Config.SUDO_USERS:
        aid = Config.SUDO_USERS.split()
        for id_ in aid:
            sudo_users.append(int(id_.strip()))

    if Config.EXCLUDED_EXTENSIONS:
        fx = Config.EXCLUDED_EXTENSIONS.split()
        for x in fx:
            x = x.lstrip(".")
            excluded_extensions.append(x.strip().lower())

    if Config.GDRIVE_ID:
        drives_names.append("Main")
        drives_ids.append(Config.GDRIVE_ID)
        index_urls.append(Config.INDEX_URL)

    if not Config.IMDB_TEMPLATE:
        Config.IMDB_TEMPLATE = """
<b>Title: </b> {title} [{year}]
<b>Also Known As:</b> {aka}
<b>Rating ⭐️:</b> <i>{rating}</i>
<b>Release Info: </b> <a href="{url_releaseinfo}">{release_date}</a>
<b>Genre: </b>{genres}
<b>IMDb URL:</b> {url}
<b>Language: </b>{languages}
<b>Country of Origin : </b> {countries}

<b>Story Line: </b><code>{plot}</code>

<a href="{url_cast}">Read More ...</a>"""

    if await aiopath.exists("list_drives.txt"):
        # Fix: open read-only ("r", not "r+") and skip malformed/blank lines
        # instead of raising IndexError on temp[1].
        async with aiopen("list_drives.txt", "r") as f:
            lines = await f.readlines()
            for line in lines:
                temp = line.split()
                if len(temp) < 2:
                    continue
                drives_ids.append(temp[1])
                drives_names.append(temp[0].replace("_", " "))
                if len(temp) > 2:
                    index_urls.append(temp[2])
                else:
                    index_urls.append("")

    if await aiopath.exists("shortener.txt"):
        # Fix: open read-only ("r", not "r+"); the file is never written here.
        async with aiopen("shortener.txt", "r") as f:
            lines = await f.readlines()
            for line in lines:
                temp = line.strip().split()
                if len(temp) == 2:
                    shortener_dict[temp[0]] = temp[1]
|
| 300 |
+
|
| 301 |
+
|
| 302 |
+
async def load_configurations():
    """One-time environment setup at startup.

    Prepares .netrc, runs the package setup script, starts the web server
    and cron helper, unpacks bundled JDownloader config and service
    accounts, then initialises the torrent managers.
    """
    # aria2/yt-dlp expect a .netrc to exist; create an empty one if missing.
    if not await aiopath.exists(".netrc"):
        async with aiopen(".netrc", "w"):
            pass

    await (
        await create_subprocess_shell(
            f"chmod 600 .netrc && cp .netrc /root/.netrc && chmod +x setpkgs.sh && ./setpkgs.sh {BinConfig.ARIA2_NAME} {BinConfig.SABNZBD_NAME}"
        )
    ).wait()

    # Start the web server only when a port is configured; not awaited —
    # it runs for the bot's lifetime.
    PORT = getenv("PORT", "") or Config.BASE_URL_PORT
    if PORT:
        await create_subprocess_shell(
            f"gunicorn -k uvicorn.workers.UvicornWorker -w 1 web.wserver:app --bind 0.0.0.0:{PORT}"
        )
    await create_subprocess_shell("python3 cron_boot.py")

    # Bundled JDownloader config: replace any existing cfg dir.
    if await aiopath.exists("cfg.zip"):
        if await aiopath.exists("/JDownloader/cfg"):
            await rmtree("/JDownloader/cfg", ignore_errors=True)
        await (
            await create_subprocess_exec("7z", "x", "cfg.zip", "-o/JDownloader")
        ).wait()

    # Bundled Google service accounts: extract then delete the archive.
    if await aiopath.exists("accounts.zip"):
        if await aiopath.exists("accounts"):
            await rmtree("accounts")
        await (
            await create_subprocess_exec(
                "7z", "x", "-o.", "-aoa", "accounts.zip", "accounts/*.json"
            )
        ).wait()
        await (await create_subprocess_exec("chmod", "-R", "777", "accounts")).wait()
        await remove("accounts.zip")

    if not await aiopath.exists("accounts"):
        Config.USE_SERVICE_ACCOUNTS = False

    await TorrentManager.initiate()

    if Config.DISABLE_TORRENTS:
        LOGGER.info("Torrents are disabled. Skipping qBittorrent initialization.")
    else:
        try:
            await TorrentManager.qbittorrent.app.set_preferences(qbit_options)
        except Exception as e:
            LOGGER.error(f"Failed to configure qBittorrent: {e}")
|
bot/core/tg_client.py
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from pyrogram import Client, enums
|
| 2 |
+
from asyncio import Lock, gather
|
| 3 |
+
from inspect import signature
|
| 4 |
+
|
| 5 |
+
from .. import LOGGER
|
| 6 |
+
from .config_manager import Config
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class TgClient:
    """Central registry for the bot's Pyrogram clients.

    Holds the main bot client, an optional premium user session, and any
    number of helper bot clients (for parallel transfers), plus derived
    constants (premium status, maximum split size).

    Fixes over the previous revision:
    - ``start_bot`` assigned ``cls.ID`` twice (before and after starting the
      client); the redundant second assignment is removed.
    - ``stop`` cleared ``helper_bots`` but left stale counters in
      ``helper_loads``; both registries are now reset together.
    """

    _lock = Lock()  # guards stop()/reload() against concurrent lifecycle calls
    _hlock = Lock()  # guards helper-bot startup

    bot = None
    user = None
    helper_bots = {}
    helper_loads = {}  # helper bot number -> current load counter

    BNAME = ""  # main bot username (without the leading @)
    ID = 0  # bot id parsed from BOT_TOKEN ("<id>:<secret>")
    IS_PREMIUM_USER = False
    MAX_SPLIT_SIZE = 2097152000  # ~2000 MiB default Telegram upload limit

    @classmethod
    def wztgClient(cls, *args, **kwargs):
        """Build a Pyrogram ``Client`` pre-configured with global settings.

        Optional kwargs (``max_concurrent_transmissions``, ``skip_updates``)
        are only passed when the installed Pyrogram fork actually accepts
        them, checked against ``Client.__init__``'s signature.
        """
        kwargs["api_id"] = Config.TELEGRAM_API
        kwargs["api_hash"] = Config.TELEGRAM_HASH
        kwargs["proxy"] = Config.TG_PROXY
        kwargs["parse_mode"] = enums.ParseMode.HTML
        kwargs["in_memory"] = True
        for param, value in {
            "max_concurrent_transmissions": 100,
            "skip_updates": False,
        }.items():
            if param in signature(Client.__init__).parameters:
                kwargs[param] = value
        return Client(*args, **kwargs)

    @classmethod
    async def start_hclient(cls, no, b_token):
        """Start helper bot *no* using *b_token*; drop it from the registry on failure."""
        try:
            hbot = await cls.wztgClient(
                f"WZ-HBot{no}",
                bot_token=b_token,
                no_updates=True,
            ).start()
            LOGGER.info(f"Helper Bot [@{hbot.me.username}] Started!")
            cls.helper_bots[no], cls.helper_loads[no] = hbot, 0
        except Exception as e:
            LOGGER.error(f"Failed to start helper bot {no} from HELPER_TOKENS. {e}")
            cls.helper_bots.pop(no, None)

    @classmethod
    async def start_helper_bots(cls):
        """Start every bot listed in HELPER_TOKENS concurrently (1-based numbering)."""
        if not Config.HELPER_TOKENS:
            return
        LOGGER.info("Generating helper client from HELPER_TOKENS")
        async with cls._hlock:
            await gather(
                *(
                    cls.start_hclient(no, b_token)
                    for no, b_token in enumerate(Config.HELPER_TOKENS.split(), start=1)
                )
            )

    @classmethod
    async def start_bot(cls):
        """Create and start the main bot client from BOT_TOKEN."""
        LOGGER.info("Generating client from BOT_TOKEN")
        cls.ID = Config.BOT_TOKEN.split(":", 1)[0]
        cls.bot = cls.wztgClient(
            f"WZ-Bot{cls.ID}",
            bot_token=Config.BOT_TOKEN,
            workdir="/usr/src/app",
        )
        await cls.bot.start()
        cls.BNAME = cls.bot.me.username
        LOGGER.info(f"WZ Bot : [@{cls.BNAME}] Started!")

    @classmethod
    async def start_user(cls):
        """Start the optional user session from USER_SESSION_STRING, if configured.

        On success, records premium status and raises the split-size limit
        for premium accounts; on failure, resets the user client to None.
        """
        if Config.USER_SESSION_STRING:
            LOGGER.info("Generating client from USER_SESSION_STRING")
            try:
                cls.user = cls.wztgClient(
                    "WZ-User",
                    session_string=Config.USER_SESSION_STRING,
                    sleep_threshold=60,
                    no_updates=True,
                )
                await cls.user.start()
                cls.IS_PREMIUM_USER = cls.user.me.is_premium
                if cls.IS_PREMIUM_USER:
                    # Premium accounts may upload files up to ~4000 MiB.
                    cls.MAX_SPLIT_SIZE = 4194304000
                uname = cls.user.me.username or cls.user.me.first_name
                LOGGER.info(f"WZ User : [{uname}] Started!")
            except Exception as e:
                LOGGER.error(f"Failed to start client from USER_SESSION_STRING. {e}")
                cls.IS_PREMIUM_USER = False
                cls.user = None

    @classmethod
    async def stop(cls):
        """Stop every running client and reset the registries."""
        async with cls._lock:
            if cls.bot:
                await cls.bot.stop()
                cls.bot = None
            if cls.user:
                await cls.user.stop()
                cls.user = None
            if cls.helper_bots:
                await gather(*[h_bot.stop() for h_bot in cls.helper_bots.values()])
                cls.helper_bots = {}
                # Keep the load table in sync with the (now empty) bot table.
                cls.helper_loads = {}
            LOGGER.info("All Client(s) stopped")

    @classmethod
    async def reload(cls):
        """Restart every running client in place."""
        async with cls._lock:
            await cls.bot.restart()
            if cls.user:
                await cls.user.restart()
            if cls.helper_bots:
                await gather(*[h_bot.restart() for h_bot in cls.helper_bots.values()])
            LOGGER.info("All Client(s) restarted")
|
bot/core/torrent_manager.py
ADDED
|
@@ -0,0 +1,166 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from asyncio import TimeoutError, gather
|
| 2 |
+
from contextlib import suppress
|
| 3 |
+
from inspect import iscoroutinefunction
|
| 4 |
+
from pathlib import Path
|
| 5 |
+
|
| 6 |
+
from aioaria2 import Aria2WebsocketClient
|
| 7 |
+
from aiohttp import ClientError
|
| 8 |
+
from aioqbt.client import create_client
|
| 9 |
+
from tenacity import (
|
| 10 |
+
retry,
|
| 11 |
+
retry_if_exception_type,
|
| 12 |
+
stop_after_attempt,
|
| 13 |
+
wait_exponential,
|
| 14 |
+
)
|
| 15 |
+
|
| 16 |
+
from .. import LOGGER, aria2_options
|
| 17 |
+
from .config_manager import Config
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def wrap_with_retry(obj, max_retries=3):
    """Wrap every public coroutine method of *obj* with a tenacity retry policy.

    Retries up to *max_retries* attempts with exponential backoff (1-5s) on
    transient client/timeout/runtime errors. The wrapped callables are bound
    back onto *obj* in place, and *obj* itself is returned for chaining.
    """
    # The policy is identical for every method, so build it once up front.
    retry_policy = retry(
        stop=stop_after_attempt(max_retries),
        wait=wait_exponential(multiplier=1, min=1, max=5),
        retry=retry_if_exception_type((ClientError, TimeoutError, RuntimeError)),
    )

    public_names = (name for name in dir(obj) if not name.startswith("_"))
    for name in public_names:
        member = getattr(obj, name)
        if iscoroutinefunction(member):
            setattr(obj, name, retry_policy(member))
    return obj
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
class TorrentManager:
    """Facade over the local aria2 (JSON-RPC) and qBittorrent (Web API) daemons."""

    # Shared async clients; populated by initiate(), None when disconnected.
    aria2 = None
    qbittorrent = None

    @classmethod
    async def initiate(cls):
        """Connect to local aria2 and (unless torrents are disabled) qBittorrent.

        Idempotent: returns immediately when aria2 is already connected.
        On any failure, closes whatever was opened and re-raises.
        """
        if cls.aria2:
            return
        try:
            cls.aria2 = await Aria2WebsocketClient.new("http://localhost:6800/jsonrpc")
            LOGGER.info("Aria2 initialized successfully.")

            if Config.DISABLE_TORRENTS:
                LOGGER.info("Torrents are disabled.")
                return

            cls.qbittorrent = await create_client("http://localhost:8090/api/v2/")
            # Transparently retry transient failures on qBittorrent calls.
            cls.qbittorrent = wrap_with_retry(cls.qbittorrent)

        except Exception as e:
            LOGGER.error(f"Error during initialization: {e}")
            await cls.close_all()
            raise

    @classmethod
    async def close_all(cls):
        """Close whichever clients are open and reset both references to None."""
        close_tasks = []
        if cls.aria2:
            close_tasks.append(cls.aria2.close())
            cls.aria2 = None
        if cls.qbittorrent:
            close_tasks.append(cls.qbittorrent.close())
            cls.qbittorrent = None
        if close_tasks:
            await gather(*close_tasks)

    @classmethod
    async def aria2_remove(cls, download):
        """Remove one aria2 download dict: force-remove live ones, purge finished results."""
        if download.get("status", "") in ["active", "paused", "waiting"]:
            await cls.aria2.forceRemove(download.get("gid", ""))
        else:
            # Result may already be gone; ignore RPC errors here.
            with suppress(Exception):
                await cls.aria2.removeDownloadResult(download.get("gid", ""))

    @classmethod
    async def remove_all(cls):
        """Pause everything, then delete all torrents and aria2 downloads."""
        await cls.pause_all()
        if cls.qbittorrent:
            await gather(
                # delete("all", False): remove torrents but keep their files.
                cls.qbittorrent.torrents.delete("all", False),
                cls.aria2.purgeDownloadResult(),
            )
        else:
            await gather(
                cls.aria2.purgeDownloadResult(),
            )
        # Force-remove whatever is still active or queued in aria2.
        downloads = []
        results = await gather(cls.aria2.tellActive(), cls.aria2.tellWaiting(0, 1000))
        for res in results:
            downloads.extend(res)
        tasks = []
        tasks.extend(
            cls.aria2.forceRemove(download.get("gid")) for download in downloads
        )
        with suppress(Exception):
            await gather(*tasks)

    @classmethod
    async def overall_speed(cls):
        """Return (download_speed, upload_speed) in bytes/s summed over both engines."""
        aria2_speed = await cls.aria2.getGlobalStat()
        # aria2 reports speeds as decimal strings.
        download_speed = int(aria2_speed.get("downloadSpeed", "0"))
        upload_speed = int(aria2_speed.get("uploadSpeed", "0"))

        if cls.qbittorrent:
            qb_speed = await cls.qbittorrent.transfer.info()
            download_speed += qb_speed.dl_info_speed
            upload_speed += qb_speed.up_info_speed

        return download_speed, upload_speed

    @classmethod
    async def pause_all(cls):
        """Force-pause all aria2 downloads and stop all qBittorrent torrents."""
        pause_tasks = [cls.aria2.forcePauseAll()]
        if cls.qbittorrent:
            pause_tasks.append(cls.qbittorrent.torrents.stop("all"))
        await gather(*pause_tasks)

    @classmethod
    async def change_aria2_option(cls, key, value):
        """Apply an aria2 option to non-complete downloads and, when legal, globally.

        Also records the new value in the module-level aria2_options cache.
        """
        downloads = []
        results = await gather(cls.aria2.tellActive(), cls.aria2.tellWaiting(0, 1000))
        for res in results:
            downloads.extend(res)
        tasks = [
            cls.aria2.changeOption(download.get("gid"), {key: value})
            for download in downloads
            if download.get("status", "") != "complete"
        ]
        if tasks:
            try:
                await gather(*tasks)
            except Exception as e:
                LOGGER.error(e)
        # These options are per-download only; aria2 rejects them globally.
        if key not in ["checksum", "index-out", "out", "pause", "select-file"]:
            await cls.aria2.changeGlobalOption({key: value})
        aria2_options[key] = value
| 145 |
+
|
| 146 |
+
|
| 147 |
+
def aria2_name(download_info):
    """Resolve a display name for an aria2 download-info dict.

    Preference order: the torrent's own name, a raw "[METADATA]..." placeholder
    path, else the first path component of the first file relative to the
    download directory. Returns "" when no name can be derived.
    """
    if "bittorrent" in download_info and download_info["bittorrent"].get("info"):
        return download_info["bittorrent"]["info"]["name"]

    files = download_info.get("files")
    if not files:
        return ""

    first_path = files[0]["path"]
    if first_path.startswith("[METADATA]"):
        # Magnet metadata placeholder: the path itself is the name.
        return first_path

    dir_path = download_info["dir"]
    if not first_path.startswith(dir_path):
        return ""
    # Strip "<dir>/" and keep the top-level component.
    return Path(first_path[len(dir_path) + 1 :]).parts[0]
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
def is_metadata(download_info):
    """Return True when any file path in *download_info* is a "[METADATA]" placeholder."""
    for entry in download_info.get("files", []):
        if entry["path"].startswith("[METADATA]"):
            return True
    return False
|
bot/helper/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
|
bot/helper/common.py
ADDED
|
@@ -0,0 +1,1162 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
from asyncio import gather, sleep
|
| 3 |
+
from contextlib import suppress
|
| 4 |
+
from os import path as ospath, walk
|
| 5 |
+
from re import sub
|
| 6 |
+
from secrets import token_hex
|
| 7 |
+
from shlex import split
|
| 8 |
+
|
| 9 |
+
from aiofiles.os import listdir, makedirs, remove, path as aiopath
|
| 10 |
+
from aioshutil import move, rmtree
|
| 11 |
+
from pyrogram.enums import ChatAction
|
| 12 |
+
|
| 13 |
+
from .. import (
|
| 14 |
+
DOWNLOAD_DIR,
|
| 15 |
+
LOGGER,
|
| 16 |
+
cores,
|
| 17 |
+
cpu_eater_lock,
|
| 18 |
+
excluded_extensions,
|
| 19 |
+
intervals,
|
| 20 |
+
multi_tags,
|
| 21 |
+
task_dict,
|
| 22 |
+
task_dict_lock,
|
| 23 |
+
user_data,
|
| 24 |
+
)
|
| 25 |
+
from ..core.config_manager import Config, BinConfig
|
| 26 |
+
from ..core.tg_client import TgClient
|
| 27 |
+
from .ext_utils.bot_utils import get_size_bytes, new_task, sync_to_async
|
| 28 |
+
from .ext_utils.bulk_links import extract_bulk_links
|
| 29 |
+
from .ext_utils.files_utils import (
|
| 30 |
+
SevenZ,
|
| 31 |
+
get_base_name,
|
| 32 |
+
get_path_size,
|
| 33 |
+
is_archive,
|
| 34 |
+
is_archive_split,
|
| 35 |
+
is_first_archive_split,
|
| 36 |
+
split_file,
|
| 37 |
+
)
|
| 38 |
+
from .ext_utils.links_utils import (
|
| 39 |
+
is_gdrive_id,
|
| 40 |
+
is_gdrive_link,
|
| 41 |
+
is_rclone_path,
|
| 42 |
+
is_telegram_link,
|
| 43 |
+
is_mega_link,
|
| 44 |
+
)
|
| 45 |
+
from .ext_utils.media_utils import (
|
| 46 |
+
FFMpeg,
|
| 47 |
+
create_thumb,
|
| 48 |
+
get_document_type,
|
| 49 |
+
take_ss,
|
| 50 |
+
)
|
| 51 |
+
from .ext_utils.metadata_utils import MetadataProcessor
|
| 52 |
+
from .mirror_leech_utils.gdrive_utils.list import GoogleDriveList
|
| 53 |
+
from .mirror_leech_utils.rclone_utils.list import RcloneList
|
| 54 |
+
from .mirror_leech_utils.status_utils.ffmpeg_status import FFmpegStatus
|
| 55 |
+
from .mirror_leech_utils.status_utils.sevenz_status import SevenZStatus
|
| 56 |
+
from .telegram_helper.bot_commands import BotCommands
|
| 57 |
+
from .telegram_helper.message_utils import (
|
| 58 |
+
get_tg_link_message,
|
| 59 |
+
send_message,
|
| 60 |
+
send_status_message,
|
| 61 |
+
)
|
| 62 |
+
|
| 63 |
+
|
| 64 |
+
class TaskConfig:
|
| 65 |
+
    def __init__(self):
        """Initialize per-task state with defaults.

        NOTE: expects ``self.message`` (a Pyrogram message) to already be set
        by the subclass before this runs — it is read immediately below.
        """
        self.mid = self.message.id
        # Sender may be an anonymous channel admin (sender_chat).
        self.user = self.message.from_user or self.message.sender_chat
        self.user_id = self.user.id
        self.user_dict = user_data.get(self.user_id, {})
        self.metadata_processor = MetadataProcessor()
        # Normalize the four metadata settings into *_dict attributes
        # (METADATA -> default_metadata_dict); accepts dict, parseable
        # string, or anything else (-> empty dict).
        for k in ("METADATA", "AUDIO_METADATA", "VIDEO_METADATA", "SUBTITLE_METADATA"):
            v = self.user_dict.get(k, {})
            if k == "METADATA":
                k = "default_metadata"
            if isinstance(v, dict):
                setattr(self, f"{k.lower()}_dict", v)
            elif isinstance(v, str):
                setattr(
                    self, f"{k.lower()}_dict", self.metadata_processor.parse_string(v)
                )
            else:
                setattr(self, f"{k.lower()}_dict", {})
        # Working directory keyed by message id.
        self.dir = f"{DOWNLOAD_DIR}{self.mid}"
        # --- string fields ---
        self.up_dir = ""
        self.link = ""
        self.up_dest = ""
        self.leech_dest = ""
        self.rc_flags = ""
        self.tag = ""
        self.name = ""
        self.subname = ""
        self.name_swap = ""
        self.thumbnail_layout = ""
        self.folder_name = ""
        # --- numeric counters/sizes ---
        self.split_size = 0
        self.max_split_size = 0
        self.multi = 0
        self.size = 0
        self.subsize = 0
        self.proceed_count = 0
        # --- task-type and option flags ---
        self.is_leech = False
        self.is_yt = False
        self.is_qbit = False
        self.is_mega = False
        self.is_nzb = False
        self.is_jd = False
        self.is_clone = False
        self.is_uphoster = False
        self.is_gdrive = False
        self.is_rclone = False
        self.is_ytdlp = False
        self.equal_splits = False
        self.user_transmission = False
        self.hybrid_leech = False
        self.extract = False
        self.compress = False
        self.select = False
        self.seed = False
        self.join = False
        self.private_link = False
        self.stop_duplicate = False
        self.sample_video = False
        self.convert_audio = False
        self.convert_video = False
        self.screen_shots = False
        self.is_cancelled = False
        self.force_run = False
        self.force_download = False
        self.force_upload = False
        self.is_torrent = False
        self.as_med = False
        self.as_doc = False
        self.is_file = False
        self.bot_trans = False
        self.user_trans = False
        self.progress = True
        # --- optional / lazily-filled fields ---
        self.ffmpeg_cmds = None
        self.metadata_title = None
        self.chat_thread_id = None
        self.subproc = None
        self.thumb = None
        self.excluded_extensions = []
        self.files_to_proceed = []
        # True for non-private chats (status messages behave differently there).
        self.is_super_chat = self.message.chat.type.name in [
            "SUPERGROUP",
            "CHANNEL",
            "FORUM",
        ]
        self.source_url = None
        # Per-user BOT_PM setting overrides the global one.
        self.bot_pm = Config.BOT_PM or self.user_dict.get("BOT_PM")
        self.pm_msg = None
        self.file_details = {}
        self.mode = tuple()
| 155 |
+
    def _set_mode_engine(self):
        """Derive the (download-engine, upload-mode) hashtag pair for status display."""
        # Prefer the raw link as the "source"; non-http links are wrapped in a
        # t.me share URL, and pure Telegram media fall back to the message link.
        self.source_url = (
            self.link
            if len(self.link) > 0 and self.link.startswith("http")
            else (
                f"https://t.me/share/url?url={self.link}"
                if self.link
                else self.message.link
            )
        )

        # Upload mode tag, picked by destination type (first match wins).
        out_mode = f"#{'Leech' if self.is_leech else 'UphosterUpload' if self.is_uphoster else 'Clone' if self.is_clone else 'RClone' if self.up_dest.startswith('mrcc:') or is_rclone_path(self.up_dest) else 'GDrive' if self.up_dest.startswith(('mtp:', 'tp:', 'sa:')) or is_gdrive_id(self.up_dest) else 'UpHosters'}"
        out_mode += " (Zip)" if self.compress else " (Unzip)" if self.extract else ""

        self.is_rclone = is_rclone_path(self.link)
        self.is_gdrive = is_gdrive_link(self.source_url) if self.source_url else False
        # NOTE(review): this passes self.link while being gated on source_url,
        # unlike is_gdrive above which uses source_url for both — confirm the
        # asymmetry is intentional.
        self.is_mega = is_mega_link(self.link) if self.source_url else False

        # Download engine tag (first match wins); a source_url equal to the
        # message link means the payload is Telegram media, not a URL.
        in_mode = f"#{'Mega' if self.is_mega else 'qBit' if self.is_qbit else 'SABnzbd' if self.is_nzb else 'JDown' if self.is_jd else 'RCloneDL' if self.is_rclone else 'ytdlp' if self.is_ytdlp else 'GDrive' if (self.is_clone or self.is_gdrive) else 'Aria2' if (self.source_url and self.source_url != self.message.link) else 'TgMedia'}"

        self.mode = (in_mode, out_mode)
|
| 176 |
+
|
| 177 |
+
def get_token_path(self, dest):
|
| 178 |
+
if dest.startswith("mtp:"):
|
| 179 |
+
return f"tokens/{self.user_id}.pickle"
|
| 180 |
+
elif (
|
| 181 |
+
dest.startswith("sa:")
|
| 182 |
+
or Config.USE_SERVICE_ACCOUNTS
|
| 183 |
+
and not dest.startswith("tp:")
|
| 184 |
+
):
|
| 185 |
+
return "accounts"
|
| 186 |
+
else:
|
| 187 |
+
return "token.pickle"
|
| 188 |
+
|
| 189 |
+
def get_config_path(self, dest):
|
| 190 |
+
return (
|
| 191 |
+
f"rclone/{self.user_id}.conf" if dest.startswith("mrcc:") else "rclone.conf"
|
| 192 |
+
)
|
| 193 |
+
|
| 194 |
+
    async def is_token_exists(self, path, status):
        """Validate that credentials required for *path* exist.

        *status* is "dl" (download) or "up" (upload). Raises ValueError when
        the rclone config or Drive token file is missing. When per-user
        credentials are used for an upload, marks the task as private.
        """
        if is_rclone_path(path):
            config_path = self.get_config_path(path)
            if config_path != "rclone.conf" and status == "up":
                # Uploading with a per-user rclone config => private task.
                self.private_link = True
            if not await aiopath.exists(config_path):
                raise ValueError(f"Rclone Config: {config_path} not Exists!")
        elif (
            # Downloads are identified by a Drive link, uploads by a Drive id.
            status == "dl"
            and is_gdrive_link(path)
            or status == "up"
            and is_gdrive_id(path)
        ):
            token_path = self.get_token_path(path)
            if token_path.startswith("tokens/") and status == "up":
                # Uploading with a per-user Drive token => private task.
                self.private_link = True
            if not await aiopath.exists(token_path):
                raise ValueError(f"NO TOKEN! {token_path} not Exists!")
|
| 213 |
+
async def before_start(self):
    """Resolve all user/global settings and validate the task before it runs.

    Precedence throughout is: per-task flag > user setting > global
    Config — a key *present* in ``user_dict`` suppresses the Config
    fallback. Raises ``ValueError`` for missing credentials, missing
    uphoster tokens, or an invalid upload destination.
    """
    # --- filename substitution rules: "pattern:repl:count:flags|..." ---
    self.name_swap = (
        self.name_swap
        or self.user_dict.get("NAME_SWAP", False)
        or (Config.NAME_SWAP if "NAME_SWAP" not in self.user_dict else "")
    )
    if self.name_swap:
        self.name_swap = [x.split(":") for x in self.name_swap.split("|")]
    # Extensions skipped on upload; aria2/qBittorrent control files always.
    self.excluded_extensions = self.user_dict.get("EXCLUDED_EXTENSIONS") or (
        excluded_extensions
        if "EXCLUDED_EXTENSIONS" not in self.user_dict
        else ["aria2", "!qB"]
    )
    if not self.rc_flags:
        if self.user_dict.get("RCLONE_FLAGS"):
            self.rc_flags = self.user_dict["RCLONE_FLAGS"]
        elif "RCLONE_FLAGS" not in self.user_dict and Config.RCLONE_FLAGS:
            self.rc_flags = Config.RCLONE_FLAGS
    # --- normalize the download source & check its credentials ---
    if self.link not in ["rcl", "gdl"]:
        if not self.is_jd:
            if is_rclone_path(self.link):
                # USER_TOKENS forces the per-user rclone config (mrcc:).
                if not self.link.startswith("mrcc:") and self.user_dict.get(
                    "USER_TOKENS", False
                ):
                    self.link = f"mrcc:{self.link}"
                await self.is_token_exists(self.link, "dl")
            elif is_gdrive_link(self.link):
                # USER_TOKENS forces the per-user Drive token (mtp:).
                if not self.link.startswith(
                    ("mtp:", "tp:", "sa:")
                ) and self.user_dict.get("USER_TOKENS", False):
                    self.link = f"mtp:{self.link}"
                await self.is_token_exists(self.link, "dl")
    elif self.link == "rcl":
        # Interactive rclone path selection (not for ytdlp/jd tasks).
        if not self.is_ytdlp and not self.is_jd:
            self.link = await RcloneList(self).get_rclone_path("rcd")
            if not is_rclone_path(self.link):
                raise ValueError(self.link)
    elif self.link == "gdl":
        # Interactive Google Drive source selection.
        if not self.is_ytdlp and not self.is_jd:
            self.link = await GoogleDriveList(self).get_target_id("gdd")
            if not is_gdrive_id(self.link):
                raise ValueError(self.link)

    # Upload via the premium user session only when available.
    self.user_transmission = TgClient.IS_PREMIUM_USER and (
        self.user_dict.get("USER_TRANSMISSION")
        or Config.USER_TRANSMISSION
        and "USER_TRANSMISSION" not in self.user_dict
    )

    # Expand a named upload-path alias into the real destination.
    if self.user_dict.get("UPLOAD_PATHS", False):
        if self.up_dest in self.user_dict["UPLOAD_PATHS"]:
            self.up_dest = self.user_dict["UPLOAD_PATHS"][self.up_dest]
    elif "UPLOAD_PATHS" not in self.user_dict and Config.UPLOAD_PATHS:
        if self.up_dest in Config.UPLOAD_PATHS:
            self.up_dest = Config.UPLOAD_PATHS[self.up_dest]

    # ffmpeg_cmds arrives as keys into a user/global command dict;
    # flatten it into the actual list of command strings.
    if self.ffmpeg_cmds and not isinstance(self.ffmpeg_cmds, list):
        if self.user_dict.get("FFMPEG_CMDS", None):
            ffmpeg_dict = self.user_dict["FFMPEG_CMDS"]
            self.ffmpeg_cmds = [
                value
                for key in list(self.ffmpeg_cmds)
                if key in ffmpeg_dict
                for value in ffmpeg_dict[key]
            ]
        elif "FFMPEG_CMDS" not in self.user_dict and Config.FFMPEG_CMDS:
            ffmpeg_dict = Config.FFMPEG_CMDS
            self.ffmpeg_cmds = [
                value
                for key in list(self.ffmpeg_cmds)
                if key in ffmpeg_dict
                for value in ffmpeg_dict[key]
            ]
        else:
            self.ffmpeg_cmds = None

    self.metadata_title = self.user_dict.get("METADATA")

    if not self.is_leech:
        # ------- mirror/clone branch: resolve the upload destination -------
        self.stop_duplicate = (
            self.user_dict.get("STOP_DUPLICATE")
            or "STOP_DUPLICATE" not in self.user_dict
            and Config.STOP_DUPLICATE
        )
        default_upload = (
            self.user_dict.get("DEFAULT_UPLOAD", "") or Config.DEFAULT_UPLOAD
        )
        # "rc"/"gd" shorthands expand to the configured default paths.
        if not self.is_uphoster and (
            (not self.up_dest and default_upload == "rc") or self.up_dest == "rc"
        ):
            self.up_dest = self.user_dict.get("RCLONE_PATH") or Config.RCLONE_PATH
        elif not self.is_uphoster and (
            (not self.up_dest and default_upload == "gd") or self.up_dest == "gd"
        ):
            self.up_dest = self.user_dict.get("GDRIVE_ID") or Config.GDRIVE_ID

        # File-hoster uploads: verify an API token exists for each service.
        if self.is_uphoster and not self.up_dest:
            uphoster_service = self.user_dict.get("UPHOSTER_SERVICE", "gofile")
            services = uphoster_service.split(",")
            for service in services:
                if service == "gofile":
                    if not (
                        self.user_dict.get("GOFILE_TOKEN") or Config.GOFILE_API
                    ):
                        raise ValueError("No Gofile Token Found!")
                elif service == "buzzheavier":
                    if not (
                        self.user_dict.get("BUZZHEAVIER_TOKEN")
                        or Config.BUZZHEAVIER_API
                    ):
                        raise ValueError("No BuzzHeavier Token Found!")
                elif service == "pixeldrain":
                    if not (
                        self.user_dict.get("PIXELDRAIN_KEY")
                        or Config.PIXELDRAIN_KEY
                    ):
                        raise ValueError("No PixelDrain Key Found!")
            self.up_dest = "Uphoster"

        if not self.up_dest:
            raise ValueError("No Upload Destination!")

        # Prefix per-user credentials onto the destination when USER_TOKENS.
        if is_gdrive_id(self.up_dest):
            if not self.up_dest.startswith(
                ("mtp:", "tp:", "sa:")
            ) and self.user_dict.get("USER_TOKENS", False):
                self.up_dest = f"mtp:{self.up_dest}"
        elif is_rclone_path(self.up_dest):
            if not self.up_dest.startswith("mrcc:") and self.user_dict.get(
                "USER_TOKENS", False
            ):
                self.up_dest = f"mrcc:{self.up_dest}"
            self.up_dest = self.up_dest.strip("/")
        elif self.is_uphoster:
            pass  # "Uphoster" sentinel — no path validation needed.
        else:
            raise ValueError("Wrong Upload Destination!")

        if self.up_dest not in ["rcl", "gdl"] and not self.is_uphoster:
            await self.is_token_exists(self.up_dest, "up")

        if self.up_dest == "rcl":
            # Interactive rclone destination; clones must stay rclone→rclone.
            if self.is_clone:
                if not is_rclone_path(self.link):
                    raise ValueError(
                        "You can't clone from different types of tools"
                    )
                config_path = self.get_config_path(self.link)
            else:
                config_path = None
            self.up_dest = await RcloneList(self).get_rclone_path(
                "rcu", config_path
            )
            if not is_rclone_path(self.up_dest):
                raise ValueError(self.up_dest)
        elif self.up_dest == "gdl":
            # Interactive Drive destination; clones must stay gdrive→gdrive.
            if self.is_clone:
                if not is_gdrive_link(self.link):
                    raise ValueError(
                        "You can't clone from different types of tools"
                    )
                token_path = self.get_token_path(self.link)
            else:
                token_path = None
            self.up_dest = await GoogleDriveList(self).get_target_id(
                "gdu", token_path
            )
            if not is_gdrive_id(self.up_dest):
                raise ValueError(self.up_dest)
        elif self.is_clone:
            # Server-side clone needs the same credentials on both ends.
            if is_gdrive_link(self.link) and self.get_token_path(
                self.link
            ) != self.get_token_path(self.up_dest):
                raise ValueError("You must use the same token to clone!")
            elif is_rclone_path(self.link) and self.get_config_path(
                self.link
            ) != self.get_config_path(self.up_dest):
                raise ValueError("You must use the same config to clone!")
    else:
        # ------- leech branch: resolve the Telegram upload chat -------
        self.leech_dest = self.up_dest or self.user_dict.get("LEECH_DUMP_CHAT")
        self.up_dest = Config.LEECH_DUMP_CHAT
        self.hybrid_leech = TgClient.IS_PREMIUM_USER and (
            self.user_dict.get("HYBRID_LEECH")
            or Config.HYBRID_LEECH
            and "HYBRID_LEECH" not in self.user_dict
        )
        # Explicit per-task overrides of the upload session.
        if self.bot_trans:
            self.user_transmission = False
            self.hybrid_leech = False
        if self.user_trans:
            self.user_transmission = TgClient.IS_PREMIUM_USER
        if self.up_dest:
            if not isinstance(self.up_dest, int):
                # b:/u:/h: prefixes force bot / user / hybrid sessions.
                if self.up_dest.startswith("b:"):
                    self.up_dest = self.up_dest.replace("b:", "", 1)
                    self.user_transmission = False
                    self.hybrid_leech = False
                elif self.up_dest.startswith("u:"):
                    self.up_dest = self.up_dest.replace("u:", "", 1)
                    self.user_transmission = TgClient.IS_PREMIUM_USER
                elif self.up_dest.startswith("h:"):
                    self.up_dest = self.up_dest.replace("h:", "", 1)
                    self.user_transmission = TgClient.IS_PREMIUM_USER
                    self.hybrid_leech = self.user_transmission
                # "chat|thread" selects a forum topic thread.
                if "|" in self.up_dest:
                    self.up_dest, self.chat_thread_id = list(
                        map(
                            lambda x: int(x) if x.lstrip("-").isdigit() else x,
                            self.up_dest.split("|", 1),
                        )
                    )
                elif self.up_dest.lstrip("-").isdigit():
                    self.up_dest = int(self.up_dest)
                elif self.up_dest.lower() == "pm":
                    self.up_dest = self.user_id

            # Verify the premium user session can post (and clean up) there.
            if self.user_transmission:
                try:
                    chat = await TgClient.user.get_chat(self.up_dest)
                except Exception:
                    chat = None
                if chat is None:
                    self.user_transmission = False
                    self.hybrid_leech = False
                else:
                    uploader_id = TgClient.user.me.id
                    if chat.type.name not in [
                        "SUPERGROUP",
                        "CHANNEL",
                        "GROUP",
                        "FORUM",
                    ]:
                        self.user_transmission = False
                        self.hybrid_leech = False
                    else:
                        member = await chat.get_member(uploader_id)
                        if (
                            not member.privileges.can_manage_chat
                            or not member.privileges.can_delete_messages
                        ):
                            self.user_transmission = False
                            self.hybrid_leech = False

            # Verify the bot session too (needed alone, or for hybrid mode).
            if not self.user_transmission or self.hybrid_leech:
                try:
                    chat = await self.client.get_chat(self.up_dest)
                except Exception:
                    chat = None
                if chat is None:
                    if self.user_transmission:
                        self.hybrid_leech = False
                    else:
                        raise ValueError("Chat not found!")
                else:
                    uploader_id = self.client.me.id
                    if chat.type.name in [
                        "SUPERGROUP",
                        "CHANNEL",
                        "GROUP",
                        "FORUM",
                    ]:
                        member = await chat.get_member(uploader_id)
                        if (
                            not member.privileges.can_manage_chat
                            or not member.privileges.can_delete_messages
                        ):
                            if not self.user_transmission:
                                raise ValueError(
                                    "You don't have enough privileges in this chat!"
                                )
                            else:
                                self.hybrid_leech = False
                    else:
                        # Private chat: probe that the user started the bot.
                        try:
                            await self.client.send_chat_action(
                                self.up_dest, ChatAction.TYPING
                            )
                        except Exception:
                            raise ValueError("Start the bot and try again!")
        elif (
            self.user_transmission or self.hybrid_leech
        ) and not self.is_super_chat:
            self.user_transmission = False
            self.hybrid_leech = False
        # Split-size: per-task value may be "123" or "2gb"-style text.
        if self.split_size:
            if self.split_size.isdigit():
                self.split_size = int(self.split_size)
            else:
                self.split_size = get_size_bytes(self.split_size)
        self.split_size = (
            self.split_size
            or self.user_dict.get("LEECH_SPLIT_SIZE")
            or Config.LEECH_SPLIT_SIZE
        )
        self.equal_splits = (
            self.user_dict.get("EQUAL_SPLITS")
            or Config.EQUAL_SPLITS
            and "EQUAL_SPLITS" not in self.user_dict
        )
        # 2097152000 bytes = 2000 MiB: non-premium upload cap.
        self.max_split_size = (
            TgClient.MAX_SPLIT_SIZE if self.user_transmission else 2097152000
        )
        self.split_size = min(self.split_size, self.max_split_size)

        # -doc / -med flags win; otherwise user/global AS_DOCUMENT.
        if not self.as_doc:
            self.as_doc = (
                not self.as_med
                if self.as_med
                else (
                    self.user_dict.get("AS_DOCUMENT", False)
                    or Config.AS_DOCUMENT
                    and "AS_DOCUMENT" not in self.user_dict
                )
            )

        self.thumbnail_layout = (
            self.thumbnail_layout
            or self.user_dict.get("THUMBNAIL_LAYOUT", False)
            or (
                Config.THUMBNAIL_LAYOUT
                if "THUMBNAIL_LAYOUT" not in self.user_dict
                else ""
            )
        )

        # A thumb given as a Telegram link is fetched and converted now.
        if self.thumb != "none" and is_telegram_link(self.thumb):
            msg = (await get_tg_link_message(self.thumb))[0]
            self.thumb = (
                await create_thumb(msg) if msg.photo or msg.document else ""
            )
|
| 543 |
+
|
| 544 |
+
async def get_tag(self, text: list):
    """Resolve the original requester and build the mention tag.

    When the message's second line carries a ``"Tag: <name> <id>"``
    marker (added when a task is relayed on the user's behalf), the real
    user is re-fetched by id and attached to the message. The final tag
    prefers @username, then a mention, then a chat title.
    """
    if len(text) > 1 and text[1].startswith("Tag: "):
        user_info = text[1].split("Tag: ")
        if len(user_info) >= 3:
            # The display name itself contained "Tag: " — the id is the
            # last fragment, the name is everything before it.
            id_ = user_info[-1]
            self.tag = " ".join(user_info[:-1])
        else:
            self.tag, id_ = text[1].split("Tag: ")[1].split()
        # Re-attribute the message to the real requester.
        self.user = self.message.from_user = await self.client.get_users(int(id_))
        self.user_id = self.user.id
        self.user_dict = user_data.get(self.user_id, {})
        with suppress(Exception):
            await self.message.unpin()
    if self.user:
        if username := self.user.username:
            self.tag = f"@{username}"
        elif hasattr(self.user, "mention"):
            self.tag = self.user.mention
        else:
            # Sender is a chat (channel post) rather than a user.
            self.tag = self.user.title
|
| 564 |
+
|
| 565 |
+
@new_task
async def run_multi(self, input_list, obj):
    """Kick off the next task of a ``-i`` multi batch.

    Decrements the remaining count, re-issues the command (either from
    the bulk list or from the next replied-to message) and starts a new
    *obj* event with the same flags. Stops when the batch is exhausted
    or its multi_tag was cancelled.
    """
    await sleep(7)  # spacing between consecutive multi tasks
    if not self.multi_tag and self.multi > 1:
        # First task of the batch: mint a cancel tag.
        self.multi_tag = token_hex(3)
        multi_tags.add(self.multi_tag)
    elif self.multi <= 1:
        # Batch finished — drop the tag and stop chaining.
        if self.multi_tag in multi_tags:
            multi_tags.discard(self.multi_tag)
        return
    if self.multi_tag and self.multi_tag not in multi_tags:
        # Tag was removed by a cancel command.
        await send_message(
            self.message, f"{self.tag} Multi Task has been cancelled!"
        )
        await send_status_message(self.message)
        async with task_dict_lock:
            # Shrink pending totals for same-directory grouped tasks.
            for fd_name in self.same_dir:
                self.same_dir[fd_name]["total"] -= self.multi
        return
    if len(self.bulk) != 0:
        # Bulk mode: next link comes from the bulk queue.
        msg = input_list[:1]
        msg.append(f"{self.bulk[0]} -i {self.multi - 1} {self.options}")
        msgts = " ".join(msg)
        if self.multi > 2:
            msgts += f"\n• <b>Cancel Multi:</b> <i>/{BotCommands.CancelTaskCommand[1]}_{self.multi_tag}</i>"
        nextmsg = await send_message(self.message, msgts)
    else:
        # Reply mode: the next source is the message right after the
        # currently replied-to one.
        msg = [s.strip() for s in input_list]
        index = msg.index("-i")
        msg[index + 1] = f"{self.multi - 1}"
        nextmsg = await self.client.get_messages(
            chat_id=self.message.chat.id,
            message_ids=self.message.reply_to_message_id + 1,
        )
        msgts = " ".join(msg)
        if self.multi > 2:
            msgts += f"\n• <b>Cancel Multi:</b> <i>/{BotCommands.CancelTaskCommand[1]}_{self.multi_tag}</i>"
        nextmsg = await send_message(nextmsg, msgts)
    # Re-fetch so the message object is fully populated.
    nextmsg = await self.client.get_messages(
        chat_id=self.message.chat.id, message_ids=nextmsg.id
    )
    # Attribute the forged command message to the original requester.
    if self.message.from_user:
        nextmsg.from_user = self.user
    else:
        nextmsg.sender_chat = self.user
    if intervals["stopAll"]:
        return

    await obj(
        client=self.client,
        message=nextmsg,
        is_qbit=self.is_qbit,
        is_leech=self.is_leech,
        is_jd=self.is_jd,
        is_nzb=self.is_nzb,
        is_uphoster=self.is_uphoster,
        same_dir=self.same_dir,
        bulk=self.bulk,
        multi_tag=self.multi_tag,
        options=self.options,
    ).new_event()
|
| 626 |
+
|
| 627 |
+
async def init_bulk(self, input_list, bulk_start, bulk_end, obj):
    """Start a bulk batch from a replied list of links.

    Extracts the links, strips the ``-b`` flag (and its range argument,
    if given), then issues the first command with ``-i <count>`` so the
    rest of the batch chains through run_multi. Any failure falls back
    to a usage hint message.
    """
    if Config.DISABLE_BULK:
        await send_message(self.message, "Bulk downloads are currently disabled.")
        return
    try:
        self.bulk = await extract_bulk_links(self.message, bulk_start, bulk_end)
        if len(self.bulk) == 0:
            raise ValueError("Bulk Empty!")
        b_msg = input_list[:1]
        self.options = input_list[1:]
        index = self.options.index("-b")
        del self.options[index]
        # A range like "-b 2:5" occupies one extra argument slot.
        if bulk_start or bulk_end:
            del self.options[index]
        self.options = " ".join(self.options)
        b_msg.append(f"{self.bulk[0]} -i {len(self.bulk)} {self.options}")
        msg = " ".join(b_msg)
        if len(self.bulk) > 2:
            self.multi_tag = token_hex(3)
            multi_tags.add(self.multi_tag)
            msg += f"\n• <b>Cancel Multi:</b> <i>/{BotCommands.CancelTaskCommand[1]}_{self.multi_tag}</i>"
        nextmsg = await send_message(self.message, msg)
        # Re-fetch so the message object is fully populated.
        nextmsg = await self.client.get_messages(
            chat_id=self.message.chat.id, message_ids=nextmsg.id
        )
        # Attribute the forged command message to the original requester.
        if self.message.from_user:
            nextmsg.from_user = self.user
        else:
            nextmsg.sender_chat = self.user

        await obj(
            client=self.client,
            message=nextmsg,
            is_qbit=self.is_qbit,
            is_leech=self.is_leech,
            is_jd=self.is_jd,
            is_nzb=self.is_nzb,
            is_uphoster=self.is_uphoster,
            same_dir=self.same_dir,
            bulk=self.bulk,
            multi_tag=self.multi_tag,
            options=self.options,
        ).new_event()
    except Exception:
        # Deliberate broad catch: any parse failure turns into a hint.
        await send_message(
            self.message,
            "Reply to text file or to telegram message that have links seperated by new line!",
        )
|
| 675 |
+
|
| 676 |
+
async def proceed_extract(self, dl_path, gid):
    """Extract archives found under *dl_path* with 7z.

    Uses self.extract as the password when it is a string. Multi-part
    archives are extracted from their first split only; plain ``.rar``
    files are handled via the split branch. On success the source
    archives are removed. Returns the extraction target for a single
    file, otherwise *dl_path*; returns falsy on cancellation.
    """
    pswd = self.extract if isinstance(self.extract, str) else ""
    self.files_to_proceed = []
    # Collect candidate archives (first splits, or non-.rar archives).
    if self.is_file and is_archive(dl_path):
        self.files_to_proceed.append(dl_path)
    else:
        for dirpath, _, files in await sync_to_async(walk, dl_path, topdown=False):
            for file_ in files:
                if (
                    is_first_archive_split(file_)
                    or is_archive(file_)
                    and not file_.strip().lower().endswith(".rar")
                ):
                    f_path = ospath.join(dirpath, file_)
                    self.files_to_proceed.append(f_path)

    if not self.files_to_proceed:
        return dl_path
    sevenz = SevenZ(self)
    LOGGER.info(f"Extracting: {self.name}")
    async with task_dict_lock:
        task_dict[self.mid] = SevenZStatus(self, sevenz, gid, "Extract")
    for dirpath, _, files in await sync_to_async(
        walk, self.up_dir or self.dir, topdown=False
    ):
        code = 0
        for file_ in files:
            if self.is_cancelled:
                return False
            if (
                is_first_archive_split(file_)
                or is_archive(file_)
                and not file_.strip().lower().endswith(".rar")
            ):
                self.proceed_count += 1
                f_path = ospath.join(dirpath, file_)
                # Single file extracts next to itself (base name);
                # directory contents extract in place.
                t_path = get_base_name(f_path) if self.is_file else dirpath
                if not self.is_file:
                    self.subname = file_
                code = await sevenz.extract(f_path, t_path, pswd)
                if self.is_cancelled:
                    return code
        if code == 0:
            # Extraction succeeded for this dir: delete source archives.
            for file_ in files:
                if is_archive_split(file_) or is_archive(file_):
                    del_path = ospath.join(dirpath, file_)
                    try:
                        await remove(del_path)
                    except Exception:
                        self.is_cancelled = True
    return t_path if self.is_file and code == 0 else dl_path
|
| 727 |
+
|
| 728 |
+
async def proceed_ffmpeg(self, dl_path, gid):
    """Run every configured ffmpeg command over the downloaded media.

    Each command in self.ffmpeg_cmds is shlex-split, pinned with
    ``taskset``, and applied to the single file or to every matching
    file in the tree. The pseudo input extensions ``.video``/``.audio``
    (or a bare name with no dot = ``all``) act as media-type filters;
    a real extension restricts to files ending with it. The ``-del``
    token deletes the source after a successful run. The cpu_eater_lock
    is acquired on first use and released in ``finally``.
    Returns the (possibly relocated) path, or False on cancellation.
    """
    checked = False
    cmds = [
        [part.strip() for part in split(item) if part.strip()]
        for item in self.ffmpeg_cmds
    ]
    try:
        ffmpeg = FFMpeg(self)
        for ffmpeg_cmd in cmds:
            self.proceed_count = 0
            cmd = [
                "taskset",
                "-c",
                f"{cores}",
                BinConfig.FFMPEG_NAME,
                "-hide_banner",
                "-loglevel",
                "error",
                "-progress",
                "pipe:1",
            ] + ffmpeg_cmd
            if "-del" in cmd:
                cmd.remove("-del")
                delete_files = True
            else:
                delete_files = False
            index = cmd.index("-i")
            input_file = cmd[index + 1]
            # Decode the input placeholder into a filter class.
            if input_file.strip().endswith(".video"):
                ext = "video"
            elif input_file.strip().endswith(".audio"):
                ext = "audio"
            elif "." not in input_file:
                ext = "all"
            else:
                ext = ospath.splitext(input_file)[-1].lower()
            if await aiopath.isfile(dl_path):
                # ---- single-file task ----
                is_video, is_audio, _ = await get_document_type(dl_path)
                if not is_video and not is_audio:
                    break
                elif is_video and ext == "audio":
                    break
                elif is_audio and not is_video and ext == "video":
                    break
                elif ext not in [
                    "all",
                    "audio",
                    "video",
                ] and not dl_path.strip().lower().endswith(ext):
                    break
                # Work inside a scratch folder named after the file.
                new_folder = ospath.splitext(dl_path)[0]
                if await aiopath.isfile(new_folder):
                    new_folder = f"{new_folder}_temp"
                name = ospath.basename(dl_path)
                await makedirs(new_folder, exist_ok=True)
                file_path = f"{new_folder}/{name}"
                await move(dl_path, file_path)
                if not checked:
                    checked = True
                    async with task_dict_lock:
                        task_dict[self.mid] = FFmpegStatus(
                            self, ffmpeg, gid, "FFmpeg"
                        )
                    self.progress = False
                    await cpu_eater_lock.acquire()
                    self.progress = True
                LOGGER.info(f"Running ffmpeg cmd for: {file_path}")
                var_cmd = cmd.copy()
                var_cmd[index + 1] = file_path
                self.subsize = self.size
                res = await ffmpeg.ffmpeg_cmds(var_cmd, file_path)
                if res:
                    if delete_files:
                        await remove(file_path)
                        if len(await listdir(new_folder)) == 1:
                            # Single output left: promote it out of the
                            # scratch folder and drop any "ffmpeg" prefix.
                            folder = new_folder.rsplit("/", 1)[0]
                            self.name = ospath.basename(res[0])
                            if self.name.startswith("ffmpeg"):
                                self.name = self.name.split(".", 1)[-1]
                            dl_path = ospath.join(folder, self.name)
                            await move(res[0], dl_path)
                            await rmtree(new_folder)
                        else:
                            dl_path = new_folder
                            self.name = new_folder.rsplit("/", 1)[-1]
                    else:
                        dl_path = new_folder
                        self.name = new_folder.rsplit("/", 1)[-1]
                else:
                    # Command failed: restore the original location.
                    await move(file_path, dl_path)
                    await rmtree(new_folder)
            else:
                # ---- directory task: apply the cmd per matching file ----
                for dirpath, _, files in await sync_to_async(
                    walk, dl_path, topdown=False
                ):
                    for file_ in files:
                        var_cmd = cmd.copy()
                        if self.is_cancelled:
                            return False
                        f_path = ospath.join(dirpath, file_)
                        is_video, is_audio, _ = await get_document_type(f_path)
                        if not is_video and not is_audio:
                            continue
                        elif is_video and ext == "audio":
                            continue
                        elif is_audio and not is_video and ext == "video":
                            continue
                        elif ext not in [
                            "all",
                            "audio",
                            "video",
                        ] and not f_path.strip().lower().endswith(ext):
                            continue
                        self.proceed_count += 1
                        var_cmd[index + 1] = f_path
                        if not checked:
                            checked = True
                            async with task_dict_lock:
                                task_dict[self.mid] = FFmpegStatus(
                                    self, ffmpeg, gid, "FFmpeg"
                                )
                            self.progress = False
                            await cpu_eater_lock.acquire()
                            self.progress = True
                        LOGGER.info(f"Running ffmpeg cmd for: {f_path}")
                        self.subsize = await get_path_size(f_path)
                        self.subname = file_
                        res = await ffmpeg.ffmpeg_cmds(var_cmd, f_path)
                        if res and delete_files:
                            await remove(f_path)
                            if len(res) == 1:
                                # Rename a single output to drop the
                                # "ffmpeg" prefix from its name.
                                file_name = ospath.basename(res[0])
                                if file_name.startswith("ffmpeg"):
                                    newname = file_name.split(".", 1)[-1]
                                    newres = ospath.join(dirpath, newname)
                                    await move(res[0], newres)
    finally:
        # Release only if we actually acquired (checked flips on acquire).
        if checked:
            cpu_eater_lock.release()
    return dl_path
|
| 868 |
+
|
| 869 |
+
async def substitute(self, dl_path):
    """Apply the user's name_swap regex rules to file names.

    Each rule is ``[pattern, repl, count, flags]`` (trailing fields
    optional). Renames the single file (returning its new path) or every
    file in the tree in place (returning *dl_path*).
    """

    def perform_swap(name, swaps):
        # Returns the new name, or False when a rule fails or the
        # result would exceed the filesystem's 255-byte name limit.
        name, ext = ospath.splitext(name)
        # Strip embedded www… link spam before applying rules.
        name = sub(r"www\S+", "", name)
        for swap in swaps:
            # Pad short rules with defaults: repl="", count="0",
            # flags="NOFLAG" — the slice keeps only the missing tail.
            pattern, res, cnt, sen = (
                swap + ["", "0", "NOFLAG"][min(len(swap) - 1, 2) :]
            )[0:4]
            cnt = 0 if len(cnt) == 0 else int(cnt)
            try:
                # NOTE(review): flags are looked up on `re` by name
                # (e.g. "I" -> re.I); assumes `re` is importable here —
                # confirm against this file's imports.
                name = sub(
                    rf"{pattern}", res, name, cnt, flags=getattr(re, sen.upper(), 0)
                )
            except Exception as e:
                LOGGER.error(
                    f"Swap Error: pattern: {pattern} res: {res}. Error: {e}"
                )
                return False
            if len(name.encode()) > 255:
                LOGGER.error(f"Substitute: {name} is too long")
                return False
        return name + ext

    if self.is_file:
        up_dir, name = dl_path.rsplit("/", 1)
        new_name = perform_swap(name, self.name_swap)
        if not new_name:
            return dl_path
        new_path = ospath.join(up_dir, new_name)
        await move(dl_path, new_path)
        return new_path
    else:
        for dirpath, _, files in await sync_to_async(walk, dl_path, topdown=False):
            for file_ in files:
                f_path = ospath.join(dirpath, file_)
                new_name = perform_swap(file_, self.name_swap)
                if not new_name:
                    continue
                await move(f_path, ospath.join(dirpath, new_name))
        return dl_path
|
| 909 |
+
|
| 910 |
+
async def generate_screenshots(self, dl_path):
    """Create screenshots for the videos under *dl_path*.

    The shot count comes from self.screen_shots when it is a string,
    otherwise 10. For a single video file the file and its shots are
    moved into a new folder (suffixed ``_temp`` on a name clash) and
    that folder is returned; for a directory, shots are created beside
    each video and *dl_path* is returned unchanged.
    """
    shot_count = int(self.screen_shots) if isinstance(self.screen_shots, str) else 10
    if not self.is_file:
        LOGGER.info(f"Creating Screenshot for: {dl_path}")
        for dirpath, _, files in await sync_to_async(walk, dl_path, topdown=False):
            for entry in files:
                full_path = ospath.join(dirpath, entry)
                if (await get_document_type(full_path))[0]:
                    await take_ss(full_path, shot_count)
        return dl_path
    # Single-file task: only proceed for an actual video.
    if not (await get_document_type(dl_path))[0]:
        return dl_path
    LOGGER.info(f"Creating Screenshot for: {dl_path}")
    shots = await take_ss(dl_path, shot_count)
    if not shots:
        return dl_path
    dest_dir = ospath.splitext(dl_path)[0]
    if await aiopath.isfile(dest_dir):
        dest_dir = f"{dest_dir}_temp"
    base_name = ospath.basename(dl_path)
    await makedirs(dest_dir, exist_ok=True)
    await gather(
        move(dl_path, f"{dest_dir}/{base_name}"),
        move(shots, dest_dir),
    )
    return dest_dir
|
| 935 |
+
|
| 936 |
+
async def convert_media(self, dl_path, gid):
    """Convert media files to the user's requested container formats.

    self.convert_video / self.convert_audio have the form
    ``"<ext> [+|-] [ext1 ext2 ...]"``: ``+`` restricts conversion to the
    listed source extensions, ``-`` excludes them, no sign converts all.
    Successfully converted sources are deleted. Returns the converted
    path for a single file, otherwise *dl_path*; False on cancellation.
    """
    fvext = []
    if self.convert_video:
        vdata = self.convert_video.split()
        vext = vdata[0].lower()
        if len(vdata) > 2:
            if "+" in vdata[1].split():
                vstatus = "+"
            elif "-" in vdata[1].split():
                vstatus = "-"
            else:
                vstatus = ""
            fvext.extend(f".{ext.lower()}" for ext in vdata[2:])
        else:
            vstatus = ""
    else:
        vext = ""
        vstatus = ""

    faext = []
    if self.convert_audio:
        adata = self.convert_audio.split()
        aext = adata[0].lower()
        if len(adata) > 2:
            if "+" in adata[1].split():
                astatus = "+"
            elif "-" in adata[1].split():
                astatus = "-"
            else:
                astatus = ""
            faext.extend(f".{ext.lower()}" for ext in adata[2:])
        else:
            astatus = ""
    else:
        aext = ""
        astatus = ""

    # Gather candidates, then classify each as a video or audio job.
    self.files_to_proceed = {}
    all_files = []
    if self.is_file:
        all_files.append(dl_path)
    else:
        for dirpath, _, files in await sync_to_async(walk, dl_path, topdown=False):
            for file_ in files:
                f_path = ospath.join(dirpath, file_)
                all_files.append(f_path)

    for f_path in all_files:
        is_video, is_audio, _ = await get_document_type(f_path)
        # Skip files already in the target format; apply +/- filters.
        if (
            is_video
            and vext
            and not f_path.strip().lower().endswith(f".{vext}")
            and (
                vstatus == "+"
                and f_path.strip().lower().endswith(tuple(fvext))
                or vstatus == "-"
                and not f_path.strip().lower().endswith(tuple(fvext))
                or not vstatus
            )
        ):
            self.files_to_proceed[f_path] = "video"
        elif (
            is_audio
            and aext
            and not is_video
            and not f_path.strip().lower().endswith(f".{aext}")
            and (
                astatus == "+"
                and f_path.strip().lower().endswith(tuple(faext))
                or astatus == "-"
                and not f_path.strip().lower().endswith(tuple(faext))
                or not astatus
            )
        ):
            self.files_to_proceed[f_path] = "audio"
    del all_files

    if self.files_to_proceed:
        ffmpeg = FFMpeg(self)
        async with task_dict_lock:
            task_dict[self.mid] = FFmpegStatus(self, ffmpeg, gid, "Convert")
        self.progress = False
        # Serialize CPU-heavy work behind the shared lock.
        async with cpu_eater_lock:
            self.progress = True
            for f_path, f_type in self.files_to_proceed.items():
                self.proceed_count += 1
                LOGGER.info(f"Converting: {f_path}")
                if self.is_file:
                    self.subsize = self.size
                else:
                    self.subsize = await get_path_size(f_path)
                self.subname = ospath.basename(f_path)
                if f_type == "video":
                    res = await ffmpeg.convert_video(f_path, vext)
                else:
                    res = await ffmpeg.convert_audio(f_path, aext)
                if res:
                    try:
                        await remove(f_path)
                    except Exception:
                        self.is_cancelled = True
                        return False
                    if self.is_file:
                        return res
    return dl_path
|
| 1042 |
+
|
| 1043 |
+
async def generate_sample_video(self, dl_path, gid):
    """Create short SAMPLE.* clips for every video in the download.

    ``self.sample_video`` may be a "duration:part_duration" string
    (seconds); defaults are 60 and 4.  For a single-file task the
    original and its sample are moved into a new folder, which is
    returned; otherwise ``dl_path`` is returned unchanged.
    """
    data = (
        self.sample_video.split(":") if isinstance(self.sample_video, str) else ""
    )
    if data:
        sample_duration = int(data[0]) if data[0] else 60
        part_duration = int(data[1]) if len(data) > 1 else 4
    else:
        sample_duration = 60
        part_duration = 4

    self.files_to_proceed = {}
    if self.is_file and (await get_document_type(dl_path))[0]:
        file_ = ospath.basename(dl_path)
        self.files_to_proceed[dl_path] = file_
    else:
        # Walk the folder and collect every file ffprobe flags as video.
        for dirpath, _, files in await sync_to_async(walk, dl_path, topdown=False):
            for file_ in files:
                f_path = ospath.join(dirpath, file_)
                if (await get_document_type(f_path))[0]:
                    self.files_to_proceed[f_path] = file_
    if self.files_to_proceed:
        ffmpeg = FFMpeg(self)
        async with task_dict_lock:
            task_dict[self.mid] = FFmpegStatus(self, ffmpeg, gid, "Sample Video")
        self.progress = False
        # cpu_eater_lock serialises CPU-heavy ffmpeg work across tasks.
        async with cpu_eater_lock:
            self.progress = True
            LOGGER.info(f"Creating Sample video: {self.name}")
            for f_path, file_ in self.files_to_proceed.items():
                self.proceed_count += 1
                if self.is_file:
                    self.subsize = self.size
                else:
                    self.subsize = await get_path_size(f_path)
                self.subname = file_
                res = await ffmpeg.sample_video(
                    f_path, sample_duration, part_duration
                )
                if res and self.is_file:
                    # Keep original + sample together in a dedicated folder.
                    new_folder = ospath.splitext(f_path)[0]
                    if await aiopath.isfile(new_folder):
                        new_folder = f"{new_folder}_temp"
                    await makedirs(new_folder, exist_ok=True)
                    await gather(
                        move(f_path, f"{new_folder}/{file_}"),
                        move(res, f"{new_folder}/SAMPLE.{file_}"),
                    )
                    return new_folder
    return dl_path
|
| 1093 |
+
|
| 1094 |
+
async def proceed_compress(self, dl_path, gid):
    """Zip the download with 7z, optionally password protected.

    For a leech of a single file, the file is first moved into a new
    folder so the archive is created beside it.  Returns whatever
    ``SevenZ.zip`` returns (the archive path on success).
    """
    # self.compress carries the password when given as a string.
    pswd = self.compress if isinstance(self.compress, str) else ""
    if self.is_leech and self.is_file:
        new_folder = ospath.splitext(dl_path)[0]
        if await aiopath.isfile(new_folder):
            new_folder = f"{new_folder}_temp"
        name = ospath.basename(dl_path)
        await makedirs(new_folder, exist_ok=True)
        new_dl_path = f"{new_folder}/{name}"
        await move(dl_path, new_dl_path)
        dl_path = new_dl_path
        up_path = f"{new_dl_path}.zip"
        # The task now operates on a folder, not a single file.
        self.is_file = False
    else:
        up_path = f"{dl_path}.zip"
    sevenz = SevenZ(self)
    async with task_dict_lock:
        task_dict[self.mid] = SevenZStatus(self, sevenz, gid, "Zip")
    return await sevenz.zip(dl_path, up_path, pswd)
|
| 1113 |
+
|
| 1114 |
+
async def proceed_split(self, dl_path, gid):
    """Split files larger than ``self.split_size`` for Telegram upload.

    Videos (unless uploading as document) are split with ffmpeg; other
    files with ``split_file``.  Originals are removed after a successful
    split (or when they exceed ``self.max_split_size``).  Returns False
    on cancellation, otherwise None.
    """
    self.files_to_proceed = {}
    if self.is_file:
        f_size = await get_path_size(dl_path)
        if f_size > self.split_size:
            self.files_to_proceed[dl_path] = [f_size, ospath.basename(dl_path)]
    else:
        for dirpath, _, files in await sync_to_async(walk, dl_path, topdown=False):
            for file_ in files:
                f_path = ospath.join(dirpath, file_)
                f_size = await get_path_size(f_path)
                if f_size > self.split_size:
                    self.files_to_proceed[f_path] = [f_size, file_]
    if self.files_to_proceed:
        ffmpeg = FFMpeg(self)
        async with task_dict_lock:
            task_dict[self.mid] = FFmpegStatus(self, ffmpeg, gid, "Split")
        LOGGER.info(f"Splitting: {self.name}")
        for f_path, (f_size, file_) in self.files_to_proceed.items():
            self.proceed_count += 1
            if self.is_file:
                self.subsize = self.size
            else:
                self.subsize = f_size
            self.subname = file_
            # Ceiling division: number of parts needed.
            parts = -(-f_size // self.split_size)
            if self.equal_splits:
                split_size = (f_size // parts) + (f_size % parts)
            else:
                split_size = self.split_size
            if not self.as_doc and (await get_document_type(f_path))[0]:
                # Video split via ffmpeg reports progress.
                self.progress = True
                res = await ffmpeg.split(f_path, file_, parts, split_size)
            else:
                self.progress = False
                res = await split_file(f_path, split_size, self)
            if self.is_cancelled:
                return False
            if res or f_size >= self.max_split_size:
                try:
                    await remove(f_path)
                except Exception:
                    self.is_cancelled = True
|
| 1157 |
+
|
| 1158 |
+
def parse_metadata_string(self, metadata_str):
    """Delegate parsing of a raw metadata string to the metadata processor."""
    return self.metadata_processor.parse_string(metadata_str)
|
| 1160 |
+
|
| 1161 |
+
def merge_metadata_dicts(self, default_dict, cmd_dict):
    """Merge default metadata with command-supplied metadata (cmd wins)."""
    return self.metadata_processor.merge_dicts(default_dict, cmd_dict)
|
bot/helper/ext_utils/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
|
bot/helper/ext_utils/bot_utils.py
ADDED
|
@@ -0,0 +1,308 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from asyncio import (
|
| 2 |
+
create_subprocess_exec,
|
| 3 |
+
create_subprocess_shell,
|
| 4 |
+
run_coroutine_threadsafe,
|
| 5 |
+
sleep,
|
| 6 |
+
)
|
| 7 |
+
from asyncio.subprocess import PIPE
|
| 8 |
+
from concurrent.futures import ThreadPoolExecutor
|
| 9 |
+
from functools import partial, wraps
|
| 10 |
+
|
| 11 |
+
from httpx import AsyncClient
|
| 12 |
+
|
| 13 |
+
from ... import bot_loop, user_data
|
| 14 |
+
from ...core.config_manager import Config
|
| 15 |
+
from ..telegram_helper.button_build import ButtonMaker
|
| 16 |
+
from .help_messages import (
|
| 17 |
+
CLONE_HELP_DICT,
|
| 18 |
+
MIRROR_HELP_DICT,
|
| 19 |
+
YT_HELP_DICT,
|
| 20 |
+
)
|
| 21 |
+
from .telegraph_helper import telegraph
|
| 22 |
+
|
| 23 |
+
COMMAND_USAGE = {}
|
| 24 |
+
|
| 25 |
+
THREAD_POOL = ThreadPoolExecutor(max_workers=500)
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class SetInterval:
    """Run *action* every *interval* seconds on the shared bot event loop.

    The repeating task starts immediately on construction; call
    ``cancel()`` to stop it.
    """

    def __init__(self, interval, action, *args, **kwargs):
        self.interval = interval
        self.action = action
        # Schedule the repeating coroutine on the bot's event loop.
        self.task = bot_loop.create_task(self._set_interval(*args, **kwargs))

    async def _set_interval(self, *args, **kwargs):
        # Sleep first, then fire: the first call happens after one interval.
        while True:
            await sleep(self.interval)
            await self.action(*args, **kwargs)

    def cancel(self):
        self.task.cancel()
|
| 41 |
+
|
| 42 |
+
|
| 43 |
+
def _build_command_usage(help_dict, command_key):
    """Build paginated help keyboards for *command_key*.

    Splits the commands in *help_dict* (every key except "main") into
    pages of 10 buttons and stores ``[main_text, page1_markup, ...]`` in
    the module-level ``COMMAND_USAGE`` registry.
    """
    buttons = ButtonMaker()
    cmd_list = list(help_dict.keys())[1:]
    cmd_pages = [cmd_list[i : i + 10] for i in range(0, len(cmd_list), 10)]
    temp_store = []
    # Enumerate from 0: the previous range(1, len+1) loop skipped the first
    # page and raised IndexError on the last iteration.
    for i, page in enumerate(cmd_pages):
        for name in page:
            buttons.data_button(name, f"help {command_key} {name}")
        # Only emit navigation buttons that lead to an existing page.
        if i > 0:
            buttons.data_button("Prev", f"help pre {command_key} {i - 1}")
        if i < len(cmd_pages) - 1:
            buttons.data_button("Next", f"help nex {command_key} {i + 1}")
        buttons.data_button("Close", "help close", "footer")
        temp_store.append(buttons.build_menu(2))
        # Reset per page so buttons do not accumulate across pages.
        buttons.reset()
    COMMAND_USAGE[command_key] = [help_dict["main"], *temp_store]
|
| 57 |
+
|
| 58 |
+
|
| 59 |
+
def _build_command_usage(help_dict, command_key):
    """Register paginated help keyboards for *command_key* in COMMAND_USAGE."""
    maker = ButtonMaker()
    commands = list(help_dict.keys())[1:]
    pages = [commands[start : start + 10] for start in range(0, len(commands), 10)]
    page_markups = []
    last = len(pages) - 1

    for idx, chunk in enumerate(pages):
        for cmd_name in chunk:
            maker.data_button(cmd_name, f"help {command_key} {cmd_name} {idx}")
        # Navigation only appears when there is more than one page.
        if last > 0:
            if idx:
                maker.data_button("⫷", f"help pre {command_key} {idx - 1}")
            if idx != last:
                maker.data_button("⫸", f"help nex {command_key} {idx + 1}")
        maker.data_button("Close", "help close", "footer")
        page_markups.append(maker.build_menu(2))
        maker.reset()

    COMMAND_USAGE[command_key] = [help_dict["main"], *page_markups]
|
| 78 |
+
|
| 79 |
+
|
| 80 |
+
def create_help_buttons():
    """Pre-build the paginated help menus for mirror, yt-dlp and clone commands."""
    _build_command_usage(MIRROR_HELP_DICT, "mirror")
    _build_command_usage(YT_HELP_DICT, "yt")
    _build_command_usage(CLONE_HELP_DICT, "clone")
|
| 84 |
+
|
| 85 |
+
|
| 86 |
+
def compare_versions(v1, v2):
    """Compare two "vX.Y.Z[-suffix]" tags and describe their relation."""

    def _parts(tag):
        # Drop any "-suffix", strip the leading "v", split numerically.
        return [int(piece) for piece in tag.split("-")[0][1:].split(".")]

    local, remote = _parts(v1), _parts(v2)
    if local < remote:
        return "New Version Update is Available! Check Now!"
    if local > remote:
        return "More Updated! Kindly Contribute in Official"
    return "Already up to date with latest version"
|
| 97 |
+
|
| 98 |
+
|
| 99 |
+
def bt_selection_buttons(id_):
    """Build the torrent file-selection keyboard for gid/infohash *id_*."""
    # Long infohashes are shortened to the 12-char gid form.
    short_id = id_ if len(id_) <= 25 else id_[:12]
    # Pincode = first four digits appearing in the id.
    digits = [ch for ch in id_ if ch.isdigit()]
    pin = "".join(digits[:4])
    markup = ButtonMaker()
    if Config.WEB_PINCODE:
        markup.url_button("Select Files", f"{Config.BASE_URL}/app/files?gid={id_}")
        markup.data_button("Pincode", f"sel pin {short_id} {pin}")
    else:
        markup.url_button(
            "Select Files", f"{Config.BASE_URL}/app/files?gid={id_}&pin={pin}"
        )
    markup.data_button("Done Selecting", f"sel done {short_id} {id_}")
    markup.data_button("Cancel", f"sel cancel {short_id}")
    return markup.build_menu(2)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
async def get_telegraph_list(telegraph_content):
    """Publish drive-search results to Telegraph and return a VIEW button.

    One Telegraph page is created per content chunk; when there are
    several pages they are chained together via ``edit_telegraph``.
    """
    path = [
        (
            await telegraph.create_page(
                title="Mirror-Leech-Bot Drive Search", content=content
            )
        )["path"]
        for content in telegraph_content
    ]
    if len(path) > 1:
        await telegraph.edit_telegraph(path, telegraph_content)
    buttons = ButtonMaker()
    buttons.url_button("🔎 VIEW", f"https://telegra.ph/{path[0]}")
    return buttons.build_menu(1)
|
| 129 |
+
|
| 130 |
+
|
| 131 |
+
def arg_parser(items, arg_base):
    """Parse command tokens *items* into the pre-seeded *arg_base* dict in place.

    Keys of *arg_base* are recognised flags ("-z", "-n", ...) plus an
    optional "link" slot.  Flags in ``bool_arg_set`` may appear without a
    value; other flags consume the following tokens until the next known
    flag.  Values wrapped in "[...]" may span flag-like tokens.  Tokens
    before the first flag become the "link" value.
    """
    if not items:
        return

    arg_start = -1
    i = 0
    total = len(items)

    # Flags that are valid with no value (treated as booleans).
    bool_arg_set = {
        "-b",
        "-e",
        "-z",
        "-s",
        "-j",
        "-d",
        "-sv",
        "-ss",
        "-f",
        "-fd",
        "-fu",
        "-sync",
        "-hl",
        "-doc",
        "-med",
        "-ut",
        "-bt",
        "-yt",
    }
    # Config kill-switches force-disable bulk / multi / seed requests.
    if Config.DISABLE_BULK and "-b" in items:
        arg_base["-b"] = False

    if Config.DISABLE_MULTI and "-i" in items:
        arg_base["-i"] = 0

    if Config.DISABLE_SEED and "-d" in items:
        arg_base["-d"] = False

    while i < total:
        part = items[i]

        if part in arg_base:
            if arg_start == -1:
                arg_start = i

            if (
                i + 1 == total
                and part in bool_arg_set
                or part
                in [
                    "-s",
                    "-j",
                    "-f",
                    "-fd",
                    "-fu",
                    "-sync",
                    "-hl",
                    "-doc",
                    "-med",
                    "-ut",
                    "-bt",
                    "-yt",
                ]
            ):
                # Flag at end of input, or a pure-boolean flag: set True.
                arg_base[part] = True
            else:
                # Collect value tokens until the next recognised flag.
                sub_list = []
                for j in range(i + 1, total):
                    if items[j] in arg_base:
                        if part == "-c" and items[j] == "-c":
                            # Repeated -c tokens belong to the value.
                            sub_list.append(items[j])
                            continue
                        if part in bool_arg_set and not sub_list:
                            arg_base[part] = True
                            break
                        if not sub_list:
                            break
                        check = " ".join(sub_list).strip()
                        if check.startswith("[") and check.endswith("]"):
                            # Bracketed value is complete; stop here.
                            break
                        elif not check.startswith("["):
                            break
                        # Unclosed "[...": the flag token is part of the value.
                    sub_list.append(items[j])
                if sub_list:
                    value = " ".join(sub_list)
                    if part == "-ff" and not value.strip().startswith("["):
                        # -ff accumulates into a set unless given as a list literal.
                        arg_base[part].add(value)
                    else:
                        arg_base[part] = value
                    i += len(sub_list)

        i += 1

    if "link" in arg_base:
        # Everything before the first flag is the link/query text.
        link_items = items[:arg_start] if arg_start != -1 else items
        if link_items:
            arg_base["link"] = " ".join(link_items)
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
def get_size_bytes(size):
    """Convert a human size string like "1.5g" to bytes (0 when no unit matches)."""
    lowered = size.lower()
    units = (
        ("k", 1024),
        ("m", 1048576),
        ("g", 1073741824),
        ("t", 1099511627776),
    )
    for suffix, factor in units:
        if suffix in lowered:
            return int(float(lowered.split(suffix)[0]) * factor)
    return 0
|
| 242 |
+
|
| 243 |
+
|
| 244 |
+
async def get_content_type(url):
    """Return the Content-Type header of *url*, or None on any failure.

    SSL verification is deliberately disabled: many direct-download hosts
    serve behind broken certificates.
    """
    try:
        # httpx takes `verify` on the client constructor and uses
        # `follow_redirects` (the requests-style allow_redirects/verify
        # kwargs on .get() raised TypeError, so this always returned None).
        async with AsyncClient(verify=False, follow_redirects=True) as client:
            response = await client.get(url)
            return response.headers.get("Content-Type")
    except Exception:
        return None
|
| 251 |
+
|
| 252 |
+
|
| 253 |
+
def update_user_ldata(id_, key, value):
    """Store *value* under *key* in the per-user settings dict for *id_*."""
    user_data.setdefault(id_, {})[key] = value
|
| 256 |
+
|
| 257 |
+
|
| 258 |
+
async def cmd_exec(cmd, shell=False):
    """Run *cmd* and return ``(stdout, stderr, returncode)`` as stripped text.

    *cmd* is a full shell string when ``shell=True``, otherwise an argv list.
    """
    if shell:
        proc = await create_subprocess_shell(cmd, stdout=PIPE, stderr=PIPE)
    else:
        proc = await create_subprocess_exec(*cmd, stdout=PIPE, stderr=PIPE)
    raw_out, raw_err = await proc.communicate()

    def _decode(raw, fallback):
        # Output may not be valid UTF-8; fall back to a fixed message.
        try:
            return raw.decode().strip()
        except Exception:
            return fallback

    stdout = _decode(raw_out, "Unable to decode the response!")
    stderr = _decode(raw_err, "Unable to decode the error!")
    return stdout, stderr, proc.returncode
|
| 273 |
+
|
| 274 |
+
|
| 275 |
+
def new_task(func):
    """Decorator: schedule *func* on the bot loop and return the Task
    instead of awaiting it (fire-and-forget handlers)."""

    @wraps(func)
    async def wrapper(*args, **kwargs):
        task = bot_loop.create_task(func(*args, **kwargs))
        return task

    return wrapper
|
| 282 |
+
|
| 283 |
+
|
| 284 |
+
async def sync_to_async(func, *args, wait=True, **kwargs):
    """Run blocking *func* in the shared thread pool.

    Awaits and returns the result when *wait* is True, otherwise returns
    the pending future.
    """
    pfunc = partial(func, *args, **kwargs)
    future = bot_loop.run_in_executor(THREAD_POOL, pfunc)
    return await future if wait else future
|
| 288 |
+
|
| 289 |
+
|
| 290 |
+
def async_to_sync(func, *args, wait=True, **kwargs):
    """Call coroutine *func* from synchronous code via the bot event loop.

    Blocks for the result when *wait* is True, otherwise returns the future.
    """
    future = run_coroutine_threadsafe(func(*args, **kwargs), bot_loop)
    return future.result() if wait else future
|
| 293 |
+
|
| 294 |
+
|
| 295 |
+
def loop_thread(func):
    """Decorator form of async_to_sync; call with ``wait=True`` to block
    for the result, otherwise the concurrent future is returned."""

    @wraps(func)
    def wrapper(*args, wait=False, **kwargs):
        future = run_coroutine_threadsafe(func(*args, **kwargs), bot_loop)
        return future.result() if wait else future

    return wrapper
|
| 302 |
+
|
| 303 |
+
|
| 304 |
+
def safe_int(value, default=0):
    """Return ``int(value)``, falling back to *default* on bad input."""
    try:
        result = int(value)
    except (ValueError, TypeError):
        result = default
    return result
|
bot/helper/ext_utils/bulk_links.py
ADDED
|
@@ -0,0 +1,35 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from aiofiles import open as aiopen
|
| 2 |
+
from aiofiles.os import remove
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
def filter_links(links_list, bulk_start, bulk_end):
    """Return the requested sub-range of *links_list*; bounds <= 0 mean open-ended."""
    start = None if bulk_start <= 0 else bulk_start
    stop = None if bulk_end <= 0 else bulk_end
    return links_list[start:stop]
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
def get_links_from_message(text):
    """Split message text into one stripped entry per non-empty line."""
    return [line.strip() for line in text.split("\n") if line]
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
async def get_links_from_file(message):
    """Download the replied text document and return its non-empty lines."""
    links_list = []
    text_file_dir = await message.download()
    async with aiopen(text_file_dir, "r+") as f:
        lines = await f.readlines()
        links_list.extend(line.strip() for line in lines if len(line) != 0)
    # Remove the temporary download once parsed.
    await remove(text_file_dir)
    return links_list
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
async def extract_bulk_links(message, bulk_start, bulk_end):
    """Collect links for a bulk task from the replied-to message.

    Links come either from an attached text/plain document or from the
    reply text itself, then are narrowed to the [bulk_start:bulk_end]
    range via ``filter_links``.
    """
    bulk_start = int(bulk_start)
    bulk_end = int(bulk_end)
    links_list = []
    if reply_to := message.reply_to_message:
        if (file_ := reply_to.document) and (file_.mime_type == "text/plain"):
            links_list = await get_links_from_file(reply_to)
        elif text := reply_to.text:
            links_list = get_links_from_message(text)
    return filter_links(links_list, bulk_start, bulk_end) if links_list else links_list
|
bot/helper/ext_utils/db_handler.py
ADDED
|
@@ -0,0 +1,235 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from importlib import import_module
|
| 2 |
+
|
| 3 |
+
from aiofiles import open as aiopen
|
| 4 |
+
from aiofiles.os import path as aiopath
|
| 5 |
+
from motor.motor_asyncio import AsyncIOMotorClient
|
| 6 |
+
from pymongo.errors import PyMongoError
|
| 7 |
+
from pymongo.server_api import ServerApi
|
| 8 |
+
|
| 9 |
+
from ... import LOGGER, qbit_options, rss_dict, user_data
|
| 10 |
+
from ...core.config_manager import Config
|
| 11 |
+
from ...core.tg_client import TgClient
|
| 12 |
+
|
| 13 |
+
|
| 14 |
+
class DbManager:
    """Async MongoDB access layer (motor) for bot settings, users, RSS and tasks.

    Every method no-ops when ``self._return`` is True, i.e. when there is
    no usable DB connection.  Collections are namespaced per bot via
    ``TgClient.ID``.
    """

    def __init__(self):
        # _return=True means "no connection: skip all DB work".
        self._return = True
        self._conn = None
        self.db = None

    async def connect(self):
        """Open (or re-open) the client and select the working database."""
        try:
            if self._conn is not None:
                await self._conn.close()
            self._conn = AsyncIOMotorClient(
                Config.DATABASE_URL, server_api=ServerApi("1")
            )
            self.db = self._conn.wzmlx
            self._return = False
        except PyMongoError as e:
            LOGGER.error(f"Error in DB connection: {e}")
            self.db = None
            self._return = True
            self._conn = None

    async def disconnect(self):
        """Close the client and disable further DB operations."""
        self._return = True
        if self._conn is not None:
            await self._conn.close()
        self._conn = None

    async def update_deploy_config(self):
        """Snapshot the current config.py module into settings.deployConfig."""
        if self._return:
            return
        settings = import_module("config")
        config_file = {
            key: value.strip() if isinstance(value, str) else value
            for key, value in vars(settings).items()
            if not key.startswith("__")
        }
        await self.db.settings.deployConfig.replace_one(
            {"_id": TgClient.ID}, config_file, upsert=True
        )

    async def update_config(self, dict_):
        """Merge *dict_* into the stored runtime config."""
        if self._return:
            return
        await self.db.settings.config.update_one(
            {"_id": TgClient.ID}, {"$set": dict_}, upsert=True
        )

    async def update_aria2(self, key, value):
        """Persist a single aria2c option."""
        if self._return:
            return
        await self.db.settings.aria2c.update_one(
            {"_id": TgClient.ID}, {"$set": {key: value}}, upsert=True
        )

    async def update_qbittorrent(self, key, value):
        """Persist a single qBittorrent option."""
        if self._return:
            return
        await self.db.settings.qbittorrent.update_one(
            {"_id": TgClient.ID}, {"$set": {key: value}}, upsert=True
        )

    async def save_qbit_settings(self):
        """Persist the whole in-memory qbit_options dict."""
        if self._return:
            return
        await self.db.settings.qbittorrent.update_one(
            {"_id": TgClient.ID}, {"$set": qbit_options}, upsert=True
        )

    async def update_private_file(self, path):
        """Store (or delete, if missing) a private file as binary in settings.files.

        Dots in the path are mangled to "__" because MongoDB keys cannot
        contain dots.
        """
        if self._return:
            return
        db_path = path.replace(".", "__")
        if await aiopath.exists(path):
            async with aiopen(path, "rb+") as pf:
                pf_bin = await pf.read()
            await self.db.settings.files.update_one(
                {"_id": TgClient.ID}, {"$set": {db_path: pf_bin}}, upsert=True
            )
            if path == "config.py":
                await self.update_deploy_config()
        else:
            await self.db.settings.files.update_one(
                {"_id": TgClient.ID}, {"$unset": {db_path: ""}}, upsert=True
            )

    async def update_nzb_config(self):
        """Store the SABnzbd ini file as binary."""
        if self._return:
            return
        async with aiopen("sabnzbd/SABnzbd.ini", "rb+") as pf:
            nzb_conf = await pf.read()
        await self.db.settings.nzb.replace_one(
            {"_id": TgClient.ID}, {"SABnzbd__ini": nzb_conf}, upsert=True
        )

    async def update_user_data(self, user_id):
        """Persist a user's settings, preserving stored binary blobs.

        Binary keys (thumbnail, rclone config, token pickle, cookies) are
        stripped from the in-memory copy and kept from the existing
        document via an aggregation-pipeline merge.
        """
        if self._return:
            return
        data = user_data.get(user_id, {})
        data = data.copy()
        for key in ("THUMBNAIL", "RCLONE_CONFIG", "TOKEN_PICKLE", "USER_COOKIE_FILE"):
            data.pop(key, None)
        pipeline = [
            {
                "$replaceRoot": {
                    "newRoot": {
                        "$mergeObjects": [
                            data,
                            {
                                "$arrayToObject": {
                                    "$filter": {
                                        "input": {"$objectToArray": "$$ROOT"},
                                        "as": "field",
                                        "cond": {
                                            "$in": [
                                                "$$field.k",
                                                [
                                                    "THUMBNAIL",
                                                    "RCLONE_CONFIG",
                                                    "TOKEN_PICKLE",
                                                    "USER_COOKIE_FILE",
                                                ],
                                            ]
                                        },
                                    }
                                }
                            },
                        ]
                    }
                }
            }
        ]
        await self.db.users[TgClient.ID].update_one(
            {"_id": user_id}, pipeline, upsert=True
        )

    async def update_user_doc(self, user_id, key, path=""):
        """Store a user's binary document under *key*, or unset it when no path given."""
        if self._return:
            return
        if path:
            async with aiopen(path, "rb+") as doc:
                doc_bin = await doc.read()
            await self.db.users[TgClient.ID].update_one(
                {"_id": user_id}, {"$set": {key: doc_bin}}, upsert=True
            )
        else:
            await self.db.users[TgClient.ID].update_one(
                {"_id": user_id}, {"$unset": {key: ""}}, upsert=True
            )

    async def rss_update_all(self):
        """Persist every user's RSS subscriptions."""
        if self._return:
            return
        for user_id in list(rss_dict.keys()):
            await self.db.rss[TgClient.ID].replace_one(
                {"_id": user_id}, rss_dict[user_id], upsert=True
            )

    async def rss_update(self, user_id):
        """Persist one user's RSS subscriptions."""
        if self._return:
            return
        await self.db.rss[TgClient.ID].replace_one(
            {"_id": user_id}, rss_dict[user_id], upsert=True
        )

    async def rss_delete(self, user_id):
        """Remove one user's RSS document."""
        if self._return:
            return
        await self.db.rss[TgClient.ID].delete_one({"_id": user_id})

    async def add_incomplete_task(self, cid, link, tag):
        """Record an in-flight task so it can be reported after a restart."""
        if self._return:
            return
        await self.db.tasks[TgClient.ID].insert_one(
            {"_id": link, "cid": cid, "tag": tag}
        )

    async def get_pm_uids(self):
        """Return all known PM user ids (None when DB is unavailable)."""
        if self._return:
            return
        return [doc["_id"] async for doc in self.db.pm_users[TgClient.ID].find({})]

    async def set_pm_users(self, user_id):
        """Register a PM user once; logs only on first insertion."""
        if self._return:
            return
        if not bool(await self.db.pm_users[TgClient.ID].find_one({"_id": user_id})):
            await self.db.pm_users[TgClient.ID].insert_one({"_id": user_id})
            LOGGER.info(f"New PM User Added : {user_id}")

    async def rm_pm_user(self, user_id):
        """Remove a PM user record."""
        if self._return:
            return
        await self.db.pm_users[TgClient.ID].delete_one({"_id": user_id})

    async def rm_complete_task(self, link):
        """Remove a finished task's incomplete-task record."""
        if self._return:
            return
        await self.db.tasks[TgClient.ID].delete_one({"_id": link})

    async def get_incomplete_tasks(self):
        """Drain the incomplete-task collection into {cid: {tag: [links]}}.

        The collection is dropped afterwards so tasks are reported once.
        """
        notifier_dict = {}
        if self._return:
            return notifier_dict
        if await self.db.tasks[TgClient.ID].find_one():
            rows = self.db.tasks[TgClient.ID].find({})
            async for row in rows:
                if row["cid"] in list(notifier_dict.keys()):
                    if row["tag"] in list(notifier_dict[row["cid"]]):
                        notifier_dict[row["cid"]][row["tag"]].append(row["_id"])
                    else:
                        notifier_dict[row["cid"]][row["tag"]] = [row["_id"]]
                else:
                    notifier_dict[row["cid"]] = {row["tag"]: [row["_id"]]}
        await self.db.tasks[TgClient.ID].drop()
        return notifier_dict

    async def trunc_table(self, name):
        """Drop this bot's sub-collection of *name*."""
        if self._return:
            return
        await self.db[name][TgClient.ID].drop()
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
# Shared singleton used across the bot for all MongoDB access.
database = DbManager()
|
bot/helper/ext_utils/error_handler.py
ADDED
|
@@ -0,0 +1,223 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import time
|
| 2 |
+
from dataclasses import dataclass, field
|
| 3 |
+
from datetime import datetime, timedelta
|
| 4 |
+
from enum import Enum
|
| 5 |
+
from functools import wraps
|
| 6 |
+
from typing import Any, Callable, Dict, List, Optional
|
| 7 |
+
|
| 8 |
+
from .. import LOGGER
|
| 9 |
+
|
| 10 |
+
|
| 11 |
+
class ErrorSeverity(Enum):
    """How serious a recorded error is, from informational to fatal."""

    LOW = "low"
    MEDIUM = "medium"
    HIGH = "high"
    CRITICAL = "critical"
|
| 16 |
+
|
| 17 |
+
|
| 18 |
+
class CircuitState(Enum):
    """Circuit-breaker states: CLOSED = normal, OPEN = rejecting,
    HALF_OPEN = probing for recovery."""

    CLOSED = "closed"
    OPEN = "open"
    HALF_OPEN = "half_open"
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
@dataclass
class ErrorRecord:
    """A single captured error with context for later inspection."""

    timestamp: datetime
    error_type: str
    message: str
    severity: ErrorSeverity
    # Arbitrary extra key/values supplied by the caller.
    context: Dict[str, Any] = field(default_factory=dict)
    # Formatted traceback text, when available.
    traceback: Optional[str] = None
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
@dataclass
class HealthMetrics:
    """Aggregate success/failure counters for one monitored operation."""

    total_operations: int = 0
    successful_operations: int = 0
    failed_operations: int = 0
    # failed / total, maintained by the monitor.
    error_rate: float = 0.0
    avg_response_time: float = 0.0
    last_updated: datetime = field(default_factory=datetime.now)
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class CircuitBreaker:
    """Trip after *failure_threshold* consecutive failures; allow a probe
    call again once *recovery_timeout* seconds have elapsed (HALF_OPEN)."""

    def __init__(self, failure_threshold: int = 5, recovery_timeout: int = 60):
        self.failure_threshold = failure_threshold
        self.recovery_timeout = recovery_timeout
        self.failure_count = 0
        self.last_failure_time = None
        self.state = CircuitState.CLOSED

    def can_execute(self) -> bool:
        """Return True when a call may proceed under the current state."""
        if self.state == CircuitState.CLOSED:
            return True
        if self.state == CircuitState.OPEN:
            # total_seconds() gives real elapsed time; the previous
            # `.seconds` attribute ignored whole days, so a breaker could
            # stay open far past its recovery window.
            elapsed = (datetime.now() - self.last_failure_time).total_seconds()
            if elapsed >= self.recovery_timeout:
                self.state = CircuitState.HALF_OPEN
                return True
            return False
        # HALF_OPEN: let a single probe call through.
        return True

    def on_success(self):
        """Reset the breaker after a successful call."""
        self.failure_count = 0
        self.state = CircuitState.CLOSED

    def on_failure(self):
        """Record a failure; trip the breaker once the threshold is hit."""
        self.failure_count += 1
        self.last_failure_time = datetime.now()
        if self.failure_count >= self.failure_threshold:
            self.state = CircuitState.OPEN
|
| 74 |
+
|
| 75 |
+
|
| 76 |
+
class ErrorMonitor:
    """Central registry of error records, per-operation health metrics and
    circuit breakers, shared by the decorators in this module."""

    def __init__(self):
        self.max_errors = 1000
        # Bounded deque drops the oldest record in O(1); the previous
        # list + pop(0) eviction was O(n) per overflow.
        self.errors: deque = deque(maxlen=self.max_errors)
        self.health_metrics: Dict[str, HealthMetrics] = {}
        self.circuit_breakers: Dict[str, CircuitBreaker] = {}

    def record_error(
        self,
        error_type: str,
        message: str,
        severity: ErrorSeverity = ErrorSeverity.MEDIUM,
        context: Optional[Dict[str, Any]] = None,
        traceback: Optional[str] = None,
    ):
        """Append an ErrorRecord to the bounded history and log it."""
        self.errors.append(
            ErrorRecord(
                timestamp=datetime.now(),
                error_type=error_type,
                message=message,
                severity=severity,
                context=context or {},
                traceback=traceback,
            )
        )
        LOGGER.error(f"Error recorded: {error_type} - {message}")

    def update_health_metrics(
        self, operation: str, success: bool, response_time: float
    ):
        """Fold one call result (outcome + duration) into *operation*'s metrics."""
        metrics = self.health_metrics.setdefault(operation, HealthMetrics())
        metrics.total_operations += 1
        if success:
            metrics.successful_operations += 1
        else:
            metrics.failed_operations += 1
        metrics.error_rate = metrics.failed_operations / metrics.total_operations
        # Incremental mean keeps memory constant across calls.
        metrics.avg_response_time = (
            metrics.avg_response_time * (metrics.total_operations - 1) + response_time
        ) / metrics.total_operations
        metrics.last_updated = datetime.now()

    def get_circuit_breaker(self, operation: str) -> CircuitBreaker:
        """Return the breaker for *operation*, creating it on first use."""
        if operation not in self.circuit_breakers:
            self.circuit_breakers[operation] = CircuitBreaker()
        return self.circuit_breakers[operation]

    def get_error_summary(self, hours: int = 24) -> Dict[str, Any]:
        """Summarise errors from the last *hours* plus current health metrics."""
        cutoff_time = datetime.now() - timedelta(hours=hours)
        recent_errors = [e for e in self.errors if e.timestamp >= cutoff_time]

        return {
            "total_errors": len(recent_errors),
            "errors_by_severity": {
                severity.value: sum(
                    1 for e in recent_errors if e.severity == severity
                )
                for severity in ErrorSeverity
            },
            "errors_by_type": {
                error_type: sum(
                    1 for e in recent_errors if e.error_type == error_type
                )
                for error_type in {e.error_type for e in recent_errors}
            },
            "health_metrics": {
                op: {
                    "error_rate": metrics.error_rate,
                    "avg_response_time": metrics.avg_response_time,
                    "total_operations": metrics.total_operations,
                }
                for op, metrics in self.health_metrics.items()
            },
        }
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
# Module-level singleton shared by the decorators in this module.
error_monitor = ErrorMonitor()
|
| 161 |
+
|
| 162 |
+
|
| 163 |
+
def error_handler(operation: str = None):
    """Decorate an async callable with centralized error tracking.

    Routes every call through the shared ``error_monitor``: consults the
    operation's circuit breaker, updates health metrics with the call
    duration, and records any raised exception before re-raising it.
    """

    def decorator(func: Callable) -> Callable:
        @wraps(func)
        async def wrapper(*args, **kwargs):
            op_name = operation or func.__name__
            start_time = time.time()

            breaker = error_monitor.get_circuit_breaker(op_name)
            if not breaker.can_execute():
                raise Exception(f"Circuit breaker open for {op_name}")

            try:
                result = await func(*args, **kwargs)
            except Exception as exc:
                breaker.on_failure()
                error_monitor.update_health_metrics(
                    op_name, False, time.time() - start_time
                )
                error_monitor.record_error(
                    error_type=type(exc).__name__,
                    message=str(exc),
                    severity=ErrorSeverity.HIGH,
                    context={
                        "operation": op_name,
                        "args": str(args),
                        "kwargs": str(kwargs),
                    },
                )
                raise
            breaker.on_success()
            error_monitor.update_health_metrics(
                op_name, True, time.time() - start_time
            )
            return result

        return wrapper

    return decorator
|
| 201 |
+
|
| 202 |
+
|
| 203 |
+
def circuit_breaker(
    operation: str, failure_threshold: int = 5, recovery_timeout: int = 60
):
    """Apply a dedicated circuit breaker to one async callable.

    BUG FIX: the breaker is now created once per decorated function (in the
    decorator closure) instead of on every invocation. The original built a
    fresh ``CircuitBreaker`` inside ``wrapper``, so failure counts never
    accumulated and the breaker could never open.
    """

    def decorator(func: Callable) -> Callable:
        # One breaker per decorated function, shared across all calls.
        cb = CircuitBreaker(failure_threshold, recovery_timeout)

        @wraps(func)
        async def wrapper(*args, **kwargs):
            if not cb.can_execute():
                raise Exception(f"Circuit breaker open for {operation}")

            try:
                result = await func(*args, **kwargs)
            except Exception:
                cb.on_failure()
                raise
            cb.on_success()
            return result

        return wrapper

    return decorator
|
bot/helper/ext_utils/exceptions.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
class DirectDownloadLinkException(Exception):
    """No method was found for extracting a direct download link from the HTTP link."""

    pass
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class NotSupportedExtractionArchive(Exception):
    """The archive format the user is trying to extract is not supported."""

    pass
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
class RssShutdownException(Exception):
    """Raised when shutdown is requested, to stop the RSS monitor loop."""

    pass
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class TgLinkException(Exception):
    """No access granted for this chat."""

    pass
|
bot/helper/ext_utils/files_utils.py
ADDED
|
@@ -0,0 +1,453 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from aioshutil import rmtree as aiormtree, move
|
| 2 |
+
from asyncio import create_subprocess_exec, sleep, wait_for
|
| 3 |
+
from asyncio.subprocess import PIPE
|
| 4 |
+
from contextlib import suppress
|
| 5 |
+
from psutil import disk_usage
|
| 6 |
+
from os import path as ospath, readlink, walk
|
| 7 |
+
from re import I, escape, search as re_search, split as re_split
|
| 8 |
+
|
| 9 |
+
from aiofiles.os import (
|
| 10 |
+
listdir,
|
| 11 |
+
remove,
|
| 12 |
+
rmdir,
|
| 13 |
+
symlink,
|
| 14 |
+
makedirs as aiomakedirs,
|
| 15 |
+
path as aiopath,
|
| 16 |
+
readlink as aioreadlink,
|
| 17 |
+
)
|
| 18 |
+
from magic import Magic
|
| 19 |
+
|
| 20 |
+
from ... import DOWNLOAD_DIR, LOGGER
|
| 21 |
+
from ...core.torrent_manager import TorrentManager
|
| 22 |
+
from .bot_utils import cmd_exec, sync_to_async
|
| 23 |
+
from .exceptions import NotSupportedExtractionArchive
|
| 24 |
+
|
| 25 |
+
# File extensions treated as archives (used by is_archive() and
# get_base_name()).  Order matters for get_base_name(): compound suffixes
# (".tar.bz2", ".tar.gz") precede their short forms (".bz2", ".gz") so the
# longest match is stripped first.
ARCH_EXT = [
    ".tar.bz2", ".tar.gz", ".bz2", ".gz", ".tar.xz", ".tar", ".tbz2", ".tgz",
    ".lzma2", ".zip", ".7z", ".z", ".rar", ".iso", ".wim", ".cab", ".apm",
    ".arj", ".chm", ".cpio", ".cramfs", ".deb", ".dmg", ".fat", ".hfs",
    ".lzh", ".lzma", ".mbr", ".msi", ".mslz", ".nsis", ".ntfs", ".rpm",
    ".squashfs", ".udf", ".vhd", ".xar", ".zst", ".zstd", ".cbz", ".apfs",
    ".ar", ".qcow", ".macho", ".exe", ".dll", ".sys", ".pmd", ".swf",
    ".swfc", ".simg", ".vdi", ".vhdx", ".vmdk", ".gzip", ".lzma86",
    ".sha256", ".sha512", ".sha224", ".sha384", ".sha1", ".md5", ".crc32",
    ".crc64",
]
|
| 91 |
+
|
| 92 |
+
|
| 93 |
+
# Matches the FIRST volume of a multi-part archive (.part1.rar, .7z.001,
# .zip.001) or a standalone .rar that is not a later .partN volume.
FIRST_SPLIT_REGEX = (
    r"\.part0*1\.rar$|\.7z\.0*1$|\.zip\.0*1$|^(?!.*\.part\d+\.rar$).*\.rar$"
)

# Matches ANY split-archive volume suffix (.rNN, .7z.NNN, .zNN, .zip.NNN,
# .partN.rar).
SPLIT_REGEX = r"\.r\d+$|\.7z\.\d+$|\.z\d+$|\.zip\.\d+$|\.part\d+\.rar$"


def is_first_archive_split(file):
    """Return True if *file* is the first volume of a split archive (or a lone .rar)."""
    return re_search(FIRST_SPLIT_REGEX, file.lower(), I) is not None


def is_archive(file):
    """Return True if the trimmed, case-folded name ends with a known archive extension."""
    name = file.strip().lower()
    return name.endswith(tuple(ARCH_EXT))


def is_archive_split(file):
    """Return True if *file* is any volume of a split archive."""
    return re_search(SPLIT_REGEX, file.lower(), I) is not None
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
async def clean_target(opath):
    """Delete *opath* — a file or a whole directory tree — if it exists.

    Failures are logged, never raised.
    """
    if not await aiopath.exists(opath):
        return
    LOGGER.info(f"Cleaning Target: {opath}")
    try:
        if await aiopath.isdir(opath):
            await aiormtree(opath, ignore_errors=True)
        else:
            await remove(opath)
    except Exception as e:
        LOGGER.error(str(e))
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
async def clean_download(opath):
    """Remove a download directory tree if present; errors are logged, not raised."""
    if not await aiopath.exists(opath):
        return
    LOGGER.info(f"Cleaning Download: {opath}")
    try:
        await aiormtree(opath, ignore_errors=True)
    except Exception as e:
        LOGGER.error(str(e))
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
async def clean_all():
    """Remove every torrent from the client, wipe DOWNLOAD_DIR and recreate it."""
    await TorrentManager.remove_all()
    LOGGER.info("Cleaning Download Directory")
    # NOTE(review): shells out to `rm -rf` instead of aiormtree — presumably to
    # handle entries rmtree struggles with; confirm before changing.
    await (await create_subprocess_exec("rm", "-rf", DOWNLOAD_DIR)).wait()
    await aiomakedirs(DOWNLOAD_DIR, exist_ok=True)
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
async def clean_unwanted(opath):
    """Remove leftover ".parts" files and ".unwanted" folders under *opath*,
    then prune any directories left empty (deepest first)."""
    LOGGER.info(f"Cleaning unwanted files/folders: {opath}")
    for dirpath, _, files in await sync_to_async(walk, opath, topdown=False):
        for filee in files:
            f_path = ospath.join(dirpath, filee)
            # Hidden "*.parts" files — presumably qBittorrent partial-piece
            # caches; TODO confirm.
            if filee.strip().endswith(".parts") and filee.startswith("."):
                await remove(f_path)
        if dirpath.strip().endswith(".unwanted"):
            await aiormtree(dirpath, ignore_errors=True)
    # Second bottom-up pass: drop directories emptied by the deletions above.
    for dirpath, _, files in await sync_to_async(walk, opath, topdown=False):
        if not await listdir(dirpath):
            await rmdir(dirpath)
|
| 152 |
+
|
| 153 |
+
|
| 154 |
+
async def check_storage_threshold(size, threshold, io_task=False, alloc=False):
    """Return True if downloading *size* bytes keeps at least *threshold* bytes free.

    io_task doubles the requirement (download + a processing copy); alloc
    means space is already allocated, so only the bare threshold is checked.
    """
    free = (await sync_to_async(disk_usage, DOWNLOAD_DIR)).free
    return free >= (threshold + (size * (2 if io_task else 1) if not alloc else 0))
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
async def get_path_size(opath):
    """Return the total size in bytes of a file or directory tree.

    Symlinks are resolved via readlink so the target's size is counted.
    NOTE(review): a dangling symlink would make getsize raise here — confirm
    callers guard against that.
    """
    total_size = 0
    if await aiopath.isfile(opath):
        if await aiopath.islink(opath):
            opath = await aioreadlink(opath)
        return await aiopath.getsize(opath)
    for root, _, files in await sync_to_async(walk, opath):
        for f in files:
            abs_path = ospath.join(root, f)
            if await aiopath.islink(abs_path):
                abs_path = await aioreadlink(abs_path)
            total_size += await aiopath.getsize(abs_path)
    return total_size
|
| 172 |
+
|
| 173 |
+
|
| 174 |
+
async def count_files_and_folders(opath):
    """Walk *opath* and return the pair (total_folders, total_files)."""
    folder_count = file_count = 0
    for _, dirs, files in await sync_to_async(walk, opath):
        file_count += len(files)
        folder_count += len(dirs)
    return folder_count, file_count
|
| 181 |
+
|
| 182 |
+
|
| 183 |
+
def get_base_name(orig_path):
    """Strip the archive extension from *orig_path* and return the base name.

    Compound suffixes (".tar.gz") precede their short forms (".gz") in
    ARCH_EXT, so the longest matching extension wins.

    Raises:
        NotSupportedExtractionArchive: when no known archive extension matches.
    """
    extension = next(
        (ext for ext in ARCH_EXT if orig_path.strip().lower().endswith(ext)), ""
    )
    if extension == "":
        raise NotSupportedExtractionArchive("File format not supported for extraction")
    # ROBUSTNESS FIX: escape the extension before embedding it in the regex —
    # previously the "." characters acted as wildcards.
    return re_split(f"{escape(extension)}$", orig_path, maxsplit=1, flags=I)[0]
|
| 191 |
+
|
| 192 |
+
|
| 193 |
+
async def create_recursive_symlink(source, destination):
    """Mirror *source* under *destination* as a tree of symlinks.

    Directories are recreated for real; each file becomes a symlink to the
    original.  NOTE(review): uses blocking ospath.isdir/isfile inside an async
    function — cheap stat calls, but not strictly non-blocking.
    """
    if ospath.isdir(source):
        await aiomakedirs(destination, exist_ok=True)
        for item in await listdir(source):
            item_source = ospath.join(source, item)
            item_dest = ospath.join(destination, item)
            await create_recursive_symlink(item_source, item_dest)
    elif ospath.isfile(source):
        try:
            await symlink(source, destination)
        except FileExistsError:
            LOGGER.error(f"Shortcut already exists: {destination}")
        except Exception as e:
            LOGGER.error(f"Error creating shortcut for {source}: {e}")
|
| 207 |
+
|
| 208 |
+
|
| 209 |
+
def get_mime_type(file_path):
    """Return the MIME type of *file_path*, resolving a symlink first.

    Falls back to "text/plain" when detection yields nothing.
    """
    target = readlink(file_path) if ospath.islink(file_path) else file_path
    detected = Magic(mime=True).from_file(target)
    return detected or "text/plain"
|
| 216 |
+
|
| 217 |
+
|
| 218 |
+
async def remove_excluded_files(fpath, ee):
    """Delete every file under *fpath* whose name ends with an extension in *ee*.

    Directories named "yt-dlp-thumb" are left untouched.
    """
    excluded = tuple(ee)
    for root, _, files in await sync_to_async(walk, fpath):
        if root.strip().endswith("/yt-dlp-thumb"):
            continue
        for name in files:
            if name.strip().lower().endswith(excluded):
                await remove(ospath.join(root, name))
|
| 225 |
+
|
| 226 |
+
|
| 227 |
+
async def move_and_merge(source, destination, mid):
    """Recursively move *source* into *destination*, merging directory trees.

    File-name collisions are resolved by prefixing the task id *mid*;
    downloader control files (.aria2, .!qB) are skipped entirely.
    """
    if not await aiopath.exists(destination):
        await aiomakedirs(destination, exist_ok=True)
    for item in await listdir(source):
        item = item.strip()
        src_path = f"{source}/{item}"
        dest_path = f"{destination}/{item}"
        if await aiopath.isdir(src_path):
            if await aiopath.exists(dest_path):
                # Destination folder already exists: merge contents recursively.
                await move_and_merge(src_path, dest_path, mid)
            else:
                await move(src_path, dest_path)
        else:
            if item.endswith((".aria2", ".!qB")):
                continue
            if await aiopath.exists(dest_path):
                # Avoid clobbering an existing file: prefix with the task id.
                dest_path = f"{destination}/{mid}-{item}"
            await move(src_path, dest_path)
|
| 245 |
+
|
| 246 |
+
|
| 247 |
+
async def join_files(opath):
    """Reassemble "<name>.001/.002/..." split files in *opath* using `cat`.

    A group is joined only when a ".0*2" part exists and is NOT a 7z/zip
    multi-volume archive (those are handled by extraction instead).  On
    success the part files are removed; on failure the partial output is
    deleted.
    """
    files = await listdir(opath)
    results = []
    exists = False
    for file_ in files:
        # A second part (".002") implies a split set exists; MIME check
        # excludes 7z/zip volumes.
        if re_search(r"\.0+2$", file_) and await sync_to_async(
            get_mime_type, f"{opath}/{file_}"
        ) not in ["application/x-7z-compressed", "application/zip"]:
            exists = True
            final_name = file_.rsplit(".", 1)[0]
            fpath = f"{opath}/{final_name}"
            # NOTE(review): the shell command embeds the file name; quotes in a
            # crafted name could escape the quoting — worth hardening.
            cmd = f'cat "{fpath}."* > "{fpath}"'
            _, stderr, code = await cmd_exec(cmd, True)
            if code != 0:
                LOGGER.error(f"Failed to join {final_name}, stderr: {stderr}")
                if await aiopath.isfile(fpath):
                    await remove(fpath)
            else:
                results.append(final_name)

    if not exists:
        LOGGER.warning("No files to join!")
    elif results:
        LOGGER.info("Join Completed!")
        # Drop the now-redundant numbered parts of each joined file.
        for res in results:
            for file_ in files:
                if re_search(rf"{escape(res)}\.0[0-9]+$", file_):
                    await remove(f"{opath}/{file_}")
|
| 275 |
+
|
| 276 |
+
|
| 277 |
+
async def split_file(f_path, split_size, listener):
    """Split *f_path* into numbered parts of *split_size* bytes via `split`.

    Returns False when the task was cancelled (a -9/SIGKILL exit is treated
    as cancellation); True otherwise — even if `split` failed, in which case
    the error is only logged.
    """
    out_path = f"{f_path}."
    if listener.is_cancelled:
        return False
    listener.subproc = await create_subprocess_exec(
        "split",
        "--numeric-suffixes=1",
        "--suffix-length=3",
        f"--bytes={split_size}",
        f_path,
        out_path,
        stderr=PIPE,
    )
    _, stderr = await listener.subproc.communicate()
    code = listener.subproc.returncode
    if listener.is_cancelled:
        return False
    if code == -9:
        # Killed by SIGKILL: our cancellation path.
        listener.is_cancelled = True
        return False
    elif code != 0:
        try:
            stderr = stderr.decode().strip()
        except Exception:
            stderr = "Unable to decode the error!"
        LOGGER.error(f"{stderr}. Split Document: {f_path}")
    return True
|
| 304 |
+
|
| 305 |
+
|
| 306 |
+
class SevenZ:
    """Wrapper around the 7z CLI for extraction and (split-)zipping, with
    progress parsed live from 7z's "-bsp1" stdout stream."""

    def __init__(self, listener):
        # listener: task object carrying cancellation flag, subprocess slot
        # and split/leech settings (project type).
        self._listener = listener
        self._processed_bytes = 0
        self._percentage = "0%"

    @property
    def processed_bytes(self):
        # Estimated bytes processed so far (percentage * total size).
        return self._processed_bytes

    @property
    def progress(self):
        # Last percentage string parsed from 7z output, e.g. "42%".
        return self._percentage

    async def _sevenz_progress(self):
        """Follow the running 7z subprocess's stdout and update progress.

        Phase 1 reads whole lines until the total size ("NNN bytes" or
        "Total Physical Size = NNN") is seen; phase 2 reads byte-by-byte,
        accumulating until a "%" so in-place progress updates are caught.
        """
        pattern = r"(\d+)\s+bytes|Total Physical Size\s*=\s*(\d+)"
        while not (
            self._listener.subproc.returncode is not None
            or self._listener.is_cancelled
            or self._listener.subproc.stdout.at_eof()
        ):
            try:
                # Short timeout: fall through to phase 2 once headers stop.
                line = await wait_for(self._listener.subproc.stdout.readline(), 2)
            except Exception:
                break
            line = line.decode().strip()
            if match := re_search(pattern, line):
                self._listener.subsize = int(match[1] or match[2])
            await sleep(0.05)
        s = b""
        while not (
            self._listener.is_cancelled
            or self._listener.subproc.returncode is not None
            or self._listener.subproc.stdout.at_eof()
        ):
            try:
                char = await wait_for(self._listener.subproc.stdout.read(1), 60)
            except Exception:
                break
            if not char:
                break
            s += char
            if char == b"%":
                try:
                    # Buffer looks like "... 42%": keep the last token.
                    self._percentage = s.decode().rsplit(" ", 1)[-1].strip()
                    self._processed_bytes = (
                        int(self._percentage.strip("%")) / 100
                    ) * self._listener.subsize
                except Exception:
                    self._processed_bytes = 0
                    self._percentage = "0%"
                s = b""
            await sleep(0.05)

        # Reset so a reused instance starts clean.
        self._processed_bytes = 0
        self._percentage = "0%"

    async def extract(self, f_path, t_path, pswd):
        """Extract archive *f_path* into *t_path* (optional password *pswd*).

        Returns False on cancellation, otherwise the raw 7z exit code
        (0 == success — callers must compare, not truth-test).
        """
        cmd = [
            "7z",
            "x",
            f"-p{pswd}",
            f_path,
            f"-o{t_path}",
            "-aot",
            "-xr!@PaxHeader",
            "-bsp1",
            "-bse1",
            "-bb3",
        ]
        if not pswd:
            # Drop the "-p" flag entirely when no password was given.
            del cmd[2]
        if self._listener.is_cancelled:
            return False
        self._listener.subproc = await create_subprocess_exec(
            *cmd,
            stdout=PIPE,
            stderr=PIPE,
        )
        await self._sevenz_progress()
        _, stderr = await self._listener.subproc.communicate()
        code = self._listener.subproc.returncode
        if self._listener.is_cancelled:
            return False
        if code == -9:
            # SIGKILL: treat as cancellation.
            self._listener.is_cancelled = True
            return False
        elif code != 0:
            try:
                stderr = stderr.decode().strip()
            except Exception:
                stderr = "Unable to decode the error!"
            LOGGER.error(f"{stderr}. Unable to extract archive!. Path: {f_path}")
        return code

    async def zip(self, dl_path, up_path, pswd):
        """Zip *dl_path* into *up_path* with 7z (store mode, -mx=0).

        Volumes (-v) are produced only for leech tasks larger than the split
        size.  On success the source is deleted and *up_path* is returned;
        on failure the partial archive is removed and *dl_path* is returned.
        """
        size = await get_path_size(dl_path)
        if self._listener.equal_splits:
            # Ceil-divide into equal parts; remainder folded into part size.
            parts = -(-size // self._listener.split_size)
            split_size = (size // parts) + (size % parts)
        else:
            split_size = self._listener.split_size
        cmd = [
            "7z",
            f"-v{split_size}b",
            "a",
            "-mx=0",
            f"-p{pswd}",
            up_path,
            dl_path,
            "-bsp1",
            "-bse1",
            "-bb3",
        ]
        if self._listener.is_leech and int(size) > self._listener.split_size:
            if not pswd:
                del cmd[4]
            LOGGER.info(f"Zip: orig_path: {dl_path}, zip_path: {up_path}.0*")
        else:
            # No splitting: drop the "-v" volume flag (and "-p" if unused).
            del cmd[1]
            if not pswd:
                del cmd[3]
            LOGGER.info(f"Zip: orig_path: {dl_path}, zip_path: {up_path}")
        if self._listener.is_cancelled:
            return False
        self._listener.subproc = await create_subprocess_exec(
            *cmd, stdout=PIPE, stderr=PIPE
        )
        await self._sevenz_progress()
        _, stderr = await self._listener.subproc.communicate()
        code = self._listener.subproc.returncode
        if self._listener.is_cancelled:
            return False
        if code == -9:
            # SIGKILL: treat as cancellation.
            self._listener.is_cancelled = True
            return False
        elif code == 0:
            await clean_target(dl_path)
            return up_path
        else:
            if await aiopath.exists(up_path):
                await remove(up_path)
            try:
                stderr = stderr.decode().strip()
            except Exception:
                stderr = "Unable to decode the error!"
            LOGGER.error(f"{stderr}. Unable to zip this path: {dl_path}")
            return dl_path
|
bot/helper/ext_utils/help_messages.py
ADDED
|
@@ -0,0 +1,551 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# ruff: noqa: F403, F405
"""HTML-formatted help texts shown to users for bot command flags."""

# Basic /mirror usage (qb-prefixed variants are torrent-only).
mirror = """<b>Send link along with command line or </b>

/cmd link

<b>By replying to link/file</b>:

/cmd -n new name -e -up upload destination

<b>NOTE:</b>
1. Commands that start with <b>qb</b> are ONLY for torrents."""

# yt-dlp based download help.
yt = """<b>Send link along with command line</b>:

/cmd link
<b>By replying to link</b>:
/cmd -n new name -z password -opt x:y|x1:y1

Check here all supported <a href='https://github.com/yt-dlp/yt-dlp/blob/master/supportedsites.md'>SITES</a>
Check all yt-dlp api options from this <a href='https://github.com/yt-dlp/yt-dlp/blob/master/yt_dlp/YoutubeDL.py#L212'>FILE</a> or use this <a href='https://t.me/mltb_official_channel/177'>script</a> to convert cli arguments to api options."""

# /clone command help.
clone = """Send Gdrive|Gdot|Filepress|Filebee|Appdrive|Gdflix link or rclone path along with command or by replying to the link/rc_path by command.
Use -sync to use sync method in rclone. Example: /cmd rcl/rclone_path -up rcl/rclone_path/rc -sync"""

# -n flag: rename the download.
new_name = """<b>New Name</b>: -n

/cmd link -n new name
Note: Doesn't work with torrents"""

# -i flag: process multiple replied links/files.
multi_link = """<b>Multi links only by replying to first link/file</b>: -i

/cmd -i 10(number of links/files)"""

# -m flag: collect several downloads into one folder/task.
same_dir = """<b>Move file(s)/folder(s) to new folder</b>: -m

You can use this arg also to move multiple links/torrents contents to the same directory, so all links will be uploaded together as one task

/cmd link -m new folder (only one link inside new folder)
/cmd -i 10(number of links/files) -m folder name (all links contents in one folder)
/cmd -b -m folder name (reply to batch of message/file(each link on new line))

While using bulk you can also use this arg with different folder name along with the links in message or file batch
Example:
link1 -m folder1
link2 -m folder1
link3 -m folder2
link4 -m folder2
link5 -m folder3
link6
so link1 and link2 content will be uploaded from same folder which is folder1
link3 and link4 content will be uploaded from same folder also which is folder2
link5 will uploaded alone inside new folder named folder3
link6 will get uploaded normally alone
"""

# -t flag: custom thumbnail for this task.
thumb = """<b>Thumbnail for current task</b>: -t

/cmd link -t tg-message-link (doc or photo) or none (file without thumb)"""

# -sp flag: per-task split size for leeched files.
split_size = """<b>Split size for current task</b>: -sp

/cmd link -sp (500mb or 2gb or 4000000000)
Note: Only mb and gb are supported or write in bytes without unit!"""
|
| 64 |
+
|
| 65 |
+
upload = """<b>Upload Destination</b>: -up
|
| 66 |
+
|
| 67 |
+
/cmd link -up rcl/gdl (rcl: to select rclone config, remote & path | gdl: To select token.pickle, gdrive id) using buttons
|
| 68 |
+
You can directly add the upload path: -up remote:dir/subdir or -up Gdrive_id or -up id/username (telegram) or -up id/username|topic_id (telegram)
|
| 69 |
+
If DEFAULT_UPLOAD is `rc` then you can pass up: `gd` to upload using gdrive tools to GDRIVE_ID.
|
| 70 |
+
If DEFAULT_UPLOAD is `gd` then you can pass up: `rc` to upload to RCLONE_PATH.
|
| 71 |
+
|
| 72 |
+
If you want to add path or gdrive manually from your config/token (UPLOADED FROM USETTING) add mrcc: for rclone and mtp: before the path/gdrive_id without space.
|
| 73 |
+
/cmd link -up mrcc:main:dump or -up mtp:gdrive_id <strong>or you can simply edit upload using owner/user token/config from usetting without adding mtp: or mrcc: before the upload path/id</strong>
|
| 74 |
+
|
| 75 |
+
To add leech destination:
|
| 76 |
+
-up id/@username/pm
|
| 77 |
+
-up b:id/@username/pm (b: means leech by bot) (id or username of the chat or write pm means private message so bot will send the files in private to you)
|
| 78 |
+
when you should use b:(leech by bot)? When your default settings is leech by user and you want to leech by bot for specific task.
|
| 79 |
+
-up u:id/@username(u: means leech by user) This incase OWNER added USER_STRING_SESSION.
|
| 80 |
+
-up h:id/@username(hybrid leech) h: to upload files by bot and user based on file size.
|
| 81 |
+
-up id/@username|topic_id(leech in specific chat and topic) add | without space and write topic id after chat id or username.
|
| 82 |
+
|
| 83 |
+
In case you want to specify whether using token.pickle or service accounts you can add tp:gdrive_id (using token.pickle) or sa:gdrive_id (using service accounts) or mtp:gdrive_id (using token.pickle uploaded from usetting).
|
| 84 |
+
DEFAULT_UPLOAD doesn't affect on leech cmds.
|
| 85 |
+
"""
|
| 86 |
+
|
| 87 |
+
# Prefix a download link/id with tp:/sa:/mtp:/mrcc: to pick the credential source.
user_download = """<b>User Download</b>: link

/cmd tp:link to download using owner token.pickle incase service account enabled.
/cmd sa:link to download using service account incase service account disabled.
/cmd tp:gdrive_id to download using token.pickle and file_id incase service account enabled.
/cmd sa:gdrive_id to download using service account and file_id incase service account disabled.
/cmd mtp:gdrive_id or mtp:link to download using user token.pickle uploaded from usetting
/cmd mrcc:remote:path to download using user rclone config uploaded from usetting
you can simply edit upload using owner/user token/config from usetting without adding mtp: or mrcc: before the path/id"""

# -rcf flag: raw rclone flags, "|"-separated key[:value] pairs.
rcf = """<b>Rclone Flags</b>: -rcf

/cmd link|path|rcl -up path|rcl -rcf --buffer-size:8M|--drive-starred-only|key|key:value
This will override all other flags except --exclude
Check here all <a href='https://rclone.org/flags/'>RcloneFlags</a>."""

# -b flag: batch-download newline-separated links from a replied message/file.
bulk = """<b>Bulk Download</b>: -b

Bulk can be used only by replying to text message or text file contains links separated by new line.
Example:
link1 -n new name -up remote1:path1 -rcf |key:value|key:value
link2 -z -n new name -up remote2:path2
link3 -e -n new name -up remote2:path2
Reply to this example by this cmd -> /cmd -b(bulk)

Note: Any arg along with the cmd will be setted to all links
/cmd -b -up remote: -z -m folder name (all links contents in one zipped folder uploaded to one destination)
so you can't set different upload destinations along with link incase you have added -m along with cmd
You can set start and end of the links from the bulk like seed, with -b start:end or only end by -b :end or only start by -b start.
The default start is from zero(first link) to inf."""
|
| 117 |
+
|
| 118 |
+
# Rclone paths can be passed anywhere a link is accepted.
rlone_dl = """<b>Rclone Download</b>:

Treat rclone paths exactly like links
/cmd main:dump/ubuntu.iso or rcl(To select config, remote and path)
Users can add their own rclone from user settings
If you want to add path manually from your config add mrcc: before the path without space
/cmd mrcc:main:dump/ubuntu.iso
You can simply edit using owner/user config from usetting without adding mrcc: before the path"""

# -e / -z flags: extract and/or zip, optionally password protected.
extract_zip = """<b>Extract/Zip</b>: -e -z

/cmd link -e password (extract password protected)
/cmd link -z password (zip password protected)
/cmd link -z password -e (extract and zip password protected)
Note: When both extract and zip added with cmd it will extract first and then zip, so always extract first"""

# -j flag: join previously split files before extract/zip (usually with -m).
join = """<b>Join Splitted Files</b>: -j

This option will only work before extract and zip, so mostly it will be used with -m argument (samedir)
By Reply:
/cmd -i 3 -j -m folder name
/cmd -b -j -m folder name
if u have link(folder) have splitted files:
/cmd link -j"""

# Telegram message links accepted as download sources (public/private/super/range).
tg_links = """<b>TG Links</b>:

Treat links like any direct link
Some links need user access so you must add USER_SESSION_STRING for it.
Three types of links:
Public: https://t.me/channel_name/message_id
Private: tg://openmessage?user_id=xxxxxx&message_id=xxxxx
Super: https://t.me/c/channel_id/message_id
Range: https://t.me/channel_name/first_message_id-last_message_id
Range Example: tg://openmessage?user_id=xxxxxx&message_id=555-560 or https://t.me/channel_name/100-150
Note: Range link will work only by replying cmd to it"""

# -sv flag: generate a sample clip (sample-duration:part-duration).
sample_video = """<b>Sample Video</b>: -sv

Create sample video for one video or folder of videos.
/cmd -sv (it will take the default values which 60sec sample duration and part duration is 4sec).
You can control those values. Example: /cmd -sv 70:5(sample-duration:part-duration) or /cmd -sv :5 or /cmd -sv 70."""

# -ss flag: generate N screenshots per video (default 10).
screenshot = """<b>ScreenShots</b>: -ss

Create screenshots for one video or folder of videos.
/cmd -ss (it will take the default values which is 10 photos).
You can control this value. Example: /cmd -ss 6."""

# -d flag: seed torrents with optional ratio and time (minutes) limits.
seed = """<b>Bittorrent seed</b>: -d

/cmd link -d ratio:seed_time or by replying to file/link
To specify ratio and seed time add -d ratio:time.
Example: -d 0.7:10 (ratio and time) or -d 0.7 (only ratio) or -d :10 (only time) where time in minutes"""

# -z flag: zip the result, optionally password protected.
zip_arg = """<b>Zip</b>: -z password

/cmd link -z (zip)
/cmd link -z password (zip password protected)"""
|
| 177 |
+
|
| 178 |
+
# -s flag: show yt-dlp quality-selection buttons for this task.
qual = """<b>Quality Buttons</b>: -s

In case default quality added from yt-dlp options using format option and you need to select quality for specific link or links with multi links feature.
/cmd link -s"""

# -opt flag: pass raw yt-dlp API options as a dict literal.
yt_opt = """<b>Options</b>: -opt

/cmd link -opt {"format": "bv*+mergeall[vcodec=none]", "nocheckcertificate": True, "playliststart": 10, "fragment_retries": float("inf"), "matchtitle": "S13", "writesubtitles": True, "live_from_start": True, "postprocessor_args": {"ffmpeg": ["-threads", "4"]}, "wait_for_video": (5, 100), "download_ranges": [{"start_time": 0, "end_time": 10}]}

Check all yt-dlp api options from this <a href='https://github.com/yt-dlp/yt-dlp/blob/master/yt_dlp/YoutubeDL.py#L184'>FILE</a> or use this <a href='https://t.me/mltb_official_channel/177'>script</a> to convert cli arguments to api options."""

# -ca/-cv flags: convert audio/video formats, with "+" include and "-" exclude lists.
convert_media = """<b>Convert Media</b>: -ca -cv
/cmd link -ca mp3 -cv mp4 (convert all audios to mp3 and all videos to mp4)
/cmd link -ca mp3 (convert all audios to mp3)
/cmd link -cv mp4 (convert all videos to mp4)
/cmd link -ca mp3 + flac ogg (convert only flac and ogg audios to mp3)
/cmd link -cv mkv - webm flv (convert all videos to mp4 except webm and flv)"""

# -f/-fd/-fu flags: bypass the task queue for download and/or upload.
force_start = """<b>Force Start</b>: -f -fd -fu
/cmd link -f (force download and upload)
/cmd link -fd (force download only)
/cmd link -fu (force upload directly after download finish)"""

# Gdrive source syntax with tp:/sa:/mtp: credential prefixes.
gdrive = """<b>Gdrive</b>: link
If DEFAULT_UPLOAD is `rc` then you can pass up: `gd` to upload using gdrive tools to GDRIVE_ID.
/cmd gdriveLink or gdl or gdriveId -up gdl or gdriveId or gd
/cmd tp:gdriveLink or tp:gdriveId -up tp:gdriveId or gdl or gd (to use token.pickle if service account enabled)
/cmd sa:gdriveLink or sa:gdriveId -p sa:gdriveId or gdl or gd (to use service account if service account disabled)
/cmd mtp:gdriveLink or mtp:gdriveId -up mtp:gdriveId or gdl or gd(if you have added upload gdriveId from usetting) (to use user token.pickle that uploaded by usetting)
You can simply edit using owner/user token from usetting without adding mtp: before the id"""

# Rclone clone syntax, including the mrcc: user-config prefix.
rclone_cl = """<b>Rclone</b>: path
If DEFAULT_UPLOAD is `gd` then you can pass up: `rc` to upload to RCLONE_PATH.
/cmd rcl/rclone_path -up rcl/rclone_path/rc -rcf flagkey:flagvalue|flagkey|flagkey:flagvalue
/cmd rcl or rclone_path -up rclone_path or rc or rcl
/cmd mrcc:rclone_path -up rcl or rc(if you have add rclone path from usetting) (to use user config)
You can simply edit using owner/user config from usetting without adding mrcc: before the path"""

# -ns flag: regex-based filename substitution.
# Raw string (r"""...""") keeps the backslashes in the examples literal.
name_swap = r"""<b>Name Substitution</b>: -ns
/cmd link -ns script/code/s | mirror/leech | tea/ /s | clone | cpu/ | \[mltb\]/mltb | \\text\\/text/s
This will affect on all files. Format: wordToReplace/wordToReplaceWith/sensitiveCase
Word Subtitions. You can add pattern instead of normal text. Timeout: 60 sec
NOTE: You must add \ before any character, those are the characters: \^$.|?*+()[]{}-
1. script will get replaced by code with sensitive case
2. mirror will get replaced by leech
4. tea will get replaced by space with sensitive case
5. clone will get removed
6. cpu will get replaced by space
7. [mltb] will get replaced by mltb
8. \text\ will get replaced by text with sensitive case
"""

# -hl/-bt/-ut flags: which Telegram session uploads the leeched files.
transmission = """<b>Tg transmission</b>: -hl -ut -bt
/cmd link -hl (leech by user and bot session with respect to size) (Hybrid Leech)
/cmd link -bt (leech by bot session)
/cmd link -ut (leech by user)"""

# -tl flag: grid layout of the combined screenshot thumbnail.
thumbnail_layout = """Thumbnail Layout: -tl
/cmd link -tl 3x3 (widthxheight) 3 photos in row and 3 photos in column"""

# -doc/-med flags: leech as document vs media.
leech_as = """<b>Leech as</b>: -doc -med
/cmd link -doc (Leech as document)
/cmd link -med (Leech as media)"""
|
| 241 |
+
|
| 242 |
+
# -ff flag: lists of raw ffmpeg argument strings run on matching files before upload.
ffmpeg_cmds = """<b>FFmpeg Commands</b>: -ff
list of lists of ffmpeg commands. You can set multiple ffmpeg commands for all files before upload. Don't write ffmpeg at beginning, start directly with the arguments.
Notes:
1. Add <code>-del</code> to the list(s) which you want from the bot to delete the original files after command run complete!
3. To execute one of pre-added lists in bot like: ({"subtitle": ["-i mltb.mkv -c copy -c:s srt mltb.mkv"]}), you must use -ff subtitle (list key)
Examples: ["-i mltb.mkv -c copy -c:s srt mltb.mkv", "-i mltb.video -c copy -c:s srt mltb", "-i mltb.m4a -c:a libmp3lame -q:a 2 mltb.mp3", "-i mltb.audio -c:a libmp3lame -q:a 2 mltb.mp3", "-i mltb -map 0:a -c copy mltb.mka -map 0:s -c copy mltb.srt"]
Here I will explain how to use mltb.* which is reference to files you want to work on.
1. First cmd: the input is mltb.mkv so this cmd will work only on mkv videos and the output is mltb.mkv also so all outputs is mkv. -del will delete the original media after complete run of the cmd.
2. Second cmd: the input is mltb.video so this cmd will work on all videos and the output is only mltb so the extenstion is same as input files.
3. Third cmd: the input in mltb.m4a so this cmd will work only on m4a audios and the output is mltb.mp3 so the output extension is mp3.
4. Fourth cmd: the input is mltb.audio so this cmd will work on all audios and the output is mltb.mp3 so the output extension is mp3."""

# -meta flag: pipe-separated key=value metadata with dynamic {placeholders}.
# NOTE(review): the "(unknown)" bullet below looks like an extraction artifact of
# a "{filename}" placeholder -- confirm against the upstream repository.
metadata = """<b>Metadata</b>: -meta

Apply custom metadata to media files using pipe (|) separator.

<b>Format:</b> key=value|key2=value2|key3=value3

<b>Dynamic Variables:</b>
• <code>(unknown)</code> - Original filename
• <code>{basename}</code> - Filename without extension
• <code>{extension}</code> - File extension
• <code>{audiolang}</code> - Audio language (auto-detected or English)
• <code>{sublang}</code> - Subtitle language (auto-detected or none)
• <code>{year}</code> - Year extracted from filename

<b>Per-Stream Metadata:</b>
Set different metadata for audio/video/subtitle streams in User Settings > FFmpeg Settings:
• <b>Audio Metadata:</b> Applied to each audio stream
• <b>Video Metadata:</b> Applied to video streams
• <b>Subtitle Metadata:</b> Applied to subtitle streams

<b>Examples:</b>
<code>/mirror link -meta title=My Movie|artist={audiolang} Version</code>
<code>/yt link -meta album={basename}|year={year}|genre=Action</code>

<b>Escape Pipes:</b> Use <code>\\|</code> to include literal pipe in values:
<code>title=Movie \\| Director's Cut</code>

<b>User Settings Example:</b>
• Audio Metadata: <code>language={audiolang}|title=Audio Track</code>
• Video Metadata: <code>title={basename}|year={year}</code>
• Subtitle Metadata: <code>language={sublang}|title=Subtitles</code>"""
|
| 285 |
+
|
| 286 |
+
# Help-menu sections for the ytdl commands; "main" is the landing text.
YT_HELP_DICT = {
    "main": yt,
    "New-Name": f"{new_name}\nNote: Don't add file extension",
    "Zip": zip_arg,
    "Quality": qual,
    "Options": yt_opt,
    "Multi-Link": multi_link,
    "Same-Directory": same_dir,
    "Thumb": thumb,
    "Split-Size": split_size,
    "Upload-Destination": upload,
    "Rclone-Flags": rcf,
    "Bulk": bulk,
    "Sample-Video": sample_video,
    "Screenshot": screenshot,
    "Convert-Media": convert_media,
    "Force-Start": force_start,
    "Name-Swap": name_swap,
    "TG-Transmission": transmission,
    "Thumb-Layout": thumbnail_layout,
    "Leech-Type": leech_as,
    "FFmpeg-Cmds": ffmpeg_cmds,
    "Metadata": metadata,
}

# Help-menu sections for the mirror/leech commands.
MIRROR_HELP_DICT = {
    "main": mirror,
    "New-Name": new_name,
    "DL-Auth": "<b>Direct link authorization</b>: -au -ap\n\n/cmd link -au username -ap password",
    "Headers": "<b>Direct link custom headers</b>: -h\n\n/cmd link -h key: value key1: value1",
    "Extract/Zip": extract_zip,
    "Select-Files": "<b>Bittorrent/JDownloader/Sabnzbd File Selection</b>: -s\n\n/cmd link -s or by replying to file/link",
    "Torrent-Seed": seed,
    "Multi-Link": multi_link,
    "Same-Directory": same_dir,
    "Thumb": thumb,
    "Split-Size": split_size,
    "Upload-Destination": upload,
    "Rclone-Flags": rcf,
    "Bulk": bulk,
    "Join": join,
    "Rclone-DL": rlone_dl,
    "Tg-Links": tg_links,
    "Sample-Video": sample_video,
    "Screenshot": screenshot,
    "Convert-Media": convert_media,
    "Force-Start": force_start,
    "User-Download": user_download,
    "Name-Swap": name_swap,
    "TG-Transmission": transmission,
    "Thumb-Layout": thumbnail_layout,
    "Leech-Type": leech_as,
    "FFmpeg-Cmds": ffmpeg_cmds,
    "Metadata": metadata,
}

# Help-menu sections for the clone command.
CLONE_HELP_DICT = {
    "main": clone,
    "Multi-Link": multi_link,
    "Bulk": bulk,
    "Gdrive": gdrive,
    "Rclone": rclone_cl,
}
|
| 349 |
+
|
| 350 |
+
# /rss feed-subscription syntax and the "-inf/-exf" filter mini-language.
RSS_HELP_MESSAGE = """
Use this format to add feed url:
Title1 link (required)
Title2 link -c cmd -inf xx -exf xx
Title3 link -c cmd -d ratio:time -z password

-c command -up mrcc:remote:path/subdir -rcf --buffer-size:8M|key|key:value
-inf For included words filter.
-exf For excluded words filter.
-stv true or false (sensitive filter)

Example: Title https://www.rss-url.com -inf 1080 or 720 or 144p|mkv or mp4|hevc -exf flv or web|xxx
This filter will parse links that its titles contain `(1080 or 720 or 144p) and (mkv or mp4) and hevc` and doesn't contain (flv or web) and xxx words. You can add whatever you want.

Another example: -inf 1080 or 720p|.web. or .webrip.|hvec or x264. This will parse titles that contain ( 1080 or 720p) and (.web. or .webrip.) and (hvec or x264). I have added space before and after 1080 to avoid wrong matching. If this `10805695` number in title it will match 1080 if added 1080 without spaces after it.

Filter Notes:
1. | means and.
2. Add `or` between similar keys, you can add it between qualities or between extensions, so don't add filter like this f: 1080|mp4 or 720|web because this will parse 1080 and (mp4 or 720) and web ... not (1080 and mp4) or (720 and web).
3. You can add `or` and `|` as much as you want.
4. Take a look at the title if it has a static special character after or before the qualities or extensions or whatever and use them in the filter to avoid wrong match.
Timeout: 60 sec.
"""

# Shown when a link needs a password appended with the "::" separator.
PASSWORD_ERROR_MESSAGE = """
<b>This link requires a password!</b>
- Insert <b>::</b> after the link and write the password after the sign.

<b>Example:</b> link::my password
"""
|
| 380 |
+
|
| 381 |
+
|
| 382 |
+
def get_bot_commands():
    """Return the {command_name: description} mapping shown in bot menus.

    Starts from the static command table and appends entries contributed by
    enabled plugins (currently only the speedtest plugin maps to a command).
    """
    from ...core.plugin_manager import get_plugin_manager

    static_commands = {
        "Mirror": "[link/file] Mirror to Upload Destination",
        "QbMirror": "[magnet/torrent] Mirror to Upload Destination using qbit",
        "Ytdl": "[link] Mirror YouTube, m3u8, Social Media and yt-dlp supported urls",
        "UpHoster": "[link/file] Upload to DDL Servers",
        "Leech": "[link/file] Leech files to Upload to Telegram",
        "QbLeech": "[magnet/torrent] Leech files to Upload to Telegram using qbit",
        "YtdlLeech": "[link] Leech YouTube, m3u8, Social Media and yt-dlp supported urls",
        "Clone": "[link] Clone files/folders to GDrive",
        "UserSet": "User personal settings",
        "ForceStart": "[gid/reply] Force start from queued task",
        "Count": "[link] Count no. of files/folders in GDrive",
        "List": "[query] Search any Text which is available in GDrive",
        "Search": "[query] Search torrents via Qbit Plugins",
        "MediaInfo": "[reply/link] Get MediaInfo of the Target Media",
        "Select": "[gid/reply] Select files for NZB, Aria2, Qbit Tasks",
        "Ping": "Ping Bot to test Response Speed",
        "Status": "[id/me] Tasks Status of Bot",
        "Stats": "Bot, OS, Repo & System full Statistics",
        "Rss": "User RSS Management Settings",
        "IMDB": "[query] or ttxxxxxx Get IMDB info",
        "CancelAll": "Cancel all Tasks on the Bot",
        "Help": "Detailed help usage of the WZ Bot",
        "BotSet": "[SUDO] Bot Management Settings",
        "Log": "[SUDO] Get Bot Logs for Internal Working",
        "Restart": "[SUDO] Reboot bot",
        "RestartSessions": "[SUDO] Reboot User Sessions",
    }

    commands = dict(static_commands)

    plugin_manager = get_plugin_manager()
    if plugin_manager:
        # A single scan is enough: "speedtest" is the only plugin command
        # that contributes a menu entry today.
        speedtest_enabled = any(
            plugin_info.enabled
            and plugin_info.commands
            and "speedtest" in plugin_info.commands
            for plugin_info in plugin_manager.list_plugins()
        )
        if speedtest_enabled:
            commands["SpeedTest"] = "Check Bot Speed using Speedtest.com"

    return commands
|
| 425 |
+
|
| 426 |
+
|
| 427 |
+
# Snapshot of the command table taken once at import time (plugins already loaded).
BOT_COMMANDS = get_bot_commands()
|
| 428 |
+
|
| 429 |
+
|
| 430 |
+
def get_help_string():
    """Build the plain-text /help summary listing every registered command.

    Reads the command registry from BotCommands, resolves each key's
    ``<Key>Command`` attribute (a string or a list of aliases) into a
    "/cmd or /alias" prefix, and appends a one-line description for every
    key we know about. Unknown keys are silently skipped; SpeedTest is only
    listed when its plugin registered the command in BOT_COMMANDS.

    Returns:
        str: newline-joined help text.
    """
    from ..telegram_helper.bot_commands import BotCommands

    # Text appended directly after the "/cmd" prefix for each known key.
    # (Replaces the former ~35-branch if/elif chain; output is unchanged
    # apart from the "detalis" -> "details" typo fix in the header line.)
    suffixes = {
        "Mirror": ": Start mirroring to cloud.",
        "QbMirror": ": Start Mirroring to cloud using qBittorrent.",
        "JdMirror": ": Start Mirroring to cloud using JDownloader.",
        "NzbMirror": ": Start Mirroring to cloud using Sabnzbd.",
        "Ytdl": ": Mirror yt-dlp supported link.",
        "UpHoster": ": Upload to DDL Servers.",
        "Leech": ": Start leeching to Telegram.",
        "QbLeech": ": Start leeching using qBittorrent.",
        "JdLeech": ": Start leeching using JDownloader.",
        "NzbLeech": ": Start leeching using Sabnzbd.",
        "YtdlLeech": ": Leech yt-dlp supported link.",
        "Clone": " [drive_url]: Copy file/folder to Google Drive.",
        "Count": " [drive_url]: Count file/folder of Google Drive.",
        "Delete": " [drive_url]: Delete file/folder from Google Drive (Only Owner & Sudo).",
        "UserSet": " [query]: Users settings.",
        "BotSet": " [query]: Bot settings.",
        "Select": ": Select files from torrents or nzb by gid or reply.",
        "CancelTask": " [gid]: Cancel task by gid or reply.",
        "ForceStart": " [gid]: Force start task by gid or reply.",
        "CancelAll": " [query]: Cancel all [status] tasks.",
        "List": " [query]: Search in Google Drive(s).",
        "Search": " [query]: Search for torrents with API.",
        "MediaInfo": " [query]: Get media info.",
        "Status": ": Shows a status of all the downloads.",
        "Stats": ": Show stats of the machine where the bot is hosted in.",
        "Ping": ": Check how long it takes to Ping the Bot (Only Owner & Sudo).",
        "Authorize": ": Authorize a chat or a user to use the bot (Only Owner & Sudo).",
        "UnAuthorize": ": Unauthorize a chat or a user to use the bot (Only Owner & Sudo).",
        "Users": ": show users settings (Only Owner & Sudo).",
        "AddSudo": ": Add sudo user (Only Owner).",
        "RmSudo": ": Remove sudo users (Only Owner).",
        "Restart": ": Restart and update the bot (Only Owner & Sudo).",
        "Log": ": Get a log file of the bot. Handy for getting crash reports (Only Owner & Sudo).",
        "Shell": ": Run shell commands (Only Owner).",
        "AExec": ": Exec async functions (Only Owner).",
        "Exec": ": Exec sync functions (Only Owner).",
    }

    help_lines = ["NOTE: Try each command without any argument to see more details."]

    commands = BotCommands.get_commands()

    for key in commands:
        cmd_attr = getattr(BotCommands, f"{key}Command", None)
        if not cmd_attr:
            continue

        if isinstance(cmd_attr, list):
            cmd_str = f"/{' or /'.join(cmd_attr)}"
        else:
            cmd_str = f"/{cmd_attr}"

        if key == "SpeedTest":
            # Only advertise speedtest when the plugin registered it.
            if key in BOT_COMMANDS:
                help_lines.append(f"{cmd_str}: Check Bot Speed using Speedtest.com")
        elif key == "ClearLocals":
            # Description interpolates other command names, so it can't live
            # in the static suffix table.
            help_lines.append(
                f"/{BotCommands.ClearLocalsCommand}: Clear {BotCommands.AExecCommand} or {BotCommands.ExecCommand} locals (Only Owner)."
            )
        elif key == "Rss":
            help_lines.append(f"/{BotCommands.RssCommand}: RSS Menu.")
        elif (suffix := suffixes.get(key)) is not None:
            help_lines.append(f"{cmd_str}{suffix}")

    return "\n".join(help_lines)
|
| 549 |
+
|
| 550 |
+
|
| 551 |
+
# Pre-rendered /help text, built once at import time.
help_string = get_help_string()
|
bot/helper/ext_utils/hyperdl_utils.py
ADDED
|
@@ -0,0 +1,509 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from asyncio import (
|
| 2 |
+
CancelledError,
|
| 3 |
+
create_task,
|
| 4 |
+
gather,
|
| 5 |
+
sleep,
|
| 6 |
+
wait_for,
|
| 7 |
+
TimeoutError as AsyncTimeoutError,
|
| 8 |
+
Event,
|
| 9 |
+
)
|
| 10 |
+
from datetime import datetime
|
| 11 |
+
from math import ceil, floor
|
| 12 |
+
from mimetypes import guess_extension
|
| 13 |
+
from os import path as ospath
|
| 14 |
+
from pathlib import Path
|
| 15 |
+
from re import sub
|
| 16 |
+
from sys import argv
|
| 17 |
+
from time import time
|
| 18 |
+
|
| 19 |
+
from aiofiles import open as aiopen
|
| 20 |
+
from aiofiles.os import makedirs, remove
|
| 21 |
+
from aioshutil import move
|
| 22 |
+
from pyrogram import StopTransmission, raw, utils
|
| 23 |
+
from pyrogram.errors import AuthBytesInvalid, FloodWait
|
| 24 |
+
from pyrogram.file_id import PHOTO_TYPES, FileId, FileType, ThumbnailSource
|
| 25 |
+
from pyrogram.session import Auth, Session
|
| 26 |
+
from pyrogram.session.internals import MsgId
|
| 27 |
+
|
| 28 |
+
from ... import LOGGER
|
| 29 |
+
from ...core.config_manager import Config
|
| 30 |
+
from ...core.tg_client import TgClient
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class HyperTGDownload:
    """Multi-client, multi-part Telegram media downloader.

    Splits a file into byte ranges and downloads the ranges concurrently
    through the least-loaded helper bots (``TgClient.helper_bots``), then
    stitches the part files back into a single file. Decoded file references
    and per-datacenter media sessions are cached to avoid repeated lookups
    and auth handshakes.
    """

    def __init__(self):
        # Shared pools of helper clients and their workload counters
        # (mutated here to implement least-loaded scheduling).
        self.clients = TgClient.helper_bots
        self.work_loads = TgClient.helper_loads
        self.message = None  # message that holds the media being downloaded
        self.dump_chat = None  # chat id the media is fetched from
        self.download_dir = "downloads/"  # default target directory
        self.directory = None
        # Number of concurrent parts; with no explicit config, at least 8.
        self.num_parts = Config.HYPER_THREADS or max(8, len(self.clients))
        self.cache_file_ref = {}  # client index -> decoded FileId
        self.cache_last_access = {}  # client index -> last access time()
        self.cache_max_size = 100  # LRU-style cap for cache_file_ref
        self._processed_bytes = 0  # running byte count for progress reporting
        self.file_size = 0
        self.chunk_size = 1024 * 1024  # 1 MiB per upload.GetFile request
        self.file_name = ""
        self._cancel_event = Event()  # set to abort all in-flight parts
        self.session_pool = {}  # (client index, dc_id) -> media Session
        # Background task that expires stale file-ref cache entries.
        create_task(self._clean_cache())

    @staticmethod
    async def get_media_type(message):
        """Return the first downloadable media attribute found on *message*.

        Raises:
            ValueError: if the message carries none of the known media types.
        """
        media_types = (
            "audio",
            "document",
            "photo",
            "sticker",
            "animation",
            "video",
            "voice",
            "video_note",
            "new_chat_photo",
        )
        for attr in media_types:
            if media := getattr(message, attr, None):
                return media
        raise ValueError("This message doesn't contain any downloadable media")

    def _update_cache(self, index, file_ref):
        """Store *file_ref* for client *index*, evicting the LRU entry if full."""
        self.cache_file_ref[index] = file_ref
        self.cache_last_access[index] = time()

        if len(self.cache_file_ref) > self.cache_max_size:
            # Evict the entry with the oldest access timestamp.
            oldest = sorted(self.cache_last_access.items(), key=lambda x: x[1])[0][0]
            del self.cache_file_ref[oldest]
            del self.cache_last_access[oldest]

    async def get_specific_file_ref(self, mid, client, max_retries=3):
        """Fetch message *mid* via *client* and decode its FileId.

        Retries with linear backoff; raises ValueError after exhausting
        retries (typically a permissions problem or a deleted message).
        """
        retries = 0
        last_error = None

        while retries < max_retries:
            try:
                media = await client.get_messages(self.dump_chat, mid)
                return FileId.decode(
                    getattr(await self.get_media_type(media), "file_id", "")
                )
            except Exception as e:
                last_error = e
                retries += 1
                await sleep(1 * retries)  # linear backoff: 1s, 2s, 3s...

        LOGGER.error(
            f"Failed to get message {mid} from {self.dump_chat} with Client {client.me.username}"
        )
        raise ValueError(
            f"Bot needs Admin access in Chat or message may be deleted. Error: {last_error}"
        )

    async def get_file_id(self, client, index) -> FileId:
        """Return the cached FileId for client *index*, fetching it on a miss."""
        if index not in self.cache_file_ref:
            file_ref = await self.get_specific_file_ref(self.message.id, client)
            self._update_cache(index, file_ref)
        else:
            self.cache_last_access[index] = time()
        return self.cache_file_ref[index]

    async def _clean_cache(self):
        """Every 15 minutes, drop file-ref cache entries idle for 45+ minutes."""
        while True:
            await sleep(15 * 60)
            current_time = time()
            expired_keys = [
                k
                for k, v in self.cache_last_access.items()
                if current_time - v > 45 * 60
            ]

            for key in expired_keys:
                if key in self.cache_file_ref:
                    del self.cache_file_ref[key]
                if key in self.cache_last_access:
                    del self.cache_last_access[key]

    async def generate_media_session(self, client, file_id, index, max_retries=3):
        """Return a started media Session for (client *index*, file's DC).

        Sessions are pooled per (index, dc_id). When the file lives on a
        different DC than the client, a fresh auth key is created and the
        client's authorization is exported/imported into it (up to 6 tries
        against AuthBytesInvalid).
        """
        session_key = (index, file_id.dc_id)

        if session_key in self.session_pool:
            return self.session_pool[session_key]

        retries = 0
        while retries < max_retries:
            try:
                if file_id.dc_id != await client.storage.dc_id():
                    # Foreign DC: build a new auth key for that DC.
                    media_session = Session(
                        client,
                        file_id.dc_id,
                        await Auth(
                            client, file_id.dc_id, await client.storage.test_mode()
                        ).create(),
                        await client.storage.test_mode(),
                        is_media=True,
                    )
                    await media_session.start()

                    for _ in range(6):
                        exported_auth = await client.invoke(
                            raw.functions.auth.ExportAuthorization(dc_id=file_id.dc_id)
                        )

                        try:
                            await media_session.invoke(
                                raw.functions.auth.ImportAuthorization(
                                    id=exported_auth.id, bytes=exported_auth.bytes
                                )
                            )
                            break
                        except AuthBytesInvalid:
                            await sleep(1)
                    else:
                        # All import attempts failed: tear the session down.
                        await media_session.stop()
                        raise AuthBytesInvalid
                else:
                    # Same DC: reuse the client's existing auth key.
                    media_session = Session(
                        client,
                        file_id.dc_id,
                        await client.storage.auth_key(),
                        await client.storage.test_mode(),
                        is_media=True,
                    )
                    await media_session.start()

                self.session_pool[session_key] = media_session
                return media_session

            except Exception:
                retries += 1
                await sleep(1)

        raise ValueError(f"Failed to create media session after {max_retries} attempts")

    @staticmethod
    async def get_location(file_id: FileId):
        """Build the raw input file location for *file_id*.

        Chat photos need a peer (user, basic chat, or channel depending on
        chat_id sign and access hash); photos and documents use their media
        id / access hash directly.
        """
        file_type = file_id.file_type

        if file_type == FileType.CHAT_PHOTO:
            if file_id.chat_id > 0:
                peer = raw.types.InputPeerUser(
                    user_id=file_id.chat_id, access_hash=file_id.chat_access_hash
                )
            else:
                # Negative chat_id: basic group when there is no access hash,
                # otherwise a channel/supergroup.
                peer = (
                    raw.types.InputPeerChat(chat_id=-file_id.chat_id)
                    if file_id.chat_access_hash == 0
                    else raw.types.InputPeerChannel(
                        channel_id=utils.get_channel_id(file_id.chat_id),
                        access_hash=file_id.chat_access_hash,
                    )
                )
            return raw.types.InputPeerPhotoFileLocation(
                peer=peer,
                volume_id=file_id.volume_id,
                local_id=file_id.local_id,
                big=file_id.thumbnail_source == ThumbnailSource.CHAT_PHOTO_BIG,
            )
        elif file_type == FileType.PHOTO:
            return raw.types.InputPhotoFileLocation(
                id=file_id.media_id,
                access_hash=file_id.access_hash,
                file_reference=file_id.file_reference,
                thumb_size=file_id.thumbnail_size,
            )
        else:
            return raw.types.InputDocumentFileLocation(
                id=file_id.media_id,
                access_hash=file_id.access_hash,
                file_reference=file_id.file_reference,
                thumb_size=file_id.thumbnail_size,
            )

    async def get_file(
        self,
        offset_bytes: int,
        first_part_cut: int,
        last_part_cut: int,
        part_count: int,
        max_retries=5,
    ):
        """Async generator yielding the bytes of one download range.

        Picks the least-loaded client, requests chunk_size pieces starting
        at *offset_bytes*, and trims the first/last chunk by
        *first_part_cut* / *last_part_cut* so that only the requested byte
        range is emitted. FloodWait/timeouts retry the current chunk;
        harder failures restart the whole range up to *max_retries* times.
        """
        # Least-loaded client selection; workload is restored in `finally`.
        index = min(self.work_loads, key=self.work_loads.get)
        client = self.clients[index]

        self.work_loads[index] += 1
        current_retry = 0

        try:
            while current_retry < max_retries:
                try:
                    if self._cancel_event.is_set():
                        raise CancelledError("Download cancelled")

                    file_id = await self.get_file_id(client, index)
                    media_session, location = await gather(
                        self.generate_media_session(client, file_id, index),
                        self.get_location(file_id),
                    )

                    current_part = 1
                    current_offset = offset_bytes

                    while current_part <= part_count:
                        if self._cancel_event.is_set():
                            raise CancelledError("Download cancelled")

                        try:
                            r = await wait_for(
                                media_session.invoke(
                                    raw.functions.upload.GetFile(
                                        location=location,
                                        offset=current_offset,
                                        limit=self.chunk_size,
                                    ),
                                ),
                                timeout=30,
                            )

                            if isinstance(r, raw.types.upload.File):
                                chunk = r.bytes

                                if not chunk:
                                    # Server returned no data: end of file.
                                    break

                                # Trim boundary chunks so only the requested
                                # byte range is yielded.
                                if part_count == 1:
                                    yield chunk[first_part_cut:last_part_cut]
                                elif current_part == 1:
                                    yield chunk[first_part_cut:]
                                elif current_part == part_count:
                                    yield chunk[:last_part_cut]
                                else:
                                    yield chunk

                                current_part += 1
                                current_offset += self.chunk_size
                                self._processed_bytes += len(chunk)
                            else:
                                raise ValueError(f"Unexpected response: {r}")

                        except (FloodWait, AsyncTimeoutError, ConnectionError) as e:
                            # Transient errors: wait and retry the same chunk.
                            if isinstance(e, FloodWait):
                                await sleep(e.value + 1)
                            else:
                                await sleep(1)
                            continue

                    if current_part <= part_count:
                        # Loop ended early (empty chunk) before all parts came in.
                        raise ValueError(
                            f"Incomplete download: got {current_part-1} of {part_count} parts"
                        )
                    break

                except (AsyncTimeoutError, ConnectionError, AttributeError):
                    current_retry += 1
                    if current_retry >= max_retries:
                        raise
                    await sleep(current_retry * 2)

        finally:
            self.work_loads[index] -= 1

    async def progress_callback(self, progress, progress_args):
        """Invoke *progress* roughly once per second until cancellation."""
        if not progress:
            return

        while not self._cancel_event.is_set():
            try:
                if callable(progress):
                    await progress(
                        self._processed_bytes, self.file_size, *progress_args
                    )
                await sleep(1)
            except (CancelledError, StopTransmission):
                break
            except Exception:
                # Progress reporting must never kill the download.
                await sleep(1)

    async def single_part(self, start, end, part_index, max_retries=3):
        """Download bytes [start, end] into a numbered .temp part file.

        Returns:
            (part_index, part_file_path) on success.
        """
        until_bytes, from_bytes = min(end, self.file_size - 1), start

        # Align the request to chunk boundaries and compute trim offsets.
        offset = from_bytes - (from_bytes % self.chunk_size)
        first_part_cut = from_bytes - offset
        last_part_cut = until_bytes % self.chunk_size + 1

        part_count = ceil(until_bytes / self.chunk_size) - floor(
            offset / self.chunk_size
        )
        part_file_path = ospath.join(
            self.directory, f"{self.file_name}.temp.{part_index:02d}"
        )

        for attempt in range(max_retries):
            try:
                async with aiopen(part_file_path, "wb") as f:
                    async for chunk in self.get_file(
                        offset, first_part_cut, last_part_cut, part_count
                    ):
                        if self._cancel_event.is_set():
                            raise CancelledError("Download cancelled")
                        await f.write(chunk)
                return part_index, part_file_path
            except (AsyncTimeoutError, ConnectionError):
                if attempt == max_retries - 1:
                    raise
                await sleep((attempt + 1) * 2)
        # NOTE(review): only reachable if the retry loop exits without
        # returning or raising; resets the shared progress counter — confirm
        # this is intentional.
        self._processed_bytes = 0

    async def handle_download(self, progress, progress_args):
        """Run the parallel download and assemble the final file.

        Returns:
            The final file path on success, None on cancellation or error.
        """
        self._cancel_event.clear()

        await makedirs(self.directory, exist_ok=True)
        temp_file_path = (
            ospath.abspath(
                sub("\\\\", "/", ospath.join(self.directory, self.file_name))
            )
            + ".temp"
        )

        # One part per ~10 MiB, capped by the configured parallelism; small
        # files are downloaded in a single part.
        num_parts = min(self.num_parts, max(1, self.file_size // (10 * 1024 * 1024)))

        if self.file_size < 10 * 1024 * 1024:
            num_parts = 1

        part_size = self.file_size // num_parts if num_parts > 0 else self.file_size
        ranges = [
            (i * part_size, min((i + 1) * part_size - 1, self.file_size - 1))
            for i in range(num_parts)
        ]

        tasks = []
        prog_task = None

        try:
            for i, (start, end) in enumerate(ranges):
                tasks.append(create_task(self.single_part(start, end, i)))

            if progress:
                prog_task = create_task(self.progress_callback(progress, progress_args))

            results = await gather(*tasks)

            # Concatenate part files in index order into the temp file.
            async with aiopen(temp_file_path, "wb") as temp_file:
                for _, part_file_path in sorted(results, key=lambda x: x[0]):
                    try:
                        async with aiopen(part_file_path, "rb") as part_file:
                            while True:
                                chunk = await part_file.read(8 * 1024 * 1024)
                                if not chunk:
                                    break
                                await temp_file.write(chunk)
                        await remove(part_file_path)
                    except Exception as e:
                        LOGGER.error(
                            f"Error processing part file {part_file_path}: {e}"
                        )
                        raise

            if prog_task and not prog_task.done():
                prog_task.cancel()

            # Drop the ".temp" suffix to produce the final path.
            file_path = ospath.splitext(temp_file_path)[0]
            await move(temp_file_path, file_path)

            return file_path

        except FloodWait as fw:
            # FloodWait is propagated so the caller can honor the wait.
            raise fw
        except (CancelledError, StopTransmission):
            return None
        except Exception as e:
            LOGGER.error(f"HyperDL Error: {e}")
            return None
        finally:
            # Stop progress reporting and any still-running part tasks, and
            # clean up leftover part files.
            self._cancel_event.set()
            if prog_task and not prog_task.done():
                prog_task.cancel()

            for task in tasks:
                if not task.done():
                    task.cancel()

            for i in range(len(ranges)):
                part_path = ospath.join(
                    self.directory, f"{self.file_name}.temp.{i:02d}"
                )
                try:
                    if ospath.exists(part_path):
                        await remove(part_path)
                except Exception:
                    pass

    @staticmethod
    async def get_extension(file_type, mime_type):
        """Pick a file extension from the file type and/or mime type."""
        if file_type in PHOTO_TYPES:
            return ".jpg"

        if mime_type:
            extension = guess_extension(mime_type)
            if extension:
                return extension

        # Fallbacks by Telegram file type when the mime type is unhelpful.
        if file_type == FileType.VOICE:
            return ".ogg"
        elif file_type in (FileType.VIDEO, FileType.ANIMATION, FileType.VIDEO_NOTE):
            return ".mp4"
        elif file_type == FileType.DOCUMENT:
            return ".bin"
        elif file_type == FileType.STICKER:
            return ".webp"
        elif file_type == FileType.AUDIO:
            return ".mp3"
        else:
            return ".bin"

    async def download_media(
        self,
        message,
        file_name="downloads/",
        progress=None,
        progress_args=(),
        dump_chat=None,
    ):
        """Public entry point: download *message*'s media to *file_name*.

        If *dump_chat* is given the message is first copied there (the
        helper bots must be able to read that chat). Returns the downloaded
        file path, or None when handle_download() swallows an error.
        """
        try:
            if dump_chat:
                self.message = await TgClient.bot.copy_message(
                    chat_id=dump_chat,
                    from_chat_id=message.chat.id,
                    message_id=message.id,
                    disable_notification=True,
                )

            self.dump_chat = dump_chat or message.chat.id
            self.message = self.message or message
            media = await self.get_media_type(self.message)

            file_id_str = media if isinstance(media, str) else media.file_id
            file_id_obj = FileId.decode(file_id_str)

            file_type = file_id_obj.file_type
            media_file_name = getattr(media, "file_name", "")
            self.file_size = getattr(media, "file_size", 0)
            mime_type = getattr(media, "mime_type", "image/jpeg")
            date = getattr(media, "date", None)

            self.directory, self.file_name = ospath.split(file_name)
            self.file_name = self.file_name or media_file_name or ""

            # Relative names are resolved against the script's directory.
            if not ospath.isabs(self.file_name):
                self.directory = Path(argv[0]).parent / (
                    self.directory or self.download_dir
                )

            if not self.file_name:
                # Synthesize "<type>_<timestamp>_<msgid><ext>" when the media
                # carries no file name.
                extension = await self.get_extension(file_type, mime_type)
                self.file_name = f"{FileType(file_id_obj.file_type).name.lower()}_{(date or datetime.now()).strftime('%Y-%m-%d_%H-%M-%S')}_{MsgId()}{extension}"

            return await self.handle_download(progress, progress_args)

        except Exception as e:
            LOGGER.error(f"Download media error: {e}")
            raise
|
bot/helper/ext_utils/links_utils.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from re import match as re_match
|
| 2 |
+
from base64 import urlsafe_b64decode, urlsafe_b64encode
|
| 3 |
+
|
| 4 |
+
|
| 5 |
+
def is_magnet(url: str):
    """Return True when *url* is a BitTorrent magnet URI (btih/btmh hash)."""
    pattern = r"^magnet:\?.*xt=urn:(btih|btmh):([a-zA-Z0-9]{32,40}|[a-z2-7]{32}).*"
    return re_match(pattern, url) is not None
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
def is_url(url: str):
    """Return True when *url* looks like a URL (scheme optional, dotted host required)."""
    pattern = (
        r"^(?!\/)(rtmps?:\/\/|mms:\/\/|rtsp:\/\/|https?:\/\/|ftp:\/\/)?"
        r"([^\/:]+:[^\/@]+@)?(www\.)?(?=[^\/:\s]+\.[^\/:\s]+)"
        r"([^\/:\s]+\.[^\/:\s]+)(:\d+)?(\/[^#\s]*[\s\S]*)?(\?[^#\s]*)?(#.*)?$"
    )
    return re_match(pattern, url) is not None
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
def is_gdrive_link(url: str):
    """Return True when *url* points at a Google Drive host."""
    hosts = ("drive.google.com", "drive.usercontent.google.com")
    return any(host in url for host in hosts)
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
def is_telegram_link(url: str):
    """Return True for t.me links or tg:// openmessage deep links."""
    prefixes = ("https://t.me/", "tg://openmessage?user_id=")
    return any(url.startswith(prefix) for prefix in prefixes)
|
| 28 |
+
|
| 29 |
+
|
| 30 |
+
def is_mega_link(url: str):
    """Return True when *url* refers to a MEGA host."""
    return any(domain in url for domain in ("mega.nz", "mega.co.nz"))
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def get_mega_link_type(url):
    """Classify a MEGA link as "folder" or "file"."""
    if "folder" in url or "/#F!" in url:
        return "folder"
    return "file"
|
| 36 |
+
|
| 37 |
+
|
| 38 |
+
def is_share_link(url: str):
    """Return True for supported index/share hosts (gdtot, filepress, etc.)."""
    pattern = (
        r"https?:\/\/.+\.gdtot\.\S+"
        r"|https?:\/\/(filepress|filebee|appdrive|gdflix)\.\S+"
    )
    return re_match(pattern, url) is not None
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
def is_rclone_path(path: str):
    """Return True when *path* looks like an rclone remote path (or the literal "rcl")."""
    pattern = (
        r"^(mrcc:)?(?!(magnet:|mtp:|sa:|tp:))(?![- ])"
        r"[a-zA-Z0-9_\. -]+(?<! ):(?!.*\/\/).*$"
        r"|^rcl$"
    )
    return re_match(pattern, path) is not None
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
def is_gdrive_id(id_: str):
    """Return True when *id_* is a Google Drive file/folder id, "root", or "gdl"."""
    pattern = (
        r"^(tp:|sa:|mtp:)?(?:[a-zA-Z0-9-_]{33}|[a-zA-Z0-9_-]{19})$"
        r"|^gdl$"
        r"|^(tp:|mtp:)?root$"
    )
    return re_match(pattern, id_) is not None
|
| 63 |
+
|
| 64 |
+
|
| 65 |
+
def encode_slink(string):
    """Encode *string* as URL-safe base64 with the '=' padding removed."""
    encoded = urlsafe_b64encode(string.encode("ascii")).decode("ascii")
    return encoded.strip("=")
|
| 67 |
+
|
| 68 |
+
|
| 69 |
+
def decode_slink(b64_str):
    """Decode a padding-stripped URL-safe base64 string back to ASCII text."""
    stripped = b64_str.strip("=")
    # Restore the padding to a multiple of four characters before decoding.
    padded = stripped + "=" * (-len(stripped) % 4)
    return urlsafe_b64decode(padded.encode("ascii")).decode("ascii")
|
bot/helper/ext_utils/media_utils.py
ADDED
|
@@ -0,0 +1,852 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import re
|
| 2 |
+
from contextlib import suppress
|
| 3 |
+
from PIL import Image
|
| 4 |
+
from hashlib import md5
|
| 5 |
+
from aiofiles.os import remove, path as aiopath, makedirs
|
| 6 |
+
import json
|
| 7 |
+
from asyncio import (
|
| 8 |
+
create_subprocess_exec,
|
| 9 |
+
gather,
|
| 10 |
+
wait_for,
|
| 11 |
+
sleep,
|
| 12 |
+
)
|
| 13 |
+
from asyncio.subprocess import PIPE
|
| 14 |
+
from os import path as ospath
|
| 15 |
+
from re import search as re_search, escape
|
| 16 |
+
from time import time
|
| 17 |
+
from aioshutil import rmtree
|
| 18 |
+
from langcodes import Language
|
| 19 |
+
|
| 20 |
+
from ... import LOGGER, DOWNLOAD_DIR, threads, cores
|
| 21 |
+
from ...core.config_manager import BinConfig
|
| 22 |
+
from .bot_utils import cmd_exec, sync_to_async
|
| 23 |
+
from .files_utils import get_mime_type, is_archive, is_archive_split
|
| 24 |
+
from .status_utils import time_to_seconds
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def get_md5_hash(up_path):
    """Return the MD5 hex digest of the file at *up_path*, read in 4 KiB chunks."""
    hasher = md5()
    with open(up_path, "rb") as stream:
        while block := stream.read(4096):
            hasher.update(block)
    return hasher.hexdigest()
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
async def create_thumb(msg, _id=""):
    """Download the photo in *msg* and save it as a JPEG thumbnail.

    With no *_id*, a timestamp is used and the thumbnail goes under the
    download directory; otherwise it goes under a local "thumbnails" folder.
    Returns the path of the generated JPEG.
    """
    if _id:
        folder = "thumbnails"
    else:
        _id = time()
        folder = f"{DOWNLOAD_DIR}thumbnails"
    await makedirs(folder, exist_ok=True)
    source = await msg.download()
    thumb_path = ospath.join(folder, f"{_id}.jpg")
    # PIL work is synchronous, so it is pushed to a thread.
    await sync_to_async(Image.open(source).convert("RGB").save, thumb_path, "JPEG")
    await remove(source)
    return thumb_path
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
async def get_media_info(path, extra_info=False):
    """Probe *path* with ffprobe and return summary media metadata.

    Args:
        path: Path of the media file to inspect.
        extra_info: When True, also derive quality/language/subtitle info
            from the stream list.

    Returns:
        (duration, artist, title) by default, or
        (duration, quality, audio_languages, subtitle_languages) when
        extra_info is True. Zeroed/empty values are returned on any failure.
    """
    try:
        result = await cmd_exec(
            [
                "ffprobe",
                "-hide_banner",
                "-loglevel",
                "error",
                "-print_format",
                "json",
                "-show_format",
                "-show_streams",
                path,
            ]
        )
    except Exception as e:
        LOGGER.error(f"Get Media Info: {e}. Mostly File not found! - File: {path}")
        return (0, "", "", "") if extra_info else (0, None, None)
    if result[0] and result[2] == 0:
        # ffprobe emits JSON; parse it safely instead of eval()-ing
        # arbitrary subprocess output.
        try:
            ffresult = json.loads(result[0])
        except ValueError:
            LOGGER.error(f"get_media_info: {result}")
            return (0, "", "", "") if extra_info else (0, None, None)
        fields = ffresult.get("format")
        if fields is None:
            LOGGER.error(f"get_media_info: {result}")
            return (0, "", "", "") if extra_info else (0, None, None)
        duration = round(float(fields.get("duration", 0)))
        if extra_info:
            lang, qual, stitles = "", "", ""
            if (streams := ffresult.get("streams")) and streams[0].get(
                "codec_type"
            ) == "video":
                # Bucket the height into a standard quality label.
                qual = int(streams[0].get("height"))
                qual = f"{480 if qual <= 480 else 540 if qual <= 540 else 720 if qual <= 720 else 1080 if qual <= 1080 else 2160 if qual <= 2160 else 4320 if qual <= 4320 else 8640}p"
                for stream in streams:
                    if stream.get("codec_type") == "audio" and (
                        lc := stream.get("tags", {}).get("language")
                    ):
                        with suppress(Exception):
                            lc = Language.get(lc).display_name()
                        if lc not in lang:
                            lang += f"{lc}, "
                    if stream.get("codec_type") == "subtitle" and (
                        st := stream.get("tags", {}).get("language")
                    ):
                        with suppress(Exception):
                            st = Language.get(st).display_name()
                        if st not in stitles:
                            stitles += f"{st}, "
            # [:-2] trims the trailing ", " separator.
            return duration, qual, lang[:-2], stitles[:-2]
        tags = fields.get("tags", {})
        artist = tags.get("artist") or tags.get("ARTIST") or tags.get("Artist")
        title = tags.get("title") or tags.get("TITLE") or tags.get("Title")
        return duration, artist, title
    return (0, "", "", "") if extra_info else (0, None, None)
|
| 102 |
+
|
| 103 |
+
|
| 104 |
+
async def get_document_type(path):
    """Classify the file at *path* via mime type plus an ffprobe fallback.

    Returns:
        Tuple (is_video, is_audio, is_image).
    """
    is_video, is_audio, is_image = False, False, False
    # Archives (and split archives) are never media.
    if (
        is_archive(path)
        or is_archive_split(path)
        or re_search(r".+(\.|_)(rar|7z|zip|bin)(\.0*\d+)?$", path)
    ):
        return is_video, is_audio, is_image
    mime_type = await sync_to_async(get_mime_type, path)
    if mime_type.startswith("image"):
        return False, False, True
    # Keep `result` defined even if cmd_exec itself raises, so the
    # octet-stream fallback below cannot hit a NameError.
    result = None
    try:
        result = await cmd_exec(
            [
                "ffprobe",
                "-hide_banner",
                "-loglevel",
                "error",
                "-print_format",
                "json",
                "-show_streams",
                path,
            ]
        )
        if result[1] and mime_type.startswith("video"):
            is_video = True
    except Exception as e:
        LOGGER.error(f"Get Document Type: {e}. Mostly File not found! - File: {path}")
    if mime_type.startswith("audio"):
        return False, True, False
    if not mime_type.startswith("video") and not mime_type.endswith("octet-stream"):
        return is_video, is_audio, is_image
    if mime_type.startswith("video"):
        is_video = True
        return is_video, is_audio, is_image
    # octet-stream: fall back to inspecting the ffprobe stream list.
    if result and result[0] and result[2] == 0:
        # Parse ffprobe's JSON output safely instead of eval()-ing it.
        try:
            fields = json.loads(result[0]).get("streams")
        except ValueError:
            fields = None
        if fields is None:
            LOGGER.error(f"get_document_type: {result}")
            return is_video, is_audio, is_image
        is_video = False
        for stream in fields:
            if stream.get("codec_type") == "video":
                # Still-image codecs embedded as "video" don't count.
                codec_name = stream.get("codec_name", "").lower()
                if codec_name not in {"mjpeg", "png", "bmp"}:
                    is_video = True
            elif stream.get("codec_type") == "audio":
                is_audio = True
    return is_video, is_audio, is_image
|
| 153 |
+
|
| 154 |
+
|
| 155 |
+
async def get_streams(file):
    """
    Gets media stream information using ffprobe.

    Args:
        file: Path to the media file.

    Returns:
        A list of stream objects (dictionaries) or None if an error occurs
        or no streams are found.
    """
    proc = await create_subprocess_exec(
        "ffprobe",
        "-hide_banner",
        "-loglevel",
        "error",
        "-print_format",
        "json",
        "-show_streams",
        file,
        stdout=PIPE,
        stderr=PIPE,
    )
    out, err = await proc.communicate()

    if proc.returncode != 0:
        LOGGER.error(f"Error getting stream info: {err.decode().strip()}")
        return None

    try:
        return json.loads(out)["streams"]
    except KeyError:
        LOGGER.error(
            f"No streams found in the ffprobe output: {out.decode().strip()}",
        )
        return None
|
| 190 |
+
|
| 191 |
+
|
| 192 |
+
async def take_ss(video_file, ss_nb) -> "str | bool":
    """Take *ss_nb* evenly spaced screenshots from *video_file*.

    Screenshots are written as PNGs into a sibling ``<name>_mltbss``
    directory. All ffmpeg invocations run concurrently with a shared
    60-second timeout.

    Returns:
        The screenshot directory path on success, ``False`` on failure.
        (The original ``-> bool`` annotation was wrong: the success value
        is a path string.)
    """
    duration = (await get_media_info(video_file))[0]
    if duration == 0:
        LOGGER.error("take_ss: Can't get the duration of video")
        return False
    dirpath, name = video_file.rsplit("/", 1)
    name, _ = ospath.splitext(name)
    dirpath = f"{dirpath}/{name}_mltbss"
    await makedirs(dirpath, exist_ok=True)
    # Spread captures uniformly, skipping the very start and end.
    interval = duration // (ss_nb + 1)
    cap_time = interval
    cmds = []
    for i in range(ss_nb):
        output = f"{dirpath}/SS.{name}_{i:02}.png"
        cmd = [
            "taskset",
            "-c",
            f"{cores}",
            BinConfig.FFMPEG_NAME,
            "-hide_banner",
            "-loglevel",
            "error",
            "-ss",
            f"{cap_time}",
            "-i",
            video_file,
            "-q:v",
            "1",
            "-frames:v",
            "1",
            "-threads",
            f"{threads}",
            output,
        ]
        cap_time += interval
        cmds.append(cmd_exec(cmd))
    try:
        results = await wait_for(gather(*cmds), timeout=60)
    except Exception:
        LOGGER.error(
            f"Error while creating screenshots from video. Path: {video_file}. Error: Timeout some issues with ffmpeg with specific arch!"
        )
        await rmtree(dirpath, ignore_errors=True)
        return False
    # Check EVERY invocation; the original only inspected results[0]
    # (misspelled "resutls"), so failures of later screenshots were
    # silently ignored.
    for res in results:
        if res[2] != 0:
            LOGGER.error(
                f"Error while creating screenshots from video. Path: {video_file}. stderr: {res[1]}"
            )
            await rmtree(dirpath, ignore_errors=True)
            return False
    return dirpath
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
async def get_audio_thumbnail(audio_file):
    """Extract embedded cover art from *audio_file* as a JPEG.

    The art is stream-copied (``-an -vcodec copy``) into the shared
    thumbnails directory under a timestamp-based name.

    Returns the thumbnail path, or ``None`` on failure/timeout.
    """
    thumb_dir = f"{DOWNLOAD_DIR}thumbnails"
    await makedirs(thumb_dir, exist_ok=True)
    thumb_path = ospath.join(thumb_dir, f"{time()}.jpg")
    cmd = [
        "taskset",
        "-c",
        f"{cores}",
        BinConfig.FFMPEG_NAME,
        "-hide_banner",
        "-loglevel",
        "error",
        "-i",
        audio_file,
        "-an",
        "-vcodec",
        "copy",
        "-threads",
        f"{threads}",
        thumb_path,
    ]
    try:
        _, err, code = await wait_for(cmd_exec(cmd), timeout=60)
    except Exception:
        LOGGER.error(
            f"Error while extracting thumbnail from audio. Name: {audio_file}. Error: Timeout some issues with ffmpeg with specific arch!"
        )
        return None
    if code != 0 or not await aiopath.exists(thumb_path):
        LOGGER.error(
            f"Error while extracting thumbnail from audio. Name: {audio_file} stderr: {err}"
        )
        return None
    return thumb_path
|
| 280 |
+
|
| 281 |
+
|
| 282 |
+
async def get_video_thumbnail(video_file, duration):
    """Grab one representative frame from *video_file* as a JPEG.

    Seeks to the middle of the video (probing the duration when the
    caller passes ``None``; an unknown duration falls back to 3 seconds)
    and lets ffmpeg's ``thumbnail`` filter pick a frame.

    Returns the thumbnail path, or ``None`` on failure/timeout.
    """
    thumb_dir = f"{DOWNLOAD_DIR}thumbnails"
    await makedirs(thumb_dir, exist_ok=True)
    thumb_path = ospath.join(thumb_dir, f"{time()}.jpg")
    if duration is None:
        duration = (await get_media_info(video_file))[0]
    if duration == 0:
        duration = 3
    seek_point = duration // 2
    cmd = [
        "taskset",
        "-c",
        f"{cores}",
        BinConfig.FFMPEG_NAME,
        "-hide_banner",
        "-loglevel",
        "error",
        "-ss",
        f"{seek_point}",
        "-i",
        video_file,
        "-vf",
        "thumbnail",
        "-q:v",
        "1",
        "-frames:v",
        "1",
        "-threads",
        f"{threads}",
        thumb_path,
    ]
    try:
        _, err, code = await wait_for(cmd_exec(cmd), timeout=60)
    except Exception:
        LOGGER.error(
            f"Error while extracting thumbnail from video. Name: {video_file}. Error: Timeout some issues with ffmpeg with specific arch!"
        )
        return None
    if code != 0 or not await aiopath.exists(thumb_path):
        LOGGER.error(
            f"Error while extracting thumbnail from video. Name: {video_file} stderr: {err}"
        )
        return None
    return thumb_path
|
| 326 |
+
|
| 327 |
+
|
| 328 |
+
async def get_multiple_frames_thumbnail(video_file, layout, keep_screenshots):
    """Build a tiled collage thumbnail (e.g. "3x2") from video frames.

    Normalizes *layout* to ``COLSxROWS``, takes that many screenshots via
    ``take_ss`` and tiles them into a single JPEG with ffmpeg's ``tile``
    filter. Screenshots are deleted afterwards unless *keep_screenshots*
    is truthy.

    Returns the collage path, or ``None`` on any failure.
    """
    grid = re.sub(r"(\d+)\D+(\d+)", r"\1x\2", layout)
    dims = grid.split("x")
    if len(dims) != 2 or not dims[0].isdigit() or not dims[1].isdigit():
        LOGGER.error(f"Invalid layout value: {grid}")
        return None
    frame_count = int(dims[0]) * int(dims[1])
    if frame_count == 0:
        LOGGER.error(f"Invalid layout value: {grid}")
        return None
    screens_dir = await take_ss(video_file, frame_count)
    if not screens_dir:
        return None
    thumb_dir = f"{DOWNLOAD_DIR}thumbnails"
    await makedirs(thumb_dir, exist_ok=True)
    collage = ospath.join(thumb_dir, f"{time()}.jpg")
    cmd = [
        "taskset",
        "-c",
        f"{cores}",
        BinConfig.FFMPEG_NAME,
        "-hide_banner",
        "-loglevel",
        "error",
        "-pattern_type",
        "glob",
        "-i",
        f"{escape(screens_dir)}/*.png",
        "-vf",
        f"tile={grid}, thumbnail",
        "-q:v",
        "1",
        "-frames:v",
        "1",
        "-f",
        "mjpeg",
        "-threads",
        f"{threads}",
        collage,
    ]
    try:
        _, err, code = await wait_for(cmd_exec(cmd), timeout=60)
        if code != 0 or not await aiopath.exists(collage):
            LOGGER.error(
                f"Error while combining thumbnails for video. Name: {video_file} stderr: {err}"
            )
            return None
    except Exception:
        LOGGER.error(
            f"Error while combining thumbnails from video. Name: {video_file}. Error: Timeout some issues with ffmpeg with specific arch!"
        )
        return None
    finally:
        # Cleanup runs on every exit path, including the success return.
        if not keep_screenshots:
            await rmtree(screens_dir, ignore_errors=True)
    return collage
|
| 384 |
+
|
| 385 |
+
|
| 386 |
+
class FFMpeg:
    """Async wrapper around ffmpeg jobs with live progress tracking.

    Each public coroutine builds a ``taskset``-pinned ffmpeg command, runs it
    as ``self._listener.subproc`` and feeds the ``-progress pipe:1`` output
    through :meth:`_ffmpeg_progress` so the status UI can read processed
    bytes, speed, percentage and ETA while the job runs.
    """

    def __init__(self, listener):
        # Task listener: supplies .subproc, .is_cancelled, .max_split_size
        # (and, presumably, .subsize — see _ffmpeg_progress) and receives
        # cancellation state back.
        self._listener = listener
        self._processed_bytes = 0
        # Carry-over totals from previously finished parts (used by split()).
        self._last_processed_bytes = 0
        self._processed_time = 0
        self._last_processed_time = 0
        self._speed_raw = 0
        self._progress_raw = 0
        # Duration (seconds) of the media being processed; progress baseline.
        self._total_time = 0
        self._eta_raw = 0
        # ffmpeg "speed=" multiplier; floored at 0.1 to avoid huge/zero ETAs.
        self._time_rate = 0.1
        self._start_time = 0

    @property
    def processed_bytes(self):
        """Bytes produced so far (includes earlier parts during split())."""
        return self._processed_bytes

    @property
    def speed_raw(self):
        """Current throughput in bytes per second."""
        return self._speed_raw

    @property
    def progress_raw(self):
        """Progress percentage derived from out_time / total duration."""
        return self._progress_raw

    @property
    def eta_raw(self):
        """Estimated seconds remaining, scaled by the ffmpeg speed factor."""
        return self._eta_raw

    def clear(self):
        """Reset all counters and restart the progress clock."""
        self._start_time = time()
        self._processed_bytes = 0
        self._processed_time = 0
        self._speed_raw = 0
        self._progress_raw = 0
        self._eta_raw = 0
        self._time_rate = 0.1
        self._last_processed_time = 0
        self._last_processed_bytes = 0

    async def _ffmpeg_progress(self):
        """Poll ffmpeg's ``-progress pipe:1`` stdout and update counters.

        Loops until the subprocess exits, the task is cancelled or stdout
        reaches EOF. Each line is a ``key=value`` pair emitted by ffmpeg;
        only ``total_size``, ``speed`` and ``out_time`` are consumed.
        """
        while not (
            self._listener.subproc.returncode is not None
            or self._listener.is_cancelled
            or self._listener.subproc.stdout.at_eof()
        ):
            try:
                # 60s without a progress line is treated as a stall.
                line = await wait_for(self._listener.subproc.stdout.readline(), 60)
            except Exception:
                break
            line = line.decode().strip()
            if not line:
                break
            if "=" in line:
                key, value = line.split("=", 1)
                if value != "N/A":
                    if key == "total_size":
                        self._processed_bytes = int(value) + self._last_processed_bytes
                        self._speed_raw = self._processed_bytes / (
                            time() - self._start_time
                        )
                    elif key == "speed":
                        # ffmpeg reports e.g. "1.05x"; keep a 0.1 floor.
                        self._time_rate = max(0.1, float(value.strip("x")))
                    elif key == "out_time":
                        self._processed_time = (
                            time_to_seconds(value) + self._last_processed_time
                        )
                        try:
                            self._progress_raw = (
                                self._processed_time * 100
                            ) / self._total_time
                            # When the listener exposes an expected output
                            # size, estimate bytes from the percentage
                            # instead of total_size.
                            if (
                                hasattr(self._listener, "subsize")
                                and self._listener.subsize
                                and self._progress_raw > 0
                            ):
                                self._processed_bytes = int(
                                    self._listener.subsize * (self._progress_raw / 100)
                                )
                                if (time() - self._start_time) > 0:
                                    self._speed_raw = self._processed_bytes / (
                                        time() - self._start_time
                                    )
                                else:
                                    self._speed_raw = 0
                            self._eta_raw = (
                                self._total_time - self._processed_time
                            ) / self._time_rate
                        except ZeroDivisionError:
                            # _total_time may be 0 when duration is unknown.
                            self._progress_raw = 0
                            self._eta_raw = 0
            await sleep(0.05)

    async def ffmpeg_cmds(self, ffmpeg, f_path):
        """Run a user-supplied ffmpeg command template against *f_path*.

        Every argument equal to ``"mltb"`` or starting with ``"mltb"`` is
        an output placeholder; it is replaced IN PLACE (the *ffmpeg* list
        is mutated) with a real output path derived from the input name.

        Returns the list of output paths on success, ``False`` on failure
        or cancellation.
        """
        self.clear()
        self._total_time = (await get_media_info(f_path))[0]
        base_name, ext = ospath.splitext(f_path)
        dir, base_name = base_name.rsplit("/", 1)
        indices = [
            index
            for index, item in enumerate(ffmpeg)
            if item.startswith("mltb") or item == "mltb"
        ]
        outputs = []
        for index in indices:
            output_file = ffmpeg[index]
            if output_file != "mltb" and output_file.startswith("mltb"):
                bo, oext = ospath.splitext(output_file)
                if oext:
                    # Placeholder carries its own extension; clear the
                    # input extension so it is not appended twice.
                    if ext == oext:
                        prefix = f"ffmpeg{index}." if bo == "mltb" else ""
                    else:
                        prefix = ""
                    ext = ""
                else:
                    prefix = ""
            else:
                prefix = f"ffmpeg{index}."
            output = f"{dir}/{prefix}{output_file.replace('mltb', base_name)}{ext}"
            outputs.append(output)
            ffmpeg[index] = output
        if self._listener.is_cancelled:
            return False
        self._listener.subproc = await create_subprocess_exec(
            *ffmpeg, stdout=PIPE, stderr=PIPE
        )
        await self._ffmpeg_progress()
        _, stderr = await self._listener.subproc.communicate()
        code = self._listener.subproc.returncode
        if self._listener.is_cancelled:
            return False
        if code == 0:
            return outputs
        elif code == -9:
            # SIGKILL: treat as an external cancellation.
            self._listener.is_cancelled = True
            return False
        else:
            try:
                stderr = stderr.decode().strip()
            except Exception:
                stderr = "Unable to decode the error!"
            LOGGER.error(
                f"{stderr}. Something went wrong while running ffmpeg cmd, mostly file requires different/specific arguments. Path: {f_path}"
            )
            # Remove partial outputs left behind by the failed run.
            for op in outputs:
                if await aiopath.exists(op):
                    await remove(op)
            return False

    async def convert_video(self, video_file, ext, retry=False):
        """Convert *video_file* to container *ext*.

        The first pass stream-copies all streams; on failure it calls
        itself once with ``retry=True`` to re-encode to H.264/AAC with a
        subtitle codec matched to the target container.

        Returns the output path on success, ``False`` otherwise.
        """
        self.clear()
        self._total_time = (await get_media_info(video_file))[0]
        base_name = ospath.splitext(video_file)[0]
        output = f"{base_name}.{ext}"
        if retry:
            cmd = [
                "taskset",
                "-c",
                f"{cores}",
                BinConfig.FFMPEG_NAME,
                "-hide_banner",
                "-loglevel",
                "error",
                "-progress",
                "pipe:1",
                "-i",
                video_file,
                "-map",
                "0",
                "-c:v",
                "libx264",
                "-c:a",
                "aac",
                "-threads",
                f"{threads}",
                output,
            ]
            # Splice the subtitle codec in just before "-threads"
            # (index 17 of the list above).
            if ext == "mp4":
                cmd[17:17] = ["-c:s", "mov_text"]
            elif ext == "mkv":
                cmd[17:17] = ["-c:s", "ass"]
            else:
                cmd[17:17] = ["-c:s", "copy"]
        else:
            cmd = [
                "taskset",
                "-c",
                f"{cores}",
                BinConfig.FFMPEG_NAME,
                "-hide_banner",
                "-loglevel",
                "error",
                "-progress",
                "pipe:1",
                "-i",
                video_file,
                "-map",
                "0",
                "-c",
                "copy",
                "-threads",
                f"{threads}",
                output,
            ]
        if self._listener.is_cancelled:
            return False
        self._listener.subproc = await create_subprocess_exec(
            *cmd, stdout=PIPE, stderr=PIPE
        )
        await self._ffmpeg_progress()
        _, stderr = await self._listener.subproc.communicate()
        code = self._listener.subproc.returncode
        if self._listener.is_cancelled:
            return False
        if code == 0:
            return output
        elif code == -9:
            self._listener.is_cancelled = True
            return False
        else:
            if await aiopath.exists(output):
                await remove(output)
            if not retry:
                # Stream copy failed; try once more with re-encoding.
                return await self.convert_video(video_file, ext, True)
            try:
                stderr = stderr.decode().strip()
            except Exception:
                stderr = "Unable to decode the error!"
            LOGGER.error(
                f"{stderr}. Something went wrong while converting video, mostly file need specific codec. Path: {video_file}"
            )
            return False

    async def convert_audio(self, audio_file, ext):
        """Convert *audio_file* to format *ext* (codec chosen by ffmpeg).

        Returns the output path on success, ``False`` otherwise.
        """
        self.clear()
        self._total_time = (await get_media_info(audio_file))[0]
        base_name = ospath.splitext(audio_file)[0]
        output = f"{base_name}.{ext}"
        cmd = [
            "taskset",
            "-c",
            f"{cores}",
            BinConfig.FFMPEG_NAME,
            "-hide_banner",
            "-loglevel",
            "error",
            "-progress",
            "pipe:1",
            "-i",
            audio_file,
            "-threads",
            f"{threads}",
            output,
        ]
        if self._listener.is_cancelled:
            return False
        self._listener.subproc = await create_subprocess_exec(
            *cmd, stdout=PIPE, stderr=PIPE
        )
        await self._ffmpeg_progress()
        _, stderr = await self._listener.subproc.communicate()
        code = self._listener.subproc.returncode
        if self._listener.is_cancelled:
            return False
        if code == 0:
            return output
        elif code == -9:
            self._listener.is_cancelled = True
            return False
        else:
            try:
                stderr = stderr.decode().strip()
            except Exception:
                stderr = "Unable to decode the error!"
            LOGGER.error(
                f"{stderr}. Something went wrong while converting audio, mostly file need specific codec. Path: {audio_file}"
            )
            if await aiopath.exists(output):
                await remove(output)
            return False

    async def sample_video(self, video_file, sample_duration, part_duration):
        """Create a short SAMPLE video of *sample_duration* seconds.

        Picks *part_duration*-second segments spread evenly across the
        source (always including the very start and end) and concatenates
        them with a trim/concat filter graph, re-encoded to H.264/AAC.

        Returns the sample path on success, ``False`` otherwise.
        """
        self.clear()
        self._total_time = sample_duration
        dir, name = video_file.rsplit("/", 1)
        output_file = f"{dir}/SAMPLE.{name}"
        segments = [(0, part_duration)]
        duration = (await get_media_info(video_file))[0]
        remaining_duration = duration - (part_duration * 2)
        parts = (sample_duration - (part_duration * 2)) // part_duration
        time_interval = remaining_duration // parts
        next_segment = time_interval
        for _ in range(parts):
            segments.append((next_segment, next_segment + part_duration))
            next_segment += time_interval
        segments.append((duration - part_duration, duration))

        # Trim matching video/audio slices, then concat them all.
        filter_complex = ""
        for i, (start, end) in enumerate(segments):
            filter_complex += (
                f"[0:v]trim=start={start}:end={end},setpts=PTS-STARTPTS[v{i}]; "
            )
            filter_complex += (
                f"[0:a]atrim=start={start}:end={end},asetpts=PTS-STARTPTS[a{i}]; "
            )

        for i in range(len(segments)):
            filter_complex += f"[v{i}][a{i}]"

        filter_complex += f"concat=n={len(segments)}:v=1:a=1[vout][aout]"

        cmd = [
            "taskset",
            "-c",
            f"{cores}",
            BinConfig.FFMPEG_NAME,
            "-hide_banner",
            "-loglevel",
            "error",
            "-progress",
            "pipe:1",
            "-i",
            video_file,
            "-filter_complex",
            filter_complex,
            "-map",
            "[vout]",
            "-map",
            "[aout]",
            "-c:v",
            "libx264",
            "-c:a",
            "aac",
            "-threads",
            f"{threads}",
            output_file,
        ]

        if self._listener.is_cancelled:
            return False
        self._listener.subproc = await create_subprocess_exec(
            *cmd, stdout=PIPE, stderr=PIPE
        )
        await self._ffmpeg_progress()
        _, stderr = await self._listener.subproc.communicate()
        code = self._listener.subproc.returncode
        if self._listener.is_cancelled:
            return False
        if code == -9:
            self._listener.is_cancelled = True
            return False
        elif code == 0:
            return output_file
        else:
            try:
                stderr = stderr.decode().strip()
            except Exception:
                stderr = "Unable to decode the error!"
            LOGGER.error(
                f"{stderr}. Something went wrong while creating sample video, mostly file is corrupted. Path: {video_file}"
            )
            if await aiopath.exists(output_file):
                await remove(output_file)
            return False

    async def split(self, f_path, file_, parts, split_size):
        """Split *f_path* into ``.partNNN`` pieces of at most *split_size*.

        Uses size-limited (``-fs``) stream copies; each part re-seeks 3
        seconds before the previous part's end so pieces overlap slightly.
        If a pass fails with ``-map 0`` present, it retries once without
        it before giving up.

        Returns True when splitting finished (or was abandoned with a
        logged warning), False on cancellation or hard failure.
        """
        self.clear()
        multi_streams = True
        self._total_time = duration = (await get_media_info(f_path))[0]
        base_name, extension = ospath.splitext(file_)
        # Leave headroom below the real limit for container overhead.
        split_size -= 3000000
        start_time = 0
        i = 1
        while i <= parts or start_time < duration - 4:
            out_path = f_path.replace(file_, f"{base_name}.part{i:03}{extension}")
            cmd = [
                "taskset",
                "-c",
                f"{cores}",
                BinConfig.FFMPEG_NAME,
                "-hide_banner",
                "-loglevel",
                "error",
                "-progress",
                "pipe:1",
                "-ss",
                str(start_time),
                "-i",
                f_path,
                "-fs",
                str(split_size),
                "-map",
                "0",
                "-map_chapters",
                "-1",
                "-async",
                "1",
                "-strict",
                "-2",
                "-c",
                "copy",
                "-threads",
                f"{threads}",
                out_path,
            ]
            if not multi_streams:
                # Drop "-map", "0" (indices 15 and 16 of the list above).
                del cmd[15]
                del cmd[15]
            if self._listener.is_cancelled:
                return False
            self._listener.subproc = await create_subprocess_exec(
                *cmd, stdout=PIPE, stderr=PIPE
            )
            await self._ffmpeg_progress()
            _, stderr = await self._listener.subproc.communicate()
            code = self._listener.subproc.returncode
            if self._listener.is_cancelled:
                return False
            if code == -9:
                self._listener.is_cancelled = True
                return False
            elif code != 0:
                try:
                    stderr = stderr.decode().strip()
                except Exception:
                    stderr = "Unable to decode the error!"
                with suppress(Exception):
                    await remove(out_path)
                if multi_streams:
                    LOGGER.warning(
                        f"{stderr}. Retrying without map, -map 0 not working in all situations. Path: {f_path}"
                    )
                    multi_streams = False
                    continue
                else:
                    LOGGER.warning(
                        f"{stderr}. Unable to split this video, if it's size less than {self._listener.max_split_size} will be uploaded as it is. Path: {f_path}"
                    )
                return False
            out_size = await aiopath.getsize(out_path)
            if out_size > self._listener.max_split_size:
                # -fs overshot the hard limit; shrink and redo this part.
                split_size -= (out_size - self._listener.max_split_size) + 5000000
                LOGGER.warning(
                    f"Part size is {out_size}. Trying again with lower split size!. Path: {f_path}"
                )
                await remove(out_path)
                continue
            lpd = (await get_media_info(out_path))[0]
            if lpd == 0:
                LOGGER.error(
                    f"Something went wrong while splitting, mostly file is corrupted. Path: {f_path}"
                )
                break
            elif duration == lpd:
                LOGGER.warning(
                    f"This file has been splitted with default stream and audio, so you will only see one part with less size from orginal one because it doesn't have all streams and audios. This happens mostly with MKV videos. Path: {f_path}"
                )
                break
            elif lpd <= 3:
                # Tail shorter than the 3s overlap: nothing new, drop it.
                await remove(out_path)
                break
            # Carry totals forward so progress spans all parts.
            self._last_processed_time += lpd
            self._last_processed_bytes += out_size
            start_time += lpd - 3
            i += 1
        return True
|
bot/helper/ext_utils/metadata_utils.py
ADDED
|
@@ -0,0 +1,168 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from os.path import basename, splitext
|
| 2 |
+
from re import compile as re_compile
|
| 3 |
+
from pycountry import languages
|
| 4 |
+
from ..ext_utils.media_utils import get_streams
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
class MetadataProcessor:
    """Resolve metadata templates against a media file's properties.

    Extracts template variables (filename, extension, year, stream
    languages) from a file via ffprobe, then substitutes them into
    user-supplied ``{var}``-style metadata dictionaries for the video,
    audio and subtitle streams.
    """

    # Non-capturing group is required: with the original capturing group
    # (19|20), findall() returned only "19"/"20" instead of the full
    # 4-digit year, so vars["year"] was always truncated.
    _year_pattern = re_compile(r"\b(?:19|20)\d{2}\b")
    # Characters unsafe in metadata keys / filesystem names.
    _sanitize_pattern = re_compile(r'[<>:"/\\?*]')

    def __init__(self):
        self.vars = {}  # template variables extracted from the file
        self.audio_streams = []  # [{"index", "language", "full_language"}, ...]
        self.subtitle_streams = []

    @staticmethod
    def convert_lang_code(lang_code):
        """Expand an ISO 639-1/639-2 code to a full language name.

        Returns the input unchanged for unknown/sentinel codes or when
        pycountry cannot resolve it.
        """
        if not lang_code or lang_code in {"unknown", "und", "none"}:
            return lang_code
        try:
            if len(lang_code) == 2:
                lang = languages.get(alpha_2=lang_code.lower())
            elif len(lang_code) == 3:
                lang = languages.get(alpha_3=lang_code.lower())
            else:
                return lang_code
            return lang.name if lang else lang_code
        except Exception:
            return lang_code

    async def extract_file_vars(self, file_path):
        """Populate ``self.vars`` and the stream lists from *file_path*.

        Stream probing is best-effort: any ffprobe failure leaves the
        defaults ("unknown"/"none") in place.
        """
        fname = basename(file_path)
        bname, ext = splitext(fname)
        self.vars = {
            "filename": fname,
            "basename": bname,
            "extension": ext.lstrip("."),
            "audiolang": "unknown",
            "sublang": "none",
        }
        self.audio_streams, self.subtitle_streams = [], []
        try:
            for s in await get_streams(file_path) or []:
                ctype = s.get("codec_type", "").lower()
                slang = s.get("tags", {}).get("language", "unknown")
                full_lang = self.convert_lang_code(slang)
                entry = {
                    "index": s.get("index", 0),
                    "language": slang,
                    "full_language": full_lang,
                }
                if ctype == "audio":
                    self.audio_streams.append(entry)
                    # First tagged (non-"und") audio language wins.
                    if self.vars["audiolang"] == "unknown" and slang != "und":
                        self.vars["audiolang"] = full_lang
                elif ctype == "subtitle":
                    self.subtitle_streams.append(entry)
                    if self.vars["sublang"] == "none" and slang != "und":
                        self.vars["sublang"] = full_lang
        except Exception:
            pass  # best-effort: keep defaults when probing fails
        m = self._year_pattern.findall(bname)
        if m:
            # Last year-like token (release year usually trails the title).
            self.vars["year"] = m[-1]

    @staticmethod
    def parse_string(metadata_str):
        """Parse ``key=value|key2=value2`` strings into a dict.

        ``\\|`` escapes a literal pipe inside a value; a segment without
        ``=`` maps to an empty string.
        """
        if not metadata_str or not isinstance(metadata_str, str):
            return {}
        parts, current, i = [], "", 0
        while i < len(metadata_str):
            if (
                metadata_str[i] == "\\"
                and i + 1 < len(metadata_str)
                and metadata_str[i + 1] == "|"
            ):
                current += "|"
                i += 2
            elif metadata_str[i] == "|":
                parts.append(current)
                current = ""
                i += 1
            else:
                current += metadata_str[i]
                i += 1
        if current:
            parts.append(current)
        return dict(p.split("=", 1) if "=" in p else (p, "") for p in parts)

    @staticmethod
    def merge_dicts(default_dict, cmd_dict):
        """Merge two optional dicts; command-level values override defaults."""
        return {**(default_dict or {}), **(cmd_dict or {})}

    def apply_vars_to_stream(
        self, metadata_dict, stream_lang=None, full_lang=None, stream_type="audio"
    ):
        """Substitute template vars into *metadata_dict* for one stream.

        *stream_lang*/*full_lang* override the file-level audio/subtitle
        language variable for this stream. Keys are sanitized; values
        that fail formatting (e.g. reference a missing variable such as
        ``{year}``) are kept verbatim instead of raising.
        """
        if not isinstance(metadata_dict, dict):
            return {}
        vars_with_stream = self.vars.copy()
        if stream_lang and stream_lang != "unknown":
            key = "audiolang" if stream_type == "audio" else "sublang"
            vars_with_stream[key] = full_lang or self.convert_lang_code(stream_lang)

        def _fmt(v):
            # Previously a template referencing an absent variable (or
            # containing stray braces) raised KeyError/IndexError/ValueError
            # out of the processor; fall back to the raw value instead.
            if not isinstance(v, str):
                return str(v)
            try:
                return v.format(**vars_with_stream)
            except (KeyError, IndexError, ValueError):
                return v

        return {self.sanitize(k): _fmt(v) for k, v in metadata_dict.items()}

    def apply_vars(self, metadata_dict):
        """Substitute file-level template vars (no per-stream override)."""
        return self.apply_vars_to_stream(metadata_dict)

    def get_audio_metadata(self, audio_metadata_dict):
        """Resolve *audio_metadata_dict* once per detected audio stream."""
        return [
            {
                "index": s["index"],
                "metadata": self.apply_vars_to_stream(
                    audio_metadata_dict, s["language"], s["full_language"], "audio"
                ),
            }
            for s in self.audio_streams
        ]

    def get_subtitle_metadata(self, subtitle_metadata_dict):
        """Resolve *subtitle_metadata_dict* once per detected subtitle stream."""
        return [
            {
                "index": s["index"],
                "metadata": self.apply_vars_to_stream(
                    subtitle_metadata_dict,
                    s["language"],
                    s["full_language"],
                    "subtitle",
                ),
            }
            for s in self.subtitle_streams
        ]

    def sanitize(self, value):
        """Replace unsafe characters with '_' and cap length at 100."""
        return self._sanitize_pattern.sub("_", str(value))[:100]

    async def process_all(
        self,
        video_metadata_dict,
        audio_metadata_dict,
        subtitle_metadata_dict,
        file_path,
    ):
        """Resolve all three metadata dicts for *file_path* in one pass."""
        await self.extract_file_vars(file_path)
        return {
            "video": (
                self.apply_vars(video_metadata_dict) if video_metadata_dict else {}
            ),
            "audio_streams": (
                self.get_audio_metadata(audio_metadata_dict)
                if audio_metadata_dict
                else []
            ),
            "subtitle_streams": (
                self.get_subtitle_metadata(subtitle_metadata_dict)
                if subtitle_metadata_dict
                else []
            ),
            "global": {},
        }

    async def process(self, metadata_dict, file_path):
        """Resolve a single metadata dict for *file_path*."""
        await self.extract_file_vars(file_path)
        return self.apply_vars(metadata_dict)
|
bot/helper/ext_utils/shortener_utils.py
ADDED
|
@@ -0,0 +1,84 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from base64 import b64encode
|
| 2 |
+
from random import choice, random
|
| 3 |
+
from asyncio import sleep as asleep
|
| 4 |
+
from urllib.parse import quote
|
| 5 |
+
|
| 6 |
+
from cloudscraper import create_scraper
|
| 7 |
+
from urllib3 import disable_warnings
|
| 8 |
+
|
| 9 |
+
from ... import LOGGER, shortener_dict
|
| 10 |
+
from ...core.config_manager import Config
|
| 11 |
+
|
| 12 |
+
|
| 13 |
+
async def short_url(longurl, attempt=0):
    """Shorten *longurl* via the protected API or a randomly chosen shortener.

    Retries (recursively) up to 4 times on any failure, then falls back to
    returning the original URL unchanged.  ``attempt`` is the internal
    recursion counter and should not be supplied by callers.
    """
    # Nothing configured at all: hand the URL back untouched.
    if not shortener_dict and not Config.PROTECTED_API:
        return longurl
    # Give up after 4 failed attempts.
    if attempt >= 4:
        return longurl

    cget = create_scraper().request
    disable_warnings()
    try:
        # The protected API, when configured, takes priority over shorteners.
        if Config.PROTECTED_API:
            res = cget("GET", Config.PROTECTED_API, params={"url": longurl}).json()
            if res.get("status") == "success":
                return res["url"]
            raise Exception(f"Protected API Error: {res}")

        # Pick one configured shortener service at random per attempt.
        _shortener, _shortener_api = choice(list(shortener_dict.items()))
        if "shorte.st" in _shortener:
            headers = {"public-api-token": _shortener_api}
            data = {"urlToShorten": quote(longurl)}
            return cget(
                "PUT", "https://api.shorte.st/v1/data/url", headers=headers, data=data
            ).json()["shortenedUrl"]
        elif "linkvertise" in _shortener:
            # Linkvertise links are built client-side from a base64 payload.
            url = quote(b64encode(longurl.encode("utf-8")))
            linkvertise = [
                f"https://link-to.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}",
                f"https://up-to-down.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}",
                f"https://direct-link.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}",
                f"https://file-link.net/{_shortener_api}/{random() * 1000}/dynamic?r={url}",
            ]
            return choice(linkvertise)
        elif "bitly.com" in _shortener:
            headers = {"Authorization": f"Bearer {_shortener_api}"}
            return cget(
                "POST",
                "https://api-ssl.bit.ly/v4/shorten",
                json={"long_url": longurl},
                headers=headers,
            ).json()["link"]
        elif "ouo.io" in _shortener:
            # ouo.io returns the short URL as plain text; TLS verify is
            # disabled here deliberately (their API cert is unreliable).
            return cget(
                "GET", f"http://ouo.io/api/{_shortener_api}?s={longurl}", verify=False
            ).text
        elif "cutt.ly" in _shortener:
            return cget(
                "GET",
                f"http://cutt.ly/api/api.php?key={_shortener_api}&short={longurl}",
            ).json()["url"]["shortLink"]
        else:
            # Generic "?api=KEY&url=..." style shortener.  Some of these
            # reject raw long URLs, so fall back to pre-shortening through
            # shrtco.de and shortening that result instead.
            res = cget(
                "GET",
                f"https://{_shortener}/api?api={_shortener_api}&url={quote(longurl)}",
            ).json()
            shorted = res["shortenedUrl"]
            if not shorted:
                shrtco_res = cget(
                    "GET", f"https://api.shrtco.de/v2/shorten?url={quote(longurl)}"
                ).json()
                shrtco_link = shrtco_res["result"]["full_short_link"]
                res = cget(
                    "GET",
                    f"https://{_shortener}/api?api={_shortener_api}&url={shrtco_link}",
                ).json()
                shorted = res["shortenedUrl"]
            if not shorted:
                shorted = longurl
            return shorted
    except Exception as e:
        # Any failure (network, JSON, missing key) triggers a delayed retry.
        LOGGER.error(e)
        await asleep(0.8)
        attempt += 1
        return await short_url(longurl, attempt)
|
bot/helper/ext_utils/status_utils.py
ADDED
|
@@ -0,0 +1,308 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from asyncio import gather, iscoroutinefunction
|
| 2 |
+
from html import escape
|
| 3 |
+
from re import findall
|
| 4 |
+
from time import time
|
| 5 |
+
|
| 6 |
+
from psutil import cpu_percent, disk_usage, virtual_memory
|
| 7 |
+
|
| 8 |
+
from ... import (
|
| 9 |
+
DOWNLOAD_DIR,
|
| 10 |
+
bot_cache,
|
| 11 |
+
bot_start_time,
|
| 12 |
+
status_dict,
|
| 13 |
+
task_dict,
|
| 14 |
+
task_dict_lock,
|
| 15 |
+
)
|
| 16 |
+
from ...core.config_manager import Config
|
| 17 |
+
from ..telegram_helper.button_build import ButtonMaker
|
| 18 |
+
|
| 19 |
+
SIZE_UNITS = ["B", "KB", "MB", "GB", "TB", "PB"]
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
class MirrorStatus:
    """Canonical display names for every user-visible task state.

    These strings are compared directly against the values returned by the
    various task ``status()`` methods and shown in the status message.
    """

    STATUS_UPLOAD = "Upload"
    STATUS_DOWNLOAD = "Download"
    STATUS_CLONE = "Clone"
    STATUS_QUEUEDL = "QueueDl"
    STATUS_QUEUEUP = "QueueUp"
    STATUS_PAUSED = "Pause"
    STATUS_ARCHIVE = "Archive"
    STATUS_EXTRACT = "Extract"
    STATUS_SPLIT = "Split"
    STATUS_CHECK = "CheckUp"
    STATUS_SEED = "Seed"
    STATUS_SAMVID = "SamVid"
    STATUS_CONVERT = "Convert"
    STATUS_FFMPEG = "FFmpeg"
    STATUS_YT = "YouTube"
    STATUS_METADATA = "Metadata"
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
class EngineStatus:
    """Human-readable "engine vVERSION" labels for the stats display.

    Versions are read from ``bot_cache["eng_versions"]`` — presumably
    populated during startup; any missing entry is shown as ``N/A``.
    """

    def __init__(self):
        ver = bot_cache.get("eng_versions", {})
        self.STATUS_ARIA2 = f"Aria2 v{ver.get('aria2', 'N/A')}"
        self.STATUS_AIOHTTP = f"AioHttp v{ver.get('aiohttp', 'N/A')}"
        self.STATUS_GDAPI = f"Google-API v{ver.get('gapi', 'N/A')}"
        self.STATUS_QBIT = f"qBit v{ver.get('qBittorrent', 'N/A')}"
        self.STATUS_TGRAM = f"Pyro v{ver.get('pyrotgfork', 'N/A')}"
        self.STATUS_MEGA = f"MegaCMD v{ver.get('mega', 'N/A')}"
        self.STATUS_YTDLP = f"yt-dlp v{ver.get('yt-dlp', 'N/A')}"
        self.STATUS_FFMPEG = f"ffmpeg v{ver.get('ffmpeg', 'N/A')}"
        self.STATUS_7Z = f"7z v{ver.get('7z', 'N/A')}"
        self.STATUS_RCLONE = f"RClone v{ver.get('rclone', 'N/A')}"
        self.STATUS_SABNZBD = f"SABnzbd+ v{ver.get('SABnzbd+', 'N/A')}"
        # Fixed labels — these engines have no cached version string.
        self.STATUS_QUEUE = "QSystem v2"
        self.STATUS_JD = "JDownloader v2"
        self.STATUS_YT = "Youtube-Api"
        self.STATUS_METADATA = "Metadata"
        self.STATUS_UPHOSTER = "Uphoster"
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
# Callback short-codes → status display names, used by the status-message
# filter buttons; "ALL" shows every task regardless of state.
STATUSES = {
    "ALL": "All",
    "DL": MirrorStatus.STATUS_DOWNLOAD,
    "UP": MirrorStatus.STATUS_UPLOAD,
    "QD": MirrorStatus.STATUS_QUEUEDL,
    "QU": MirrorStatus.STATUS_QUEUEUP,
    "AR": MirrorStatus.STATUS_ARCHIVE,
    "EX": MirrorStatus.STATUS_EXTRACT,
    "SD": MirrorStatus.STATUS_SEED,
    "CL": MirrorStatus.STATUS_CLONE,
    "CM": MirrorStatus.STATUS_CONVERT,
    "SP": MirrorStatus.STATUS_SPLIT,
    "SV": MirrorStatus.STATUS_SAMVID,
    "FF": MirrorStatus.STATUS_FFMPEG,
    "PA": MirrorStatus.STATUS_PAUSED,
    "CK": MirrorStatus.STATUS_CHECK,
}
|
| 79 |
+
|
| 80 |
+
|
| 81 |
+
async def get_task_by_gid(gid: str):
    """Find the running task whose gid matches *gid*, or None.

    Tasks exposing a ``seeding`` attribute are refreshed via ``update()``
    before their gid is compared, since their cached state may be stale.
    """
    async with task_dict_lock:
        for task in task_dict.values():
            if hasattr(task, "seeding"):
                await task.update()
            if task.gid() == gid:
                return task
    return None
|
| 89 |
+
|
| 90 |
+
|
| 91 |
+
async def get_specific_tasks(status, user_id):
    """Return tasks filtered by owner and/or status.

    ``status == "All"`` short-circuits without evaluating task statuses.
    Otherwise each task's ``status`` is resolved — awaited concurrently
    when it is a coroutine function — and compared against *status*.
    Tasks reporting a status unknown to STATUSES are bucketed under
    Download (the catch-all state).
    """
    if status == "All":
        if user_id:
            return [tk for tk in task_dict.values() if tk.listener.user_id == user_id]
        else:
            return list(task_dict.values())
    tasks_to_check = (
        [tk for tk in task_dict.values() if tk.listener.user_id == user_id]
        if user_id
        else list(task_dict.values())
    )
    coro_tasks = [tk for tk in tasks_to_check if iscoroutinefunction(tk.status)]
    coro_statuses = await gather(*[tk.status() for tk in coro_tasks])
    # Map awaited statuses by object identity: O(1) lookup per task instead
    # of the previous O(n) `tk in coro_tasks` list membership test (which
    # also depended on task __eq__ semantics).
    status_by_id = {id(tk): st for tk, st in zip(coro_tasks, coro_statuses)}
    result = []
    for tk in tasks_to_check:
        if id(tk) in status_by_id:
            st = status_by_id[id(tk)]
        else:
            st = tk.status()
        if (st == status) or (
            status == MirrorStatus.STATUS_DOWNLOAD and st not in STATUSES.values()
        ):
            result.append(tk)
    return result
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
async def get_all_tasks(req_status: str, user_id):
    """Lock-holding convenience wrapper around get_specific_tasks."""
    async with task_dict_lock:
        return await get_specific_tasks(req_status, user_id)
|
| 123 |
+
|
| 124 |
+
|
| 125 |
+
def get_raw_file_size(size):
    """Convert a human-readable size string back into a byte count.

    Accepts both space-separated input ("1.5 GB") and the compact form
    produced by get_readable_file_size ("1.50GB").  The previous
    ``split()``-based parser required a space and therefore could not
    round-trip this module's own output.

    Raises ValueError for an unrecognised unit or malformed number.
    """
    units = ["B", "KB", "MB", "GB", "TB", "PB"]  # mirrors SIZE_UNITS order
    text = size.strip()
    parts = text.split()
    if len(parts) == 2:
        num, unit = parts
    else:
        # Compact form: split at the first non-numeric character.
        i = 0
        while i < len(text) and (text[i].isdigit() or text[i] in ".+-"):
            i += 1
        num, unit = text[:i], text[i:].strip()
    return int(float(num) * (1024 ** units.index(unit)))
|
| 128 |
+
|
| 129 |
+
|
| 130 |
+
def get_readable_file_size(size_in_bytes):
    """Format a byte count as a compact human string, e.g. ``1.50KB``.

    Falsy input (0, None, etc.) yields ``"0B"``.
    """
    if not size_in_bytes:
        return "0B"

    value = size_in_bytes
    idx = 0
    max_idx = len(SIZE_UNITS) - 1
    while value >= 1024 and idx < max_idx:
        value /= 1024
        idx += 1

    return f"{value:.2f}{SIZE_UNITS[idx]}"
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
def get_readable_time(seconds: int):
    """Format a duration as a compact ``XdYhZmWs`` string.

    Zero-valued leading units are omitted entirely; 0 seconds yields "".
    """
    parts = []
    remaining = seconds
    for suffix, unit_seconds in (("d", 86400), ("h", 3600), ("m", 60), ("s", 1)):
        if remaining >= unit_seconds:
            amount, remaining = divmod(remaining, unit_seconds)
            parts.append(f"{int(amount)}{suffix}")
    return "".join(parts)
|
| 150 |
+
|
| 151 |
+
|
| 152 |
+
def get_raw_time(time_str: str) -> int:
    """Inverse of get_readable_time: parse ``XdYhZmWs`` into total seconds.

    Unrecognised text contributes nothing; an empty string yields 0.
    """
    seconds_per_unit = {"d": 86400, "h": 3600, "m": 60, "s": 1}
    total = 0
    for amount, unit in findall(r"(\d+)([dhms])", time_str):
        total += int(amount) * seconds_per_unit[unit]
    return total
|
| 158 |
+
|
| 159 |
+
|
| 160 |
+
def time_to_seconds(time_duration):
    """Parse "HH:MM:SS", "MM:SS" or "SS" into seconds (float).

    Any malformed or unexpected input yields 0.
    """
    try:
        fields = [float(p) for p in time_duration.split(":")]
    except Exception:
        return 0
    if len(fields) > 3:
        return 0
    # Left-pad with zero hours/minutes so we always have three fields.
    while len(fields) < 3:
        fields.insert(0, 0)
    hours, minutes, seconds = fields
    return hours * 3600 + minutes * 60 + seconds
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
def speed_string_to_bytes(size_text: str):
|
| 180 |
+
size = 0
|
| 181 |
+
size_text = size_text.lower()
|
| 182 |
+
if "k" in size_text:
|
| 183 |
+
size += float(size_text.split("k")[0]) * 1024
|
| 184 |
+
elif "m" in size_text:
|
| 185 |
+
size += float(size_text.split("m")[0]) * 1048576
|
| 186 |
+
elif "g" in size_text:
|
| 187 |
+
size += float(size_text.split("g")[0]) * 1073741824
|
| 188 |
+
elif "t" in size_text:
|
| 189 |
+
size += float(size_text.split("t")[0]) * 1099511627776
|
| 190 |
+
elif "b" in size_text:
|
| 191 |
+
size += float(size_text.split("b")[0])
|
| 192 |
+
return size
|
| 193 |
+
|
| 194 |
+
|
| 195 |
+
def get_progress_bar_string(pct):
    """Render a 12-slot progress bar such as ``[⬢⬢⬡⬡⬡⬡⬡⬡⬡⬡⬡⬡]``.

    *pct* may be a number or a string like "42.5%"; it is clamped to
    the 0–100 range before rendering.
    """
    value = float(str(pct).strip("%"))
    clamped = max(0, min(value, 100))
    filled = int(clamped // 8)
    return "[" + "⬢" * filled + "⬡" * (12 - filled) + "]"
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
async def get_readable_message(sid, is_user, page_no=1, status="All", page_step=1):
|
| 205 |
+
msg = ""
|
| 206 |
+
button = None
|
| 207 |
+
|
| 208 |
+
tasks = await get_specific_tasks(status, sid if is_user else None)
|
| 209 |
+
|
| 210 |
+
STATUS_LIMIT = Config.STATUS_LIMIT
|
| 211 |
+
tasks_no = len(tasks)
|
| 212 |
+
pages = (max(tasks_no, 1) + STATUS_LIMIT - 1) // STATUS_LIMIT
|
| 213 |
+
if page_no > pages:
|
| 214 |
+
page_no = (page_no - 1) % pages + 1
|
| 215 |
+
status_dict[sid]["page_no"] = page_no
|
| 216 |
+
elif page_no < 1:
|
| 217 |
+
page_no = pages - (abs(page_no) % pages)
|
| 218 |
+
status_dict[sid]["page_no"] = page_no
|
| 219 |
+
start_position = (page_no - 1) * STATUS_LIMIT
|
| 220 |
+
|
| 221 |
+
for index, task in enumerate(
|
| 222 |
+
tasks[start_position : STATUS_LIMIT + start_position], start=1
|
| 223 |
+
):
|
| 224 |
+
if status != "All":
|
| 225 |
+
tstatus = status
|
| 226 |
+
elif iscoroutinefunction(task.status):
|
| 227 |
+
tstatus = await task.status()
|
| 228 |
+
else:
|
| 229 |
+
tstatus = task.status()
|
| 230 |
+
msg += f"<b>{index + start_position}.</b> "
|
| 231 |
+
msg += f"<b><i>{escape(f'{task.name()}')}</i></b>"
|
| 232 |
+
if task.listener.subname:
|
| 233 |
+
msg += f"\n┖ <b>Sub Name</b> → <i>{task.listener.subname}</i>"
|
| 234 |
+
elapsed = time() - task.listener.message.date.timestamp()
|
| 235 |
+
|
| 236 |
+
msg += f"\n\n<b>Task By {task.listener.message.from_user.mention(style='html')} </b> ( #ID{task.listener.message.from_user.id} )"
|
| 237 |
+
if task.listener.is_super_chat:
|
| 238 |
+
msg += f" <i>[<a href='{task.listener.message.link}'>Link</a>]</i>"
|
| 239 |
+
|
| 240 |
+
if (
|
| 241 |
+
tstatus not in [MirrorStatus.STATUS_SEED, MirrorStatus.STATUS_QUEUEUP]
|
| 242 |
+
and task.listener.progress
|
| 243 |
+
):
|
| 244 |
+
progress = task.progress()
|
| 245 |
+
msg += f"\n┟ {get_progress_bar_string(progress)} <i>{progress}</i>"
|
| 246 |
+
if task.listener.subname:
|
| 247 |
+
subsize = f" / {get_readable_file_size(task.listener.subsize)}"
|
| 248 |
+
ac = len(task.listener.files_to_proceed)
|
| 249 |
+
count = f"( {task.listener.proceed_count} / {ac or '?'} )"
|
| 250 |
+
else:
|
| 251 |
+
subsize = ""
|
| 252 |
+
count = ""
|
| 253 |
+
msg += f"\n┠ <b>Processed</b> → <i>{task.processed_bytes()}{subsize} of {task.size()}</i>"
|
| 254 |
+
if count:
|
| 255 |
+
msg += f"\n┠ <b>Count:</b> → <b>{count}</b>"
|
| 256 |
+
msg += f"\n┠ <b>Status</b> → <b>{tstatus}</b>"
|
| 257 |
+
msg += f"\n┠ <b>Speed</b> → <i>{task.speed()}</i>"
|
| 258 |
+
msg += f"\n┠ <b>Time</b> → <i>{task.eta()} of {get_readable_time(elapsed + get_raw_time(task.eta()))} ( {get_readable_time(elapsed)} )</i>"
|
| 259 |
+
if tstatus == MirrorStatus.STATUS_DOWNLOAD and (
|
| 260 |
+
task.listener.is_torrent or task.listener.is_qbit
|
| 261 |
+
):
|
| 262 |
+
try:
|
| 263 |
+
msg += f"\n┠ <b>Seeders</b> → {task.seeders_num()} | <b>Leechers</b> → {task.leechers_num()}"
|
| 264 |
+
except Exception:
|
| 265 |
+
pass
|
| 266 |
+
# TODO: Add Connected Peers
|
| 267 |
+
elif tstatus == MirrorStatus.STATUS_SEED:
|
| 268 |
+
msg += f"\n┠ <b>Size</b> → <i>{task.size()}</i> | <b>Uploaded</b> → <i>{task.uploaded_bytes()}</i>"
|
| 269 |
+
msg += f"\n┠ <b>Status</b> → <b>{tstatus}</b>"
|
| 270 |
+
msg += f"\n┠ <b>Speed</b> → <i>{task.seed_speed()}</i>"
|
| 271 |
+
msg += f"\n┠ <b>Ratio</b> → <i>{task.ratio()}</i>"
|
| 272 |
+
msg += f"\n┠ <b>Time</b> → <i>{task.seeding_time()}</i> | <b>Elapsed</b> → <i>{get_readable_time(elapsed)}</i>"
|
| 273 |
+
else:
|
| 274 |
+
msg += f"\n┠ <b>Size</b> → <i>{task.size()}</i>"
|
| 275 |
+
msg += f"\n┠ <b>Engine</b> → <i>{task.engine}</i>"
|
| 276 |
+
msg += f"\n┠ <b>In Mode</b> → <i>{task.listener.mode[0]}</i>"
|
| 277 |
+
msg += f"\n┠ <b>Out Mode</b> → <i>{task.listener.mode[1]}</i>"
|
| 278 |
+
# TODO: Add Bt Sel
|
| 279 |
+
from ..telegram_helper.bot_commands import BotCommands
|
| 280 |
+
|
| 281 |
+
msg += f"\n<b>┖ Stop</b> → <i>/{BotCommands.CancelTaskCommand[1]}_{task.gid()}</i>\n\n"
|
| 282 |
+
|
| 283 |
+
if len(msg) == 0:
|
| 284 |
+
if status == "All":
|
| 285 |
+
return None, None
|
| 286 |
+
else:
|
| 287 |
+
msg = f"No Active {status} Tasks!\n\n"
|
| 288 |
+
|
| 289 |
+
msg += "⌬ <b><u>Bot Stats</u></b>"
|
| 290 |
+
buttons = ButtonMaker()
|
| 291 |
+
if not is_user:
|
| 292 |
+
buttons.data_button("📜 TStats", f"status {sid} ov", position="header")
|
| 293 |
+
if len(tasks) > STATUS_LIMIT:
|
| 294 |
+
msg += f"<b>Page:</b> {page_no}/{pages} | <b>Tasks:</b> {tasks_no} | <b>Step:</b> {page_step}\n"
|
| 295 |
+
buttons.data_button("<<", f"status {sid} pre", position="header")
|
| 296 |
+
buttons.data_button(">>", f"status {sid} nex", position="header")
|
| 297 |
+
if tasks_no > 30:
|
| 298 |
+
for i in [1, 2, 4, 6, 8, 10, 15]:
|
| 299 |
+
buttons.data_button(i, f"status {sid} ps {i}", position="footer")
|
| 300 |
+
if status != "All" or tasks_no > 20:
|
| 301 |
+
for label, status_value in list(STATUSES.items()):
|
| 302 |
+
if status_value != status:
|
| 303 |
+
buttons.data_button(label, f"status {sid} st {status_value}")
|
| 304 |
+
buttons.data_button("♻️ Refresh", f"status {sid} ref", position="header")
|
| 305 |
+
button = buttons.build_menu(8)
|
| 306 |
+
msg += f"\n┟ <b>CPU</b> → {cpu_percent()}% | <b>F</b> → {get_readable_file_size(disk_usage(DOWNLOAD_DIR).free)} [{round(100 - disk_usage(DOWNLOAD_DIR).percent, 1)}%]"
|
| 307 |
+
msg += f"\n┖ <b>RAM</b> → {virtual_memory().percent}% | <b>UP</b> → {get_readable_time(time() - bot_start_time)}"
|
| 308 |
+
return msg, button
|
bot/helper/ext_utils/task_manager.py
ADDED
|
@@ -0,0 +1,296 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from asyncio import Event
|
| 2 |
+
from time import time
|
| 3 |
+
|
| 4 |
+
from ... import (
|
| 5 |
+
LOGGER,
|
| 6 |
+
bot_cache,
|
| 7 |
+
non_queued_dl,
|
| 8 |
+
non_queued_up,
|
| 9 |
+
queue_dict_lock,
|
| 10 |
+
queued_dl,
|
| 11 |
+
queued_up,
|
| 12 |
+
user_data,
|
| 13 |
+
)
|
| 14 |
+
from ...core.config_manager import Config
|
| 15 |
+
from ..mirror_leech_utils.gdrive_utils.search import GoogleDriveSearch
|
| 16 |
+
from ..telegram_helper.filters import CustomFilters
|
| 17 |
+
from ..telegram_helper.tg_utils import check_botpm, forcesub, verify_token
|
| 18 |
+
from .bot_utils import get_telegraph_list, sync_to_async, safe_int
|
| 19 |
+
from .files_utils import get_base_name, check_storage_threshold
|
| 20 |
+
from .links_utils import is_gdrive_id
|
| 21 |
+
from .status_utils import get_readable_time, get_readable_file_size, get_specific_tasks
|
| 22 |
+
|
| 23 |
+
|
| 24 |
+
async def stop_duplicate_check(listener):
    """Search Google Drive for a copy of the task's target name.

    Returns ``(False, None)`` when the check does not apply or nothing
    was found, otherwise ``(message, telegraph_button)`` listing the
    duplicates already present in the destination.
    """
    # Only meaningful for GDrive uploads with stop_duplicate enabled and
    # no user file selection / shared same-dir grouping.
    if (
        isinstance(listener.up_dest, int)
        or listener.is_leech
        or listener.select
        or not is_gdrive_id(listener.up_dest)
        or (listener.up_dest.startswith("mtp:") and listener.stop_duplicate)
        or not listener.stop_duplicate
        or listener.same_dir
    ):
        return False, None

    name = listener.name
    LOGGER.info(f"Checking File/Folder if already in Drive: {name}")

    # Mirror the rename the upload itself will perform, so the search
    # matches what would actually land in Drive.
    if listener.compress:
        name = f"{name}.zip"
    elif listener.extract:
        try:
            name = get_base_name(name)
        except Exception:
            # Not an archive we can derive a base name from — skip check.
            name = None

    if name is not None:
        # drive_list is blocking; run it in a thread.
        telegraph_content, contents_no = await sync_to_async(
            GoogleDriveSearch(stop_dup=True, no_multi=listener.is_clone).drive_list,
            name,
            listener.up_dest,
            listener.user_id,
        )
        if telegraph_content:
            msg = f"File/Folder is already available in Drive.\nHere are {contents_no} list results:"
            button = await get_telegraph_list(telegraph_content)
            return msg, button

    return False, None
|
| 60 |
+
|
| 61 |
+
|
| 62 |
+
async def check_running_tasks(listener, state="dl"):
|
| 63 |
+
all_limit = safe_int(Config.QUEUE_ALL)
|
| 64 |
+
state_limit = (
|
| 65 |
+
safe_int(Config.QUEUE_DOWNLOAD)
|
| 66 |
+
if state == "dl"
|
| 67 |
+
else safe_int(Config.QUEUE_UPLOAD)
|
| 68 |
+
)
|
| 69 |
+
event = None
|
| 70 |
+
is_over_limit = False
|
| 71 |
+
async with queue_dict_lock:
|
| 72 |
+
if state == "up" and listener.mid in non_queued_dl:
|
| 73 |
+
non_queued_dl.remove(listener.mid)
|
| 74 |
+
if (
|
| 75 |
+
(all_limit or state_limit)
|
| 76 |
+
and not listener.force_run
|
| 77 |
+
and not (listener.force_upload and state == "up")
|
| 78 |
+
and not (listener.force_download and state == "dl")
|
| 79 |
+
):
|
| 80 |
+
dl_count = len(non_queued_dl)
|
| 81 |
+
up_count = len(non_queued_up)
|
| 82 |
+
t_count = dl_count if state == "dl" else up_count
|
| 83 |
+
is_over_limit = (
|
| 84 |
+
all_limit
|
| 85 |
+
and dl_count + up_count >= all_limit
|
| 86 |
+
and (not state_limit or t_count >= state_limit)
|
| 87 |
+
) or (state_limit and t_count >= state_limit)
|
| 88 |
+
if is_over_limit:
|
| 89 |
+
event = Event()
|
| 90 |
+
if state == "dl":
|
| 91 |
+
queued_dl[listener.mid] = event
|
| 92 |
+
else:
|
| 93 |
+
queued_up[listener.mid] = event
|
| 94 |
+
if not is_over_limit:
|
| 95 |
+
if state == "up":
|
| 96 |
+
non_queued_up.add(listener.mid)
|
| 97 |
+
else:
|
| 98 |
+
non_queued_dl.add(listener.mid)
|
| 99 |
+
|
| 100 |
+
return is_over_limit, event
|
| 101 |
+
|
| 102 |
+
|
| 103 |
+
async def start_dl_from_queued(mid: int):
    """Release download *mid* from the queue and mark it as running."""
    queued_dl[mid].set()
    queued_dl.pop(mid)
    non_queued_dl.add(mid)
|
| 107 |
+
|
| 108 |
+
|
| 109 |
+
async def start_up_from_queued(mid: int):
    """Release upload *mid* from the queue and mark it as running."""
    queued_up[mid].set()
    queued_up.pop(mid)
    non_queued_up.add(mid)
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
async def start_from_queued():
    """Release queued tasks wherever free slots exist.

    Honours QUEUE_ALL (combined cap) plus QUEUE_UPLOAD / QUEUE_DOWNLOAD;
    when the combined cap is active, uploads are released before
    downloads.  With no cap configured, everything queued is released.

    Consistency fix: the per-state branches now normalise the configured
    limits via safe_int(), matching check_running_tasks and the QUEUE_ALL
    branch (previously they read the raw Config values).
    """
    if all_limit := safe_int(Config.QUEUE_ALL):
        dl_limit = safe_int(Config.QUEUE_DOWNLOAD)
        up_limit = safe_int(Config.QUEUE_UPLOAD)
        async with queue_dict_lock:
            dl = len(non_queued_dl)
            up = len(non_queued_up)
            all_ = dl + up
            if all_ < all_limit:
                # Free combined slots to hand out.
                f_tasks = all_limit - all_
                if queued_up and (not up_limit or up < up_limit):
                    for index, mid in enumerate(list(queued_up.keys()), start=1):
                        await start_up_from_queued(mid)
                        f_tasks -= 1
                        if f_tasks == 0 or (up_limit and index >= up_limit - up):
                            break
                if queued_dl and (not dl_limit or dl < dl_limit) and f_tasks != 0:
                    for index, mid in enumerate(list(queued_dl.keys()), start=1):
                        await start_dl_from_queued(mid)
                        if (dl_limit and index >= dl_limit - dl) or index == f_tasks:
                            break
        return

    if up_limit := safe_int(Config.QUEUE_UPLOAD):
        async with queue_dict_lock:
            up = len(non_queued_up)
            if queued_up and up < up_limit:
                f_tasks = up_limit - up
                for index, mid in enumerate(list(queued_up.keys()), start=1):
                    await start_up_from_queued(mid)
                    if index == f_tasks:
                        break
    else:
        # No upload limit at all: drain the upload queue.
        async with queue_dict_lock:
            if queued_up:
                for mid in list(queued_up.keys()):
                    await start_up_from_queued(mid)

    if dl_limit := safe_int(Config.QUEUE_DOWNLOAD):
        async with queue_dict_lock:
            dl = len(non_queued_dl)
            if queued_dl and dl < dl_limit:
                f_tasks = dl_limit - dl
                for index, mid in enumerate(list(queued_dl.keys()), start=1):
                    await start_dl_from_queued(mid)
                    if index == f_tasks:
                        break
    else:
        # No download limit at all: drain the download queue.
        async with queue_dict_lock:
            if queued_dl:
                for mid in list(queued_dl.keys()):
                    await start_dl_from_queued(mid)
|
| 167 |
+
|
| 168 |
+
|
| 169 |
+
async def limit_checker(listener, yt_playlist=0):
    """Check the task against all configured size/count limits.

    Returns an HTML breach message when a limit applies, otherwise None
    (implicitly).  Sudo users always pass.  ``yt_playlist`` is the
    playlist item count for yt-dlp tasks (0 when not applicable).
    """
    LOGGER.info("Checking Size Limit...")
    if await CustomFilters.sudo("", listener.message):
        LOGGER.info("SUDO User. Skipping Size Limit...")
        return

    user_id, size = listener.user_id, listener.size

    async def recurr_limits(limits):
        # limits: iterable of (applies?, Config attribute name, label).
        nonlocal yt_playlist, size
        limit_exceeded = ""
        for condition, attr, name in limits:
            if condition and (limit := getattr(Config, attr, 0)):
                if attr == "PLAYLIST_LIMIT":
                    # Playlist limit is an item count, not a byte size.
                    if yt_playlist >= limit:
                        limit_exceeded = f"┠ <b>{name} Limit Count</b> → {limit}"
                else:
                    byte_limit = limit * 1024**3  # limits configured in GiB
                    if size >= byte_limit:
                        limit_exceeded = f"┠ <b>{name} Limit</b> → {get_readable_file_size(byte_limit)}"

                # NOTE(review): this log fires and the loop stops at the
                # first *configured* matching limit even when it was not
                # actually breached — confirm whether that is intended.
                LOGGER.info(
                    f"{name} Limit Breached: {listener.name} & Size: {get_readable_file_size(size)}"
                )
                break
        return limit_exceeded

    limits = [
        (listener.is_torrent or listener.is_qbit, "TORRENT_LIMIT", "Torrent"),
        (listener.is_mega, "MEGA_LIMIT", "Mega"),
        (listener.is_gdrive, "GD_DL_LIMIT", "GDriveDL"),
        (listener.is_clone, "CLONE_LIMIT", "Clone"),
        (listener.is_jd, "JD_LIMIT", "JDownloader"),
        (listener.is_nzb, "NZB_LIMIT", "SABnzbd"),
        (listener.is_rclone, "RC_DL_LIMIT", "RCloneDL"),
        (listener.is_ytdlp, "YTDLP_LIMIT", "YT-DLP"),
        (bool(yt_playlist), "PLAYLIST_LIMIT", "Playlist"),
        (True, "DIRECT_LIMIT", "Direct"),
    ]
    limit_exceeded = await recurr_limits(limits)

    # Post-processing limits are checked only when no source limit hit.
    if not limit_exceeded:
        extra_limits = [
            (listener.is_leech, "LEECH_LIMIT", "Leech"),
            (listener.compress, "ARCHIVE_LIMIT", "Archive"),
            (listener.extract, "EXTRACT_LIMIT", "Extract"),
        ]
        limit_exceeded = await recurr_limits(extra_limits)

    # Free-disk threshold check (skipped for clones, which use no disk).
    if Config.STORAGE_LIMIT and not listener.is_clone:
        limit = Config.STORAGE_LIMIT * 1024**3
        if not await check_storage_threshold(
            size, limit, any([listener.compress, listener.extract])
        ):
            limit_exceeded = f"┠ <b>Threshold Storage Limit</b> → {get_readable_file_size(limit)}"

    if limit_exceeded:
        return limit_exceeded + f"\n┖ <b>Task By</b> → {listener.tag}"
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
"""
|
| 230 |
+
class UsageChecks: # TODO: Dynamic Check for All Task
|
| 231 |
+
|
| 232 |
+
class DailyUsageChecks:
|
| 233 |
+
"""
|
| 234 |
+
|
| 235 |
+
|
| 236 |
+
async def user_interval_check(user_id):
    """Return the user's remaining cooldown in seconds, or None.

    When the cooldown has expired (or no prior stamp exists) the user's
    timestamp is refreshed to now and None is returned.
    """
    intervals = bot_cache.setdefault("time_interval", {})
    last_stamp = intervals.get(user_id, False)
    cooldown = Config.USER_TIME_INTERVAL
    if last_stamp and (time() - last_stamp) < cooldown:
        return cooldown - (time() - last_stamp)
    intervals[user_id] = time()
    return None
|
| 244 |
+
|
| 245 |
+
|
| 246 |
+
async def pre_task_check(message):
    """Run every pre-flight gate for a new task request.

    Checks (in order): sudo/RSS bypass, force-subscribe, bot-PM
    reachability, per-user time interval, bot-wide and per-user
    concurrent task caps, and token verification.  Returns
    ``(warning_html, buttons)`` when the task must be blocked, otherwise
    ``(None, None)``.  Sudo users and the RSS chat short-circuit with an
    empty message list.
    """
    LOGGER.info("Running Pre Task Checks ...")
    msg = []
    button = None
    if await CustomFilters.sudo("", message):
        return msg, button
    user_id = (message.from_user or message.sender_chat).id
    if Config.RSS_CHAT and user_id == int(Config.RSS_CHAT):
        return msg, button
    user_dict = user_data.get(user_id, {})
    # Group-only checks: force-sub and PM reachability.
    if message.chat.type != message.chat.type.BOT:
        if ids := Config.FORCE_SUB_IDS:
            _msg, button = await forcesub(message, ids, button)
            if _msg:
                msg.append(_msg)
        if Config.BOT_PM or user_dict.get("BOT_PM"):  # or config_dict['SAFE_MODE']:
            _msg, button = await check_botpm(message, button)
            if _msg:
                msg.append(_msg)
    if (uti := Config.USER_TIME_INTERVAL) and (
        ut := await user_interval_check(user_id)
    ):
        msg.append(
            f"┠ <b>Waiting Time</b> → {get_readable_time(ut)}\n┠ <i>User's Time Interval Restrictions</i> → {get_readable_time(uti)}"
        )
    bmax_tasks = safe_int(user_dict.get("bmax_tasks", Config.BOT_MAX_TASKS))
    if bmax_tasks > 0 and len(await get_specific_tasks("All", False)) >= bmax_tasks:
        msg.append(
            f"┠ Max Concurrent Bot's Tasks Limit exceeded.\n┠ Bot Tasks Limit : {bmax_tasks} task"
        )

    maxtask = safe_int(user_dict.get("maxtask", Config.USER_MAX_TASKS))
    if maxtask > 0 and len(await get_specific_tasks("All", user_id)) >= maxtask:
        msg.append(
            f"┠ Max Concurrent User's Task(s) Limit exceeded! \n┠ User Task Limit : {maxtask} tasks"
        )

    token_msg, button = await verify_token(user_id, button)
    if token_msg is not None:
        msg.append(token_msg)

    if msg:
        # NOTE(review): from_user may be None for channel posts
        # (sender_chat path above) — confirm upstream guards this.
        username = message.from_user.mention
        final_msg = f"⌬ <b>Task Checks :</b>\n│\n┟ <b>Name</b> → {username}\n┃\n"
        # Idiom fix: the previous enumerate() index was never used.
        final_msg += "".join(f"{m_part}\n" for m_part in msg)
        if button is not None:
            button = button.build_menu(2)
        return final_msg, button

    return None, None
|
bot/helper/ext_utils/telegraph_helper.py
ADDED
|
@@ -0,0 +1,85 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from asyncio import sleep
|
| 2 |
+
from secrets import token_hex
|
| 3 |
+
from telegraph.aio import Telegraph
|
| 4 |
+
from telegraph.exceptions import RetryAfterError
|
| 5 |
+
|
| 6 |
+
from ... import LOGGER
|
| 7 |
+
from ...core.config_manager import Config
|
| 8 |
+
|
| 9 |
+
|
| 10 |
+
class TelegraphHelper:
    """Thin async wrapper around the Telegraph (graph.org) API.

    Handles account creation, page create/edit, and automatic retry on
    flood control (RetryAfterError) by sleeping for the advised interval
    and recursing.
    """

    def __init__(self, author_name=None, author_url=None):
        # graph.org mirrors the telegra.ph API and is used as the backend.
        self._telegraph = Telegraph(domain="graph.org")
        self._author_name = author_name
        self._author_url = author_url

    async def create_account(self):
        """Create a throwaway Telegraph account with a random short name."""
        LOGGER.info("Creating Telegraph Account")
        try:
            await self._telegraph.create_account(
                short_name=token_hex(5),  # random 10-hex-char account name
                author_name=self._author_name,
                author_url=self._author_url,
            )
        except Exception as e:
            # Non-fatal: page creation will simply fail later if no account.
            LOGGER.error(f"Failed to create Telegraph Account: {e}")

    async def create_page(self, title, content):
        """Create a page; on flood control, sleep and retry recursively."""
        try:
            return await self._telegraph.create_page(
                title=title,
                author_name=self._author_name,
                author_url=self._author_url,
                html_content=content,
            )
        except RetryAfterError as st:
            LOGGER.warning(
                f"Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds."
            )
            await sleep(st.retry_after)
            return await self.create_page(title, content)

    async def edit_page(self, path, title, content):
        """Edit an existing page; on flood control, sleep and retry recursively."""
        try:
            return await self._telegraph.edit_page(
                path=path,
                title=title,
                author_name=self._author_name,
                author_url=self._author_url,
                html_content=content,
            )
        except RetryAfterError as st:
            LOGGER.warning(
                f"Telegraph Flood control exceeded. I will sleep for {st.retry_after} seconds."
            )
            await sleep(st.retry_after)
            return await self.edit_page(path, title, content)

    async def edit_telegraph(self, path, telegraph_content):
        """Re-edit a chain of already-created pages, wiring Prev/Next links.

        ``path`` is the ordered list of page paths; ``telegraph_content`` the
        matching list of HTML bodies (assumed equal length — TODO confirm at
        the call sites).
        """
        nxt_page = 1
        prev_page = 0
        num_of_path = len(path)
        for content in telegraph_content:
            if nxt_page == 1:
                # First page only gets a "Next" link.
                content += (
                    f'<b><a href="https://telegra.ph/{path[nxt_page]}">Next</a></b>'
                )
                nxt_page += 1
            else:
                if prev_page <= num_of_path:
                    content += f'<b><a href="https://telegra.ph/{path[prev_page]}">Prev</a></b>'
                    prev_page += 1
                if nxt_page < num_of_path:
                    content += f'<b> | <a href="https://telegra.ph/{path[nxt_page]}">Next</a></b>'
                    nxt_page += 1
            # After the increments, prev_page indexes the page this content
            # belongs to (0 on the first iteration).
            await self.edit_page(
                path=path[prev_page],
                title="WZML-X Torrent Search",
                content=content,
            )
        return
| 81 |
+
|
| 82 |
+
|
| 83 |
+
# Shared module-level Telegraph client used across the bot.
telegraph = TelegraphHelper(Config.AUTHOR_NAME, Config.AUTHOR_URL)
|
bot/helper/languages/__init__.py
ADDED
|
@@ -0,0 +1,43 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from importlib import import_module
|
| 2 |
+
from os import listdir
|
| 3 |
+
|
| 4 |
+
from ...core.config_manager import Config
|
| 5 |
+
|
| 6 |
+
LOCALES_DIR = "bot/helper/languages"
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
class Language:
    """Resolves translated strings for a user, falling back to the default.

    Locale modules (e.g. ``en``, ``bn``) live in ``bot/helper/languages`` and
    are imported lazily, once, into the class-level ``_modules`` cache.
    ``_user_langs`` is a class-level map of user_id -> chosen language code,
    so a user's choice persists across ``Language`` instances in this process.
    """

    # Class-level caches shared by every instance.
    _modules = {}  # lang_code -> imported locale module
    _user_langs = {}  # user_id -> lang_code selected by that user

    def __init__(self, lang_code=None, user_id=None):
        self.load_translations()
        lang_code = lang_code or Config.DEFAULT_LANG

        if user_id:
            # NOTE(review): stores lang_code even when it is not a known
            # locale; __getattr__ still falls back to the default module.
            self._user_langs[user_id] = lang_code
        self.lang_code = self._user_langs.get(
            user_id, lang_code if lang_code in self._modules else Config.DEFAULT_LANG
        )

    @classmethod
    def load_translations(cls):
        """Import every ``*.py`` locale module once and cache it.

        Returns the lang_code -> module mapping (cached after first call).
        """
        if cls._modules:
            return cls._modules

        cls._modules = {}
        for file in listdir(LOCALES_DIR):
            if file.endswith(".py") and file != "__init__.py":
                lang_code = file.split(".")[0]
                cls._modules[lang_code] = import_module(
                    f"bot.helper.languages.{lang_code}"
                )
        return cls._modules

    def __getattr__(self, key):
        # Only invoked for attributes NOT found normally, so never for
        # lang_code (set on the instance in __init__) — no recursion risk.
        lang_module = self._modules.get(
            self.lang_code, self._modules[Config.DEFAULT_LANG]
        )
        # Fall back to the default locale's string, then to the key itself.
        return getattr(
            lang_module, key, getattr(self._modules[Config.DEFAULT_LANG], key, key)
        )
|
bot/helper/languages/bn.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# Bengali (bn) locale strings; keys mirror those in en.py.
# /start greeting; {cmd} is substituted with the bot's help command name.
START_MSG = """
এই বট লিংক | টেলিগ্রাম ফাইল | টরেন্ট | NZB | Rclone-ক্লাউড থেকে যেকোনো Rclone ক্লাউড, গুগল ড্রাইভ বা টেলিগ্রামে মিরর করতে পারে।
উপলব্ধ কমান্ডের তালিকা পেতে /{cmd} লিখুন।
"""
# Inline-button labels under the /start message ("Git Repo", "Updates").
START_BUTTON1 = "গিট রিপো"
START_BUTTON2 = "আপডেট"
|
bot/helper/languages/en.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# English (en) locale strings — the default locale; keys mirror bn.py.
# /start greeting; {cmd} is substituted with the bot's help command name.
START_MSG = """
This bot can mirror from links|tgfiles|torrents|nzb|rclone-cloud to any rclone cloud, Google Drive or to telegram.
Type /{cmd} to get a list of available commands
"""
# Inline-button labels under the /start message.
START_BUTTON1 = "Git Repo"
START_BUTTON2 = "Updates"
|
bot/helper/listeners/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
|
bot/helper/listeners/aria2_listener.py
ADDED
|
@@ -0,0 +1,199 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from aiofiles.os import remove, path as aiopath
|
| 2 |
+
from asyncio import sleep, TimeoutError
|
| 3 |
+
from time import time
|
| 4 |
+
from contextlib import suppress
|
| 5 |
+
from aiohttp.client_exceptions import ClientError
|
| 6 |
+
|
| 7 |
+
from ... import task_dict_lock, task_dict, LOGGER, intervals
|
| 8 |
+
from ...core.config_manager import Config
|
| 9 |
+
from ...core.torrent_manager import TorrentManager, is_metadata, aria2_name
|
| 10 |
+
from ..ext_utils.bot_utils import bt_selection_buttons
|
| 11 |
+
from ..ext_utils.files_utils import clean_unwanted
|
| 12 |
+
from ..ext_utils.status_utils import get_task_by_gid
|
| 13 |
+
from ..ext_utils.task_manager import stop_duplicate_check, limit_checker
|
| 14 |
+
from ..mirror_leech_utils.status_utils.aria2_status import Aria2Status
|
| 15 |
+
from ..telegram_helper.message_utils import (
|
| 16 |
+
send_message,
|
| 17 |
+
delete_message,
|
| 18 |
+
update_status_message,
|
| 19 |
+
)
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
async def _on_download_started(api, data):
    """aria2 onDownloadStart hook.

    Skips follower downloads (``follow-torrent == "false"``), shows a wait
    message while torrent metadata resolves, then runs duplicate and
    size-limit checks once the real download is registered as a task.
    Any RPC failure is silently suppressed (best-effort hook).
    """
    gid = data["params"][0]["gid"]
    with suppress(TimeoutError, ClientError, Exception):
        download, options = await api.tellStatus(gid), await api.getOption(gid)
        if options.get("follow-torrent", "") == "false":
            # Child of a .torrent fetch; the followed download is handled separately.
            return
        if is_metadata(download):
            LOGGER.info(f"onDownloadStarted: {gid} METADATA")
            await sleep(1)
            if task := await get_task_by_gid(gid):
                task.listener.is_torrent = True
                if task.listener.select:
                    metamsg = "Downloading Metadata, wait then you can select files. Use torrent file to avoid this wait."
                    meta = await send_message(task.listener.message, metamsg)
                    # Poll until the metadata download is removed or spawns
                    # the real download ("followedBy").
                    while True:
                        await sleep(0.5)
                        if download.get("status", "") == "removed" or download.get(
                            "followedBy", []
                        ):
                            await delete_message(meta)
                            break
                        download = await api.tellStatus(gid)
            return
        else:
            LOGGER.info(f"onDownloadStarted: {aria2_name(download)} - Gid: {gid}")
            await sleep(1)

        # Give the task dict a moment to register the entry, then validate.
        await sleep(2)
        if task := await get_task_by_gid(gid):
            download = await api.tellStatus(gid)
            if "bittorrent" in download:
                task.listener.is_torrent = True

            task.listener.name = aria2_name(download)
            msg, button = await stop_duplicate_check(task.listener)
            if msg:
                # Duplicate found in the drive: drop the download and report.
                await TorrentManager.aria2_remove(download)
                await task.listener.on_download_error(msg, button)
                return

            task.listener.size = int(download.get("totalLength", "0"))
            mmsg = await limit_checker(task.listener)
            if mmsg:
                # A configured size/limit restriction was hit.
                await TorrentManager.aria2_remove(download)
                await task.listener.on_download_error(mmsg, is_limit=True)
                return
|
| 68 |
+
|
| 69 |
+
|
| 70 |
+
async def _on_download_complete(api, data):
    """aria2 onDownloadComplete hook.

    Handles the gid hand-off when a .torrent/metadata download spawns the
    real one ("followedBy"), pauses for web file-selection when enabled,
    stops seeding-only tasks, and finally fires on_download_complete and
    removes the finished entry from aria2.
    """
    try:
        gid = data["params"][0]["gid"]
        download, options = await api.tellStatus(gid), await api.getOption(gid)
        if options.get("follow-torrent", "") == "false":
            return
    except (TimeoutError, ClientError, Exception) as e:
        LOGGER.error(f"onDownloadComplete: {e}")
        return
    if download.get("followedBy", []):
        # Metadata finished; the real payload continues under a new gid.
        new_gid = download.get("followedBy", [])[0]
        LOGGER.info(f"Gid changed from {gid} to {new_gid}")
        if task := await get_task_by_gid(new_gid):
            task.listener.is_torrent = True
            if Config.BASE_URL and task.listener.select:
                if not task.queued:
                    # Pause so the user can pick files via the web selector.
                    await api.forcePause(new_gid)
                SBUTTONS = bt_selection_buttons(new_gid)
                msg = "Download paused. Choose files then press Done Selecting button to start downloading."
                await send_message(task.listener.message, msg, SBUTTONS)
    elif "bittorrent" in download:
        if task := await get_task_by_gid(gid):
            task.listener.is_torrent = True
            if hasattr(task, "seeding") and task.seeding:
                # Seed-phase completion: stop seeding and report the stats.
                LOGGER.info(
                    f"Cancelling Seed: {aria2_name(download)} onDownloadComplete"
                )
                await TorrentManager.aria2_remove(download)
                await task.listener.on_upload_error(
                    f"Seeding stopped with Ratio: {task.ratio()} and Time: {task.seeding_time()}"
                )
    else:
        LOGGER.info(f"onDownloadComplete: {aria2_name(download)} - Gid: {gid}")
        if task := await get_task_by_gid(gid):
            await task.listener.on_download_complete()
            if intervals["stopAll"]:
                # Bot is shutting everything down; skip the removal RPC.
                return
            await TorrentManager.aria2_remove(download)
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
async def _on_bt_download_complete(api, data):
    """aria2 onBtDownloadComplete hook (torrent payload finished).

    Deletes files the user de-selected, configures or pauses seeding
    according to the listener's ``seed`` flag, fires on_download_complete,
    then either keeps the task alive as a seeding entry or removes it.
    """
    gid = data["params"][0]["gid"]
    await sleep(1)
    download = await api.tellStatus(gid)
    LOGGER.info(f"onBtDownloadComplete: {aria2_name(download)} - Gid: {gid}")
    if task := await get_task_by_gid(gid):
        task.listener.is_torrent = True
        if task.listener.select:
            # Remove de-selected files from disk, then tidy leftover dirs.
            res = download.get("files", [])
            for file_o in res:
                f_path = file_o.get("path", "")
                if file_o.get("selected", "") != "true" and await aiopath.exists(
                    f_path
                ):
                    with suppress(Exception):
                        await remove(f_path)
            await clean_unwanted(download.get("dir", ""))
        if task.listener.seed:
            try:
                # Lift the upload cap so seeding runs unrestricted.
                await api.changeOption(gid, {"max-upload-limit": "0"})
            except (TimeoutError, ClientError, Exception) as e:
                LOGGER.error(
                    f"{e} You are not able to seed because you added global option seed-time=0 without adding specific seed_time for this torrent GID: {gid}"
                )
        else:
            try:
                await api.forcePause(gid)
            except (TimeoutError, ClientError, Exception) as e:
                LOGGER.error(f"onBtDownloadComplete: {e} GID: {gid}")
        await task.listener.on_download_complete()
        if intervals["stopAll"]:
            return
        # Re-read status: seeding may already have finished meanwhile.
        download = await api.tellStatus(gid)
        if (
            task.listener.seed
            and download.get("status", "") == "complete"
            and await get_task_by_gid(gid)
        ):
            # Seed limits reached on their own — stop and report stats.
            LOGGER.info(f"Cancelling Seed: {aria2_name(download)}")
            await TorrentManager.aria2_remove(download)
            await task.listener.on_upload_error(
                f"Seeding stopped with Ratio: {task.ratio()} and Time: {task.seeding_time()}"
            )
        elif (
            task.listener.seed
            and download.get("status", "") == "complete"
            and not await get_task_by_gid(gid)
        ):
            # Task already deregistered elsewhere; nothing left to do.
            pass
        elif task.listener.seed and not task.listener.is_cancelled:
            # Keep the entry alive in task_dict as a seeding status.
            async with task_dict_lock:
                if task.listener.mid not in task_dict:
                    await TorrentManager.aria2_remove(download)
                    return
                task_dict[task.listener.mid] = Aria2Status(task.listener, gid, True)
                task_dict[task.listener.mid].start_time = time()
            LOGGER.info(f"Seeding started: {aria2_name(download)} - Gid: {gid}")
            await update_status_message(task.listener.message.chat.id)
        else:
            await TorrentManager.aria2_remove(download)
|
| 170 |
+
|
| 171 |
+
|
| 172 |
+
async def _on_download_stopped(_, data):
    """aria2 onDownloadStop hook: report the task as a dead torrent."""
    stopped_gid = data["params"][0]["gid"]
    # Grace period so late status updates settle before reporting failure.
    await sleep(4)
    task = await get_task_by_gid(stopped_gid)
    if task:
        await task.listener.on_download_error("Dead torrent!")
|
| 177 |
+
|
| 178 |
+
|
| 179 |
+
async def _on_download_error(api, data):
    """aria2 onDownloadError hook: fetch the error text and notify the task."""
    gid = data["params"][0]["gid"]
    await sleep(1)
    LOGGER.info(f"onDownloadError: {gid}")
    error = "None"
    # Status lookup can fail for already-purged gids; keep the default text.
    with suppress(TimeoutError, ClientError, Exception):
        download, options = await api.tellStatus(gid), await api.getOption(gid)
        error = download.get("errorMessage", "")
        LOGGER.info(f"Download Error: {error}")
        if options.get("follow-torrent", "") == "false":
            # Follower download: errors are reported on the parent task.
            return
    if task := await get_task_by_gid(gid):
        await task.listener.on_download_error(error)
|
| 192 |
+
|
| 193 |
+
|
| 194 |
+
def add_aria2_callbacks():
    """Register this module's event handlers on the shared aria2 RPC client."""
    registrations = (
        (TorrentManager.aria2.onBtDownloadComplete, _on_bt_download_complete),
        (TorrentManager.aria2.onDownloadComplete, _on_download_complete),
        (TorrentManager.aria2.onDownloadError, _on_download_error),
        (TorrentManager.aria2.onDownloadStart, _on_download_started),
        (TorrentManager.aria2.onDownloadStop, _on_download_stopped),
    )
    for register, handler in registrations:
        register(handler)
|
bot/helper/listeners/direct_listener.py
ADDED
|
@@ -0,0 +1,86 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from asyncio import sleep, TimeoutError
|
| 2 |
+
from aiohttp.client_exceptions import ClientError
|
| 3 |
+
|
| 4 |
+
from ... import LOGGER
|
| 5 |
+
from ...core.torrent_manager import TorrentManager, aria2_name
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
class DirectListener:
    """Drives a multi-file "direct link" download through aria2, one file
    at a time, reporting progress and completion back to the task listener.

    Each URL is added to aria2 at queue position 0, polled until it
    completes or errors, then removed from aria2 before the next file.
    """

    def __init__(self, path, listener, a2c_opt):
        self.listener = listener
        self._path = path  # root download directory
        self._a2c_opt = a2c_opt  # base aria2c options, mutated per file
        self._proc_bytes = 0  # bytes from files already finished
        self._failed = 0  # number of files that failed to download
        self.download_task = None  # aria2 status dict of the in-flight file
        # Defined here (not lazily in download()) so the attribute always exists.
        self.is_downloading = False
        self.name = self.listener.name

    @property
    def processed_bytes(self):
        """Total bytes done: finished files plus the current file's progress."""
        if self.download_task:
            return self._proc_bytes + int(
                self.download_task.get("completedLength", "0")
            )
        return self._proc_bytes

    @property
    def speed(self):
        """Current aria2 download speed in bytes/s (0 when idle)."""
        return (
            int(self.download_task.get("downloadSpeed", "0"))
            if self.download_task
            else 0
        )

    async def download(self, contents):
        """Download every entry of ``contents`` sequentially.

        Each entry is a dict with "url", "filename" and optional "path"
        (sub-directory). Fires on_download_complete when at least one file
        succeeded, on_download_error when all of them failed.
        """
        self.is_downloading = True
        for content in contents:
            if self.listener.is_cancelled:
                break
            if content["path"]:
                self._a2c_opt["dir"] = f"{self._path}/{content['path']}"
            else:
                self._a2c_opt["dir"] = self._path
            filename = content["filename"]
            self._a2c_opt["out"] = filename
            try:
                gid = await TorrentManager.aria2.addUri(
                    uris=[content["url"]], options=self._a2c_opt, position=0
                )
            except Exception as e:
                # Exception already covers TimeoutError and ClientError;
                # include the filename instead of "(unknown)".
                self._failed += 1
                LOGGER.error(f"Unable to download {filename} due to: {e}")
                continue
            self.download_task = await TorrentManager.aria2.tellStatus(gid)
            # Poll this file until it errors, completes, or is cancelled.
            while True:
                if self.listener.is_cancelled:
                    if self.download_task:
                        await TorrentManager.aria2_remove(self.download_task)
                    break
                self.download_task = await TorrentManager.aria2.tellStatus(gid)
                if error_message := self.download_task.get("errorMessage"):
                    self._failed += 1
                    LOGGER.error(
                        f"Unable to download {aria2_name(self.download_task)} due to: {error_message}"
                    )
                    await TorrentManager.aria2_remove(self.download_task)
                    break
                elif self.download_task.get("status", "") == "complete":
                    # Bank the finished size before clearing download_task.
                    self._proc_bytes += int(self.download_task.get("totalLength", "0"))
                    await TorrentManager.aria2_remove(self.download_task)
                    break
                await sleep(1)
            self.download_task = None
        if self.listener.is_cancelled:
            return
        if self._failed == len(contents):
            await self.listener.on_download_error("All files are failed to download!")
            return
        await self.listener.on_download_complete()
        return

    async def cancel_task(self):
        """User-initiated cancel: mark cancelled, notify, purge from aria2."""
        self.listener.is_cancelled = True
        LOGGER.info(f"Cancelling Download: {self.listener.name}")
        await self.listener.on_download_error("Download Cancelled by User!")
        if self.download_task:
            await TorrentManager.aria2_remove(self.download_task)
|
bot/helper/listeners/jdownloader_listener.py
ADDED
|
@@ -0,0 +1,91 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from asyncio import sleep
|
| 2 |
+
|
| 3 |
+
from ... import intervals, jd_listener_lock, jd_downloads
|
| 4 |
+
from ..ext_utils.bot_utils import new_task
|
| 5 |
+
from ...core.jdownloader_booter import jdownloader
|
| 6 |
+
from ..ext_utils.status_utils import get_task_by_gid
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
@new_task
async def remove_download(gid):
    """Remove a JDownloader download's packages and drop its tracking entry.

    Invoked when packages disappear from JDownloader (e.g. manual removal
    in the JD UI). Skipped entirely during global shutdown.
    """
    if intervals["stopAll"]:
        return
    await jdownloader.device.downloads.remove_links(
        package_ids=jd_downloads[gid]["ids"]
    )
    if task := await get_task_by_gid(gid):
        await task.listener.on_download_error("Download removed manually!")
    async with jd_listener_lock:
        del jd_downloads[gid]
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
@new_task
async def _on_download_complete(gid):
    """Finalize a finished JDownloader download: cleanup, notify, deregister."""
    if task := await get_task_by_gid(gid):
        if task.listener.select:
            # File selection was used: purge the disabled (de-selected) links
            # and delete their files from disk.
            async with jd_listener_lock:
                await jdownloader.device.downloads.cleanup(
                    "DELETE_DISABLED",
                    "REMOVE_LINKS_AND_DELETE_FILES",
                    "ALL",
                    package_ids=jd_downloads[gid]["ids"],
                )
        await task.listener.on_download_complete()
        if intervals["stopAll"]:
            # Global shutdown in progress; skip JD bookkeeping.
            return
    async with jd_listener_lock:
        if gid in jd_downloads:
            await jdownloader.device.downloads.remove_links(
                package_ids=jd_downloads[gid]["ids"],
            )
            del jd_downloads[gid]
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
@new_task
async def _jd_listener():
    """Background poller that reconciles jd_downloads with JDownloader.

    Runs every 3 seconds while any JD download is registered: drops package
    ids JDownloader no longer reports, re-resolves ids by save path when
    they all disappear, and fires completion when every package of a task
    is finished. Clears intervals["jd"] and exits once nothing is tracked.
    """
    while True:
        await sleep(3)
        async with jd_listener_lock:
            if len(jd_downloads) == 0:
                intervals["jd"] = ""
                break
            try:
                packages = await jdownloader.device.downloads.query_packages(
                    [{"finished": True, "saveTo": True}]
                )
            except Exception:
                # Transient RPC failure: try again on the next tick.
                continue

            all_packages = {pack["uuid"]: pack for pack in packages}
            for d_gid, d_dict in list(jd_downloads.items()):
                if d_dict["status"] == "down":
                    # Keep only package ids JDownloader still knows about.
                    # Rebuilding the list fixes the old in-place
                    # `del ids[index]` inside enumerate(), which mutated the
                    # list while iterating and skipped entries.
                    jd_downloads[d_gid]["ids"] = [
                        pid for pid in d_dict["ids"] if pid in all_packages
                    ]
                    if len(jd_downloads[d_gid]["ids"]) == 0:
                        # All known ids vanished (JD may have regrouped the
                        # packages); re-resolve them by download path.
                        path = jd_downloads[d_gid]["path"]
                        jd_downloads[d_gid]["ids"] = [
                            uid
                            for uid, pk in all_packages.items()
                            if pk["saveTo"].startswith(path)
                        ]
                        if len(jd_downloads[d_gid]["ids"]) == 0:
                            await remove_download(d_gid)

            if completed_packages := [
                pack["uuid"] for pack in packages if pack.get("finished", False)
            ]:
                for d_gid, d_dict in list(jd_downloads.items()):
                    if d_dict["status"] == "down":
                        is_finished = all(
                            did in completed_packages for did in d_dict["ids"]
                        )
                        if is_finished:
                            jd_downloads[d_gid]["status"] = "done"
                            await _on_download_complete(d_gid)
|
| 86 |
+
|
| 87 |
+
|
| 88 |
+
async def on_download_start():
    """Spawn the background JD poller if one is not already registered."""
    async with jd_listener_lock:
        already_running = bool(intervals["jd"])
        if not already_running:
            intervals["jd"] = await _jd_listener()
|
bot/helper/listeners/mega_listener.py
ADDED
|
@@ -0,0 +1,282 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from time import time
|
| 2 |
+
from secrets import token_hex
|
| 3 |
+
from aiofiles.os import makedirs
|
| 4 |
+
from asyncio import create_subprocess_exec, subprocess, wait_for
|
| 5 |
+
from re import search as re_search
|
| 6 |
+
from contextlib import suppress
|
| 7 |
+
|
| 8 |
+
from ... import LOGGER, task_dict, task_dict_lock
|
| 9 |
+
from ...core.config_manager import Config
|
| 10 |
+
from ..ext_utils.status_utils import MirrorStatus
|
| 11 |
+
from ..ext_utils.bot_utils import cmd_exec
|
| 12 |
+
from ..ext_utils.task_manager import (
|
| 13 |
+
check_running_tasks,
|
| 14 |
+
stop_duplicate_check,
|
| 15 |
+
limit_checker,
|
| 16 |
+
)
|
| 17 |
+
from ..mirror_leech_utils.status_utils.mega_status import MegaDownloadStatus
|
| 18 |
+
from ..mirror_leech_utils.status_utils.queue_status import QueueStatus
|
| 19 |
+
from ..telegram_helper.message_utils import send_status_message
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
# gid -> temporary MEGA folder path; consumed by mega_cleanup() on restart.
mega_tasks = {}
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
async def mega_cleanup():
    """Delete every leftover temporary MEGA folder and reset the registry."""
    pending = list(mega_tasks.values())
    if not pending:
        return
    LOGGER.info("Running Mega Cleanup...")
    for temp_path in pending:
        try:
            await cmd_exec(["mega-rm", "-r", "-f", temp_path])
        except Exception as err:
            LOGGER.error(f"Mega Restart Cleanup Failed for {temp_path}: {err}")
    mega_tasks.clear()
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
class MegaAppListener:
|
| 38 |
+
def __init__(self, listener):
|
| 39 |
+
self.listener = listener
|
| 40 |
+
self.process = None
|
| 41 |
+
self.gid = token_hex(5)
|
| 42 |
+
self.mega_status = None
|
| 43 |
+
self.name = ""
|
| 44 |
+
self.size = 0
|
| 45 |
+
self.temp_path = f"/wzml_{self.gid}"
|
| 46 |
+
self.mega_tags = set()
|
| 47 |
+
self._is_cleaned = False
|
| 48 |
+
self._last_time = time()
|
| 49 |
+
self._val_last = 0
|
| 50 |
+
mega_tasks[self.gid] = self.temp_path
|
| 51 |
+
|
| 52 |
+
async def login(self):
|
| 53 |
+
if (MEGA_EMAIL := Config.MEGA_EMAIL) and (
|
| 54 |
+
MEGA_PASSWORD := Config.MEGA_PASSWORD
|
| 55 |
+
):
|
| 56 |
+
try:
|
| 57 |
+
await cmd_exec(["mega-login", MEGA_EMAIL, MEGA_PASSWORD])
|
| 58 |
+
except Exception as e:
|
| 59 |
+
raise Exception(f"Mega Login Failed: {e}")
|
| 60 |
+
else:
|
| 61 |
+
raise Exception("MegaCMD: Credentials Missing! Login required")
|
| 62 |
+
|
| 63 |
+
async def create_temp_path(self):
|
| 64 |
+
await cmd_exec(["mega-mkdir", self.temp_path])
|
| 65 |
+
|
| 66 |
+
async def import_link(self):
|
| 67 |
+
stdout, stderr, ret = await cmd_exec(
|
| 68 |
+
["mega-import", self.listener.link, self.temp_path]
|
| 69 |
+
)
|
| 70 |
+
if ret != 0:
|
| 71 |
+
raise Exception(f"Mega Import Failed: {stderr}")
|
| 72 |
+
|
| 73 |
+
async def get_metadata_and_target(self):
|
| 74 |
+
stdout, _, ret = await cmd_exec(["mega-ls", "-l", self.temp_path])
|
| 75 |
+
if ret != 0 or not stdout:
|
| 76 |
+
raise Exception("Mega Metadata Failed")
|
| 77 |
+
|
| 78 |
+
lines = [line for line in stdout.strip().split("\n") if line.strip()]
|
| 79 |
+
if not lines:
|
| 80 |
+
raise Exception("Mega Import: No items found")
|
| 81 |
+
|
| 82 |
+
for line in lines:
|
| 83 |
+
match = re_search(r"\s(\d+|-)\s+\S+\s+\d{2}:\d{2}:\d{2}\s+(.*)$", line)
|
| 84 |
+
if match:
|
| 85 |
+
size_str = match.group(1)
|
| 86 |
+
self.name = match.group(2).strip()
|
| 87 |
+
self.size = int(size_str) if size_str.isdigit() else 0
|
| 88 |
+
break
|
| 89 |
+
|
| 90 |
+
if not self.name:
|
| 91 |
+
s_stdout, _, _ = await cmd_exec(["mega-ls", self.temp_path])
|
| 92 |
+
if s_stdout:
|
| 93 |
+
self.name = s_stdout.strip().split("\n")[0].strip()
|
| 94 |
+
|
| 95 |
+
if not self.name:
|
| 96 |
+
self.name = self.listener.name or f"MEGA_Download_{self.gid}"
|
| 97 |
+
|
| 98 |
+
self.listener.name = self.name
|
| 99 |
+
self.listener.size = self.size
|
| 100 |
+
|
| 101 |
+
return f"{self.temp_path}/{self.name}"
|
| 102 |
+
|
| 103 |
+
async def cleanup(self):
|
| 104 |
+
if self._is_cleaned:
|
| 105 |
+
return
|
| 106 |
+
self._is_cleaned = True
|
| 107 |
+
try:
|
| 108 |
+
LOGGER.info(f"Cleaning up Mega Task: {self.name}")
|
| 109 |
+
await cmd_exec(["mega-rm", "-r", "-f", self.temp_path])
|
| 110 |
+
if self.gid in mega_tasks:
|
| 111 |
+
del mega_tasks[self.gid]
|
| 112 |
+
except Exception as e:
|
| 113 |
+
LOGGER.error(f"Mega Cleanup Failed: {e}")
|
| 114 |
+
|
| 115 |
+
    async def download(self, path):
        """Run the full Mega download pipeline into *path*.

        Steps: login/import the link, run duplicate- and limit-checks, honor
        the task queue, then spawn ``mega-get`` and poll its output for
        progress until it exits. Cleanup always runs via ``finally``.
        """
        try:
            await self.login()
            await self.create_temp_path()
            await self.import_link()
            target_node = await self.get_metadata_and_target()

            # Abort early if an identical upload already exists.
            msg, button = await stop_duplicate_check(self.listener)
            if msg:
                await self.listener.on_download_error(msg, button)
                return

            if limit_exceeded := await limit_checker(self.listener):
                await self.listener.on_download_error(limit_exceeded, is_limit=True)
                return

            # If the global task queue is full, park this task until the
            # queue event is set (or the task is cancelled meanwhile).
            added_to_queue, event = await check_running_tasks(self.listener)
            if added_to_queue:
                LOGGER.info(f"Added to Queue/Download: {self.name}")
                async with task_dict_lock:
                    task_dict[self.listener.mid] = QueueStatus(
                        self.listener, self.gid, "Dl"
                    )
                await self.listener.on_download_start()
                if self.listener.multi <= 1:
                    await send_status_message(self.listener.message)
                await event.wait()
                if self.listener.is_cancelled:
                    return

            self.mega_status = MegaDownloadStatus(
                self.listener, self, self.gid, MirrorStatus.STATUS_DOWNLOAD
            )
            async with task_dict_lock:
                task_dict[self.listener.mid] = self.mega_status

            if added_to_queue:
                LOGGER.info(f"Start Queued Download from Mega: {self.name}")
            else:
                # Non-queued tasks announce their start here instead.
                LOGGER.info(f"Download from Mega: {self.name}")
                await self.listener.on_download_start()
                if self.listener.multi <= 1:
                    await send_status_message(self.listener.message)

            await makedirs(path, exist_ok=True)

            command = ["mega-get", target_node, path]

            # stderr is merged into stdout so the progress parser sees both.
            self.process = await create_subprocess_exec(
                *command,
                stdout=subprocess.PIPE,
                stderr=subprocess.STDOUT,
            )

            # mega-get rewrites its progress line using carriage returns, so
            # read up to each "\r" rather than line-by-line.
            while True:
                if self.listener.is_cancelled:
                    break

                try:
                    line_bytes = await wait_for(
                        self.process.stdout.readuntil(b"\r"), timeout=5
                    )
                    line = line_bytes.decode().strip()
                    if not line:
                        if self.process.returncode is not None:
                            break
                        continue
                    self._parse_progress(line)
                except TimeoutError:
                    # No output for 5s: fall back to polling the daemon.
                    await self.update_daemon_status()
                    if self.process.returncode is not None:
                        break
                    continue
                except Exception:
                    # Any other read error (e.g. EOF): stop polling.
                    break

                if self.process.returncode is not None:
                    break

            await self.process.wait()

            if self.process.returncode == 0:
                await self.cleanup()
                await self.listener.on_download_complete()
            else:
                if self.listener.is_cancelled:
                    return
                # -9 means we killed the process ourselves (cancel path).
                if self.process.returncode != -9:
                    await self.listener.on_download_error(
                        f"MegaCMD exited with {self.process.returncode}"
                    )
        except Exception as e:
            if self.listener.is_cancelled:
                return
            LOGGER.error(f"Mega Download Logic Error: {e}")
            await self.listener.on_download_error(str(e))
        finally:
            await self.cleanup()
|
| 213 |
+
|
| 214 |
+
    def _parse_progress(self, line):
        """Parse one ``mega-get`` progress line and update the status object.

        Expects a fragment like ``(12.3/456.7 MB`` — group 1 is downloaded,
        group 2 is total, group 3 is the unit. Speed is recomputed at most
        every 2 seconds from the byte delta.
        """
        # "B" maps to 1 so plain-bytes units fall through cleanly.
        multipliers = {"K": 1024, "M": 1024**2, "G": 1024**3, "T": 1024**4, "B": 1}
        match = re_search(r"\(([\d\.]+)/([\d\.]+)\s([KMGT]?B)", line)
        if match:
            dl_val = float(match.group(1))
            # First char of the unit ("KB" -> "K", "B" -> "B").
            unit_char = (match.group(3))[0].upper()
            mult = multipliers.get(unit_char, 1)
            self.mega_status._downloaded_bytes = int(dl_val * mult)

            # Only adopt the reported total if we don't already know the size.
            if not self.listener.size or self.listener.size == 0:
                total_val = float(match.group(2))
                self.mega_status._size = int(total_val * mult)
                self.listener.size = self.mega_status._size

        cur_time = time()
        if cur_time - self._last_time >= 2:
            # Average speed over the window since the last sample.
            self.mega_status._speed = int(
                (self.mega_status._downloaded_bytes - self._val_last)
                / (cur_time - self._last_time)
            )
            self._last_time = cur_time
            self._val_last = self.mega_status._downloaded_bytes
|
| 236 |
+
|
| 237 |
+
async def update_daemon_status(self):
|
| 238 |
+
try:
|
| 239 |
+
stdout, _, _ = await cmd_exec(["mega-transfers", "--col-separator=|"])
|
| 240 |
+
for line in stdout.splitlines():
|
| 241 |
+
if self.gid in line:
|
| 242 |
+
parts = line.split("|")
|
| 243 |
+
if len(parts) > 1:
|
| 244 |
+
self.mega_tags.add(parts[1].strip())
|
| 245 |
+
if len(parts) > 4:
|
| 246 |
+
status = parts[5].strip().capitalize()
|
| 247 |
+
if self.mega_status._status != "Downloading":
|
| 248 |
+
self.mega_status._status = status
|
| 249 |
+
except Exception:
|
| 250 |
+
pass
|
| 251 |
+
|
| 252 |
+
    async def cancel_task(self):
        """Cancel this Mega task: stop daemon transfers, then kill mega-get.

        First cancels every transfer tag collected so far, then re-scans
        ``mega-transfers`` output for straggler tags matching our gid, and
        finally kills the local ``mega-get`` process if it is still running.
        """
        LOGGER.info(f"Cancelling {self.mega_status._status}: {self.name}")
        self.listener.is_cancelled = True

        # Make sure mega_tags is as complete as possible before cancelling.
        await self.update_daemon_status()

        for tag in self.mega_tags:
            try:
                LOGGER.info(f"Cancelling Transfer Tag: {tag}")
                await cmd_exec(["mega-transfers", "-c", tag])
            except Exception as e:
                LOGGER.error(f"Mega Transfer Cancel Failed for {tag}: {e}")

        # Second pass: catch transfers that appeared after the tag snapshot.
        try:
            stdout, _, _ = await cmd_exec(["mega-transfers"])
            for line in stdout.splitlines():
                if self.gid in line:
                    parts = line.split()
                    if (
                        len(parts) > 1
                        and (tag := parts[1])
                        and tag not in self.mega_tags
                    ):
                        LOGGER.info(f"Cancelling Straggler Tag: {tag}")
                        await cmd_exec(["mega-transfers", "-c", tag])
        except Exception as e:
            LOGGER.error(f"Mega Final Cancel Check Failed: {e}")

        # Kill the local mega-get subprocess last (exit code -9 is treated
        # as "cancelled by us" in download()).
        if self.process is not None:
            with suppress(Exception):
                self.process.kill()
|
bot/helper/listeners/nzb_listener.py
ADDED
|
@@ -0,0 +1,117 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from asyncio import sleep, gather
|
| 2 |
+
|
| 3 |
+
from ... import (
|
| 4 |
+
intervals,
|
| 5 |
+
sabnzbd_client,
|
| 6 |
+
nzb_jobs,
|
| 7 |
+
nzb_listener_lock,
|
| 8 |
+
LOGGER,
|
| 9 |
+
)
|
| 10 |
+
from ..ext_utils.bot_utils import new_task
|
| 11 |
+
from ..ext_utils.status_utils import get_task_by_gid, get_raw_file_size
|
| 12 |
+
from ..ext_utils.task_manager import stop_duplicate_check, limit_checker
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
async def _remove_job(nzo_id, mid):
    """Purge a SABnzbd job and its per-task category, then forget it.

    History deletion and category deletion run concurrently; if the job was
    not in history yet, it is deleted from the active queue instead.
    """
    history_deleted, _ = await gather(
        sabnzbd_client.delete_history(nzo_id, delete_files=True),
        sabnzbd_client.delete_category(f"{mid}"),
    )
    if not history_deleted:
        # Job never reached history: remove it from the live queue.
        await sabnzbd_client.delete_job(nzo_id, True)
    async with nzb_listener_lock:
        nzb_jobs.pop(nzo_id, None)
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
@new_task
async def _on_download_error(err, nzo_id, button=None, is_limit=False):
    """Propagate a download failure to the task listener and purge the job.

    Looks the task up by its SABnzbd nzo_id; if it is no longer tracked the
    error is dropped silently.
    """
    if task := await get_task_by_gid(nzo_id):
        LOGGER.info(f"Cancelling Download: {task.name()}")
        # Notify the listener and delete the SABnzbd job concurrently.
        await gather(
            task.listener.on_download_error(err, button, is_limit),
            _remove_job(nzo_id, task.listener.mid),
        )
|
| 35 |
+
|
| 36 |
+
|
| 37 |
+
@new_task
async def _stop_duplicate(nzo_id):
    """Run the duplicate-upload check once the job's real name is known."""
    if task := await get_task_by_gid(nzo_id):
        # Refresh the status object so name() reflects SABnzbd's data.
        await task.update()
        task.listener.name = task.name()
        msg, button = await stop_duplicate_check(task.listener)
        if msg:
            await _on_download_error(msg, nzo_id, button)
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
@new_task
async def _size_check(nzo_id):
    """Enforce configured size limits once the job reports its size."""
    task = await get_task_by_gid(nzo_id)
    if task is None:
        return
    # Refresh so size() reflects SABnzbd's current value.
    await task.update()
    task.listener.size = get_raw_file_size(task.size())
    if limit_msg := await limit_checker(task.listener):
        await _on_download_error(limit_msg, nzo_id, is_limit=True)
|
| 55 |
+
|
| 56 |
+
|
| 57 |
+
@new_task
async def _on_download_complete(nzo_id):
    """Hand a completed SABnzbd job to the listener and purge it."""
    if task := await get_task_by_gid(nzo_id):
        await task.listener.on_download_complete()
        # During shutdown, skip job removal so state stays intact.
        if intervals["stopAll"]:
            return
        await _remove_job(nzo_id, task.listener.mid)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
@new_task
async def _nzb_listener():
    """Poll SABnzbd every 3s and drive tracked jobs through their lifecycle.

    Exits when no jobs remain (clearing ``intervals["nzb"]`` so
    ``on_download_start`` can restart it) or when a global stop is requested.
    """
    while not intervals["stopAll"]:
        async with nzb_listener_lock:
            try:
                jobs = (await sabnzbd_client.get_history())["history"]["slots"]
                downloads = (await sabnzbd_client.get_downloads())["queue"]["slots"]
                if len(nzb_jobs) == 0:
                    # Nothing tracked: stop the loop and mark it as stopped.
                    intervals["nzb"] = ""
                    break
                # Finished/failed jobs appear in history.
                for job in jobs:
                    nzo_id = job["nzo_id"]
                    if nzo_id not in nzb_jobs:
                        continue
                    if job["status"] == "Completed":
                        # "uploaded" flag guards against double-firing.
                        if not nzb_jobs[nzo_id]["uploaded"]:
                            nzb_jobs[nzo_id]["uploaded"] = True
                            await _on_download_complete(nzo_id)
                            nzb_jobs[nzo_id]["status"] = "Completed"
                    elif job["status"] == "Failed":
                        await _on_download_error(job["fail_message"], nzo_id)
                # Active jobs appear in the queue.
                for dl in downloads:
                    nzo_id = dl["nzo_id"]
                    if nzo_id not in nzb_jobs:
                        continue
                    # SABnzbd labels duplicates as ALTERNATIVE.
                    if dl["labels"] and dl["labels"][0] == "ALTERNATIVE":
                        await _on_download_error("Duplicated Job!", nzo_id)
                        continue
                    # "Trying..." filenames mean the real name isn't known yet.
                    if dl["status"] == "Downloading" and not dl["filename"].startswith(
                        "Trying"
                    ):
                        # One-shot duplicate and size checks per job.
                        if not nzb_jobs[nzo_id]["stop_dup_check"]:
                            nzb_jobs[nzo_id]["stop_dup_check"] = True
                            await _stop_duplicate(nzo_id)
                        if not nzb_jobs[nzo_id]["size_check"]:
                            nzb_jobs[nzo_id]["size_check"] = True
                            await _size_check(nzo_id)
            except Exception as e:
                LOGGER.error(str(e))
        await sleep(3)
|
| 106 |
+
|
| 107 |
+
|
| 108 |
+
async def on_download_start(nzo_id):
    """Register a new SABnzbd job and lazily start the polling loop."""
    async with nzb_listener_lock:
        # Fresh lifecycle flags consumed by _nzb_listener.
        nzb_jobs[nzo_id] = {
            "uploaded": False,
            "stop_dup_check": False,
            "size_check": False,
            "status": "Downloading",
        }
    # Start the poller only if one isn't already running.
    if not intervals["nzb"]:
        intervals["nzb"] = await _nzb_listener()
|
bot/helper/listeners/qbit_listener.py
ADDED
|
@@ -0,0 +1,222 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from aiofiles.os import remove, path as aiopath
|
| 2 |
+
from asyncio import sleep, TimeoutError
|
| 3 |
+
from time import time
|
| 4 |
+
from aiohttp.client_exceptions import ClientError
|
| 5 |
+
from aioqbt.exc import AQError
|
| 6 |
+
|
| 7 |
+
from ... import (
|
| 8 |
+
task_dict,
|
| 9 |
+
task_dict_lock,
|
| 10 |
+
intervals,
|
| 11 |
+
qb_torrents,
|
| 12 |
+
qb_listener_lock,
|
| 13 |
+
LOGGER,
|
| 14 |
+
)
|
| 15 |
+
from ...core.config_manager import Config
|
| 16 |
+
from ...core.torrent_manager import TorrentManager
|
| 17 |
+
from ..ext_utils.bot_utils import new_task
|
| 18 |
+
from ..ext_utils.files_utils import clean_unwanted
|
| 19 |
+
from ..ext_utils.status_utils import get_readable_time, get_task_by_gid
|
| 20 |
+
from ..ext_utils.task_manager import stop_duplicate_check, limit_checker
|
| 21 |
+
from ..mirror_leech_utils.status_utils.qbit_status import QbittorrentStatus
|
| 22 |
+
from ..telegram_helper.message_utils import update_status_message
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
async def _remove_torrent(hash_, tag):
    """Delete a torrent (with its files) and drop its tracking tag."""
    await TorrentManager.qbittorrent.torrents.delete([hash_], True)
    async with qb_listener_lock:
        if tag in qb_torrents:
            del qb_torrents[tag]
    await TorrentManager.qbittorrent.torrents.delete_tags([tag])
|
| 31 |
+
|
| 32 |
+
|
| 33 |
+
@new_task
async def _on_download_error(err, tor, button=None, is_limit=False):
    """Fail a torrent task: notify the listener, stop and remove the torrent.

    Tasks are looked up by the first 12 chars of the torrent hash (the gid).
    """
    LOGGER.info(f"Cancelling Download: {tor.name}")
    ext_hash = tor.hash
    if task := await get_task_by_gid(ext_hash[:12]):
        await task.listener.on_download_error(err, button, is_limit)
    await TorrentManager.qbittorrent.torrents.stop([ext_hash])
    # Small delay so qBittorrent settles before deletion.
    await sleep(0.3)
    await _remove_torrent(ext_hash, tor.tags[0])
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
@new_task
async def _on_seed_finish(tor):
    """Finish a seeding task: report ratio/time and remove the torrent."""
    ext_hash = tor.hash
    LOGGER.info(f"Cancelling Seed: {tor.name}")
    if task := await get_task_by_gid(ext_hash[:12]):
        # Fix: the fallback was the *string* "0" (int("0") only parsed by
        # accident); use the integer 0 for a zero/falsy seeding time.
        seed_time = get_readable_time(int(tor.seeding_time.total_seconds() or 0))
        msg = f"Seeding stopped with Ratio: {round(tor.ratio, 3)} and Time: {seed_time}"
        await task.listener.on_upload_error(msg)
        await _remove_torrent(ext_hash, tor.tags[0])
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
@new_task
async def _stop_duplicate(tor):
    """Run the duplicate-upload check once the torrent's content name is known."""
    if task := await get_task_by_gid(tor.hash[:12]):
        if task.listener.stop_duplicate:
            # Derive the final name from the content path, stripping the
            # qBittorrent incomplete-file suffix ".!qB".
            task.listener.name = tor.content_path.rsplit("/", 1)[-1].rsplit(".!qB", 1)[
                0
            ]
            msg, button = await stop_duplicate_check(task.listener)
            if msg:
                await _on_download_error(msg, tor, button)
|
| 64 |
+
|
| 65 |
+
|
| 66 |
+
@new_task
async def _size_check(tor):
    """Enforce configured size limits against the torrent's reported size."""
    if task := await get_task_by_gid(tor.hash[:12]):
        task.listener.size = tor.size
        mmsg = await limit_checker(task.listener)
        if mmsg:
            await _on_download_error(mmsg, tor, is_limit=True)
|
| 73 |
+
|
| 74 |
+
|
| 75 |
+
@new_task
async def _on_download_complete(tor):
    """Handle a finished torrent: prune deselected files, upload, then seed
    or remove depending on the listener's seed setting."""
    ext_hash = tor.hash
    tag = tor.tags[0]
    if task := await get_task_by_gid(ext_hash[:12]):
        if not task.listener.seed:
            await TorrentManager.qbittorrent.torrents.stop([ext_hash])
        if task.listener.select:
            # File selection was used: drop leftover unwanted files, then
            # remove any priority-0 (deselected) files still on disk.
            await clean_unwanted(task.listener.dir)
            path = tor.content_path.rsplit("/", 1)[0]
            res = await TorrentManager.qbittorrent.torrents.files(ext_hash)
            for f in res:
                if f.priority == 0 and await aiopath.exists(f"{path}/{f.name}"):
                    try:
                        await remove(f"{path}/{f.name}")
                    except Exception:
                        pass
        await task.listener.on_download_complete()
        if intervals["stopAll"]:
            return
        if task.listener.seed and not task.listener.is_cancelled:
            # Switch the task's status object into seeding mode, unless the
            # task was removed from task_dict in the meantime.
            async with task_dict_lock:
                if task.listener.mid in task_dict:
                    removed = False
                    task_dict[task.listener.mid] = QbittorrentStatus(
                        task.listener, True
                    )
                else:
                    removed = True
            if removed:
                await _remove_torrent(ext_hash, tag)
                return
            async with qb_listener_lock:
                if tag in qb_torrents:
                    qb_torrents[tag]["seeding"] = True
                else:
                    return
            await update_status_message(task.listener.message.chat.id)
            LOGGER.info(f"Seeding started: {tor.name} - Hash: {ext_hash}")
        else:
            await _remove_torrent(ext_hash, tag)
    else:
        # Task no longer tracked: just clean up the torrent.
        await _remove_torrent(ext_hash, tag)
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
@new_task
async def _qb_listener():
    """Poll qBittorrent every 3s and drive tracked torrents by state.

    Handles metadata stalls and dead torrents (TORRENT_TIMEOUT), one-shot
    duplicate/size checks, force-rechecks of nearly-complete stalls,
    completion hand-off, and seed-finish detection. Exits when no torrents
    remain (clearing ``intervals["qb"]`` so ``on_download_start`` can
    restart it).
    """
    while True:
        async with qb_listener_lock:
            try:
                torrents = await TorrentManager.qbittorrent.torrents.info()
                if len(torrents) == 0:
                    intervals["qb"] = ""
                    break
                for tor_info in torrents:
                    tag = tor_info.tags[0]
                    if tag not in qb_torrents:
                        continue
                    state = tor_info.state
                    if state == "metaDL":
                        qb_torrents[tag]["stalled_time"] = time()
                        # Metadata never arrived within the timeout: dead.
                        if (
                            Config.TORRENT_TIMEOUT
                            and time() - qb_torrents[tag]["start_time"]
                            >= Config.TORRENT_TIMEOUT
                        ):
                            await _on_download_error("Dead Torrent!", tor_info)
                        else:
                            await TorrentManager.qbittorrent.torrents.reannounce(
                                [tor_info.hash]
                            )
                    elif state == "downloading":
                        qb_torrents[tag]["stalled_time"] = time()
                        # One-shot duplicate and size checks per torrent.
                        if not qb_torrents[tag]["stop_dup_check"]:
                            qb_torrents[tag]["stop_dup_check"] = True
                            await _stop_duplicate(tor_info)
                        if not qb_torrents[tag]["size_check"]:
                            qb_torrents[tag]["size_check"] = True
                            await _size_check(tor_info)
                    elif state == "stalledDL":
                        # Nearly complete but stalled: force a recheck once.
                        if (
                            not qb_torrents[tag]["rechecked"]
                            and 0.99989999999999999 < tor_info.progress < 1
                        ):
                            msg = f"Force recheck - Name: {tor_info.name} Hash: "
                            msg += f"{tor_info.hash} Downloaded Bytes: {tor_info.downloaded} "
                            msg += f"Size: {tor_info.size} Total Size: {tor_info.total_size}"
                            LOGGER.warning(msg)
                            await TorrentManager.qbittorrent.torrents.recheck(
                                [tor_info.hash]
                            )
                            qb_torrents[tag]["rechecked"] = True
                        elif (
                            Config.TORRENT_TIMEOUT
                            and time() - qb_torrents[tag]["stalled_time"]
                            >= Config.TORRENT_TIMEOUT
                        ):
                            await _on_download_error("Dead Torrent!", tor_info)
                        else:
                            await TorrentManager.qbittorrent.torrents.reannounce(
                                [tor_info.hash]
                            )
                    elif state == "missingFiles":
                        await TorrentManager.qbittorrent.torrents.recheck(
                            [tor_info.hash]
                        )
                    elif state == "error":
                        await _on_download_error(
                            "No enough space for this torrent on device", tor_info
                        )
                    elif (
                        int(tor_info.completion_on.timestamp()) != -1
                        and not qb_torrents[tag]["uploaded"]
                        and state
                        in [
                            "queuedUP",
                            "stalledUP",
                            "uploading",
                            "forcedUP",
                        ]
                    ):
                        # Download finished and moved into an upload state:
                        # fire completion exactly once.
                        qb_torrents[tag]["uploaded"] = True
                        await _on_download_complete(tor_info)
                    elif (
                        state in ["stoppedUP", "stoppedDL"]
                        and qb_torrents[tag]["seeding"]
                    ):
                        qb_torrents[tag]["seeding"] = False
                        await _on_seed_finish(tor_info)
                        await sleep(0.5)
            except Exception as e:
                # Fix: the original caught (ClientError, TimeoutError,
                # Exception, AQError) — Exception already subsumes the
                # others, so the tuple was redundant; behavior is identical.
                LOGGER.error(str(e))
        await sleep(3)
|
| 208 |
+
|
| 209 |
+
|
| 210 |
+
async def on_download_start(tag):
    """Register a new torrent tag and lazily start the polling loop."""
    async with qb_listener_lock:
        # Fresh lifecycle flags consumed by _qb_listener.
        qb_torrents[tag] = {
            "start_time": time(),
            "stalled_time": time(),
            "stop_dup_check": False,
            "size_check": False,
            "rechecked": False,
            "uploaded": False,
            "seeding": False,
        }
    # Start the poller only if one isn't already running.
    if not intervals["qb"]:
        intervals["qb"] = await _qb_listener()
|
bot/helper/listeners/task_listener.py
ADDED
|
@@ -0,0 +1,674 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from asyncio import gather, sleep
|
| 2 |
+
from html import escape
|
| 3 |
+
from time import time
|
| 4 |
+
from mimetypes import guess_type
|
| 5 |
+
from contextlib import suppress
|
| 6 |
+
from os import path as ospath
|
| 7 |
+
|
| 8 |
+
from aiofiles.os import listdir, remove, path as aiopath
|
| 9 |
+
from requests import utils as rutils
|
| 10 |
+
|
| 11 |
+
from ... import (
|
| 12 |
+
intervals,
|
| 13 |
+
task_dict,
|
| 14 |
+
task_dict_lock,
|
| 15 |
+
LOGGER,
|
| 16 |
+
non_queued_up,
|
| 17 |
+
non_queued_dl,
|
| 18 |
+
queued_up,
|
| 19 |
+
queued_dl,
|
| 20 |
+
queue_dict_lock,
|
| 21 |
+
same_directory_lock,
|
| 22 |
+
DOWNLOAD_DIR,
|
| 23 |
+
)
|
| 24 |
+
from ...modules.metadata import apply_metadata_title
|
| 25 |
+
from ..common import TaskConfig
|
| 26 |
+
from ...core.tg_client import TgClient
|
| 27 |
+
from ...core.config_manager import Config
|
| 28 |
+
from ...core.torrent_manager import TorrentManager
|
| 29 |
+
from ..ext_utils.bot_utils import sync_to_async
|
| 30 |
+
from ..ext_utils.links_utils import encode_slink
|
| 31 |
+
from ..ext_utils.db_handler import database
|
| 32 |
+
from ..ext_utils.files_utils import (
|
| 33 |
+
clean_download,
|
| 34 |
+
clean_target,
|
| 35 |
+
create_recursive_symlink,
|
| 36 |
+
get_path_size,
|
| 37 |
+
join_files,
|
| 38 |
+
remove_excluded_files,
|
| 39 |
+
move_and_merge,
|
| 40 |
+
)
|
| 41 |
+
from ..ext_utils.links_utils import is_gdrive_id
|
| 42 |
+
from ..ext_utils.status_utils import get_readable_file_size, get_readable_time
|
| 43 |
+
from ..ext_utils.task_manager import check_running_tasks, start_from_queued
|
| 44 |
+
from ..mirror_leech_utils.uphoster_utils.gofile_utils.upload import GoFileUpload
|
| 45 |
+
from ..mirror_leech_utils.uphoster_utils.buzzheavier_utils.upload import (
|
| 46 |
+
BuzzHeavierUpload,
|
| 47 |
+
)
|
| 48 |
+
from ..mirror_leech_utils.uphoster_utils.pixeldrain_utils.upload import (
|
| 49 |
+
PixelDrainUpload,
|
| 50 |
+
)
|
| 51 |
+
from ..mirror_leech_utils.uphoster_utils.multi_upload import MultiUphosterUpload
|
| 52 |
+
from ..mirror_leech_utils.gdrive_utils.upload import GoogleDriveUpload
|
| 53 |
+
from ..mirror_leech_utils.rclone_utils.transfer import RcloneTransferHelper
|
| 54 |
+
from ..mirror_leech_utils.status_utils.uphoster_status import UphosterStatus
|
| 55 |
+
from ..mirror_leech_utils.status_utils.gdrive_status import (
|
| 56 |
+
GoogleDriveStatus,
|
| 57 |
+
)
|
| 58 |
+
from ..mirror_leech_utils.status_utils.queue_status import QueueStatus
|
| 59 |
+
from ..mirror_leech_utils.status_utils.rclone_status import RcloneStatus
|
| 60 |
+
from ..mirror_leech_utils.status_utils.telegram_status import TelegramStatus
|
| 61 |
+
from ..mirror_leech_utils.status_utils.yt_status import YtStatus
|
| 62 |
+
from ..mirror_leech_utils.upload_utils.telegram_uploader import TelegramUploader
|
| 63 |
+
from ..mirror_leech_utils.youtube_utils.youtube_upload import YouTubeUpload
|
| 64 |
+
from ..telegram_helper.button_build import ButtonMaker
|
| 65 |
+
from ..telegram_helper.message_utils import (
|
| 66 |
+
delete_message,
|
| 67 |
+
delete_status,
|
| 68 |
+
send_message,
|
| 69 |
+
update_status_message,
|
| 70 |
+
)
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class TaskListener(TaskConfig):
|
| 74 |
+
    def __init__(self):
        # All task state is initialized by TaskConfig.
        super().__init__()
|
| 76 |
+
|
| 77 |
+
    async def clean(self):
        """Cancel all status-update intervals and purge finished aria2 results.

        Best-effort: any failure is suppressed.
        """
        with suppress(Exception):
            if st := intervals["status"]:
                # Iterate a snapshot; cancel() may mutate the mapping.
                for intvl in list(st.values()):
                    intvl.cancel()
            intervals["status"].clear()
            await gather(TorrentManager.aria2.purgeDownloadResult(), delete_status())
|
| 84 |
+
|
| 85 |
+
def clear(self):
|
| 86 |
+
self.subname = ""
|
| 87 |
+
self.subsize = 0
|
| 88 |
+
self.files_to_proceed = []
|
| 89 |
+
self.proceed_count = 0
|
| 90 |
+
self.progress = True
|
| 91 |
+
|
| 92 |
+
    async def remove_from_same_dir(self):
        """Detach this task from its shared-folder group, if it belongs to one.

        Decrements the group's expected-task total so the remaining tasks in
        ``same_dir`` don't wait for this one.
        """
        async with task_dict_lock:
            if (
                self.folder_name
                and self.same_dir
                and self.mid in self.same_dir[self.folder_name]["tasks"]
            ):
                self.same_dir[self.folder_name]["tasks"].remove(self.mid)
                self.same_dir[self.folder_name]["total"] -= 1
|
| 101 |
+
|
| 102 |
+
    async def on_download_start(self):
        """Announce task start (PM + links log) and persist incomplete-task state.

        Sends a PM to the user when bot-PM mode is on in a super chat, mirrors
        the link to the configured links-log chat, and records the task in the
        database so it can be resumed/notified after a restart.
        """
        mode_name = "Leech" if self.is_leech else "Mirror"
        if self.bot_pm and self.is_super_chat:
            self.pm_msg = await send_message(
                self.user_id,
                f"""➲ <b><u>Task Started :</u></b>
┃
┖ <b>Link:</b> <a href='{self.source_url}'>Click Here</a>
""",
            )
        if Config.LINKS_LOG_ID:
            await send_message(
                Config.LINKS_LOG_ID,
                f"""➲ <b><u>{mode_name} Started:</u></b>
┃
┠ <b>User :</b> {self.tag} ( #ID{self.user_id} )
┠ <b>Message Link :</b> <a href='{self.message.link}'>Click Here</a>
┗ <b>Link:</b> <a href='{self.source_url}'>Click Here</a>
""",
            )
        # Incomplete-task notifier needs both the feature flag and a DB.
        if (
            self.is_super_chat
            and Config.INCOMPLETE_TASK_NOTIFIER
            and Config.DATABASE_URL
        ):
            await database.add_incomplete_task(
                self.message.chat.id, self.message.link, self.tag
            )
|
| 130 |
+
|
| 131 |
+
async def on_download_complete(self):
|
| 132 |
+
await sleep(2)
|
| 133 |
+
if self.is_cancelled:
|
| 134 |
+
return
|
| 135 |
+
multi_links = False
|
| 136 |
+
if (
|
| 137 |
+
self.folder_name
|
| 138 |
+
and self.same_dir
|
| 139 |
+
and self.mid in self.same_dir[self.folder_name]["tasks"]
|
| 140 |
+
):
|
| 141 |
+
async with same_directory_lock:
|
| 142 |
+
while True:
|
| 143 |
+
async with task_dict_lock:
|
| 144 |
+
if self.mid not in self.same_dir[self.folder_name]["tasks"]:
|
| 145 |
+
return
|
| 146 |
+
if (
|
| 147 |
+
self.same_dir[self.folder_name]["total"] <= 1
|
| 148 |
+
or len(self.same_dir[self.folder_name]["tasks"]) > 1
|
| 149 |
+
):
|
| 150 |
+
if self.same_dir[self.folder_name]["total"] > 1:
|
| 151 |
+
self.same_dir[self.folder_name]["tasks"].remove(
|
| 152 |
+
self.mid
|
| 153 |
+
)
|
| 154 |
+
self.same_dir[self.folder_name]["total"] -= 1
|
| 155 |
+
spath = f"{self.dir}{self.folder_name}"
|
| 156 |
+
des_id = list(self.same_dir[self.folder_name]["tasks"])[
|
| 157 |
+
0
|
| 158 |
+
]
|
| 159 |
+
des_path = f"{DOWNLOAD_DIR}{des_id}{self.folder_name}"
|
| 160 |
+
LOGGER.info(f"Moving files from {self.mid} to {des_id}")
|
| 161 |
+
await move_and_merge(spath, des_path, self.mid)
|
| 162 |
+
multi_links = True
|
| 163 |
+
break
|
| 164 |
+
await sleep(1)
|
| 165 |
+
async with task_dict_lock:
|
| 166 |
+
if self.is_cancelled:
|
| 167 |
+
return
|
| 168 |
+
if self.mid not in task_dict:
|
| 169 |
+
return
|
| 170 |
+
download = task_dict[self.mid]
|
| 171 |
+
self.name = download.name()
|
| 172 |
+
gid = download.gid()
|
| 173 |
+
LOGGER.info(f"Download completed: {self.name}")
|
| 174 |
+
|
| 175 |
+
if not (self.is_torrent or self.is_qbit):
|
| 176 |
+
self.seed = False
|
| 177 |
+
|
| 178 |
+
if multi_links:
|
| 179 |
+
self.seed = False
|
| 180 |
+
await self.on_upload_error(
|
| 181 |
+
f"{self.name} Downloaded!\n\nWaiting for other tasks to finish..."
|
| 182 |
+
)
|
| 183 |
+
return
|
| 184 |
+
elif self.same_dir:
|
| 185 |
+
self.seed = False
|
| 186 |
+
|
| 187 |
+
if self.folder_name:
|
| 188 |
+
self.name = self.folder_name.strip("/").split("/", 1)[0]
|
| 189 |
+
|
| 190 |
+
if not await aiopath.exists(f"{self.dir}/{self.name}"):
|
| 191 |
+
try:
|
| 192 |
+
files = await listdir(self.dir)
|
| 193 |
+
self.name = files[-1]
|
| 194 |
+
if self.name == "yt-dlp-thumb":
|
| 195 |
+
self.name = files[0]
|
| 196 |
+
except Exception as e:
|
| 197 |
+
await self.on_upload_error(str(e))
|
| 198 |
+
return
|
| 199 |
+
|
| 200 |
+
dl_path = f"{self.dir}/{self.name}"
|
| 201 |
+
self.size = await get_path_size(dl_path)
|
| 202 |
+
self.is_file = await aiopath.isfile(dl_path)
|
| 203 |
+
|
| 204 |
+
if self.seed:
|
| 205 |
+
up_dir = self.up_dir = f"{self.dir}10000"
|
| 206 |
+
up_path = f"{self.up_dir}/{self.name}"
|
| 207 |
+
await create_recursive_symlink(self.dir, self.up_dir)
|
| 208 |
+
LOGGER.info(f"Shortcut created: {dl_path} -> {up_path}")
|
| 209 |
+
else:
|
| 210 |
+
up_dir = self.dir
|
| 211 |
+
up_path = dl_path
|
| 212 |
+
|
| 213 |
+
await remove_excluded_files(self.up_dir or self.dir, self.excluded_extensions)
|
| 214 |
+
|
| 215 |
+
if not Config.QUEUE_ALL:
|
| 216 |
+
async with queue_dict_lock:
|
| 217 |
+
if self.mid in non_queued_dl:
|
| 218 |
+
non_queued_dl.remove(self.mid)
|
| 219 |
+
await start_from_queued()
|
| 220 |
+
|
| 221 |
+
if self.join and not self.is_file:
|
| 222 |
+
await join_files(up_path)
|
| 223 |
+
|
| 224 |
+
if self.extract and not self.is_nzb:
|
| 225 |
+
up_path = await self.proceed_extract(up_path, gid)
|
| 226 |
+
if self.is_cancelled:
|
| 227 |
+
return
|
| 228 |
+
self.is_file = await aiopath.isfile(up_path)
|
| 229 |
+
self.name = up_path.replace(f"{up_dir}/", "").split("/", 1)[0]
|
| 230 |
+
self.size = await get_path_size(up_dir)
|
| 231 |
+
self.clear()
|
| 232 |
+
await remove_excluded_files(up_dir, self.excluded_extensions)
|
| 233 |
+
|
| 234 |
+
if self.ffmpeg_cmds:
|
| 235 |
+
up_path = await self.proceed_ffmpeg(
|
| 236 |
+
up_path,
|
| 237 |
+
gid,
|
| 238 |
+
)
|
| 239 |
+
if self.is_cancelled:
|
| 240 |
+
return
|
| 241 |
+
self.is_file = await aiopath.isfile(up_path)
|
| 242 |
+
self.name = up_path.replace(f"{up_dir}/", "").split("/", 1)[0]
|
| 243 |
+
self.size = await get_path_size(up_dir)
|
| 244 |
+
self.clear()
|
| 245 |
+
|
| 246 |
+
if (
|
| 247 |
+
(hasattr(self, "metadata_dict") and self.metadata_dict)
|
| 248 |
+
or (hasattr(self, "audio_metadata_dict") and self.audio_metadata_dict)
|
| 249 |
+
or (hasattr(self, "video_metadata_dict") and self.video_metadata_dict)
|
| 250 |
+
):
|
| 251 |
+
up_path = await apply_metadata_title(
|
| 252 |
+
self,
|
| 253 |
+
up_path,
|
| 254 |
+
gid,
|
| 255 |
+
getattr(self, "metadata_dict", {}),
|
| 256 |
+
getattr(self, "audio_metadata_dict", {}),
|
| 257 |
+
getattr(self, "video_metadata_dict", {}),
|
| 258 |
+
)
|
| 259 |
+
if self.is_cancelled:
|
| 260 |
+
return
|
| 261 |
+
|
| 262 |
+
self.name = up_path.replace(f"{up_dir.rstrip('/')}/", "").split("/", 1)[0]
|
| 263 |
+
self.size = await get_path_size(up_path)
|
| 264 |
+
self.clear()
|
| 265 |
+
|
| 266 |
+
if self.is_leech and self.is_file:
|
| 267 |
+
fname = ospath.basename(up_path)
|
| 268 |
+
self.file_details["filename"] = fname
|
| 269 |
+
self.file_details["mime_type"] = (guess_type(fname))[
|
| 270 |
+
0
|
| 271 |
+
] or "application/octet-stream"
|
| 272 |
+
|
| 273 |
+
if self.name_swap:
|
| 274 |
+
up_path = await self.substitute(up_path)
|
| 275 |
+
if self.is_cancelled:
|
| 276 |
+
return
|
| 277 |
+
self.is_file = await aiopath.isfile(up_path)
|
| 278 |
+
self.name = up_path.replace(f"{up_dir}/", "").split("/", 1)[0]
|
| 279 |
+
|
| 280 |
+
if self.screen_shots:
|
| 281 |
+
up_path = await self.generate_screenshots(up_path)
|
| 282 |
+
if self.is_cancelled:
|
| 283 |
+
return
|
| 284 |
+
self.is_file = await aiopath.isfile(up_path)
|
| 285 |
+
self.name = up_path.replace(f"{up_dir}/", "").split("/", 1)[0]
|
| 286 |
+
self.size = await get_path_size(up_dir)
|
| 287 |
+
|
| 288 |
+
if self.convert_audio or self.convert_video:
|
| 289 |
+
up_path = await self.convert_media(
|
| 290 |
+
up_path,
|
| 291 |
+
gid,
|
| 292 |
+
)
|
| 293 |
+
if self.is_cancelled:
|
| 294 |
+
return
|
| 295 |
+
self.is_file = await aiopath.isfile(up_path)
|
| 296 |
+
self.name = up_path.replace(f"{up_dir}/", "").split("/", 1)[0]
|
| 297 |
+
self.size = await get_path_size(up_dir)
|
| 298 |
+
self.clear()
|
| 299 |
+
|
| 300 |
+
if self.sample_video:
|
| 301 |
+
up_path = await self.generate_sample_video(up_path, gid)
|
| 302 |
+
if self.is_cancelled:
|
| 303 |
+
return
|
| 304 |
+
self.is_file = await aiopath.isfile(up_path)
|
| 305 |
+
self.name = up_path.replace(f"{up_dir}/", "").split("/", 1)[0]
|
| 306 |
+
self.size = await get_path_size(up_dir)
|
| 307 |
+
self.clear()
|
| 308 |
+
|
| 309 |
+
if self.compress:
|
| 310 |
+
up_path = await self.proceed_compress(
|
| 311 |
+
up_path,
|
| 312 |
+
gid,
|
| 313 |
+
)
|
| 314 |
+
self.is_file = await aiopath.isfile(up_path)
|
| 315 |
+
if self.is_cancelled:
|
| 316 |
+
return
|
| 317 |
+
self.clear()
|
| 318 |
+
|
| 319 |
+
self.name = up_path.replace(f"{up_dir}/", "").split("/", 1)[0]
|
| 320 |
+
self.size = await get_path_size(up_dir)
|
| 321 |
+
|
| 322 |
+
if self.is_leech and not self.compress:
|
| 323 |
+
await self.proceed_split(up_path, gid)
|
| 324 |
+
if self.is_cancelled:
|
| 325 |
+
return
|
| 326 |
+
self.clear()
|
| 327 |
+
|
| 328 |
+
self.subproc = None
|
| 329 |
+
|
| 330 |
+
add_to_queue, event = await check_running_tasks(self, "up")
|
| 331 |
+
await start_from_queued()
|
| 332 |
+
if add_to_queue:
|
| 333 |
+
LOGGER.info(f"Added to Queue/Upload: {self.name}")
|
| 334 |
+
async with task_dict_lock:
|
| 335 |
+
task_dict[self.mid] = QueueStatus(self, gid, "Up")
|
| 336 |
+
await event.wait()
|
| 337 |
+
if self.is_cancelled:
|
| 338 |
+
return
|
| 339 |
+
LOGGER.info(f"Start from Queued/Upload: {self.name}")
|
| 340 |
+
|
| 341 |
+
self.size = await get_path_size(up_dir)
|
| 342 |
+
|
| 343 |
+
if self.is_yt:
|
| 344 |
+
LOGGER.info(f"Up to yt Name: {self.name}")
|
| 345 |
+
yt = YouTubeUpload(self, up_path)
|
| 346 |
+
async with task_dict_lock:
|
| 347 |
+
task_dict[self.mid] = YtStatus(self, yt, gid, "up")
|
| 348 |
+
await gather(
|
| 349 |
+
update_status_message(self.message.chat.id),
|
| 350 |
+
sync_to_async(yt.upload),
|
| 351 |
+
)
|
| 352 |
+
del yt
|
| 353 |
+
elif self.is_leech:
|
| 354 |
+
LOGGER.info(f"Leech Name: {self.name}")
|
| 355 |
+
tg = TelegramUploader(self, up_dir)
|
| 356 |
+
async with task_dict_lock:
|
| 357 |
+
task_dict[self.mid] = TelegramStatus(self, tg, gid, "up")
|
| 358 |
+
await gather(
|
| 359 |
+
update_status_message(self.message.chat.id),
|
| 360 |
+
tg.upload(),
|
| 361 |
+
)
|
| 362 |
+
del tg
|
| 363 |
+
elif self.is_uphoster:
|
| 364 |
+
LOGGER.info(f"Uphoster Upload Name: {self.name}")
|
| 365 |
+
uphoster_service = self.user_dict.get("UPHOSTER_SERVICE", "gofile")
|
| 366 |
+
services = uphoster_service.split(",")
|
| 367 |
+
ddl = MultiUphosterUpload(self, up_path, services)
|
| 368 |
+
async with task_dict_lock:
|
| 369 |
+
task_dict[self.mid] = UphosterStatus(self, ddl, gid, "up")
|
| 370 |
+
await gather(
|
| 371 |
+
update_status_message(self.message.chat.id),
|
| 372 |
+
ddl.upload(),
|
| 373 |
+
)
|
| 374 |
+
del ddl
|
| 375 |
+
elif is_gdrive_id(self.up_dest):
|
| 376 |
+
LOGGER.info(f"Gdrive Upload Name: {self.name}")
|
| 377 |
+
drive = GoogleDriveUpload(self, up_path)
|
| 378 |
+
async with task_dict_lock:
|
| 379 |
+
task_dict[self.mid] = GoogleDriveStatus(self, drive, gid, "up")
|
| 380 |
+
await gather(
|
| 381 |
+
update_status_message(self.message.chat.id),
|
| 382 |
+
sync_to_async(drive.upload),
|
| 383 |
+
)
|
| 384 |
+
del drive
|
| 385 |
+
else:
|
| 386 |
+
LOGGER.info(f"Rclone Upload Name: {self.name}")
|
| 387 |
+
RCTransfer = RcloneTransferHelper(self)
|
| 388 |
+
async with task_dict_lock:
|
| 389 |
+
task_dict[self.mid] = RcloneStatus(self, RCTransfer, gid, "up")
|
| 390 |
+
await gather(
|
| 391 |
+
update_status_message(self.message.chat.id),
|
| 392 |
+
RCTransfer.upload(up_path),
|
| 393 |
+
)
|
| 394 |
+
del RCTransfer
|
| 395 |
+
return
|
| 396 |
+
|
| 397 |
+
    async def on_upload_complete(
        self, link, files, folders, mime_type, rclone_path="", dir_id=""
    ):
        """Build and send the completion report once an upload finishes.

        Branches on the upload target (YouTube / Telegram leech / cloud)
        to format an HTML status message, then releases this task's queue
        slot and cleans its working directories.

        NOTE(review): the parameter meaning depends on the caller — in the
        leech branch `files` is used as a {message_link: filename} mapping,
        `folders` as the total file count and `mime_type` as the
        corrupted-file count, while in the cloud branch they are counts and
        a mime string. Confirm against the uploader classes.
        """
        # The task is finished, so drop its "incomplete" DB marker.
        if (
            self.is_super_chat
            and Config.INCOMPLETE_TASK_NOTIFIER
            and Config.DATABASE_URL
        ):
            await database.rm_complete_task(self.message.link)
        # Common header shared by every report variant.
        msg = (
            f"<b><i>{escape(self.name)}</i></b>\n│"
            f"\n┟ <b>Task Size</b> → {get_readable_file_size(self.size)}"
            f"\n┠ <b>Time Taken</b> → {get_readable_time(time() - self.message.date.timestamp())}"
            f"\n┠ <b>In Mode</b> → {self.mode[0]}"
            f"\n┠ <b>Out Mode</b> → {self.mode[1]}"
        )
        LOGGER.info(f"Task Done: {self.name}")
        if self.is_yt:
            # --- YouTube upload report ---------------------------------
            buttons = ButtonMaker()
            if mime_type == "Folder/Playlist":
                msg += "\n┠ <b>Type</b> → Playlist"
                msg += f"\n┖ <b>Total Videos</b> → {files}"
                if link:
                    buttons.url_button("🔗 View Playlist", link)
                user_message = f"{self.tag}\nYour playlist ({files} videos) has been uploaded to YouTube successfully!"
            else:
                msg += "\n┖ <b>Type</b> → Video"
                if link:
                    buttons.url_button("🔗 View Video", link)
                user_message = (
                    f"{self.tag}\nYour video has been uploaded to YouTube successfully!"
                )

            msg += f"\n\n<b>Task By: </b>{self.tag}"

            # Only build a keyboard when there is an actual link to open.
            button = buttons.build_menu(1) if link else None

            await send_message(self.user_id, msg, button)
            if Config.LEECH_DUMP_CHAT:
                await send_message(int(Config.LEECH_DUMP_CHAT), msg, button)
            await send_message(self.message, user_message, button)

        elif self.is_leech:
            # --- Telegram leech report ---------------------------------
            # NOTE(review): `folders` carries the uploaded-file count here
            # and `mime_type` the corrupted count — confirm at call site.
            msg += f"\n<b>Total Files: </b>{folders}"
            if mime_type != 0:
                msg += f"\n┠ <b>Corrupted Files</b> → {mime_type}"
            msg += f"\n┖ <b>Task By</b> → {self.tag}\n\n"

            if self.bot_pm:
                pmsg = msg
                pmsg += "〶 <b><u>Action Performed :</u></b>\n"
                pmsg += "⋗ <i>File(s) have been sent to User PM</i>\n\n"
                if self.is_super_chat:
                    await send_message(self.message, pmsg)

            if not files and not self.is_super_chat:
                await send_message(self.message, msg)
            else:
                log_chat = self.user_id if self.bot_pm else self.message
                msg += "〶 <b><u>Files List :</u></b>\n"
                fmsg = ""
                for index, (link, name) in enumerate(files.items(), start=1):
                    # Telegram message links end in .../<chat_id>/<msg_id>.
                    chat_id, msg_id = link.split("/")[-2:]
                    fmsg += f"{index}. <a href='{link}'>{name}</a>"
                    if Config.MEDIA_STORE and (
                        self.is_super_chat or Config.LEECH_DUMP_CHAT
                    ):
                        if chat_id.isdigit():
                            chat_id = f"-100{chat_id}"
                        flink = f"https://t.me/{TgClient.BNAME}?start={encode_slink('file' + chat_id + '&&' + msg_id)}"
                        fmsg += f"\n┖ <b>Get Media</b> → <a href='{flink}'>Store Link</a> | <a href='https://t.me/share/url?url={flink}'>Share Link</a>"
                    fmsg += "\n"
                    # Flush early to stay under Telegram's message size cap.
                    if len(fmsg.encode() + msg.encode()) > 4000:
                        await send_message(log_chat, msg + fmsg)
                        await sleep(1)
                        fmsg = ""
                if fmsg != "":
                    await send_message(log_chat, msg + fmsg)
        else:
            # --- Cloud (gdrive / rclone / uphoster) report -------------
            msg += f"\n│\n┟ <b>Type</b> → {mime_type}"
            if mime_type == "Folder":
                msg += f"\n┠ <b>SubFolders</b> → {folders}"
                msg += f"\n┠ <b>Files</b> → {files}"

            multi_link_msg = ""
            multi_links = []
            if isinstance(link, dict) and not self.is_yt:
                # MultiUphoster result: one entry per service, each either
                # an error string or a share link.
                for service, result in link.items():
                    if "error" in result:
                        multi_link_msg += (
                            f"{service.capitalize()}: Error - {result['error']}\n"
                        )
                    elif result.get("link"):
                        multi_links.append(
                            (f"{service.capitalize()} Link", result["link"])
                        )
                multi_link_msg = multi_link_msg.strip()
                link = None  # Disable single link button logic

            # NOTE: by operator precedence this reads as
            # link or (rclone_path and SERVE_URL and not private) or multi_links.
            if (
                link
                or rclone_path
                and Config.RCLONE_SERVE_URL
                and not self.private_link
                or multi_links
            ):
                buttons = ButtonMaker()
                if link and Config.SHOW_CLOUD_LINK:
                    buttons.url_button("☁️ Cloud Link", link)
                elif multi_links:
                    for name, url in multi_links:
                        buttons.url_button(name, url)
                else:
                    msg += f"\n\nPath: <code>{rclone_path}</code>"
                if rclone_path and Config.RCLONE_SERVE_URL and not self.private_link:
                    # Build a browsable URL from the rclone "remote:path".
                    remote, rpath = rclone_path.split(":", 1)
                    url_path = rutils.quote(f"{rpath}")
                    share_url = f"{Config.RCLONE_SERVE_URL}/{remote}/{url_path}"
                    if mime_type == "Folder":
                        share_url += "/"
                    buttons.url_button("🔗 Rclone Link", share_url)
                if not rclone_path and dir_id:
                    # Google Drive upload: attach index/view links if set.
                    INDEX_URL = ""
                    if self.private_link:
                        INDEX_URL = self.user_dict.get("INDEX_URL", "") or ""
                    elif Config.INDEX_URL:
                        INDEX_URL = Config.INDEX_URL
                    if INDEX_URL and self.name:
                        safe_name = rutils.quote(self.name.strip("/"))
                        share_url = f"{INDEX_URL}/{safe_name}"
                        buttons.url_button("⚡ Index Link", share_url)
                        if mime_type.startswith(("image", "video", "audio")):
                            share_urls = f"{share_url}?a=view"
                            buttons.url_button("🌐 View Link", share_urls)
                button = buttons.build_menu(2)
            else:
                if not multi_link_msg:
                    msg += f"\n┃\n┠ Path: <code>{rclone_path}</code>"
                button = None
            msg += f"\n┃\n┖ <b>Task By</b> → {self.tag}\n\n"
            group_msg = (
                msg + "〶 <b><u>Action Performed :</u></b>\n"
                "⋗ <i>Cloud link(s) have been sent to User PM</i>\n\n"
            )

            if multi_link_msg:
                group_msg += multi_link_msg + "\n"
                msg += multi_link_msg + "\n"

            if self.bot_pm and self.is_super_chat:
                await send_message(self.user_id, msg, button)

            if hasattr(Config, "MIRROR_LOG_ID") and Config.MIRROR_LOG_ID:
                await send_message(Config.MIRROR_LOG_ID, msg, button)

            await send_message(self.message, group_msg, button)
        # Seeding tasks keep their download dir; only the symlinked
        # upload dir is removed, then the task keeps seeding.
        if self.seed:
            await clean_target(self.up_dir)
            async with queue_dict_lock:
                if self.mid in non_queued_up:
                    non_queued_up.remove(self.mid)
            await start_from_queued()
            return

        if self.pm_msg and (not Config.DELETE_LINKS or Config.CLEAN_LOG_MSG):
            await delete_message(self.pm_msg)

        # Non-seeding path: wipe the task and refresh/clear the status UI.
        await clean_download(self.dir)
        async with task_dict_lock:
            if self.mid in task_dict:
                del task_dict[self.mid]
            count = len(task_dict)
        if count == 0:
            await self.clean()
        else:
            await update_status_message(self.message.chat.id)

        async with queue_dict_lock:
            if self.mid in non_queued_up:
                non_queued_up.remove(self.mid)

        await start_from_queued()
|
| 580 |
+
|
| 581 |
+
    async def on_download_error(self, error, button=None, is_limit=False):
        """Report a failed (or limit-blocked) download and tear the task down.

        Args:
            error: Human-readable failure reason. HTML-escaped in the normal
                template; the limit template embeds it verbatim —
                NOTE(review): callers of the limit path are expected to pass
                safe HTML, confirm at call sites.
            button: Optional inline keyboard attached to the reply.
            is_limit: True when a size/limit check rejected the task, which
                selects the shorter "Limit Breached" template.
        """
        # Unregister the task; `count` decides between full cleanup and a
        # status refresh below. Sibling same-dir tasks are detached too.
        async with task_dict_lock:
            if self.mid in task_dict:
                del task_dict[self.mid]
            count = len(task_dict)
            await self.remove_from_same_dir()
        # Template lines inside the triple-quoted strings start at column 0
        # on purpose — any indent would leak into the sent message.
        msg = (
            f"""〶 <b><i><u>Limit Breached:</u></i></b>
│
┟ <b>Task Size</b> → {get_readable_file_size(self.size)}
┠ <b>In Mode</b> → {self.mode[0]}
┠ <b>Out Mode</b> → {self.mode[1]}
{error}"""
            if is_limit
            else f"""<i><b>〶 Download Stopped!</b></i>
│
┟ <b>Due To</b> → {escape(str(error))}
┠ <b>Task Size</b> → {get_readable_file_size(self.size)}
┠ <b>Time Taken</b> → {get_readable_time(time() - self.message.date.timestamp())}
┠ <b>In Mode</b> → {self.mode[0]}
┠ <b>Out Mode</b> → {self.mode[1]}
┖ <b>Task By</b> → {self.tag}"""
        )

        await send_message(self.message, msg, button)
        if count == 0:
            await self.clean()
        else:
            await update_status_message(self.message.chat.id)

        # The task is over (even though it failed): clear its DB marker.
        if (
            self.is_super_chat
            and Config.INCOMPLETE_TASK_NOTIFIER
            and Config.DATABASE_URL
        ):
            await database.rm_complete_task(self.message.link)

        # Release any queue slot (pending or running, dl or up) still held.
        async with queue_dict_lock:
            if self.mid in queued_dl:
                queued_dl[self.mid].set()
                del queued_dl[self.mid]
            if self.mid in queued_up:
                queued_up[self.mid].set()
                del queued_up[self.mid]
            if self.mid in non_queued_dl:
                non_queued_dl.remove(self.mid)
            if self.mid in non_queued_up:
                non_queued_up.remove(self.mid)

        await start_from_queued()
        # Short grace period before deleting files still being touched.
        await sleep(3)
        await clean_download(self.dir)
        if self.up_dir:
            await clean_download(self.up_dir)
        if self.thumb and await aiopath.exists(self.thumb):
            await remove(self.thumb)
|
| 637 |
+
|
| 638 |
+
async def on_upload_error(self, error):
|
| 639 |
+
async with task_dict_lock:
|
| 640 |
+
if self.mid in task_dict:
|
| 641 |
+
del task_dict[self.mid]
|
| 642 |
+
count = len(task_dict)
|
| 643 |
+
await send_message(self.message, f"{self.tag} {escape(str(error))}")
|
| 644 |
+
if count == 0:
|
| 645 |
+
await self.clean()
|
| 646 |
+
else:
|
| 647 |
+
await update_status_message(self.message.chat.id)
|
| 648 |
+
|
| 649 |
+
if (
|
| 650 |
+
self.is_super_chat
|
| 651 |
+
and Config.INCOMPLETE_TASK_NOTIFIER
|
| 652 |
+
and Config.DATABASE_URL
|
| 653 |
+
):
|
| 654 |
+
await database.rm_complete_task(self.message.link)
|
| 655 |
+
|
| 656 |
+
async with queue_dict_lock:
|
| 657 |
+
if self.mid in queued_dl:
|
| 658 |
+
queued_dl[self.mid].set()
|
| 659 |
+
del queued_dl[self.mid]
|
| 660 |
+
if self.mid in queued_up:
|
| 661 |
+
queued_up[self.mid].set()
|
| 662 |
+
del queued_up[self.mid]
|
| 663 |
+
if self.mid in non_queued_dl:
|
| 664 |
+
non_queued_dl.remove(self.mid)
|
| 665 |
+
if self.mid in non_queued_up:
|
| 666 |
+
non_queued_up.remove(self.mid)
|
| 667 |
+
|
| 668 |
+
await start_from_queued()
|
| 669 |
+
await sleep(3)
|
| 670 |
+
await clean_download(self.dir)
|
| 671 |
+
if self.up_dir:
|
| 672 |
+
await clean_download(self.up_dir)
|
| 673 |
+
if self.thumb and await aiopath.exists(self.thumb):
|
| 674 |
+
await remove(self.thumb)
|
bot/helper/mirror_leech_utils/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
|
bot/helper/mirror_leech_utils/download_utils/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
|
bot/helper/mirror_leech_utils/download_utils/aria2_download.py
ADDED
|
@@ -0,0 +1,105 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from aiofiles.os import remove, path as aiopath
|
| 2 |
+
from aiofiles import open as aiopen
|
| 3 |
+
from base64 import b64encode
|
| 4 |
+
from aiohttp.client_exceptions import ClientError
|
| 5 |
+
from asyncio import TimeoutError
|
| 6 |
+
|
| 7 |
+
from .... import task_dict_lock, task_dict, LOGGER
|
| 8 |
+
from ....core.config_manager import Config
|
| 9 |
+
from ....core.torrent_manager import TorrentManager, is_metadata, aria2_name
|
| 10 |
+
from ...ext_utils.bot_utils import bt_selection_buttons
|
| 11 |
+
from ...ext_utils.task_manager import check_running_tasks
|
| 12 |
+
from ...mirror_leech_utils.status_utils.aria2_status import Aria2Status
|
| 13 |
+
from ...telegram_helper.message_utils import send_status_message, send_message
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
async def add_aria2_download(listener, dpath, header, ratio, seed_time):
    """Start an aria2c download (HTTP/FTP/magnet/.torrent) for *listener*.

    Builds the aria2c option dict, queues the task when the running-task
    limit is reached, registers an Aria2Status in task_dict and, when
    torrent file selection is requested, pauses the download until the
    user has picked files.

    Args:
        listener: Task listener carrying link, name, mid, message, etc.
        dpath: Download directory passed as aria2c's ``dir`` option.
        header: Optional custom HTTP header string.
        ratio: Optional seed-ratio for torrents.
        seed_time: Optional seed-time for torrents.
    """
    # Honor the global torrent kill-switch before contacting aria2.
    if Config.DISABLE_TORRENTS and (
        listener.link.startswith("magnet:") or listener.link.endswith(".torrent")
    ):
        await listener.on_download_error("Torrent and magnet downloads are disabled.")
        return
    a2c_opt = {"dir": dpath}
    if listener.name:
        a2c_opt["out"] = listener.name
    if header:
        a2c_opt["header"] = header
    if ratio:
        a2c_opt["seed-ratio"] = ratio
    if seed_time:
        a2c_opt["seed-time"] = seed_time
    if TORRENT_TIMEOUT := Config.TORRENT_TIMEOUT:
        a2c_opt["bt-stop-timeout"] = f"{TORRENT_TIMEOUT}"

    add_to_queue, event = await check_running_tasks(listener)
    if add_to_queue:
        # A queued magnet must still fetch metadata; pause right after it.
        if listener.link.startswith("magnet:"):
            a2c_opt["pause-metadata"] = "true"
        else:
            a2c_opt["pause"] = "true"

    try:
        if await aiopath.exists(listener.link):
            # Local .torrent file: send its base64 payload via addTorrent.
            async with aiopen(listener.link, "rb") as tf:
                torrent = await tf.read()
            encoded = b64encode(torrent).decode()
            params = [encoded, [], a2c_opt]
            gid = await TorrentManager.aria2.jsonrpc("addTorrent", params)
        else:
            gid = await TorrentManager.aria2.addUri(
                uris=[listener.link], options=a2c_opt
            )
    except Exception as e:
        # TimeoutError and aiohttp's ClientError are Exception subclasses,
        # so a single handler replaces the old redundant
        # (TimeoutError, ClientError, Exception) tuple.
        LOGGER.error(f"Aria2c Download Error: {e}")
        await listener.on_download_error(f"{e}")
        return
    download = await TorrentManager.aria2.tellStatus(gid)
    if download.get("errorMessage"):
        # Strip angle brackets so the text cannot break the HTML reply.
        error = str(download["errorMessage"]).replace("<", " ").replace(">", " ")
        LOGGER.error(f"Aria2c Download Error: {error}")
        await TorrentManager.aria2_remove(download)
        await listener.on_download_error(error)
        return
    if await aiopath.exists(listener.link):
        # aria2 now owns the torrent payload; drop our temporary file.
        await remove(listener.link)

    name = aria2_name(download)
    async with task_dict_lock:
        task_dict[listener.mid] = Aria2Status(listener, gid, queued=add_to_queue)
    if add_to_queue:
        LOGGER.info(f"Added to Queue/Download: {name}. Gid: {gid}")
        if (
            not listener.select or "bittorrent" not in download
        ) and listener.multi <= 1:
            await send_status_message(listener.message)
    else:
        LOGGER.info(f"Aria2Download started: {name}. Gid: {gid}")

    await listener.on_download_start()

    if (
        not add_to_queue
        and (not listener.select or not Config.BASE_URL)
        and listener.multi <= 1
    ):
        await send_status_message(listener.message)
    elif listener.select and "bittorrent" in download and not is_metadata(download):
        # File selection: keep the torrent paused while the user chooses.
        if not add_to_queue:
            await TorrentManager.aria2.forcePause(gid)
        SBUTTONS = bt_selection_buttons(gid)
        msg = "Your download paused. Choose files then press Done Selecting button to start downloading."
        await send_message(listener.message, msg, SBUTTONS)

    if add_to_queue:
        # Block until the scheduler releases this task from the queue.
        await event.wait()
        if listener.is_cancelled:
            return
        async with task_dict_lock:
            task = task_dict[listener.mid]
            task.queued = False
            await task.update()
            new_gid = task.gid()

        # The gid may have changed (e.g. metadata -> real torrent).
        await TorrentManager.aria2.unpause(new_gid)
        LOGGER.info(f"Start Queued Download from Aria2c: {name}. Gid: {new_gid}")
|
bot/helper/mirror_leech_utils/download_utils/direct_downloader.py
ADDED
|
@@ -0,0 +1,68 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from secrets import token_hex
|
| 2 |
+
|
| 3 |
+
from .... import (
|
| 4 |
+
LOGGER,
|
| 5 |
+
task_dict,
|
| 6 |
+
task_dict_lock,
|
| 7 |
+
)
|
| 8 |
+
from ...ext_utils.task_manager import (
|
| 9 |
+
check_running_tasks,
|
| 10 |
+
stop_duplicate_check,
|
| 11 |
+
limit_checker,
|
| 12 |
+
)
|
| 13 |
+
from ...listeners.direct_listener import DirectListener
|
| 14 |
+
from ...mirror_leech_utils.status_utils.direct_status import DirectStatus
|
| 15 |
+
from ...mirror_leech_utils.status_utils.queue_status import QueueStatus
|
| 16 |
+
from ...telegram_helper.message_utils import send_status_message
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
async def add_direct_download(listener, path):
    """Download a pre-resolved set of direct links through DirectListener.

    ``listener.link`` is expected to be a dict produced by the direct-link
    generator with "contents", "total_size", "title" and an optional
    "header" key.

    Args:
        listener: Task listener; name/size are filled in from the details.
        path: Base download directory; the task name is appended to it.
    """
    details = listener.link
    if not (contents := details.get("contents")):
        await listener.on_download_error("There is nothing to download!")
        return
    listener.size = details["total_size"]

    if not listener.name:
        listener.name = details["title"]
    path = f"{path}/{listener.name}"

    # Abort early on duplicates or breached size limits.
    msg, button = await stop_duplicate_check(listener)
    if msg:
        await listener.on_download_error(msg, button)
        return

    if limit_exceeded := await limit_checker(listener):
        await listener.on_download_error(limit_exceeded, is_limit=True)
        return

    gid = token_hex(5)
    add_to_queue, event = await check_running_tasks(listener)
    if add_to_queue:
        # Park the task as QueueStatus and wait for a free slot.
        LOGGER.info(f"Added to Queue/Download: {listener.name}")
        async with task_dict_lock:
            task_dict[listener.mid] = QueueStatus(listener, gid, "dl")
        await listener.on_download_start()
        if listener.multi <= 1:
            await send_status_message(listener.message)
        await event.wait()
        if listener.is_cancelled:
            return

    # Direct links only: never follow torrent/metalink payloads.
    a2c_opt = {"follow-torrent": "false", "follow-metalink": "false"}
    if header := details.get("header"):
        a2c_opt["header"] = header
    directListener = DirectListener(path, listener, a2c_opt)

    async with task_dict_lock:
        task_dict[listener.mid] = DirectStatus(listener, directListener, gid)

    if add_to_queue:
        LOGGER.info(f"Start Queued Download from Direct Download: {listener.name}")
    else:
        # Not queued: on_download_start/status were not sent above yet.
        LOGGER.info(f"Download from Direct Download: {listener.name}")
        await listener.on_download_start()
        if listener.multi <= 1:
            await send_status_message(listener.message)

    await directListener.download(contents)
|
bot/helper/mirror_leech_utils/download_utils/direct_link_generator.py
ADDED
|
@@ -0,0 +1,2048 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
from base64 import b64decode, b64encode
from hashlib import sha256
from http.cookiejar import MozillaCookieJar
from json import loads
from os import path as ospath
from re import findall, match, search
from time import sleep
from urllib.parse import parse_qs, quote, unquote, urlparse
from uuid import uuid4

from cloudscraper import create_scraper
from lxml.etree import HTML
from requests import RequestException, Session, get, post
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry

from ....core.config_manager import Config
from ...ext_utils.exceptions import DirectDownloadLinkException
from ...ext_utils.help_messages import PASSWORD_ERROR_MESSAGE
from ...ext_utils.links_utils import is_share_link
from ...ext_utils.status_utils import speed_string_to_bytes
| 22 |
+
# Desktop Firefox user-agent string for scraping requests, so file hosts
# serve their regular browser-facing pages instead of bot responses.
user_agent = (
    "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:122.0) Gecko/20100101 Firefox/122.0"
)
|
| 25 |
+
|
| 26 |
+
# File hosts that can be resolved through the debrid-link.com downloader API
# (used by direct_link_generator when Config.DEBRID_LINK_API is set).
# Matching is substring-based against the URL's hostname.
debrid_link_supported_sites = [
    "1fichier.com",
    "anonfiles.com",
    "bayfiles.com",
    "clicknupload.link",
    "clicknupload.org",
    "clicknupload.co",
    "clicknupload.cc",
    "clicknupload.download",
    "clicknupload.club",
    "dailyuploads.net",
    "ddl.to",
    "ddownload.com",
    "ddownload.link",
    "drop.download",
    "dropbox.com",
    "dropboxusercontent.com",
    "easyupload.io",
    "emload.com",
    "file.al",
    "fileaxa.com",
    "filecat.net",
    "filedot.to",
    "filedot.xyz",
    "filextras.com",
    "filer.net",
    "filespace.com",
    "filestore.me",
    "gigapeta.com",
    "gofile.io",
    "hexupload.net",
    "hitfile.net",
    "hulkshare.com",
    "isra.cloud",
    "katfile.com",
    "kshared.com",
    "mediafire.com",
    "mega.nz",
    "mega.co.nz",
    "mexashare.com",
    "mixdrop.co",
    "mixdrop.to",
    "mixdrop.sx",
    "mixdrop.club",
    "modsbase.com",
    "nelion.me",
    "pixeldrain.com",
    "prefiles.com",
    "racaty.net",
    "rapidgator.net",
    "rapidgator.asia",
    "rg.to",
    "scribd.com",
    "send.cm",
    "sharemods.com",
    "silkfiles.com",
    "soundcloud.com",
    "streamtape.com",
    "terabox.com",
    "teraboxapp.com",
    "tezfiles.com",
    "turb.cc",
    "turb.to",
    "turbobit.net",
    "turbobit.cc",
    "turbobit.pw",
    "turbobit.online",
    "turbobit.ru",
    "turbobit.live",
    "trubobit.com",
    "turboblt.co",
    "uloz.to",
    "ulozto.net",
    "ulozto.sk",
    "ulozto.cz",
    "upload.ee",
    "uploadhaven.com",
    "up-4ever.com",
    "up-4ever.net",
    "uptobox.com",
    "uptobox.fr",
    "uptobox.eu",
    "uptobox.link",
    "uptostream.com",
    "uptostream.fr",
    "uptostream.eu",
    "uptostream.link",
    "upvid.pro",
    "upvid.live",
    "upvid.host",
    "upvid.biz",
    "upvid.cloud",
    "uqload.com",
    "uqload.co",
    "uqload.io",
    "userload.co",
    "usersdrive.com",
    "vidoza.net",
    "voe.sx",
    "voe-unblock.com",
    "voeunblock1.com",
    "voeunblock2.com",
    "voeunblock3.com",
    "voeunbl0ck.com",
    "voeunblck.com",
    "voeunblk.com",
    "voe-un-block.com",
    "voeun-block.net",
    "workupload.com",
    "world-bytez.com",
    "worldbytez.com",
    "world-files.com",
    "wupfile.com",
    "zippyshare.com",
]
|
| 141 |
+
|
| 142 |
+
|
| 143 |
+
def direct_link_generator(link):
    """Dispatch *link* to the matching per-host direct-link generator.

    Matching is done by substring against the URL hostname (and, for
    Yandex, against the full link). Returns whatever the per-host helper
    returns: a plain URL string, a ``(url, header)`` tuple, or a details
    dict for multi-file results.

    Raises:
        DirectDownloadLinkException: for invalid URLs, dead hosts, or
            hosts with no matching generator.
    """
    domain = urlparse(link).hostname
    if not domain:
        raise DirectDownloadLinkException("ERROR: Invalid URL")
    # Prefer the debrid service whenever it is configured and supports the host.
    elif Config.DEBRID_LINK_API and any(
        x in domain for x in debrid_link_supported_sites
    ):
        return debrid_link(link)
    # NOTE: checks the full link, not the parsed hostname, to also catch
    # yadi.sk short links embedded in longer URLs.
    elif "yadi.sk" in link or "disk.yandex." in link:
        return yandex_disk(link)
    elif "buzzheavier.com" in domain:
        return buzzheavier(link)
    elif "devuploads" in domain:
        return devuploads(link)
    elif "lulacloud.com" in domain:
        return lulacloud(link)
    elif "fuckingfast.co" in domain:
        return fuckingfast_dl(link)
    elif "mediafire.com" in domain:
        return mediafire(link)
    elif "osdn.net" in domain:
        return osdn(link)
    elif "github.com" in domain:
        return github(link)
    elif "hxfile.co" in domain:
        return hxfile(link)
    elif "1drv.ms" in domain:
        return onedrive(link)
    elif "pixeldrain.com" in domain:
        return pixeldrain(link)
    elif "racaty" in domain:
        return racaty(link)
    elif "1fichier.com" in domain:
        return fichier(link)
    elif "solidfiles.com" in domain:
        return solidfiles(link)
    elif "krakenfiles.com" in domain:
        return krakenfiles(link)
    elif "upload.ee" in domain:
        return uploadee(link)
    elif "gofile.io" in domain:
        return gofile(link)
    elif "send.cm" in domain:
        return send_cm(link)
    elif "tmpsend.com" in domain:
        return tmpsend(link)
    elif "easyupload.io" in domain:
        return easyupload(link)
    elif "streamvid.net" in domain:
        return streamvid(link)
    elif "shrdsk.me" in domain:
        return shrdsk(link)
    elif "u.pcloud.link" in domain:
        return pcloud(link)
    elif "qiwi.gg" in domain:
        return qiwi(link)
    elif "mp4upload.com" in domain:
        return mp4upload(link)
    elif "berkasdrive.com" in domain:
        return berkasdrive(link)
    elif "swisstransfer.com" in domain:
        return swisstransfer(link)
    elif "instagram.com" in domain:
        return instagram(link)
    elif any(x in domain for x in ["akmfiles.com", "akmfls.xyz"]):
        return akmfiles(link)
    # doodstream mirror domains
    elif any(
        x in domain
        for x in [
            "dood.watch",
            "doodstream.com",
            "dood.to",
            "dood.so",
            "dood.cx",
            "dood.la",
            "dood.ws",
            "dood.sh",
            "doodstream.co",
            "dood.pm",
            "dood.wf",
            "dood.re",
            "dood.video",
            "dooood.com",
            "dood.yt",
            "doods.yt",
            "dood.stream",
            "doods.pro",
            "ds2play.com",
            "d0o0d.com",
            "ds2video.com",
            "do0od.com",
            "d000d.com",
        ]
    ):
        return doods(link)
    # streamtape mirror domains
    elif any(
        x in domain
        for x in [
            "streamtape.com",
            "streamtape.co",
            "streamtape.cc",
            "streamtape.to",
            "streamtape.net",
            "streamta.pe",
            "streamtape.xyz",
        ]
    ):
        return streamtape(link)
    elif any(x in domain for x in ["wetransfer.com", "we.tl"]):
        return wetransfer(link)
    # terabox rebrand/mirror domains
    elif any(
        x in domain
        for x in [
            "terabox.com",
            "nephobox.com",
            "4funbox.com",
            "mirrobox.com",
            "momerybox.com",
            "teraboxapp.com",
            "1024tera.com",
            "terabox.app",
            "gibibox.com",
            "goaibox.com",
            "terasharelink.com",
            "teraboxlink.com",
            "freeterabox.com",
            "1024terabox.com",
            "teraboxshare.com",
            "terafileshare.com",
        ]
    ):
        return terabox(link)
    # filelions / streamwish family (shared extractor)
    elif any(
        x in domain
        for x in [
            "filelions.co",
            "filelions.site",
            "filelions.live",
            "filelions.to",
            "mycloudz.cc",
            "cabecabean.lol",
            "filelions.online",
            "embedwish.com",
            "kitabmarkaz.xyz",
            "wishfast.top",
            "streamwish.to",
            "kissmovies.net",
        ]
    ):
        return filelions_and_streamwish(link)
    elif any(x in domain for x in ["streamhub.ink", "streamhub.to"]):
        return streamhub(link)
    elif any(x in domain for x in ["hubcloud.one", "hubcloud.foo"]):
        return hubcloud(link)
    elif any(
        x in domain
        for x in [
            "linkbox.to",
            "lbx.to",
            "teltobx.net",
            "telbx.net",
        ]
    ):
        return linkBox(link)
    # Generic Google-Drive "sharer" sites; gdtot/filepress get dedicated handlers.
    elif is_share_link(link):
        if "gdtot" in domain:
            return gdtot(link)
        elif "filepress" in domain:
            return filepress(link)
        else:
            return sharer_scraper(link)
    # Hosts that are known to be shut down — fail fast with a clear message.
    elif any(
        x in domain
        for x in [
            "anonfiles.com",
            "zippyshare.com",
            "letsupload.io",
            "hotfile.io",
            "bayfiles.com",
            "megaupload.nz",
            "letsupload.cc",
            "filechan.org",
            "myfile.is",
            "vshare.is",
            "rapidshare.nu",
            "lolabits.se",
            "openload.cc",
            "share-online.is",
            "upvid.cc",
            "uptobox.com",
            "uptobox.fr",
        ]
    ):
        raise DirectDownloadLinkException(f"ERROR: R.I.P {domain}")
    else:
        raise DirectDownloadLinkException(f"No Direct link function found for {link}")
|
| 340 |
+
|
| 341 |
+
|
| 342 |
+
def get_captcha_token(session, params):
    """Perform the reCAPTCHA v2 anchor/reload handshake and return a token.

    Fetches the anchor page to obtain the intermediate ``recaptcha-token``,
    then posts it back to the reload endpoint and extracts the final
    ``rresp`` token. Returns ``None`` when either step yields no token.
    ``params`` is mutated in place (``c`` and ``reason`` are added), matching
    the original behaviour.
    """
    base = "https://www.google.com/recaptcha/api2"
    anchor_page = HTML(session.get(f"{base}/anchor", params=params).text)
    anchor_token = anchor_page.xpath('//input[@id="recaptcha-token"]/@value')
    if not anchor_token:
        return None
    params["c"] = anchor_token[0]
    params["reason"] = "q"
    reload_res = session.post(f"{base}/reload", params=params)
    tokens = findall(r'"rresp","(.*?)"', reload_res.text)
    return tokens[0] if tokens else None
|
| 353 |
+
|
| 354 |
+
|
| 355 |
+
def debrid_link(url):
    """Resolve *url* through the debrid-link.com downloader API.

    Returns:
        A direct download URL string when the API reports a single file,
        or a details dict (``contents``/``title``/``total_size``) for
        multi-file results.

    Raises:
        DirectDownloadLinkException: when the API reports a failure.

    Fixes over the original: ``unquote`` was used but never imported
    (NameError), ``path.join`` referenced an undefined name ``path``
    (a single-argument join is just the title itself), and the
    ``!= True`` comparison is replaced by a truthiness check.
    """
    cget = create_scraper().request
    resp = cget(
        "POST",
        f"https://debrid-link.com/api/v2/downloader/add?access_token={Config.DEBRID_LINK_API}",
        data={"url": url},
    ).json()
    if not resp["success"]:
        raise DirectDownloadLinkException(
            f"ERROR: {resp['error']} & ERROR ID: {resp['error_id']}"
        )
    if isinstance(resp["value"], dict):
        return resp["value"]["downloadUrl"]
    elif isinstance(resp["value"], list):
        details = {
            "contents": [],
            # Folder title is the (decoded) last path segment of the request URL.
            "title": unquote(url.rstrip("/").split("/")[-1]),
            "total_size": 0,
        }
        for dl in resp["value"]:
            # Skip entries the debrid service has already invalidated.
            if dl.get("expired", False):
                continue
            item = {
                "path": details["title"],
                "filename": dl["name"],
                "url": dl["downloadUrl"],
            }
            if "size" in dl:
                details["total_size"] += dl["size"]
            details["contents"].append(item)
        return details
|
| 386 |
+
|
| 387 |
+
|
| 388 |
+
def hubcloud(url):
    """Generate a direct link for hubcloud URLs via the hubcloud.cfd bypass API.

    Returns the URL of the highest-priority link reported by the service.

    Raises:
        DirectDownloadLinkException: on request failure or empty results.
    """
    try:
        payload = get(f"http://hubcloud.cfd/bypass?url={url}").json()
    except Exception as e:
        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e

    candidates = payload.get("links")
    if not candidates:
        raise DirectDownloadLinkException("ERROR: No links found")

    # Highest priority wins; like the original sort-then-index, ties resolve
    # to the first such entry.
    best = max(candidates, key=lambda entry: entry.get("priority", 0))
    return best["url"]
|
| 402 |
+
|
| 403 |
+
|
| 404 |
+
def buzzheavier(link):
    """
    Generate a direct download link for buzzheavier URLs.
    @param link: URL from buzzheavier
    @return: Direct download link
    """
    if not link.endswith("/"):
        link += "/"
    scraper = create_scraper()
    try:
        res = scraper.get(
            link + "download", headers={"hx-current-url": link, "referer": link}
        )
    except Exception as e:
        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e

    # The site answers htmx requests with the target in the Hx-Redirect header.
    redirect_url = res.headers.get("Hx-Redirect", "None")
    if redirect_url == "None":
        raise DirectDownloadLinkException("ERROR: Direct link not found")

    if redirect_url.startswith("http"):
        return redirect_url
    # Relative redirect: prefix the original host.
    return f"https://{urlparse(link).netloc}{redirect_url}"
|
| 428 |
+
|
| 429 |
+
|
| 430 |
+
def fuckingfast_dl(url):
    """
    Generate a direct download link for fuckingfast.co URLs.
    @param url: URL from fuckingfast.co
    @return: Direct download link

    Fixes over the original: the local variable ``match`` shadowed the
    module-level ``re.match`` import (renamed to ``mobj``), and a
    DirectDownloadLinkException raised inside the ``try`` was re-caught
    by ``except Exception`` and double-wrapped ("ERROR: ERROR: ...");
    it is now re-raised unchanged.
    """
    session = Session()
    url = url.strip()

    try:
        content = session.get(url).text
        # The page opens the real download URL via window.open(...).
        pattern = r'window\.open\((["\'])(https://fuckingfast\.co/dl/[^"\']+)\1'
        mobj = search(pattern, content)

        if not mobj:
            raise DirectDownloadLinkException(
                "ERROR: Could not find download link in page"
            )

        return mobj.group(2)

    except DirectDownloadLinkException:
        # Already a clean user-facing error; don't wrap it again below.
        raise
    except Exception as e:
        raise DirectDownloadLinkException(f"ERROR: {str(e)}") from e
    finally:
        session.close()
|
| 457 |
+
|
| 458 |
+
|
| 459 |
+
def devuploads(url):
    """
    Generate a direct download link for devuploads.com URLs.
    @param url: URL from devuploads.com
    @return: Direct download link

    Multi-step flow: scrape the hidden form inputs from the file page,
    relay them through gujjukhabar.in, fetch the ``ipp`` and ``xd``
    anti-bot tokens, then re-post everything to the original URL and
    read the final link from the ``orilink`` input.
    """
    session = Session()
    res = session.get(url)
    html = HTML(res.text)
    # First form: hidden inputs on the devuploads file page.
    if not html.xpath("//input[@name]"):
        raise DirectDownloadLinkException("ERROR: Unable to find link data")
    data = {i.get("name"): i.get("value") for i in html.xpath("//input[@name]")}
    # Relay through the intermediary site and collect its form inputs.
    res = session.post("https://gujjukhabar.in/", data=data)
    html = HTML(res.text)
    if not html.xpath("//input[@name]"):
        raise DirectDownloadLinkException("ERROR: Unable to find link data")
    data = {i.get("name"): i.get("value") for i in html.xpath("//input[@name]")}
    # "ipp" token endpoint; Origin/Referer headers make it accept the request.
    resp = session.get(
        "https://du2.devuploads.com/dlhash.php",
        headers={
            "Origin": "https://gujjukhabar.in",
            "Referer": "https://gujjukhabar.in/",
        },
    )
    if not resp.text:
        raise DirectDownloadLinkException("ERROR: Unable to find ipp value")
    data["ipp"] = resp.text.strip()
    if not data.get("rand"):
        raise DirectDownloadLinkException("ERROR: Unable to find rand value")
    # Exchange the "rand" form value for the "xd" token.
    randpost = session.post(
        "https://devuploads.com/token/token.php",
        data={"rand": data["rand"], "msg": ""},
        headers={
            "Origin": "https://gujjukhabar.in",
            "Referer": "https://gujjukhabar.in/",
        },
    )
    if not randpost:
        raise DirectDownloadLinkException("ERROR: Unable to find xd value")
    data["xd"] = randpost.text.strip()
    # NOTE(review): hard-coded third-party proxy with embedded credentials;
    # a single external point of failure (and a secret committed to source).
    proxy = "http://hsakalu2:hsakalu2@45.151.162.198:6600"
    res = session.post(url, data=data, proxies={"http": proxy, "https": proxy})
    html = HTML(res.text)
    if not html.xpath("//input[@name='orilink']/@value"):
        raise DirectDownloadLinkException("ERROR: Unable to find Direct Link")
    direct_link = html.xpath("//input[@name='orilink']/@value")
    session.close()
    return direct_link[0]
|
| 507 |
+
|
| 508 |
+
|
| 509 |
+
def lulacloud(url):
    """
    Generate a direct download link for www.lulacloud.com URLs.
    @param url: URL from www.lulacloud.com
    @return: Direct download link
    """
    # The host answers the POST with a redirect whose Location header is
    # the direct link; redirects are disabled so we can read it ourselves.
    with Session() as http:
        try:
            resp = http.post(url, headers={"Referer": url}, allow_redirects=False)
            return resp.headers["location"]
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {str(e)}") from e
|
| 523 |
+
|
| 524 |
+
|
| 525 |
+
def mediafire(url, session=None):
    """MediaFire direct link generator.

    Supports ``url::password`` suffix syntax for password-protected files.
    Recurses (passing the open *session*) when the page yields a protocol-
    relative ``//download...`` link or a repair/continue link; folder URLs
    are delegated to ``mediafireFolder``.
    """
    if "/folder/" in url:
        return mediafireFolder(url)
    # Optional "::password" suffix supplied by the user.
    if "::" in url:
        _password = url.split("::")[-1]
        url = url.split("::")[-2]
    else:
        _password = ""
    # Already a direct download host URL — nothing to scrape.
    if final_link := findall(
        r"https?:\/\/download\d+\.mediafire\.com\/\S+\/\S+\/\S+", url
    ):
        return final_link[0]

    def _repair_download(url, session):
        # Follow MediaFire's "continue" page once, then retry the main flow.
        try:
            html = HTML(session.get(url).text)
            if new_link := html.xpath('//a[@id="continue-btn"]/@href'):
                return mediafire(f"https://mediafire.com/{new_link[0]}")
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e

    if session is None:
        session = create_scraper()
        # Strip query/fragment so the password suffix handling above sticks.
        parsed_url = urlparse(url)
        url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"
    try:
        html = HTML(session.get(url).text)
    except Exception as e:
        session.close()
        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
    if error := html.xpath('//p[@class="notranslate"]/text()'):
        session.close()
        raise DirectDownloadLinkException(f"ERROR: {error[0]}")
    if html.xpath("//div[@class='passwordPrompt']"):
        if not _password:
            session.close()
            raise DirectDownloadLinkException(
                f"ERROR: {PASSWORD_ERROR_MESSAGE}".format(url)
            )
        try:
            html = HTML(session.post(url, data={"downloadp": _password}).text)
        except Exception as e:
            session.close()
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
        # Prompt still present after posting the password => wrong password.
        if html.xpath("//div[@class='passwordPrompt']"):
            session.close()
            raise DirectDownloadLinkException("ERROR: Wrong password.")
    if not (final_link := html.xpath('//a[@aria-label="Download file"]/@href')):
        if repair_link := html.xpath("//a[@class='retry']/@href"):
            return _repair_download(repair_link[0], session)
        raise DirectDownloadLinkException(
            "ERROR: No links found in this page Try Again"
        )
    # Protocol-relative link: normalize and re-enter with the same session.
    if final_link[0].startswith("//"):
        final_url = f"https://{final_link[0][2:]}"
        if _password:
            final_url += f"::{_password}"
        return mediafire(final_url, session)
    session.close()
    return final_link[0]
|
| 585 |
+
|
| 586 |
+
|
| 587 |
+
def osdn(url):
    """OSDN (osdn.net) direct link generator.

    Scrapes the mirror link from the download page and returns an
    absolute osdn.net URL.

    Fix over the original: ``html.xapth`` was a typo for ``xpath`` and
    raised AttributeError on every invocation.
    """
    with create_scraper() as session:
        try:
            html = HTML(session.get(url).text)
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
        if not (direct_link := html.xpath('//a[@class="mirror_link"]/@href')):
            raise DirectDownloadLinkException("ERROR: Direct link not found")
        # The mirror link is site-relative; prefix the host.
        return f"https://osdn.net{direct_link[0]}"
|
| 596 |
+
|
| 597 |
+
|
| 598 |
+
def yandex_disk(url: str) -> str:
    """Yandex.Disk direct link generator.

    Based on https://github.com/wldhx/yadisk-direct.
    Returns a message string when *url* contains no Yandex.Disk link
    (preserving the original's non-raising behaviour for that case).
    """
    matches = findall(r"\b(https?://(yadi\.sk|disk\.yandex\.(com|ru))\S+)", url)
    if not matches:
        return "No Yandex.Disk links found\n"
    link = matches[0][0]
    api = "https://cloud-api.yandex.net/v1/disk/public/resources/download?public_key={}"
    try:
        return get(api.format(link)).json()["href"]
    except KeyError as e:
        raise DirectDownloadLinkException(
            "ERROR: File not found/Download limit reached"
        ) from e
|
| 612 |
+
|
| 613 |
+
|
| 614 |
+
def github(url):
    """GitHub direct links generator.

    Only GitHub release-asset URLs are supported; the direct link is the
    Location header of the (unfollowed) redirect response.
    """
    if not findall(r"\bhttps?://.*github\.com.*releases\S+", url):
        raise DirectDownloadLinkException("No GitHub Releases links found")
    with create_scraper() as session:
        res = session.get(url, stream=True, allow_redirects=False)
        if location := res.headers.get("location"):
            return location
    raise DirectDownloadLinkException("ERROR: Can't extract the link")
|
| 625 |
+
|
| 626 |
+
|
| 627 |
+
def hxfile(url):
    """hxfile.co direct link generator.

    Requires a Netscape-format cookie file ``hxfile.txt`` in the working
    directory. Returns a ``(direct_link, header)`` tuple where *header*
    is a ``Referer`` line the downloader must send.
    """
    if not ospath.isfile("hxfile.txt"):
        raise DirectDownloadLinkException("ERROR: hxfile.txt (cookies) Not Found!")
    try:
        jar = MozillaCookieJar()
        jar.load("hxfile.txt")
    except Exception as e:
        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
    cookies = {cookie.name: cookie.value for cookie in jar}
    with Session() as session:
        try:
            # Normalize ".../<code>.html" to ".../<code>".
            if url.strip().endswith(".html"):
                url = url[:-5]
            file_code = url.split("/")[-1]
            # Submit the site's download form with the authenticated cookies.
            html = HTML(
                session.post(
                    url,
                    data={"op": "download2", "id": file_code},
                    cookies=cookies,
                ).text
            )
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
        if direct_link := html.xpath("//a[@class='btn btn-dow']/@href"):
            header = f"Referer: {url}"
            return direct_link[0], header
        raise DirectDownloadLinkException("ERROR: Direct download link not found")
|
| 654 |
+
|
| 655 |
+
|
| 656 |
+
def onedrive(link):
    """Onedrive direct link generator
    By https://github.com/junedkh"""
    with create_scraper() as session:
        try:
            # Follow share-link redirects to reach the canonical URL, then
            # pull resid/authkey out of its query string.
            link = session.get(link).url
            parsed_link = urlparse(link)
            link_data = parse_qs(parsed_link.query)
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
        if not link_data:
            raise DirectDownloadLinkException("ERROR: Unable to find link_data")
        # parse_qs returns lists; take the first value of each parameter.
        folder_id = link_data.get("resid")
        if not folder_id:
            raise DirectDownloadLinkException("ERROR: folder id not found")
        folder_id = folder_id[0]
        authkey = link_data.get("authkey")
        if not authkey:
            raise DirectDownloadLinkException("ERROR: authkey not found")
        authkey = authkey[0]
        # The API is queried with a GET that carries a multipart body and an
        # X-HTTP-Method-Override — an undocumented quirk of this endpoint.
        boundary = uuid4()
        headers = {"content-type": f"multipart/form-data;boundary={boundary}"}
        data = f"--{boundary}\r\nContent-Disposition: form-data;name=data\r\nPrefer: Migration=EnableRedirect;FailOnMigratedFiles\r\nX-HTTP-Method-Override: GET\r\nContent-Type: application/json\r\n\r\n--{boundary}--"
        try:
            # resid is "<drive>!<item>"; the drive id is the part before '!'.
            resp = session.get(
                f"https://api.onedrive.com/v1.0/drives/{folder_id.split('!', 1)[0]}/items/{folder_id}?$select=id,@content.downloadUrl&ump=1&authKey={authkey}",
                headers=headers,
                data=data,
            ).json()
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
    if "@content.downloadUrl" not in resp:
        raise DirectDownloadLinkException("ERROR: Direct link not found")
    return resp["@content.downloadUrl"]
|
| 690 |
+
|
| 691 |
+
|
| 692 |
+
def pixeldrain(url):
    """Pixeldrain direct link generator.

    Resolves the pd.cybar.xyz redirector once and appends the pixeldrain
    file code to its final URL.
    """
    try:
        url = url.rstrip("/")
        # Last path segment, minus any query string, is the file code.
        code = url.split("/")[-1].split("?", 1)[0]
        response = get("https://pd.cybar.xyz/", allow_redirects=True)
        return response.url + code
    except Exception as e:
        # Chain the original exception so the real failure is not hidden
        # behind the generic message (previously raised without `from e`).
        raise DirectDownloadLinkException("ERROR: Direct link not found") from e
|
| 700 |
+
|
| 701 |
+
|
| 702 |
+
def streamtape(url):
    """Streamtape direct link generator.

    Pulls the obfuscated download token out of the page's inline script and
    rebuilds the get_video URL from it.
    """
    splitted_url = url.split("/")
    # URL shapes: https://streamtape.com/v/<id>/<name> (>= 6 parts) or a
    # short form ending in the id.
    _id = splitted_url[4] if len(splitted_url) >= 6 else splitted_url[-1]
    try:
        with Session() as session:
            html = HTML(session.get(url).text)
    except Exception as e:
        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
    # The token lives in a script referencing 'ideoooolink'/'ideoolink'
    # (the site's obfuscated spelling of "videolink").
    script = html.xpath(
        "//script[contains(text(),'ideoooolink')]/text()"
    ) or html.xpath("//script[contains(text(),'ideoolink')]/text()")
    if not script:
        # Fixed typo in the user-facing message ("requeries" -> "required").
        raise DirectDownloadLinkException("ERROR: required script not found")
    if not (link := findall(r"(&expires\S+)'", script[0])):
        raise DirectDownloadLinkException("ERROR: Download link not found")
    return f"https://streamtape.com/get_video?id={_id}{link[-1]}"
|
| 718 |
+
|
| 719 |
+
|
| 720 |
+
def racaty(url):
    """Racaty direct link generator."""
    with create_scraper() as session:
        try:
            # Resolve redirects first, then request the download form.
            url = session.get(url).url
            payload = {"op": "download2", "id": url.split("/")[-1]}
            page = HTML(session.post(url, data=payload).text)
        except Exception as err:
            raise DirectDownloadLinkException(f"ERROR: {err.__class__.__name__}") from err
        links = page.xpath("//a[@id='uniqueExpirylink']/@href")
        if not links:
            raise DirectDownloadLinkException("ERROR: Direct link not found")
        return links[0]
|
| 732 |
+
|
| 733 |
+
|
| 734 |
+
def fichier(link):
    """1Fichier direct link generator
    Based on https://github.com/Maujar

    Supports optional "<url>::<password>" input. Raises
    DirectDownloadLinkException with a human-readable reason on every
    failure path (rate limit, wrong password, not found, ...).
    """
    # NOTE(review): the bracketed group is a character class, not an
    # alternation, but the trailing '?' makes it optional so the pattern
    # still validates 1fichier URLs as intended.
    regex = r"^([http:\/\/|https:\/\/]+)?.*1fichier\.com\/\?.+"
    gan = match(regex, link)
    if not gan:
        raise DirectDownloadLinkException("ERROR: The link you entered is wrong!")
    # "url::password" convention used throughout this module.
    if "::" in link:
        pswd = link.split("::")[-1]
        url = link.split("::")[-2]
    else:
        pswd = None
        url = link
    cget = create_scraper().request
    try:
        if pswd is None:
            req = cget("post", url)
        else:
            pw = {"pass": pswd}
            req = cget("post", url, data=pw)
    except Exception as e:
        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
    if req.status_code == 404:
        raise DirectDownloadLinkException(
            "ERROR: File not found/The link you entered is wrong!"
        )
    html = HTML(req.text)
    # Happy path: the page already carries the orange download button.
    if dl_url := html.xpath('//a[@class="ok btn-general btn-orange"]/@href'):
        return dl_url[0]
    if not (ct_warn := html.xpath('//div[@class="ct_warn"]')):
        raise DirectDownloadLinkException(
            "ERROR: Error trying to generate Direct Link from 1fichier!"
        )
    # 1fichier renders a different number of ct_warn divs depending on the
    # failure reason; branch on that count and inspect the warning text.
    if len(ct_warn) == 3:
        str_2 = ct_warn[-1].text
        if "you must wait" in str_2.lower():
            # Extract the wait time (in minutes) if it is present in the text.
            if numbers := [int(word) for word in str_2.split() if word.isdigit()]:
                raise DirectDownloadLinkException(
                    f"ERROR: 1fichier is on a limit. Please wait {numbers[0]} minute."
                )
            else:
                raise DirectDownloadLinkException(
                    "ERROR: 1fichier is on a limit. Please wait a few minutes/hour."
                )
        elif "protect access" in str_2.lower():
            raise DirectDownloadLinkException(
                f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(link)}"
            )
        else:
            raise DirectDownloadLinkException(
                "ERROR: Failed to generate Direct Link from 1fichier!"
            )
    elif len(ct_warn) == 4:
        str_1 = ct_warn[-2].text
        str_3 = ct_warn[-1].text
        if "you must wait" in str_1.lower():
            if numbers := [int(word) for word in str_1.split() if word.isdigit()]:
                raise DirectDownloadLinkException(
                    f"ERROR: 1fichier is on a limit. Please wait {numbers[0]} minute."
                )
            else:
                raise DirectDownloadLinkException(
                    "ERROR: 1fichier is on a limit. Please wait a few minutes/hour."
                )
        elif "bad password" in str_3.lower():
            raise DirectDownloadLinkException(
                "ERROR: The password you entered is wrong!"
            )
    # Fallback for any unrecognised warning layout.
    raise DirectDownloadLinkException(
        "ERROR: Error trying to generate Direct Link from 1fichier!"
    )
|
| 806 |
+
|
| 807 |
+
|
| 808 |
+
def solidfiles(url):
    """Solidfiles direct link generator
    Based on https://github.com/Xonshiz/SolidFiles-Downloader
    By https://github.com/Jusidama18"""
    with create_scraper() as session:
        try:
            ua_header = {
                "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1985.125 Safari/537.36"
            }
            page_text = session.get(url, headers=ua_header).text
            # The download URL is embedded as JSON inside the page's
            # viewerOptions script argument.
            viewer_options = str(
                search(r"viewerOptions\'\,\ (.*?)\)\;", page_text).group(1)
            )
            return loads(viewer_options)["downloadUrl"]
        except Exception as err:
            raise DirectDownloadLinkException(f"ERROR: {err.__class__.__name__}") from err
|
| 824 |
+
|
| 825 |
+
|
| 826 |
+
def krakenfiles(url):
    """krakenfiles.com direct link generator."""
    with Session() as session:
        try:
            page_res = session.get(url)
        except Exception as err:
            raise DirectDownloadLinkException(f"ERROR: {err.__class__.__name__}") from err
        page = HTML(page_res.text)
        # The download form's action plus its hidden token drive the POST.
        form_action = page.xpath('//form[@id="dl-form"]/@action')
        if not form_action:
            raise DirectDownloadLinkException("ERROR: Unable to find post link.")
        post_url = f"https://krakenfiles.com{form_action[0]}"
        token = page.xpath('//input[@id="dl-token"]/@value')
        if not token:
            raise DirectDownloadLinkException("ERROR: Unable to find token for post.")
        try:
            payload = session.post(post_url, data={"token": token[0]}).json()
        except Exception as err:
            raise DirectDownloadLinkException(
                f"ERROR: {err.__class__.__name__} While send post request"
            ) from err
    if payload["status"] != "ok":
        raise DirectDownloadLinkException(
            "ERROR: Unable to find download after post request"
        )
    return payload["url"]
|
| 852 |
+
|
| 853 |
+
|
| 854 |
+
def uploadee(url):
    """upload.ee direct link generator."""
    with create_scraper() as session:
        try:
            page = HTML(session.get(url).text)
        except Exception as err:
            raise DirectDownloadLinkException(f"ERROR: {err.__class__.__name__}") from err
        hrefs = page.xpath("//a[@id='d_l']/@href")
        if not hrefs:
            raise DirectDownloadLinkException("ERROR: Direct Link not found")
        return hrefs[0]
|
| 864 |
+
|
| 865 |
+
|
| 866 |
+
def terabox(url):
    """Terabox "direct" link generator.

    Routes the share link through a public worker that serves the file
    without a Terabox login; the original URL is percent-encoded and
    passed as the `url` query parameter.
    """
    try:
        encoded_url = quote(url)
        return f"https://teradlrobot.cheemsbackup.workers.dev/?url={encoded_url}"
    except Exception as e:
        # Chain the cause (previously raised without `from e`).
        raise DirectDownloadLinkException("Failed to bypass Terabox URL") from e
|
| 873 |
+
|
| 874 |
+
|
| 875 |
+
def filepress(url):
    """FilePress -> Google Drive link resolver.

    Performs the site's two-step "publicDownlaod" / "publicUserDownlaod"
    API dance and returns a drive.google.com download URL.
    """
    with create_scraper() as session:
        try:
            # Resolve any shortener/redirect to the canonical filepress URL.
            url = session.get(url).url
            raw = urlparse(url)
            # NOTE: "downlaod" is the site's own (misspelled) endpoint name —
            # do not "fix" these strings.
            json_data = {
                "id": raw.path.split("/")[-1],
                "method": "publicDownlaod",
            }
            api = f"{raw.scheme}://{raw.hostname}/api/file/downlaod/"
            res2 = session.post(
                api,
                headers={"Referer": f"{raw.scheme}://{raw.hostname}"},
                json=json_data,
            ).json()
            # Second stage exchanges the intermediate id for the Drive file id.
            json_data2 = {
                "id": res2["data"],
                "method": "publicUserDownlaod",
            }
            api2 = "https://new2.filepress.store/api/file/downlaod2/"
            res = session.post(
                api2,
                headers={"Referer": f"{raw.scheme}://{raw.hostname}"},
                json=json_data2,
            ).json()
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
        if "data" not in res:
            raise DirectDownloadLinkException(f"ERROR: {res['statusText']}")
    return f"https://drive.google.com/uc?id={res['data']}&export=download"
|
| 905 |
+
|
| 906 |
+
|
| 907 |
+
def gdtot(url):
    """GDToT -> Google Drive link resolver.

    First tries the gdtot.pro token page; if no token link is present,
    falls back to the site's /ddl/ endpoint and scrapes the myDl('...')
    drive link. On success the token path is handed to sharer_scraper().
    """
    cget = create_scraper().request
    try:
        res = cget("GET", f"https://gdtot.pro/file/{url.split('/')[-1]}")
    except Exception as e:
        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
    token_url = HTML(res.text).xpath(
        "//a[contains(@class,'inline-flex items-center justify-center')]/@href"
    )
    if not token_url:
        # Fallback: resolve the original URL and hit its /ddl/ page directly.
        try:
            url = cget("GET", url).url
            p_url = urlparse(url)
            res = cget(
                "GET", f"{p_url.scheme}://{p_url.hostname}/ddl/{url.split('/')[-1]}"
            )
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
        # The drive link is embedded in a myDl('<link>') JS call.
        if (
            drive_link := findall(r"myDl\('(.*?)'\)", res.text)
        ) and "drive.google.com" in drive_link[0]:
            return drive_link[0]
        else:
            raise DirectDownloadLinkException(
                "ERROR: Drive Link not found, Try in your broswer"
            )
    token_url = token_url[0]
    try:
        token_page = cget("GET", token_url)
    except Exception as e:
        raise DirectDownloadLinkException(
            f"ERROR: {e.__class__.__name__} with {token_url}"
        ) from e
    # The token page reveals the final path inside a ("...") JS call.
    path = findall(r'\("(.*?)"\)', token_page.text)
    if not path:
        raise DirectDownloadLinkException("ERROR: Cannot bypass this")
    path = path[0]
    raw = urlparse(token_url)
    final_url = f"{raw.scheme}://{raw.hostname}{path}"
    return sharer_scraper(final_url)
|
| 947 |
+
|
| 948 |
+
|
| 949 |
+
def sharer_scraper(url):
    """Generic "sharer" site bypass used by gdtot and similar hosts.

    Scrapes the page's `key`, replays the site's hand-built multipart POST
    (action=direct) and follows the result to a Google Drive link.
    """
    cget = create_scraper().request
    try:
        url = cget("GET", url).url
        raw = urlparse(url)
        # NOTE: the site expects the literal (non-standard) "useragent" header.
        header = {
            "useragent": "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/7.0.548.0 Safari/534.10"
        }
        res = cget("GET", url, headers=header)
    except Exception as e:
        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
    # The per-request key is embedded in inline JS as ("key", "<value>").
    key = findall(r'"key",\s+"(.*?)"', res.text)
    if not key:
        raise DirectDownloadLinkException("ERROR: Key not found!")
    key = key[0]
    if not HTML(res.text).xpath("//button[@id='drc']"):
        raise DirectDownloadLinkException(
            "ERROR: This link don't have direct download button"
        )
    # The multipart body is built by hand so the boundary and field layout
    # match exactly what the site's own JS sends — keep these strings verbatim.
    boundary = uuid4()
    headers = {
        "Content-Type": f"multipart/form-data; boundary=----WebKitFormBoundary{boundary}",
        "x-token": raw.hostname,
        "useragent": "Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/534.10 (KHTML, like Gecko) Chrome/7.0.548.0 Safari/534.10",
    }

    data = (
        f'------WebKitFormBoundary{boundary}\r\nContent-Disposition: form-data; name="action"\r\n\r\ndirect\r\n'
        f'------WebKitFormBoundary{boundary}\r\nContent-Disposition: form-data; name="key"\r\n\r\n{key}\r\n'
        f'------WebKitFormBoundary{boundary}\r\nContent-Disposition: form-data; name="action_token"\r\n\r\n\r\n'
        f"------WebKitFormBoundary{boundary}--\r\n"
    )
    try:
        res = cget("POST", url, cookies=res.cookies, headers=headers, data=data).json()
    except Exception as e:
        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
    if "url" not in res:
        raise DirectDownloadLinkException(
            "ERROR: Drive Link not found, Try in your broswer"
        )
    # If the response already points at Drive we are done; otherwise follow
    # one more hop and look for a Drive link in the landing page.
    if "drive.google.com" in res["url"] or "drive.usercontent.google.com" in res["url"]:
        return res["url"]
    try:
        res = cget("GET", res["url"])
    except Exception as e:
        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
    if (drive_link := HTML(res.text).xpath("//a[contains(@class,'btn')]/@href")) and (
        "drive.google.com" in drive_link[0]
        or "drive.usercontent.google.com" in drive_link[0]
    ):
        return drive_link[0]
    else:
        raise DirectDownloadLinkException(
            "ERROR: Drive Link not found, Try in your broswer"
        )
|
| 1004 |
+
|
| 1005 |
+
|
| 1006 |
+
def wetransfer(url):
    """WeTransfer direct link generator."""
    with create_scraper() as session:
        try:
            # Resolve the short link; transfer id and security hash are the
            # last two path segments of the resolved URL.
            url = session.get(url).url
            parts = url.split("/")
            payload = {"security_hash": parts[-1], "intent": "entire_transfer"}
            res = session.post(
                f"https://wetransfer.com/api/v4/transfers/{parts[-2]}/download",
                json=payload,
            ).json()
        except Exception as err:
            raise DirectDownloadLinkException(f"ERROR: {err.__class__.__name__}") from err
        if "direct_link" in res:
            return res["direct_link"]
        if "message" in res:
            raise DirectDownloadLinkException(f"ERROR: {res['message']}")
        if "error" in res:
            raise DirectDownloadLinkException(f"ERROR: {res['error']}")
        raise DirectDownloadLinkException("ERROR: cannot find direct link")
|
| 1026 |
+
|
| 1027 |
+
|
| 1028 |
+
def akmfiles(url):
    """akmfiles direct link generator."""
    with create_scraper() as session:
        try:
            response_text = session.post(
                url,
                data={"op": "download2", "id": url.split("/")[-1]},
            ).text
            page = HTML(response_text)
        except Exception as err:
            raise DirectDownloadLinkException(f"ERROR: {err.__class__.__name__}") from err
        links = page.xpath("//a[contains(@class,'btn btn-dow')]/@href")
        if not links:
            raise DirectDownloadLinkException("ERROR: Direct link not found")
        return links[0]
|
| 1043 |
+
|
| 1044 |
+
|
| 1045 |
+
def shrdsk(url):
    """shrdsk.me direct link generator."""
    with create_scraper() as session:
        try:
            info = session.get(
                f"https://us-central1-affiliate2apk.cloudfunctions.net/get_data?shortid={url.split('/')[-1]}",
            ).json()
        except Exception as err:
            raise DirectDownloadLinkException(f"ERROR: {err.__class__.__name__}") from err
        if "download_data" not in info:
            raise DirectDownloadLinkException("ERROR: Download data not found")
        try:
            # The download endpoint answers with a redirect; capture it
            # instead of following.
            resp = session.get(
                f"https://shrdsk.me/download/{info['download_data']}",
                allow_redirects=False,
            )
            if "Location" in resp.headers:
                return resp.headers["Location"]
        except Exception as err:
            raise DirectDownloadLinkException(f"ERROR: {err.__class__.__name__}") from err
    raise DirectDownloadLinkException("ERROR: cannot find direct link in headers")
|
| 1065 |
+
|
| 1066 |
+
|
| 1067 |
+
def linkBox(url: str):
    """LinkBox share resolver.

    Walks a linkbox.to share (file or folder tree) via the public API and
    returns a details dict {contents, title, total_size}; each content
    entry carries path/filename/url.
    """
    parsed_url = urlparse(url)
    try:
        shareToken = parsed_url.path.split("/")[-1]
    except Exception:
        raise DirectDownloadLinkException("ERROR: invalid URL")

    # Accumulator shared (via closure) by the nested helpers below.
    details = {"contents": [], "title": "", "total_size": 0}

    def __singleItem(session, itemId):
        # Resolve a single shared file via the file/detail endpoint.
        try:
            _json = session.get(
                "https://www.linkbox.to/api/file/detail",
                params={"itemId": itemId},
            ).json()
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
        data = _json["data"]
        if not data:
            if "msg" in _json:
                raise DirectDownloadLinkException(f"ERROR: {_json['msg']}")
            raise DirectDownloadLinkException("ERROR: data not found")
        itemInfo = data["itemInfo"]
        if not itemInfo:
            raise DirectDownloadLinkException("ERROR: itemInfo not found")
        filename = itemInfo["name"]
        # The API sometimes reports the extension separately in sub_type.
        sub_type = itemInfo.get("sub_type")
        if sub_type and not filename.strip().endswith(sub_type):
            filename += f".{sub_type}"
        if not details["title"]:
            details["title"] = filename
        item = {
            "path": "",
            "filename": filename,
            "url": itemInfo["url"],
        }
        if "size" in itemInfo:
            size = itemInfo["size"]
            # Sizes may arrive as numeric strings.
            if isinstance(size, str) and size.isdigit():
                size = float(size)
            details["total_size"] += size
        details["contents"].append(item)

    def __fetch_links(session, _id=0, folderPath=""):
        # Recursively list a shared folder (pid=_id) page by page (1000 max).
        params = {
            "shareToken": shareToken,
            "pageSize": 1000,
            "pid": _id,
        }
        try:
            _json = session.get(
                "https://www.linkbox.to/api/file/share_out_list",
                params=params,
            ).json()
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
        data = _json["data"]
        if not data:
            if "msg" in _json:
                raise DirectDownloadLinkException(f"ERROR: {_json['msg']}")
            raise DirectDownloadLinkException("ERROR: data not found")
        # Single-file shares are delegated to __singleItem; missing keys are
        # treated as "not a single item" and ignored.
        try:
            if data["shareType"] == "singleItem":
                return __singleItem(session, data["itemId"])
        except Exception:
            pass
        if not details["title"]:
            details["title"] = data["dirName"]
        contents = data["list"]
        if not contents:
            return
        for content in contents:
            if content["type"] == "dir" and "url" not in content:
                # Sub-folder: recurse with an extended relative path.
                if not folderPath:
                    newFolderPath = ospath.join(details["title"], content["name"])
                else:
                    newFolderPath = ospath.join(folderPath, content["name"])
                if not details["title"]:
                    details["title"] = content["name"]
                __fetch_links(session, content["id"], newFolderPath)
            elif "url" in content:
                # Downloadable file entry.
                if not folderPath:
                    folderPath = details["title"]
                filename = content["name"]
                if (
                    sub_type := content.get("sub_type")
                ) and not filename.strip().endswith(sub_type):
                    filename += f".{sub_type}"
                item = {
                    "path": ospath.join(folderPath),
                    "filename": filename,
                    "url": content["url"],
                }
                if "size" in content:
                    size = content["size"]
                    if isinstance(size, str) and size.isdigit():
                        size = float(size)
                    details["total_size"] += size
                details["contents"].append(item)

    try:
        with Session() as session:
            __fetch_links(session)
    except DirectDownloadLinkException as e:
        raise e
    return details
|
| 1173 |
+
|
| 1174 |
+
|
| 1175 |
+
def gofile(url):
    """Gofile.io resolver.

    Accepts "<url>" or "<url>::<password>"; walks the content tree via the
    Gofile API. Returns (url, header) for a single file, otherwise a
    details dict {contents, title, total_size, header}.
    """
    try:
        if "::" in url:
            _password = url.split("::")[-1]
            # Gofile expects the password as a sha256 hex digest.
            _password = sha256(_password.encode("utf-8")).hexdigest()
            url = url.split("::")[-2]
        else:
            _password = ""
        _id = url.split("/")[-1]
    except Exception as e:
        raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")

    def __get_token(session):
        # Create a guest account; its token authorises the content API and
        # the eventual download (via the accountToken cookie).
        headers = {
            "User-Agent": user_agent,
            "Accept-Encoding": "gzip, deflate, br",
            "Accept": "*/*",
            "Connection": "keep-alive",
        }
        __url = "https://api.gofile.io/accounts"
        try:
            __res = session.post(__url, headers=headers).json()
            if __res["status"] != "ok":
                raise DirectDownloadLinkException("ERROR: Failed to get token.")
            return __res["data"]["token"]
        except Exception as e:
            raise e

    def __fetch_links(session, _id, folderPath=""):
        # Recursively collect download links for content id `_id` into the
        # shared `details` accumulator.
        _url = f"https://api.gofile.io/contents/{_id}?cache=true"
        headers = {
            "User-Agent": user_agent,
            "Accept-Encoding": "gzip, deflate, br",
            "Accept": "*/*",
            "Connection": "keep-alive",
            "Authorization": "Bearer" + " " + token,
            # Static website token required by the public API.
            "X-Website-Token": "4fd6sg89d7s6",
        }
        if _password:
            _url += f"&password={_password}"
        try:
            _json = session.get(_url, headers=headers).json()
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
        # NOTE(review): `in` performs a substring test here, not equality —
        # e.g. a status of "error" matches every branch below. Works for the
        # exact status strings the API returns, but is fragile.
        if _json["status"] in "error-passwordRequired":
            raise DirectDownloadLinkException(
                f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(url)}"
            )
        if _json["status"] in "error-passwordWrong":
            raise DirectDownloadLinkException("ERROR: This password is wrong !")
        if _json["status"] in "error-notFound":
            raise DirectDownloadLinkException(
                "ERROR: File not found on gofile's server"
            )
        if _json["status"] in "error-notPublic":
            raise DirectDownloadLinkException("ERROR: This folder is not public")

        data = _json["data"]

        if not details["title"]:
            details["title"] = data["name"] if data["type"] == "folder" else _id

        contents = data["children"]
        for content in contents.values():
            if content["type"] == "folder":
                if not content["public"]:
                    continue
                if not folderPath:
                    newFolderPath = ospath.join(details["title"], content["name"])
                else:
                    newFolderPath = ospath.join(folderPath, content["name"])
                __fetch_links(session, content["id"], newFolderPath)
            else:
                if not folderPath:
                    folderPath = details["title"]
                item = {
                    "path": ospath.join(folderPath),
                    "filename": content["name"],
                    "url": content["link"],
                }
                if "size" in content:
                    size = content["size"]
                    # Sizes may arrive as numeric strings.
                    if isinstance(size, str) and size.isdigit():
                        size = float(size)
                    details["total_size"] += size
                details["contents"].append(item)

    details = {"contents": [], "title": "", "total_size": 0}
    with Session() as session:
        try:
            token = __get_token(session)
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
        # Downloader must present the account token as a cookie.
        details["header"] = f"Cookie: accountToken={token}"
        try:
            __fetch_links(session, _id)
        except Exception as e:
            raise DirectDownloadLinkException(e)

    if len(details["contents"]) == 1:
        return (details["contents"][0]["url"], details["header"])
    return details
|
| 1277 |
+
|
| 1278 |
+
|
| 1279 |
+
def mediafireFolder(url):
    """MediaFire folder resolver.

    Accepts "<url>" or "<url>::<password>" (password applies to protected
    files). Walks the folder tree via the MediaFire API, scrapes each
    file's download page for its direct link, and returns (url, header)
    for a single file or a details dict otherwise.
    """
    if "::" in url:
        _password = url.split("::")[-1]
        url = url.split("::")[-2]
    else:
        _password = ""
    try:
        # Folder key(s) are the path segment after /folder/ (may be a
        # comma-separated list).
        raw = url.split("/", 4)[-1]
        folderkey = raw.split("/", 1)[0]
        folderkey = folderkey.split(",")
    except Exception:
        raise DirectDownloadLinkException("ERROR: Could not parse ")
    if len(folderkey) == 1:
        folderkey = folderkey[0]
    details = {"contents": [], "title": "", "total_size": 0, "header": ""}

    # Scraper session with aggressive retries; the second create_scraper
    # wraps the retrying session with browser emulation + delay.
    session = create_scraper()
    adapter = HTTPAdapter(
        max_retries=Retry(total=10, read=10, connect=10, backoff_factor=0.3)
    )
    session.mount("http://", adapter)
    session.mount("https://", adapter)
    session = create_scraper(
        browser={"browser": "firefox", "platform": "windows", "mobile": False},
        delay=10,
        sess=session,
    )
    folder_infos = []

    def __get_info(folderkey):
        # Fetch metadata for one or more folder keys into folder_infos.
        try:
            if isinstance(folderkey, list):
                folderkey = ",".join(folderkey)
            _json = session.post(
                "https://www.mediafire.com/api/1.5/folder/get_info.php",
                data={
                    "recursive": "yes",
                    "folder_key": folderkey,
                    "response_format": "json",
                },
            ).json()
        except Exception as e:
            raise DirectDownloadLinkException(
                f"ERROR: {e.__class__.__name__} While getting info"
            )
        _res = _json["response"]
        # Multiple keys -> folder_infos (plural); single key -> folder_info.
        if "folder_infos" in _res:
            folder_infos.extend(_res["folder_infos"])
        elif "folder_info" in _res:
            folder_infos.append(_res["folder_info"])
        elif "message" in _res:
            raise DirectDownloadLinkException(f"ERROR: {_res['message']}")
        else:
            raise DirectDownloadLinkException("ERROR: something went wrong!")

    try:
        __get_info(folderkey)
    except Exception as e:
        raise DirectDownloadLinkException(e)

    details["title"] = folder_infos[0]["name"]

    def __scraper(url):
        # Scrape a single file's download page for the real direct link.
        # Returns None on any soft failure (caller skips the file).
        session = create_scraper()
        parsed_url = urlparse(url)
        # Strip query/fragment noise.
        url = f"{parsed_url.scheme}://{parsed_url.netloc}{parsed_url.path}"

        def __repair_download(url):
            # Follow MediaFire's "repair/continue" interstitial once.
            try:
                html = HTML(session.get(url).text)
                if new_link := html.xpath('//a[@id="continue-btn"]/@href'):
                    return __scraper(f"https://mediafire.com/{new_link[0]}")
            except Exception:
                return

        try:
            html = HTML(session.get(url).text)
        except Exception:
            return
        if html.xpath("//div[@class='passwordPrompt']"):
            if not _password:
                raise DirectDownloadLinkException(
                    f"ERROR: {PASSWORD_ERROR_MESSAGE}".format(url)
                )
            try:
                html = HTML(session.post(url, data={"downloadp": _password}).text)
            except Exception:
                return
            # Prompt still present -> wrong password; skip this file.
            if html.xpath("//div[@class='passwordPrompt']"):
                return
        if final_link := html.xpath('//a[@aria-label="Download file"]/@href'):
            # Scheme-relative links need one more resolution pass.
            if final_link[0].startswith("//"):
                return __scraper(f"https://{final_link[0][2:]}")
            return final_link[0]
        if repair_link := html.xpath("//a[@class='retry']/@href"):
            return __repair_download(repair_link[0])

    def __get_content(folderKey, folderPath="", content_type="folders"):
        # Recursively list a folder: first sub-folders, then its files.
        try:
            params = {
                "content_type": content_type,
                "folder_key": folderKey,
                "response_format": "json",
            }
            _json = session.get(
                "https://www.mediafire.com/api/1.5/folder/get_content.php",
                params=params,
            ).json()
        except Exception as e:
            raise DirectDownloadLinkException(
                f"ERROR: {e.__class__.__name__} While getting content"
            )
        _res = _json["response"]
        if "message" in _res:
            raise DirectDownloadLinkException(f"ERROR: {_res['message']}")
        _folder_content = _res["folder_content"]
        if content_type == "folders":
            folders = _folder_content["folders"]
            for folder in folders:
                if folderPath:
                    newFolderPath = ospath.join(folderPath, folder["name"])
                else:
                    newFolderPath = ospath.join(folder["name"])
                __get_content(folder["folderkey"], newFolderPath)
            # Same folder again, this time listing its files.
            __get_content(folderKey, folderPath, "files")
        else:
            files = _folder_content["files"]
            for file in files:
                item = {}
                # Files whose page cannot be scraped are silently skipped.
                if not (_url := __scraper(file["links"]["normal_download"])):
                    continue
                item["filename"] = file["filename"]
                if not folderPath:
                    folderPath = details["title"]
                item["path"] = ospath.join(folderPath)
                item["url"] = _url
                if "size" in file:
                    size = file["size"]
                    # Sizes may arrive as numeric strings.
                    if isinstance(size, str) and size.isdigit():
                        size = float(size)
                    details["total_size"] += size
                details["contents"].append(item)

    try:
        for folder in folder_infos:
            __get_content(folder["folderkey"], folder["name"])
    except Exception as e:
        raise DirectDownloadLinkException(e)
    finally:
        session.close()
    if len(details["contents"]) == 1:
        return (details["contents"][0]["url"], details["header"])
    return details
|
| 1432 |
+
|
| 1433 |
+
|
| 1434 |
+
def cf_bypass(url):
    """Fetch a Cloudflare-protected page through the shared solver service.

    DO NOT ABUSE THIS.

    Sends ``url`` to the FlareSolverr-style endpoint and returns the rendered
    page HTML when the solver reports success.

    Raises:
        DirectDownloadLinkException: if the solver fails or is unreachable.
    """
    try:
        data = {"cmd": "request.get", "url": url, "maxTimeout": 60000}
        _json = post(
            "https://cf.jmdkh.eu.org/v1",
            headers={"Content-Type": "application/json"},
            json=data,
        ).json()
        if _json["status"] == "ok":
            return _json["solution"]["response"]
    except Exception:
        # FIX: the original except body was a bare `e` expression (a no-op);
        # intentionally swallow and fall through to the generic error below.
        pass
    # FIX: corrected "Con't" typo in the user-facing error message.
    raise DirectDownloadLinkException("ERROR: Can't bypass cloudflare")
|
| 1448 |
+
|
| 1449 |
+
|
| 1450 |
+
def send_cm_file(url, file_id=None):
    """Generate a direct download link for a single send.cm file.

    A password may be appended to the URL as ``<url>::<password>``. When
    ``file_id`` is not given it is scraped from the file page itself.

    Returns:
        tuple: (direct link, "Referer: https://send.cm/").

    Raises:
        DirectDownloadLinkException: on scrape failure, missing password,
            or when no redirect (direct link) is produced.
    """
    parts = url.split("::")
    if len(parts) > 1:
        passwd = parts[-1]
        url = parts[-2]
    else:
        passwd = ""
    needs_password = False
    with create_scraper() as scraper:
        if file_id is None:
            try:
                page = HTML(scraper.get(url).text)
            except Exception as err:
                raise DirectDownloadLinkException(
                    f"ERROR: {err.__class__.__name__}"
                ) from err
            if page.xpath("//input[@name='password']"):
                needs_password = True
            file_id = page.xpath("//input[@name='id']/@value")
            if not file_id:
                raise DirectDownloadLinkException("ERROR: file_id not found")
        try:
            form = {"op": "download2", "id": file_id}
            if passwd and needs_password:
                form["password"] = passwd
            # The direct link arrives as a redirect; don't follow it.
            resp = scraper.post("https://send.cm/", data=form, allow_redirects=False)
            if "Location" in resp.headers:
                return (resp.headers["Location"], "Referer: https://send.cm/")
        except Exception as err:
            raise DirectDownloadLinkException(f"ERROR: {err.__class__.__name__}") from err
        if needs_password:
            raise DirectDownloadLinkException(
                f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(url)}"
            )
        raise DirectDownloadLinkException("ERROR: Direct link not found")
|
| 1483 |
+
|
| 1484 |
+
|
| 1485 |
+
def send_cm(url):
    """Resolve a send.cm URL into direct download link(s).

    ``/d/`` and non-``/s/`` URLs are delegated to :func:`send_cm_file`.
    Folder (``/s/``) URLs are walked recursively (via the Cloudflare
    bypass) and returned as a details dict, or as a single (url, header)
    tuple when only one file is found.

    Raises:
        DirectDownloadLinkException: on scrape/bypass failure.
    """
    if "/d/" in url:
        return send_cm_file(url)
    elif "/s/" not in url:
        file_id = url.split("/")[-1]
        return send_cm_file(url, file_id)
    splitted_url = url.split("/")
    details = {
        "contents": [],
        "title": "",
        "total_size": 0,
        "header": "Referer: https://send.cm/",
    }
    # Normalize bare folder URLs so the title segment is addressable.
    if len(splitted_url) == 5:
        url += "/"
        splitted_url = url.split("/")
    if len(splitted_url) >= 7:
        details["title"] = splitted_url[5]
    else:
        details["title"] = splitted_url[-1]
    session = Session()

    def __collectFolders(html):
        # Sub-folders are rendered as <h6><a> entries on the listing page.
        folders = []
        folders_urls = html.xpath("//h6/a/@href")
        folders_names = html.xpath("//h6/a/text()")
        for folders_url, folders_name in zip(folders_urls, folders_names):
            folders.append(
                {
                    "folder_link": folders_url.strip(),
                    "folder_name": folders_name.strip(),
                }
            )
        return folders

    def __getFile_link(file_id):
        # Best-effort: returns None on any failure so the caller can skip.
        try:
            _res = session.post(
                "https://send.cm/",
                data={"op": "download2", "id": file_id},
                allow_redirects=False,
            )
            if "Location" in _res.headers:
                return _res.headers["Location"]
        except Exception:
            pass

    def __getFiles(html):
        # Each selectable table row carries the file link, name and size.
        files = []
        hrefs = html.xpath('//tr[@class="selectable"]//a/@href')
        file_names = html.xpath('//tr[@class="selectable"]//a/text()')
        sizes = html.xpath('//tr[@class="selectable"]//span/text()')
        for href, file_name, size_text in zip(hrefs, file_names, sizes):
            files.append(
                {
                    "file_id": href.split("/")[-1],
                    "file_name": file_name.strip(),
                    "size": speed_string_to_bytes(size_text.strip()),
                }
            )
        return files

    def __writeContents(html_text, folderPath=""):
        # Depth-first: recurse into sub-folders, then collect this level's files.
        folders = __collectFolders(html_text)
        for folder in folders:
            _html = HTML(cf_bypass(folder["folder_link"]))
            __writeContents(_html, ospath.join(folderPath, folder["folder_name"]))
        files = __getFiles(html_text)
        for file in files:
            if not (link := __getFile_link(file["file_id"])):
                continue
            # FIX: __getFiles stores the name under "file_name"; the original
            # `file["filename"]` lookup raised KeyError for every file.
            item = {"url": link, "filename": file["file_name"], "path": folderPath}
            details["total_size"] += file["size"]
            details["contents"].append(item)

    try:
        mainHtml = HTML(cf_bypass(url))
    except DirectDownloadLinkException as e:
        session.close()
        raise e
    except Exception as e:
        session.close()
        raise DirectDownloadLinkException(
            f"ERROR: {e.__class__.__name__} While getting mainHtml"
        )
    try:
        __writeContents(mainHtml, details["title"])
    except DirectDownloadLinkException as e:
        session.close()
        raise e
    except Exception as e:
        session.close()
        raise DirectDownloadLinkException(
            f"ERROR: {e.__class__.__name__} While writing Contents"
        )
    session.close()
    if len(details["contents"]) == 1:
        return (details["contents"][0]["url"], details["header"])
    return details
|
| 1584 |
+
|
| 1585 |
+
|
| 1586 |
+
def doods(url):
    """Generate a direct download link from a doodstream ``/d/`` page.

    ``/e/`` (embed) URLs are rewritten to ``/d/`` first.

    Returns:
        tuple: (direct link, "Referer: ..." header line).

    Raises:
        DirectDownloadLinkException: if the token or final link is missing.
    """
    if "/e/" in url:
        url = url.replace("/e/", "/d/")
    parsed = urlparse(url)
    base = f"{parsed.scheme}://{parsed.hostname}"
    with create_scraper() as scraper:
        try:
            page = HTML(scraper.get(url).text)
        except Exception as err:
            raise DirectDownloadLinkException(
                f"ERROR: {err.__class__.__name__} While fetching token link"
            ) from err
        token_hrefs = page.xpath("//div[@class='download-content']//a/@href")
        if not token_hrefs:
            raise DirectDownloadLinkException(
                "ERROR: Token Link not found or maybe not allow to download! open in browser."
            )
        token_url = f"{base}{token_hrefs[0]}"
        # The site rejects the token page if requested immediately.
        sleep(2)
        try:
            resp = scraper.get(token_url)
        except Exception as err:
            raise DirectDownloadLinkException(
                f"ERROR: {err.__class__.__name__} While fetching download link"
            ) from err
        found = search(r"window\.open\('(\S+)'", resp.text)
        if not found:
            raise DirectDownloadLinkException("ERROR: Download link not found try again")
        return (found.group(1), f"Referer: {base}/")
|
| 1612 |
+
|
| 1613 |
+
|
| 1614 |
+
def easyupload(url):
    """Generate a direct download link for an easyupload.io file.

    A password may be appended to the URL as ``<url>::<password>``. The
    site's invisible reCAPTCHA is solved via get_captcha_token() before
    the download token is requested.

    Raises:
        DirectDownloadLinkException: on missing password, captcha failure,
            or when the API refuses to produce a link.
    """
    if "::" in url:
        _password = url.split("::")[-1]
        url = url.split("::")[-2]
    else:
        _password = ""
    # The file id is the last path segment of the share URL.
    file_id = url.split("/")[-1]
    with create_scraper() as session:
        try:
            _res = session.get(url)
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}")
        first_page_html = HTML(_res.text)
        # Password-protected file but no password supplied: tell the user how.
        if (
            first_page_html.xpath("//h6[contains(text(),'Password Protected')]")
            and not _password
        ):
            raise DirectDownloadLinkException(
                f"ERROR:\n{PASSWORD_ERROR_MESSAGE.format(url)}"
            )
        # The page embeds the per-file storage server (eu1..eu100) action URL.
        if not (
            match := search(
                r"https://eu(?:[1-9][0-9]?|100)\.easyupload\.io/action\.php", _res.text
            )
        ):
            raise DirectDownloadLinkException(
                "ERROR: Failed to get server for EasyUpload Link"
            )
        action_url = match.group()
        session.headers.update({"referer": "https://easyupload.io/"})
        # Hard-coded invisible-reCAPTCHA parameters scraped from the site.
        # NOTE(review): these will break if easyupload rotates its site key —
        # verify against the live page when this handler starts failing.
        recaptcha_params = {
            "k": "6LfWajMdAAAAAGLXz_nxz2tHnuqa-abQqC97DIZ3",
            "ar": "1",
            "co": "aHR0cHM6Ly9lYXN5dXBsb2FkLmlvOjQ0Mw..",
            "hl": "en",
            "v": "0hCdE87LyjzAkFO5Ff-v7Hj1",
            "size": "invisible",
            "cb": "c3o1vbaxbmwe",
        }
        if not (captcha_token := get_captcha_token(session, recaptcha_params)):
            raise DirectDownloadLinkException("ERROR: Captcha token not found")
        try:
            data = {
                "type": "download-token",
                "url": file_id,
                "value": _password,
                "captchatoken": captcha_token,
                "method": "regular",
            }
            json_resp = session.post(url=action_url, data=data).json()
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
        if "download_link" in json_resp:
            return json_resp["download_link"]
        elif "data" in json_resp:
            # The API returned an explicit failure reason.
            raise DirectDownloadLinkException(
                f"ERROR: Failed to generate direct link due to {json_resp['data']}"
            )
    raise DirectDownloadLinkException(
        "ERROR: Failed to generate direct link from EasyUpload."
    )
|
| 1675 |
+
|
| 1676 |
+
|
| 1677 |
+
def filelions_and_streamwish(url):
    """Generate a direct link for filelions/vidhide and streamwish hosts.

    Uses the host's public API (key from ``Config.FILELION_API`` or
    ``Config.STREAMWISH_API``). A quality suffix (``_o``/``_h``/``_n``/``_l``)
    on the file code selects a specific version; otherwise the available
    qualities are reported back in the error message.

    Raises:
        DirectDownloadLinkException: for unsupported hosts, missing API key,
            API errors, or when no matching quality exists.
    """
    parsed_url = urlparse(url)
    hostname = parsed_url.hostname
    scheme = parsed_url.scheme
    if any(
        x in hostname
        for x in [
            "filelions.co",
            "filelions.live",
            "filelions.to",
            "filelions.site",
            "cabecabean.lol",
            "filelions.online",
            "mycloudz.cc",
        ]
    ):
        apiKey = Config.FILELION_API
        apiUrl = "https://vidhideapi.com"
    elif any(
        x in hostname
        for x in [
            "embedwish.com",
            "kissmovies.net",
            "kitabmarkaz.xyz",
            "wishfast.top",
            "streamwish.to",
        ]
    ):
        apiKey = Config.STREAMWISH_API
        apiUrl = "https://api.streamwish.com"
    else:
        # FIX: previously an unrecognised hostname fell through with
        # apiKey/apiUrl unbound and crashed with NameError.
        raise DirectDownloadLinkException(
            f"ERROR: Unsupported host {hostname}"
        )
    if not apiKey:
        raise DirectDownloadLinkException(
            f"ERROR: API is not provided get it from {scheme}://{hostname}"
        )
    file_code = url.split("/")[-1]
    quality = ""
    # Split off an optional quality suffix from the file code.
    if file_code.strip().endswith(("_o", "_h", "_n", "_l")):
        spited_file_code = file_code.rsplit("_", 1)
        quality = spited_file_code[1]
        file_code = spited_file_code[0]
    url = f"{scheme}://{hostname}/{file_code}"
    with Session() as session:
        try:
            _res = session.get(
                f"{apiUrl}/api/file/direct_link",
                params={"key": apiKey, "file_code": file_code, "hls": "1"},
            ).json()
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
    if _res["status"] != 200:
        raise DirectDownloadLinkException(f"ERROR: {_res['msg']}")
    result = _res["result"]
    if not result["versions"]:
        raise DirectDownloadLinkException("ERROR: File Not Found")
    error = "\nProvide a quality to download the video\nAvailable Quality:"
    for version in result["versions"]:
        if quality == version["name"]:
            return version["url"]
        elif version["name"] == "l":
            error += "\nLow"
        elif version["name"] == "n":
            error += "\nNormal"
        elif version["name"] == "o":
            error += "\nOriginal"
        elif version["name"] == "h":
            error += "\nHD"
        error += f" <code>{url}_{version['name']}</code>"
    raise DirectDownloadLinkException(f"ERROR: {error}")
|
| 1745 |
+
|
| 1746 |
+
|
| 1747 |
+
def streamvid(url: str):
    """Generate a direct download link from a streamvid ``/d/`` page.

    If the URL ends with a quality suffix (``_o``/``_h``/``_n``/``_l``) the
    download form is submitted and the redirect script is parsed; otherwise
    the available qualities are reported back in the error message.

    Raises:
        DirectDownloadLinkException: on scrape failure or missing link/quality.
    """
    file_code = url.split("/")[-1]
    parsed_url = urlparse(url)
    # Normalize to the canonical download-page URL.
    url = f"{parsed_url.scheme}://{parsed_url.hostname}/d/{file_code}"
    quality_defined = bool(url.strip().endswith(("_o", "_h", "_n", "_l")))
    with create_scraper() as session:
        try:
            html = HTML(session.get(url).text)
        except Exception as e:
            raise DirectDownloadLinkException(f"ERROR: {e.__class__.__name__}") from e
        if quality_defined:
            data = {}
            # Collect the hidden form fields required by the download POST.
            if not (inputs := html.xpath('//form[@id="F1"]//input')):
                raise DirectDownloadLinkException("ERROR: No inputs found")
            for i in inputs:
                if key := i.get("name"):
                    data[key] = i.get("value")
            try:
                html = HTML(session.post(url, data=data).text)
            except Exception as e:
                raise DirectDownloadLinkException(
                    f"ERROR: {e.__class__.__name__}"
                ) from e
            # The direct link is delivered via a JS redirect script.
            if not (
                script := html.xpath(
                    '//script[contains(text(),"document.location.href")]/text()'
                )
            ):
                if error := html.xpath(
                    '//div[@class="alert alert-danger"][1]/text()[2]'
                ):
                    raise DirectDownloadLinkException(f"ERROR: {error[0]}")
                raise DirectDownloadLinkException(
                    "ERROR: direct link script not found!"
                )
            if directLink := findall(r'document\.location\.href="(.*)"', script[0]):
                return directLink[0]
            raise DirectDownloadLinkException(
                "ERROR: direct link not found! in the script"
            )
        elif (qualities_urls := html.xpath('//div[@id="dl_versions"]/a/@href')) and (
            qualities := html.xpath('//div[@id="dl_versions"]/a/text()[2]')
        ):
            # No quality requested: list every available quality link.
            error = "\nProvide a quality to download the video\nAvailable Quality:"
            for quality_url, quality in zip(qualities_urls, qualities):
                error += f"\n{quality.strip()} <code>{quality_url}</code>"
            raise DirectDownloadLinkException(f"ERROR: {error}")
        elif error := html.xpath('//div[@class="not-found-text"]/text()'):
            raise DirectDownloadLinkException(f"ERROR: {error[0]}")
        raise DirectDownloadLinkException("ERROR: Something went wrong")
|
| 1797 |
+
|
| 1798 |
+
|
| 1799 |
+
def streamhub(url):
    """Generate a direct download link from a streamhub ``/d/`` page.

    Submits the hidden download form and scrapes the resulting button link.

    Raises:
        DirectDownloadLinkException: on scrape failure or missing link.
    """
    file_code = url.split("/")[-1]
    parsed = urlparse(url)
    url = f"{parsed.scheme}://{parsed.hostname}/d/{file_code}"
    with create_scraper() as scraper:
        try:
            page = HTML(scraper.get(url).text)
        except Exception as err:
            raise DirectDownloadLinkException(f"ERROR: {err.__class__.__name__}") from err
        inputs = page.xpath('//form[@name="F1"]//input')
        if not inputs:
            raise DirectDownloadLinkException("ERROR: No inputs found")
        form_data = {
            field.get("name"): field.get("value")
            for field in inputs
            if field.get("name")
        }
        scraper.headers.update({"referer": url})
        # Brief pause before submitting, mirroring a human click.
        sleep(1)
        try:
            page = HTML(scraper.post(url, data=form_data).text)
        except Exception as err:
            raise DirectDownloadLinkException(f"ERROR: {err.__class__.__name__}") from err
        direct = page.xpath(
            '//a[@class="btn btn-primary btn-go downloadbtn"]/@href'
        )
        if direct:
            return direct[0]
        if error := page.xpath('//div[@class="alert alert-danger"]/text()[2]'):
            raise DirectDownloadLinkException(f"ERROR: {error[0]}")
        raise DirectDownloadLinkException("ERROR: direct link not found!")
|
| 1827 |
+
|
| 1828 |
+
|
| 1829 |
+
def pcloud(url):
    """Extract the direct download link embedded in a pCloud share page.

    Raises:
        DirectDownloadLinkException: on fetch failure or missing link.
    """
    with create_scraper() as scraper:
        try:
            page = scraper.get(url)
        except Exception as err:
            raise DirectDownloadLinkException(f"ERROR: {err.__class__.__name__}") from err
        matches = findall(r".downloadlink.:..(https:.*)..", page.text)
        if not matches:
            raise DirectDownloadLinkException("ERROR: Direct link not found")
        # The page embeds the URL JSON-escaped; unescape the slashes.
        return matches[0].replace(r"\/", "/")
|
| 1838 |
+
|
| 1839 |
+
|
| 1840 |
+
def tmpsend(url):
    """Build a direct download link for a tmpsend.com share.

    Accepts a plain share URL, a ``/thank-you`` page, or a ``/download``
    page URL; the file id is taken from the ``d`` query parameter when
    present, otherwise from the URL path.

    Returns:
        tuple[str, str]: (download link, "Referer: ..." header line).

    Raises:
        DirectDownloadLinkException: if no file id can be extracted.
    """
    parsed_url = urlparse(url)
    file_id = None
    if any(x in parsed_url.path for x in ["thank-you", "download"]):
        query_params = parse_qs(parsed_url.query)
        if d_values := query_params.get("d"):
            file_id = d_values[0]
    elif not (file_id := parsed_url.path.strip("/")):
        raise DirectDownloadLinkException("ERROR: Invalid URL format")
    # FIX: a thank-you/download URL without a "d" parameter previously left
    # file_id as None and produced a bogus ".../download?d=None" link.
    if not file_id:
        raise DirectDownloadLinkException("ERROR: Invalid URL format")
    referer_url = f"https://tmpsend.com/thank-you?d={file_id}"
    header = f"Referer: {referer_url}"
    download_link = f"https://tmpsend.com/download?d={file_id}"
    return download_link, header
|
| 1852 |
+
|
| 1853 |
+
|
| 1854 |
+
def qiwi(url):
    """qiwi.gg link generator
    based on https://github.com/aenulrofik"""
    file_id = url.split("/")[-1]
    with Session() as http:
        try:
            page_text = http.get(url).text
        except Exception as err:
            raise DirectDownloadLinkException(f"ERROR: {err.__class__.__name__}") from err
    page = HTML(page_text)
    heading = page.xpath('//h1[@class="page_TextHeading__VsM7r"]/text()')
    if not heading:
        raise DirectDownloadLinkException("ERROR: File not found")
    # The CDN URL is reconstructed from the file id and the displayed
    # filename's extension.
    ext = heading[0].split(".")[-1]
    return f"https://spyderrock.com/{file_id}.{ext}"
|
| 1869 |
+
|
| 1870 |
+
|
| 1871 |
+
def mp4upload(url):
    """Generate a direct download link from an mp4upload.com page.

    Submits the page's hidden forms in two steps and returns the final
    redirect URL together with the Referer header the CDN requires.

    Returns:
        tuple: (direct link, {"Referer": "https://www.mp4upload.com/"}).

    Raises:
        DirectDownloadLinkException: on any scrape/submit failure.
    """
    with Session() as session:
        try:
            url = url.replace("embed-", "")
            page = HTML(session.get(url).text)
            # FIX: renamed loop variables that shadowed the builtin `input`
            # and the module-level `post` import.
            fields = page.xpath("//input")
            header = {"Referer": "https://www.mp4upload.com/"}
            data = {field.get("name"): field.get("value") for field in fields}
            if not data:
                raise DirectDownloadLinkException("ERROR: File Not Found!")
            step2_text = session.post(
                url,
                data=data,
                headers={
                    "User-Agent": user_agent,
                    "Referer": "https://www.mp4upload.com/",
                },
            ).text
            tree = HTML(step2_text)
            fields = tree.xpath('//form[@name="F1"]//input')
            # FIX: guard value-less inputs — .get("value") may return None and
            # the original None.replace(...) raised AttributeError.
            data = {
                field.get("name"): (field.get("value") or "").replace(" ", "")
                for field in fields
            }
            if not data:
                raise DirectDownloadLinkException("ERROR: File Not Found!")
            data["referer"] = url
            direct_link = session.post(url, data=data).url
            return direct_link, header
        except DirectDownloadLinkException:
            # FIX: don't swallow our own explicit errors in the generic handler.
            raise
        except Exception as e:
            raise DirectDownloadLinkException("ERROR: File Not Found!") from e
|
| 1903 |
+
|
| 1904 |
+
|
| 1905 |
+
def berkasdrive(url):
    """berkasdrive.com link generator
    by https://github.com/aenulrofik"""
    with Session() as http:
        try:
            page_text = http.get(url).text
        except Exception as err:
            raise DirectDownloadLinkException(f"ERROR: {err.__class__.__name__}") from err
    page = HTML(page_text)
    # The first <script> embeds the base64-encoded direct link as its first
    # quoted string.
    if link := page.xpath("//script")[0].text.split('"')[1]:
        return b64decode(link).decode("utf-8")
    raise DirectDownloadLinkException("ERROR: File Not Found!")
|
| 1918 |
+
|
| 1919 |
+
|
| 1920 |
+
def swisstransfer(link):
    """Resolve a SwissTransfer share link into direct download link(s).

    Accepts ``https://www.swisstransfer.com/d/<id>``, optionally suffixed
    with ``::<password>``. A single-file transfer returns a (url, header)
    tuple; a multi-file transfer returns a folder-style details dict.

    Raises:
        DirectDownloadLinkException: on bad link format or API failure.
    """
    matched_link = match(
        r"https://www\.swisstransfer\.com/d/([\w-]+)(?:\:\:(\w+))?", link
    )
    if not matched_link:
        raise DirectDownloadLinkException(
            f"ERROR: Invalid SwissTransfer link format {link}"
        )

    transfer_id, password = matched_link.groups()
    password = password or ""

    def encode_password(password):
        # The API expects the password base64-encoded in the Authorization header.
        return b64encode(password.encode("utf-8")).decode("utf-8") if password else ""

    def getfile(transfer_id, password):
        # Fetch transfer metadata (container, files, download host).
        url = f"https://www.swisstransfer.com/api/links/{transfer_id}"
        headers = {
            "User-Agent": "Mozilla/5.0",
            "Authorization": encode_password(password) if password else "",
            "Content-Type": "application/json" if not password else "",
        }
        response = get(url, headers=headers)

        if response.status_code == 200:
            try:
                return response.json(), headers
            except ValueError:
                raise DirectDownloadLinkException(
                    f"ERROR: Error parsing JSON response {response.text}"
                )
        raise DirectDownloadLinkException(
            f"ERROR: Error fetching file details {response.status_code}, {response.text}"
        )

    def gettoken(password, containerUUID, fileUUID):
        # Each file download needs a one-time token from the API.
        url = "https://www.swisstransfer.com/api/generateDownloadToken"
        headers = {
            "User-Agent": "Mozilla/5.0",
            "Content-Type": "application/json",
        }
        body = {
            "password": password,
            "containerUUID": containerUUID,
            "fileUUID": fileUUID,
        }

        response = post(url, headers=headers, json=body)

        if response.status_code == 200:
            # The token comes back as a bare JSON string; strip its quotes.
            return response.text.strip().replace('"', "")
        raise DirectDownloadLinkException(
            f"ERROR: Error generating download token {response.status_code}, {response.text}"
        )

    data, headers = getfile(transfer_id, password)
    if not data:
        return None

    try:
        container_uuid = data["data"]["containerUUID"]
        download_host = data["data"]["downloadHost"]
        files = data["data"]["container"]["files"]
        # The transfer "message" doubles as the folder title when present.
        folder_name = data["data"]["container"]["message"] or "unknown"
    except (KeyError, IndexError, TypeError) as e:
        raise DirectDownloadLinkException(f"ERROR: Error parsing file details {e}")

    total_size = sum(file["fileSizeInBytes"] for file in files)

    # Single file: return a direct (url, header) tuple.
    if len(files) == 1:
        file = files[0]
        file_uuid = file["UUID"]
        token = gettoken(password, container_uuid, file_uuid)
        download_url = f"https://{download_host}/api/download/{transfer_id}/{file_uuid}?token={token}"
        return download_url, "User-Agent:Mozilla/5.0"

    # Multiple files: build a folder-style details structure, skipping any
    # file whose token could not be generated.
    contents = []
    for file in files:
        file_uuid = file["UUID"]
        file_name = file["fileName"]
        file_size = file["fileSizeInBytes"]

        token = gettoken(password, container_uuid, file_uuid)
        if not token:
            continue

        download_url = f"https://{download_host}/api/download/{transfer_id}/{file_uuid}?token={token}"
        contents.append({"filename": file_name, "path": "", "url": download_url})

    return {
        "contents": contents,
        "title": folder_name,
        "total_size": total_size,
        "header": "User-Agent:Mozilla/5.0",
    }
|
| 2015 |
+
|
| 2016 |
+
|
| 2017 |
+
def instagram(link: str) -> str:
    """
    Fetches the direct video download URL from an Instagram post.

    Args:
        link (str): The Instagram post URL.

    Returns:
        str: The direct video URL.

    Raises:
        DirectDownloadLinkException: If any error occurs during the process.
    """
    api_url = Config.INSTADL_API or "https://instagramcdn.vercel.app"
    full_url = f"{api_url}/api/video?postUrl={link}"

    try:
        response = get(full_url)
        response.raise_for_status()
        data = response.json()

        if (
            data.get("status") == "success"
            and "data" in data
            and "videoUrl" in data["data"]
        ):
            return data["data"]["videoUrl"]

        raise DirectDownloadLinkException("ERROR: Failed to retrieve video URL.")

    except DirectDownloadLinkException:
        # FIX: previously this was re-caught by the generic handler below and
        # wrapped again, yielding a doubled "ERROR: ERROR: ..." message.
        raise
    except Exception as e:
        raise DirectDownloadLinkException(f"ERROR: {e}") from e
|