Update app.py
app.py (CHANGED)
@@ -1,249 +1,303 @@
-# app.py
-from fastapi import FastAPI, HTTPException
-from fastapi.staticfiles import StaticFiles
-from pydantic import BaseModel
-import subprocess
 import os
[old lines 7-14: further import statements whose targets were lost in the page extraction; logging, shutil, dotenv's load_dotenv, and urllib.parse's quote are used below]
-
-# Set up logging
-logging.basicConfig(level=logging.INFO)
-logger = logging.getLogger(__name__)
-
-def env_to_cookies(env_content: str, output_file: str) -> None:
-    """Convert environment variable content to cookie file"""
-    try:
-        # Extract content from env format
-        if '="' not in env_content:
-            raise ValueError("Invalid env content format")
-        content = env_content.split('="', 1)[1].strip('"')
-
-        # Replace escaped newlines with actual newlines
-        cookie_content = content.replace('\\n', '\n')
-
-        # Write to cookie file
-        with open(output_file, 'w') as f:
-            f.write(cookie_content)
-
-        logger.info(f"Successfully created cookie file at {output_file}")
-    except Exception as e:
-        logger.error(f"Error creating cookie file: {str(e)}")
-        raise ValueError(f"Error converting to cookie file: {str(e)}")
-
-def get_cookies() -> str:
-    """Get cookies from environment variable"""
-    load_dotenv()
-    cookie_content = os.getenv('COOKIES')
-    if not cookie_content:
-        raise ValueError("COOKIES environment variable not set")
-    return cookie_content
-
-def env_to_cookies_from_env(output_file: str) -> None:
-    """Convert environment variable from .env file to cookie file"""
-    try:
-        load_dotenv()
-        env_content = os.getenv('COOKIES')
-        logger.info("Retrieved cookies from environment variable")
-
-        if not env_content:
-            raise ValueError("COOKIES not found in environment variables")
-
-        env_to_cookies(f'COOKIES="{env_content}"', output_file)
-    except Exception as e:
-        logger.error(f"Error creating cookie file from env: {str(e)}")
-        raise ValueError(f"Error converting to cookie file: {str(e)}")
 
-app = FastAPI(
-    title="GAMDL API",
-    description="API for downloading Google Drive files using gamdl",
-    version="1.0.0"
-)
 
[old lines 69-70: a comment whose text was lost in the page extraction]
-os.makedirs(DOWNLOADS_DIR, exist_ok=True)
 
[old lines 73-90 not recoverable from the page extraction]
-# [Previous cookie handling code remains the same...]
 
[old lines 93-116 not recoverable from the page extraction]
-        env_to_cookies_from_env(cookie_path)
-
-        # Change to download directory
-        original_dir = os.getcwd()
-        os.chdir(download_subdir)
-
-        # Run gamdl command
-        cmd = ["gamdl", "--codec-song", "aac-legacy", request.url]
-        logger.info(f"Executing command: {' '.join(cmd)}")
-
-        process = subprocess.run(
-            cmd,
-            capture_output=True,
-            text=True
-        )
[old lines 132-165 not recoverable from the page extraction]
-            shutil.copy2(file_path, new_path)
-
-            # Get file extension
-            file_type = os.path.splitext(filename)[1].lstrip('.')
-
-            # Generate download URL
-            encoded_filename = quote(filename)
-            download_url = f"{space_url}/files/{timestamp}/{encoded_filename}"
-
-            downloaded_files.append(FileInfo(
-                filename=filename,
-                download_url=download_url,
-                file_type=file_type
-            ))
 
[old lines 181-194 not recoverable from the page extraction]
-        # Move back to original directory
-        os.chdir(original_dir)
-
-        if not downloaded_files:
-            raise Exception("Failed to process any files")
-
-        return DownloadResponse(
-            success=True,
-            message=f"Successfully downloaded {len(downloaded_files)} files",
-            files=downloaded_files
-        )
-
-    except subprocess.CalledProcessError as e:
-        logger.error(f"Download process failed: stdout={e.stdout}, stderr={e.stderr}")
-        raise HTTPException(
-            status_code=400,
-            detail=f"Failed to download: {e.stderr or e.stdout or str(e)}"
-        )
-    except Exception as e:
-        logger.error(f"Unexpected error: {str(e)}", exc_info=True)
-        raise HTTPException(
-            status_code=500,
-            detail=f"Error: {str(e)}"
-        )
-    finally:
-        if 'original_dir' in locals():
-            os.chdir(original_dir)
 
[old lines 223-249 not recoverable from the page extraction]
 import os
+import json
+import time
+import urllib.parse
+from datetime import datetime, timedelta, timezone
+from starlette.responses import JSONResponse
+from fastapi import FastAPI, HTTPException, status, Request
+from yt_dlp import YoutubeDL
+from yt_dlp.version import __version__ as yt_dlp_version
+from typing import Union, Dict
+
+app = FastAPI(docs_url=None, redoc_url=None)
+
+# Set the cache directory to /tmp, which is writable on Vercel
+os.environ["XDG_CACHE_HOME"] = "/tmp"
+
+# Rate-limiting configuration
+DAILY_LIMIT = 100  # Maximum requests per IP per day
+RATE_LIMIT_FILE = "/tmp/rate_limits.json"
+
+def load_rate_limits() -> Dict[str, Dict]:
+    """Load rate limit data from file"""
+    try:
+        if os.path.exists(RATE_LIMIT_FILE):
+            with open(RATE_LIMIT_FILE, 'r') as f:
+                return json.load(f)
+    except Exception:
+        pass
+    return {}
+
+def save_rate_limits(rate_limits: Dict[str, Dict]):
+    """Save rate limit data to file"""
+    try:
+        with open(RATE_LIMIT_FILE, 'w') as f:
+            json.dump(rate_limits, f)
+    except Exception:
+        pass
+
+def get_current_date() -> str:
+    """Get current date as string in YYYY-MM-DD format"""
+    return datetime.now(timezone.utc).strftime('%Y-%m-%d')
+
+def cleanup_old_entries(rate_limits: Dict[str, Dict]) -> Dict[str, Dict]:
+    """Remove entries older than today"""
+    current_date = get_current_date()
+    cleaned = {}
+
+    for ip, data in rate_limits.items():
+        if data.get('date') == current_date:
+            cleaned[ip] = data
+
+    return cleaned
+
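The four helpers above implement a small file-backed store: one JSON object keyed by client IP, holding a date string and a counter per key. A minimal sketch of the on-disk shape, with illustrative values (the IP and date below are made up):

    # Illustrative only: write and read back the store shape used above.
    import json, os, tempfile

    store = {"203.0.113.7": {"date": "2025-01-15", "count": 2}}
    path = os.path.join(tempfile.gettempdir(), "rate_limits.json")
    with open(path, "w") as f:
        json.dump(store, f)
    with open(path) as f:
        print(json.load(f))  # {'203.0.113.7': {'date': '2025-01-15', 'count': 2}}

Worth noting as a design trade-off: /tmp is per-instance and ephemeral on serverless platforms, so counts restart when the instance is recycled, and the load-modify-save cycle is not atomic under concurrent requests; the silent except/pass blocks make the limiter fail open rather than break the API.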
+def check_rate_limit(ip: str) -> tuple[bool, int]:
+    """
+    Check if IP has exceeded daily limit
+    Returns: (is_allowed, remaining_requests)
+    """
+    rate_limits = load_rate_limits()
+    rate_limits = cleanup_old_entries(rate_limits)
+
+    current_date = get_current_date()
+
+    if ip not in rate_limits:
+        rate_limits[ip] = {
+            'date': current_date,
+            'count': 0
+        }
+
+    ip_data = rate_limits[ip]
+
+    # Reset count if it's a new day
+    if ip_data.get('date') != current_date:
+        ip_data['date'] = current_date
+        ip_data['count'] = 0
+
+    current_count = ip_data['count']
+
+    if current_count >= DAILY_LIMIT:
+        return False, 0
+
+    # Increment count
+    ip_data['count'] = current_count + 1
+    rate_limits[ip] = ip_data
+
+    # Save updated limits
+    save_rate_limits(rate_limits)
+
+    remaining = DAILY_LIMIT - ip_data['count']
+    return True, remaining
+
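For intuition, a hedged usage sketch of the limiter; importing it as `app` is an assumption (the module name is whatever this file is saved as):

    # Hypothetical usage; `app` as a module name is an assumption.
    from app import check_rate_limit, DAILY_LIMIT

    allowed, remaining = check_rate_limit("203.0.113.7")
    print(allowed, remaining)  # first call of the UTC day -> True, DAILY_LIMIT - 1
    # Once the counter reaches DAILY_LIMIT, further calls return (False, 0)
    # until the entry is reset on the next UTC day.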
+def get_client_ip(request: Request) -> str:
+    """Extract client IP from request, handling proxies"""
+    # Check for common proxy headers
+    forwarded_for = request.headers.get("x-forwarded-for")
+    if forwarded_for:
+        # Take the first IP in the chain
+        return forwarded_for.split(",")[0].strip()
+
+    real_ip = request.headers.get("x-real-ip")
+    if real_ip:
+        return real_ip.strip()
+
+    # Fallback to direct client IP
+    return request.client.host if request.client else "unknown"
+
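Since X-Forwarded-For can carry a whole proxy chain, the helper keeps only the left-most hop:

    # How the left-most hop is extracted from a proxy chain (illustrative values).
    forwarded_for = "203.0.113.7, 10.0.0.1, 10.0.0.2"
    print(forwarded_for.split(",")[0].strip())  # -> 203.0.113.7

One caveat: both headers are client-settable unless a trusted proxy in front of the app overwrites them, so the per-IP limit is only as strong as the deployment's proxy configuration.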
+@app.get("/api/version")
+async def version_info():
+    return JSONResponse({"yt_dlp": yt_dlp_version})
+
+@app.get('/')
+def main():
+    return "Chrunos Downloader API is running."
+
+@app.get("/api/info")
+async def get_info(
+    request: Request,
+    url: str,
+    quality: Union[str, None] = None,
+    playlist: bool = True
+):
+    # Get client IP
+    client_ip = get_client_ip(request)
+
+    # Check rate limit
+    is_allowed, remaining = check_rate_limit(client_ip)
+
+    if not is_allowed:
+        raise HTTPException(
+            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
+            detail=f"Daily limit of {DAILY_LIMIT} requests exceeded. Try again tomorrow.",
+            headers={
+                "X-RateLimit-Limit": str(DAILY_LIMIT),
+                "X-RateLimit-Remaining": "0",
+                "X-RateLimit-Reset": str(int(time.time()) + 86400),
+                "Cache-Control": "no-store, max-age=0"
+            }
+        )
+
+    ydl_options = {
+        "retries": 3,
+        "encoding": "utf8",
+        "noplaylist": not playlist,
+        "dump_single_json": True,
+        "format": quality if quality else "bestvideo+bestaudio/best",
+        "ignoreerrors": True,
+        "extract_flat": playlist,
+        "cachedir": "/tmp/yt-dlp-cache",
+        "js-runtimes": "node"
+    }
+
+    with YoutubeDL(ydl_options) as ytdl:
+        try:
+            response = ytdl.extract_info(url, download=False)
+            return JSONResponse(
+                response,
+                headers={
+                    "Cache-Control": "s-maxage=2592000, stale-while-revalidate",
+                    "X-RateLimit-Limit": str(DAILY_LIMIT),
+                    "X-RateLimit-Remaining": str(remaining),
+                    "X-RateLimit-Reset": str(int(time.time()) + 86400)
+                }
+            )
+        except Exception as e:
+            print(e)
+            raise HTTPException(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                detail=repr(e),
+                headers={"Cache-Control": "no-store, max-age=0"},
+            )
+
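A hedged client-side sketch of this endpoint; the host, port, and media URL are assumptions, not part of this commit:

    # Illustrative call to /api/info using only the standard library.
    import urllib.parse, urllib.request

    query = urllib.parse.urlencode({
        "url": "https://example.com/watch?v=PLACEHOLDER",  # placeholder media URL
        "playlist": "false",
    })
    with urllib.request.urlopen(f"http://127.0.0.1:8000/api/info?{query}") as resp:
        print(resp.headers["X-RateLimit-Remaining"])  # requests left today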
+@app.get("/api/playlist")
+async def get_playlist_info(
+    request: Request,
+    url: str,
+    start: int = 1,
+    end: int = 50
+):
+    """
+    Fetches paginated items from a playlist or user profile.
+    Strictly enforces a maximum of 50 items per request and provides a next_page URL.
+    """
+    # 1. Validate inputs
+    if start < 1:
+        raise HTTPException(status_code=400, detail="'start' must be 1 or greater.")
+    if end < start:
+        raise HTTPException(status_code=400, detail="'end' must be greater than or equal to 'start'.")
+
+    # 2. Enforce a hard limit of 50 items per request.
+    # If a client requests start=51 & end=1000, we force end=100.
+    requested_count = end - start + 1
+    if requested_count > 50:
+        end = start + 49
+        requested_count = 50
+
+    # Rate limiting
+    client_ip = get_client_ip(request)
+    is_allowed, remaining = check_rate_limit(client_ip)
+
+    if not is_allowed:
+        raise HTTPException(
+            status_code=status.HTTP_429_TOO_MANY_REQUESTS,
+            detail=f"Daily limit of {DAILY_LIMIT} requests exceeded. Try again tomorrow.",
+            headers={
+                "X-RateLimit-Limit": str(DAILY_LIMIT),
+                "X-RateLimit-Remaining": "0",
+                "X-RateLimit-Reset": str(int(time.time()) + 86400),
+                "Cache-Control": "no-store, max-age=0"
+            }
+        )
+
+    ydl_options = {
+        "retries": 3,
+        "encoding": "utf8",
+        "extract_flat": "in_playlist",
+        "dump_single_json": True,
+        "ignoreerrors": True,
+        "cachedir": "/tmp/yt-dlp-cache",
+        "js-runtimes": "node",
+        # Pass the strictly clamped pagination settings to yt-dlp
+        "playliststart": start,
+        "playlistend": end
+    }
+
+    with YoutubeDL(ydl_options) as ytdl:
+        try:
+            response = ytdl.extract_info(url, download=False)
+            if not response:
+                raise HTTPException(status_code=404, detail="Playlist or profile not found.")
+
+            # Extract entries. yt-dlp might omit 'entries' if out of bounds, so default to empty list.
+            raw_entries = response.get("entries") or []
+
+            # Filter out 'None' entries (yt-dlp sometimes returns None for deleted/private videos in a playlist)
+            valid_entries = [e for e in raw_entries if e is not None]
+
+            # 3. Determine if there is a next page
+            next_page_url = None
+
+            # If yt-dlp found enough items to fill our requested quota, there is likely a next page.
+            # (Note: we check the length of raw_entries because deleted videos still consume an index slot)
+            if len(raw_entries) >= requested_count:
+                next_start = end + 1
+                next_end = next_start + 49
+
+                # Safely encode the target URL
+                encoded_url = urllib.parse.quote(url)
+                base_url = str(request.base_url).rstrip('/')
+
+                # Construct the ready-to-use next_page URL for the client
+                next_page_url = f"{base_url}/api/playlist?url={encoded_url}&start={next_start}&end={next_end}"
+
+            clean_response = {
+                "id": response.get("id"),
+                "title": response.get("title", "Unknown Playlist"),
+                "uploader": response.get("uploader"),
+                "items_returned": len(valid_entries),
+                "next_page": next_page_url,
+                "entries": valid_entries
+            }
+
+            return JSONResponse(
+                clean_response,
+                headers={
+                    "Cache-Control": "s-maxage=2592000, stale-while-revalidate",
+                    "X-RateLimit-Limit": str(DAILY_LIMIT),
+                    "X-RateLimit-Remaining": str(remaining),
+                    "X-RateLimit-Reset": str(int(time.time()) + 86400)
+                }
+            )
+        except HTTPException:
+            # Let the 404 above propagate instead of being rewrapped as a 400
+            raise
+        except Exception as e:
+            print(f"Error extracting playlist: {e}")
+            raise HTTPException(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                detail=repr(e),
+                headers={"Cache-Control": "no-store, max-age=0"},
+            )
+
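Because every response carries a ready-made next_page link, a client can walk a long playlist 50 items at a time until the link comes back null; a minimal sketch under the same host/port assumptions as above:

    # Illustrative pagination loop over /api/playlist.
    import json, urllib.parse, urllib.request

    target = urllib.parse.quote("https://example.com/playlist?list=PLACEHOLDER")
    page_url = f"http://127.0.0.1:8000/api/playlist?url={target}"
    while page_url:
        with urllib.request.urlopen(page_url) as resp:
            page = json.load(resp)
        print(page["title"], page["items_returned"])
        page_url = page["next_page"]  # None once the playlist is exhausted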
+@app.get("/api/rate-limit-status")
+async def get_rate_limit_status(request: Request):
+    """Endpoint to check current rate limit status for the requesting IP"""
+    client_ip = get_client_ip(request)
+    rate_limits = load_rate_limits()
+    rate_limits = cleanup_old_entries(rate_limits)
+
+    current_date = get_current_date()
+
+    if client_ip in rate_limits and rate_limits[client_ip].get('date') == current_date:
+        used = rate_limits[client_ip]['count']
+        remaining = DAILY_LIMIT - used
+    else:
+        used = 0
+        remaining = DAILY_LIMIT
+
+    # Counts reset at the start of the next UTC day
+    reset_date = (datetime.now(timezone.utc) + timedelta(days=1)).strftime('%Y-%m-%d')
+
+    return JSONResponse({
+        "daily_limit": DAILY_LIMIT,
+        "used": used,
+        "remaining": remaining,
+        "reset_time": f"{reset_date}T00:00:00Z"
+    })
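And a matching sketch for the status endpoint, again assuming a local run:

    # Illustrative quota check against /api/rate-limit-status.
    import json, urllib.request

    with urllib.request.urlopen("http://127.0.0.1:8000/api/rate-limit-status") as resp:
        print(json.load(resp))  # {"daily_limit": 100, "used": ..., "remaining": ..., "reset_time": ...}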