mhdzumair coderabbitai[bot] committed on
Commit
abbeaee
·
unverified ·
1 Parent(s): 3057a00

Apply suggestions from code review

Browse files

Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>

mediaflow_proxy/extractors/uqload.py CHANGED
@@ -3,12 +3,39 @@ import re
3
  from mediaflow_proxy.configs import settings
4
 
5
 
6
- async def uqload_url(d: str, use_request_proxy: bool):
7
- async with httpx.AsyncClient(proxy=settings.proxy_url if use_request_proxy else None) as client:
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
 
9
- response = await client.get(d, follow_redirects=True)
10
- video_url_match = re.search(r'sources: \["(.*?)"\]', response.text)
11
- if video_url_match:
12
- final_url = video_url_match.group(1)
13
- uqload_dict = {"Referer": "https://uqload.to/"}
14
- return final_url, uqload_dict
 
 
 
 
 
 
 
 
 
 
 
 
3
  from mediaflow_proxy.configs import settings
4
 
5
 
6
+ from typing import Tuple, Dict, Optional
7
+
8
+ async def uqload_url(d: str, use_request_proxy: bool) -> Tuple[Optional[str], Dict[str, str]]:
9
+ """
10
+ Extract video URL from Uqload.
11
+
12
+ Args:
13
+ d: The Uqload video URL
14
+ use_request_proxy: Whether to use proxy for the request
15
+
16
+ Returns:
17
+ Tuple containing the extracted video URL (or None if not found) and headers dictionary
18
+
19
+ Raises:
20
+ httpx.HTTPError: If the HTTP request fails
21
+ """
22
+ if not d.startswith(('http://', 'https://')):
23
+ raise ValueError("Invalid URL format")
24
 
25
+ REFERER = "https://uqload.to/"
26
+ final_url = None
27
+
28
+ async with httpx.AsyncClient(proxy=settings.proxy_url if use_request_proxy else None) as client:
29
+ try:
30
+ response = await client.get(d, follow_redirects=True)
31
+ response.raise_for_status()
32
+
33
+ # Look for video URL in response using a more robust pattern
34
+ video_url_match = re.search(r'sources:\s*\[(["\'])(.*?)\1\]', response.text)
35
+ if video_url_match:
36
+ final_url = video_url_match.group(2)
37
+
38
+ return final_url, {"Referer": REFERER}
39
+ except httpx.HTTPError as e:
40
+ # Log the error here if logging is available
41
+ raise
mediaflow_proxy/extractors_routes.py CHANGED
@@ -10,7 +10,7 @@ host_map = {"Doodstream": doodstream_url, "Mixdrop": mixdrop_url, "Uqload": uqlo
10
 
11
 
12
  @extractor_router.get("/extractor")
13
- async def doodstream_extractor(
14
  d: str = Query(..., description="Extract Clean Link from various Hosts"),
15
  use_request_proxy: bool = Query(False, description="Whether to use the MediaFlow proxy configuration."),
16
  host: str = Query(
@@ -32,8 +32,15 @@ async def doodstream_extractor(
32
  """
33
  try:
34
  final_url, headers_dict = await host_map[host](d, use_request_proxy)
 
 
 
 
 
 
 
35
  except Exception as e:
36
- return JSONResponse(content={"error": str(e)})
37
  if redirect_stream == True:
38
  formatted_headers = format_headers(headers_dict)
39
  redirected_stream = f"/proxy/stream?api_password={settings.api_password}&d={final_url}&{formatted_headers}"
 
10
 
11
 
12
  @extractor_router.get("/extractor")
13
+ async def extract_media_url(
14
  d: str = Query(..., description="Extract Clean Link from various Hosts"),
15
  use_request_proxy: bool = Query(False, description="Whether to use the MediaFlow proxy configuration."),
16
  host: str = Query(
 
32
  """
33
  try:
34
  final_url, headers_dict = await host_map[host](d, use_request_proxy)
35
+ except KeyError:
36
+ return JSONResponse(
37
+ status_code=400,
38
+ content={"error": f"Invalid host type. Available hosts: {', '.join(host_map.keys())}"}
39
+ )
40
+ except ValueError as e:
41
+ return JSONResponse(status_code=400, content={"error": str(e)})
42
  except Exception as e:
43
+ return JSONResponse(status_code=500, content={"error": "Internal server error"})
44
  if redirect_stream == True:
45
  formatted_headers = format_headers(headers_dict)
46
  redirected_stream = f"/proxy/stream?api_password={settings.api_password}&d={final_url}&{formatted_headers}"
mediaflow_proxy/static/speedtest_progress.html CHANGED
@@ -63,34 +63,56 @@
63
  <script>
64
  const urlParams = new URLSearchParams(window.location.search);
65
  const taskId = urlParams.get("task_id");
 
 
 
66
 
 
 
 
 
67
  async function checkStatus() {
68
  try {
69
- const response = await fetch(`/speedtest/results/${taskId}`);
 
 
 
 
 
 
70
  if (!response.ok) {
71
  throw new Error('Network response was not ok');
72
  }
73
  const data = await response.json();
74
  console.log("Fetched data:", data);
 
75
 
76
- // Check if the test is still running based on the response data
77
  if (data && data.message && data.message.includes("still running")) {
78
  console.log("Test still running, polling again...");
79
- // Poll again after 5 seconds if the test is still running
80
- setTimeout(checkStatus, 5000);
81
  } else {
82
  console.log("Test complete, redirecting after a short delay...");
83
- // Redirect to the results if the test is done after a short delay
84
  setTimeout(() => {
85
  window.location.href = `/speedtest/results/${taskId}`;
86
- }, 2000); // 2 seconds delay
87
  }
88
  } catch (error) {
89
  console.error("Error fetching status:", error);
90
- // Retry after 5 seconds in case of error
91
- setTimeout(checkStatus, 5000);
 
 
 
 
92
  }
93
  }
 
 
 
 
 
 
 
94
 
95
  // Start the first status check after 120 seconds (120000 milliseconds)
96
  setTimeout(checkStatus, 120000);
@@ -105,12 +127,14 @@
105
  </head>
106
  <body class="light-mode">
107
  <div class="toggle-switch">
108
- <label for="darkModeToggle">Dark Mode</label>
109
- <input type="checkbox" id="darkModeToggle" onclick="toggleDarkMode()">
 
 
110
  </div>
111
  <div class="container">
112
  <h1>Speedtest in progress... Please wait up to 3 minutes.</h1>
113
- <div class="progress-bar"></div>
114
  </div>
115
  </body>
116
  </html>
 
63
  <script>
64
  const urlParams = new URLSearchParams(window.location.search);
65
  const taskId = urlParams.get("task_id");
66
+ if (!taskId || !/^[a-zA-Z0-9-_]+$/.test(taskId)) {
67
+ window.location.href = "/speedtest";
68
+ }
69
 
70
+ let statusCheckTimeout;
71
+ let retryCount = 0;
72
+ const MAX_RETRIES = 5;
73
+
74
  async function checkStatus() {
75
  try {
76
+ const controller = new AbortController();
77
+ const timeoutId = setTimeout(() => controller.abort(), 5000);
78
+ const response = await fetch(`/speedtest/results/${taskId}`, {
79
+ signal: controller.signal
80
+ });
81
+ clearTimeout(timeoutId);
82
+
83
  if (!response.ok) {
84
  throw new Error('Network response was not ok');
85
  }
86
  const data = await response.json();
87
  console.log("Fetched data:", data);
88
+ retryCount = 0;
89
 
 
90
  if (data && data.message && data.message.includes("still running")) {
91
  console.log("Test still running, polling again...");
92
+ statusCheckTimeout = setTimeout(checkStatus, 5000);
 
93
  } else {
94
  console.log("Test complete, redirecting after a short delay...");
 
95
  setTimeout(() => {
96
  window.location.href = `/speedtest/results/${taskId}`;
97
+ }, 2000);
98
  }
99
  } catch (error) {
100
  console.error("Error fetching status:", error);
101
+ retryCount++;
102
+ if (retryCount < MAX_RETRIES) {
103
+ statusCheckTimeout = setTimeout(checkStatus, 5000);
104
+ } else {
105
+ alert("Failed to check status after multiple attempts. Please refresh the page.");
106
+ }
107
  }
108
  }
109
+
110
+ // Cleanup on page unload
111
+ window.addEventListener('unload', () => {
112
+ if (statusCheckTimeout) {
113
+ clearTimeout(statusCheckTimeout);
114
+ }
115
+ });
116
 
117
  // Start the first status check after 120 seconds (120000 milliseconds)
118
  setTimeout(checkStatus, 120000);
 
127
  </head>
128
  <body class="light-mode">
129
  <div class="toggle-switch">
130
+ <label for="darkModeToggle" class="switch">
131
+ <input type="checkbox" id="darkModeToggle" onclick="toggleDarkMode()" aria-label="Toggle dark mode">
132
+ <span class="slider">Dark Mode</span>
133
+ </label>
134
  </div>
135
  <div class="container">
136
  <h1>Speedtest in progress... Please wait up to 3 minutes.</h1>
137
+ <div class="progress-bar" role="progressbar" aria-valuemin="0" aria-valuemax="100" aria-valuenow="0"></div>
138
  </div>
139
  </body>
140
  </html>
mediaflow_proxy/utils/rd_speedtest.py CHANGED
@@ -37,32 +37,33 @@ async def perform_speed_test():
37
  async with AsyncClient() as client:
38
  streamer = Streamer(client)
39
 
40
- for location, base_url in test_urls.items():
41
- # Generate a random float with 16 decimal places
42
- random_number = f"{random.uniform(0, 1):.16f}"
43
- url = f"{base_url}{random_number}"
44
-
45
- logging.info(f"Testing URL: {url}")
46
-
47
  start_time = time.time()
48
  total_bytes = 0
49
 
50
- try:
51
- # Stream the response
52
- async for chunk in streamer.stream_content(url, headers={}):
53
- if time.time() - start_time >= test_duration:
54
- break
55
- total_bytes += len(chunk)
56
 
57
- duration = time.time() - start_time
58
- speed_mbps = (total_bytes * 8) / (duration * 1_000_000)
59
- speed[location] = {
60
- "speed_mbps": round(speed_mbps, 2),
61
- "duration": round(duration, 2)
62
- }
63
- logging.info(f"Speed for {location}: {speed_mbps} Mbps in {duration} seconds")
64
- except Exception as e:
65
- speed[location] = {"error": str(e)}
66
- logging.error(f"Error for {location}: {e}")
67
 
 
 
 
 
 
 
 
 
 
68
  return speed
 
37
  async with AsyncClient() as client:
38
  streamer = Streamer(client)
39
 
40
+ async def test_single_url(location: str, url: str) -> Dict[str, Any]:
41
+ try:
 
 
 
 
 
42
  start_time = time.time()
43
  total_bytes = 0
44
 
45
+ async for chunk in streamer.stream_content(url, headers={}):
46
+ if time.time() - start_time >= test_duration:
47
+ break
48
+ total_bytes += len(chunk)
 
 
49
 
50
+ duration = time.time() - start_time
51
+ speed_mbps = (total_bytes * 8) / (duration * 1_000_000)
52
+ return {
53
+ "speed_mbps": round(speed_mbps, 2),
54
+ "duration": round(duration, 2)
55
+ }
56
+ except Exception as e:
57
+ logging.error(f"Error testing {location}: {e}")
58
+ return {"error": str(e)}
 
59
 
60
+ for location, base_url in test_urls.items():
61
+ random_number = f"{random.uniform(0, 1):.16f}"
62
+ url = f"{base_url}{random_number}"
63
+ logging.info(f"Testing URL: {url}")
64
+
65
+ speed[location] = await test_single_url(location, url)
66
+
67
+ # Add rate limiting between tests
68
+ await asyncio.sleep(1)
69
  return speed