Spaces:
Sleeping
Sleeping
feat: update existing files for provider toggle and proxy system
Browse files- admin_router.py +135 -4
- browser_portal.py +97 -1
- config.py +14 -256
- engine.py +70 -13
- proxy_manager.py +88 -198
- static/qaz.html +134 -74
admin_router.py
CHANGED
|
@@ -773,10 +773,7 @@ async def restart_portal_with_proxy(provider: str):
|
|
| 773 |
# Get current proxy
|
| 774 |
current_proxy = proxy_mgr.get_current_proxy()
|
| 775 |
if not current_proxy:
|
| 776 |
-
|
| 777 |
-
current_proxy = await proxy_mgr.get_working_proxy()
|
| 778 |
-
if not current_proxy:
|
| 779 |
-
raise HTTPException(status_code=503, detail="No working proxy available")
|
| 780 |
|
| 781 |
# Close existing portal
|
| 782 |
await portal.close()
|
|
@@ -794,3 +791,137 @@ async def restart_portal_with_proxy(provider: str):
|
|
| 794 |
raise
|
| 795 |
except Exception as e:
|
| 796 |
raise HTTPException(status_code=500, detail=str(e))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 773 |
# Get current proxy
|
| 774 |
current_proxy = proxy_mgr.get_current_proxy()
|
| 775 |
if not current_proxy:
|
| 776 |
+
raise HTTPException(status_code=503, detail="No custom proxy configured. Set one first.")
|
|
|
|
|
|
|
|
|
|
| 777 |
|
| 778 |
# Close existing portal
|
| 779 |
await portal.close()
|
|
|
|
| 791 |
raise
|
| 792 |
except Exception as e:
|
| 793 |
raise HTTPException(status_code=500, detail=str(e))
|
| 794 |
+
|
| 795 |
+
|
| 796 |
+
# --- Provider Toggle Management ---
|
| 797 |
+
|
| 798 |
+
class ProviderToggleRequest(BaseModel):
|
| 799 |
+
provider_id: str
|
| 800 |
+
enabled: bool
|
| 801 |
+
|
| 802 |
+
@router.get("/providers")
|
| 803 |
+
async def get_providers():
|
| 804 |
+
"""Get all providers with their enabled/disabled status."""
|
| 805 |
+
try:
|
| 806 |
+
from provider_state import get_provider_state_manager
|
| 807 |
+
|
| 808 |
+
manager = await get_provider_state_manager()
|
| 809 |
+
providers = manager.get_all_providers()
|
| 810 |
+
|
| 811 |
+
return {
|
| 812 |
+
"providers": [
|
| 813 |
+
{
|
| 814 |
+
"id": provider_id,
|
| 815 |
+
"name": config["name"],
|
| 816 |
+
"type": config["type"],
|
| 817 |
+
"enabled": config["enabled"]
|
| 818 |
+
}
|
| 819 |
+
for provider_id, config in providers.items()
|
| 820 |
+
]
|
| 821 |
+
}
|
| 822 |
+
except Exception as e:
|
| 823 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 824 |
+
|
| 825 |
+
@router.post("/providers/toggle")
|
| 826 |
+
async def toggle_provider(req: ProviderToggleRequest):
|
| 827 |
+
"""Enable or disable a provider."""
|
| 828 |
+
try:
|
| 829 |
+
from provider_state import get_provider_state_manager
|
| 830 |
+
|
| 831 |
+
manager = await get_provider_state_manager()
|
| 832 |
+
success = await manager.set_provider_state(req.provider_id, req.enabled)
|
| 833 |
+
|
| 834 |
+
if success:
|
| 835 |
+
return {
|
| 836 |
+
"status": "success",
|
| 837 |
+
"provider_id": req.provider_id,
|
| 838 |
+
"enabled": req.enabled,
|
| 839 |
+
"message": f"Provider '{req.provider_id}' {'enabled' if req.enabled else 'disabled'}"
|
| 840 |
+
}
|
| 841 |
+
else:
|
| 842 |
+
raise HTTPException(status_code=400, detail=f"Failed to toggle provider '{req.provider_id}'")
|
| 843 |
+
except HTTPException:
|
| 844 |
+
raise
|
| 845 |
+
except Exception as e:
|
| 846 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 847 |
+
|
| 848 |
+
|
| 849 |
+
# --- Custom Proxy Management ---
|
| 850 |
+
|
| 851 |
+
class SetProxyRequest(BaseModel):
|
| 852 |
+
proxy: str # Format: ip:port or protocol://ip:port
|
| 853 |
+
|
| 854 |
+
@router.post("/proxy/set")
|
| 855 |
+
async def set_custom_proxy(req: SetProxyRequest):
|
| 856 |
+
"""Set a custom proxy for the entire container."""
|
| 857 |
+
try:
|
| 858 |
+
from proxy_manager import get_proxy_manager
|
| 859 |
+
|
| 860 |
+
proxy_mgr = get_proxy_manager()
|
| 861 |
+
success = proxy_mgr.set_custom_proxy(req.proxy)
|
| 862 |
+
|
| 863 |
+
if success:
|
| 864 |
+
return {
|
| 865 |
+
"status": "success",
|
| 866 |
+
"proxy": req.proxy,
|
| 867 |
+
"message": "Custom proxy set successfully"
|
| 868 |
+
}
|
| 869 |
+
else:
|
| 870 |
+
raise HTTPException(status_code=400, detail="Invalid proxy format")
|
| 871 |
+
except HTTPException:
|
| 872 |
+
raise
|
| 873 |
+
except Exception as e:
|
| 874 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 875 |
+
|
| 876 |
+
@router.post("/proxy/clear")
|
| 877 |
+
async def clear_custom_proxy():
|
| 878 |
+
"""Clear the custom proxy."""
|
| 879 |
+
try:
|
| 880 |
+
from proxy_manager import get_proxy_manager
|
| 881 |
+
|
| 882 |
+
proxy_mgr = get_proxy_manager()
|
| 883 |
+
proxy_mgr.clear_proxy()
|
| 884 |
+
|
| 885 |
+
return {
|
| 886 |
+
"status": "success",
|
| 887 |
+
"message": "Custom proxy cleared"
|
| 888 |
+
}
|
| 889 |
+
except Exception as e:
|
| 890 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 891 |
+
|
| 892 |
+
@router.get("/proxy/status")
|
| 893 |
+
async def get_proxy_status():
|
| 894 |
+
"""Get current proxy status."""
|
| 895 |
+
try:
|
| 896 |
+
from proxy_manager import get_proxy_manager
|
| 897 |
+
|
| 898 |
+
proxy_mgr = get_proxy_manager()
|
| 899 |
+
status = proxy_mgr.get_status()
|
| 900 |
+
|
| 901 |
+
return status
|
| 902 |
+
except Exception as e:
|
| 903 |
+
raise HTTPException(status_code=500, detail=str(e))
|
| 904 |
+
|
| 905 |
+
@router.post("/proxy/test")
|
| 906 |
+
async def test_custom_proxy():
|
| 907 |
+
"""Test if the current custom proxy is working."""
|
| 908 |
+
try:
|
| 909 |
+
from proxy_manager import get_proxy_manager
|
| 910 |
+
|
| 911 |
+
proxy_mgr = get_proxy_manager()
|
| 912 |
+
|
| 913 |
+
if not proxy_mgr.get_current_proxy():
|
| 914 |
+
raise HTTPException(status_code=400, detail="No custom proxy configured")
|
| 915 |
+
|
| 916 |
+
is_working = await proxy_mgr.test_proxy()
|
| 917 |
+
status = proxy_mgr.get_status()
|
| 918 |
+
|
| 919 |
+
return {
|
| 920 |
+
"status": "success",
|
| 921 |
+
"is_working": is_working,
|
| 922 |
+
**status
|
| 923 |
+
}
|
| 924 |
+
except HTTPException:
|
| 925 |
+
raise
|
| 926 |
+
except Exception as e:
|
| 927 |
+
raise HTTPException(status_code=500, detail=str(e))
|
browser_portal.py
CHANGED
|
@@ -88,7 +88,7 @@ PORTAL_CONFIGS = {
|
|
| 88 |
|
| 89 |
|
| 90 |
class BrowserPortal:
|
| 91 |
-
"""Manages an interactive browser session."""
|
| 92 |
|
| 93 |
def __init__(self, provider: PortalProvider, config: PortalConfig):
|
| 94 |
self.provider = provider
|
|
@@ -103,6 +103,9 @@ class BrowserPortal:
|
|
| 103 |
self.message_queue = []
|
| 104 |
self.last_activity = None
|
| 105 |
self.is_logged_in = False
|
|
|
|
|
|
|
|
|
|
| 106 |
|
| 107 |
async def initialize(self, headless: bool = True, proxy: Optional[Any] = None):
|
| 108 |
"""Initialize the browser with enhanced stealth and optional proxy."""
|
|
@@ -171,6 +174,10 @@ class BrowserPortal:
|
|
| 171 |
|
| 172 |
await self.take_screenshot()
|
| 173 |
|
|
|
|
|
|
|
|
|
|
|
|
|
| 174 |
except Exception as e:
|
| 175 |
logger.error(f"Failed to initialize portal [{self.provider.value}]: {e}")
|
| 176 |
raise
|
|
@@ -411,6 +418,9 @@ class BrowserPortal:
|
|
| 411 |
async def close(self):
|
| 412 |
"""Close the browser."""
|
| 413 |
try:
|
|
|
|
|
|
|
|
|
|
| 414 |
if self.browser:
|
| 415 |
await self.browser.close()
|
| 416 |
if self.playwright:
|
|
@@ -428,6 +438,92 @@ class BrowserPortal:
|
|
| 428 |
return self.browser.is_connected()
|
| 429 |
except:
|
| 430 |
return False
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 431 |
|
| 432 |
|
| 433 |
# Portal manager
|
|
|
|
| 88 |
|
| 89 |
|
| 90 |
class BrowserPortal:
|
| 91 |
+
"""Manages an interactive browser session with auto-refresh on DOM changes."""
|
| 92 |
|
| 93 |
def __init__(self, provider: PortalProvider, config: PortalConfig):
|
| 94 |
self.provider = provider
|
|
|
|
| 103 |
self.message_queue = []
|
| 104 |
self.last_activity = None
|
| 105 |
self.is_logged_in = False
|
| 106 |
+
self._dom_change_task = None
|
| 107 |
+
self._last_dom_hash = None
|
| 108 |
+
self._auto_refresh_enabled = True
|
| 109 |
|
| 110 |
async def initialize(self, headless: bool = True, proxy: Optional[Any] = None):
|
| 111 |
"""Initialize the browser with enhanced stealth and optional proxy."""
|
|
|
|
| 174 |
|
| 175 |
await self.take_screenshot()
|
| 176 |
|
| 177 |
+
# Start DOM monitoring for auto-refresh
|
| 178 |
+
if self._auto_refresh_enabled:
|
| 179 |
+
await self.start_dom_monitoring()
|
| 180 |
+
|
| 181 |
except Exception as e:
|
| 182 |
logger.error(f"Failed to initialize portal [{self.provider.value}]: {e}")
|
| 183 |
raise
|
|
|
|
| 418 |
async def close(self):
|
| 419 |
"""Close the browser."""
|
| 420 |
try:
|
| 421 |
+
# Stop DOM monitoring first
|
| 422 |
+
await self.stop_dom_monitoring()
|
| 423 |
+
|
| 424 |
if self.browser:
|
| 425 |
await self.browser.close()
|
| 426 |
if self.playwright:
|
|
|
|
| 438 |
return self.browser.is_connected()
|
| 439 |
except:
|
| 440 |
return False
|
| 441 |
+
|
| 442 |
+
async def start_dom_monitoring(self):
|
| 443 |
+
"""Start monitoring DOM changes and auto-refresh screenshot."""
|
| 444 |
+
if not self._auto_refresh_enabled or self._dom_change_task:
|
| 445 |
+
return
|
| 446 |
+
|
| 447 |
+
self._dom_change_task = asyncio.create_task(self._dom_monitoring_loop())
|
| 448 |
+
logger.info(f"Portal [{self.provider.value}]: DOM monitoring started")
|
| 449 |
+
|
| 450 |
+
async def stop_dom_monitoring(self):
|
| 451 |
+
"""Stop DOM monitoring."""
|
| 452 |
+
if self._dom_change_task:
|
| 453 |
+
self._dom_change_task.cancel()
|
| 454 |
+
try:
|
| 455 |
+
await self._dom_change_task
|
| 456 |
+
except asyncio.CancelledError:
|
| 457 |
+
pass
|
| 458 |
+
self._dom_change_task = None
|
| 459 |
+
logger.info(f"Portal [{self.provider.value}]: DOM monitoring stopped")
|
| 460 |
+
|
| 461 |
+
async def _dom_monitoring_loop(self):
|
| 462 |
+
"""Monitor DOM for changes and refresh screenshot when needed."""
|
| 463 |
+
import hashlib
|
| 464 |
+
|
| 465 |
+
check_interval = 2.0 # Check every 2 seconds
|
| 466 |
+
|
| 467 |
+
while self.is_running() and self._auto_refresh_enabled:
|
| 468 |
+
try:
|
| 469 |
+
if self.page:
|
| 470 |
+
# Get a hash of the current DOM content
|
| 471 |
+
current_hash = await self.page.evaluate("""
|
| 472 |
+
() => {
|
| 473 |
+
// Get visible text content from main content areas
|
| 474 |
+
const selectors = [
|
| 475 |
+
'main', 'article', '[role="main"]',
|
| 476 |
+
'.chat-content', '.message-content',
|
| 477 |
+
'[data-message-author-role]',
|
| 478 |
+
'.conversation', '.response'
|
| 479 |
+
];
|
| 480 |
+
|
| 481 |
+
let content = '';
|
| 482 |
+
for (const sel of selectors) {
|
| 483 |
+
const el = document.querySelector(sel);
|
| 484 |
+
if (el) {
|
| 485 |
+
content += el.innerText || el.textContent || '';
|
| 486 |
+
}
|
| 487 |
+
}
|
| 488 |
+
|
| 489 |
+
// Fallback to body if no content areas found
|
| 490 |
+
if (!content) {
|
| 491 |
+
content = document.body ? (document.body.innerText || '') : '';
|
| 492 |
+
}
|
| 493 |
+
|
| 494 |
+
// Simple hash of the content
|
| 495 |
+
let hash = 0;
|
| 496 |
+
for (let i = 0; i < content.length; i++) {
|
| 497 |
+
const char = content.charCodeAt(i);
|
| 498 |
+
hash = ((hash << 5) - hash) + char;
|
| 499 |
+
hash = hash & hash;
|
| 500 |
+
}
|
| 501 |
+
return hash.toString();
|
| 502 |
+
}
|
| 503 |
+
""")
|
| 504 |
+
|
| 505 |
+
# If hash changed, DOM has updated
|
| 506 |
+
if self._last_dom_hash is not None and current_hash != self._last_dom_hash:
|
| 507 |
+
logger.info(f"Portal [{self.provider.value}]: DOM change detected, refreshing screenshot")
|
| 508 |
+
await self.take_screenshot()
|
| 509 |
+
|
| 510 |
+
self._last_dom_hash = current_hash
|
| 511 |
+
|
| 512 |
+
await asyncio.sleep(check_interval)
|
| 513 |
+
|
| 514 |
+
except asyncio.CancelledError:
|
| 515 |
+
break
|
| 516 |
+
except Exception as e:
|
| 517 |
+
logger.debug(f"Portal [{self.provider.value}]: DOM monitoring error: {e}")
|
| 518 |
+
await asyncio.sleep(check_interval)
|
| 519 |
+
|
| 520 |
+
def set_auto_refresh(self, enabled: bool):
|
| 521 |
+
"""Enable or disable auto-refresh on DOM changes."""
|
| 522 |
+
self._auto_refresh_enabled = enabled
|
| 523 |
+
if enabled and self.is_running():
|
| 524 |
+
asyncio.create_task(self.start_dom_monitoring())
|
| 525 |
+
elif not enabled:
|
| 526 |
+
asyncio.create_task(self.stop_dom_monitoring())
|
| 527 |
|
| 528 |
|
| 529 |
# Portal manager
|
config.py
CHANGED
|
@@ -20,114 +20,14 @@ MODEL_RANKING = [
|
|
| 20 |
("zai-glm-5", "zai", "glm-5"),
|
| 21 |
("gemini-gemini-3-flash", "gemini", "gemini-3-flash"),
|
| 22 |
|
| 23 |
-
# Tier 2 β
|
| 24 |
-
("copilot-gpt-4", "copilot", "copilot-gpt-4"),
|
| 25 |
-
|
| 26 |
-
# Tier 3 β HuggingChat Models (Top 20 by popularity/quality)
|
| 27 |
-
("huggingchat-omni", "huggingchat", "omni"),
|
| 28 |
-
("huggingchat-llama-3.3-70b", "huggingchat", "meta-llama/Llama-3.3-70B-Instruct"),
|
| 29 |
-
("huggingchat-llama-4-scout", "huggingchat", "meta-llama/Llama-4-Scout-17B-16E-Instruct"),
|
| 30 |
-
("huggingchat-llama-4-maverick", "huggingchat", "meta-llama/Llama-4-Maverick-17B-128E-Instruct"),
|
| 31 |
-
("huggingchat-kimi-k2.5", "huggingchat", "moonshotai/Kimi-K2.5"),
|
| 32 |
-
("huggingchat-kimi-k2", "huggingchat", "moonshotai/Kimi-K2-Instruct"),
|
| 33 |
-
("huggingchat-qwen3-235b", "huggingchat", "Qwen/Qwen3-235B-A22B"),
|
| 34 |
-
("huggingchat-qwen3-32b", "huggingchat", "Qwen/Qwen3-32B"),
|
| 35 |
-
("huggingchat-qwen3-14b", "huggingchat", "Qwen/Qwen3-14B"),
|
| 36 |
-
("huggingchat-qwen3-8b", "huggingchat", "Qwen/Qwen3-8B"),
|
| 37 |
-
("huggingchat-qwen2.5-72b", "huggingchat", "Qwen/Qwen2.5-72B-Instruct"),
|
| 38 |
-
("huggingchat-qwen2.5-32b", "huggingchat", "Qwen/Qwen2.5-32B-Instruct"),
|
| 39 |
-
("huggingchat-qwen2.5-7b", "huggingchat", "Qwen/Qwen2.5-7B-Instruct"),
|
| 40 |
-
("huggingchat-qwen3-coder-480b", "huggingchat", "Qwen/Qwen3-Coder-480B-A35B-Instruct"),
|
| 41 |
-
("huggingchat-qwen3-coder-30b", "huggingchat", "Qwen/Qwen3-Coder-30B-A3B-Instruct"),
|
| 42 |
-
("huggingchat-deepseek-r1", "huggingchat", "deepseek-ai/DeepSeek-R1"),
|
| 43 |
-
("huggingchat-deepseek-v3", "huggingchat", "deepseek-ai/DeepSeek-V3"),
|
| 44 |
-
("huggingchat-deepseek-v3.2", "huggingchat", "deepseek-ai/DeepSeek-V3.2"),
|
| 45 |
-
("huggingchat-zai-glm-5", "huggingchat", "zai-org/GLM-5"),
|
| 46 |
-
("huggingchat-zai-glm-4.7", "huggingchat", "zai-org/GLM-4.7"),
|
| 47 |
-
("huggingchat-zai-glm-4.5", "huggingchat", "zai-org/GLM-4.5"),
|
| 48 |
-
("huggingchat-minimax-m2.5", "huggingchat", "MiniMaxAI/MiniMax-M2.5"),
|
| 49 |
-
("huggingchat-minimax-m2.1", "huggingchat", "MiniMaxAI/MiniMax-M2.1"),
|
| 50 |
-
("huggingchat-minimax-m2", "huggingchat", "MiniMaxAI/MiniMax-M2"),
|
| 51 |
-
|
| 52 |
-
# Tier 3 β Pollinations
|
| 53 |
("pollinations-gpt-oss-20b", "pollinations", "openai"),
|
| 54 |
("pollinations-mistral-small-3.2", "pollinations", "mistral"),
|
| 55 |
("pollinations-bidara", "pollinations", "bidara"),
|
| 56 |
("pollinations-chickytutor", "pollinations", "chickytutor"),
|
| 57 |
("pollinations-midijourney", "pollinations", "midijourney"),
|
| 58 |
|
| 59 |
-
# Tier
|
| 60 |
-
("huggingchat-llama-3.1-70b", "huggingchat", "meta-llama/Meta-Llama-3-70B-Instruct"),
|
| 61 |
-
("huggingchat-llama-3.1-8b", "huggingchat", "meta-llama/Llama-3.1-8B-Instruct"),
|
| 62 |
-
("huggingchat-llama-3.2-3b", "huggingchat", "meta-llama/Llama-3.2-3B-Instruct"),
|
| 63 |
-
("huggingchat-llama-3.2-1b", "huggingchat", "meta-llama/Llama-3.2-1B-Instruct"),
|
| 64 |
-
("huggingchat-llama-3-8b", "huggingchat", "meta-llama/Meta-Llama-3-8B-Instruct"),
|
| 65 |
-
("huggingchat-qwen3-vl-235b", "huggingchat", "Qwen/Qwen3-VL-235B-A22B-Instruct"),
|
| 66 |
-
("huggingchat-qwen3-vl-32b", "huggingchat", "Qwen/Qwen3-VL-32B-Instruct"),
|
| 67 |
-
("huggingchat-qwen3-vl-30b", "huggingchat", "Qwen/Qwen3-VL-30B-A3B-Instruct"),
|
| 68 |
-
("huggingchat-qwen3-vl-8b", "huggingchat", "Qwen/Qwen3-VL-8B-Instruct"),
|
| 69 |
-
("huggingchat-qwen3-4b", "huggingchat", "Qwen/Qwen3-4B-Instruct-2507"),
|
| 70 |
-
("huggingchat-qwen2.5-vl-72b", "huggingchat", "Qwen/Qwen2.5-VL-72B-Instruct"),
|
| 71 |
-
("huggingchat-qwen2.5-vl-32b", "huggingchat", "Qwen/Qwen2.5-VL-32B-Instruct"),
|
| 72 |
-
("huggingchat-qwen2.5-vl-7b", "huggingchat", "Qwen/Qwen2.5-VL-7B-Instruct"),
|
| 73 |
-
("huggingchat-qwen2.5-coder-32b", "huggingchat", "Qwen/Qwen2.5-Coder-32B-Instruct"),
|
| 74 |
-
("huggingchat-qwen2.5-coder-7b", "huggingchat", "Qwen/Qwen2.5-Coder-7B-Instruct"),
|
| 75 |
-
("huggingchat-qwen2.5-coder-3b", "huggingchat", "Qwen/Qwen2.5-Coder-3B-Instruct"),
|
| 76 |
-
("huggingchat-qwq-32b", "huggingchat", "Qwen/QwQ-32B"),
|
| 77 |
-
("huggingchat-deepseek-r1-distill-qwen-32b", "huggingchat", "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B"),
|
| 78 |
-
("huggingchat-deepseek-r1-distill-qwen-7b", "huggingchat", "deepseek-ai/DeepSeek-R1-Distill-Qwen-7B"),
|
| 79 |
-
("huggingchat-deepseek-r1-distill-llama-70b", "huggingchat", "deepseek-ai/DeepSeek-R1-Distill-Llama-70B"),
|
| 80 |
-
("huggingchat-deepseek-r1-distill-llama-8b", "huggingchat", "deepseek-ai/DeepSeek-R1-Distill-Llama-8B"),
|
| 81 |
-
("huggingchat-gemma-3-27b", "huggingchat", "google/gemma-3-27b-it"),
|
| 82 |
-
("huggingchat-mistral-7b", "huggingchat", "mistralai/Mistral-7B-Instruct-v0.2"),
|
| 83 |
-
("huggingchat-cohere-command-r", "huggingchat", "CohereLabs/c4ai-command-r-08-2024"),
|
| 84 |
-
("huggingchat-cohere-command-a", "huggingchat", "CohereLabs/c4ai-command-a-03-2025"),
|
| 85 |
-
("huggingchat-olmo-3-32b", "huggingchat", "allenai/Olmo-3.1-32B-Instruct"),
|
| 86 |
-
("huggingchat-olmo-3-7b", "huggingchat", "allenai/Olmo-3-7B-Instruct"),
|
| 87 |
-
("huggingchat-olmo-3-7b-think", "huggingchat", "allenai/Olmo-3-7B-Think"),
|
| 88 |
-
("huggingchat-saol10k-l3-70b", "huggingchat", "Sao10K/L3-70B-Euryale-v2.1"),
|
| 89 |
-
("huggingchat-saol10k-l3-8b", "huggingchat", "Sao10K/L3-8B-Stheno-v3.2"),
|
| 90 |
-
("huggingchat-wizardlm-2-8x22b", "huggingchat", "alpindale/WizardLM-2-8x22B"),
|
| 91 |
-
("huggingchat-cogito-671b", "huggingchat", "deepcogito/cogito-671b-v2.1"),
|
| 92 |
-
("huggingchat-gpt-oss-120b", "huggingchat", "openai/gpt-oss-120b"),
|
| 93 |
-
("huggingchat-gpt-oss-20b", "huggingchat", "openai/gpt-oss-20b"),
|
| 94 |
-
("huggingchat-minimax-m1-80k", "huggingchat", "MiniMaxAI/MiniMax-M1-80k"),
|
| 95 |
-
("huggingchat-zai-autoglm-phone-9b", "huggingchat", "zai-org/AutoGLM-Phone-9B-Multilingual"),
|
| 96 |
-
("huggingchat-zai-glm-4.7-fp8", "huggingchat", "zai-org/GLM-4.7-FP8"),
|
| 97 |
-
("huggingchat-zai-glm-4.6v", "huggingchat", "zai-org/GLM-4.6V"),
|
| 98 |
-
("huggingchat-zai-glm-4.6v-fp8", "huggingchat", "zai-org/GLM-4.6V-FP8"),
|
| 99 |
-
("huggingchat-zai-glm-4.6v-flash", "huggingchat", "zai-org/GLM-4.6V-Flash"),
|
| 100 |
-
("huggingchat-zai-glm-4.5-air", "huggingchat", "zai-org/GLM-4.5-Air"),
|
| 101 |
-
("huggingchat-zai-glm-4.5-air-fp8", "huggingchat", "zai-org/GLM-4.5-Air-FP8"),
|
| 102 |
-
("huggingchat-zai-glm-4.5v", "huggingchat", "zai-org/GLM-4.5V"),
|
| 103 |
-
("huggingchat-zai-glm-4.5v-fp8", "huggingchat", "zai-org/GLM-4.5V-FP8"),
|
| 104 |
-
("huggingchat-zai-glm-4.6", "huggingchat", "zai-org/GLM-4.6"),
|
| 105 |
-
("huggingchat-zai-glm-4.6-fp8", "huggingchat", "zai-org/GLM-4.6-FP8"),
|
| 106 |
-
("huggingchat-zai-glm-4-32b", "huggingchat", "zai-org/GLM-4-32B-0414"),
|
| 107 |
-
("huggingchat-nvidia-nemotron-nano-9b", "huggingchat", "nvidia/NVIDIA-Nemotron-Nano-9B-v2"),
|
| 108 |
-
("huggingchat-mimo-v2-flash", "huggingchat", "XiaomiMiMo/MiMo-V2-Flash"),
|
| 109 |
-
("huggingchat-eurollm-22b", "huggingchat", "utter-project/EuroLLM-22B-Instruct-2512"),
|
| 110 |
-
("huggingchat-trinity-mini", "huggingchat", "arcee-ai/Trinity-Mini"),
|
| 111 |
-
("huggingchat-apriel-15b-thinker", "huggingchat", "ServiceNow-AI/Apriel-1.6-15b-Thinker"),
|
| 112 |
-
("huggingchat-arch-router-1.5b", "huggingchat", "katanemo/Arch-Router-1.5B"),
|
| 113 |
-
("huggingchat-smollm3-3b", "huggingchat", "HuggingFaceTB/SmolLM3-3B"),
|
| 114 |
-
("huggingchat-hermes-2-pro-llama-3-8b", "huggingchat", "NousResearch/Hermes-2-Pro-Llama-3-8B"),
|
| 115 |
-
("huggingchat-aya-expanse-32b", "huggingchat", "CohereLabs/aya-expanse-32b"),
|
| 116 |
-
("huggingchat-aya-vision-32b", "huggingchat", "CohereLabs/aya-vision-32b"),
|
| 117 |
-
("huggingchat-gemma-sea-lion-v4-27b", "huggingchat", "aisingapore/Gemma-SEA-LION-v4-27B-IT"),
|
| 118 |
-
("huggingchat-qwen-sea-lion-v4-32b", "huggingchat", "aisingapore/Qwen-SEA-LION-v4-32B-IT"),
|
| 119 |
-
("huggingchat-dictalm-3.0-24b", "huggingchat", "dicta-il/DictaLM-3.0-24B-Thinking"),
|
| 120 |
-
("huggingchat-apertus-8b", "huggingchat", "swiss-ai/Apertus-8B-Instruct-2509"),
|
| 121 |
-
("huggingchat-swallow-70b", "huggingchat", "tokyotech-llm/Llama-3.3-Swallow-70B-Instruct-v0.4"),
|
| 122 |
-
("huggingchat-marin-8b", "huggingchat", "marin-community/marin-8b-instruct"),
|
| 123 |
-
("huggingchat-ernie-4.5-vl-424b", "huggingchat", "baidu/ERNIE-4.5-VL-424B-A47B-Base-PT"),
|
| 124 |
-
("huggingchat-ernie-4.5-vl-28b", "huggingchat", "baidu/ERNIE-4.5-VL-28B-A3B-PT"),
|
| 125 |
-
("huggingchat-ernie-4.5-300b", "huggingchat", "baidu/ERNIE-4.5-300B-A47B-Base-PT"),
|
| 126 |
-
("huggingchat-ernie-4.5-21b", "huggingchat", "baidu/ERNIE-4.5-21B-A3B-PT"),
|
| 127 |
-
("huggingchat-rnj-1-instruct", "huggingchat", "EssentialAI/rnj-1-instruct"),
|
| 128 |
-
("huggingchat-l3-8b-lunaris", "huggingchat", "Sao10K/L3-8B-Lunaris-v1"),
|
| 129 |
-
|
| 130 |
-
# Tier 5 β G4F Fallback Models
|
| 131 |
("g4f-gpt-3.5-turbo", "g4f", "gpt-3.5-turbo"),
|
| 132 |
("g4f-claude-3-haiku", "g4f", "claude-3-haiku"),
|
| 133 |
("g4f-mixtral-8x7b", "g4f", "mixtral-8x7b"),
|
|
@@ -162,6 +62,18 @@ POLLINATIONS_MODEL_NAMES = {
|
|
| 162 |
"midijourney": "midijourney",
|
| 163 |
}
|
| 164 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 165 |
# API Keys
|
| 166 |
DEMO_API_KEY = "sk-kai-demo-public"
|
| 167 |
|
|
@@ -181,160 +93,6 @@ PROVIDER_MODELS = {
|
|
| 181 |
"gemini": [
|
| 182 |
"gemini-gemini-3-flash",
|
| 183 |
],
|
| 184 |
-
"copilot": [
|
| 185 |
-
"copilot-gpt-4",
|
| 186 |
-
],
|
| 187 |
-
"huggingchat": [
|
| 188 |
-
# Top Tier
|
| 189 |
-
"huggingchat-omni",
|
| 190 |
-
"huggingchat-llama-3.3-70b",
|
| 191 |
-
"huggingchat-llama-4-scout",
|
| 192 |
-
"huggingchat-llama-4-maverick",
|
| 193 |
-
"huggingchat-llama-3.1-70b",
|
| 194 |
-
"huggingchat-llama-3.1-8b",
|
| 195 |
-
"huggingchat-llama-3.2-3b",
|
| 196 |
-
"huggingchat-llama-3.2-1b",
|
| 197 |
-
"huggingchat-llama-3-8b",
|
| 198 |
-
"huggingchat-llama-3-70b",
|
| 199 |
-
|
| 200 |
-
# Kimi Models
|
| 201 |
-
"huggingchat-kimi-k2.5",
|
| 202 |
-
"huggingchat-kimi-k2",
|
| 203 |
-
"huggingchat-kimi-k2-thinking",
|
| 204 |
-
"huggingchat-kimi-k2-instruct-0905",
|
| 205 |
-
|
| 206 |
-
# Qwen3 Models (Large)
|
| 207 |
-
"huggingchat-qwen3-235b",
|
| 208 |
-
"huggingchat-qwen3-32b",
|
| 209 |
-
"huggingchat-qwen3-14b",
|
| 210 |
-
"huggingchat-qwen3-8b",
|
| 211 |
-
"huggingchat-qwen3-4b",
|
| 212 |
-
|
| 213 |
-
# Qwen3 Vision Models
|
| 214 |
-
"huggingchat-qwen3-vl-235b",
|
| 215 |
-
"huggingchat-qwen3-vl-32b",
|
| 216 |
-
"huggingchat-qwen3-vl-30b",
|
| 217 |
-
"huggingchat-qwen3-vl-8b",
|
| 218 |
-
|
| 219 |
-
# Qwen2.5 Models
|
| 220 |
-
"huggingchat-qwen2.5-72b",
|
| 221 |
-
"huggingchat-qwen2.5-32b",
|
| 222 |
-
"huggingchat-qwen2.5-7b",
|
| 223 |
-
"huggingchat-qwen2.5-vl-72b",
|
| 224 |
-
"huggingchat-qwen2.5-vl-32b",
|
| 225 |
-
"huggingchat-qwen2.5-vl-7b",
|
| 226 |
-
"huggingchat-qwen2.5-coder-32b",
|
| 227 |
-
"huggingchat-qwen2.5-coder-7b",
|
| 228 |
-
"huggingchat-qwen2.5-coder-3b",
|
| 229 |
-
|
| 230 |
-
# Qwen Coder Models
|
| 231 |
-
"huggingchat-qwen3-coder-480b",
|
| 232 |
-
"huggingchat-qwen3-coder-30b",
|
| 233 |
-
"huggingchat-qwen3-coder-next",
|
| 234 |
-
"huggingchat-qwen3-coder-next-fp8",
|
| 235 |
-
|
| 236 |
-
# Qwen Thinking/Reasoning
|
| 237 |
-
"huggingchat-qwq-32b",
|
| 238 |
-
"huggingchat-qwen3-4b-thinking",
|
| 239 |
-
"huggingchat-qwen3-vl-235b-thinking",
|
| 240 |
-
"huggingchat-qwen3-vl-30b-thinking",
|
| 241 |
-
"huggingchat-qwen3-next-80b",
|
| 242 |
-
"huggingchat-qwen3-next-80b-thinking",
|
| 243 |
-
|
| 244 |
-
# DeepSeek Models
|
| 245 |
-
"huggingchat-deepseek-r1",
|
| 246 |
-
"huggingchat-deepseek-v3",
|
| 247 |
-
"huggingchat-deepseek-v3.2",
|
| 248 |
-
"huggingchat-deepseek-v3.2-exp",
|
| 249 |
-
"huggingchat-deepseek-r1-0528",
|
| 250 |
-
"huggingchat-deepseek-prover-v2-671b",
|
| 251 |
-
"huggingchat-deepseek-r1-distill-qwen-32b",
|
| 252 |
-
"huggingchat-deepseek-r1-distill-qwen-7b",
|
| 253 |
-
"huggingchat-deepseek-r1-distill-qwen-1.5b",
|
| 254 |
-
"huggingchat-deepseek-r1-distill-llama-70b",
|
| 255 |
-
"huggingchat-deepseek-r1-distill-llama-8b",
|
| 256 |
-
|
| 257 |
-
# Z.ai GLM Models
|
| 258 |
-
"huggingchat-zai-glm-5",
|
| 259 |
-
"huggingchat-zai-glm-4.7",
|
| 260 |
-
"huggingchat-zai-glm-4.7-fp8",
|
| 261 |
-
"huggingchat-zai-glm-4.7-flash",
|
| 262 |
-
"huggingchat-zai-glm-4.6v",
|
| 263 |
-
"huggingchat-zai-glm-4.6v-fp8",
|
| 264 |
-
"huggingchat-zai-glm-4.6v-flash",
|
| 265 |
-
"huggingchat-zai-glm-4.6",
|
| 266 |
-
"huggingchat-zai-glm-4.6-fp8",
|
| 267 |
-
"huggingchat-zai-glm-4.5",
|
| 268 |
-
"huggingchat-zai-glm-4.5-air",
|
| 269 |
-
"huggingchat-zai-glm-4.5-air-fp8",
|
| 270 |
-
"huggingchat-zai-glm-4.5v",
|
| 271 |
-
"huggingchat-zai-glm-4.5v-fp8",
|
| 272 |
-
"huggingchat-zai-glm-4-32b",
|
| 273 |
-
"huggingchat-zai-autoglm-phone-9b",
|
| 274 |
-
|
| 275 |
-
# MiniMax Models
|
| 276 |
-
"huggingchat-minimax-m2.5",
|
| 277 |
-
"huggingchat-minimax-m2.1",
|
| 278 |
-
"huggingchat-minimax-m2",
|
| 279 |
-
"huggingchat-minimax-m1-80k",
|
| 280 |
-
|
| 281 |
-
# Google Models
|
| 282 |
-
"huggingchat-gemma-3-27b",
|
| 283 |
-
"huggingchat-gemma-3n-e4b",
|
| 284 |
-
|
| 285 |
-
# Mistral
|
| 286 |
-
"huggingchat-mistral-7b",
|
| 287 |
-
|
| 288 |
-
# Cohere
|
| 289 |
-
"huggingchat-cohere-command-r",
|
| 290 |
-
"huggingchat-cohere-command-a",
|
| 291 |
-
"huggingchat-cohere-command-r7b",
|
| 292 |
-
"huggingchat-cohere-command-r7b-arabic",
|
| 293 |
-
"huggingchat-cohere-command-a-vision",
|
| 294 |
-
"huggingchat-cohere-command-a-reasoning",
|
| 295 |
-
"huggingchat-cohere-command-a-translate",
|
| 296 |
-
"huggingchat-cohere-aya-expanse-32b",
|
| 297 |
-
"huggingchat-cohere-aya-vision-32b",
|
| 298 |
-
|
| 299 |
-
# Allen AI (OLMo)
|
| 300 |
-
"huggingchat-olmo-3-32b",
|
| 301 |
-
"huggingchat-olmo-3-7b",
|
| 302 |
-
"huggingchat-olmo-3-7b-think",
|
| 303 |
-
|
| 304 |
-
# Sao10K
|
| 305 |
-
"huggingchat-saol10k-l3-70b",
|
| 306 |
-
"huggingchat-saol10k-l3-8b",
|
| 307 |
-
"huggingchat-l3-8b-lunaris",
|
| 308 |
-
|
| 309 |
-
# Other Notable Models
|
| 310 |
-
"huggingchat-wizardlm-2-8x22b",
|
| 311 |
-
"huggingchat-cogito-671b",
|
| 312 |
-
"huggingchat-cogito-671b-fp8",
|
| 313 |
-
"huggingchat-gpt-oss-120b",
|
| 314 |
-
"huggingchat-gpt-oss-20b",
|
| 315 |
-
"huggingchat-gpt-oss-safeguard-20b",
|
| 316 |
-
"huggingchat-nvidia-nemotron-nano-9b",
|
| 317 |
-
"huggingchat-mimo-v2-flash",
|
| 318 |
-
"huggingchat-eurollm-22b",
|
| 319 |
-
"huggingchat-trinity-mini",
|
| 320 |
-
"huggingchat-apriel-15b-thinker",
|
| 321 |
-
"huggingchat-arch-router-1.5b",
|
| 322 |
-
"huggingchat-smollm3-3b",
|
| 323 |
-
"huggingchat-hermes-2-pro-llama-3-8b",
|
| 324 |
-
"huggingchat-dictalm-3.0-24b",
|
| 325 |
-
"huggingchat-apertus-8b",
|
| 326 |
-
"huggingchat-swallow-70b",
|
| 327 |
-
"huggingchat-marin-8b",
|
| 328 |
-
"huggingchat-ernie-4.5-vl-424b",
|
| 329 |
-
"huggingchat-ernie-4.5-vl-28b",
|
| 330 |
-
"huggingchat-ernie-4.5-300b",
|
| 331 |
-
"huggingchat-ernie-4.5-21b",
|
| 332 |
-
"huggingchat-rnj-1-instruct",
|
| 333 |
-
|
| 334 |
-
# Sea Lion (Singapore)
|
| 335 |
-
"huggingchat-gemma-sea-lion-v4-27b",
|
| 336 |
-
"huggingchat-qwen-sea-lion-v4-32b",
|
| 337 |
-
],
|
| 338 |
"pollinations": [
|
| 339 |
"pollinations-gpt-oss-20b",
|
| 340 |
"pollinations-mistral-small-3.2",
|
|
|
|
| 20 |
("zai-glm-5", "zai", "glm-5"),
|
| 21 |
("gemini-gemini-3-flash", "gemini", "gemini-3-flash"),
|
| 22 |
|
| 23 |
+
# Tier 2 β Pollinations
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 24 |
("pollinations-gpt-oss-20b", "pollinations", "openai"),
|
| 25 |
("pollinations-mistral-small-3.2", "pollinations", "mistral"),
|
| 26 |
("pollinations-bidara", "pollinations", "bidara"),
|
| 27 |
("pollinations-chickytutor", "pollinations", "chickytutor"),
|
| 28 |
("pollinations-midijourney", "pollinations", "midijourney"),
|
| 29 |
|
| 30 |
+
# Tier 3 β G4F Fallback Models
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 31 |
("g4f-gpt-3.5-turbo", "g4f", "gpt-3.5-turbo"),
|
| 32 |
("g4f-claude-3-haiku", "g4f", "claude-3-haiku"),
|
| 33 |
("g4f-mixtral-8x7b", "g4f", "mixtral-8x7b"),
|
|
|
|
| 62 |
"midijourney": "midijourney",
|
| 63 |
}
|
| 64 |
|
| 65 |
+
# Provider Configuration - Enable/Disable providers
|
| 66 |
+
# These can be toggled via admin panel
|
| 67 |
+
PROVIDERS = {
|
| 68 |
+
"g4f": {"enabled": True, "name": "G4F (Free GPT-4)", "type": "api"},
|
| 69 |
+
"zai": {"enabled": True, "name": "Z.ai (GLM-5)", "type": "api"},
|
| 70 |
+
"gemini": {"enabled": True, "name": "Google Gemini", "type": "api"},
|
| 71 |
+
"pollinations": {"enabled": True, "name": "Pollinations", "type": "api"},
|
| 72 |
+
"huggingchat": {"enabled": True, "name": "HuggingChat", "type": "browser"},
|
| 73 |
+
"copilot": {"enabled": False, "name": "Microsoft Copilot", "type": "browser"},
|
| 74 |
+
"chatgpt": {"enabled": False, "name": "ChatGPT", "type": "browser"},
|
| 75 |
+
}
|
| 76 |
+
|
| 77 |
# API Keys
|
| 78 |
DEMO_API_KEY = "sk-kai-demo-public"
|
| 79 |
|
|
|
|
| 93 |
"gemini": [
|
| 94 |
"gemini-gemini-3-flash",
|
| 95 |
],
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 96 |
"pollinations": [
|
| 97 |
"pollinations-gpt-oss-20b",
|
| 98 |
"pollinations-mistral-small-3.2",
|
engine.py
CHANGED
|
@@ -143,13 +143,35 @@ class AIEngine:
|
|
| 143 |
return self._providers
|
| 144 |
|
| 145 |
def get_all_models(self) -> list[ModelInfo]:
|
|
|
|
|
|
|
|
|
|
| 146 |
models = []
|
| 147 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 148 |
for model_name in provider.get_available_models():
|
| 149 |
models.append(
|
| 150 |
ModelInfo(model=model_name, provider=provider.name)
|
| 151 |
)
|
| 152 |
return models
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 153 |
|
| 154 |
def _get_score(self, key: str) -> float:
|
| 155 |
"""
|
|
@@ -343,18 +365,40 @@ class AIEngine:
|
|
| 343 |
) -> dict:
|
| 344 |
"""
|
| 345 |
Send a chat message with adaptive fallback.
|
|
|
|
| 346 |
"""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 347 |
|
| 348 |
# Strict Validation
|
| 349 |
if model == "auto":
|
| 350 |
model = None
|
| 351 |
|
| 352 |
-
if provider != "auto"
|
| 353 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 354 |
|
| 355 |
if model:
|
| 356 |
-
# Check if model is a known friendly name OR a valid provider model ID
|
| 357 |
-
is_valid = (model in
|
| 358 |
if not is_valid:
|
| 359 |
# Also check strict provider/model combos if provider is set
|
| 360 |
if provider != "auto":
|
|
@@ -402,10 +446,14 @@ class AIEngine:
|
|
| 402 |
|
| 403 |
# STRICT MODE: Specific Provider + Any Model
|
| 404 |
# Walk this provider's models (sorted by score for this provider)
|
|
|
|
|
|
|
|
|
|
|
|
|
| 405 |
provider_entries = [
|
| 406 |
(fn, pn, pid)
|
| 407 |
for fn, pn, pid in MODEL_RANKING
|
| 408 |
-
if pn == provider
|
| 409 |
]
|
| 410 |
provider_entries.sort(
|
| 411 |
key=lambda x: self._get_score(f"{x[1]}/{x[2]}"),
|
|
@@ -437,15 +485,15 @@ class AIEngine:
|
|
| 437 |
# 1. Identify candidates (provider, model_id)
|
| 438 |
candidates = []
|
| 439 |
|
| 440 |
-
# Is it a friendly name?
|
| 441 |
for fn, pn, pid in MODEL_RANKING:
|
| 442 |
-
if fn == model:
|
| 443 |
candidates.append((pn, pid))
|
| 444 |
|
| 445 |
-
# If no friendly match, maybe it's a direct ID?
|
| 446 |
if not candidates:
|
| 447 |
for prov_name, prov in self._providers.items():
|
| 448 |
-
if model in prov.get_available_models():
|
| 449 |
candidates.append((prov_name, model))
|
| 450 |
|
| 451 |
if not candidates:
|
|
@@ -474,8 +522,17 @@ class AIEngine:
|
|
| 474 |
raise ValueError(f"Strict Mode: Model '{model}' failed on available providers: {errors}")
|
| 475 |
|
| 476 |
# Case 3: Global Adaptive Fallback
|
| 477 |
-
# Use the PERSISTENTLY SORTED ranking
|
| 478 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 479 |
|
| 480 |
# === "FALLEN GIANT" EXPLORATION (10% Chance) ===
|
| 481 |
# Goal: Give "Better" models a "Fair Chance" if they are currently failing.
|
|
@@ -485,7 +542,7 @@ class AIEngine:
|
|
| 485 |
# 1. Identify "Tier 1" models (The Giants)
|
| 486 |
# These are the first 5 models in the static configuration.
|
| 487 |
# We assume the config is ordered by "Intrinsic Quality".
|
| 488 |
-
tier1_models = MODEL_RANKING[:5]
|
| 489 |
tier1_keys = {f"{m[1]}/{m[2]}" for m in tier1_models}
|
| 490 |
|
| 491 |
# 2. Find a Giant that has fallen (is not in the top 3 of current ranking)
|
|
|
|
| 143 |
return self._providers
|
| 144 |
|
| 145 |
def get_all_models(self) -> list[ModelInfo]:
|
| 146 |
+
"""Get all models from enabled providers only."""
|
| 147 |
+
from provider_state import get_provider_state_manager_sync
|
| 148 |
+
|
| 149 |
models = []
|
| 150 |
+
state_manager = get_provider_state_manager_sync()
|
| 151 |
+
enabled_providers = state_manager.get_enabled_provider_ids()
|
| 152 |
+
|
| 153 |
+
for provider_id, provider in self._providers.items():
|
| 154 |
+
# Only include models from enabled providers
|
| 155 |
+
if provider_id not in enabled_providers:
|
| 156 |
+
continue
|
| 157 |
+
|
| 158 |
for model_name in provider.get_available_models():
|
| 159 |
models.append(
|
| 160 |
ModelInfo(model=model_name, provider=provider.name)
|
| 161 |
)
|
| 162 |
return models
|
| 163 |
+
|
| 164 |
+
def get_enabled_providers(self) -> dict[str, BaseProvider]:
|
| 165 |
+
"""Get only enabled providers."""
|
| 166 |
+
from provider_state import get_provider_state_manager_sync
|
| 167 |
+
|
| 168 |
+
state_manager = get_provider_state_manager_sync()
|
| 169 |
+
enabled_ids = state_manager.get_enabled_provider_ids()
|
| 170 |
+
|
| 171 |
+
return {
|
| 172 |
+
k: v for k, v in self._providers.items()
|
| 173 |
+
if k in enabled_ids
|
| 174 |
+
}
|
| 175 |
|
| 176 |
def _get_score(self, key: str) -> float:
|
| 177 |
"""
|
|
|
|
| 365 |
) -> dict:
|
| 366 |
"""
|
| 367 |
Send a chat message with adaptive fallback.
|
| 368 |
+
Only uses enabled providers.
|
| 369 |
"""
|
| 370 |
+
from provider_state import get_provider_state_manager_sync
|
| 371 |
+
|
| 372 |
+
# Get enabled providers
|
| 373 |
+
state_manager = get_provider_state_manager_sync()
|
| 374 |
+
enabled_providers = state_manager.get_enabled_provider_ids()
|
| 375 |
+
|
| 376 |
+
# Build valid sets from enabled providers only
|
| 377 |
+
valid_enabled_providers = set(self._providers.keys()) & set(enabled_providers)
|
| 378 |
+
|
| 379 |
+
# Build valid models from enabled providers only
|
| 380 |
+
valid_enabled_friendly_models = set()
|
| 381 |
+
valid_enabled_provider_models = set()
|
| 382 |
+
for fn, pn, pid in MODEL_RANKING:
|
| 383 |
+
if pn in enabled_providers:
|
| 384 |
+
valid_enabled_friendly_models.add(fn)
|
| 385 |
+
valid_enabled_provider_models.add(pid)
|
| 386 |
+
valid_enabled_provider_models.add(f"{pn}/{pid}")
|
| 387 |
|
| 388 |
# Strict Validation
|
| 389 |
if model == "auto":
|
| 390 |
model = None
|
| 391 |
|
| 392 |
+
if provider != "auto":
|
| 393 |
+
if provider not in valid_enabled_providers:
|
| 394 |
+
if provider in self._providers:
|
| 395 |
+
raise ValueError(f"Provider '{provider}' is currently disabled.")
|
| 396 |
+
else:
|
| 397 |
+
raise ValueError(f"Unknown provider '{provider}'. Available: {list(valid_enabled_providers)}")
|
| 398 |
|
| 399 |
if model:
|
| 400 |
+
# Check if model is a known friendly name OR a valid provider model ID (from enabled providers only)
|
| 401 |
+
is_valid = (model in valid_enabled_friendly_models) or (model in valid_enabled_provider_models)
|
| 402 |
if not is_valid:
|
| 403 |
# Also check strict provider/model combos if provider is set
|
| 404 |
if provider != "auto":
|
|
|
|
| 446 |
|
| 447 |
# STRICT MODE: Specific Provider + Any Model
|
| 448 |
# Walk this provider's models (sorted by score for this provider)
|
| 449 |
+
# Only include if provider is enabled
|
| 450 |
+
if provider not in enabled_providers:
|
| 451 |
+
raise ValueError(f"Provider '{provider}' is currently disabled.")
|
| 452 |
+
|
| 453 |
provider_entries = [
|
| 454 |
(fn, pn, pid)
|
| 455 |
for fn, pn, pid in MODEL_RANKING
|
| 456 |
+
if pn == provider and pn in enabled_providers
|
| 457 |
]
|
| 458 |
provider_entries.sort(
|
| 459 |
key=lambda x: self._get_score(f"{x[1]}/{x[2]}"),
|
|
|
|
| 485 |
# 1. Identify candidates (provider, model_id)
|
| 486 |
candidates = []
|
| 487 |
|
| 488 |
+
# Is it a friendly name? (Only from enabled providers)
|
| 489 |
for fn, pn, pid in MODEL_RANKING:
|
| 490 |
+
if fn == model and pn in enabled_providers:
|
| 491 |
candidates.append((pn, pid))
|
| 492 |
|
| 493 |
+
# If no friendly match, maybe it's a direct ID? (Only from enabled providers)
|
| 494 |
if not candidates:
|
| 495 |
for prov_name, prov in self._providers.items():
|
| 496 |
+
if prov_name in enabled_providers and model in prov.get_available_models():
|
| 497 |
candidates.append((prov_name, model))
|
| 498 |
|
| 499 |
if not candidates:
|
|
|
|
| 522 |
raise ValueError(f"Strict Mode: Model '{model}' failed on available providers: {errors}")
|
| 523 |
|
| 524 |
# Case 3: Global Adaptive Fallback
|
| 525 |
+
# Use the PERSISTENTLY SORTED ranking (filtered to enabled providers only)
|
| 526 |
+
full_adaptive_ranking = self._get_sorted_ranking()
|
| 527 |
+
|
| 528 |
+
# Filter to only enabled providers
|
| 529 |
+
adaptive_ranking = [
|
| 530 |
+
(fn, pn, pid) for fn, pn, pid in full_adaptive_ranking
|
| 531 |
+
if pn in enabled_providers
|
| 532 |
+
]
|
| 533 |
+
|
| 534 |
+
if not adaptive_ranking:
|
| 535 |
+
raise ValueError("No providers are currently enabled. Please enable at least one provider.")
|
| 536 |
|
| 537 |
# === "FALLEN GIANT" EXPLORATION (10% Chance) ===
|
| 538 |
# Goal: Give "Better" models a "Fair Chance" if they are currently failing.
|
|
|
|
| 542 |
# 1. Identify "Tier 1" models (The Giants)
|
| 543 |
# These are the first 5 models in the static configuration.
|
| 544 |
# We assume the config is ordered by "Intrinsic Quality".
|
| 545 |
+
tier1_models = [m for m in MODEL_RANKING[:5] if m[1] in enabled_providers]
|
| 546 |
tier1_keys = {f"{m[1]}/{m[2]}" for m in tier1_models}
|
| 547 |
|
| 548 |
# 2. Find a Giant that has fallen (is not in the top 3 of current ranking)
|
proxy_manager.py
CHANGED
|
@@ -1,15 +1,13 @@
|
|
| 1 |
"""
|
| 2 |
-
|
| 3 |
-
---------------------------------
|
| 4 |
-
|
| 5 |
-
Automatically tests proxies and keeps ONLY working ones.
|
| 6 |
"""
|
| 7 |
|
| 8 |
import asyncio
|
| 9 |
import aiohttp
|
| 10 |
-
import random
|
| 11 |
import logging
|
| 12 |
-
from typing import Optional,
|
| 13 |
from dataclasses import dataclass
|
| 14 |
from datetime import datetime
|
| 15 |
|
|
@@ -20,7 +18,7 @@ class Proxy:
|
|
| 20 |
"""Represents a proxy server."""
|
| 21 |
ip: str
|
| 22 |
port: int
|
| 23 |
-
country: str
|
| 24 |
protocol: str = "http"
|
| 25 |
last_tested: Optional[datetime] = None
|
| 26 |
is_working: bool = False
|
|
@@ -34,233 +32,125 @@ class Proxy:
|
|
| 34 |
return {
|
| 35 |
"server": f"{self.protocol}://{self.ip}:{self.port}",
|
| 36 |
}
|
| 37 |
-
|
| 38 |
-
def __hash__(self):
|
| 39 |
-
return hash((self.ip, self.port))
|
| 40 |
-
|
| 41 |
-
def __eq__(self, other):
|
| 42 |
-
if isinstance(other, Proxy):
|
| 43 |
-
return self.ip == other.ip and self.port == other.port
|
| 44 |
-
return False
|
| 45 |
|
| 46 |
|
| 47 |
-
class
|
| 48 |
-
"""Manages
|
| 49 |
|
| 50 |
def __init__(self):
|
| 51 |
-
self.
|
| 52 |
-
self.
|
| 53 |
-
self.current_proxy: Optional[Proxy] = None
|
| 54 |
|
| 55 |
-
|
| 56 |
"""
|
| 57 |
-
|
| 58 |
-
|
| 59 |
"""
|
| 60 |
-
logger.info("π Fetching fresh proxies...")
|
| 61 |
-
|
| 62 |
-
# Fetch from multiple sources
|
| 63 |
-
all_proxies = await self._fetch_from_sources()
|
| 64 |
-
|
| 65 |
-
if not all_proxies:
|
| 66 |
-
logger.error("β No proxies fetched from any source")
|
| 67 |
-
return []
|
| 68 |
-
|
| 69 |
-
logger.info(f"π Fetched {len(all_proxies)} total proxies, testing up to {max_test}...")
|
| 70 |
-
|
| 71 |
-
# Shuffle for variety
|
| 72 |
-
random.shuffle(all_proxies)
|
| 73 |
-
|
| 74 |
-
# Test proxies and collect working ones
|
| 75 |
-
working = []
|
| 76 |
-
tested = 0
|
| 77 |
-
|
| 78 |
-
for proxy in all_proxies:
|
| 79 |
-
if tested >= max_test:
|
| 80 |
-
break
|
| 81 |
-
|
| 82 |
-
tested += 1
|
| 83 |
-
logger.info(f"π§ͺ Testing proxy {tested}/{max_test}: {proxy.ip}:{proxy.port}")
|
| 84 |
-
|
| 85 |
-
if await self._test_proxy_quick(proxy):
|
| 86 |
-
working.append(proxy)
|
| 87 |
-
logger.info(f"β
WORKING! ({proxy.response_time:.2f}s) - Total working: {len(working)}")
|
| 88 |
-
|
| 89 |
-
# Stop once we have enough working proxies
|
| 90 |
-
if len(working) >= 5:
|
| 91 |
-
logger.info("β¨ Found 5 working proxies, stopping tests")
|
| 92 |
-
break
|
| 93 |
-
else:
|
| 94 |
-
logger.debug(f"β Dead proxy: {proxy.ip}:{proxy.port}")
|
| 95 |
-
|
| 96 |
-
# REPLACE the list with ONLY working proxies
|
| 97 |
-
self.working_proxies = working
|
| 98 |
-
self.current_proxy_index = 0
|
| 99 |
-
|
| 100 |
-
if working:
|
| 101 |
-
self.current_proxy = working[0]
|
| 102 |
-
logger.info(f"π― Kept {len(working)} WORKING proxies out of {tested} tested")
|
| 103 |
-
else:
|
| 104 |
-
self.current_proxy = None
|
| 105 |
-
logger.warning("β οΈ No working proxies found!")
|
| 106 |
-
|
| 107 |
-
return working
|
| 108 |
-
|
| 109 |
-
async def _fetch_from_sources(self) -> List[Proxy]:
|
| 110 |
-
"""Fetch proxies from multiple free sources."""
|
| 111 |
-
proxies = []
|
| 112 |
-
|
| 113 |
-
# Source 1: Free proxy list (HTTP)
|
| 114 |
-
try:
|
| 115 |
-
async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=15)) as session:
|
| 116 |
-
async with session.get(
|
| 117 |
-
"https://raw.githubusercontent.com/TheSpeedX/PROXY-List/master/http.txt"
|
| 118 |
-
) as response:
|
| 119 |
-
if response.status == 200:
|
| 120 |
-
text = await response.text()
|
| 121 |
-
lines = text.strip().split('\n')
|
| 122 |
-
for line in lines[:100]: # Limit to first 100
|
| 123 |
-
if ':' in line:
|
| 124 |
-
parts = line.strip().split(':')
|
| 125 |
-
if len(parts) >= 2:
|
| 126 |
-
try:
|
| 127 |
-
proxy = Proxy(
|
| 128 |
-
ip=parts[0],
|
| 129 |
-
port=int(parts[1]),
|
| 130 |
-
country='Unknown',
|
| 131 |
-
protocol='http'
|
| 132 |
-
)
|
| 133 |
-
proxies.append(proxy)
|
| 134 |
-
except:
|
| 135 |
-
pass
|
| 136 |
-
logger.info(f"β
Source 1: Got {len(proxies)} proxies")
|
| 137 |
-
except Exception as e:
|
| 138 |
-
logger.warning(f"β Source 1 failed: {e}")
|
| 139 |
-
|
| 140 |
-
# Source 2: Alternative proxy list
|
| 141 |
try:
|
| 142 |
-
|
| 143 |
-
|
| 144 |
-
|
| 145 |
-
|
| 146 |
-
|
| 147 |
-
|
| 148 |
-
|
| 149 |
-
|
| 150 |
-
|
| 151 |
-
|
| 152 |
-
|
| 153 |
-
|
| 154 |
-
|
| 155 |
-
|
| 156 |
-
|
| 157 |
-
|
| 158 |
-
|
| 159 |
-
|
| 160 |
-
|
| 161 |
-
|
| 162 |
-
|
| 163 |
-
|
| 164 |
-
|
| 165 |
-
|
| 166 |
-
|
| 167 |
-
|
| 168 |
-
|
| 169 |
-
|
| 170 |
-
# Source 3:geonode free proxies API
|
| 171 |
-
try:
|
| 172 |
-
url = "https://proxylist.geonode.com/api/proxy-list?limit=100&page=1&sort_by=lastChecked&sort_type=desc&protocols=http"
|
| 173 |
-
async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=15)) as session:
|
| 174 |
-
async with session.get(url) as response:
|
| 175 |
-
if response.status == 200:
|
| 176 |
-
data = await response.json()
|
| 177 |
-
new_count = 0
|
| 178 |
-
for item in data.get('data', []):
|
| 179 |
-
try:
|
| 180 |
-
proxy = Proxy(
|
| 181 |
-
ip=item['ip'],
|
| 182 |
-
port=int(item['port']),
|
| 183 |
-
country=item.get('country', 'Unknown'),
|
| 184 |
-
protocol='http'
|
| 185 |
-
)
|
| 186 |
-
if proxy not in proxies:
|
| 187 |
-
proxies.append(proxy)
|
| 188 |
-
new_count += 1
|
| 189 |
-
except:
|
| 190 |
-
pass
|
| 191 |
-
logger.info(f"β
Source 3: Got {new_count} new proxies")
|
| 192 |
except Exception as e:
|
| 193 |
-
logger.
|
| 194 |
-
|
| 195 |
-
|
| 196 |
-
|
| 197 |
-
|
| 198 |
-
|
| 199 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 200 |
|
| 201 |
-
async def
|
| 202 |
-
"""
|
|
|
|
|
|
|
|
|
|
|
|
|
| 203 |
try:
|
| 204 |
-
timeout = aiohttp.ClientTimeout(total=
|
| 205 |
|
| 206 |
async with aiohttp.ClientSession(timeout=timeout) as session:
|
| 207 |
start = asyncio.get_event_loop().time()
|
| 208 |
|
| 209 |
-
# Test with a simple, fast endpoint
|
| 210 |
async with session.get(
|
| 211 |
"http://httpbin.org/ip",
|
| 212 |
-
proxy=f"
|
| 213 |
ssl=False
|
| 214 |
) as response:
|
| 215 |
elapsed = asyncio.get_event_loop().time() - start
|
| 216 |
|
| 217 |
if response.status == 200:
|
| 218 |
-
|
| 219 |
-
|
| 220 |
-
|
|
|
|
| 221 |
return True
|
| 222 |
return False
|
| 223 |
|
| 224 |
except Exception as e:
|
| 225 |
-
|
|
|
|
| 226 |
return False
|
| 227 |
|
| 228 |
-
def
|
| 229 |
-
"""Get
|
| 230 |
-
if not self.
|
| 231 |
-
|
| 232 |
-
|
| 233 |
-
|
| 234 |
-
|
| 235 |
-
|
| 236 |
-
self.current_proxy = self.working_proxies[self.current_proxy_index]
|
| 237 |
|
| 238 |
-
logger.info(f"π Rotated to proxy {self.current_proxy_index + 1}/{len(self.working_proxies)}: {self.current_proxy}")
|
| 239 |
-
return self.current_proxy
|
| 240 |
-
|
| 241 |
-
def get_current_proxy(self) -> Optional[Proxy]:
|
| 242 |
-
"""Get currently selected proxy."""
|
| 243 |
-
return self.current_proxy
|
| 244 |
-
|
| 245 |
-
def get_working_proxy_list(self) -> List[Proxy]:
|
| 246 |
-
"""Get list of all working proxies."""
|
| 247 |
-
return self.working_proxies.copy()
|
| 248 |
-
|
| 249 |
-
def get_stats(self) -> Dict:
|
| 250 |
-
"""Get proxy statistics."""
|
| 251 |
return {
|
| 252 |
-
"
|
| 253 |
-
"
|
| 254 |
-
"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 255 |
}
|
| 256 |
|
| 257 |
|
| 258 |
# Global proxy manager instance
|
| 259 |
-
_proxy_manager: Optional[
|
| 260 |
|
| 261 |
-
def get_proxy_manager() ->
|
| 262 |
"""Get the global proxy manager instance."""
|
| 263 |
global _proxy_manager
|
| 264 |
if _proxy_manager is None:
|
| 265 |
-
_proxy_manager =
|
| 266 |
return _proxy_manager
|
|
|
|
| 1 |
"""
|
| 2 |
+
Proxy Manager for Browser Portals
|
| 3 |
+
---------------------------------
|
| 4 |
+
Supports custom IP proxy configuration for the entire container.
|
|
|
|
| 5 |
"""
|
| 6 |
|
| 7 |
import asyncio
|
| 8 |
import aiohttp
|
|
|
|
| 9 |
import logging
|
| 10 |
+
from typing import Optional, Dict
|
| 11 |
from dataclasses import dataclass
|
| 12 |
from datetime import datetime
|
| 13 |
|
|
|
|
| 18 |
"""Represents a proxy server."""
|
| 19 |
ip: str
|
| 20 |
port: int
|
| 21 |
+
country: str = "Custom"
|
| 22 |
protocol: str = "http"
|
| 23 |
last_tested: Optional[datetime] = None
|
| 24 |
is_working: bool = False
|
|
|
|
| 32 |
return {
|
| 33 |
"server": f"{self.protocol}://{self.ip}:{self.port}",
|
| 34 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 35 |
|
| 36 |
|
| 37 |
+
class ProxyManager:
|
| 38 |
+
"""Manages proxy configuration - primarily for custom IP."""
|
| 39 |
|
| 40 |
def __init__(self):
|
| 41 |
+
self.custom_proxy: Optional[Proxy] = None
|
| 42 |
+
self._proxy_str: Optional[str] = None
|
|
|
|
| 43 |
|
| 44 |
+
def set_custom_proxy(self, proxy_str: str) -> bool:
|
| 45 |
"""
|
| 46 |
+
Set a custom proxy from string format: ip:port or protocol://ip:port
|
| 47 |
+
Examples: 192.168.1.1:8080, http://proxy.example.com:3128
|
| 48 |
"""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 49 |
try:
|
| 50 |
+
proxy_str = proxy_str.strip()
|
| 51 |
+
|
| 52 |
+
# Parse protocol if provided
|
| 53 |
+
protocol = "http"
|
| 54 |
+
if "://" in proxy_str:
|
| 55 |
+
protocol, proxy_str = proxy_str.split("://", 1)
|
| 56 |
+
|
| 57 |
+
# Parse IP and port
|
| 58 |
+
if ":" not in proxy_str:
|
| 59 |
+
raise ValueError("Proxy must include port (e.g., ip:port)")
|
| 60 |
+
|
| 61 |
+
parts = proxy_str.rsplit(":", 1)
|
| 62 |
+
ip = parts[0]
|
| 63 |
+
port = int(parts[1])
|
| 64 |
+
|
| 65 |
+
self.custom_proxy = Proxy(
|
| 66 |
+
ip=ip,
|
| 67 |
+
port=port,
|
| 68 |
+
protocol=protocol,
|
| 69 |
+
is_working=True, # Assume working until tested
|
| 70 |
+
last_tested=datetime.now()
|
| 71 |
+
)
|
| 72 |
+
self._proxy_str = f"{protocol}://{ip}:{port}"
|
| 73 |
+
|
| 74 |
+
logger.info(f"β
Custom proxy set: {self._proxy_str}")
|
| 75 |
+
return True
|
| 76 |
+
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 77 |
except Exception as e:
|
| 78 |
+
logger.error(f"β Failed to set custom proxy: {e}")
|
| 79 |
+
return False
|
| 80 |
+
|
| 81 |
+
def clear_proxy(self):
|
| 82 |
+
"""Clear the custom proxy."""
|
| 83 |
+
self.custom_proxy = None
|
| 84 |
+
self._proxy_str = None
|
| 85 |
+
logger.info("ποΈ Custom proxy cleared")
|
| 86 |
+
|
| 87 |
+
def get_current_proxy(self) -> Optional[Proxy]:
|
| 88 |
+
"""Get the current custom proxy."""
|
| 89 |
+
return self.custom_proxy
|
| 90 |
+
|
| 91 |
+
def get_proxy_string(self) -> Optional[str]:
|
| 92 |
+
"""Get the proxy string for environment variables."""
|
| 93 |
+
return self._proxy_str
|
| 94 |
|
| 95 |
+
async def test_proxy(self, proxy: Optional[Proxy] = None) -> bool:
|
| 96 |
+
"""Test if a proxy is working."""
|
| 97 |
+
test_proxy = proxy or self.custom_proxy
|
| 98 |
+
if not test_proxy:
|
| 99 |
+
return False
|
| 100 |
+
|
| 101 |
try:
|
| 102 |
+
timeout = aiohttp.ClientTimeout(total=10)
|
| 103 |
|
| 104 |
async with aiohttp.ClientSession(timeout=timeout) as session:
|
| 105 |
start = asyncio.get_event_loop().time()
|
| 106 |
|
|
|
|
| 107 |
async with session.get(
|
| 108 |
"http://httpbin.org/ip",
|
| 109 |
+
proxy=f"{test_proxy.protocol}://{test_proxy.ip}:{test_proxy.port}",
|
| 110 |
ssl=False
|
| 111 |
) as response:
|
| 112 |
elapsed = asyncio.get_event_loop().time() - start
|
| 113 |
|
| 114 |
if response.status == 200:
|
| 115 |
+
test_proxy.is_working = True
|
| 116 |
+
test_proxy.response_time = elapsed
|
| 117 |
+
test_proxy.last_tested = datetime.now()
|
| 118 |
+
logger.info(f"β
Proxy test passed: {elapsed:.2f}s")
|
| 119 |
return True
|
| 120 |
return False
|
| 121 |
|
| 122 |
except Exception as e:
|
| 123 |
+
logger.warning(f"β Proxy test failed: {e}")
|
| 124 |
+
test_proxy.is_working = False
|
| 125 |
return False
|
| 126 |
|
| 127 |
+
def get_status(self) -> Dict:
|
| 128 |
+
"""Get proxy status."""
|
| 129 |
+
if not self.custom_proxy:
|
| 130 |
+
return {
|
| 131 |
+
"enabled": False,
|
| 132 |
+
"proxy": None,
|
| 133 |
+
"message": "No custom proxy configured"
|
| 134 |
+
}
|
|
|
|
| 135 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 136 |
return {
|
| 137 |
+
"enabled": True,
|
| 138 |
+
"proxy": str(self.custom_proxy),
|
| 139 |
+
"protocol": self.custom_proxy.protocol,
|
| 140 |
+
"ip": self.custom_proxy.ip,
|
| 141 |
+
"port": self.custom_proxy.port,
|
| 142 |
+
"is_working": self.custom_proxy.is_working,
|
| 143 |
+
"response_time": f"{self.custom_proxy.response_time:.2f}s",
|
| 144 |
+
"last_tested": self.custom_proxy.last_tested.isoformat() if self.custom_proxy.last_tested else None
|
| 145 |
}
|
| 146 |
|
| 147 |
|
| 148 |
# Global proxy manager instance
|
| 149 |
+
_proxy_manager: Optional[ProxyManager] = None
|
| 150 |
|
| 151 |
+
def get_proxy_manager() -> ProxyManager:
|
| 152 |
"""Get the global proxy manager instance."""
|
| 153 |
global _proxy_manager
|
| 154 |
if _proxy_manager is None:
|
| 155 |
+
_proxy_manager = ProxyManager()
|
| 156 |
return _proxy_manager
|
static/qaz.html
CHANGED
|
@@ -1151,34 +1151,33 @@
|
|
| 1151 |
<div id="portal-response" style="display: none; margin-top: 15px; padding: 15px; background: var(--surface); border-radius: 6px; border: 1px solid var(--border); white-space: pre-wrap; font-family: monospace; font-size: 13px; max-height: 200px; overflow-y: auto;"></div>
|
| 1152 |
</div>
|
| 1153 |
|
| 1154 |
-
<!-- π Proxy
|
| 1155 |
<div style="background: linear-gradient(135deg, rgba(139, 92, 246, 0.1), rgba(59, 130, 246, 0.1)); border: 2px solid var(--accent); border-radius: 8px; padding: 15px; margin-top: 20px;">
|
| 1156 |
<h4 style="color: var(--accent); margin-bottom: 10px; display: flex; align-items: center;">
|
| 1157 |
-
π Proxy
|
| 1158 |
<span id="proxy-status-badge" style="font-size: 11px; background: var(--surface); padding: 2px 8px; border-radius: 4px; margin-left: 10px; color: var(--text-muted);">No Proxy</span>
|
| 1159 |
</h4>
|
| 1160 |
<p style="color: var(--text-muted); font-size: 12px; margin-bottom: 12px;">
|
| 1161 |
-
|
| 1162 |
</p>
|
| 1163 |
|
| 1164 |
-
<div style="display:
|
| 1165 |
-
<
|
| 1166 |
-
|
|
|
|
|
|
|
| 1167 |
</button>
|
| 1168 |
-
<button onclick="
|
| 1169 |
-
|
| 1170 |
</button>
|
| 1171 |
-
<button onclick="
|
| 1172 |
-
β
Test
|
| 1173 |
-
</button>
|
| 1174 |
-
<button onclick="restartWithProxy()" style="background: var(--surface); color: var(--text); border: 1px solid var(--border); padding: 10px; border-radius: 6px; font-weight: 600; cursor: pointer; font-size: 12px;">
|
| 1175 |
-
π Restart with Proxy
|
| 1176 |
</button>
|
| 1177 |
</div>
|
| 1178 |
|
| 1179 |
<div id="proxy-info" style="background: var(--bg); border: 1px solid var(--border); border-radius: 6px; padding: 10px; font-family: monospace; font-size: 12px; display: none;">
|
| 1180 |
-
<div><strong>Current
|
| 1181 |
-
<div><strong>
|
| 1182 |
<div><strong>Response Time:</strong> <span id="proxy-response-display">-</span></div>
|
| 1183 |
</div>
|
| 1184 |
|
|
@@ -1187,62 +1186,67 @@
|
|
| 1187 |
</div>
|
| 1188 |
</div>
|
| 1189 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1190 |
<script>
|
| 1191 |
-
// π Proxy Management Functions
|
| 1192 |
-
async function
|
| 1193 |
-
const
|
| 1194 |
-
const
|
| 1195 |
-
|
| 1196 |
-
|
|
|
|
|
|
|
|
|
|
| 1197 |
|
| 1198 |
try {
|
| 1199 |
-
const res = await fetch('/qaz/proxy/
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1200 |
const data = await res.json();
|
| 1201 |
|
| 1202 |
if (data.status === 'success') {
|
| 1203 |
-
|
| 1204 |
-
|
| 1205 |
} else {
|
| 1206 |
throw new Error(data.message);
|
| 1207 |
}
|
| 1208 |
} catch (e) {
|
| 1209 |
showProxyMessage('β Error: ' + e.message);
|
| 1210 |
-
} finally {
|
| 1211 |
-
btn.textContent = originalText;
|
| 1212 |
-
btn.disabled = false;
|
| 1213 |
}
|
| 1214 |
}
|
| 1215 |
|
| 1216 |
-
async function
|
| 1217 |
-
|
| 1218 |
-
const originalText = btn.textContent;
|
| 1219 |
-
btn.textContent = 'Rotating...';
|
| 1220 |
-
btn.disabled = true;
|
| 1221 |
|
| 1222 |
try {
|
| 1223 |
-
const res = await fetch('/qaz/proxy/
|
| 1224 |
const data = await res.json();
|
| 1225 |
|
| 1226 |
if (data.status === 'success') {
|
| 1227 |
-
document.getElementById('proxy-
|
| 1228 |
-
|
| 1229 |
-
|
| 1230 |
-
document.getElementById('proxy-country-display').textContent = data.country;
|
| 1231 |
-
document.getElementById('proxy-response-display').textContent = data.response_time;
|
| 1232 |
-
document.getElementById('proxy-info').style.display = 'block';
|
| 1233 |
-
showProxyMessage(`β
Rotated to new IP: ${data.country} (${data.response_time})`);
|
| 1234 |
-
} else {
|
| 1235 |
-
throw new Error(data.message);
|
| 1236 |
}
|
| 1237 |
} catch (e) {
|
| 1238 |
showProxyMessage('β Error: ' + e.message);
|
| 1239 |
-
} finally {
|
| 1240 |
-
btn.textContent = originalText;
|
| 1241 |
-
btn.disabled = false;
|
| 1242 |
}
|
| 1243 |
}
|
| 1244 |
|
| 1245 |
-
async function
|
| 1246 |
const btn = event.target;
|
| 1247 |
const originalText = btn.textContent;
|
| 1248 |
btn.textContent = 'Testing...';
|
|
@@ -1253,9 +1257,13 @@
|
|
| 1253 |
const data = await res.json();
|
| 1254 |
|
| 1255 |
if (data.is_working) {
|
|
|
|
|
|
|
| 1256 |
showProxyMessage(`β
Proxy working! Response time: ${data.response_time}`);
|
| 1257 |
} else {
|
| 1258 |
-
|
|
|
|
|
|
|
| 1259 |
}
|
| 1260 |
} catch (e) {
|
| 1261 |
showProxyMessage('β Error: ' + e.message);
|
|
@@ -1265,39 +1273,26 @@
|
|
| 1265 |
}
|
| 1266 |
}
|
| 1267 |
|
| 1268 |
-
async function
|
| 1269 |
-
if (!currentProvider) {
|
| 1270 |
-
alert('Please select a provider first');
|
| 1271 |
-
return;
|
| 1272 |
-
}
|
| 1273 |
-
|
| 1274 |
-
const btn = event.target;
|
| 1275 |
-
const originalText = btn.textContent;
|
| 1276 |
-
btn.textContent = 'Restarting...';
|
| 1277 |
-
btn.disabled = true;
|
| 1278 |
-
|
| 1279 |
try {
|
| 1280 |
-
const res = await fetch(
|
| 1281 |
const data = await res.json();
|
| 1282 |
|
| 1283 |
-
if (data.
|
| 1284 |
-
|
| 1285 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1286 |
} else {
|
| 1287 |
-
|
|
|
|
|
|
|
| 1288 |
}
|
| 1289 |
} catch (e) {
|
| 1290 |
-
|
| 1291 |
-
} finally {
|
| 1292 |
-
btn.textContent = originalText;
|
| 1293 |
-
btn.disabled = false;
|
| 1294 |
-
}
|
| 1295 |
-
}
|
| 1296 |
-
|
| 1297 |
-
function updateProxyDisplay(proxy, stats) {
|
| 1298 |
-
if (proxy) {
|
| 1299 |
-
document.getElementById('current-proxy-display').textContent = proxy;
|
| 1300 |
-
document.getElementById('proxy-info').style.display = 'block';
|
| 1301 |
}
|
| 1302 |
}
|
| 1303 |
|
|
@@ -1309,6 +1304,71 @@
|
|
| 1309 |
el.textContent = '';
|
| 1310 |
}, 5000);
|
| 1311 |
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1312 |
</script>
|
| 1313 |
</body>
|
| 1314 |
|
|
|
|
| 1151 |
<div id="portal-response" style="display: none; margin-top: 15px; padding: 15px; background: var(--surface); border-radius: 6px; border: 1px solid var(--border); white-space: pre-wrap; font-family: monospace; font-size: 13px; max-height: 200px; overflow-y: auto;"></div>
|
| 1152 |
</div>
|
| 1153 |
|
| 1154 |
+
<!-- π Custom Proxy Configuration -->
|
| 1155 |
<div style="background: linear-gradient(135deg, rgba(139, 92, 246, 0.1), rgba(59, 130, 246, 0.1)); border: 2px solid var(--accent); border-radius: 8px; padding: 15px; margin-top: 20px;">
|
| 1156 |
<h4 style="color: var(--accent); margin-bottom: 10px; display: flex; align-items: center;">
|
| 1157 |
+
π Custom Proxy (Container-wide)
|
| 1158 |
<span id="proxy-status-badge" style="font-size: 11px; background: var(--surface); padding: 2px 8px; border-radius: 4px; margin-left: 10px; color: var(--text-muted);">No Proxy</span>
|
| 1159 |
</h4>
|
| 1160 |
<p style="color: var(--text-muted); font-size: 12px; margin-bottom: 12px;">
|
| 1161 |
+
Set your own proxy IP. Format: <code>ip:port</code> or <code>http://ip:port</code>. Applies to entire container.
|
| 1162 |
</p>
|
| 1163 |
|
| 1164 |
+
<div style="display: flex; gap: 10px; margin-bottom: 12px; flex-wrap: wrap;">
|
| 1165 |
+
<input type="text" id="custom-proxy-input" placeholder="e.g., 192.168.1.1:8080 or http://proxy.example.com:3128"
|
| 1166 |
+
style="flex: 1; min-width: 250px; background: var(--surface); border: 1px solid var(--border); color: var(--text); padding: 10px; border-radius: 6px; font-family: monospace; font-size: 13px;">
|
| 1167 |
+
<button onclick="setCustomProxy()" style="background: var(--accent); color: white; border: none; padding: 10px 20px; border-radius: 6px; font-weight: 600; cursor: pointer;">
|
| 1168 |
+
πΎ Set Proxy
|
| 1169 |
</button>
|
| 1170 |
+
<button onclick="clearCustomProxy()" style="background: var(--surface); color: var(--error); border: 1px solid var(--error); padding: 10px 20px; border-radius: 6px; font-weight: 600; cursor: pointer;">
|
| 1171 |
+
ποΈ Clear
|
| 1172 |
</button>
|
| 1173 |
+
<button onclick="testCustomProxy()" style="background: var(--success); color: white; border: none; padding: 10px 20px; border-radius: 6px; font-weight: 600; cursor: pointer;">
|
| 1174 |
+
β
Test
|
|
|
|
|
|
|
|
|
|
| 1175 |
</button>
|
| 1176 |
</div>
|
| 1177 |
|
| 1178 |
<div id="proxy-info" style="background: var(--bg); border: 1px solid var(--border); border-radius: 6px; padding: 10px; font-family: monospace; font-size: 12px; display: none;">
|
| 1179 |
+
<div><strong>Current Proxy:</strong> <span id="current-proxy-display">None</span></div>
|
| 1180 |
+
<div><strong>Status:</strong> <span id="proxy-status-display">-</span></div>
|
| 1181 |
<div><strong>Response Time:</strong> <span id="proxy-response-display">-</span></div>
|
| 1182 |
</div>
|
| 1183 |
|
|
|
|
| 1186 |
</div>
|
| 1187 |
</div>
|
| 1188 |
|
| 1189 |
+
<!-- π§ Provider Toggle Management Section -->
|
| 1190 |
+
<div class="card" style="margin-top: 30px;">
|
| 1191 |
+
<h2 style="margin-bottom: 15px;">π§ Provider Toggle Management</h2>
|
| 1192 |
+
<p style="color: var(--text-muted); margin-bottom: 20px;">
|
| 1193 |
+
Enable or disable providers. Disabled providers won't appear in the dashboard or API responses.
|
| 1194 |
+
</p>
|
| 1195 |
+
|
| 1196 |
+
<div id="providers-list" style="display: grid; gap: 10px;">
|
| 1197 |
+
<div style="text-align: center; padding: 20px; color: var(--text-muted);">Loading providers...</div>
|
| 1198 |
+
</div>
|
| 1199 |
+
</div>
|
| 1200 |
+
|
| 1201 |
<script>
|
| 1202 |
+
// 🌐 Custom Proxy Management Functions

// Read the proxy address from the input box, POST it to the backend,
// then refresh the on-page status panel. Errors surface via showProxyMessage.
async function setCustomProxy() {
    const input = document.getElementById('custom-proxy-input');
    const proxy = input.value.trim();

    // Empty field: nothing to submit.
    if (!proxy) {
        alert('Please enter a proxy address');
        return;
    }

    try {
        const response = await fetch('/qaz/proxy/set', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ proxy: proxy })
        });

        const payload = await response.json();

        if (payload.status !== 'success') {
            // Backend rejected the proxy — route its message through the catch below.
            throw new Error(payload.message);
        }
        showProxyMessage(`✅ Proxy set: ${proxy}`);
        await loadProxyStatus();
    } catch (e) {
        showProxyMessage('❌ Error: ' + e.message);
    }
}
|
| 1231 |
|
| 1232 |
+
// Remove the configured custom proxy after user confirmation,
// wipe the input field, and refresh the status panel.
async function clearCustomProxy() {
    if (!confirm('Clear the custom proxy?')) return;

    try {
        const response = await fetch('/qaz/proxy/clear', { method: 'POST' });
        const payload = await response.json();

        if (payload.status === 'success') {
            document.getElementById('custom-proxy-input').value = '';
            showProxyMessage('✅ Proxy cleared');
            await loadProxyStatus();
        }
        // NOTE(review): a non-success response is silently ignored here —
        // presumably intentional best-effort behavior; confirm with backend contract.
    } catch (e) {
        showProxyMessage('❌ Error: ' + e.message);
    }
}
|
| 1248 |
|
| 1249 |
+
async function testCustomProxy() {
|
| 1250 |
const btn = event.target;
|
| 1251 |
const originalText = btn.textContent;
|
| 1252 |
btn.textContent = 'Testing...';
|
|
|
|
| 1257 |
const data = await res.json();
|
| 1258 |
|
| 1259 |
if (data.is_working) {
|
| 1260 |
+
document.getElementById('proxy-status-badge').textContent = 'Working';
|
| 1261 |
+
document.getElementById('proxy-status-badge').style.color = 'var(--success)';
|
| 1262 |
showProxyMessage(`β
Proxy working! Response time: ${data.response_time}`);
|
| 1263 |
} else {
|
| 1264 |
+
document.getElementById('proxy-status-badge').textContent = 'Not Working';
|
| 1265 |
+
document.getElementById('proxy-status-badge').style.color = 'var(--error)';
|
| 1266 |
+
showProxyMessage('β Proxy not working. Check your settings.');
|
| 1267 |
}
|
| 1268 |
} catch (e) {
|
| 1269 |
showProxyMessage('β Error: ' + e.message);
|
|
|
|
| 1273 |
}
|
| 1274 |
}
|
| 1275 |
|
| 1276 |
+
// Pull current proxy state from the backend and sync every related UI element:
// the status badge, the detail panel, and the input box. Failures only log.
async function loadProxyStatus() {
    try {
        const response = await fetch('/qaz/proxy/status');
        const status = await response.json();

        const badge = document.getElementById('proxy-status-badge');
        const panel = document.getElementById('proxy-info');

        if (status.enabled) {
            badge.textContent = 'Active';
            badge.style.color = 'var(--success)';
            document.getElementById('current-proxy-display').textContent = status.proxy;
            document.getElementById('proxy-status-display').textContent = status.is_working ? 'Working' : 'Not Tested';
            document.getElementById('proxy-response-display').textContent = status.response_time || '-';
            panel.style.display = 'block';
            // Mirror the active proxy back into the input so it can be edited in place.
            document.getElementById('custom-proxy-input').value = status.proxy;
        } else {
            badge.textContent = 'No Proxy';
            badge.style.color = 'var(--text-muted)';
            panel.style.display = 'none';
        }
    } catch (e) {
        console.error('Failed to load proxy status:', e);
    }
}
|
| 1298 |
|
|
|
|
| 1304 |
el.textContent = '';
|
| 1305 |
}, 5000);
|
| 1306 |
}
|
| 1307 |
+
|
| 1308 |
+
// 🔧 Provider Toggle Functions

// Fetch the provider list and render one toggle row per provider into
// #providers-list. On fetch failure, an inline error message replaces the list.
async function loadProviders() {
    const container = document.getElementById('providers-list');

    try {
        const response = await fetch('/qaz/providers');
        const payload = await response.json();

        if (!payload.providers || payload.providers.length === 0) {
            container.innerHTML = '<div style="text-align: center; padding: 20px; color: var(--text-muted);">No providers found</div>';
            return;
        }

        // NOTE(review): provider.name/type/id are interpolated into innerHTML
        // unescaped — assumed trusted admin-controlled data; confirm.
        const rows = payload.providers.map(provider => `
            <div style="display: flex; justify-content: space-between; align-items: center; background: var(--surface); border: 1px solid var(--border); border-radius: 8px; padding: 15px;">
                <div>
                    <div style="font-weight: 600; color: white;">${provider.name}</div>
                    <div style="font-size: 12px; color: var(--text-muted); text-transform: uppercase;">${provider.type} • ID: ${provider.id}</div>
                </div>
                <label style="display: flex; align-items: center; cursor: pointer;">
                    <input type="checkbox"
                           ${provider.enabled ? 'checked' : ''}
                           onchange="toggleProvider('${provider.id}', this.checked)"
                           style="width: 50px; height: 26px; appearance: none; background: ${provider.enabled ? 'var(--success)' : 'var(--border)'}; border-radius: 13px; position: relative; cursor: pointer; transition: background 0.3s;">
                    <span style="margin-left: 10px; font-size: 12px; color: var(--text-muted);">${provider.enabled ? 'Enabled' : 'Disabled'}</span>
                </label>
            </div>
        `);
        container.innerHTML = rows.join('');

    } catch (e) {
        console.error('Failed to load providers:', e);
        container.innerHTML =
            `<div style="text-align: center; padding: 20px; color: var(--error);">Error loading providers: ${e.message}</div>`;
    }
}
|
| 1343 |
+
|
| 1344 |
+
// Persist a provider's enabled/disabled state to the backend.
// Always re-renders the list afterwards so the UI reflects server truth —
// including after an error, which would otherwise leave a stale checkbox.
async function toggleProvider(providerId, enabled) {
    try {
        const response = await fetch('/qaz/providers/toggle', {
            method: 'POST',
            headers: { 'Content-Type': 'application/json' },
            body: JSON.stringify({ provider_id: providerId, enabled: enabled })
        });

        const payload = await response.json();

        if (payload.status !== 'success') {
            throw new Error(payload.message);
        }
        // Refresh the list to show updated state
        await loadProviders();
    } catch (e) {
        alert('Error: ' + e.message);
        // Refresh to get correct state
        await loadProviders();
    }
}
|
| 1366 |
+
|
| 1367 |
+
// Initialize on page load: render the provider toggles and proxy status once
// the DOM is ready (both fire-and-forget async calls).
document.addEventListener('DOMContentLoaded', () => {
    loadProviders();
    loadProxyStatus();
});
|
| 1372 |
</script>
|
| 1373 |
</body>
|
| 1374 |
|