Spaces:
Paused
Paused
Mirrowel commited on
Commit Β·
81e9ff5
1
Parent(s): 42bd5ae
fix(oauth): π escape rich markup in oauth authorization urls
Browse filesPrevent Rich markup interpretation issues when displaying OAuth authorization URLs in terminal output.
- Import `rich.markup.escape` to properly escape special characters (=, &, etc.) in URLs
- Add extensive inline documentation explaining the escaping rationale and known terminal compatibility issues
- Apply URL escaping to authorization URLs in Google OAuth, iFlow, and Qwen Code providers
- Refine headless environment detection to exclude macOS from DISPLAY checks (macOS uses Quartz, not X11)
- Improve code formatting consistency (string quotes, line wrapping) across OAuth providers
The escaped URLs display correctly in all terminal configurations while remaining clickable in supported terminals (iTerm2, Windows Terminal, etc.).
src/rotator_library/providers/google_oauth_base.py
CHANGED
|
@@ -16,35 +16,37 @@ import httpx
|
|
| 16 |
from rich.console import Console
|
| 17 |
from rich.panel import Panel
|
| 18 |
from rich.text import Text
|
|
|
|
| 19 |
|
| 20 |
from ..utils.headless_detection import is_headless_environment
|
| 21 |
|
| 22 |
-
lib_logger = logging.getLogger(
|
| 23 |
|
| 24 |
console = Console()
|
| 25 |
|
|
|
|
| 26 |
class GoogleOAuthBase:
|
| 27 |
"""
|
| 28 |
Base class for Google OAuth2 authentication providers.
|
| 29 |
-
|
| 30 |
Subclasses must override:
|
| 31 |
- CLIENT_ID: OAuth client ID
|
| 32 |
- CLIENT_SECRET: OAuth client secret
|
| 33 |
- OAUTH_SCOPES: List of OAuth scopes
|
| 34 |
- ENV_PREFIX: Prefix for environment variables (e.g., "GEMINI_CLI", "ANTIGRAVITY")
|
| 35 |
-
|
| 36 |
Subclasses may optionally override:
|
| 37 |
- CALLBACK_PORT: Local OAuth callback server port (default: 8085)
|
| 38 |
- CALLBACK_PATH: OAuth callback path (default: "/oauth2callback")
|
| 39 |
- REFRESH_EXPIRY_BUFFER_SECONDS: Time buffer before token expiry (default: 30 minutes)
|
| 40 |
"""
|
| 41 |
-
|
| 42 |
# Subclasses MUST override these
|
| 43 |
CLIENT_ID: str = None
|
| 44 |
CLIENT_SECRET: str = None
|
| 45 |
OAUTH_SCOPES: list = None
|
| 46 |
ENV_PREFIX: str = None
|
| 47 |
-
|
| 48 |
# Subclasses MAY override these
|
| 49 |
TOKEN_URI: str = "https://oauth2.googleapis.com/token"
|
| 50 |
USER_INFO_URI: str = "https://www.googleapis.com/oauth2/v1/userinfo"
|
|
@@ -57,49 +59,65 @@ class GoogleOAuthBase:
|
|
| 57 |
if self.CLIENT_ID is None:
|
| 58 |
raise NotImplementedError(f"{self.__class__.__name__} must set CLIENT_ID")
|
| 59 |
if self.CLIENT_SECRET is None:
|
| 60 |
-
raise NotImplementedError(
|
|
|
|
|
|
|
| 61 |
if self.OAUTH_SCOPES is None:
|
| 62 |
-
raise NotImplementedError(
|
|
|
|
|
|
|
| 63 |
if self.ENV_PREFIX is None:
|
| 64 |
raise NotImplementedError(f"{self.__class__.__name__} must set ENV_PREFIX")
|
| 65 |
-
|
| 66 |
self._credentials_cache: Dict[str, Dict[str, Any]] = {}
|
| 67 |
self._refresh_locks: Dict[str, asyncio.Lock] = {}
|
| 68 |
-
self._locks_lock =
|
|
|
|
|
|
|
| 69 |
# [BACKOFF TRACKING] Track consecutive failures per credential
|
| 70 |
-
self._refresh_failures: Dict[
|
| 71 |
-
|
| 72 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 73 |
# [QUEUE SYSTEM] Sequential refresh processing
|
| 74 |
self._refresh_queue: asyncio.Queue = asyncio.Queue()
|
| 75 |
self._queued_credentials: set = set() # Track credentials already in queue
|
| 76 |
-
self._unavailable_credentials: set =
|
|
|
|
|
|
|
| 77 |
self._queue_tracking_lock = asyncio.Lock() # Protects queue sets
|
| 78 |
-
self._queue_processor_task: Optional[asyncio.Task] =
|
|
|
|
|
|
|
| 79 |
|
| 80 |
def _parse_env_credential_path(self, path: str) -> Optional[str]:
|
| 81 |
"""
|
| 82 |
Parse a virtual env:// path and return the credential index.
|
| 83 |
-
|
| 84 |
Supported formats:
|
| 85 |
- "env://provider/0" - Legacy single credential (no index in env var names)
|
| 86 |
- "env://provider/1" - First numbered credential (PROVIDER_1_ACCESS_TOKEN)
|
| 87 |
- "env://provider/2" - Second numbered credential, etc.
|
| 88 |
-
|
| 89 |
Returns:
|
| 90 |
The credential index as string ("0" for legacy, "1", "2", etc. for numbered)
|
| 91 |
or None if path is not an env:// path
|
| 92 |
"""
|
| 93 |
if not path.startswith("env://"):
|
| 94 |
return None
|
| 95 |
-
|
| 96 |
# Parse: env://provider/index
|
| 97 |
parts = path[6:].split("/") # Remove "env://" prefix
|
| 98 |
if len(parts) >= 2:
|
| 99 |
return parts[1] # Return the index
|
| 100 |
return "0" # Default to legacy format
|
| 101 |
|
| 102 |
-
def _load_from_env(
|
|
|
|
|
|
|
| 103 |
"""
|
| 104 |
Load OAuth credentials from environment variables for stateless deployments.
|
| 105 |
|
|
@@ -133,7 +151,7 @@ class GoogleOAuthBase:
|
|
| 133 |
# Legacy format: PROVIDER_ACCESS_TOKEN
|
| 134 |
prefix = self.ENV_PREFIX
|
| 135 |
default_email = "env-user"
|
| 136 |
-
|
| 137 |
access_token = os.getenv(f"{prefix}_ACCESS_TOKEN")
|
| 138 |
refresh_token = os.getenv(f"{prefix}_REFRESH_TOKEN")
|
| 139 |
|
|
@@ -148,7 +166,9 @@ class GoogleOAuthBase:
|
|
| 148 |
try:
|
| 149 |
expiry_date = float(expiry_str)
|
| 150 |
except ValueError:
|
| 151 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 152 |
expiry_date = 0
|
| 153 |
|
| 154 |
creds = {
|
|
@@ -163,15 +183,16 @@ class GoogleOAuthBase:
|
|
| 163 |
"email": os.getenv(f"{prefix}_EMAIL", default_email),
|
| 164 |
"last_check_timestamp": time.time(),
|
| 165 |
"loaded_from_env": True, # Flag to indicate env-based credentials
|
| 166 |
-
"env_credential_index": credential_index
|
| 167 |
-
|
|
|
|
| 168 |
}
|
| 169 |
|
| 170 |
# Add project_id if provided
|
| 171 |
project_id = os.getenv(f"{prefix}_PROJECT_ID")
|
| 172 |
if project_id:
|
| 173 |
creds["_proxy_metadata"]["project_id"] = project_id
|
| 174 |
-
|
| 175 |
# Add tier if provided
|
| 176 |
tier = os.getenv(f"{prefix}_TIER")
|
| 177 |
if tier:
|
|
@@ -193,24 +214,32 @@ class GoogleOAuthBase:
|
|
| 193 |
# Load from environment variables with specific index
|
| 194 |
env_creds = self._load_from_env(credential_index)
|
| 195 |
if env_creds:
|
| 196 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 197 |
self._credentials_cache[path] = env_creds
|
| 198 |
return env_creds
|
| 199 |
else:
|
| 200 |
-
raise IOError(
|
|
|
|
|
|
|
| 201 |
|
| 202 |
# For file paths, first try loading from legacy env vars (for backwards compatibility)
|
| 203 |
env_creds = self._load_from_env()
|
| 204 |
if env_creds:
|
| 205 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 206 |
# Cache env-based credentials using the path as key
|
| 207 |
self._credentials_cache[path] = env_creds
|
| 208 |
return env_creds
|
| 209 |
|
| 210 |
# Fall back to file-based loading
|
| 211 |
try:
|
| 212 |
-
lib_logger.debug(
|
| 213 |
-
|
|
|
|
|
|
|
| 214 |
creds = json.load(f)
|
| 215 |
# Handle gcloud-style creds file which nest tokens under "credential"
|
| 216 |
if "credential" in creds:
|
|
@@ -218,11 +247,17 @@ class GoogleOAuthBase:
|
|
| 218 |
self._credentials_cache[path] = creds
|
| 219 |
return creds
|
| 220 |
except FileNotFoundError:
|
| 221 |
-
raise IOError(
|
|
|
|
|
|
|
| 222 |
except Exception as e:
|
| 223 |
-
raise IOError(
|
|
|
|
|
|
|
| 224 |
except Exception as e:
|
| 225 |
-
raise IOError(
|
|
|
|
|
|
|
| 226 |
|
| 227 |
async def _save_credentials(self, path: str, creds: Dict[str, Any]):
|
| 228 |
# Don't save to file if credentials were loaded from environment
|
|
@@ -241,10 +276,12 @@ class GoogleOAuthBase:
|
|
| 241 |
tmp_path = None
|
| 242 |
try:
|
| 243 |
# Create temp file in same directory as target (ensures same filesystem)
|
| 244 |
-
tmp_fd, tmp_path = tempfile.mkstemp(
|
|
|
|
|
|
|
| 245 |
|
| 246 |
# Write JSON to temp file
|
| 247 |
-
with os.fdopen(tmp_fd,
|
| 248 |
json.dump(creds, f, indent=2)
|
| 249 |
tmp_fd = None # fdopen closes the fd
|
| 250 |
|
|
@@ -261,10 +298,14 @@ class GoogleOAuthBase:
|
|
| 261 |
|
| 262 |
# Update cache AFTER successful file write (prevents cache/file inconsistency)
|
| 263 |
self._credentials_cache[path] = creds
|
| 264 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 265 |
|
| 266 |
except Exception as e:
|
| 267 |
-
lib_logger.error(
|
|
|
|
|
|
|
| 268 |
# Clean up temp file if it still exists
|
| 269 |
if tmp_fd is not None:
|
| 270 |
try:
|
|
@@ -279,20 +320,26 @@ class GoogleOAuthBase:
|
|
| 279 |
raise
|
| 280 |
|
| 281 |
def _is_token_expired(self, creds: Dict[str, Any]) -> bool:
|
| 282 |
-
expiry = creds.get("token_expiry")
|
| 283 |
-
if not expiry:
|
| 284 |
-
|
| 285 |
else:
|
| 286 |
expiry_timestamp = time.mktime(time.strptime(expiry, "%Y-%m-%dT%H:%M:%SZ"))
|
| 287 |
return expiry_timestamp < time.time() + self.REFRESH_EXPIRY_BUFFER_SECONDS
|
| 288 |
|
| 289 |
-
async def _refresh_token(
|
|
|
|
|
|
|
| 290 |
async with await self._get_lock(path):
|
| 291 |
# Skip the expiry check if a refresh is being forced
|
| 292 |
-
if not force and not self._is_token_expired(
|
|
|
|
|
|
|
| 293 |
return self._credentials_cache.get(path, creds)
|
| 294 |
|
| 295 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 296 |
refresh_token = creds.get("refresh_token")
|
| 297 |
if not refresh_token:
|
| 298 |
raise ValueError("No refresh_token found in credentials file.")
|
|
@@ -306,12 +353,18 @@ class GoogleOAuthBase:
|
|
| 306 |
async with httpx.AsyncClient() as client:
|
| 307 |
for attempt in range(max_retries):
|
| 308 |
try:
|
| 309 |
-
response = await client.post(
|
| 310 |
-
|
| 311 |
-
|
| 312 |
-
|
| 313 |
-
|
| 314 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 315 |
response.raise_for_status()
|
| 316 |
new_token_data = response.json()
|
| 317 |
break # Success, exit retry loop
|
|
@@ -332,7 +385,9 @@ class GoogleOAuthBase:
|
|
| 332 |
elif status_code == 429:
|
| 333 |
# Rate limit - honor Retry-After header if present
|
| 334 |
retry_after = int(e.response.headers.get("Retry-After", 60))
|
| 335 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 336 |
if attempt < max_retries - 1:
|
| 337 |
await asyncio.sleep(retry_after)
|
| 338 |
continue
|
|
@@ -341,8 +396,10 @@ class GoogleOAuthBase:
|
|
| 341 |
elif status_code >= 500 and status_code < 600:
|
| 342 |
# Server error - retry with exponential backoff
|
| 343 |
if attempt < max_retries - 1:
|
| 344 |
-
wait_time = 2
|
| 345 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 346 |
await asyncio.sleep(wait_time)
|
| 347 |
continue
|
| 348 |
raise # Final attempt failed
|
|
@@ -355,22 +412,30 @@ class GoogleOAuthBase:
|
|
| 355 |
# Network errors - retry with backoff
|
| 356 |
last_error = e
|
| 357 |
if attempt < max_retries - 1:
|
| 358 |
-
wait_time = 2
|
| 359 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 360 |
await asyncio.sleep(wait_time)
|
| 361 |
continue
|
| 362 |
raise
|
| 363 |
|
| 364 |
# [INVALID GRANT RE-AUTH] Trigger OAuth flow if refresh token is invalid
|
| 365 |
if needs_reauth:
|
| 366 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 367 |
try:
|
| 368 |
# Call initialize_token to trigger OAuth flow
|
| 369 |
new_creds = await self.initialize_token(path)
|
| 370 |
return new_creds
|
| 371 |
except Exception as reauth_error:
|
| 372 |
-
lib_logger.error(
|
| 373 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 374 |
|
| 375 |
# If we exhausted retries without success
|
| 376 |
if new_token_data is None:
|
|
@@ -379,7 +444,7 @@ class GoogleOAuthBase:
|
|
| 379 |
# [FIX 1] Update OAuth token fields from response
|
| 380 |
creds["access_token"] = new_token_data["access_token"]
|
| 381 |
expiry_timestamp = time.time() + new_token_data["expires_in"]
|
| 382 |
-
creds["expiry_date"] = expiry_timestamp * 1000
|
| 383 |
|
| 384 |
# [FIX 2] Update refresh_token if server provided a new one (rare but possible with Google OAuth)
|
| 385 |
if "refresh_token" in new_token_data:
|
|
@@ -405,10 +470,20 @@ class GoogleOAuthBase:
|
|
| 405 |
creds["_proxy_metadata"]["last_check_timestamp"] = time.time()
|
| 406 |
|
| 407 |
# [VALIDATION] Verify refreshed credentials have all required fields
|
| 408 |
-
required_fields = [
|
| 409 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 410 |
if missing_fields:
|
| 411 |
-
raise ValueError(
|
|
|
|
|
|
|
| 412 |
|
| 413 |
# [VALIDATION] Optional: Test that the refreshed token is actually usable
|
| 414 |
try:
|
|
@@ -416,17 +491,23 @@ class GoogleOAuthBase:
|
|
| 416 |
test_response = await client.get(
|
| 417 |
self.USER_INFO_URI,
|
| 418 |
headers={"Authorization": f"Bearer {creds['access_token']}"},
|
| 419 |
-
timeout=5.0
|
| 420 |
)
|
| 421 |
test_response.raise_for_status()
|
| 422 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 423 |
except Exception as e:
|
| 424 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 425 |
# Don't fail the refresh - the token might still work for other endpoints
|
| 426 |
# But log it for debugging purposes
|
| 427 |
|
| 428 |
await self._save_credentials(path, creds)
|
| 429 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 430 |
return creds
|
| 431 |
|
| 432 |
async def proactively_refresh(self, credential_path: str):
|
|
@@ -451,11 +532,15 @@ class GoogleOAuthBase:
|
|
| 451 |
async def _ensure_queue_processor_running(self):
|
| 452 |
"""Lazily starts the queue processor if not already running."""
|
| 453 |
if self._queue_processor_task is None or self._queue_processor_task.done():
|
| 454 |
-
self._queue_processor_task = asyncio.create_task(
|
|
|
|
|
|
|
| 455 |
|
| 456 |
-
async def _queue_refresh(
|
|
|
|
|
|
|
| 457 |
"""Add a credential to the refresh queue if not already queued.
|
| 458 |
-
|
| 459 |
Args:
|
| 460 |
path: Credential file path
|
| 461 |
force: Force refresh even if not expired
|
|
@@ -470,9 +555,11 @@ class GoogleOAuthBase:
|
|
| 470 |
if now < backoff_until:
|
| 471 |
# Credential is in backoff for automated refresh, do not queue
|
| 472 |
remaining = int(backoff_until - now)
|
| 473 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 474 |
return
|
| 475 |
-
|
| 476 |
async with self._queue_tracking_lock:
|
| 477 |
if path not in self._queued_credentials:
|
| 478 |
self._queued_credentials.add(path)
|
|
@@ -488,14 +575,13 @@ class GoogleOAuthBase:
|
|
| 488 |
# Wait for an item with timeout to allow graceful shutdown
|
| 489 |
try:
|
| 490 |
path, force, needs_reauth = await asyncio.wait_for(
|
| 491 |
-
self._refresh_queue.get(),
|
| 492 |
-
timeout=60.0
|
| 493 |
)
|
| 494 |
except asyncio.TimeoutError:
|
| 495 |
# No items for 60s, exit to save resources
|
| 496 |
self._queue_processor_task = None
|
| 497 |
return
|
| 498 |
-
|
| 499 |
try:
|
| 500 |
# Perform the actual refresh (still using per-credential lock)
|
| 501 |
async with await self._get_lock(path):
|
|
@@ -506,16 +592,16 @@ class GoogleOAuthBase:
|
|
| 506 |
async with self._queue_tracking_lock:
|
| 507 |
self._unavailable_credentials.discard(path)
|
| 508 |
continue
|
| 509 |
-
|
| 510 |
# Perform refresh
|
| 511 |
if not creds:
|
| 512 |
creds = await self._load_credentials(path)
|
| 513 |
await self._refresh_token(path, creds, force=force)
|
| 514 |
-
|
| 515 |
# SUCCESS: Mark as available again
|
| 516 |
async with self._queue_tracking_lock:
|
| 517 |
self._unavailable_credentials.discard(path)
|
| 518 |
-
|
| 519 |
finally:
|
| 520 |
# Remove from queued set
|
| 521 |
async with self._queue_tracking_lock:
|
|
@@ -530,18 +616,26 @@ class GoogleOAuthBase:
|
|
| 530 |
async with self._queue_tracking_lock:
|
| 531 |
self._unavailable_credentials.discard(path)
|
| 532 |
|
| 533 |
-
async def initialize_token(
|
|
|
|
|
|
|
| 534 |
path = creds_or_path if isinstance(creds_or_path, str) else None
|
| 535 |
|
| 536 |
# Get display name from metadata if available, otherwise derive from path
|
| 537 |
if isinstance(creds_or_path, dict):
|
| 538 |
-
display_name = creds_or_path.get("_proxy_metadata", {}).get(
|
|
|
|
|
|
|
| 539 |
else:
|
| 540 |
display_name = Path(path).name if path else "in-memory object"
|
| 541 |
|
| 542 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 543 |
try:
|
| 544 |
-
creds =
|
|
|
|
|
|
|
| 545 |
reason = ""
|
| 546 |
if not creds.get("refresh_token"):
|
| 547 |
reason = "refresh token is missing"
|
|
@@ -553,34 +647,51 @@ class GoogleOAuthBase:
|
|
| 553 |
try:
|
| 554 |
return await self._refresh_token(path, creds)
|
| 555 |
except Exception as e:
|
| 556 |
-
lib_logger.warning(
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 557 |
|
| 558 |
-
lib_logger.warning(f"{self.ENV_PREFIX} OAuth token for '{display_name}' needs setup: {reason}.")
|
| 559 |
-
|
| 560 |
# [HEADLESS DETECTION] Check if running in headless environment
|
| 561 |
is_headless = is_headless_environment()
|
| 562 |
-
|
| 563 |
auth_code_future = asyncio.get_event_loop().create_future()
|
| 564 |
server = None
|
| 565 |
|
| 566 |
async def handle_callback(reader, writer):
|
| 567 |
try:
|
| 568 |
request_line_bytes = await reader.readline()
|
| 569 |
-
if not request_line_bytes:
|
| 570 |
-
|
| 571 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 572 |
from urllib.parse import urlparse, parse_qs
|
|
|
|
| 573 |
query_params = parse_qs(urlparse(path_str).query)
|
| 574 |
-
writer.write(
|
| 575 |
-
|
|
|
|
|
|
|
| 576 |
if not auth_code_future.done():
|
| 577 |
-
auth_code_future.set_result(query_params[
|
| 578 |
-
writer.write(
|
|
|
|
|
|
|
| 579 |
else:
|
| 580 |
-
error = query_params.get(
|
| 581 |
if not auth_code_future.done():
|
| 582 |
-
auth_code_future.set_exception(
|
| 583 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 584 |
await writer.drain()
|
| 585 |
except Exception as e:
|
| 586 |
lib_logger.error(f"Error in OAuth callback handler: {e}")
|
|
@@ -588,15 +699,25 @@ class GoogleOAuthBase:
|
|
| 588 |
writer.close()
|
| 589 |
|
| 590 |
try:
|
| 591 |
-
server = await asyncio.start_server(
|
|
|
|
|
|
|
| 592 |
from urllib.parse import urlencode
|
| 593 |
-
|
| 594 |
-
|
| 595 |
-
"
|
| 596 |
-
|
| 597 |
-
|
| 598 |
-
|
| 599 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 600 |
# [HEADLESS SUPPORT] Display appropriate instructions
|
| 601 |
if is_headless:
|
| 602 |
auth_panel_text = Text.from_markup(
|
|
@@ -606,68 +727,118 @@ class GoogleOAuthBase:
|
|
| 606 |
else:
|
| 607 |
auth_panel_text = Text.from_markup(
|
| 608 |
"1. Your browser will now open to log in and authorize the application.\n"
|
| 609 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 610 |
)
|
| 611 |
-
|
| 612 |
-
|
| 613 |
-
|
| 614 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 615 |
# [HEADLESS SUPPORT] Only attempt browser open if NOT headless
|
| 616 |
if not is_headless:
|
| 617 |
try:
|
| 618 |
webbrowser.open(auth_url)
|
| 619 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 620 |
except Exception as e:
|
| 621 |
-
lib_logger.warning(
|
| 622 |
-
|
| 623 |
-
|
| 624 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 625 |
except asyncio.TimeoutError:
|
| 626 |
raise Exception("OAuth flow timed out. Please try again.")
|
| 627 |
finally:
|
| 628 |
if server:
|
| 629 |
server.close()
|
| 630 |
await server.wait_closed()
|
| 631 |
-
|
| 632 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 633 |
async with httpx.AsyncClient() as client:
|
| 634 |
-
response = await client.post(
|
| 635 |
-
|
| 636 |
-
|
| 637 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 638 |
response.raise_for_status()
|
| 639 |
token_data = response.json()
|
| 640 |
# Start with the full token data from the exchange
|
| 641 |
creds = token_data.copy()
|
| 642 |
-
|
| 643 |
# Convert 'expires_in' to 'expiry_date' in milliseconds
|
| 644 |
-
creds["expiry_date"] = (
|
| 645 |
-
|
|
|
|
|
|
|
| 646 |
# Ensure client_id and client_secret are present
|
| 647 |
creds["client_id"] = self.CLIENT_ID
|
| 648 |
creds["client_secret"] = self.CLIENT_SECRET
|
| 649 |
|
| 650 |
creds["token_uri"] = self.TOKEN_URI
|
| 651 |
creds["universe_domain"] = "googleapis.com"
|
| 652 |
-
|
| 653 |
# Fetch user info and add metadata
|
| 654 |
-
user_info_response = await client.get(
|
|
|
|
|
|
|
|
|
|
| 655 |
user_info_response.raise_for_status()
|
| 656 |
user_info = user_info_response.json()
|
| 657 |
creds["_proxy_metadata"] = {
|
| 658 |
"email": user_info.get("email"),
|
| 659 |
-
"last_check_timestamp": time.time()
|
| 660 |
}
|
| 661 |
|
| 662 |
if path:
|
| 663 |
await self._save_credentials(path, creds)
|
| 664 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 665 |
return creds
|
| 666 |
|
| 667 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 668 |
return creds
|
| 669 |
except Exception as e:
|
| 670 |
-
raise ValueError(
|
|
|
|
|
|
|
| 671 |
|
| 672 |
async def get_auth_header(self, credential_path: str) -> Dict[str, str]:
|
| 673 |
creds = await self._load_credentials(credential_path)
|
|
@@ -675,13 +846,15 @@ class GoogleOAuthBase:
|
|
| 675 |
creds = await self._refresh_token(credential_path, creds)
|
| 676 |
return {"Authorization": f"Bearer {creds['access_token']}"}
|
| 677 |
|
| 678 |
-
async def get_user_info(
|
|
|
|
|
|
|
| 679 |
path = creds_or_path if isinstance(creds_or_path, str) else None
|
| 680 |
creds = await self._load_credentials(creds_or_path) if path else creds_or_path
|
| 681 |
|
| 682 |
if path and self._is_token_expired(creds):
|
| 683 |
creds = await self._refresh_token(path, creds)
|
| 684 |
-
|
| 685 |
# Prefer locally stored metadata
|
| 686 |
if creds.get("_proxy_metadata", {}).get("email"):
|
| 687 |
if path:
|
|
@@ -695,11 +868,11 @@ class GoogleOAuthBase:
|
|
| 695 |
response = await client.get(self.USER_INFO_URI, headers=headers)
|
| 696 |
response.raise_for_status()
|
| 697 |
user_info = response.json()
|
| 698 |
-
|
| 699 |
# Save the retrieved info for future use
|
| 700 |
creds["_proxy_metadata"] = {
|
| 701 |
"email": user_info.get("email"),
|
| 702 |
-
"last_check_timestamp": time.time()
|
| 703 |
}
|
| 704 |
if path:
|
| 705 |
await self._save_credentials(path, creds)
|
|
|
|
| 16 |
from rich.console import Console
|
| 17 |
from rich.panel import Panel
|
| 18 |
from rich.text import Text
|
| 19 |
+
from rich.markup import escape as rich_escape
|
| 20 |
|
| 21 |
from ..utils.headless_detection import is_headless_environment
|
| 22 |
|
| 23 |
+
lib_logger = logging.getLogger("rotator_library")
|
| 24 |
|
| 25 |
console = Console()
|
| 26 |
|
| 27 |
+
|
| 28 |
class GoogleOAuthBase:
|
| 29 |
"""
|
| 30 |
Base class for Google OAuth2 authentication providers.
|
| 31 |
+
|
| 32 |
Subclasses must override:
|
| 33 |
- CLIENT_ID: OAuth client ID
|
| 34 |
- CLIENT_SECRET: OAuth client secret
|
| 35 |
- OAUTH_SCOPES: List of OAuth scopes
|
| 36 |
- ENV_PREFIX: Prefix for environment variables (e.g., "GEMINI_CLI", "ANTIGRAVITY")
|
| 37 |
+
|
| 38 |
Subclasses may optionally override:
|
| 39 |
- CALLBACK_PORT: Local OAuth callback server port (default: 8085)
|
| 40 |
- CALLBACK_PATH: OAuth callback path (default: "/oauth2callback")
|
| 41 |
- REFRESH_EXPIRY_BUFFER_SECONDS: Time buffer before token expiry (default: 30 minutes)
|
| 42 |
"""
|
| 43 |
+
|
| 44 |
# Subclasses MUST override these
|
| 45 |
CLIENT_ID: str = None
|
| 46 |
CLIENT_SECRET: str = None
|
| 47 |
OAUTH_SCOPES: list = None
|
| 48 |
ENV_PREFIX: str = None
|
| 49 |
+
|
| 50 |
# Subclasses MAY override these
|
| 51 |
TOKEN_URI: str = "https://oauth2.googleapis.com/token"
|
| 52 |
USER_INFO_URI: str = "https://www.googleapis.com/oauth2/v1/userinfo"
|
|
|
|
| 59 |
if self.CLIENT_ID is None:
|
| 60 |
raise NotImplementedError(f"{self.__class__.__name__} must set CLIENT_ID")
|
| 61 |
if self.CLIENT_SECRET is None:
|
| 62 |
+
raise NotImplementedError(
|
| 63 |
+
f"{self.__class__.__name__} must set CLIENT_SECRET"
|
| 64 |
+
)
|
| 65 |
if self.OAUTH_SCOPES is None:
|
| 66 |
+
raise NotImplementedError(
|
| 67 |
+
f"{self.__class__.__name__} must set OAUTH_SCOPES"
|
| 68 |
+
)
|
| 69 |
if self.ENV_PREFIX is None:
|
| 70 |
raise NotImplementedError(f"{self.__class__.__name__} must set ENV_PREFIX")
|
| 71 |
+
|
| 72 |
self._credentials_cache: Dict[str, Dict[str, Any]] = {}
|
| 73 |
self._refresh_locks: Dict[str, asyncio.Lock] = {}
|
| 74 |
+
self._locks_lock = (
|
| 75 |
+
asyncio.Lock()
|
| 76 |
+
) # Protects the locks dict from race conditions
|
| 77 |
# [BACKOFF TRACKING] Track consecutive failures per credential
|
| 78 |
+
self._refresh_failures: Dict[
|
| 79 |
+
str, int
|
| 80 |
+
] = {} # Track consecutive failures per credential
|
| 81 |
+
self._next_refresh_after: Dict[
|
| 82 |
+
str, float
|
| 83 |
+
] = {} # Track backoff timers (Unix timestamp)
|
| 84 |
+
|
| 85 |
# [QUEUE SYSTEM] Sequential refresh processing
|
| 86 |
self._refresh_queue: asyncio.Queue = asyncio.Queue()
|
| 87 |
self._queued_credentials: set = set() # Track credentials already in queue
|
| 88 |
+
self._unavailable_credentials: set = (
|
| 89 |
+
set()
|
| 90 |
+
) # Mark credentials unavailable during re-auth
|
| 91 |
self._queue_tracking_lock = asyncio.Lock() # Protects queue sets
|
| 92 |
+
self._queue_processor_task: Optional[asyncio.Task] = (
|
| 93 |
+
None # Background worker task
|
| 94 |
+
)
|
| 95 |
|
| 96 |
def _parse_env_credential_path(self, path: str) -> Optional[str]:
|
| 97 |
"""
|
| 98 |
Parse a virtual env:// path and return the credential index.
|
| 99 |
+
|
| 100 |
Supported formats:
|
| 101 |
- "env://provider/0" - Legacy single credential (no index in env var names)
|
| 102 |
- "env://provider/1" - First numbered credential (PROVIDER_1_ACCESS_TOKEN)
|
| 103 |
- "env://provider/2" - Second numbered credential, etc.
|
| 104 |
+
|
| 105 |
Returns:
|
| 106 |
The credential index as string ("0" for legacy, "1", "2", etc. for numbered)
|
| 107 |
or None if path is not an env:// path
|
| 108 |
"""
|
| 109 |
if not path.startswith("env://"):
|
| 110 |
return None
|
| 111 |
+
|
| 112 |
# Parse: env://provider/index
|
| 113 |
parts = path[6:].split("/") # Remove "env://" prefix
|
| 114 |
if len(parts) >= 2:
|
| 115 |
return parts[1] # Return the index
|
| 116 |
return "0" # Default to legacy format
|
| 117 |
|
| 118 |
+
def _load_from_env(
|
| 119 |
+
self, credential_index: Optional[str] = None
|
| 120 |
+
) -> Optional[Dict[str, Any]]:
|
| 121 |
"""
|
| 122 |
Load OAuth credentials from environment variables for stateless deployments.
|
| 123 |
|
|
|
|
| 151 |
# Legacy format: PROVIDER_ACCESS_TOKEN
|
| 152 |
prefix = self.ENV_PREFIX
|
| 153 |
default_email = "env-user"
|
| 154 |
+
|
| 155 |
access_token = os.getenv(f"{prefix}_ACCESS_TOKEN")
|
| 156 |
refresh_token = os.getenv(f"{prefix}_REFRESH_TOKEN")
|
| 157 |
|
|
|
|
| 166 |
try:
|
| 167 |
expiry_date = float(expiry_str)
|
| 168 |
except ValueError:
|
| 169 |
+
lib_logger.warning(
|
| 170 |
+
f"Invalid {prefix}_EXPIRY_DATE value: {expiry_str}, using 0"
|
| 171 |
+
)
|
| 172 |
expiry_date = 0
|
| 173 |
|
| 174 |
creds = {
|
|
|
|
| 183 |
"email": os.getenv(f"{prefix}_EMAIL", default_email),
|
| 184 |
"last_check_timestamp": time.time(),
|
| 185 |
"loaded_from_env": True, # Flag to indicate env-based credentials
|
| 186 |
+
"env_credential_index": credential_index
|
| 187 |
+
or "0", # Track which env credential this is
|
| 188 |
+
},
|
| 189 |
}
|
| 190 |
|
| 191 |
# Add project_id if provided
|
| 192 |
project_id = os.getenv(f"{prefix}_PROJECT_ID")
|
| 193 |
if project_id:
|
| 194 |
creds["_proxy_metadata"]["project_id"] = project_id
|
| 195 |
+
|
| 196 |
# Add tier if provided
|
| 197 |
tier = os.getenv(f"{prefix}_TIER")
|
| 198 |
if tier:
|
|
|
|
| 214 |
# Load from environment variables with specific index
|
| 215 |
env_creds = self._load_from_env(credential_index)
|
| 216 |
if env_creds:
|
| 217 |
+
lib_logger.info(
|
| 218 |
+
f"Using {self.ENV_PREFIX} credentials from environment variables (index: {credential_index})"
|
| 219 |
+
)
|
| 220 |
self._credentials_cache[path] = env_creds
|
| 221 |
return env_creds
|
| 222 |
else:
|
| 223 |
+
raise IOError(
|
| 224 |
+
f"Environment variables for {self.ENV_PREFIX} credential index {credential_index} not found"
|
| 225 |
+
)
|
| 226 |
|
| 227 |
# For file paths, first try loading from legacy env vars (for backwards compatibility)
|
| 228 |
env_creds = self._load_from_env()
|
| 229 |
if env_creds:
|
| 230 |
+
lib_logger.info(
|
| 231 |
+
f"Using {self.ENV_PREFIX} credentials from environment variables"
|
| 232 |
+
)
|
| 233 |
# Cache env-based credentials using the path as key
|
| 234 |
self._credentials_cache[path] = env_creds
|
| 235 |
return env_creds
|
| 236 |
|
| 237 |
# Fall back to file-based loading
|
| 238 |
try:
|
| 239 |
+
lib_logger.debug(
|
| 240 |
+
f"Loading {self.ENV_PREFIX} credentials from file: {path}"
|
| 241 |
+
)
|
| 242 |
+
with open(path, "r") as f:
|
| 243 |
creds = json.load(f)
|
| 244 |
# Handle gcloud-style creds file which nest tokens under "credential"
|
| 245 |
if "credential" in creds:
|
|
|
|
| 247 |
self._credentials_cache[path] = creds
|
| 248 |
return creds
|
| 249 |
except FileNotFoundError:
|
| 250 |
+
raise IOError(
|
| 251 |
+
f"{self.ENV_PREFIX} OAuth credential file not found at '{path}'"
|
| 252 |
+
)
|
| 253 |
except Exception as e:
|
| 254 |
+
raise IOError(
|
| 255 |
+
f"Failed to load {self.ENV_PREFIX} OAuth credentials from '{path}': {e}"
|
| 256 |
+
)
|
| 257 |
except Exception as e:
|
| 258 |
+
raise IOError(
|
| 259 |
+
f"Failed to load {self.ENV_PREFIX} OAuth credentials from '{path}': {e}"
|
| 260 |
+
)
|
| 261 |
|
| 262 |
async def _save_credentials(self, path: str, creds: Dict[str, Any]):
|
| 263 |
# Don't save to file if credentials were loaded from environment
|
|
|
|
| 276 |
tmp_path = None
|
| 277 |
try:
|
| 278 |
# Create temp file in same directory as target (ensures same filesystem)
|
| 279 |
+
tmp_fd, tmp_path = tempfile.mkstemp(
|
| 280 |
+
dir=parent_dir, prefix=".tmp_", suffix=".json", text=True
|
| 281 |
+
)
|
| 282 |
|
| 283 |
# Write JSON to temp file
|
| 284 |
+
with os.fdopen(tmp_fd, "w") as f:
|
| 285 |
json.dump(creds, f, indent=2)
|
| 286 |
tmp_fd = None # fdopen closes the fd
|
| 287 |
|
|
|
|
| 298 |
|
| 299 |
# Update cache AFTER successful file write (prevents cache/file inconsistency)
|
| 300 |
self._credentials_cache[path] = creds
|
| 301 |
+
lib_logger.debug(
|
| 302 |
+
f"Saved updated {self.ENV_PREFIX} OAuth credentials to '{path}' (atomic write)."
|
| 303 |
+
)
|
| 304 |
|
| 305 |
except Exception as e:
|
| 306 |
+
lib_logger.error(
|
| 307 |
+
f"Failed to save updated {self.ENV_PREFIX} OAuth credentials to '{path}': {e}"
|
| 308 |
+
)
|
| 309 |
# Clean up temp file if it still exists
|
| 310 |
if tmp_fd is not None:
|
| 311 |
try:
|
|
|
|
| 320 |
raise
|
| 321 |
|
| 322 |
def _is_token_expired(self, creds: Dict[str, Any]) -> bool:
|
| 323 |
+
expiry = creds.get("token_expiry") # gcloud format
|
| 324 |
+
if not expiry: # gemini-cli format
|
| 325 |
+
expiry_timestamp = creds.get("expiry_date", 0) / 1000
|
| 326 |
else:
|
| 327 |
expiry_timestamp = time.mktime(time.strptime(expiry, "%Y-%m-%dT%H:%M:%SZ"))
|
| 328 |
return expiry_timestamp < time.time() + self.REFRESH_EXPIRY_BUFFER_SECONDS
|
| 329 |
|
| 330 |
+
async def _refresh_token(
|
| 331 |
+
self, path: str, creds: Dict[str, Any], force: bool = False
|
| 332 |
+
) -> Dict[str, Any]:
|
| 333 |
async with await self._get_lock(path):
|
| 334 |
# Skip the expiry check if a refresh is being forced
|
| 335 |
+
if not force and not self._is_token_expired(
|
| 336 |
+
self._credentials_cache.get(path, creds)
|
| 337 |
+
):
|
| 338 |
return self._credentials_cache.get(path, creds)
|
| 339 |
|
| 340 |
+
lib_logger.debug(
|
| 341 |
+
f"Refreshing {self.ENV_PREFIX} OAuth token for '{Path(path).name}' (forced: {force})..."
|
| 342 |
+
)
|
| 343 |
refresh_token = creds.get("refresh_token")
|
| 344 |
if not refresh_token:
|
| 345 |
raise ValueError("No refresh_token found in credentials file.")
|
|
|
|
| 353 |
async with httpx.AsyncClient() as client:
|
| 354 |
for attempt in range(max_retries):
|
| 355 |
try:
|
| 356 |
+
response = await client.post(
|
| 357 |
+
self.TOKEN_URI,
|
| 358 |
+
data={
|
| 359 |
+
"client_id": creds.get("client_id", self.CLIENT_ID),
|
| 360 |
+
"client_secret": creds.get(
|
| 361 |
+
"client_secret", self.CLIENT_SECRET
|
| 362 |
+
),
|
| 363 |
+
"refresh_token": refresh_token,
|
| 364 |
+
"grant_type": "refresh_token",
|
| 365 |
+
},
|
| 366 |
+
timeout=30.0,
|
| 367 |
+
)
|
| 368 |
response.raise_for_status()
|
| 369 |
new_token_data = response.json()
|
| 370 |
break # Success, exit retry loop
|
|
|
|
| 385 |
elif status_code == 429:
|
| 386 |
# Rate limit - honor Retry-After header if present
|
| 387 |
retry_after = int(e.response.headers.get("Retry-After", 60))
|
| 388 |
+
lib_logger.warning(
|
| 389 |
+
f"Rate limited (HTTP 429), retry after {retry_after}s"
|
| 390 |
+
)
|
| 391 |
if attempt < max_retries - 1:
|
| 392 |
await asyncio.sleep(retry_after)
|
| 393 |
continue
|
|
|
|
| 396 |
elif status_code >= 500 and status_code < 600:
|
| 397 |
# Server error - retry with exponential backoff
|
| 398 |
if attempt < max_retries - 1:
|
| 399 |
+
wait_time = 2**attempt # 1s, 2s, 4s
|
| 400 |
+
lib_logger.warning(
|
| 401 |
+
f"Server error (HTTP {status_code}), retry {attempt + 1}/{max_retries} in {wait_time}s"
|
| 402 |
+
)
|
| 403 |
await asyncio.sleep(wait_time)
|
| 404 |
continue
|
| 405 |
raise # Final attempt failed
|
|
|
|
| 412 |
# Network errors - retry with backoff
|
| 413 |
last_error = e
|
| 414 |
if attempt < max_retries - 1:
|
| 415 |
+
wait_time = 2**attempt
|
| 416 |
+
lib_logger.warning(
|
| 417 |
+
f"Network error during refresh: {e}, retry {attempt + 1}/{max_retries} in {wait_time}s"
|
| 418 |
+
)
|
| 419 |
await asyncio.sleep(wait_time)
|
| 420 |
continue
|
| 421 |
raise
|
| 422 |
|
| 423 |
# [INVALID GRANT RE-AUTH] Trigger OAuth flow if refresh token is invalid
|
| 424 |
if needs_reauth:
|
| 425 |
+
lib_logger.info(
|
| 426 |
+
f"Starting re-authentication for '{Path(path).name}'..."
|
| 427 |
+
)
|
| 428 |
try:
|
| 429 |
# Call initialize_token to trigger OAuth flow
|
| 430 |
new_creds = await self.initialize_token(path)
|
| 431 |
return new_creds
|
| 432 |
except Exception as reauth_error:
|
| 433 |
+
lib_logger.error(
|
| 434 |
+
f"Re-authentication failed for '{Path(path).name}': {reauth_error}"
|
| 435 |
+
)
|
| 436 |
+
raise ValueError(
|
| 437 |
+
f"Refresh token invalid and re-authentication failed: {reauth_error}"
|
| 438 |
+
)
|
| 439 |
|
| 440 |
# If we exhausted retries without success
|
| 441 |
if new_token_data is None:
|
|
|
|
| 444 |
# [FIX 1] Update OAuth token fields from response
|
| 445 |
creds["access_token"] = new_token_data["access_token"]
|
| 446 |
expiry_timestamp = time.time() + new_token_data["expires_in"]
|
| 447 |
+
creds["expiry_date"] = expiry_timestamp * 1000 # gemini-cli format
|
| 448 |
|
| 449 |
# [FIX 2] Update refresh_token if server provided a new one (rare but possible with Google OAuth)
|
| 450 |
if "refresh_token" in new_token_data:
|
|
|
|
| 470 |
creds["_proxy_metadata"]["last_check_timestamp"] = time.time()
|
| 471 |
|
| 472 |
# [VALIDATION] Verify refreshed credentials have all required fields
|
| 473 |
+
required_fields = [
|
| 474 |
+
"access_token",
|
| 475 |
+
"refresh_token",
|
| 476 |
+
"client_id",
|
| 477 |
+
"client_secret",
|
| 478 |
+
"token_uri",
|
| 479 |
+
]
|
| 480 |
+
missing_fields = [
|
| 481 |
+
field for field in required_fields if not creds.get(field)
|
| 482 |
+
]
|
| 483 |
if missing_fields:
|
| 484 |
+
raise ValueError(
|
| 485 |
+
f"Refreshed credentials missing required fields: {missing_fields}"
|
| 486 |
+
)
|
| 487 |
|
| 488 |
# [VALIDATION] Optional: Test that the refreshed token is actually usable
|
| 489 |
try:
|
|
|
|
| 491 |
test_response = await client.get(
|
| 492 |
self.USER_INFO_URI,
|
| 493 |
headers={"Authorization": f"Bearer {creds['access_token']}"},
|
| 494 |
+
timeout=5.0,
|
| 495 |
)
|
| 496 |
test_response.raise_for_status()
|
| 497 |
+
lib_logger.debug(
|
| 498 |
+
f"Token validation successful for '{Path(path).name}'"
|
| 499 |
+
)
|
| 500 |
except Exception as e:
|
| 501 |
+
lib_logger.warning(
|
| 502 |
+
f"Refreshed token validation failed for '{Path(path).name}': {e}"
|
| 503 |
+
)
|
| 504 |
# Don't fail the refresh - the token might still work for other endpoints
|
| 505 |
# But log it for debugging purposes
|
| 506 |
|
| 507 |
await self._save_credentials(path, creds)
|
| 508 |
+
lib_logger.debug(
|
| 509 |
+
f"Successfully refreshed {self.ENV_PREFIX} OAuth token for '{Path(path).name}'."
|
| 510 |
+
)
|
| 511 |
return creds
|
| 512 |
|
| 513 |
async def proactively_refresh(self, credential_path: str):
|
|
|
|
| 532 |
async def _ensure_queue_processor_running(self):
|
| 533 |
"""Lazily starts the queue processor if not already running."""
|
| 534 |
if self._queue_processor_task is None or self._queue_processor_task.done():
|
| 535 |
+
self._queue_processor_task = asyncio.create_task(
|
| 536 |
+
self._process_refresh_queue()
|
| 537 |
+
)
|
| 538 |
|
| 539 |
+
async def _queue_refresh(
|
| 540 |
+
self, path: str, force: bool = False, needs_reauth: bool = False
|
| 541 |
+
):
|
| 542 |
"""Add a credential to the refresh queue if not already queued.
|
| 543 |
+
|
| 544 |
Args:
|
| 545 |
path: Credential file path
|
| 546 |
force: Force refresh even if not expired
|
|
|
|
| 555 |
if now < backoff_until:
|
| 556 |
# Credential is in backoff for automated refresh, do not queue
|
| 557 |
remaining = int(backoff_until - now)
|
| 558 |
+
lib_logger.debug(
|
| 559 |
+
f"Skipping automated refresh for '{Path(path).name}' (in backoff for {remaining}s)"
|
| 560 |
+
)
|
| 561 |
return
|
| 562 |
+
|
| 563 |
async with self._queue_tracking_lock:
|
| 564 |
if path not in self._queued_credentials:
|
| 565 |
self._queued_credentials.add(path)
|
|
|
|
| 575 |
# Wait for an item with timeout to allow graceful shutdown
|
| 576 |
try:
|
| 577 |
path, force, needs_reauth = await asyncio.wait_for(
|
| 578 |
+
self._refresh_queue.get(), timeout=60.0
|
|
|
|
| 579 |
)
|
| 580 |
except asyncio.TimeoutError:
|
| 581 |
# No items for 60s, exit to save resources
|
| 582 |
self._queue_processor_task = None
|
| 583 |
return
|
| 584 |
+
|
| 585 |
try:
|
| 586 |
# Perform the actual refresh (still using per-credential lock)
|
| 587 |
async with await self._get_lock(path):
|
|
|
|
| 592 |
async with self._queue_tracking_lock:
|
| 593 |
self._unavailable_credentials.discard(path)
|
| 594 |
continue
|
| 595 |
+
|
| 596 |
# Perform refresh
|
| 597 |
if not creds:
|
| 598 |
creds = await self._load_credentials(path)
|
| 599 |
await self._refresh_token(path, creds, force=force)
|
| 600 |
+
|
| 601 |
# SUCCESS: Mark as available again
|
| 602 |
async with self._queue_tracking_lock:
|
| 603 |
self._unavailable_credentials.discard(path)
|
| 604 |
+
|
| 605 |
finally:
|
| 606 |
# Remove from queued set
|
| 607 |
async with self._queue_tracking_lock:
|
|
|
|
| 616 |
async with self._queue_tracking_lock:
|
| 617 |
self._unavailable_credentials.discard(path)
|
| 618 |
|
| 619 |
+
async def initialize_token(
|
| 620 |
+
self, creds_or_path: Union[Dict[str, Any], str]
|
| 621 |
+
) -> Dict[str, Any]:
|
| 622 |
path = creds_or_path if isinstance(creds_or_path, str) else None
|
| 623 |
|
| 624 |
# Get display name from metadata if available, otherwise derive from path
|
| 625 |
if isinstance(creds_or_path, dict):
|
| 626 |
+
display_name = creds_or_path.get("_proxy_metadata", {}).get(
|
| 627 |
+
"display_name", "in-memory object"
|
| 628 |
+
)
|
| 629 |
else:
|
| 630 |
display_name = Path(path).name if path else "in-memory object"
|
| 631 |
|
| 632 |
+
lib_logger.debug(
|
| 633 |
+
f"Initializing {self.ENV_PREFIX} token for '{display_name}'..."
|
| 634 |
+
)
|
| 635 |
try:
|
| 636 |
+
creds = (
|
| 637 |
+
await self._load_credentials(creds_or_path) if path else creds_or_path
|
| 638 |
+
)
|
| 639 |
reason = ""
|
| 640 |
if not creds.get("refresh_token"):
|
| 641 |
reason = "refresh token is missing"
|
|
|
|
| 647 |
try:
|
| 648 |
return await self._refresh_token(path, creds)
|
| 649 |
except Exception as e:
|
| 650 |
+
lib_logger.warning(
|
| 651 |
+
f"Automatic token refresh for '{display_name}' failed: {e}. Proceeding to interactive login."
|
| 652 |
+
)
|
| 653 |
+
|
| 654 |
+
lib_logger.warning(
|
| 655 |
+
f"{self.ENV_PREFIX} OAuth token for '{display_name}' needs setup: {reason}."
|
| 656 |
+
)
|
| 657 |
|
|
|
|
|
|
|
| 658 |
# [HEADLESS DETECTION] Check if running in headless environment
|
| 659 |
is_headless = is_headless_environment()
|
| 660 |
+
|
| 661 |
auth_code_future = asyncio.get_event_loop().create_future()
|
| 662 |
server = None
|
| 663 |
|
| 664 |
async def handle_callback(reader, writer):
|
| 665 |
try:
|
| 666 |
request_line_bytes = await reader.readline()
|
| 667 |
+
if not request_line_bytes:
|
| 668 |
+
return
|
| 669 |
+
path_str = (
|
| 670 |
+
request_line_bytes.decode("utf-8").strip().split(" ")[1]
|
| 671 |
+
)
|
| 672 |
+
while await reader.readline() != b"\r\n":
|
| 673 |
+
pass
|
| 674 |
from urllib.parse import urlparse, parse_qs
|
| 675 |
+
|
| 676 |
query_params = parse_qs(urlparse(path_str).query)
|
| 677 |
+
writer.write(
|
| 678 |
+
b"HTTP/1.1 200 OK\r\nContent-Type: text/html\r\n\r\n"
|
| 679 |
+
)
|
| 680 |
+
if "code" in query_params:
|
| 681 |
if not auth_code_future.done():
|
| 682 |
+
auth_code_future.set_result(query_params["code"][0])
|
| 683 |
+
writer.write(
|
| 684 |
+
b"<html><body><h1>Authentication successful!</h1><p>You can close this window.</p></body></html>"
|
| 685 |
+
)
|
| 686 |
else:
|
| 687 |
+
error = query_params.get("error", ["Unknown error"])[0]
|
| 688 |
if not auth_code_future.done():
|
| 689 |
+
auth_code_future.set_exception(
|
| 690 |
+
Exception(f"OAuth failed: {error}")
|
| 691 |
+
)
|
| 692 |
+
writer.write(
|
| 693 |
+
f"<html><body><h1>Authentication Failed</h1><p>Error: {error}. Please try again.</p></body></html>".encode()
|
| 694 |
+
)
|
| 695 |
await writer.drain()
|
| 696 |
except Exception as e:
|
| 697 |
lib_logger.error(f"Error in OAuth callback handler: {e}")
|
|
|
|
| 699 |
writer.close()
|
| 700 |
|
| 701 |
try:
|
| 702 |
+
server = await asyncio.start_server(
|
| 703 |
+
handle_callback, "127.0.0.1", self.CALLBACK_PORT
|
| 704 |
+
)
|
| 705 |
from urllib.parse import urlencode
|
| 706 |
+
|
| 707 |
+
auth_url = (
|
| 708 |
+
"https://accounts.google.com/o/oauth2/v2/auth?"
|
| 709 |
+
+ urlencode(
|
| 710 |
+
{
|
| 711 |
+
"client_id": self.CLIENT_ID,
|
| 712 |
+
"redirect_uri": f"http://localhost:{self.CALLBACK_PORT}{self.CALLBACK_PATH}",
|
| 713 |
+
"scope": " ".join(self.OAUTH_SCOPES),
|
| 714 |
+
"access_type": "offline",
|
| 715 |
+
"response_type": "code",
|
| 716 |
+
"prompt": "consent",
|
| 717 |
+
}
|
| 718 |
+
)
|
| 719 |
+
)
|
| 720 |
+
|
| 721 |
# [HEADLESS SUPPORT] Display appropriate instructions
|
| 722 |
if is_headless:
|
| 723 |
auth_panel_text = Text.from_markup(
|
|
|
|
| 727 |
else:
|
| 728 |
auth_panel_text = Text.from_markup(
|
| 729 |
"1. Your browser will now open to log in and authorize the application.\n"
|
| 730 |
+
"2. If it doesn't open automatically, please open the URL below manually."
|
| 731 |
+
)
|
| 732 |
+
|
| 733 |
+
console.print(
|
| 734 |
+
Panel(
|
| 735 |
+
auth_panel_text,
|
| 736 |
+
title=f"{self.ENV_PREFIX} OAuth Setup for [bold yellow]{display_name}[/bold yellow]",
|
| 737 |
+
style="bold blue",
|
| 738 |
)
|
| 739 |
+
)
|
| 740 |
+
# [URL DISPLAY] Print URL with proper escaping to prevent Rich markup issues.
|
| 741 |
+
# IMPORTANT: OAuth URLs contain special characters (=, &, etc.) that Rich might
|
| 742 |
+
# interpret as markup in some terminal configurations. We escape the URL to
|
| 743 |
+
# ensure it displays correctly.
|
| 744 |
+
#
|
| 745 |
+
# KNOWN ISSUE: If Rich rendering fails entirely (e.g., terminal doesn't support
|
| 746 |
+
# ANSI codes, or output is piped), the escaped URL should still be valid.
|
| 747 |
+
# However, if the terminal strips or mangles the output, users should copy
|
| 748 |
+
# the URL directly from logs or use --verbose to see the raw URL.
|
| 749 |
+
#
|
| 750 |
+
# The [link=...] markup creates a clickable hyperlink in supported terminals
|
| 751 |
+
# (iTerm2, Windows Terminal, etc.), but the displayed text is the escaped URL
|
| 752 |
+
# which can be safely copied even if the hyperlink doesn't work.
|
| 753 |
+
escaped_url = rich_escape(auth_url)
|
| 754 |
+
console.print(
|
| 755 |
+
f"[bold]URL:[/bold] [link={auth_url}]{escaped_url}[/link]\n"
|
| 756 |
+
)
|
| 757 |
+
|
| 758 |
# [HEADLESS SUPPORT] Only attempt browser open if NOT headless
|
| 759 |
if not is_headless:
|
| 760 |
try:
|
| 761 |
webbrowser.open(auth_url)
|
| 762 |
+
lib_logger.info(
|
| 763 |
+
"Browser opened successfully for OAuth flow"
|
| 764 |
+
)
|
| 765 |
except Exception as e:
|
| 766 |
+
lib_logger.warning(
|
| 767 |
+
f"Failed to open browser automatically: {e}. Please open the URL manually."
|
| 768 |
+
)
|
| 769 |
+
|
| 770 |
+
with console.status(
|
| 771 |
+
f"[bold green]Waiting for you to complete authentication in the browser...[/bold green]",
|
| 772 |
+
spinner="dots",
|
| 773 |
+
):
|
| 774 |
+
auth_code = await asyncio.wait_for(
|
| 775 |
+
auth_code_future, timeout=300
|
| 776 |
+
)
|
| 777 |
except asyncio.TimeoutError:
|
| 778 |
raise Exception("OAuth flow timed out. Please try again.")
|
| 779 |
finally:
|
| 780 |
if server:
|
| 781 |
server.close()
|
| 782 |
await server.wait_closed()
|
| 783 |
+
|
| 784 |
+
lib_logger.info(
|
| 785 |
+
f"Attempting to exchange authorization code for tokens..."
|
| 786 |
+
)
|
| 787 |
async with httpx.AsyncClient() as client:
|
| 788 |
+
response = await client.post(
|
| 789 |
+
self.TOKEN_URI,
|
| 790 |
+
data={
|
| 791 |
+
"code": auth_code.strip(),
|
| 792 |
+
"client_id": self.CLIENT_ID,
|
| 793 |
+
"client_secret": self.CLIENT_SECRET,
|
| 794 |
+
"redirect_uri": f"http://localhost:{self.CALLBACK_PORT}{self.CALLBACK_PATH}",
|
| 795 |
+
"grant_type": "authorization_code",
|
| 796 |
+
},
|
| 797 |
+
)
|
| 798 |
response.raise_for_status()
|
| 799 |
token_data = response.json()
|
| 800 |
# Start with the full token data from the exchange
|
| 801 |
creds = token_data.copy()
|
| 802 |
+
|
| 803 |
# Convert 'expires_in' to 'expiry_date' in milliseconds
|
| 804 |
+
creds["expiry_date"] = (
|
| 805 |
+
time.time() + creds.pop("expires_in")
|
| 806 |
+
) * 1000
|
| 807 |
+
|
| 808 |
# Ensure client_id and client_secret are present
|
| 809 |
creds["client_id"] = self.CLIENT_ID
|
| 810 |
creds["client_secret"] = self.CLIENT_SECRET
|
| 811 |
|
| 812 |
creds["token_uri"] = self.TOKEN_URI
|
| 813 |
creds["universe_domain"] = "googleapis.com"
|
| 814 |
+
|
| 815 |
# Fetch user info and add metadata
|
| 816 |
+
user_info_response = await client.get(
|
| 817 |
+
self.USER_INFO_URI,
|
| 818 |
+
headers={"Authorization": f"Bearer {creds['access_token']}"},
|
| 819 |
+
)
|
| 820 |
user_info_response.raise_for_status()
|
| 821 |
user_info = user_info_response.json()
|
| 822 |
creds["_proxy_metadata"] = {
|
| 823 |
"email": user_info.get("email"),
|
| 824 |
+
"last_check_timestamp": time.time(),
|
| 825 |
}
|
| 826 |
|
| 827 |
if path:
|
| 828 |
await self._save_credentials(path, creds)
|
| 829 |
+
lib_logger.info(
|
| 830 |
+
f"{self.ENV_PREFIX} OAuth initialized successfully for '{display_name}'."
|
| 831 |
+
)
|
| 832 |
return creds
|
| 833 |
|
| 834 |
+
lib_logger.info(
|
| 835 |
+
f"{self.ENV_PREFIX} OAuth token at '{display_name}' is valid."
|
| 836 |
+
)
|
| 837 |
return creds
|
| 838 |
except Exception as e:
|
| 839 |
+
raise ValueError(
|
| 840 |
+
f"Failed to initialize {self.ENV_PREFIX} OAuth for '{path}': {e}"
|
| 841 |
+
)
|
| 842 |
|
| 843 |
async def get_auth_header(self, credential_path: str) -> Dict[str, str]:
|
| 844 |
creds = await self._load_credentials(credential_path)
|
|
|
|
| 846 |
creds = await self._refresh_token(credential_path, creds)
|
| 847 |
return {"Authorization": f"Bearer {creds['access_token']}"}
|
| 848 |
|
| 849 |
+
async def get_user_info(
|
| 850 |
+
self, creds_or_path: Union[Dict[str, Any], str]
|
| 851 |
+
) -> Dict[str, Any]:
|
| 852 |
path = creds_or_path if isinstance(creds_or_path, str) else None
|
| 853 |
creds = await self._load_credentials(creds_or_path) if path else creds_or_path
|
| 854 |
|
| 855 |
if path and self._is_token_expired(creds):
|
| 856 |
creds = await self._refresh_token(path, creds)
|
| 857 |
+
|
| 858 |
# Prefer locally stored metadata
|
| 859 |
if creds.get("_proxy_metadata", {}).get("email"):
|
| 860 |
if path:
|
|
|
|
| 868 |
response = await client.get(self.USER_INFO_URI, headers=headers)
|
| 869 |
response.raise_for_status()
|
| 870 |
user_info = response.json()
|
| 871 |
+
|
| 872 |
# Save the retrieved info for future use
|
| 873 |
creds["_proxy_metadata"] = {
|
| 874 |
"email": user_info.get("email"),
|
| 875 |
+
"last_check_timestamp": time.time(),
|
| 876 |
}
|
| 877 |
if path:
|
| 878 |
await self._save_credentials(path, creds)
|
src/rotator_library/providers/iflow_auth_base.py
CHANGED
|
@@ -21,9 +21,10 @@ from rich.console import Console
|
|
| 21 |
from rich.panel import Panel
|
| 22 |
from rich.prompt import Prompt
|
| 23 |
from rich.text import Text
|
|
|
|
| 24 |
from ..utils.headless_detection import is_headless_environment
|
| 25 |
|
| 26 |
-
lib_logger = logging.getLogger(
|
| 27 |
|
| 28 |
IFLOW_OAUTH_AUTHORIZE_ENDPOINT = "https://iflow.cn/oauth"
|
| 29 |
IFLOW_OAUTH_TOKEN_ENDPOINT = "https://iflow.cn/oauth/token"
|
|
@@ -61,7 +62,7 @@ class OAuthCallbackServer:
|
|
| 61 |
"""Checks if the callback port is available."""
|
| 62 |
try:
|
| 63 |
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
| 64 |
-
sock.bind((
|
| 65 |
sock.close()
|
| 66 |
return True
|
| 67 |
except OSError:
|
|
@@ -76,12 +77,12 @@ class OAuthCallbackServer:
|
|
| 76 |
self.result_future = asyncio.Future()
|
| 77 |
|
| 78 |
# Setup route
|
| 79 |
-
self.app.router.add_get(
|
| 80 |
|
| 81 |
# Start server
|
| 82 |
self.runner = web.AppRunner(self.app)
|
| 83 |
await self.runner.setup()
|
| 84 |
-
self.site = web.TCPSite(self.runner,
|
| 85 |
await self.site.start()
|
| 86 |
|
| 87 |
lib_logger.debug(f"iFlow OAuth callback server started on port {self.port}")
|
|
@@ -99,34 +100,46 @@ class OAuthCallbackServer:
|
|
| 99 |
query = request.query
|
| 100 |
|
| 101 |
# Check for error parameter
|
| 102 |
-
if
|
| 103 |
-
error = query.get(
|
| 104 |
lib_logger.error(f"iFlow OAuth callback received error: {error}")
|
| 105 |
if not self.result_future.done():
|
| 106 |
self.result_future.set_exception(ValueError(f"OAuth error: {error}"))
|
| 107 |
-
return web.Response(
|
|
|
|
|
|
|
| 108 |
|
| 109 |
# Check for authorization code
|
| 110 |
-
code = query.get(
|
| 111 |
if not code:
|
| 112 |
lib_logger.error("iFlow OAuth callback missing authorization code")
|
| 113 |
if not self.result_future.done():
|
| 114 |
-
self.result_future.set_exception(
|
| 115 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 116 |
|
| 117 |
# Validate state parameter
|
| 118 |
-
state = query.get(
|
| 119 |
if state != self.expected_state:
|
| 120 |
-
lib_logger.error(
|
|
|
|
|
|
|
| 121 |
if not self.result_future.done():
|
| 122 |
self.result_future.set_exception(ValueError("State parameter mismatch"))
|
| 123 |
-
return web.Response(
|
|
|
|
|
|
|
| 124 |
|
| 125 |
# Success - set result and redirect to success page
|
| 126 |
if not self.result_future.done():
|
| 127 |
self.result_future.set_result(code)
|
| 128 |
|
| 129 |
-
return web.Response(
|
|
|
|
|
|
|
| 130 |
|
| 131 |
async def wait_for_callback(self, timeout: float = 300.0) -> str:
|
| 132 |
"""Waits for the OAuth callback and returns the authorization code."""
|
|
@@ -146,38 +159,50 @@ class IFlowAuthBase:
|
|
| 146 |
def __init__(self):
|
| 147 |
self._credentials_cache: Dict[str, Dict[str, Any]] = {}
|
| 148 |
self._refresh_locks: Dict[str, asyncio.Lock] = {}
|
| 149 |
-
self._locks_lock =
|
|
|
|
|
|
|
| 150 |
# [BACKOFF TRACKING] Track consecutive failures per credential
|
| 151 |
-
self._refresh_failures: Dict[
|
| 152 |
-
|
| 153 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 154 |
# [QUEUE SYSTEM] Sequential refresh processing
|
| 155 |
self._refresh_queue: asyncio.Queue = asyncio.Queue()
|
| 156 |
self._queued_credentials: set = set() # Track credentials already in queue
|
| 157 |
-
self._unavailable_credentials: set =
|
|
|
|
|
|
|
| 158 |
self._queue_tracking_lock = asyncio.Lock() # Protects queue sets
|
| 159 |
-
self._queue_processor_task: Optional[asyncio.Task] =
|
|
|
|
|
|
|
| 160 |
|
| 161 |
def _parse_env_credential_path(self, path: str) -> Optional[str]:
|
| 162 |
"""
|
| 163 |
Parse a virtual env:// path and return the credential index.
|
| 164 |
-
|
| 165 |
Supported formats:
|
| 166 |
- "env://provider/0" - Legacy single credential (no index in env var names)
|
| 167 |
- "env://provider/1" - First numbered credential (IFLOW_1_ACCESS_TOKEN)
|
| 168 |
-
|
| 169 |
Returns:
|
| 170 |
The credential index as string, or None if path is not an env:// path
|
| 171 |
"""
|
| 172 |
if not path.startswith("env://"):
|
| 173 |
return None
|
| 174 |
-
|
| 175 |
parts = path[6:].split("/")
|
| 176 |
if len(parts) >= 2:
|
| 177 |
return parts[1]
|
| 178 |
return "0"
|
| 179 |
|
| 180 |
-
def _load_from_env(
|
|
|
|
|
|
|
| 181 |
"""
|
| 182 |
Load OAuth credentials from environment variables for stateless deployments.
|
| 183 |
|
|
@@ -204,7 +229,7 @@ class IFlowAuthBase:
|
|
| 204 |
else:
|
| 205 |
prefix = "IFLOW"
|
| 206 |
default_email = "env-user"
|
| 207 |
-
|
| 208 |
access_token = os.getenv(f"{prefix}_ACCESS_TOKEN")
|
| 209 |
refresh_token = os.getenv(f"{prefix}_REFRESH_TOKEN")
|
| 210 |
api_key = os.getenv(f"{prefix}_API_KEY")
|
|
@@ -213,7 +238,9 @@ class IFlowAuthBase:
|
|
| 213 |
if not (access_token and refresh_token and api_key):
|
| 214 |
return None
|
| 215 |
|
| 216 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 217 |
|
| 218 |
# Parse expiry_date as string (ISO 8601 format)
|
| 219 |
expiry_str = os.getenv(f"{prefix}_EXPIRY_DATE", "")
|
|
@@ -230,8 +257,8 @@ class IFlowAuthBase:
|
|
| 230 |
"email": os.getenv(f"{prefix}_EMAIL", default_email),
|
| 231 |
"last_check_timestamp": time.time(),
|
| 232 |
"loaded_from_env": True,
|
| 233 |
-
"env_credential_index": credential_index or "0"
|
| 234 |
-
}
|
| 235 |
}
|
| 236 |
|
| 237 |
return creds
|
|
@@ -240,7 +267,7 @@ class IFlowAuthBase:
|
|
| 240 |
"""Reads credentials from file and populates the cache. No locking."""
|
| 241 |
try:
|
| 242 |
lib_logger.debug(f"Reading iFlow credentials from file: {path}")
|
| 243 |
-
with open(path,
|
| 244 |
creds = json.load(f)
|
| 245 |
self._credentials_cache[path] = creds
|
| 246 |
return creds
|
|
@@ -264,11 +291,15 @@ class IFlowAuthBase:
|
|
| 264 |
if credential_index is not None:
|
| 265 |
env_creds = self._load_from_env(credential_index)
|
| 266 |
if env_creds:
|
| 267 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 268 |
self._credentials_cache[path] = env_creds
|
| 269 |
return env_creds
|
| 270 |
else:
|
| 271 |
-
raise IOError(
|
|
|
|
|
|
|
| 272 |
|
| 273 |
# For file paths, try loading from legacy env vars first
|
| 274 |
env_creds = self._load_from_env()
|
|
@@ -298,10 +329,12 @@ class IFlowAuthBase:
|
|
| 298 |
tmp_path = None
|
| 299 |
try:
|
| 300 |
# Create temp file in same directory as target (ensures same filesystem)
|
| 301 |
-
tmp_fd, tmp_path = tempfile.mkstemp(
|
|
|
|
|
|
|
| 302 |
|
| 303 |
# Write JSON to temp file
|
| 304 |
-
with os.fdopen(tmp_fd,
|
| 305 |
json.dump(creds, f, indent=2)
|
| 306 |
tmp_fd = None # fdopen closes the fd
|
| 307 |
|
|
@@ -318,10 +351,14 @@ class IFlowAuthBase:
|
|
| 318 |
|
| 319 |
# Update cache AFTER successful file write
|
| 320 |
self._credentials_cache[path] = creds
|
| 321 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 322 |
|
| 323 |
except Exception as e:
|
| 324 |
-
lib_logger.error(
|
|
|
|
|
|
|
| 325 |
# Clean up temp file if it still exists
|
| 326 |
if tmp_fd is not None:
|
| 327 |
try:
|
|
@@ -345,7 +382,8 @@ class IFlowAuthBase:
|
|
| 345 |
try:
|
| 346 |
# Parse ISO 8601 format (e.g., "2025-01-17T12:00:00Z")
|
| 347 |
from datetime import datetime
|
| 348 |
-
|
|
|
|
| 349 |
expiry_timestamp = expiry_dt.timestamp()
|
| 350 |
except (ValueError, AttributeError):
|
| 351 |
# Fallback: treat as numeric timestamp
|
|
@@ -389,7 +427,9 @@ class IFlowAuthBase:
|
|
| 389 |
|
| 390 |
return {"api_key": api_key, "email": email}
|
| 391 |
|
| 392 |
-
async def _exchange_code_for_tokens(
|
|
|
|
|
|
|
| 393 |
"""
|
| 394 |
Exchanges authorization code for access and refresh tokens.
|
| 395 |
Uses Basic Auth with client credentials.
|
|
@@ -401,7 +441,7 @@ class IFlowAuthBase:
|
|
| 401 |
headers = {
|
| 402 |
"Content-Type": "application/x-www-form-urlencoded",
|
| 403 |
"Accept": "application/json",
|
| 404 |
-
"Authorization": f"Basic {basic_auth}"
|
| 405 |
}
|
| 406 |
|
| 407 |
data = {
|
|
@@ -409,16 +449,22 @@ class IFlowAuthBase:
|
|
| 409 |
"code": code,
|
| 410 |
"redirect_uri": redirect_uri,
|
| 411 |
"client_id": IFLOW_CLIENT_ID,
|
| 412 |
-
"client_secret": IFLOW_CLIENT_SECRET
|
| 413 |
}
|
| 414 |
|
| 415 |
async with httpx.AsyncClient(timeout=30.0) as client:
|
| 416 |
-
response = await client.post(
|
|
|
|
|
|
|
| 417 |
|
| 418 |
if response.status_code != 200:
|
| 419 |
error_text = response.text
|
| 420 |
-
lib_logger.error(
|
| 421 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 422 |
|
| 423 |
token_data = response.json()
|
| 424 |
|
|
@@ -436,7 +482,10 @@ class IFlowAuthBase:
|
|
| 436 |
|
| 437 |
# Calculate expiry date
|
| 438 |
from datetime import datetime, timedelta
|
| 439 |
-
|
|
|
|
|
|
|
|
|
|
| 440 |
|
| 441 |
return {
|
| 442 |
"access_token": access_token,
|
|
@@ -445,7 +494,7 @@ class IFlowAuthBase:
|
|
| 445 |
"email": user_info["email"],
|
| 446 |
"expiry_date": expiry_date,
|
| 447 |
"token_type": token_type,
|
| 448 |
-
"scope": scope
|
| 449 |
}
|
| 450 |
|
| 451 |
async def _refresh_token(self, path: str, force: bool = False) -> Dict[str, Any]:
|
|
@@ -482,20 +531,22 @@ class IFlowAuthBase:
|
|
| 482 |
headers = {
|
| 483 |
"Content-Type": "application/x-www-form-urlencoded",
|
| 484 |
"Accept": "application/json",
|
| 485 |
-
"Authorization": f"Basic {basic_auth}"
|
| 486 |
}
|
| 487 |
|
| 488 |
data = {
|
| 489 |
"grant_type": "refresh_token",
|
| 490 |
"refresh_token": refresh_token,
|
| 491 |
"client_id": IFLOW_CLIENT_ID,
|
| 492 |
-
"client_secret": IFLOW_CLIENT_SECRET
|
| 493 |
}
|
| 494 |
|
| 495 |
async with httpx.AsyncClient(timeout=30.0) as client:
|
| 496 |
for attempt in range(max_retries):
|
| 497 |
try:
|
| 498 |
-
response = await client.post(
|
|
|
|
|
|
|
| 499 |
response.raise_for_status()
|
| 500 |
new_token_data = response.json()
|
| 501 |
break # Success
|
|
@@ -505,7 +556,9 @@ class IFlowAuthBase:
|
|
| 505 |
status_code = e.response.status_code
|
| 506 |
error_body = e.response.text
|
| 507 |
|
| 508 |
-
lib_logger.error(
|
|
|
|
|
|
|
| 509 |
|
| 510 |
# [STATUS CODE HANDLING]
|
| 511 |
# [INVALID GRANT HANDLING] Handle 401/403 by triggering re-authentication
|
|
@@ -519,7 +572,9 @@ class IFlowAuthBase:
|
|
| 519 |
|
| 520 |
elif status_code == 429:
|
| 521 |
retry_after = int(e.response.headers.get("Retry-After", 60))
|
| 522 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 523 |
if attempt < max_retries - 1:
|
| 524 |
await asyncio.sleep(retry_after)
|
| 525 |
continue
|
|
@@ -527,8 +582,10 @@ class IFlowAuthBase:
|
|
| 527 |
|
| 528 |
elif 500 <= status_code < 600:
|
| 529 |
if attempt < max_retries - 1:
|
| 530 |
-
wait_time = 2
|
| 531 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 532 |
await asyncio.sleep(wait_time)
|
| 533 |
continue
|
| 534 |
raise
|
|
@@ -539,15 +596,19 @@ class IFlowAuthBase:
|
|
| 539 |
except (httpx.RequestError, httpx.TimeoutException) as e:
|
| 540 |
last_error = e
|
| 541 |
if attempt < max_retries - 1:
|
| 542 |
-
wait_time = 2
|
| 543 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 544 |
await asyncio.sleep(wait_time)
|
| 545 |
continue
|
| 546 |
raise
|
| 547 |
|
| 548 |
# [INVALID GRANT RE-AUTH] Trigger OAuth flow if refresh token is invalid
|
| 549 |
if needs_reauth:
|
| 550 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 551 |
try:
|
| 552 |
# Call initialize_token to trigger OAuth flow
|
| 553 |
new_creds = await self.initialize_token(path)
|
|
@@ -556,20 +617,34 @@ class IFlowAuthBase:
|
|
| 556 |
self._next_refresh_after.pop(path, None)
|
| 557 |
return new_creds
|
| 558 |
except Exception as reauth_error:
|
| 559 |
-
lib_logger.error(
|
|
|
|
|
|
|
| 560 |
# [BACKOFF TRACKING] Increment failure count and set backoff timer
|
| 561 |
-
self._refresh_failures[path] =
|
| 562 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 563 |
self._next_refresh_after[path] = time.time() + backoff_seconds
|
| 564 |
-
lib_logger.debug(
|
| 565 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 566 |
|
| 567 |
if new_token_data is None:
|
| 568 |
# [BACKOFF TRACKING] Increment failure count and set backoff timer
|
| 569 |
self._refresh_failures[path] = self._refresh_failures.get(path, 0) + 1
|
| 570 |
-
backoff_seconds = min(
|
|
|
|
|
|
|
| 571 |
self._next_refresh_after[path] = time.time() + backoff_seconds
|
| 572 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 573 |
raise last_error or Exception("Token refresh failed after all retries")
|
| 574 |
|
| 575 |
# Update tokens
|
|
@@ -578,14 +653,23 @@ class IFlowAuthBase:
|
|
| 578 |
raise ValueError("Missing access_token in refresh response")
|
| 579 |
|
| 580 |
creds_from_file["access_token"] = access_token
|
| 581 |
-
creds_from_file["refresh_token"] = new_token_data.get(
|
|
|
|
|
|
|
| 582 |
|
| 583 |
expires_in = new_token_data.get("expires_in", 3600)
|
| 584 |
from datetime import datetime, timedelta
|
| 585 |
-
creds_from_file["expiry_date"] = (datetime.utcnow() + timedelta(seconds=expires_in)).isoformat() + 'Z'
|
| 586 |
|
| 587 |
-
creds_from_file["
|
| 588 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 589 |
|
| 590 |
# CRITICAL: Re-fetch user info to get potentially updated API key
|
| 591 |
try:
|
|
@@ -595,7 +679,9 @@ class IFlowAuthBase:
|
|
| 595 |
if user_info.get("email"):
|
| 596 |
creds_from_file["email"] = user_info["email"]
|
| 597 |
except Exception as e:
|
| 598 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 599 |
|
| 600 |
# Ensure _proxy_metadata exists and update timestamp
|
| 601 |
if "_proxy_metadata" not in creds_from_file:
|
|
@@ -604,16 +690,22 @@ class IFlowAuthBase:
|
|
| 604 |
|
| 605 |
# [VALIDATION] Verify required fields exist after refresh
|
| 606 |
required_fields = ["access_token", "refresh_token", "api_key"]
|
| 607 |
-
missing_fields = [
|
|
|
|
|
|
|
| 608 |
if missing_fields:
|
| 609 |
-
raise ValueError(
|
|
|
|
|
|
|
| 610 |
|
| 611 |
# [BACKOFF TRACKING] Clear failure count on successful refresh
|
| 612 |
self._refresh_failures.pop(path, None)
|
| 613 |
self._next_refresh_after.pop(path, None)
|
| 614 |
|
| 615 |
await self._save_credentials(path, creds_from_file)
|
| 616 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 617 |
return creds_from_file
|
| 618 |
|
| 619 |
async def get_api_details(self, credential_identifier: str) -> Tuple[str, str]:
|
|
@@ -628,7 +720,9 @@ class IFlowAuthBase:
|
|
| 628 |
# Detect credential type
|
| 629 |
if os.path.isfile(credential_identifier):
|
| 630 |
# OAuth credential: file path to JSON
|
| 631 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 632 |
creds = await self._load_credentials(credential_identifier)
|
| 633 |
|
| 634 |
# Check if token needs refresh
|
|
@@ -653,7 +747,7 @@ class IFlowAuthBase:
|
|
| 653 |
"""
|
| 654 |
# Check if it's an env:// virtual path (OAuth credentials from environment)
|
| 655 |
is_env_path = credential_identifier.startswith("env://")
|
| 656 |
-
|
| 657 |
# Only refresh if it's an OAuth credential (file path or env:// path)
|
| 658 |
if not is_env_path and not os.path.isfile(credential_identifier):
|
| 659 |
return # Direct API key, no refresh needed
|
|
@@ -661,7 +755,9 @@ class IFlowAuthBase:
|
|
| 661 |
creds = await self._load_credentials(credential_identifier)
|
| 662 |
if self._is_token_expired(creds):
|
| 663 |
# Queue for refresh with needs_reauth=False (automated refresh)
|
| 664 |
-
await self._queue_refresh(
|
|
|
|
|
|
|
| 665 |
|
| 666 |
async def _get_lock(self, path: str) -> asyncio.Lock:
|
| 667 |
"""Gets or creates a lock for the given credential path."""
|
|
@@ -678,11 +774,15 @@ class IFlowAuthBase:
|
|
| 678 |
async def _ensure_queue_processor_running(self):
|
| 679 |
"""Lazily starts the queue processor if not already running."""
|
| 680 |
if self._queue_processor_task is None or self._queue_processor_task.done():
|
| 681 |
-
self._queue_processor_task = asyncio.create_task(
|
|
|
|
|
|
|
| 682 |
|
| 683 |
-
async def _queue_refresh(
|
|
|
|
|
|
|
| 684 |
"""Add a credential to the refresh queue if not already queued.
|
| 685 |
-
|
| 686 |
Args:
|
| 687 |
path: Credential file path
|
| 688 |
force: Force refresh even if not expired
|
|
@@ -697,9 +797,11 @@ class IFlowAuthBase:
|
|
| 697 |
if now < backoff_until:
|
| 698 |
# Credential is in backoff for automated refresh, do not queue
|
| 699 |
remaining = int(backoff_until - now)
|
| 700 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 701 |
return
|
| 702 |
-
|
| 703 |
async with self._queue_tracking_lock:
|
| 704 |
if path not in self._queued_credentials:
|
| 705 |
self._queued_credentials.add(path)
|
|
@@ -715,14 +817,13 @@ class IFlowAuthBase:
|
|
| 715 |
# Wait for an item with timeout to allow graceful shutdown
|
| 716 |
try:
|
| 717 |
path, force, needs_reauth = await asyncio.wait_for(
|
| 718 |
-
self._refresh_queue.get(),
|
| 719 |
-
timeout=60.0
|
| 720 |
)
|
| 721 |
except asyncio.TimeoutError:
|
| 722 |
# No items for 60s, exit to save resources
|
| 723 |
self._queue_processor_task = None
|
| 724 |
return
|
| 725 |
-
|
| 726 |
try:
|
| 727 |
# Perform the actual refresh (still using per-credential lock)
|
| 728 |
async with await self._get_lock(path):
|
|
@@ -733,16 +834,16 @@ class IFlowAuthBase:
|
|
| 733 |
async with self._queue_tracking_lock:
|
| 734 |
self._unavailable_credentials.discard(path)
|
| 735 |
continue
|
| 736 |
-
|
| 737 |
# Perform refresh
|
| 738 |
if not creds:
|
| 739 |
creds = await self._load_credentials(path)
|
| 740 |
await self._refresh_token(path, force=force)
|
| 741 |
-
|
| 742 |
# SUCCESS: Mark as available again
|
| 743 |
async with self._queue_tracking_lock:
|
| 744 |
self._unavailable_credentials.discard(path)
|
| 745 |
-
|
| 746 |
finally:
|
| 747 |
# Remove from queued set
|
| 748 |
async with self._queue_tracking_lock:
|
|
@@ -757,7 +858,9 @@ class IFlowAuthBase:
|
|
| 757 |
async with self._queue_tracking_lock:
|
| 758 |
self._unavailable_credentials.discard(path)
|
| 759 |
|
| 760 |
-
async def initialize_token(
|
|
|
|
|
|
|
| 761 |
"""
|
| 762 |
Initiates OAuth authorization code flow if tokens are missing or invalid.
|
| 763 |
Uses local callback server to receive authorization code.
|
|
@@ -766,14 +869,18 @@ class IFlowAuthBase:
|
|
| 766 |
|
| 767 |
# Get display name from metadata if available, otherwise derive from path
|
| 768 |
if isinstance(creds_or_path, dict):
|
| 769 |
-
display_name = creds_or_path.get("_proxy_metadata", {}).get(
|
|
|
|
|
|
|
| 770 |
else:
|
| 771 |
display_name = Path(path).name if path else "in-memory object"
|
| 772 |
|
| 773 |
lib_logger.debug(f"Initializing iFlow token for '{display_name}'...")
|
| 774 |
|
| 775 |
try:
|
| 776 |
-
creds =
|
|
|
|
|
|
|
| 777 |
|
| 778 |
reason = ""
|
| 779 |
if not creds.get("refresh_token"):
|
|
@@ -787,11 +894,15 @@ class IFlowAuthBase:
|
|
| 787 |
try:
|
| 788 |
return await self._refresh_token(path)
|
| 789 |
except Exception as e:
|
| 790 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 791 |
|
| 792 |
# Interactive OAuth flow
|
| 793 |
-
lib_logger.warning(
|
| 794 |
-
|
|
|
|
|
|
|
| 795 |
# [HEADLESS DETECTION] Check if running in headless environment
|
| 796 |
is_headless = is_headless_environment()
|
| 797 |
|
|
@@ -805,7 +916,7 @@ class IFlowAuthBase:
|
|
| 805 |
"type": "phone",
|
| 806 |
"redirect": redirect_uri,
|
| 807 |
"state": state,
|
| 808 |
-
"client_id": IFLOW_CLIENT_ID
|
| 809 |
}
|
| 810 |
auth_url = f"{IFLOW_OAUTH_AUTHORIZE_ENDPOINT}?{urlencode(auth_params)}"
|
| 811 |
|
|
@@ -829,49 +940,86 @@ class IFlowAuthBase:
|
|
| 829 |
"2. [bold]Authorize the application[/bold] to access your account.\n"
|
| 830 |
"3. You will be automatically redirected after authorization."
|
| 831 |
)
|
| 832 |
-
|
| 833 |
-
console.print(
|
| 834 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 835 |
|
| 836 |
# [HEADLESS SUPPORT] Only attempt browser open if NOT headless
|
| 837 |
if not is_headless:
|
| 838 |
try:
|
| 839 |
webbrowser.open(auth_url)
|
| 840 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 841 |
except Exception as e:
|
| 842 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 843 |
|
| 844 |
# Wait for callback
|
| 845 |
-
with console.status(
|
|
|
|
|
|
|
|
|
|
| 846 |
code = await callback_server.wait_for_callback(timeout=300.0)
|
| 847 |
|
| 848 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 849 |
|
| 850 |
# Exchange code for tokens and API key
|
| 851 |
-
token_data = await self._exchange_code_for_tokens(
|
|
|
|
|
|
|
| 852 |
|
| 853 |
# Update credentials
|
| 854 |
-
creds.update(
|
| 855 |
-
|
| 856 |
-
|
| 857 |
-
|
| 858 |
-
|
| 859 |
-
|
| 860 |
-
|
| 861 |
-
|
| 862 |
-
|
|
|
|
|
|
|
| 863 |
|
| 864 |
# Create metadata object
|
| 865 |
if not creds.get("_proxy_metadata"):
|
| 866 |
creds["_proxy_metadata"] = {
|
| 867 |
"email": token_data["email"],
|
| 868 |
-
"last_check_timestamp": time.time()
|
| 869 |
}
|
| 870 |
|
| 871 |
if path:
|
| 872 |
await self._save_credentials(path, creds)
|
| 873 |
|
| 874 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 875 |
return creds
|
| 876 |
|
| 877 |
finally:
|
|
@@ -898,11 +1046,15 @@ class IFlowAuthBase:
|
|
| 898 |
|
| 899 |
return {"Authorization": f"Bearer {api_key}"}
|
| 900 |
|
| 901 |
-
async def get_user_info(
|
|
|
|
|
|
|
| 902 |
"""Retrieves user info from the _proxy_metadata in the credential file."""
|
| 903 |
try:
|
| 904 |
path = creds_or_path if isinstance(creds_or_path, str) else None
|
| 905 |
-
creds =
|
|
|
|
|
|
|
| 906 |
|
| 907 |
# Ensure the token is valid
|
| 908 |
if path:
|
|
@@ -912,7 +1064,9 @@ class IFlowAuthBase:
|
|
| 912 |
email = creds.get("email") or creds.get("_proxy_metadata", {}).get("email")
|
| 913 |
|
| 914 |
if not email:
|
| 915 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 916 |
|
| 917 |
# Update timestamp on check
|
| 918 |
if path and "_proxy_metadata" in creds:
|
|
|
|
| 21 |
from rich.panel import Panel
|
| 22 |
from rich.prompt import Prompt
|
| 23 |
from rich.text import Text
|
| 24 |
+
from rich.markup import escape as rich_escape
|
| 25 |
from ..utils.headless_detection import is_headless_environment
|
| 26 |
|
| 27 |
+
lib_logger = logging.getLogger("rotator_library")
|
| 28 |
|
| 29 |
IFLOW_OAUTH_AUTHORIZE_ENDPOINT = "https://iflow.cn/oauth"
|
| 30 |
IFLOW_OAUTH_TOKEN_ENDPOINT = "https://iflow.cn/oauth/token"
|
|
|
|
| 62 |
"""Checks if the callback port is available."""
|
| 63 |
try:
|
| 64 |
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
| 65 |
+
sock.bind(("", self.port))
|
| 66 |
sock.close()
|
| 67 |
return True
|
| 68 |
except OSError:
|
|
|
|
| 77 |
self.result_future = asyncio.Future()
|
| 78 |
|
| 79 |
# Setup route
|
| 80 |
+
self.app.router.add_get("/oauth2callback", self._handle_callback)
|
| 81 |
|
| 82 |
# Start server
|
| 83 |
self.runner = web.AppRunner(self.app)
|
| 84 |
await self.runner.setup()
|
| 85 |
+
self.site = web.TCPSite(self.runner, "localhost", self.port)
|
| 86 |
await self.site.start()
|
| 87 |
|
| 88 |
lib_logger.debug(f"iFlow OAuth callback server started on port {self.port}")
|
|
|
|
| 100 |
query = request.query
|
| 101 |
|
| 102 |
# Check for error parameter
|
| 103 |
+
if "error" in query:
|
| 104 |
+
error = query.get("error", "unknown_error")
|
| 105 |
lib_logger.error(f"iFlow OAuth callback received error: {error}")
|
| 106 |
if not self.result_future.done():
|
| 107 |
self.result_future.set_exception(ValueError(f"OAuth error: {error}"))
|
| 108 |
+
return web.Response(
|
| 109 |
+
status=302, headers={"Location": IFLOW_ERROR_REDIRECT_URL}
|
| 110 |
+
)
|
| 111 |
|
| 112 |
# Check for authorization code
|
| 113 |
+
code = query.get("code")
|
| 114 |
if not code:
|
| 115 |
lib_logger.error("iFlow OAuth callback missing authorization code")
|
| 116 |
if not self.result_future.done():
|
| 117 |
+
self.result_future.set_exception(
|
| 118 |
+
ValueError("Missing authorization code")
|
| 119 |
+
)
|
| 120 |
+
return web.Response(
|
| 121 |
+
status=302, headers={"Location": IFLOW_ERROR_REDIRECT_URL}
|
| 122 |
+
)
|
| 123 |
|
| 124 |
# Validate state parameter
|
| 125 |
+
state = query.get("state", "")
|
| 126 |
if state != self.expected_state:
|
| 127 |
+
lib_logger.error(
|
| 128 |
+
f"iFlow OAuth state mismatch. Expected: {self.expected_state}, Got: {state}"
|
| 129 |
+
)
|
| 130 |
if not self.result_future.done():
|
| 131 |
self.result_future.set_exception(ValueError("State parameter mismatch"))
|
| 132 |
+
return web.Response(
|
| 133 |
+
status=302, headers={"Location": IFLOW_ERROR_REDIRECT_URL}
|
| 134 |
+
)
|
| 135 |
|
| 136 |
# Success - set result and redirect to success page
|
| 137 |
if not self.result_future.done():
|
| 138 |
self.result_future.set_result(code)
|
| 139 |
|
| 140 |
+
return web.Response(
|
| 141 |
+
status=302, headers={"Location": IFLOW_SUCCESS_REDIRECT_URL}
|
| 142 |
+
)
|
| 143 |
|
| 144 |
async def wait_for_callback(self, timeout: float = 300.0) -> str:
|
| 145 |
"""Waits for the OAuth callback and returns the authorization code."""
|
|
|
|
| 159 |
def __init__(self):
|
| 160 |
self._credentials_cache: Dict[str, Dict[str, Any]] = {}
|
| 161 |
self._refresh_locks: Dict[str, asyncio.Lock] = {}
|
| 162 |
+
self._locks_lock = (
|
| 163 |
+
asyncio.Lock()
|
| 164 |
+
) # Protects the locks dict from race conditions
|
| 165 |
# [BACKOFF TRACKING] Track consecutive failures per credential
|
| 166 |
+
self._refresh_failures: Dict[
|
| 167 |
+
str, int
|
| 168 |
+
] = {} # Track consecutive failures per credential
|
| 169 |
+
self._next_refresh_after: Dict[
|
| 170 |
+
str, float
|
| 171 |
+
] = {} # Track backoff timers (Unix timestamp)
|
| 172 |
+
|
| 173 |
# [QUEUE SYSTEM] Sequential refresh processing
|
| 174 |
self._refresh_queue: asyncio.Queue = asyncio.Queue()
|
| 175 |
self._queued_credentials: set = set() # Track credentials already in queue
|
| 176 |
+
self._unavailable_credentials: set = (
|
| 177 |
+
set()
|
| 178 |
+
) # Mark credentials unavailable during re-auth
|
| 179 |
self._queue_tracking_lock = asyncio.Lock() # Protects queue sets
|
| 180 |
+
self._queue_processor_task: Optional[asyncio.Task] = (
|
| 181 |
+
None # Background worker task
|
| 182 |
+
)
|
| 183 |
|
| 184 |
def _parse_env_credential_path(self, path: str) -> Optional[str]:
|
| 185 |
"""
|
| 186 |
Parse a virtual env:// path and return the credential index.
|
| 187 |
+
|
| 188 |
Supported formats:
|
| 189 |
- "env://provider/0" - Legacy single credential (no index in env var names)
|
| 190 |
- "env://provider/1" - First numbered credential (IFLOW_1_ACCESS_TOKEN)
|
| 191 |
+
|
| 192 |
Returns:
|
| 193 |
The credential index as string, or None if path is not an env:// path
|
| 194 |
"""
|
| 195 |
if not path.startswith("env://"):
|
| 196 |
return None
|
| 197 |
+
|
| 198 |
parts = path[6:].split("/")
|
| 199 |
if len(parts) >= 2:
|
| 200 |
return parts[1]
|
| 201 |
return "0"
|
| 202 |
|
| 203 |
+
def _load_from_env(
|
| 204 |
+
self, credential_index: Optional[str] = None
|
| 205 |
+
) -> Optional[Dict[str, Any]]:
|
| 206 |
"""
|
| 207 |
Load OAuth credentials from environment variables for stateless deployments.
|
| 208 |
|
|
|
|
| 229 |
else:
|
| 230 |
prefix = "IFLOW"
|
| 231 |
default_email = "env-user"
|
| 232 |
+
|
| 233 |
access_token = os.getenv(f"{prefix}_ACCESS_TOKEN")
|
| 234 |
refresh_token = os.getenv(f"{prefix}_REFRESH_TOKEN")
|
| 235 |
api_key = os.getenv(f"{prefix}_API_KEY")
|
|
|
|
| 238 |
if not (access_token and refresh_token and api_key):
|
| 239 |
return None
|
| 240 |
|
| 241 |
+
lib_logger.debug(
|
| 242 |
+
f"Loading iFlow credentials from environment variables (prefix: {prefix})"
|
| 243 |
+
)
|
| 244 |
|
| 245 |
# Parse expiry_date as string (ISO 8601 format)
|
| 246 |
expiry_str = os.getenv(f"{prefix}_EXPIRY_DATE", "")
|
|
|
|
| 257 |
"email": os.getenv(f"{prefix}_EMAIL", default_email),
|
| 258 |
"last_check_timestamp": time.time(),
|
| 259 |
"loaded_from_env": True,
|
| 260 |
+
"env_credential_index": credential_index or "0",
|
| 261 |
+
},
|
| 262 |
}
|
| 263 |
|
| 264 |
return creds
|
|
|
|
| 267 |
"""Reads credentials from file and populates the cache. No locking."""
|
| 268 |
try:
|
| 269 |
lib_logger.debug(f"Reading iFlow credentials from file: {path}")
|
| 270 |
+
with open(path, "r") as f:
|
| 271 |
creds = json.load(f)
|
| 272 |
self._credentials_cache[path] = creds
|
| 273 |
return creds
|
|
|
|
| 291 |
if credential_index is not None:
|
| 292 |
env_creds = self._load_from_env(credential_index)
|
| 293 |
if env_creds:
|
| 294 |
+
lib_logger.info(
|
| 295 |
+
f"Using iFlow credentials from environment variables (index: {credential_index})"
|
| 296 |
+
)
|
| 297 |
self._credentials_cache[path] = env_creds
|
| 298 |
return env_creds
|
| 299 |
else:
|
| 300 |
+
raise IOError(
|
| 301 |
+
f"Environment variables for iFlow credential index {credential_index} not found"
|
| 302 |
+
)
|
| 303 |
|
| 304 |
# For file paths, try loading from legacy env vars first
|
| 305 |
env_creds = self._load_from_env()
|
|
|
|
| 329 |
tmp_path = None
|
| 330 |
try:
|
| 331 |
# Create temp file in same directory as target (ensures same filesystem)
|
| 332 |
+
tmp_fd, tmp_path = tempfile.mkstemp(
|
| 333 |
+
dir=parent_dir, prefix=".tmp_", suffix=".json", text=True
|
| 334 |
+
)
|
| 335 |
|
| 336 |
# Write JSON to temp file
|
| 337 |
+
with os.fdopen(tmp_fd, "w") as f:
|
| 338 |
json.dump(creds, f, indent=2)
|
| 339 |
tmp_fd = None # fdopen closes the fd
|
| 340 |
|
|
|
|
| 351 |
|
| 352 |
# Update cache AFTER successful file write
|
| 353 |
self._credentials_cache[path] = creds
|
| 354 |
+
lib_logger.debug(
|
| 355 |
+
f"Saved updated iFlow OAuth credentials to '{path}' (atomic write)."
|
| 356 |
+
)
|
| 357 |
|
| 358 |
except Exception as e:
|
| 359 |
+
lib_logger.error(
|
| 360 |
+
f"Failed to save updated iFlow OAuth credentials to '{path}': {e}"
|
| 361 |
+
)
|
| 362 |
# Clean up temp file if it still exists
|
| 363 |
if tmp_fd is not None:
|
| 364 |
try:
|
|
|
|
| 382 |
try:
|
| 383 |
# Parse ISO 8601 format (e.g., "2025-01-17T12:00:00Z")
|
| 384 |
from datetime import datetime
|
| 385 |
+
|
| 386 |
+
expiry_dt = datetime.fromisoformat(expiry_str.replace("Z", "+00:00"))
|
| 387 |
expiry_timestamp = expiry_dt.timestamp()
|
| 388 |
except (ValueError, AttributeError):
|
| 389 |
# Fallback: treat as numeric timestamp
|
|
|
|
| 427 |
|
| 428 |
return {"api_key": api_key, "email": email}
|
| 429 |
|
| 430 |
+
async def _exchange_code_for_tokens(
|
| 431 |
+
self, code: str, redirect_uri: str
|
| 432 |
+
) -> Dict[str, Any]:
|
| 433 |
"""
|
| 434 |
Exchanges authorization code for access and refresh tokens.
|
| 435 |
Uses Basic Auth with client credentials.
|
|
|
|
| 441 |
headers = {
|
| 442 |
"Content-Type": "application/x-www-form-urlencoded",
|
| 443 |
"Accept": "application/json",
|
| 444 |
+
"Authorization": f"Basic {basic_auth}",
|
| 445 |
}
|
| 446 |
|
| 447 |
data = {
|
|
|
|
| 449 |
"code": code,
|
| 450 |
"redirect_uri": redirect_uri,
|
| 451 |
"client_id": IFLOW_CLIENT_ID,
|
| 452 |
+
"client_secret": IFLOW_CLIENT_SECRET,
|
| 453 |
}
|
| 454 |
|
| 455 |
async with httpx.AsyncClient(timeout=30.0) as client:
|
| 456 |
+
response = await client.post(
|
| 457 |
+
IFLOW_OAUTH_TOKEN_ENDPOINT, headers=headers, data=data
|
| 458 |
+
)
|
| 459 |
|
| 460 |
if response.status_code != 200:
|
| 461 |
error_text = response.text
|
| 462 |
+
lib_logger.error(
|
| 463 |
+
f"iFlow token exchange failed: {response.status_code} {error_text}"
|
| 464 |
+
)
|
| 465 |
+
raise ValueError(
|
| 466 |
+
f"Token exchange failed: {response.status_code} {error_text}"
|
| 467 |
+
)
|
| 468 |
|
| 469 |
token_data = response.json()
|
| 470 |
|
|
|
|
| 482 |
|
| 483 |
# Calculate expiry date
|
| 484 |
from datetime import datetime, timedelta
|
| 485 |
+
|
| 486 |
+
expiry_date = (
|
| 487 |
+
datetime.utcnow() + timedelta(seconds=expires_in)
|
| 488 |
+
).isoformat() + "Z"
|
| 489 |
|
| 490 |
return {
|
| 491 |
"access_token": access_token,
|
|
|
|
| 494 |
"email": user_info["email"],
|
| 495 |
"expiry_date": expiry_date,
|
| 496 |
"token_type": token_type,
|
| 497 |
+
"scope": scope,
|
| 498 |
}
|
| 499 |
|
| 500 |
async def _refresh_token(self, path: str, force: bool = False) -> Dict[str, Any]:
|
|
|
|
| 531 |
headers = {
|
| 532 |
"Content-Type": "application/x-www-form-urlencoded",
|
| 533 |
"Accept": "application/json",
|
| 534 |
+
"Authorization": f"Basic {basic_auth}",
|
| 535 |
}
|
| 536 |
|
| 537 |
data = {
|
| 538 |
"grant_type": "refresh_token",
|
| 539 |
"refresh_token": refresh_token,
|
| 540 |
"client_id": IFLOW_CLIENT_ID,
|
| 541 |
+
"client_secret": IFLOW_CLIENT_SECRET,
|
| 542 |
}
|
| 543 |
|
| 544 |
async with httpx.AsyncClient(timeout=30.0) as client:
|
| 545 |
for attempt in range(max_retries):
|
| 546 |
try:
|
| 547 |
+
response = await client.post(
|
| 548 |
+
IFLOW_OAUTH_TOKEN_ENDPOINT, headers=headers, data=data
|
| 549 |
+
)
|
| 550 |
response.raise_for_status()
|
| 551 |
new_token_data = response.json()
|
| 552 |
break # Success
|
|
|
|
| 556 |
status_code = e.response.status_code
|
| 557 |
error_body = e.response.text
|
| 558 |
|
| 559 |
+
lib_logger.error(
|
| 560 |
+
f"[REFRESH HTTP ERROR] HTTP {status_code} for '{Path(path).name}': {error_body}"
|
| 561 |
+
)
|
| 562 |
|
| 563 |
# [STATUS CODE HANDLING]
|
| 564 |
# [INVALID GRANT HANDLING] Handle 401/403 by triggering re-authentication
|
|
|
|
| 572 |
|
| 573 |
elif status_code == 429:
|
| 574 |
retry_after = int(e.response.headers.get("Retry-After", 60))
|
| 575 |
+
lib_logger.warning(
|
| 576 |
+
f"Rate limited (HTTP 429), retry after {retry_after}s"
|
| 577 |
+
)
|
| 578 |
if attempt < max_retries - 1:
|
| 579 |
await asyncio.sleep(retry_after)
|
| 580 |
continue
|
|
|
|
| 582 |
|
| 583 |
elif 500 <= status_code < 600:
|
| 584 |
if attempt < max_retries - 1:
|
| 585 |
+
wait_time = 2**attempt
|
| 586 |
+
lib_logger.warning(
|
| 587 |
+
f"Server error (HTTP {status_code}), retry {attempt + 1}/{max_retries} in {wait_time}s"
|
| 588 |
+
)
|
| 589 |
await asyncio.sleep(wait_time)
|
| 590 |
continue
|
| 591 |
raise
|
|
|
|
| 596 |
except (httpx.RequestError, httpx.TimeoutException) as e:
|
| 597 |
last_error = e
|
| 598 |
if attempt < max_retries - 1:
|
| 599 |
+
wait_time = 2**attempt
|
| 600 |
+
lib_logger.warning(
|
| 601 |
+
f"Network error during refresh: {e}, retry {attempt + 1}/{max_retries} in {wait_time}s"
|
| 602 |
+
)
|
| 603 |
await asyncio.sleep(wait_time)
|
| 604 |
continue
|
| 605 |
raise
|
| 606 |
|
| 607 |
# [INVALID GRANT RE-AUTH] Trigger OAuth flow if refresh token is invalid
|
| 608 |
if needs_reauth:
|
| 609 |
+
lib_logger.info(
|
| 610 |
+
f"Starting re-authentication for '{Path(path).name}'..."
|
| 611 |
+
)
|
| 612 |
try:
|
| 613 |
# Call initialize_token to trigger OAuth flow
|
| 614 |
new_creds = await self.initialize_token(path)
|
|
|
|
| 617 |
self._next_refresh_after.pop(path, None)
|
| 618 |
return new_creds
|
| 619 |
except Exception as reauth_error:
|
| 620 |
+
lib_logger.error(
|
| 621 |
+
f"Re-authentication failed for '{Path(path).name}': {reauth_error}"
|
| 622 |
+
)
|
| 623 |
# [BACKOFF TRACKING] Increment failure count and set backoff timer
|
| 624 |
+
self._refresh_failures[path] = (
|
| 625 |
+
self._refresh_failures.get(path, 0) + 1
|
| 626 |
+
)
|
| 627 |
+
backoff_seconds = min(
|
| 628 |
+
300, 30 * (2 ** self._refresh_failures[path])
|
| 629 |
+
) # Max 5 min backoff
|
| 630 |
self._next_refresh_after[path] = time.time() + backoff_seconds
|
| 631 |
+
lib_logger.debug(
|
| 632 |
+
f"Setting backoff for '{Path(path).name}': {backoff_seconds}s"
|
| 633 |
+
)
|
| 634 |
+
raise ValueError(
|
| 635 |
+
f"Refresh token invalid and re-authentication failed: {reauth_error}"
|
| 636 |
+
)
|
| 637 |
|
| 638 |
if new_token_data is None:
|
| 639 |
# [BACKOFF TRACKING] Increment failure count and set backoff timer
|
| 640 |
self._refresh_failures[path] = self._refresh_failures.get(path, 0) + 1
|
| 641 |
+
backoff_seconds = min(
|
| 642 |
+
300, 30 * (2 ** self._refresh_failures[path])
|
| 643 |
+
) # Max 5 min backoff
|
| 644 |
self._next_refresh_after[path] = time.time() + backoff_seconds
|
| 645 |
+
lib_logger.debug(
|
| 646 |
+
f"Setting backoff for '{Path(path).name}': {backoff_seconds}s"
|
| 647 |
+
)
|
| 648 |
raise last_error or Exception("Token refresh failed after all retries")
|
| 649 |
|
| 650 |
# Update tokens
|
|
|
|
| 653 |
raise ValueError("Missing access_token in refresh response")
|
| 654 |
|
| 655 |
creds_from_file["access_token"] = access_token
|
| 656 |
+
creds_from_file["refresh_token"] = new_token_data.get(
|
| 657 |
+
"refresh_token", creds_from_file["refresh_token"]
|
| 658 |
+
)
|
| 659 |
|
| 660 |
expires_in = new_token_data.get("expires_in", 3600)
|
| 661 |
from datetime import datetime, timedelta
|
|
|
|
| 662 |
|
| 663 |
+
creds_from_file["expiry_date"] = (
|
| 664 |
+
datetime.utcnow() + timedelta(seconds=expires_in)
|
| 665 |
+
).isoformat() + "Z"
|
| 666 |
+
|
| 667 |
+
creds_from_file["token_type"] = new_token_data.get(
|
| 668 |
+
"token_type", creds_from_file.get("token_type", "Bearer")
|
| 669 |
+
)
|
| 670 |
+
creds_from_file["scope"] = new_token_data.get(
|
| 671 |
+
"scope", creds_from_file.get("scope", "")
|
| 672 |
+
)
|
| 673 |
|
| 674 |
# CRITICAL: Re-fetch user info to get potentially updated API key
|
| 675 |
try:
|
|
|
|
| 679 |
if user_info.get("email"):
|
| 680 |
creds_from_file["email"] = user_info["email"]
|
| 681 |
except Exception as e:
|
| 682 |
+
lib_logger.warning(
|
| 683 |
+
f"Failed to update API key during token refresh: {e}"
|
| 684 |
+
)
|
| 685 |
|
| 686 |
# Ensure _proxy_metadata exists and update timestamp
|
| 687 |
if "_proxy_metadata" not in creds_from_file:
|
|
|
|
| 690 |
|
| 691 |
# [VALIDATION] Verify required fields exist after refresh
|
| 692 |
required_fields = ["access_token", "refresh_token", "api_key"]
|
| 693 |
+
missing_fields = [
|
| 694 |
+
field for field in required_fields if not creds_from_file.get(field)
|
| 695 |
+
]
|
| 696 |
if missing_fields:
|
| 697 |
+
raise ValueError(
|
| 698 |
+
f"Refreshed credentials missing required fields: {missing_fields}"
|
| 699 |
+
)
|
| 700 |
|
| 701 |
# [BACKOFF TRACKING] Clear failure count on successful refresh
|
| 702 |
self._refresh_failures.pop(path, None)
|
| 703 |
self._next_refresh_after.pop(path, None)
|
| 704 |
|
| 705 |
await self._save_credentials(path, creds_from_file)
|
| 706 |
+
lib_logger.debug(
|
| 707 |
+
f"Successfully refreshed iFlow OAuth token for '{Path(path).name}'."
|
| 708 |
+
)
|
| 709 |
return creds_from_file
|
| 710 |
|
| 711 |
async def get_api_details(self, credential_identifier: str) -> Tuple[str, str]:
|
|
|
|
| 720 |
# Detect credential type
|
| 721 |
if os.path.isfile(credential_identifier):
|
| 722 |
# OAuth credential: file path to JSON
|
| 723 |
+
lib_logger.debug(
|
| 724 |
+
f"Using OAuth credentials from file: {credential_identifier}"
|
| 725 |
+
)
|
| 726 |
creds = await self._load_credentials(credential_identifier)
|
| 727 |
|
| 728 |
# Check if token needs refresh
|
|
|
|
| 747 |
"""
|
| 748 |
# Check if it's an env:// virtual path (OAuth credentials from environment)
|
| 749 |
is_env_path = credential_identifier.startswith("env://")
|
| 750 |
+
|
| 751 |
# Only refresh if it's an OAuth credential (file path or env:// path)
|
| 752 |
if not is_env_path and not os.path.isfile(credential_identifier):
|
| 753 |
return # Direct API key, no refresh needed
|
|
|
|
| 755 |
creds = await self._load_credentials(credential_identifier)
|
| 756 |
if self._is_token_expired(creds):
|
| 757 |
# Queue for refresh with needs_reauth=False (automated refresh)
|
| 758 |
+
await self._queue_refresh(
|
| 759 |
+
credential_identifier, force=False, needs_reauth=False
|
| 760 |
+
)
|
| 761 |
|
| 762 |
async def _get_lock(self, path: str) -> asyncio.Lock:
|
| 763 |
"""Gets or creates a lock for the given credential path."""
|
|
|
|
| 774 |
async def _ensure_queue_processor_running(self):
|
| 775 |
"""Lazily starts the queue processor if not already running."""
|
| 776 |
if self._queue_processor_task is None or self._queue_processor_task.done():
|
| 777 |
+
self._queue_processor_task = asyncio.create_task(
|
| 778 |
+
self._process_refresh_queue()
|
| 779 |
+
)
|
| 780 |
|
| 781 |
+
async def _queue_refresh(
|
| 782 |
+
self, path: str, force: bool = False, needs_reauth: bool = False
|
| 783 |
+
):
|
| 784 |
"""Add a credential to the refresh queue if not already queued.
|
| 785 |
+
|
| 786 |
Args:
|
| 787 |
path: Credential file path
|
| 788 |
force: Force refresh even if not expired
|
|
|
|
| 797 |
if now < backoff_until:
|
| 798 |
# Credential is in backoff for automated refresh, do not queue
|
| 799 |
remaining = int(backoff_until - now)
|
| 800 |
+
lib_logger.debug(
|
| 801 |
+
f"Skipping automated refresh for '{Path(path).name}' (in backoff for {remaining}s)"
|
| 802 |
+
)
|
| 803 |
return
|
| 804 |
+
|
| 805 |
async with self._queue_tracking_lock:
|
| 806 |
if path not in self._queued_credentials:
|
| 807 |
self._queued_credentials.add(path)
|
|
|
|
| 817 |
# Wait for an item with timeout to allow graceful shutdown
|
| 818 |
try:
|
| 819 |
path, force, needs_reauth = await asyncio.wait_for(
|
| 820 |
+
self._refresh_queue.get(), timeout=60.0
|
|
|
|
| 821 |
)
|
| 822 |
except asyncio.TimeoutError:
|
| 823 |
# No items for 60s, exit to save resources
|
| 824 |
self._queue_processor_task = None
|
| 825 |
return
|
| 826 |
+
|
| 827 |
try:
|
| 828 |
# Perform the actual refresh (still using per-credential lock)
|
| 829 |
async with await self._get_lock(path):
|
|
|
|
| 834 |
async with self._queue_tracking_lock:
|
| 835 |
self._unavailable_credentials.discard(path)
|
| 836 |
continue
|
| 837 |
+
|
| 838 |
# Perform refresh
|
| 839 |
if not creds:
|
| 840 |
creds = await self._load_credentials(path)
|
| 841 |
await self._refresh_token(path, force=force)
|
| 842 |
+
|
| 843 |
# SUCCESS: Mark as available again
|
| 844 |
async with self._queue_tracking_lock:
|
| 845 |
self._unavailable_credentials.discard(path)
|
| 846 |
+
|
| 847 |
finally:
|
| 848 |
# Remove from queued set
|
| 849 |
async with self._queue_tracking_lock:
|
|
|
|
| 858 |
async with self._queue_tracking_lock:
|
| 859 |
self._unavailable_credentials.discard(path)
|
| 860 |
|
| 861 |
+
async def initialize_token(
|
| 862 |
+
self, creds_or_path: Union[Dict[str, Any], str]
|
| 863 |
+
) -> Dict[str, Any]:
|
| 864 |
"""
|
| 865 |
Initiates OAuth authorization code flow if tokens are missing or invalid.
|
| 866 |
Uses local callback server to receive authorization code.
|
|
|
|
| 869 |
|
| 870 |
# Get display name from metadata if available, otherwise derive from path
|
| 871 |
if isinstance(creds_or_path, dict):
|
| 872 |
+
display_name = creds_or_path.get("_proxy_metadata", {}).get(
|
| 873 |
+
"display_name", "in-memory object"
|
| 874 |
+
)
|
| 875 |
else:
|
| 876 |
display_name = Path(path).name if path else "in-memory object"
|
| 877 |
|
| 878 |
lib_logger.debug(f"Initializing iFlow token for '{display_name}'...")
|
| 879 |
|
| 880 |
try:
|
| 881 |
+
creds = (
|
| 882 |
+
await self._load_credentials(creds_or_path) if path else creds_or_path
|
| 883 |
+
)
|
| 884 |
|
| 885 |
reason = ""
|
| 886 |
if not creds.get("refresh_token"):
|
|
|
|
| 894 |
try:
|
| 895 |
return await self._refresh_token(path)
|
| 896 |
except Exception as e:
|
| 897 |
+
lib_logger.warning(
|
| 898 |
+
f"Automatic token refresh for '{display_name}' failed: {e}. Proceeding to interactive login."
|
| 899 |
+
)
|
| 900 |
|
| 901 |
# Interactive OAuth flow
|
| 902 |
+
lib_logger.warning(
|
| 903 |
+
f"iFlow OAuth token for '{display_name}' needs setup: {reason}."
|
| 904 |
+
)
|
| 905 |
+
|
| 906 |
# [HEADLESS DETECTION] Check if running in headless environment
|
| 907 |
is_headless = is_headless_environment()
|
| 908 |
|
|
|
|
| 916 |
"type": "phone",
|
| 917 |
"redirect": redirect_uri,
|
| 918 |
"state": state,
|
| 919 |
+
"client_id": IFLOW_CLIENT_ID,
|
| 920 |
}
|
| 921 |
auth_url = f"{IFLOW_OAUTH_AUTHORIZE_ENDPOINT}?{urlencode(auth_params)}"
|
| 922 |
|
|
|
|
| 940 |
"2. [bold]Authorize the application[/bold] to access your account.\n"
|
| 941 |
"3. You will be automatically redirected after authorization."
|
| 942 |
)
|
| 943 |
+
|
| 944 |
+
console.print(
|
| 945 |
+
Panel(
|
| 946 |
+
auth_panel_text,
|
| 947 |
+
title=f"iFlow OAuth Setup for [bold yellow]{display_name}[/bold yellow]",
|
| 948 |
+
style="bold blue",
|
| 949 |
+
)
|
| 950 |
+
)
|
| 951 |
+
# [URL DISPLAY] Print URL with proper escaping to prevent Rich markup issues.
|
| 952 |
+
# IMPORTANT: OAuth URLs contain special characters (=, &, etc.) that Rich might
|
| 953 |
+
# interpret as markup in some terminal configurations. We escape the URL to
|
| 954 |
+
# ensure it displays correctly.
|
| 955 |
+
#
|
| 956 |
+
# KNOWN ISSUE: If Rich rendering fails entirely (e.g., terminal doesn't support
|
| 957 |
+
# ANSI codes, or output is piped), the escaped URL should still be valid.
|
| 958 |
+
# However, if the terminal strips or mangles the output, users should copy
|
| 959 |
+
# the URL directly from logs or use --verbose to see the raw URL.
|
| 960 |
+
#
|
| 961 |
+
# The [link=...] markup creates a clickable hyperlink in supported terminals
|
| 962 |
+
# (iTerm2, Windows Terminal, etc.), but the displayed text is the escaped URL
|
| 963 |
+
# which can be safely copied even if the hyperlink doesn't work.
|
| 964 |
+
escaped_url = rich_escape(auth_url)
|
| 965 |
+
console.print(
|
| 966 |
+
f"[bold]URL:[/bold] [link={auth_url}]{escaped_url}[/link]\n"
|
| 967 |
+
)
|
| 968 |
|
| 969 |
# [HEADLESS SUPPORT] Only attempt browser open if NOT headless
|
| 970 |
if not is_headless:
|
| 971 |
try:
|
| 972 |
webbrowser.open(auth_url)
|
| 973 |
+
lib_logger.info(
|
| 974 |
+
"Browser opened successfully for iFlow OAuth flow"
|
| 975 |
+
)
|
| 976 |
except Exception as e:
|
| 977 |
+
lib_logger.warning(
|
| 978 |
+
f"Failed to open browser automatically: {e}. Please open the URL manually."
|
| 979 |
+
)
|
| 980 |
|
| 981 |
# Wait for callback
|
| 982 |
+
with console.status(
|
| 983 |
+
"[bold green]Waiting for authorization in the browser...[/bold green]",
|
| 984 |
+
spinner="dots",
|
| 985 |
+
):
|
| 986 |
code = await callback_server.wait_for_callback(timeout=300.0)
|
| 987 |
|
| 988 |
+
lib_logger.info(
|
| 989 |
+
"Received authorization code, exchanging for tokens..."
|
| 990 |
+
)
|
| 991 |
|
| 992 |
# Exchange code for tokens and API key
|
| 993 |
+
token_data = await self._exchange_code_for_tokens(
|
| 994 |
+
code, redirect_uri
|
| 995 |
+
)
|
| 996 |
|
| 997 |
# Update credentials
|
| 998 |
+
creds.update(
|
| 999 |
+
{
|
| 1000 |
+
"access_token": token_data["access_token"],
|
| 1001 |
+
"refresh_token": token_data["refresh_token"],
|
| 1002 |
+
"api_key": token_data["api_key"],
|
| 1003 |
+
"email": token_data["email"],
|
| 1004 |
+
"expiry_date": token_data["expiry_date"],
|
| 1005 |
+
"token_type": token_data["token_type"],
|
| 1006 |
+
"scope": token_data["scope"],
|
| 1007 |
+
}
|
| 1008 |
+
)
|
| 1009 |
|
| 1010 |
# Create metadata object
|
| 1011 |
if not creds.get("_proxy_metadata"):
|
| 1012 |
creds["_proxy_metadata"] = {
|
| 1013 |
"email": token_data["email"],
|
| 1014 |
+
"last_check_timestamp": time.time(),
|
| 1015 |
}
|
| 1016 |
|
| 1017 |
if path:
|
| 1018 |
await self._save_credentials(path, creds)
|
| 1019 |
|
| 1020 |
+
lib_logger.info(
|
| 1021 |
+
f"iFlow OAuth initialized successfully for '{display_name}'."
|
| 1022 |
+
)
|
| 1023 |
return creds
|
| 1024 |
|
| 1025 |
finally:
|
|
|
|
| 1046 |
|
| 1047 |
return {"Authorization": f"Bearer {api_key}"}
|
| 1048 |
|
| 1049 |
+
async def get_user_info(
|
| 1050 |
+
self, creds_or_path: Union[Dict[str, Any], str]
|
| 1051 |
+
) -> Dict[str, Any]:
|
| 1052 |
"""Retrieves user info from the _proxy_metadata in the credential file."""
|
| 1053 |
try:
|
| 1054 |
path = creds_or_path if isinstance(creds_or_path, str) else None
|
| 1055 |
+
creds = (
|
| 1056 |
+
await self._load_credentials(creds_or_path) if path else creds_or_path
|
| 1057 |
+
)
|
| 1058 |
|
| 1059 |
# Ensure the token is valid
|
| 1060 |
if path:
|
|
|
|
| 1064 |
email = creds.get("email") or creds.get("_proxy_metadata", {}).get("email")
|
| 1065 |
|
| 1066 |
if not email:
|
| 1067 |
+
lib_logger.warning(
|
| 1068 |
+
f"No email found in iFlow credentials for '{path or 'in-memory object'}'."
|
| 1069 |
+
)
|
| 1070 |
|
| 1071 |
# Update timestamp on check
|
| 1072 |
if path and "_proxy_metadata" in creds:
|
src/rotator_library/providers/qwen_auth_base.py
CHANGED
|
@@ -19,54 +19,70 @@ from rich.console import Console
|
|
| 19 |
from rich.panel import Panel
|
| 20 |
from rich.prompt import Prompt
|
| 21 |
from rich.text import Text
|
|
|
|
| 22 |
|
| 23 |
from ..utils.headless_detection import is_headless_environment
|
| 24 |
|
| 25 |
-
lib_logger = logging.getLogger(
|
| 26 |
|
| 27 |
-
CLIENT_ID =
|
|
|
|
|
|
|
| 28 |
SCOPE = "openid profile email model.completion"
|
| 29 |
TOKEN_ENDPOINT = "https://chat.qwen.ai/api/v1/oauth2/token"
|
| 30 |
REFRESH_EXPIRY_BUFFER_SECONDS = 3 * 60 * 60 # 3 hours buffer before expiry
|
| 31 |
|
| 32 |
console = Console()
|
| 33 |
|
|
|
|
| 34 |
class QwenAuthBase:
|
| 35 |
def __init__(self):
|
| 36 |
self._credentials_cache: Dict[str, Dict[str, Any]] = {}
|
| 37 |
self._refresh_locks: Dict[str, asyncio.Lock] = {}
|
| 38 |
-
self._locks_lock =
|
|
|
|
|
|
|
| 39 |
# [BACKOFF TRACKING] Track consecutive failures per credential
|
| 40 |
-
self._refresh_failures: Dict[
|
| 41 |
-
|
| 42 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 43 |
# [QUEUE SYSTEM] Sequential refresh processing
|
| 44 |
self._refresh_queue: asyncio.Queue = asyncio.Queue()
|
| 45 |
self._queued_credentials: set = set() # Track credentials already in queue
|
| 46 |
-
self._unavailable_credentials: set =
|
|
|
|
|
|
|
| 47 |
self._queue_tracking_lock = asyncio.Lock() # Protects queue sets
|
| 48 |
-
self._queue_processor_task: Optional[asyncio.Task] =
|
|
|
|
|
|
|
| 49 |
|
| 50 |
def _parse_env_credential_path(self, path: str) -> Optional[str]:
|
| 51 |
"""
|
| 52 |
Parse a virtual env:// path and return the credential index.
|
| 53 |
-
|
| 54 |
Supported formats:
|
| 55 |
- "env://provider/0" - Legacy single credential (no index in env var names)
|
| 56 |
- "env://provider/1" - First numbered credential (QWEN_CODE_1_ACCESS_TOKEN)
|
| 57 |
-
|
| 58 |
Returns:
|
| 59 |
The credential index as string, or None if path is not an env:// path
|
| 60 |
"""
|
| 61 |
if not path.startswith("env://"):
|
| 62 |
return None
|
| 63 |
-
|
| 64 |
parts = path[6:].split("/")
|
| 65 |
if len(parts) >= 2:
|
| 66 |
return parts[1]
|
| 67 |
return "0"
|
| 68 |
|
| 69 |
-
def _load_from_env(
|
|
|
|
|
|
|
| 70 |
"""
|
| 71 |
Load OAuth credentials from environment variables for stateless deployments.
|
| 72 |
|
|
@@ -91,7 +107,7 @@ class QwenAuthBase:
|
|
| 91 |
else:
|
| 92 |
prefix = "QWEN_CODE"
|
| 93 |
default_email = "env-user"
|
| 94 |
-
|
| 95 |
access_token = os.getenv(f"{prefix}_ACCESS_TOKEN")
|
| 96 |
refresh_token = os.getenv(f"{prefix}_REFRESH_TOKEN")
|
| 97 |
|
|
@@ -99,27 +115,33 @@ class QwenAuthBase:
|
|
| 99 |
if not (access_token and refresh_token):
|
| 100 |
return None
|
| 101 |
|
| 102 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 103 |
|
| 104 |
# Parse expiry_date as float, default to 0 if not present
|
| 105 |
expiry_str = os.getenv(f"{prefix}_EXPIRY_DATE", "0")
|
| 106 |
try:
|
| 107 |
expiry_date = float(expiry_str)
|
| 108 |
except ValueError:
|
| 109 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 110 |
expiry_date = 0
|
| 111 |
|
| 112 |
creds = {
|
| 113 |
"access_token": access_token,
|
| 114 |
"refresh_token": refresh_token,
|
| 115 |
"expiry_date": expiry_date,
|
| 116 |
-
"resource_url": os.getenv(
|
|
|
|
|
|
|
| 117 |
"_proxy_metadata": {
|
| 118 |
"email": os.getenv(f"{prefix}_EMAIL", default_email),
|
| 119 |
"last_check_timestamp": time.time(),
|
| 120 |
"loaded_from_env": True,
|
| 121 |
-
"env_credential_index": credential_index or "0"
|
| 122 |
-
}
|
| 123 |
}
|
| 124 |
|
| 125 |
return creds
|
|
@@ -128,7 +150,7 @@ class QwenAuthBase:
|
|
| 128 |
"""Reads credentials from file and populates the cache. No locking."""
|
| 129 |
try:
|
| 130 |
lib_logger.debug(f"Reading Qwen credentials from file: {path}")
|
| 131 |
-
with open(path,
|
| 132 |
creds = json.load(f)
|
| 133 |
self._credentials_cache[path] = creds
|
| 134 |
return creds
|
|
@@ -152,16 +174,22 @@ class QwenAuthBase:
|
|
| 152 |
if credential_index is not None:
|
| 153 |
env_creds = self._load_from_env(credential_index)
|
| 154 |
if env_creds:
|
| 155 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 156 |
self._credentials_cache[path] = env_creds
|
| 157 |
return env_creds
|
| 158 |
else:
|
| 159 |
-
raise IOError(
|
|
|
|
|
|
|
| 160 |
|
| 161 |
# For file paths, try loading from legacy env vars first
|
| 162 |
env_creds = self._load_from_env()
|
| 163 |
if env_creds:
|
| 164 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 165 |
self._credentials_cache[path] = env_creds
|
| 166 |
return env_creds
|
| 167 |
|
|
@@ -184,10 +212,12 @@ class QwenAuthBase:
|
|
| 184 |
tmp_path = None
|
| 185 |
try:
|
| 186 |
# Create temp file in same directory as target (ensures same filesystem)
|
| 187 |
-
tmp_fd, tmp_path = tempfile.mkstemp(
|
|
|
|
|
|
|
| 188 |
|
| 189 |
# Write JSON to temp file
|
| 190 |
-
with os.fdopen(tmp_fd,
|
| 191 |
json.dump(creds, f, indent=2)
|
| 192 |
tmp_fd = None # fdopen closes the fd
|
| 193 |
|
|
@@ -204,10 +234,14 @@ class QwenAuthBase:
|
|
| 204 |
|
| 205 |
# Update cache AFTER successful file write
|
| 206 |
self._credentials_cache[path] = creds
|
| 207 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 208 |
|
| 209 |
except Exception as e:
|
| 210 |
-
lib_logger.error(
|
|
|
|
|
|
|
| 211 |
# Clean up temp file if it still exists
|
| 212 |
if tmp_fd is not None:
|
| 213 |
try:
|
|
@@ -252,17 +286,22 @@ class QwenAuthBase:
|
|
| 252 |
headers = {
|
| 253 |
"Content-Type": "application/x-www-form-urlencoded",
|
| 254 |
"Accept": "application/json",
|
| 255 |
-
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
|
| 256 |
}
|
| 257 |
|
| 258 |
async with httpx.AsyncClient() as client:
|
| 259 |
for attempt in range(max_retries):
|
| 260 |
try:
|
| 261 |
-
response = await client.post(
|
| 262 |
-
|
| 263 |
-
|
| 264 |
-
|
| 265 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 266 |
response.raise_for_status()
|
| 267 |
new_token_data = response.json()
|
| 268 |
break # Success
|
|
@@ -271,7 +310,9 @@ class QwenAuthBase:
|
|
| 271 |
last_error = e
|
| 272 |
status_code = e.response.status_code
|
| 273 |
error_body = e.response.text
|
| 274 |
-
lib_logger.error(
|
|
|
|
|
|
|
| 275 |
|
| 276 |
# [INVALID GRANT HANDLING] Handle 401/403 by triggering re-authentication
|
| 277 |
if status_code in (401, 403):
|
|
@@ -284,7 +325,9 @@ class QwenAuthBase:
|
|
| 284 |
|
| 285 |
elif status_code == 429:
|
| 286 |
retry_after = int(e.response.headers.get("Retry-After", 60))
|
| 287 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 288 |
if attempt < max_retries - 1:
|
| 289 |
await asyncio.sleep(retry_after)
|
| 290 |
continue
|
|
@@ -292,8 +335,10 @@ class QwenAuthBase:
|
|
| 292 |
|
| 293 |
elif 500 <= status_code < 600:
|
| 294 |
if attempt < max_retries - 1:
|
| 295 |
-
wait_time = 2
|
| 296 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 297 |
await asyncio.sleep(wait_time)
|
| 298 |
continue
|
| 299 |
raise
|
|
@@ -304,15 +349,19 @@ class QwenAuthBase:
|
|
| 304 |
except (httpx.RequestError, httpx.TimeoutException) as e:
|
| 305 |
last_error = e
|
| 306 |
if attempt < max_retries - 1:
|
| 307 |
-
wait_time = 2
|
| 308 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 309 |
await asyncio.sleep(wait_time)
|
| 310 |
continue
|
| 311 |
raise
|
| 312 |
|
| 313 |
# [INVALID GRANT RE-AUTH] Trigger OAuth flow if refresh token is invalid
|
| 314 |
if needs_reauth:
|
| 315 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 316 |
try:
|
| 317 |
# Call initialize_token to trigger OAuth flow
|
| 318 |
new_creds = await self.initialize_token(path)
|
|
@@ -321,26 +370,46 @@ class QwenAuthBase:
|
|
| 321 |
self._next_refresh_after.pop(path, None)
|
| 322 |
return new_creds
|
| 323 |
except Exception as reauth_error:
|
| 324 |
-
lib_logger.error(
|
|
|
|
|
|
|
| 325 |
# [BACKOFF TRACKING] Increment failure count and set backoff timer
|
| 326 |
-
self._refresh_failures[path] =
|
| 327 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 328 |
self._next_refresh_after[path] = time.time() + backoff_seconds
|
| 329 |
-
lib_logger.debug(
|
| 330 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 331 |
|
| 332 |
if new_token_data is None:
|
| 333 |
# [BACKOFF TRACKING] Increment failure count and set backoff timer
|
| 334 |
self._refresh_failures[path] = self._refresh_failures.get(path, 0) + 1
|
| 335 |
-
backoff_seconds = min(
|
|
|
|
|
|
|
| 336 |
self._next_refresh_after[path] = time.time() + backoff_seconds
|
| 337 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 338 |
raise last_error or Exception("Token refresh failed after all retries")
|
| 339 |
|
| 340 |
creds_from_file["access_token"] = new_token_data["access_token"]
|
| 341 |
-
creds_from_file["refresh_token"] = new_token_data.get(
|
| 342 |
-
|
| 343 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 344 |
|
| 345 |
# Ensure _proxy_metadata exists and update timestamp
|
| 346 |
if "_proxy_metadata" not in creds_from_file:
|
|
@@ -349,16 +418,22 @@ class QwenAuthBase:
|
|
| 349 |
|
| 350 |
# [VALIDATION] Verify required fields exist after refresh
|
| 351 |
required_fields = ["access_token", "refresh_token"]
|
| 352 |
-
missing_fields = [
|
|
|
|
|
|
|
| 353 |
if missing_fields:
|
| 354 |
-
raise ValueError(
|
|
|
|
|
|
|
| 355 |
|
| 356 |
# [BACKOFF TRACKING] Clear failure count on successful refresh
|
| 357 |
self._refresh_failures.pop(path, None)
|
| 358 |
self._next_refresh_after.pop(path, None)
|
| 359 |
|
| 360 |
await self._save_credentials(path, creds_from_file)
|
| 361 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 362 |
return creds_from_file
|
| 363 |
|
| 364 |
async def get_api_details(self, credential_identifier: str) -> Tuple[str, str]:
|
|
@@ -372,12 +447,14 @@ class QwenAuthBase:
|
|
| 372 |
# Detect credential type
|
| 373 |
if os.path.isfile(credential_identifier):
|
| 374 |
# OAuth credential: file path to JSON
|
| 375 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 376 |
creds = await self._load_credentials(credential_identifier)
|
| 377 |
|
| 378 |
if self._is_token_expired(creds):
|
| 379 |
creds = await self._refresh_token(credential_identifier)
|
| 380 |
-
|
| 381 |
base_url = creds.get("resource_url", "https://portal.qwen.ai/v1")
|
| 382 |
if not base_url.startswith("http"):
|
| 383 |
base_url = f"https://{base_url}"
|
|
@@ -397,7 +474,7 @@ class QwenAuthBase:
|
|
| 397 |
"""
|
| 398 |
# Check if it's an env:// virtual path (OAuth credentials from environment)
|
| 399 |
is_env_path = credential_identifier.startswith("env://")
|
| 400 |
-
|
| 401 |
# Only refresh if it's an OAuth credential (file path or env:// path)
|
| 402 |
if not is_env_path and not os.path.isfile(credential_identifier):
|
| 403 |
return # Direct API key, no refresh needed
|
|
@@ -405,7 +482,9 @@ class QwenAuthBase:
|
|
| 405 |
creds = await self._load_credentials(credential_identifier)
|
| 406 |
if self._is_token_expired(creds):
|
| 407 |
# Queue for refresh with needs_reauth=False (automated refresh)
|
| 408 |
-
await self._queue_refresh(
|
|
|
|
|
|
|
| 409 |
|
| 410 |
async def _get_lock(self, path: str) -> asyncio.Lock:
|
| 411 |
# [FIX RACE CONDITION] Protect lock creation with a master lock
|
|
@@ -421,11 +500,15 @@ class QwenAuthBase:
|
|
| 421 |
async def _ensure_queue_processor_running(self):
|
| 422 |
"""Lazily starts the queue processor if not already running."""
|
| 423 |
if self._queue_processor_task is None or self._queue_processor_task.done():
|
| 424 |
-
self._queue_processor_task = asyncio.create_task(
|
|
|
|
|
|
|
| 425 |
|
| 426 |
-
async def _queue_refresh(
|
|
|
|
|
|
|
| 427 |
"""Add a credential to the refresh queue if not already queued.
|
| 428 |
-
|
| 429 |
Args:
|
| 430 |
path: Credential file path
|
| 431 |
force: Force refresh even if not expired
|
|
@@ -440,9 +523,11 @@ class QwenAuthBase:
|
|
| 440 |
if now < backoff_until:
|
| 441 |
# Credential is in backoff for automated refresh, do not queue
|
| 442 |
remaining = int(backoff_until - now)
|
| 443 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 444 |
return
|
| 445 |
-
|
| 446 |
async with self._queue_tracking_lock:
|
| 447 |
if path not in self._queued_credentials:
|
| 448 |
self._queued_credentials.add(path)
|
|
@@ -458,14 +543,13 @@ class QwenAuthBase:
|
|
| 458 |
# Wait for an item with timeout to allow graceful shutdown
|
| 459 |
try:
|
| 460 |
path, force, needs_reauth = await asyncio.wait_for(
|
| 461 |
-
self._refresh_queue.get(),
|
| 462 |
-
timeout=60.0
|
| 463 |
)
|
| 464 |
except asyncio.TimeoutError:
|
| 465 |
# No items for 60s, exit to save resources
|
| 466 |
self._queue_processor_task = None
|
| 467 |
return
|
| 468 |
-
|
| 469 |
try:
|
| 470 |
# Perform the actual refresh (still using per-credential lock)
|
| 471 |
async with await self._get_lock(path):
|
|
@@ -476,16 +560,16 @@ class QwenAuthBase:
|
|
| 476 |
async with self._queue_tracking_lock:
|
| 477 |
self._unavailable_credentials.discard(path)
|
| 478 |
continue
|
| 479 |
-
|
| 480 |
# Perform refresh
|
| 481 |
if not creds:
|
| 482 |
creds = await self._load_credentials(path)
|
| 483 |
await self._refresh_token(path, force=force)
|
| 484 |
-
|
| 485 |
# SUCCESS: Mark as available again
|
| 486 |
async with self._queue_tracking_lock:
|
| 487 |
self._unavailable_credentials.discard(path)
|
| 488 |
-
|
| 489 |
finally:
|
| 490 |
# Remove from queued set
|
| 491 |
async with self._queue_tracking_lock:
|
|
@@ -500,19 +584,25 @@ class QwenAuthBase:
|
|
| 500 |
async with self._queue_tracking_lock:
|
| 501 |
self._unavailable_credentials.discard(path)
|
| 502 |
|
| 503 |
-
async def initialize_token(
|
|
|
|
|
|
|
| 504 |
"""Initiates device flow if tokens are missing or invalid."""
|
| 505 |
path = creds_or_path if isinstance(creds_or_path, str) else None
|
| 506 |
|
| 507 |
# Get display name from metadata if available, otherwise derive from path
|
| 508 |
if isinstance(creds_or_path, dict):
|
| 509 |
-
display_name = creds_or_path.get("_proxy_metadata", {}).get(
|
|
|
|
|
|
|
| 510 |
else:
|
| 511 |
display_name = Path(path).name if path else "in-memory object"
|
| 512 |
|
| 513 |
lib_logger.debug(f"Initializing Qwen token for '{display_name}'...")
|
| 514 |
try:
|
| 515 |
-
creds =
|
|
|
|
|
|
|
| 516 |
|
| 517 |
reason = ""
|
| 518 |
if not creds.get("refresh_token"):
|
|
@@ -525,44 +615,58 @@ class QwenAuthBase:
|
|
| 525 |
try:
|
| 526 |
return await self._refresh_token(path)
|
| 527 |
except Exception as e:
|
| 528 |
-
lib_logger.warning(
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 529 |
|
| 530 |
-
lib_logger.warning(f"Qwen OAuth token for '{display_name}' needs setup: {reason}.")
|
| 531 |
-
|
| 532 |
# [HEADLESS DETECTION] Check if running in headless environment
|
| 533 |
is_headless = is_headless_environment()
|
| 534 |
-
|
| 535 |
-
code_verifier =
|
| 536 |
-
|
| 537 |
-
|
| 538 |
-
|
| 539 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 540 |
headers = {
|
| 541 |
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
|
| 542 |
"Content-Type": "application/x-www-form-urlencoded",
|
| 543 |
-
"Accept": "application/json"
|
| 544 |
}
|
| 545 |
async with httpx.AsyncClient() as client:
|
| 546 |
request_data = {
|
| 547 |
"client_id": CLIENT_ID,
|
| 548 |
"scope": SCOPE,
|
| 549 |
"code_challenge": code_challenge,
|
| 550 |
-
"code_challenge_method": "S256"
|
| 551 |
}
|
| 552 |
lib_logger.debug(f"Qwen device code request data: {request_data}")
|
| 553 |
try:
|
| 554 |
dev_response = await client.post(
|
| 555 |
"https://chat.qwen.ai/api/v1/oauth2/device/code",
|
| 556 |
headers=headers,
|
| 557 |
-
data=request_data
|
| 558 |
)
|
| 559 |
dev_response.raise_for_status()
|
| 560 |
dev_data = dev_response.json()
|
| 561 |
lib_logger.debug(f"Qwen device auth response: {dev_data}")
|
| 562 |
except httpx.HTTPStatusError as e:
|
| 563 |
-
lib_logger.error(
|
|
|
|
|
|
|
| 564 |
raise e
|
| 565 |
-
|
| 566 |
# [HEADLESS SUPPORT] Display appropriate instructions
|
| 567 |
if is_headless:
|
| 568 |
auth_panel_text = Text.from_markup(
|
|
@@ -578,33 +682,63 @@ class QwenAuthBase:
|
|
| 578 |
"2. [bold]Copy your email[/bold] or another unique identifier and authorize the application.\n"
|
| 579 |
"3. You will be prompted to enter your identifier after authorization."
|
| 580 |
)
|
| 581 |
-
|
| 582 |
-
console.print(
|
| 583 |
-
|
| 584 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 585 |
# [HEADLESS SUPPORT] Only attempt browser open if NOT headless
|
| 586 |
if not is_headless:
|
| 587 |
try:
|
| 588 |
-
webbrowser.open(dev_data[
|
| 589 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 590 |
except Exception as e:
|
| 591 |
-
lib_logger.warning(
|
| 592 |
-
|
|
|
|
|
|
|
| 593 |
token_data = None
|
| 594 |
start_time = time.time()
|
| 595 |
-
interval = dev_data.get(
|
| 596 |
|
| 597 |
-
with console.status(
|
| 598 |
-
|
|
|
|
|
|
|
|
|
|
| 599 |
poll_response = await client.post(
|
| 600 |
TOKEN_ENDPOINT,
|
| 601 |
headers=headers,
|
| 602 |
data={
|
| 603 |
"grant_type": "urn:ietf:params:oauth:grant-type:device_code",
|
| 604 |
-
"device_code": dev_data[
|
| 605 |
"client_id": CLIENT_ID,
|
| 606 |
-
"code_verifier": code_verifier
|
| 607 |
-
}
|
| 608 |
)
|
| 609 |
if poll_response.status_code == 200:
|
| 610 |
token_data = poll_response.json()
|
|
@@ -614,45 +748,63 @@ class QwenAuthBase:
|
|
| 614 |
poll_data = poll_response.json()
|
| 615 |
error_type = poll_data.get("error")
|
| 616 |
if error_type == "authorization_pending":
|
| 617 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 618 |
elif error_type == "slow_down":
|
| 619 |
interval = int(interval * 1.5)
|
| 620 |
if interval > 10:
|
| 621 |
interval = 10
|
| 622 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 623 |
else:
|
| 624 |
-
raise ValueError(
|
|
|
|
|
|
|
| 625 |
else:
|
| 626 |
poll_response.raise_for_status()
|
| 627 |
-
|
| 628 |
await asyncio.sleep(interval)
|
| 629 |
-
|
| 630 |
if not token_data:
|
| 631 |
raise TimeoutError("Qwen device flow timed out.")
|
| 632 |
-
|
| 633 |
-
creds.update(
|
| 634 |
-
|
| 635 |
-
|
| 636 |
-
|
| 637 |
-
|
| 638 |
-
|
|
|
|
|
|
|
|
|
|
| 639 |
|
| 640 |
# Prompt for user identifier and create metadata object if needed
|
| 641 |
if not creds.get("_proxy_metadata", {}).get("email"):
|
| 642 |
try:
|
| 643 |
-
prompt_text = Text.from_markup(
|
|
|
|
|
|
|
| 644 |
email = Prompt.ask(prompt_text)
|
| 645 |
creds["_proxy_metadata"] = {
|
| 646 |
"email": email.strip(),
|
| 647 |
-
"last_check_timestamp": time.time()
|
| 648 |
}
|
| 649 |
except (EOFError, KeyboardInterrupt):
|
| 650 |
-
console.print(
|
| 651 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 652 |
|
| 653 |
if path:
|
| 654 |
await self._save_credentials(path, creds)
|
| 655 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 656 |
return creds
|
| 657 |
|
| 658 |
lib_logger.info(f"Qwen OAuth token at '{display_name}' is valid.")
|
|
@@ -666,24 +818,32 @@ class QwenAuthBase:
|
|
| 666 |
creds = await self._refresh_token(credential_path)
|
| 667 |
return {"Authorization": f"Bearer {creds['access_token']}"}
|
| 668 |
|
| 669 |
-
async def get_user_info(
|
|
|
|
|
|
|
| 670 |
"""
|
| 671 |
Retrieves user info from the _proxy_metadata in the credential file.
|
| 672 |
"""
|
| 673 |
try:
|
| 674 |
path = creds_or_path if isinstance(creds_or_path, str) else None
|
| 675 |
-
creds =
|
| 676 |
-
|
|
|
|
|
|
|
| 677 |
# This will ensure the token is valid and metadata exists if the flow was just run
|
| 678 |
if path:
|
| 679 |
await self.initialize_token(path)
|
| 680 |
-
creds = await self._load_credentials(
|
|
|
|
|
|
|
| 681 |
|
| 682 |
metadata = creds.get("_proxy_metadata", {"email": None})
|
| 683 |
email = metadata.get("email")
|
| 684 |
|
| 685 |
if not email:
|
| 686 |
-
lib_logger.warning(
|
|
|
|
|
|
|
| 687 |
|
| 688 |
# Update timestamp on check and save if it's a file-based credential
|
| 689 |
if path and "_proxy_metadata" in creds:
|
|
@@ -693,4 +853,4 @@ class QwenAuthBase:
|
|
| 693 |
return {"email": email}
|
| 694 |
except Exception as e:
|
| 695 |
lib_logger.error(f"Failed to get Qwen user info from credentials: {e}")
|
| 696 |
-
return {"email": None}
|
|
|
|
| 19 |
from rich.panel import Panel
|
| 20 |
from rich.prompt import Prompt
|
| 21 |
from rich.text import Text
|
| 22 |
+
from rich.markup import escape as rich_escape
|
| 23 |
|
| 24 |
from ..utils.headless_detection import is_headless_environment
|
| 25 |
|
| 26 |
+
lib_logger = logging.getLogger("rotator_library")
|
| 27 |
|
| 28 |
+
CLIENT_ID = (
|
| 29 |
+
"f0304373b74a44d2b584a3fb70ca9e56" # https://api.kilocode.ai/extension-config.json
|
| 30 |
+
)
|
| 31 |
SCOPE = "openid profile email model.completion"
|
| 32 |
TOKEN_ENDPOINT = "https://chat.qwen.ai/api/v1/oauth2/token"
|
| 33 |
REFRESH_EXPIRY_BUFFER_SECONDS = 3 * 60 * 60 # 3 hours buffer before expiry
|
| 34 |
|
| 35 |
console = Console()
|
| 36 |
|
| 37 |
+
|
| 38 |
class QwenAuthBase:
|
| 39 |
def __init__(self):
|
| 40 |
self._credentials_cache: Dict[str, Dict[str, Any]] = {}
|
| 41 |
self._refresh_locks: Dict[str, asyncio.Lock] = {}
|
| 42 |
+
self._locks_lock = (
|
| 43 |
+
asyncio.Lock()
|
| 44 |
+
) # Protects the locks dict from race conditions
|
| 45 |
# [BACKOFF TRACKING] Track consecutive failures per credential
|
| 46 |
+
self._refresh_failures: Dict[
|
| 47 |
+
str, int
|
| 48 |
+
] = {} # Track consecutive failures per credential
|
| 49 |
+
self._next_refresh_after: Dict[
|
| 50 |
+
str, float
|
| 51 |
+
] = {} # Track backoff timers (Unix timestamp)
|
| 52 |
+
|
| 53 |
# [QUEUE SYSTEM] Sequential refresh processing
|
| 54 |
self._refresh_queue: asyncio.Queue = asyncio.Queue()
|
| 55 |
self._queued_credentials: set = set() # Track credentials already in queue
|
| 56 |
+
self._unavailable_credentials: set = (
|
| 57 |
+
set()
|
| 58 |
+
) # Mark credentials unavailable during re-auth
|
| 59 |
self._queue_tracking_lock = asyncio.Lock() # Protects queue sets
|
| 60 |
+
self._queue_processor_task: Optional[asyncio.Task] = (
|
| 61 |
+
None # Background worker task
|
| 62 |
+
)
|
| 63 |
|
| 64 |
def _parse_env_credential_path(self, path: str) -> Optional[str]:
|
| 65 |
"""
|
| 66 |
Parse a virtual env:// path and return the credential index.
|
| 67 |
+
|
| 68 |
Supported formats:
|
| 69 |
- "env://provider/0" - Legacy single credential (no index in env var names)
|
| 70 |
- "env://provider/1" - First numbered credential (QWEN_CODE_1_ACCESS_TOKEN)
|
| 71 |
+
|
| 72 |
Returns:
|
| 73 |
The credential index as string, or None if path is not an env:// path
|
| 74 |
"""
|
| 75 |
if not path.startswith("env://"):
|
| 76 |
return None
|
| 77 |
+
|
| 78 |
parts = path[6:].split("/")
|
| 79 |
if len(parts) >= 2:
|
| 80 |
return parts[1]
|
| 81 |
return "0"
|
| 82 |
|
| 83 |
+
def _load_from_env(
|
| 84 |
+
self, credential_index: Optional[str] = None
|
| 85 |
+
) -> Optional[Dict[str, Any]]:
|
| 86 |
"""
|
| 87 |
Load OAuth credentials from environment variables for stateless deployments.
|
| 88 |
|
|
|
|
| 107 |
else:
|
| 108 |
prefix = "QWEN_CODE"
|
| 109 |
default_email = "env-user"
|
| 110 |
+
|
| 111 |
access_token = os.getenv(f"{prefix}_ACCESS_TOKEN")
|
| 112 |
refresh_token = os.getenv(f"{prefix}_REFRESH_TOKEN")
|
| 113 |
|
|
|
|
| 115 |
if not (access_token and refresh_token):
|
| 116 |
return None
|
| 117 |
|
| 118 |
+
lib_logger.debug(
|
| 119 |
+
f"Loading Qwen Code credentials from environment variables (prefix: {prefix})"
|
| 120 |
+
)
|
| 121 |
|
| 122 |
# Parse expiry_date as float, default to 0 if not present
|
| 123 |
expiry_str = os.getenv(f"{prefix}_EXPIRY_DATE", "0")
|
| 124 |
try:
|
| 125 |
expiry_date = float(expiry_str)
|
| 126 |
except ValueError:
|
| 127 |
+
lib_logger.warning(
|
| 128 |
+
f"Invalid {prefix}_EXPIRY_DATE value: {expiry_str}, using 0"
|
| 129 |
+
)
|
| 130 |
expiry_date = 0
|
| 131 |
|
| 132 |
creds = {
|
| 133 |
"access_token": access_token,
|
| 134 |
"refresh_token": refresh_token,
|
| 135 |
"expiry_date": expiry_date,
|
| 136 |
+
"resource_url": os.getenv(
|
| 137 |
+
f"{prefix}_RESOURCE_URL", "https://portal.qwen.ai/v1"
|
| 138 |
+
),
|
| 139 |
"_proxy_metadata": {
|
| 140 |
"email": os.getenv(f"{prefix}_EMAIL", default_email),
|
| 141 |
"last_check_timestamp": time.time(),
|
| 142 |
"loaded_from_env": True,
|
| 143 |
+
"env_credential_index": credential_index or "0",
|
| 144 |
+
},
|
| 145 |
}
|
| 146 |
|
| 147 |
return creds
|
|
|
|
| 150 |
"""Reads credentials from file and populates the cache. No locking."""
|
| 151 |
try:
|
| 152 |
lib_logger.debug(f"Reading Qwen credentials from file: {path}")
|
| 153 |
+
with open(path, "r") as f:
|
| 154 |
creds = json.load(f)
|
| 155 |
self._credentials_cache[path] = creds
|
| 156 |
return creds
|
|
|
|
| 174 |
if credential_index is not None:
|
| 175 |
env_creds = self._load_from_env(credential_index)
|
| 176 |
if env_creds:
|
| 177 |
+
lib_logger.info(
|
| 178 |
+
f"Using Qwen Code credentials from environment variables (index: {credential_index})"
|
| 179 |
+
)
|
| 180 |
self._credentials_cache[path] = env_creds
|
| 181 |
return env_creds
|
| 182 |
else:
|
| 183 |
+
raise IOError(
|
| 184 |
+
f"Environment variables for Qwen Code credential index {credential_index} not found"
|
| 185 |
+
)
|
| 186 |
|
| 187 |
# For file paths, try loading from legacy env vars first
|
| 188 |
env_creds = self._load_from_env()
|
| 189 |
if env_creds:
|
| 190 |
+
lib_logger.info(
|
| 191 |
+
"Using Qwen Code credentials from environment variables"
|
| 192 |
+
)
|
| 193 |
self._credentials_cache[path] = env_creds
|
| 194 |
return env_creds
|
| 195 |
|
|
|
|
| 212 |
tmp_path = None
|
| 213 |
try:
|
| 214 |
# Create temp file in same directory as target (ensures same filesystem)
|
| 215 |
+
tmp_fd, tmp_path = tempfile.mkstemp(
|
| 216 |
+
dir=parent_dir, prefix=".tmp_", suffix=".json", text=True
|
| 217 |
+
)
|
| 218 |
|
| 219 |
# Write JSON to temp file
|
| 220 |
+
with os.fdopen(tmp_fd, "w") as f:
|
| 221 |
json.dump(creds, f, indent=2)
|
| 222 |
tmp_fd = None # fdopen closes the fd
|
| 223 |
|
|
|
|
| 234 |
|
| 235 |
# Update cache AFTER successful file write
|
| 236 |
self._credentials_cache[path] = creds
|
| 237 |
+
lib_logger.debug(
|
| 238 |
+
f"Saved updated Qwen OAuth credentials to '{path}' (atomic write)."
|
| 239 |
+
)
|
| 240 |
|
| 241 |
except Exception as e:
|
| 242 |
+
lib_logger.error(
|
| 243 |
+
f"Failed to save updated Qwen OAuth credentials to '{path}': {e}"
|
| 244 |
+
)
|
| 245 |
# Clean up temp file if it still exists
|
| 246 |
if tmp_fd is not None:
|
| 247 |
try:
|
|
|
|
| 286 |
headers = {
|
| 287 |
"Content-Type": "application/x-www-form-urlencoded",
|
| 288 |
"Accept": "application/json",
|
| 289 |
+
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
|
| 290 |
}
|
| 291 |
|
| 292 |
async with httpx.AsyncClient() as client:
|
| 293 |
for attempt in range(max_retries):
|
| 294 |
try:
|
| 295 |
+
response = await client.post(
|
| 296 |
+
TOKEN_ENDPOINT,
|
| 297 |
+
headers=headers,
|
| 298 |
+
data={
|
| 299 |
+
"grant_type": "refresh_token",
|
| 300 |
+
"refresh_token": refresh_token,
|
| 301 |
+
"client_id": CLIENT_ID,
|
| 302 |
+
},
|
| 303 |
+
timeout=30.0,
|
| 304 |
+
)
|
| 305 |
response.raise_for_status()
|
| 306 |
new_token_data = response.json()
|
| 307 |
break # Success
|
|
|
|
| 310 |
last_error = e
|
| 311 |
status_code = e.response.status_code
|
| 312 |
error_body = e.response.text
|
| 313 |
+
lib_logger.error(
|
| 314 |
+
f"HTTP {status_code} for '{Path(path).name}': {error_body}"
|
| 315 |
+
)
|
| 316 |
|
| 317 |
# [INVALID GRANT HANDLING] Handle 401/403 by triggering re-authentication
|
| 318 |
if status_code in (401, 403):
|
|
|
|
| 325 |
|
| 326 |
elif status_code == 429:
|
| 327 |
retry_after = int(e.response.headers.get("Retry-After", 60))
|
| 328 |
+
lib_logger.warning(
|
| 329 |
+
f"Rate limited (HTTP 429), retry after {retry_after}s"
|
| 330 |
+
)
|
| 331 |
if attempt < max_retries - 1:
|
| 332 |
await asyncio.sleep(retry_after)
|
| 333 |
continue
|
|
|
|
| 335 |
|
| 336 |
elif 500 <= status_code < 600:
|
| 337 |
if attempt < max_retries - 1:
|
| 338 |
+
wait_time = 2**attempt
|
| 339 |
+
lib_logger.warning(
|
| 340 |
+
f"Server error (HTTP {status_code}), retry {attempt + 1}/{max_retries} in {wait_time}s"
|
| 341 |
+
)
|
| 342 |
await asyncio.sleep(wait_time)
|
| 343 |
continue
|
| 344 |
raise
|
|
|
|
| 349 |
except (httpx.RequestError, httpx.TimeoutException) as e:
|
| 350 |
last_error = e
|
| 351 |
if attempt < max_retries - 1:
|
| 352 |
+
wait_time = 2**attempt
|
| 353 |
+
lib_logger.warning(
|
| 354 |
+
f"Network error during refresh: {e}, retry {attempt + 1}/{max_retries} in {wait_time}s"
|
| 355 |
+
)
|
| 356 |
await asyncio.sleep(wait_time)
|
| 357 |
continue
|
| 358 |
raise
|
| 359 |
|
| 360 |
# [INVALID GRANT RE-AUTH] Trigger OAuth flow if refresh token is invalid
|
| 361 |
if needs_reauth:
|
| 362 |
+
lib_logger.info(
|
| 363 |
+
f"Starting re-authentication for '{Path(path).name}'..."
|
| 364 |
+
)
|
| 365 |
try:
|
| 366 |
# Call initialize_token to trigger OAuth flow
|
| 367 |
new_creds = await self.initialize_token(path)
|
|
|
|
| 370 |
self._next_refresh_after.pop(path, None)
|
| 371 |
return new_creds
|
| 372 |
except Exception as reauth_error:
|
| 373 |
+
lib_logger.error(
|
| 374 |
+
f"Re-authentication failed for '{Path(path).name}': {reauth_error}"
|
| 375 |
+
)
|
| 376 |
# [BACKOFF TRACKING] Increment failure count and set backoff timer
|
| 377 |
+
self._refresh_failures[path] = (
|
| 378 |
+
self._refresh_failures.get(path, 0) + 1
|
| 379 |
+
)
|
| 380 |
+
backoff_seconds = min(
|
| 381 |
+
300, 30 * (2 ** self._refresh_failures[path])
|
| 382 |
+
) # Max 5 min backoff
|
| 383 |
self._next_refresh_after[path] = time.time() + backoff_seconds
|
| 384 |
+
lib_logger.debug(
|
| 385 |
+
f"Setting backoff for '{Path(path).name}': {backoff_seconds}s"
|
| 386 |
+
)
|
| 387 |
+
raise ValueError(
|
| 388 |
+
f"Refresh token invalid and re-authentication failed: {reauth_error}"
|
| 389 |
+
)
|
| 390 |
|
| 391 |
if new_token_data is None:
|
| 392 |
# [BACKOFF TRACKING] Increment failure count and set backoff timer
|
| 393 |
self._refresh_failures[path] = self._refresh_failures.get(path, 0) + 1
|
| 394 |
+
backoff_seconds = min(
|
| 395 |
+
300, 30 * (2 ** self._refresh_failures[path])
|
| 396 |
+
) # Max 5 min backoff
|
| 397 |
self._next_refresh_after[path] = time.time() + backoff_seconds
|
| 398 |
+
lib_logger.debug(
|
| 399 |
+
f"Setting backoff for '{Path(path).name}': {backoff_seconds}s"
|
| 400 |
+
)
|
| 401 |
raise last_error or Exception("Token refresh failed after all retries")
|
| 402 |
|
| 403 |
creds_from_file["access_token"] = new_token_data["access_token"]
|
| 404 |
+
creds_from_file["refresh_token"] = new_token_data.get(
|
| 405 |
+
"refresh_token", creds_from_file["refresh_token"]
|
| 406 |
+
)
|
| 407 |
+
creds_from_file["expiry_date"] = (
|
| 408 |
+
time.time() + new_token_data["expires_in"]
|
| 409 |
+
) * 1000
|
| 410 |
+
creds_from_file["resource_url"] = new_token_data.get(
|
| 411 |
+
"resource_url", creds_from_file.get("resource_url")
|
| 412 |
+
)
|
| 413 |
|
| 414 |
# Ensure _proxy_metadata exists and update timestamp
|
| 415 |
if "_proxy_metadata" not in creds_from_file:
|
|
|
|
| 418 |
|
| 419 |
# [VALIDATION] Verify required fields exist after refresh
|
| 420 |
required_fields = ["access_token", "refresh_token"]
|
| 421 |
+
missing_fields = [
|
| 422 |
+
field for field in required_fields if not creds_from_file.get(field)
|
| 423 |
+
]
|
| 424 |
if missing_fields:
|
| 425 |
+
raise ValueError(
|
| 426 |
+
f"Refreshed credentials missing required fields: {missing_fields}"
|
| 427 |
+
)
|
| 428 |
|
| 429 |
# [BACKOFF TRACKING] Clear failure count on successful refresh
|
| 430 |
self._refresh_failures.pop(path, None)
|
| 431 |
self._next_refresh_after.pop(path, None)
|
| 432 |
|
| 433 |
await self._save_credentials(path, creds_from_file)
|
| 434 |
+
lib_logger.debug(
|
| 435 |
+
f"Successfully refreshed Qwen OAuth token for '{Path(path).name}'."
|
| 436 |
+
)
|
| 437 |
return creds_from_file
|
| 438 |
|
| 439 |
async def get_api_details(self, credential_identifier: str) -> Tuple[str, str]:
|
|
|
|
| 447 |
# Detect credential type
|
| 448 |
if os.path.isfile(credential_identifier):
|
| 449 |
# OAuth credential: file path to JSON
|
| 450 |
+
lib_logger.debug(
|
| 451 |
+
f"Using OAuth credentials from file: {credential_identifier}"
|
| 452 |
+
)
|
| 453 |
creds = await self._load_credentials(credential_identifier)
|
| 454 |
|
| 455 |
if self._is_token_expired(creds):
|
| 456 |
creds = await self._refresh_token(credential_identifier)
|
| 457 |
+
|
| 458 |
base_url = creds.get("resource_url", "https://portal.qwen.ai/v1")
|
| 459 |
if not base_url.startswith("http"):
|
| 460 |
base_url = f"https://{base_url}"
|
|
|
|
| 474 |
"""
|
| 475 |
# Check if it's an env:// virtual path (OAuth credentials from environment)
|
| 476 |
is_env_path = credential_identifier.startswith("env://")
|
| 477 |
+
|
| 478 |
# Only refresh if it's an OAuth credential (file path or env:// path)
|
| 479 |
if not is_env_path and not os.path.isfile(credential_identifier):
|
| 480 |
return # Direct API key, no refresh needed
|
|
|
|
| 482 |
creds = await self._load_credentials(credential_identifier)
|
| 483 |
if self._is_token_expired(creds):
|
| 484 |
# Queue for refresh with needs_reauth=False (automated refresh)
|
| 485 |
+
await self._queue_refresh(
|
| 486 |
+
credential_identifier, force=False, needs_reauth=False
|
| 487 |
+
)
|
| 488 |
|
| 489 |
async def _get_lock(self, path: str) -> asyncio.Lock:
|
| 490 |
# [FIX RACE CONDITION] Protect lock creation with a master lock
|
|
|
|
| 500 |
async def _ensure_queue_processor_running(self):
    """Start the background refresh-queue processor on demand.

    A fresh task is spawned only when no processor task has been created
    yet, or when the previous task already finished (e.g. it exited after
    its idle timeout). Otherwise the running task is left untouched.
    """
    current = self._queue_processor_task
    if current is not None and not current.done():
        return  # processor already active; nothing to do
    self._queue_processor_task = asyncio.create_task(self._process_refresh_queue())
|
| 506 |
|
| 507 |
+
async def _queue_refresh(
|
| 508 |
+
self, path: str, force: bool = False, needs_reauth: bool = False
|
| 509 |
+
):
|
| 510 |
"""Add a credential to the refresh queue if not already queued.
|
| 511 |
+
|
| 512 |
Args:
|
| 513 |
path: Credential file path
|
| 514 |
force: Force refresh even if not expired
|
|
|
|
| 523 |
if now < backoff_until:
|
| 524 |
# Credential is in backoff for automated refresh, do not queue
|
| 525 |
remaining = int(backoff_until - now)
|
| 526 |
+
lib_logger.debug(
|
| 527 |
+
f"Skipping automated refresh for '{Path(path).name}' (in backoff for {remaining}s)"
|
| 528 |
+
)
|
| 529 |
return
|
| 530 |
+
|
| 531 |
async with self._queue_tracking_lock:
|
| 532 |
if path not in self._queued_credentials:
|
| 533 |
self._queued_credentials.add(path)
|
|
|
|
| 543 |
# Wait for an item with timeout to allow graceful shutdown
|
| 544 |
try:
|
| 545 |
path, force, needs_reauth = await asyncio.wait_for(
|
| 546 |
+
self._refresh_queue.get(), timeout=60.0
|
|
|
|
| 547 |
)
|
| 548 |
except asyncio.TimeoutError:
|
| 549 |
# No items for 60s, exit to save resources
|
| 550 |
self._queue_processor_task = None
|
| 551 |
return
|
| 552 |
+
|
| 553 |
try:
|
| 554 |
# Perform the actual refresh (still using per-credential lock)
|
| 555 |
async with await self._get_lock(path):
|
|
|
|
| 560 |
async with self._queue_tracking_lock:
|
| 561 |
self._unavailable_credentials.discard(path)
|
| 562 |
continue
|
| 563 |
+
|
| 564 |
# Perform refresh
|
| 565 |
if not creds:
|
| 566 |
creds = await self._load_credentials(path)
|
| 567 |
await self._refresh_token(path, force=force)
|
| 568 |
+
|
| 569 |
# SUCCESS: Mark as available again
|
| 570 |
async with self._queue_tracking_lock:
|
| 571 |
self._unavailable_credentials.discard(path)
|
| 572 |
+
|
| 573 |
finally:
|
| 574 |
# Remove from queued set
|
| 575 |
async with self._queue_tracking_lock:
|
|
|
|
| 584 |
async with self._queue_tracking_lock:
|
| 585 |
self._unavailable_credentials.discard(path)
|
| 586 |
|
| 587 |
+
async def initialize_token(
|
| 588 |
+
self, creds_or_path: Union[Dict[str, Any], str]
|
| 589 |
+
) -> Dict[str, Any]:
|
| 590 |
"""Initiates device flow if tokens are missing or invalid."""
|
| 591 |
path = creds_or_path if isinstance(creds_or_path, str) else None
|
| 592 |
|
| 593 |
# Get display name from metadata if available, otherwise derive from path
|
| 594 |
if isinstance(creds_or_path, dict):
|
| 595 |
+
display_name = creds_or_path.get("_proxy_metadata", {}).get(
|
| 596 |
+
"display_name", "in-memory object"
|
| 597 |
+
)
|
| 598 |
else:
|
| 599 |
display_name = Path(path).name if path else "in-memory object"
|
| 600 |
|
| 601 |
lib_logger.debug(f"Initializing Qwen token for '{display_name}'...")
|
| 602 |
try:
|
| 603 |
+
creds = (
|
| 604 |
+
await self._load_credentials(creds_or_path) if path else creds_or_path
|
| 605 |
+
)
|
| 606 |
|
| 607 |
reason = ""
|
| 608 |
if not creds.get("refresh_token"):
|
|
|
|
| 615 |
try:
|
| 616 |
return await self._refresh_token(path)
|
| 617 |
except Exception as e:
|
| 618 |
+
lib_logger.warning(
|
| 619 |
+
f"Automatic token refresh for '{display_name}' failed: {e}. Proceeding to interactive login."
|
| 620 |
+
)
|
| 621 |
+
|
| 622 |
+
lib_logger.warning(
|
| 623 |
+
f"Qwen OAuth token for '{display_name}' needs setup: {reason}."
|
| 624 |
+
)
|
| 625 |
|
|
|
|
|
|
|
| 626 |
# [HEADLESS DETECTION] Check if running in headless environment
|
| 627 |
is_headless = is_headless_environment()
|
| 628 |
+
|
| 629 |
+
code_verifier = (
|
| 630 |
+
base64.urlsafe_b64encode(secrets.token_bytes(32))
|
| 631 |
+
.decode("utf-8")
|
| 632 |
+
.rstrip("=")
|
| 633 |
+
)
|
| 634 |
+
code_challenge = (
|
| 635 |
+
base64.urlsafe_b64encode(
|
| 636 |
+
hashlib.sha256(code_verifier.encode("utf-8")).digest()
|
| 637 |
+
)
|
| 638 |
+
.decode("utf-8")
|
| 639 |
+
.rstrip("=")
|
| 640 |
+
)
|
| 641 |
+
|
| 642 |
headers = {
|
| 643 |
"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36",
|
| 644 |
"Content-Type": "application/x-www-form-urlencoded",
|
| 645 |
+
"Accept": "application/json",
|
| 646 |
}
|
| 647 |
async with httpx.AsyncClient() as client:
|
| 648 |
request_data = {
|
| 649 |
"client_id": CLIENT_ID,
|
| 650 |
"scope": SCOPE,
|
| 651 |
"code_challenge": code_challenge,
|
| 652 |
+
"code_challenge_method": "S256",
|
| 653 |
}
|
| 654 |
lib_logger.debug(f"Qwen device code request data: {request_data}")
|
| 655 |
try:
|
| 656 |
dev_response = await client.post(
|
| 657 |
"https://chat.qwen.ai/api/v1/oauth2/device/code",
|
| 658 |
headers=headers,
|
| 659 |
+
data=request_data,
|
| 660 |
)
|
| 661 |
dev_response.raise_for_status()
|
| 662 |
dev_data = dev_response.json()
|
| 663 |
lib_logger.debug(f"Qwen device auth response: {dev_data}")
|
| 664 |
except httpx.HTTPStatusError as e:
|
| 665 |
+
lib_logger.error(
|
| 666 |
+
f"Qwen device code request failed with status {e.response.status_code}: {e.response.text}"
|
| 667 |
+
)
|
| 668 |
raise e
|
| 669 |
+
|
| 670 |
# [HEADLESS SUPPORT] Display appropriate instructions
|
| 671 |
if is_headless:
|
| 672 |
auth_panel_text = Text.from_markup(
|
|
|
|
| 682 |
"2. [bold]Copy your email[/bold] or another unique identifier and authorize the application.\n"
|
| 683 |
"3. You will be prompted to enter your identifier after authorization."
|
| 684 |
)
|
| 685 |
+
|
| 686 |
+
console.print(
|
| 687 |
+
Panel(
|
| 688 |
+
auth_panel_text,
|
| 689 |
+
title=f"Qwen OAuth Setup for [bold yellow]{display_name}[/bold yellow]",
|
| 690 |
+
style="bold blue",
|
| 691 |
+
)
|
| 692 |
+
)
|
| 693 |
+
# [URL DISPLAY] Print URL with proper escaping to prevent Rich markup issues.
|
| 694 |
+
# IMPORTANT: OAuth URLs contain special characters (=, &, etc.) that Rich might
|
| 695 |
+
# interpret as markup in some terminal configurations. We escape the URL to
|
| 696 |
+
# ensure it displays correctly.
|
| 697 |
+
#
|
| 698 |
+
# KNOWN ISSUE: If Rich rendering fails entirely (e.g., terminal doesn't support
|
| 699 |
+
# ANSI codes, or output is piped), the escaped URL should still be valid.
|
| 700 |
+
# However, if the terminal strips or mangles the output, users should copy
|
| 701 |
+
# the URL directly from logs or use --verbose to see the raw URL.
|
| 702 |
+
#
|
| 703 |
+
# The [link=...] markup creates a clickable hyperlink in supported terminals
|
| 704 |
+
# (iTerm2, Windows Terminal, etc.), but the displayed text is the escaped URL
|
| 705 |
+
# which can be safely copied even if the hyperlink doesn't work.
|
| 706 |
+
verification_url = dev_data["verification_uri_complete"]
|
| 707 |
+
escaped_url = rich_escape(verification_url)
|
| 708 |
+
console.print(
|
| 709 |
+
f"[bold]URL:[/bold] [link={verification_url}]{escaped_url}[/link]\n"
|
| 710 |
+
)
|
| 711 |
+
|
| 712 |
# [HEADLESS SUPPORT] Only attempt browser open if NOT headless
|
| 713 |
if not is_headless:
|
| 714 |
try:
|
| 715 |
+
webbrowser.open(dev_data["verification_uri_complete"])
|
| 716 |
+
lib_logger.info(
|
| 717 |
+
"Browser opened successfully for Qwen OAuth flow"
|
| 718 |
+
)
|
| 719 |
except Exception as e:
|
| 720 |
+
lib_logger.warning(
|
| 721 |
+
f"Failed to open browser automatically: {e}. Please open the URL manually."
|
| 722 |
+
)
|
| 723 |
+
|
| 724 |
token_data = None
|
| 725 |
start_time = time.time()
|
| 726 |
+
interval = dev_data.get("interval", 5)
|
| 727 |
|
| 728 |
+
with console.status(
|
| 729 |
+
"[bold green]Polling for token, please complete authentication in the browser...[/bold green]",
|
| 730 |
+
spinner="dots",
|
| 731 |
+
) as status:
|
| 732 |
+
while time.time() - start_time < dev_data["expires_in"]:
|
| 733 |
poll_response = await client.post(
|
| 734 |
TOKEN_ENDPOINT,
|
| 735 |
headers=headers,
|
| 736 |
data={
|
| 737 |
"grant_type": "urn:ietf:params:oauth:grant-type:device_code",
|
| 738 |
+
"device_code": dev_data["device_code"],
|
| 739 |
"client_id": CLIENT_ID,
|
| 740 |
+
"code_verifier": code_verifier,
|
| 741 |
+
},
|
| 742 |
)
|
| 743 |
if poll_response.status_code == 200:
|
| 744 |
token_data = poll_response.json()
|
|
|
|
| 748 |
poll_data = poll_response.json()
|
| 749 |
error_type = poll_data.get("error")
|
| 750 |
if error_type == "authorization_pending":
|
| 751 |
+
lib_logger.debug(
|
| 752 |
+
f"Polling status: {error_type}, waiting {interval}s"
|
| 753 |
+
)
|
| 754 |
elif error_type == "slow_down":
|
| 755 |
interval = int(interval * 1.5)
|
| 756 |
if interval > 10:
|
| 757 |
interval = 10
|
| 758 |
+
lib_logger.debug(
|
| 759 |
+
f"Polling status: {error_type}, waiting {interval}s"
|
| 760 |
+
)
|
| 761 |
else:
|
| 762 |
+
raise ValueError(
|
| 763 |
+
f"Token polling failed: {poll_data.get('error_description', error_type)}"
|
| 764 |
+
)
|
| 765 |
else:
|
| 766 |
poll_response.raise_for_status()
|
| 767 |
+
|
| 768 |
await asyncio.sleep(interval)
|
| 769 |
+
|
| 770 |
if not token_data:
|
| 771 |
raise TimeoutError("Qwen device flow timed out.")
|
| 772 |
+
|
| 773 |
+
creds.update(
|
| 774 |
+
{
|
| 775 |
+
"access_token": token_data["access_token"],
|
| 776 |
+
"refresh_token": token_data.get("refresh_token"),
|
| 777 |
+
"expiry_date": (time.time() + token_data["expires_in"])
|
| 778 |
+
* 1000,
|
| 779 |
+
"resource_url": token_data.get("resource_url"),
|
| 780 |
+
}
|
| 781 |
+
)
|
| 782 |
|
| 783 |
# Prompt for user identifier and create metadata object if needed
|
| 784 |
if not creds.get("_proxy_metadata", {}).get("email"):
|
| 785 |
try:
|
| 786 |
+
prompt_text = Text.from_markup(
|
| 787 |
+
f"\\n[bold]Please enter your email or a unique identifier for [yellow]'{display_name}'[/yellow][/bold]"
|
| 788 |
+
)
|
| 789 |
email = Prompt.ask(prompt_text)
|
| 790 |
creds["_proxy_metadata"] = {
|
| 791 |
"email": email.strip(),
|
| 792 |
+
"last_check_timestamp": time.time(),
|
| 793 |
}
|
| 794 |
except (EOFError, KeyboardInterrupt):
|
| 795 |
+
console.print(
|
| 796 |
+
"\\n[bold yellow]No identifier provided. Deduplication will not be possible.[/bold yellow]"
|
| 797 |
+
)
|
| 798 |
+
creds["_proxy_metadata"] = {
|
| 799 |
+
"email": None,
|
| 800 |
+
"last_check_timestamp": time.time(),
|
| 801 |
+
}
|
| 802 |
|
| 803 |
if path:
|
| 804 |
await self._save_credentials(path, creds)
|
| 805 |
+
lib_logger.info(
|
| 806 |
+
f"Qwen OAuth initialized successfully for '{display_name}'."
|
| 807 |
+
)
|
| 808 |
return creds
|
| 809 |
|
| 810 |
lib_logger.info(f"Qwen OAuth token at '{display_name}' is valid.")
|
|
|
|
| 818 |
creds = await self._refresh_token(credential_path)
|
| 819 |
return {"Authorization": f"Bearer {creds['access_token']}"}
|
| 820 |
|
| 821 |
+
async def get_user_info(
|
| 822 |
+
self, creds_or_path: Union[Dict[str, Any], str]
|
| 823 |
+
) -> Dict[str, Any]:
|
| 824 |
"""
|
| 825 |
Retrieves user info from the _proxy_metadata in the credential file.
|
| 826 |
"""
|
| 827 |
try:
|
| 828 |
path = creds_or_path if isinstance(creds_or_path, str) else None
|
| 829 |
+
creds = (
|
| 830 |
+
await self._load_credentials(creds_or_path) if path else creds_or_path
|
| 831 |
+
)
|
| 832 |
+
|
| 833 |
# This will ensure the token is valid and metadata exists if the flow was just run
|
| 834 |
if path:
|
| 835 |
await self.initialize_token(path)
|
| 836 |
+
creds = await self._load_credentials(
|
| 837 |
+
path
|
| 838 |
+
) # Re-load after potential init
|
| 839 |
|
| 840 |
metadata = creds.get("_proxy_metadata", {"email": None})
|
| 841 |
email = metadata.get("email")
|
| 842 |
|
| 843 |
if not email:
|
| 844 |
+
lib_logger.warning(
|
| 845 |
+
f"No email found in _proxy_metadata for '{path or 'in-memory object'}'."
|
| 846 |
+
)
|
| 847 |
|
| 848 |
# Update timestamp on check and save if it's a file-based credential
|
| 849 |
if path and "_proxy_metadata" in creds:
|
|
|
|
| 853 |
return {"email": email}
|
| 854 |
except Exception as e:
|
| 855 |
lib_logger.error(f"Failed to get Qwen user info from credentials: {e}")
|
| 856 |
+
return {"email": None}
|
src/rotator_library/utils/headless_detection.py
CHANGED
|
@@ -1,24 +1,27 @@
|
|
| 1 |
# src/rotator_library/utils/headless_detection.py
|
| 2 |
|
| 3 |
import os
|
|
|
|
| 4 |
import logging
|
| 5 |
|
| 6 |
-
lib_logger = logging.getLogger(
|
| 7 |
|
| 8 |
# Import console for user-visible output
|
| 9 |
try:
|
| 10 |
from rich.console import Console
|
|
|
|
| 11 |
console = Console()
|
| 12 |
except ImportError:
|
| 13 |
console = None
|
| 14 |
|
|
|
|
| 15 |
def is_headless_environment() -> bool:
|
| 16 |
"""
|
| 17 |
Detects if the current environment is headless (no GUI available).
|
| 18 |
-
|
| 19 |
Returns:
|
| 20 |
True if headless environment is detected, False otherwise
|
| 21 |
-
|
| 22 |
Detection logic:
|
| 23 |
- Linux/Unix: Check DISPLAY environment variable
|
| 24 |
- SSH detection: Check SSH_CONNECTION or SSH_CLIENT
|
|
@@ -26,17 +29,20 @@ def is_headless_environment() -> bool:
|
|
| 26 |
- Windows: Check SESSIONNAME for service/headless indicators
|
| 27 |
"""
|
| 28 |
headless_indicators = []
|
| 29 |
-
|
| 30 |
-
# Check DISPLAY for Linux
|
| 31 |
-
|
|
|
|
|
|
|
|
|
|
| 32 |
display = os.getenv("DISPLAY")
|
| 33 |
if display is None or display.strip() == "":
|
| 34 |
-
headless_indicators.append("No DISPLAY variable (Linux
|
| 35 |
-
|
| 36 |
# Check for SSH connection
|
| 37 |
if os.getenv("SSH_CONNECTION") or os.getenv("SSH_CLIENT") or os.getenv("SSH_TTY"):
|
| 38 |
headless_indicators.append("SSH connection detected")
|
| 39 |
-
|
| 40 |
# Check for CI environments
|
| 41 |
ci_vars = [
|
| 42 |
"CI", # Generic CI indicator
|
|
@@ -55,30 +61,38 @@ def is_headless_environment() -> bool:
|
|
| 55 |
if os.getenv(var):
|
| 56 |
headless_indicators.append(f"CI environment detected ({var})")
|
| 57 |
break
|
| 58 |
-
|
| 59 |
# Check Windows session type
|
| 60 |
-
if os.name ==
|
| 61 |
session_name = os.getenv("SESSIONNAME", "").lower()
|
| 62 |
if session_name in ["services", "rdp-tcp"]:
|
| 63 |
headless_indicators.append(f"Windows headless session ({session_name})")
|
| 64 |
-
|
| 65 |
# Detect Docker/container environment
|
| 66 |
if os.path.exists("/.dockerenv") or os.path.exists("/run/.containerenv"):
|
| 67 |
headless_indicators.append("Container environment detected")
|
| 68 |
-
|
| 69 |
# Determine if headless
|
| 70 |
is_headless = len(headless_indicators) > 0
|
| 71 |
-
|
| 72 |
if is_headless:
|
| 73 |
# Log to logger
|
| 74 |
-
lib_logger.info(
|
| 75 |
-
|
|
|
|
|
|
|
| 76 |
# Print to console for user visibility
|
| 77 |
if console:
|
| 78 |
-
console.print(
|
| 79 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 80 |
else:
|
| 81 |
# Only log to debug, no console output
|
| 82 |
-
lib_logger.debug(
|
| 83 |
-
|
|
|
|
|
|
|
| 84 |
return is_headless
|
|
|
|
| 1 |
# src/rotator_library/utils/headless_detection.py
|
| 2 |
|
| 3 |
import os
|
| 4 |
+
import sys
|
| 5 |
import logging
|
| 6 |
|
| 7 |
+
lib_logger = logging.getLogger("rotator_library")
|
| 8 |
|
| 9 |
# Import console for user-visible output
|
| 10 |
try:
|
| 11 |
from rich.console import Console
|
| 12 |
+
|
| 13 |
console = Console()
|
| 14 |
except ImportError:
|
| 15 |
console = None
|
| 16 |
|
| 17 |
+
|
| 18 |
def is_headless_environment() -> bool:
|
| 19 |
"""
|
| 20 |
Detects if the current environment is headless (no GUI available).
|
| 21 |
+
|
| 22 |
Returns:
|
| 23 |
True if headless environment is detected, False otherwise
|
| 24 |
+
|
| 25 |
Detection logic:
|
| 26 |
- Linux/Unix: Check DISPLAY environment variable
|
| 27 |
- SSH detection: Check SSH_CONNECTION or SSH_CLIENT
|
|
|
|
| 29 |
- Windows: Check SESSIONNAME for service/headless indicators
|
| 30 |
"""
|
| 31 |
headless_indicators = []
|
| 32 |
+
|
| 33 |
+
# Check DISPLAY for Linux GUI availability (skip on Windows and macOS)
|
| 34 |
+
# NOTE: DISPLAY is an X11 (X Window System) variable used on Linux.
|
| 35 |
+
# macOS uses its native Quartz windowing system, NOT X11, so DISPLAY is
|
| 36 |
+
# typically unset on macOS even with a full GUI. Only check DISPLAY on Linux.
|
| 37 |
+
if os.name != "nt" and sys.platform != "darwin": # Linux only
|
| 38 |
display = os.getenv("DISPLAY")
|
| 39 |
if display is None or display.strip() == "":
|
| 40 |
+
headless_indicators.append("No DISPLAY variable (Linux headless)")
|
| 41 |
+
|
| 42 |
# Check for SSH connection
|
| 43 |
if os.getenv("SSH_CONNECTION") or os.getenv("SSH_CLIENT") or os.getenv("SSH_TTY"):
|
| 44 |
headless_indicators.append("SSH connection detected")
|
| 45 |
+
|
| 46 |
# Check for CI environments
|
| 47 |
ci_vars = [
|
| 48 |
"CI", # Generic CI indicator
|
|
|
|
| 61 |
if os.getenv(var):
|
| 62 |
headless_indicators.append(f"CI environment detected ({var})")
|
| 63 |
break
|
| 64 |
+
|
| 65 |
# Check Windows session type
|
| 66 |
+
if os.name == "nt": # Windows
|
| 67 |
session_name = os.getenv("SESSIONNAME", "").lower()
|
| 68 |
if session_name in ["services", "rdp-tcp"]:
|
| 69 |
headless_indicators.append(f"Windows headless session ({session_name})")
|
| 70 |
+
|
| 71 |
# Detect Docker/container environment
|
| 72 |
if os.path.exists("/.dockerenv") or os.path.exists("/run/.containerenv"):
|
| 73 |
headless_indicators.append("Container environment detected")
|
| 74 |
+
|
| 75 |
# Determine if headless
|
| 76 |
is_headless = len(headless_indicators) > 0
|
| 77 |
+
|
| 78 |
if is_headless:
|
| 79 |
# Log to logger
|
| 80 |
+
lib_logger.info(
|
| 81 |
+
f"Headless environment detected: {'; '.join(headless_indicators)}"
|
| 82 |
+
)
|
| 83 |
+
|
| 84 |
# Print to console for user visibility
|
| 85 |
if console:
|
| 86 |
+
console.print(
|
| 87 |
+
f"[yellow]βΉ Headless environment detected:[/yellow] {'; '.join(headless_indicators)}"
|
| 88 |
+
)
|
| 89 |
+
console.print(
|
| 90 |
+
"[yellow]β Browser will NOT open automatically. Please use the URL below.[/yellow]\n"
|
| 91 |
+
)
|
| 92 |
else:
|
| 93 |
# Only log to debug, no console output
|
| 94 |
+
lib_logger.debug(
|
| 95 |
+
"GUI environment detected, browser auto-open will be attempted"
|
| 96 |
+
)
|
| 97 |
+
|
| 98 |
return is_headless
|