Spaces:
Paused
refactor(core): 🔨 centralize path management for PyInstaller compatibility
Introduce a new `paths.py` utility module that provides centralized path resolution for all data files (logs, cache, OAuth credentials, usage data) with support for both PyInstaller EXE and script/library runtime modes.
- Add `get_default_root()` to auto-detect the EXE directory when frozen, otherwise use the CWD
- Add `get_logs_dir()`, `get_cache_dir()`, `get_oauth_dir()`, and `get_data_file()` helpers
- Update `RotatingClient` to accept optional `data_dir` parameter for override capability
- Refactor all hardcoded path calculations to use centralized utilities
- Update `CredentialManager` to accept `oauth_dir` parameter
- Convert `failure_logger` to lazy initialization with configurable logs directory
- Migrate `UsageManager` to accept optional `file_path` with automatic defaults
- Update all provider-specific logging directories to use centralized helpers
- Refactor credential tool and settings tool to use path utilities
- Update main.py to load .env files from correct root directory in both modes
This change eliminates fragile `Path(__file__).resolve().parent` patterns and ensures all file operations work correctly when the application is packaged as a standalone executable.
- src/proxy_app/detailed_logger.py +9 -3
- src/proxy_app/main.py +13 -11
- src/proxy_app/settings_tool.py +7 -3
- src/rotator_library/client.py +27 -5
- src/rotator_library/credential_manager.py +70 -34
- src/rotator_library/credential_tool.py +43 -31
- src/rotator_library/failure_logger.py +53 -10
- src/rotator_library/providers/antigravity_provider.py +23 -9
- src/rotator_library/providers/gemini_cli_provider.py +19 -10
- src/rotator_library/providers/iflow_provider.py +10 -3
- src/rotator_library/providers/qwen_code_provider.py +8 -3
- src/rotator_library/usage_manager.py +14 -4
- src/rotator_library/utils/__init__.py +12 -0
- src/rotator_library/utils/paths.py +99 -0
|
@@ -11,9 +11,15 @@ from rotator_library.utils.resilient_io import (
|
|
| 11 |
safe_log_write,
|
| 12 |
safe_mkdir,
|
| 13 |
)
|
|
|
|
| 14 |
|
| 15 |
-
|
| 16 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 17 |
|
| 18 |
|
| 19 |
class DetailedLogger:
|
|
@@ -31,7 +37,7 @@ class DetailedLogger:
|
|
| 31 |
self.start_time = time.time()
|
| 32 |
self.request_id = str(uuid.uuid4())
|
| 33 |
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
| 34 |
-
self.log_dir =
|
| 35 |
self.streaming = False
|
| 36 |
self._dir_available = safe_mkdir(self.log_dir, logging)
|
| 37 |
|
|
|
|
| 11 |
safe_log_write,
|
| 12 |
safe_mkdir,
|
| 13 |
)
|
| 14 |
+
from rotator_library.utils.paths import get_logs_dir
|
| 15 |
|
| 16 |
+
|
| 17 |
+
def _get_detailed_logs_dir() -> Path:
|
| 18 |
+
"""Get the detailed logs directory, creating it if needed."""
|
| 19 |
+
logs_dir = get_logs_dir()
|
| 20 |
+
detailed_dir = logs_dir / "detailed_logs"
|
| 21 |
+
detailed_dir.mkdir(parents=True, exist_ok=True)
|
| 22 |
+
return detailed_dir
|
| 23 |
|
| 24 |
|
| 25 |
class DetailedLogger:
|
|
|
|
| 37 |
self.start_time = time.time()
|
| 38 |
self.request_id = str(uuid.uuid4())
|
| 39 |
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
| 40 |
+
self.log_dir = _get_detailed_logs_dir() / f"{timestamp}_{self.request_id}"
|
| 41 |
self.streaming = False
|
| 42 |
self._dir_available = safe_mkdir(self.log_dir, logging)
|
| 43 |
|
|
@@ -51,12 +51,15 @@ _start_time = time.time()
|
|
| 51 |
# Load all .env files from root folder (main .env first, then any additional *.env files)
|
| 52 |
from dotenv import load_dotenv
|
| 53 |
from glob import glob
|
|
|
|
|
|
|
|
|
|
|
|
|
| 54 |
|
| 55 |
# Load main .env first
|
| 56 |
-
load_dotenv()
|
| 57 |
|
| 58 |
# Load any additional .env files (e.g., antigravity_all_combined.env, gemini_cli_all_combined.env)
|
| 59 |
-
_root_dir = Path.cwd()
|
| 60 |
_env_files_found = list(_root_dir.glob("*.env"))
|
| 61 |
for _env_file in sorted(_root_dir.glob("*.env")):
|
| 62 |
if _env_file.name != ".env": # Skip main .env (already loaded)
|
|
@@ -234,8 +237,7 @@ print(
|
|
| 234 |
# Note: Debug logging will be added after logging configuration below
|
| 235 |
|
| 236 |
# --- Logging Configuration ---
|
| 237 |
-
LOG_DIR =
|
| 238 |
-
LOG_DIR.mkdir(exist_ok=True)
|
| 239 |
|
| 240 |
# Configure a console handler with color (INFO and above only, no DEBUG)
|
| 241 |
console_handler = colorlog.StreamHandler(sys.stdout)
|
|
@@ -570,11 +572,11 @@ async def lifespan(app: FastAPI):
|
|
| 570 |
)
|
| 571 |
|
| 572 |
# Log loaded credentials summary (compact, always visible for deployment verification)
|
| 573 |
-
#_api_summary = ', '.join([f"{p}:{len(c)}" for p, c in api_keys.items()]) if api_keys else "none"
|
| 574 |
-
#_oauth_summary = ', '.join([f"{p}:{len(c)}" for p, c in oauth_credentials.items()]) if oauth_credentials else "none"
|
| 575 |
-
#_total_summary = ', '.join([f"{p}:{len(c)}" for p, c in client.all_credentials.items()])
|
| 576 |
-
#print(f"🔑 Credentials loaded: {_total_summary} (API: {_api_summary} | OAuth: {_oauth_summary})")
|
| 577 |
-
client.background_refresher.start()
|
| 578 |
app.state.rotating_client = client
|
| 579 |
|
| 580 |
# Warn if no provider credentials are configured
|
|
@@ -1263,8 +1265,8 @@ async def cost_estimate(request: Request, _=Depends(verify_api_key)):
|
|
| 1263 |
|
| 1264 |
|
| 1265 |
if __name__ == "__main__":
|
| 1266 |
-
# Define ENV_FILE for onboarding checks
|
| 1267 |
-
ENV_FILE =
|
| 1268 |
|
| 1269 |
# Check if launcher TUI should be shown (no arguments provided)
|
| 1270 |
if len(sys.argv) == 1:
|
|
|
|
| 51 |
# Load all .env files from root folder (main .env first, then any additional *.env files)
|
| 52 |
from dotenv import load_dotenv
|
| 53 |
from glob import glob
|
| 54 |
+
from rotator_library.utils.paths import get_default_root, get_logs_dir, get_data_file
|
| 55 |
+
|
| 56 |
+
# Get the application root directory (EXE dir if frozen, else CWD)
|
| 57 |
+
_root_dir = get_default_root()
|
| 58 |
|
| 59 |
# Load main .env first
|
| 60 |
+
load_dotenv(_root_dir / ".env")
|
| 61 |
|
| 62 |
# Load any additional .env files (e.g., antigravity_all_combined.env, gemini_cli_all_combined.env)
|
|
|
|
| 63 |
_env_files_found = list(_root_dir.glob("*.env"))
|
| 64 |
for _env_file in sorted(_root_dir.glob("*.env")):
|
| 65 |
if _env_file.name != ".env": # Skip main .env (already loaded)
|
|
|
|
| 237 |
# Note: Debug logging will be added after logging configuration below
|
| 238 |
|
| 239 |
# --- Logging Configuration ---
|
| 240 |
+
LOG_DIR = get_logs_dir(_root_dir)
|
|
|
|
| 241 |
|
| 242 |
# Configure a console handler with color (INFO and above only, no DEBUG)
|
| 243 |
console_handler = colorlog.StreamHandler(sys.stdout)
|
|
|
|
| 572 |
)
|
| 573 |
|
| 574 |
# Log loaded credentials summary (compact, always visible for deployment verification)
|
| 575 |
+
# _api_summary = ', '.join([f"{p}:{len(c)}" for p, c in api_keys.items()]) if api_keys else "none"
|
| 576 |
+
# _oauth_summary = ', '.join([f"{p}:{len(c)}" for p, c in oauth_credentials.items()]) if oauth_credentials else "none"
|
| 577 |
+
# _total_summary = ', '.join([f"{p}:{len(c)}" for p, c in client.all_credentials.items()])
|
| 578 |
+
# print(f"🔑 Credentials loaded: {_total_summary} (API: {_api_summary} | OAuth: {_oauth_summary})")
|
| 579 |
+
client.background_refresher.start() # Start the background task
|
| 580 |
app.state.rotating_client = client
|
| 581 |
|
| 582 |
# Warn if no provider credentials are configured
|
|
|
|
| 1265 |
|
| 1266 |
|
| 1267 |
if __name__ == "__main__":
|
| 1268 |
+
# Define ENV_FILE for onboarding checks using centralized path
|
| 1269 |
+
ENV_FILE = get_data_file(".env")
|
| 1270 |
|
| 1271 |
# Check if launcher TUI should be shown (no arguments provided)
|
| 1272 |
if len(sys.argv) == 1:
|
|
@@ -12,6 +12,8 @@ from rich.prompt import Prompt, IntPrompt, Confirm
|
|
| 12 |
from rich.panel import Panel
|
| 13 |
from dotenv import set_key, unset_key
|
| 14 |
|
|
|
|
|
|
|
| 15 |
console = Console()
|
| 16 |
|
| 17 |
# Import default OAuth port values from provider modules
|
|
@@ -54,7 +56,7 @@ class AdvancedSettings:
|
|
| 54 |
"""Manages pending changes to .env"""
|
| 55 |
|
| 56 |
def __init__(self):
|
| 57 |
-
self.env_file =
|
| 58 |
self.pending_changes = {} # key -> value (None means delete)
|
| 59 |
self.load_current_settings()
|
| 60 |
|
|
@@ -561,7 +563,7 @@ class SettingsTool:
|
|
| 561 |
|
| 562 |
def get_available_providers(self) -> List[str]:
|
| 563 |
"""Get list of providers that have credentials configured"""
|
| 564 |
-
env_file =
|
| 565 |
providers = set()
|
| 566 |
|
| 567 |
# Scan for providers with API keys from local .env
|
|
@@ -584,7 +586,9 @@ class SettingsTool:
|
|
| 584 |
pass
|
| 585 |
|
| 586 |
# Also check for OAuth providers from files
|
| 587 |
-
|
|
|
|
|
|
|
| 588 |
if oauth_dir.exists():
|
| 589 |
for file in oauth_dir.glob("*_oauth_*.json"):
|
| 590 |
provider = file.name.split("_oauth_")[0]
|
|
|
|
| 12 |
from rich.panel import Panel
|
| 13 |
from dotenv import set_key, unset_key
|
| 14 |
|
| 15 |
+
from rotator_library.utils.paths import get_data_file
|
| 16 |
+
|
| 17 |
console = Console()
|
| 18 |
|
| 19 |
# Import default OAuth port values from provider modules
|
|
|
|
| 56 |
"""Manages pending changes to .env"""
|
| 57 |
|
| 58 |
def __init__(self):
|
| 59 |
+
self.env_file = get_data_file(".env")
|
| 60 |
self.pending_changes = {} # key -> value (None means delete)
|
| 61 |
self.load_current_settings()
|
| 62 |
|
|
|
|
| 563 |
|
| 564 |
def get_available_providers(self) -> List[str]:
|
| 565 |
"""Get list of providers that have credentials configured"""
|
| 566 |
+
env_file = get_data_file(".env")
|
| 567 |
providers = set()
|
| 568 |
|
| 569 |
# Scan for providers with API keys from local .env
|
|
|
|
| 586 |
pass
|
| 587 |
|
| 588 |
# Also check for OAuth providers from files
|
| 589 |
+
from rotator_library.utils.paths import get_oauth_dir
|
| 590 |
+
|
| 591 |
+
oauth_dir = get_oauth_dir()
|
| 592 |
if oauth_dir.exists():
|
| 593 |
for file in oauth_dir.glob("*_oauth_*.json"):
|
| 594 |
provider = file.name.split("_oauth_")[0]
|
|
@@ -10,6 +10,7 @@ import litellm
|
|
| 10 |
from litellm.exceptions import APIConnectionError
|
| 11 |
from litellm.litellm_core_utils.token_counter import token_counter
|
| 12 |
import logging
|
|
|
|
| 13 |
from typing import List, Dict, Any, AsyncGenerator, Optional, Union
|
| 14 |
|
| 15 |
lib_logger = logging.getLogger("rotator_library")
|
|
@@ -19,7 +20,7 @@ lib_logger = logging.getLogger("rotator_library")
|
|
| 19 |
lib_logger.propagate = False
|
| 20 |
|
| 21 |
from .usage_manager import UsageManager
|
| 22 |
-
from .failure_logger import log_failure
|
| 23 |
from .error_handler import (
|
| 24 |
PreRequestCallbackError,
|
| 25 |
classify_error,
|
|
@@ -37,6 +38,7 @@ from .cooldown_manager import CooldownManager
|
|
| 37 |
from .credential_manager import CredentialManager
|
| 38 |
from .background_refresher import BackgroundRefresher
|
| 39 |
from .model_definitions import ModelDefinitions
|
|
|
|
| 40 |
|
| 41 |
|
| 42 |
class StreamedAPIError(Exception):
|
|
@@ -58,7 +60,7 @@ class RotatingClient:
|
|
| 58 |
api_keys: Optional[Dict[str, List[str]]] = None,
|
| 59 |
oauth_credentials: Optional[Dict[str, List[str]]] = None,
|
| 60 |
max_retries: int = 2,
|
| 61 |
-
usage_file_path: str =
|
| 62 |
configure_logging: bool = True,
|
| 63 |
global_timeout: int = 30,
|
| 64 |
abort_on_callback_error: bool = True,
|
|
@@ -68,6 +70,7 @@ class RotatingClient:
|
|
| 68 |
enable_request_logging: bool = False,
|
| 69 |
max_concurrent_requests_per_key: Optional[Dict[str, int]] = None,
|
| 70 |
rotation_tolerance: float = 3.0,
|
|
|
|
| 71 |
):
|
| 72 |
"""
|
| 73 |
Initialize the RotatingClient with intelligent credential rotation.
|
|
@@ -76,7 +79,7 @@ class RotatingClient:
|
|
| 76 |
api_keys: Dictionary mapping provider names to lists of API keys
|
| 77 |
oauth_credentials: Dictionary mapping provider names to OAuth credential paths
|
| 78 |
max_retries: Maximum number of retry attempts per credential
|
| 79 |
-
usage_file_path: Path to store usage statistics
|
| 80 |
configure_logging: Whether to configure library logging
|
| 81 |
global_timeout: Global timeout for requests in seconds
|
| 82 |
abort_on_callback_error: Whether to abort on pre-request callback errors
|
|
@@ -89,7 +92,18 @@ class RotatingClient:
|
|
| 89 |
- 0.0: Deterministic, least-used credential always selected
|
| 90 |
- 2.0 - 4.0 (default, recommended): Balanced randomness, can pick credentials within 2 uses of max
|
| 91 |
- 5.0+: High randomness, more unpredictable selection patterns
|
|
|
|
|
|
|
| 92 |
"""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 93 |
os.environ["LITELLM_LOG"] = "ERROR"
|
| 94 |
litellm.set_verbose = False
|
| 95 |
litellm.drop_params = True
|
|
@@ -124,7 +138,9 @@ class RotatingClient:
|
|
| 124 |
if oauth_credentials:
|
| 125 |
self.oauth_credentials = oauth_credentials
|
| 126 |
else:
|
| 127 |
-
self.credential_manager = CredentialManager(
|
|
|
|
|
|
|
| 128 |
self.oauth_credentials = self.credential_manager.discover_and_prepare()
|
| 129 |
self.background_refresher = BackgroundRefresher(self)
|
| 130 |
self.oauth_providers = set(self.oauth_credentials.keys())
|
|
@@ -242,8 +258,14 @@ class RotatingClient:
|
|
| 242 |
f"Provider '{provider}' sequential fallback multiplier: {fallback}x"
|
| 243 |
)
|
| 244 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 245 |
self.usage_manager = UsageManager(
|
| 246 |
-
file_path=
|
| 247 |
rotation_tolerance=rotation_tolerance,
|
| 248 |
provider_rotation_modes=provider_rotation_modes,
|
| 249 |
provider_plugins=PROVIDER_PLUGINS,
|
|
|
|
| 10 |
from litellm.exceptions import APIConnectionError
|
| 11 |
from litellm.litellm_core_utils.token_counter import token_counter
|
| 12 |
import logging
|
| 13 |
+
from pathlib import Path
|
| 14 |
from typing import List, Dict, Any, AsyncGenerator, Optional, Union
|
| 15 |
|
| 16 |
lib_logger = logging.getLogger("rotator_library")
|
|
|
|
| 20 |
lib_logger.propagate = False
|
| 21 |
|
| 22 |
from .usage_manager import UsageManager
|
| 23 |
+
from .failure_logger import log_failure, configure_failure_logger
|
| 24 |
from .error_handler import (
|
| 25 |
PreRequestCallbackError,
|
| 26 |
classify_error,
|
|
|
|
| 38 |
from .credential_manager import CredentialManager
|
| 39 |
from .background_refresher import BackgroundRefresher
|
| 40 |
from .model_definitions import ModelDefinitions
|
| 41 |
+
from .utils.paths import get_default_root, get_logs_dir, get_oauth_dir, get_data_file
|
| 42 |
|
| 43 |
|
| 44 |
class StreamedAPIError(Exception):
|
|
|
|
| 60 |
api_keys: Optional[Dict[str, List[str]]] = None,
|
| 61 |
oauth_credentials: Optional[Dict[str, List[str]]] = None,
|
| 62 |
max_retries: int = 2,
|
| 63 |
+
usage_file_path: Optional[Union[str, Path]] = None,
|
| 64 |
configure_logging: bool = True,
|
| 65 |
global_timeout: int = 30,
|
| 66 |
abort_on_callback_error: bool = True,
|
|
|
|
| 70 |
enable_request_logging: bool = False,
|
| 71 |
max_concurrent_requests_per_key: Optional[Dict[str, int]] = None,
|
| 72 |
rotation_tolerance: float = 3.0,
|
| 73 |
+
data_dir: Optional[Union[str, Path]] = None,
|
| 74 |
):
|
| 75 |
"""
|
| 76 |
Initialize the RotatingClient with intelligent credential rotation.
|
|
|
|
| 79 |
api_keys: Dictionary mapping provider names to lists of API keys
|
| 80 |
oauth_credentials: Dictionary mapping provider names to OAuth credential paths
|
| 81 |
max_retries: Maximum number of retry attempts per credential
|
| 82 |
+
usage_file_path: Path to store usage statistics. If None, uses data_dir/key_usage.json
|
| 83 |
configure_logging: Whether to configure library logging
|
| 84 |
global_timeout: Global timeout for requests in seconds
|
| 85 |
abort_on_callback_error: Whether to abort on pre-request callback errors
|
|
|
|
| 92 |
- 0.0: Deterministic, least-used credential always selected
|
| 93 |
- 2.0 - 4.0 (default, recommended): Balanced randomness, can pick credentials within 2 uses of max
|
| 94 |
- 5.0+: High randomness, more unpredictable selection patterns
|
| 95 |
+
data_dir: Root directory for all data files (logs, cache, oauth_creds, key_usage.json).
|
| 96 |
+
If None, auto-detects: EXE directory if frozen, else current working directory.
|
| 97 |
"""
|
| 98 |
+
# Resolve data_dir early - this becomes the root for all file operations
|
| 99 |
+
if data_dir is not None:
|
| 100 |
+
self.data_dir = Path(data_dir).resolve()
|
| 101 |
+
else:
|
| 102 |
+
self.data_dir = get_default_root()
|
| 103 |
+
|
| 104 |
+
# Configure failure logger to use correct logs directory
|
| 105 |
+
configure_failure_logger(get_logs_dir(self.data_dir))
|
| 106 |
+
|
| 107 |
os.environ["LITELLM_LOG"] = "ERROR"
|
| 108 |
litellm.set_verbose = False
|
| 109 |
litellm.drop_params = True
|
|
|
|
| 138 |
if oauth_credentials:
|
| 139 |
self.oauth_credentials = oauth_credentials
|
| 140 |
else:
|
| 141 |
+
self.credential_manager = CredentialManager(
|
| 142 |
+
os.environ, oauth_dir=get_oauth_dir(self.data_dir)
|
| 143 |
+
)
|
| 144 |
self.oauth_credentials = self.credential_manager.discover_and_prepare()
|
| 145 |
self.background_refresher = BackgroundRefresher(self)
|
| 146 |
self.oauth_providers = set(self.oauth_credentials.keys())
|
|
|
|
| 258 |
f"Provider '{provider}' sequential fallback multiplier: {fallback}x"
|
| 259 |
)
|
| 260 |
|
| 261 |
+
# Resolve usage file path - use provided path or default to data_dir
|
| 262 |
+
if usage_file_path is not None:
|
| 263 |
+
resolved_usage_path = Path(usage_file_path)
|
| 264 |
+
else:
|
| 265 |
+
resolved_usage_path = self.data_dir / "key_usage.json"
|
| 266 |
+
|
| 267 |
self.usage_manager = UsageManager(
|
| 268 |
+
file_path=resolved_usage_path,
|
| 269 |
rotation_tolerance=rotation_tolerance,
|
| 270 |
provider_rotation_modes=provider_rotation_modes,
|
| 271 |
provider_plugins=PROVIDER_PLUGINS,
|
|
@@ -3,12 +3,11 @@ import re
|
|
| 3 |
import shutil
|
| 4 |
import logging
|
| 5 |
from pathlib import Path
|
| 6 |
-
from typing import Dict, List, Optional, Set
|
| 7 |
|
| 8 |
-
|
| 9 |
|
| 10 |
-
|
| 11 |
-
OAUTH_BASE_DIR.mkdir(exist_ok=True)
|
| 12 |
|
| 13 |
# Standard directories where tools like `gemini login` store credentials.
|
| 14 |
DEFAULT_OAUTH_DIRS = {
|
|
@@ -33,38 +32,53 @@ class CredentialManager:
|
|
| 33 |
"""
|
| 34 |
Discovers OAuth credential files from standard locations, copies them locally,
|
| 35 |
and updates the configuration to use the local paths.
|
| 36 |
-
|
| 37 |
Also discovers environment variable-based OAuth credentials for stateless deployments.
|
| 38 |
Supports two env var formats:
|
| 39 |
-
|
| 40 |
1. Single credential (legacy): PROVIDER_ACCESS_TOKEN, PROVIDER_REFRESH_TOKEN
|
| 41 |
2. Multiple credentials (numbered): PROVIDER_1_ACCESS_TOKEN, PROVIDER_2_ACCESS_TOKEN, etc.
|
| 42 |
-
|
| 43 |
When env-based credentials are detected, virtual paths like "env://provider/1" are created.
|
| 44 |
"""
|
| 45 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 46 |
self.env_vars = env_vars
|
|
|
|
|
|
|
| 47 |
|
| 48 |
def _discover_env_oauth_credentials(self) -> Dict[str, List[str]]:
|
| 49 |
"""
|
| 50 |
Discover OAuth credentials defined via environment variables.
|
| 51 |
-
|
| 52 |
Supports two formats:
|
| 53 |
1. Single credential: ANTIGRAVITY_ACCESS_TOKEN + ANTIGRAVITY_REFRESH_TOKEN
|
| 54 |
2. Multiple credentials: ANTIGRAVITY_1_ACCESS_TOKEN + ANTIGRAVITY_1_REFRESH_TOKEN, etc.
|
| 55 |
-
|
| 56 |
Returns:
|
| 57 |
Dict mapping provider name to list of virtual paths (e.g., "env://antigravity/1")
|
| 58 |
"""
|
| 59 |
env_credentials: Dict[str, Set[str]] = {}
|
| 60 |
-
|
| 61 |
for provider, env_prefix in ENV_OAUTH_PROVIDERS.items():
|
| 62 |
found_indices: Set[str] = set()
|
| 63 |
-
|
| 64 |
# Check for numbered credentials (PROVIDER_N_ACCESS_TOKEN pattern)
|
| 65 |
# Pattern: ANTIGRAVITY_1_ACCESS_TOKEN, ANTIGRAVITY_2_ACCESS_TOKEN, etc.
|
| 66 |
numbered_pattern = re.compile(rf"^{env_prefix}_(\d+)_ACCESS_TOKEN$")
|
| 67 |
-
|
| 68 |
for key in self.env_vars.keys():
|
| 69 |
match = numbered_pattern.match(key)
|
| 70 |
if match:
|
|
@@ -73,28 +87,34 @@ class CredentialManager:
|
|
| 73 |
refresh_key = f"{env_prefix}_{index}_REFRESH_TOKEN"
|
| 74 |
if refresh_key in self.env_vars and self.env_vars[refresh_key]:
|
| 75 |
found_indices.add(index)
|
| 76 |
-
|
| 77 |
# Check for legacy single credential (PROVIDER_ACCESS_TOKEN pattern)
|
| 78 |
# Only use this if no numbered credentials exist
|
| 79 |
if not found_indices:
|
| 80 |
access_key = f"{env_prefix}_ACCESS_TOKEN"
|
| 81 |
refresh_key = f"{env_prefix}_REFRESH_TOKEN"
|
| 82 |
-
if (
|
| 83 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 84 |
# Use "0" as the index for legacy single credential
|
| 85 |
found_indices.add("0")
|
| 86 |
-
|
| 87 |
if found_indices:
|
| 88 |
env_credentials[provider] = found_indices
|
| 89 |
-
lib_logger.info(
|
| 90 |
-
|
|
|
|
|
|
|
| 91 |
# Convert to virtual paths
|
| 92 |
result: Dict[str, List[str]] = {}
|
| 93 |
for provider, indices in env_credentials.items():
|
| 94 |
# Sort indices numerically for consistent ordering
|
| 95 |
sorted_indices = sorted(indices, key=lambda x: int(x))
|
| 96 |
result[provider] = [f"env://{provider}/{idx}" for idx in sorted_indices]
|
| 97 |
-
|
| 98 |
return result
|
| 99 |
|
| 100 |
def discover_and_prepare(self) -> Dict[str, List[str]]:
|
|
@@ -105,7 +125,9 @@ class CredentialManager:
|
|
| 105 |
# These take priority for stateless deployments
|
| 106 |
env_oauth_creds = self._discover_env_oauth_credentials()
|
| 107 |
for provider, virtual_paths in env_oauth_creds.items():
|
| 108 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 109 |
final_config[provider] = virtual_paths
|
| 110 |
|
| 111 |
# Extract OAuth file paths from environment variables
|
|
@@ -115,21 +137,29 @@ class CredentialManager:
|
|
| 115 |
provider = key.split("_OAUTH_")[0].lower()
|
| 116 |
if provider not in env_oauth_paths:
|
| 117 |
env_oauth_paths[provider] = []
|
| 118 |
-
if value:
|
| 119 |
env_oauth_paths[provider].append(value)
|
| 120 |
|
| 121 |
# PHASE 2: Discover file-based OAuth credentials
|
| 122 |
for provider, default_dir in DEFAULT_OAUTH_DIRS.items():
|
| 123 |
# Skip if already discovered from environment variables
|
| 124 |
if provider in final_config:
|
| 125 |
-
lib_logger.debug(
|
|
|
|
|
|
|
| 126 |
continue
|
| 127 |
-
|
| 128 |
# Check for existing local credentials first. If found, use them and skip discovery.
|
| 129 |
-
local_provider_creds = sorted(
|
|
|
|
|
|
|
| 130 |
if local_provider_creds:
|
| 131 |
-
lib_logger.info(
|
| 132 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 133 |
continue
|
| 134 |
|
| 135 |
# If no local credentials exist, proceed with a one-time discovery and copy.
|
|
@@ -140,13 +170,13 @@ class CredentialManager:
|
|
| 140 |
path = Path(path_str).expanduser()
|
| 141 |
if path.exists():
|
| 142 |
discovered_paths.add(path)
|
| 143 |
-
|
| 144 |
# 2. If no overrides are provided via .env, scan the default directory
|
| 145 |
# [MODIFIED] This logic is now disabled to prefer local-first credential management.
|
| 146 |
# if not discovered_paths and default_dir.exists():
|
| 147 |
# for json_file in default_dir.glob('*.json'):
|
| 148 |
# discovered_paths.add(json_file)
|
| 149 |
-
|
| 150 |
if not discovered_paths:
|
| 151 |
lib_logger.debug(f"No credential files found for provider: {provider}")
|
| 152 |
continue
|
|
@@ -156,18 +186,24 @@ class CredentialManager:
|
|
| 156 |
for i, source_path in enumerate(sorted(list(discovered_paths))):
|
| 157 |
account_id = i + 1
|
| 158 |
local_filename = f"{provider}_oauth_{account_id}.json"
|
| 159 |
-
local_path =
|
| 160 |
|
| 161 |
try:
|
| 162 |
# Since we've established no local files exist, we can copy directly.
|
| 163 |
shutil.copy(source_path, local_path)
|
| 164 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 165 |
prepared_paths.append(str(local_path.resolve()))
|
| 166 |
except Exception as e:
|
| 167 |
-
lib_logger.error(
|
| 168 |
-
|
|
|
|
|
|
|
| 169 |
if prepared_paths:
|
| 170 |
-
lib_logger.info(
|
|
|
|
|
|
|
| 171 |
final_config[provider] = prepared_paths
|
| 172 |
|
| 173 |
lib_logger.info("OAuth credential discovery complete.")
|
|
|
|
| 3 |
import shutil
|
| 4 |
import logging
|
| 5 |
from pathlib import Path
|
| 6 |
+
from typing import Dict, List, Optional, Set, Union
|
| 7 |
|
| 8 |
+
from .utils.paths import get_oauth_dir
|
| 9 |
|
| 10 |
+
lib_logger = logging.getLogger("rotator_library")
|
|
|
|
| 11 |
|
| 12 |
# Standard directories where tools like `gemini login` store credentials.
|
| 13 |
DEFAULT_OAUTH_DIRS = {
|
|
|
|
| 32 |
"""
|
| 33 |
Discovers OAuth credential files from standard locations, copies them locally,
|
| 34 |
and updates the configuration to use the local paths.
|
| 35 |
+
|
| 36 |
Also discovers environment variable-based OAuth credentials for stateless deployments.
|
| 37 |
Supports two env var formats:
|
| 38 |
+
|
| 39 |
1. Single credential (legacy): PROVIDER_ACCESS_TOKEN, PROVIDER_REFRESH_TOKEN
|
| 40 |
2. Multiple credentials (numbered): PROVIDER_1_ACCESS_TOKEN, PROVIDER_2_ACCESS_TOKEN, etc.
|
| 41 |
+
|
| 42 |
When env-based credentials are detected, virtual paths like "env://provider/1" are created.
|
| 43 |
"""
|
| 44 |
+
|
| 45 |
+
def __init__(
|
| 46 |
+
self,
|
| 47 |
+
env_vars: Dict[str, str],
|
| 48 |
+
oauth_dir: Optional[Union[Path, str]] = None,
|
| 49 |
+
):
|
| 50 |
+
"""
|
| 51 |
+
Initialize the CredentialManager.
|
| 52 |
+
|
| 53 |
+
Args:
|
| 54 |
+
env_vars: Dictionary of environment variables (typically os.environ).
|
| 55 |
+
oauth_dir: Directory for storing OAuth credentials.
|
| 56 |
+
If None, uses get_oauth_dir() which respects EXE vs script mode.
|
| 57 |
+
"""
|
| 58 |
self.env_vars = env_vars
|
| 59 |
+
self.oauth_base_dir = Path(oauth_dir) if oauth_dir else get_oauth_dir()
|
| 60 |
+
self.oauth_base_dir.mkdir(parents=True, exist_ok=True)
|
| 61 |
|
| 62 |
def _discover_env_oauth_credentials(self) -> Dict[str, List[str]]:
|
| 63 |
"""
|
| 64 |
Discover OAuth credentials defined via environment variables.
|
| 65 |
+
|
| 66 |
Supports two formats:
|
| 67 |
1. Single credential: ANTIGRAVITY_ACCESS_TOKEN + ANTIGRAVITY_REFRESH_TOKEN
|
| 68 |
2. Multiple credentials: ANTIGRAVITY_1_ACCESS_TOKEN + ANTIGRAVITY_1_REFRESH_TOKEN, etc.
|
| 69 |
+
|
| 70 |
Returns:
|
| 71 |
Dict mapping provider name to list of virtual paths (e.g., "env://antigravity/1")
|
| 72 |
"""
|
| 73 |
env_credentials: Dict[str, Set[str]] = {}
|
| 74 |
+
|
| 75 |
for provider, env_prefix in ENV_OAUTH_PROVIDERS.items():
|
| 76 |
found_indices: Set[str] = set()
|
| 77 |
+
|
| 78 |
# Check for numbered credentials (PROVIDER_N_ACCESS_TOKEN pattern)
|
| 79 |
# Pattern: ANTIGRAVITY_1_ACCESS_TOKEN, ANTIGRAVITY_2_ACCESS_TOKEN, etc.
|
| 80 |
numbered_pattern = re.compile(rf"^{env_prefix}_(\d+)_ACCESS_TOKEN$")
|
| 81 |
+
|
| 82 |
for key in self.env_vars.keys():
|
| 83 |
match = numbered_pattern.match(key)
|
| 84 |
if match:
|
|
|
|
| 87 |
refresh_key = f"{env_prefix}_{index}_REFRESH_TOKEN"
|
| 88 |
if refresh_key in self.env_vars and self.env_vars[refresh_key]:
|
| 89 |
found_indices.add(index)
|
| 90 |
+
|
| 91 |
# Check for legacy single credential (PROVIDER_ACCESS_TOKEN pattern)
|
| 92 |
# Only use this if no numbered credentials exist
|
| 93 |
if not found_indices:
|
| 94 |
access_key = f"{env_prefix}_ACCESS_TOKEN"
|
| 95 |
refresh_key = f"{env_prefix}_REFRESH_TOKEN"
|
| 96 |
+
if (
|
| 97 |
+
access_key in self.env_vars
|
| 98 |
+
and self.env_vars[access_key]
|
| 99 |
+
and refresh_key in self.env_vars
|
| 100 |
+
and self.env_vars[refresh_key]
|
| 101 |
+
):
|
| 102 |
# Use "0" as the index for legacy single credential
|
| 103 |
found_indices.add("0")
|
| 104 |
+
|
| 105 |
if found_indices:
|
| 106 |
env_credentials[provider] = found_indices
|
| 107 |
+
lib_logger.info(
|
| 108 |
+
f"Found {len(found_indices)} env-based credential(s) for {provider}"
|
| 109 |
+
)
|
| 110 |
+
|
| 111 |
# Convert to virtual paths
|
| 112 |
result: Dict[str, List[str]] = {}
|
| 113 |
for provider, indices in env_credentials.items():
|
| 114 |
# Sort indices numerically for consistent ordering
|
| 115 |
sorted_indices = sorted(indices, key=lambda x: int(x))
|
| 116 |
result[provider] = [f"env://{provider}/{idx}" for idx in sorted_indices]
|
| 117 |
+
|
| 118 |
return result
|
| 119 |
|
| 120 |
def discover_and_prepare(self) -> Dict[str, List[str]]:
|
|
|
|
| 125 |
# These take priority for stateless deployments
|
| 126 |
env_oauth_creds = self._discover_env_oauth_credentials()
|
| 127 |
for provider, virtual_paths in env_oauth_creds.items():
|
| 128 |
+
lib_logger.info(
|
| 129 |
+
f"Using {len(virtual_paths)} env-based credential(s) for {provider}"
|
| 130 |
+
)
|
| 131 |
final_config[provider] = virtual_paths
|
| 132 |
|
| 133 |
# Extract OAuth file paths from environment variables
|
|
|
|
| 137 |
provider = key.split("_OAUTH_")[0].lower()
|
| 138 |
if provider not in env_oauth_paths:
|
| 139 |
env_oauth_paths[provider] = []
|
| 140 |
+
if value: # Only consider non-empty values
|
| 141 |
env_oauth_paths[provider].append(value)
|
| 142 |
|
| 143 |
# PHASE 2: Discover file-based OAuth credentials
|
| 144 |
for provider, default_dir in DEFAULT_OAUTH_DIRS.items():
|
| 145 |
# Skip if already discovered from environment variables
|
| 146 |
if provider in final_config:
|
| 147 |
+
lib_logger.debug(
|
| 148 |
+
f"Skipping file discovery for {provider} - using env-based credentials"
|
| 149 |
+
)
|
| 150 |
continue
|
| 151 |
+
|
| 152 |
# Check for existing local credentials first. If found, use them and skip discovery.
|
| 153 |
+
local_provider_creds = sorted(
|
| 154 |
+
list(self.oauth_base_dir.glob(f"{provider}_oauth_*.json"))
|
| 155 |
+
)
|
| 156 |
if local_provider_creds:
|
| 157 |
+
lib_logger.info(
|
| 158 |
+
f"Found {len(local_provider_creds)} existing local credential(s) for {provider}. Skipping discovery."
|
| 159 |
+
)
|
| 160 |
+
final_config[provider] = [
|
| 161 |
+
str(p.resolve()) for p in local_provider_creds
|
| 162 |
+
]
|
| 163 |
continue
|
| 164 |
|
| 165 |
# If no local credentials exist, proceed with a one-time discovery and copy.
|
|
|
|
| 170 |
path = Path(path_str).expanduser()
|
| 171 |
if path.exists():
|
| 172 |
discovered_paths.add(path)
|
| 173 |
+
|
| 174 |
# 2. If no overrides are provided via .env, scan the default directory
|
| 175 |
# [MODIFIED] This logic is now disabled to prefer local-first credential management.
|
| 176 |
# if not discovered_paths and default_dir.exists():
|
| 177 |
# for json_file in default_dir.glob('*.json'):
|
| 178 |
# discovered_paths.add(json_file)
|
| 179 |
+
|
| 180 |
if not discovered_paths:
|
| 181 |
lib_logger.debug(f"No credential files found for provider: {provider}")
|
| 182 |
continue
|
|
|
|
| 186 |
for i, source_path in enumerate(sorted(list(discovered_paths))):
|
| 187 |
account_id = i + 1
|
| 188 |
local_filename = f"{provider}_oauth_{account_id}.json"
|
| 189 |
+
local_path = self.oauth_base_dir / local_filename
|
| 190 |
|
| 191 |
try:
|
| 192 |
# Since we've established no local files exist, we can copy directly.
|
| 193 |
shutil.copy(source_path, local_path)
|
| 194 |
+
lib_logger.info(
|
| 195 |
+
f"Copied '{source_path.name}' to local pool at '{local_path}'."
|
| 196 |
+
)
|
| 197 |
prepared_paths.append(str(local_path.resolve()))
|
| 198 |
except Exception as e:
|
| 199 |
+
lib_logger.error(
|
| 200 |
+
f"Failed to process OAuth file from '{source_path}': {e}"
|
| 201 |
+
)
|
| 202 |
+
|
| 203 |
if prepared_paths:
|
| 204 |
+
lib_logger.info(
|
| 205 |
+
f"Discovered and prepared {len(prepared_paths)} credential(s) for provider: {provider}"
|
| 206 |
+
)
|
| 207 |
final_config[provider] = prepared_paths
|
| 208 |
|
| 209 |
lib_logger.info("OAuth credential discovery complete.")
|
|
@@ -14,10 +14,20 @@ from rich.panel import Panel
|
|
| 14 |
from rich.prompt import Prompt
|
| 15 |
from rich.text import Text
|
| 16 |
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 21 |
|
| 22 |
console = Console()
|
| 23 |
|
|
@@ -54,19 +64,19 @@ def ensure_env_defaults():
|
|
| 54 |
"""
|
| 55 |
Ensures the .env file exists and contains essential default values like PROXY_API_KEY.
|
| 56 |
"""
|
| 57 |
-
if not
|
| 58 |
-
|
| 59 |
console.print(
|
| 60 |
-
f"Creating a new [bold yellow]{
|
| 61 |
)
|
| 62 |
|
| 63 |
# Check for PROXY_API_KEY, similar to setup_env.bat
|
| 64 |
-
if get_key(str(
|
| 65 |
default_key = "VerysecretKey"
|
| 66 |
console.print(
|
| 67 |
-
f"Adding default [bold cyan]PROXY_API_KEY[/bold cyan] to [bold yellow]{
|
| 68 |
)
|
| 69 |
-
set_key(str(
|
| 70 |
|
| 71 |
|
| 72 |
async def setup_api_key():
|
|
@@ -224,8 +234,8 @@ async def setup_api_key():
|
|
| 224 |
api_key = Prompt.ask(f"Enter the API key for {display_name}")
|
| 225 |
|
| 226 |
# Check for duplicate API key value
|
| 227 |
-
if
|
| 228 |
-
with open(
|
| 229 |
for line in f:
|
| 230 |
line = line.strip()
|
| 231 |
if line.startswith(api_var_base) and "=" in line:
|
|
@@ -244,7 +254,9 @@ async def setup_api_key():
|
|
| 244 |
)
|
| 245 |
)
|
| 246 |
|
| 247 |
-
set_key(
|
|
|
|
|
|
|
| 248 |
|
| 249 |
success_text = Text.from_markup(
|
| 250 |
f"Successfully updated existing key [bold yellow]'{existing_key_name}'[/bold yellow]."
|
|
@@ -275,8 +287,8 @@ async def setup_api_key():
|
|
| 275 |
key_index = 1
|
| 276 |
while True:
|
| 277 |
key_name = f"{api_var_base}_{key_index}"
|
| 278 |
-
if
|
| 279 |
-
with open(
|
| 280 |
if not any(line.startswith(f"{key_name}=") for line in f):
|
| 281 |
break
|
| 282 |
else:
|
|
@@ -284,7 +296,7 @@ async def setup_api_key():
|
|
| 284 |
key_index += 1
|
| 285 |
|
| 286 |
key_name = f"{api_var_base}_{key_index}"
|
| 287 |
-
set_key(str(
|
| 288 |
|
| 289 |
success_text = Text.from_markup(
|
| 290 |
f"Successfully added {display_name} API key as [bold yellow]'{key_name}'[/bold yellow]."
|
|
@@ -327,7 +339,7 @@ async def setup_new_credential(provider_name: str):
|
|
| 327 |
# - File path determination (new or existing)
|
| 328 |
# - Credential file saving
|
| 329 |
# - Post-auth discovery (tier/project for Google OAuth providers)
|
| 330 |
-
result = await auth_instance.setup_credential(
|
| 331 |
|
| 332 |
if not result.success:
|
| 333 |
console.print(
|
|
@@ -386,7 +398,7 @@ async def export_gemini_cli_to_env():
|
|
| 386 |
auth_instance = auth_class()
|
| 387 |
|
| 388 |
# List available credentials using auth class
|
| 389 |
-
credentials = auth_instance.list_credentials(
|
| 390 |
|
| 391 |
if not credentials:
|
| 392 |
console.print(
|
|
@@ -427,7 +439,7 @@ async def export_gemini_cli_to_env():
|
|
| 427 |
|
| 428 |
# Use auth class to export
|
| 429 |
env_path = auth_instance.export_credential_to_env(
|
| 430 |
-
cred_info["file_path"],
|
| 431 |
)
|
| 432 |
|
| 433 |
if env_path:
|
|
@@ -481,7 +493,7 @@ async def export_qwen_code_to_env():
|
|
| 481 |
auth_instance = auth_class()
|
| 482 |
|
| 483 |
# List available credentials using auth class
|
| 484 |
-
credentials = auth_instance.list_credentials(
|
| 485 |
|
| 486 |
if not credentials:
|
| 487 |
console.print(
|
|
@@ -522,7 +534,7 @@ async def export_qwen_code_to_env():
|
|
| 522 |
|
| 523 |
# Use auth class to export
|
| 524 |
env_path = auth_instance.export_credential_to_env(
|
| 525 |
-
cred_info["file_path"],
|
| 526 |
)
|
| 527 |
|
| 528 |
if env_path:
|
|
@@ -573,7 +585,7 @@ async def export_iflow_to_env():
|
|
| 573 |
auth_instance = auth_class()
|
| 574 |
|
| 575 |
# List available credentials using auth class
|
| 576 |
-
credentials = auth_instance.list_credentials(
|
| 577 |
|
| 578 |
if not credentials:
|
| 579 |
console.print(
|
|
@@ -614,7 +626,7 @@ async def export_iflow_to_env():
|
|
| 614 |
|
| 615 |
# Use auth class to export
|
| 616 |
env_path = auth_instance.export_credential_to_env(
|
| 617 |
-
cred_info["file_path"],
|
| 618 |
)
|
| 619 |
|
| 620 |
if env_path:
|
|
@@ -667,7 +679,7 @@ async def export_antigravity_to_env():
|
|
| 667 |
auth_instance = auth_class()
|
| 668 |
|
| 669 |
# List available credentials using auth class
|
| 670 |
-
credentials = auth_instance.list_credentials(
|
| 671 |
|
| 672 |
if not credentials:
|
| 673 |
console.print(
|
|
@@ -708,7 +720,7 @@ async def export_antigravity_to_env():
|
|
| 708 |
|
| 709 |
# Use auth class to export
|
| 710 |
env_path = auth_instance.export_credential_to_env(
|
| 711 |
-
cred_info["file_path"],
|
| 712 |
)
|
| 713 |
|
| 714 |
if env_path:
|
|
@@ -769,7 +781,7 @@ async def export_all_provider_credentials(provider_name: str):
|
|
| 769 |
)
|
| 770 |
|
| 771 |
# List all credentials using auth class
|
| 772 |
-
credentials = auth_instance.list_credentials(
|
| 773 |
|
| 774 |
if not credentials:
|
| 775 |
console.print(
|
|
@@ -786,7 +798,7 @@ async def export_all_provider_credentials(provider_name: str):
|
|
| 786 |
try:
|
| 787 |
# Use auth class to export
|
| 788 |
env_path = auth_instance.export_credential_to_env(
|
| 789 |
-
cred_info["file_path"],
|
| 790 |
)
|
| 791 |
|
| 792 |
if env_path:
|
|
@@ -837,7 +849,7 @@ async def combine_provider_credentials(provider_name: str):
|
|
| 837 |
)
|
| 838 |
|
| 839 |
# List all credentials using auth class
|
| 840 |
-
credentials = auth_instance.list_credentials(
|
| 841 |
|
| 842 |
if not credentials:
|
| 843 |
console.print(
|
|
@@ -879,7 +891,7 @@ async def combine_provider_credentials(provider_name: str):
|
|
| 879 |
|
| 880 |
# Write combined file
|
| 881 |
combined_filename = f"{provider_name}_all_combined.env"
|
| 882 |
-
combined_filepath =
|
| 883 |
|
| 884 |
with open(combined_filepath, "w") as f:
|
| 885 |
f.write("\n".join(combined_lines))
|
|
@@ -929,7 +941,7 @@ async def combine_all_credentials():
|
|
| 929 |
except Exception:
|
| 930 |
continue # Skip providers that don't have auth classes
|
| 931 |
|
| 932 |
-
credentials = auth_instance.list_credentials(
|
| 933 |
|
| 934 |
if not credentials:
|
| 935 |
continue
|
|
@@ -972,7 +984,7 @@ async def combine_all_credentials():
|
|
| 972 |
|
| 973 |
# Write combined file
|
| 974 |
combined_filename = "all_providers_combined.env"
|
| 975 |
-
combined_filepath =
|
| 976 |
|
| 977 |
with open(combined_filepath, "w") as f:
|
| 978 |
f.write("\n".join(combined_lines))
|
|
|
|
| 14 |
from rich.prompt import Prompt
|
| 15 |
from rich.text import Text
|
| 16 |
|
| 17 |
+
from .utils.paths import get_oauth_dir, get_data_file
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
def _get_oauth_base_dir() -> Path:
|
| 21 |
+
"""Get the OAuth base directory (lazy, respects EXE vs script mode)."""
|
| 22 |
+
oauth_dir = get_oauth_dir()
|
| 23 |
+
oauth_dir.mkdir(parents=True, exist_ok=True)
|
| 24 |
+
return oauth_dir
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
def _get_env_file() -> Path:
|
| 28 |
+
"""Get the .env file path (lazy, respects EXE vs script mode)."""
|
| 29 |
+
return get_data_file(".env")
|
| 30 |
+
|
| 31 |
|
| 32 |
console = Console()
|
| 33 |
|
|
|
|
| 64 |
"""
|
| 65 |
Ensures the .env file exists and contains essential default values like PROXY_API_KEY.
|
| 66 |
"""
|
| 67 |
+
if not _get_env_file().is_file():
|
| 68 |
+
_get_env_file().touch()
|
| 69 |
console.print(
|
| 70 |
+
f"Creating a new [bold yellow]{_get_env_file().name}[/bold yellow] file..."
|
| 71 |
)
|
| 72 |
|
| 73 |
# Check for PROXY_API_KEY, similar to setup_env.bat
|
| 74 |
+
if get_key(str(_get_env_file()), "PROXY_API_KEY") is None:
|
| 75 |
default_key = "VerysecretKey"
|
| 76 |
console.print(
|
| 77 |
+
f"Adding default [bold cyan]PROXY_API_KEY[/bold cyan] to [bold yellow]{_get_env_file().name}[/bold yellow]..."
|
| 78 |
)
|
| 79 |
+
set_key(str(_get_env_file()), "PROXY_API_KEY", default_key)
|
| 80 |
|
| 81 |
|
| 82 |
async def setup_api_key():
|
|
|
|
| 234 |
api_key = Prompt.ask(f"Enter the API key for {display_name}")
|
| 235 |
|
| 236 |
# Check for duplicate API key value
|
| 237 |
+
if _get_env_file().is_file():
|
| 238 |
+
with open(_get_env_file(), "r") as f:
|
| 239 |
for line in f:
|
| 240 |
line = line.strip()
|
| 241 |
if line.startswith(api_var_base) and "=" in line:
|
|
|
|
| 254 |
)
|
| 255 |
)
|
| 256 |
|
| 257 |
+
set_key(
|
| 258 |
+
str(_get_env_file()), existing_key_name, api_key
|
| 259 |
+
)
|
| 260 |
|
| 261 |
success_text = Text.from_markup(
|
| 262 |
f"Successfully updated existing key [bold yellow]'{existing_key_name}'[/bold yellow]."
|
|
|
|
| 287 |
key_index = 1
|
| 288 |
while True:
|
| 289 |
key_name = f"{api_var_base}_{key_index}"
|
| 290 |
+
if _get_env_file().is_file():
|
| 291 |
+
with open(_get_env_file(), "r") as f:
|
| 292 |
if not any(line.startswith(f"{key_name}=") for line in f):
|
| 293 |
break
|
| 294 |
else:
|
|
|
|
| 296 |
key_index += 1
|
| 297 |
|
| 298 |
key_name = f"{api_var_base}_{key_index}"
|
| 299 |
+
set_key(str(_get_env_file()), key_name, api_key)
|
| 300 |
|
| 301 |
success_text = Text.from_markup(
|
| 302 |
f"Successfully added {display_name} API key as [bold yellow]'{key_name}'[/bold yellow]."
|
|
|
|
| 339 |
# - File path determination (new or existing)
|
| 340 |
# - Credential file saving
|
| 341 |
# - Post-auth discovery (tier/project for Google OAuth providers)
|
| 342 |
+
result = await auth_instance.setup_credential(_get_oauth_base_dir())
|
| 343 |
|
| 344 |
if not result.success:
|
| 345 |
console.print(
|
|
|
|
| 398 |
auth_instance = auth_class()
|
| 399 |
|
| 400 |
# List available credentials using auth class
|
| 401 |
+
credentials = auth_instance.list_credentials(_get_oauth_base_dir())
|
| 402 |
|
| 403 |
if not credentials:
|
| 404 |
console.print(
|
|
|
|
| 439 |
|
| 440 |
# Use auth class to export
|
| 441 |
env_path = auth_instance.export_credential_to_env(
|
| 442 |
+
cred_info["file_path"], _get_oauth_base_dir()
|
| 443 |
)
|
| 444 |
|
| 445 |
if env_path:
|
|
|
|
| 493 |
auth_instance = auth_class()
|
| 494 |
|
| 495 |
# List available credentials using auth class
|
| 496 |
+
credentials = auth_instance.list_credentials(_get_oauth_base_dir())
|
| 497 |
|
| 498 |
if not credentials:
|
| 499 |
console.print(
|
|
|
|
| 534 |
|
| 535 |
# Use auth class to export
|
| 536 |
env_path = auth_instance.export_credential_to_env(
|
| 537 |
+
cred_info["file_path"], _get_oauth_base_dir()
|
| 538 |
)
|
| 539 |
|
| 540 |
if env_path:
|
|
|
|
| 585 |
auth_instance = auth_class()
|
| 586 |
|
| 587 |
# List available credentials using auth class
|
| 588 |
+
credentials = auth_instance.list_credentials(_get_oauth_base_dir())
|
| 589 |
|
| 590 |
if not credentials:
|
| 591 |
console.print(
|
|
|
|
| 626 |
|
| 627 |
# Use auth class to export
|
| 628 |
env_path = auth_instance.export_credential_to_env(
|
| 629 |
+
cred_info["file_path"], _get_oauth_base_dir()
|
| 630 |
)
|
| 631 |
|
| 632 |
if env_path:
|
|
|
|
| 679 |
auth_instance = auth_class()
|
| 680 |
|
| 681 |
# List available credentials using auth class
|
| 682 |
+
credentials = auth_instance.list_credentials(_get_oauth_base_dir())
|
| 683 |
|
| 684 |
if not credentials:
|
| 685 |
console.print(
|
|
|
|
| 720 |
|
| 721 |
# Use auth class to export
|
| 722 |
env_path = auth_instance.export_credential_to_env(
|
| 723 |
+
cred_info["file_path"], _get_oauth_base_dir()
|
| 724 |
)
|
| 725 |
|
| 726 |
if env_path:
|
|
|
|
| 781 |
)
|
| 782 |
|
| 783 |
# List all credentials using auth class
|
| 784 |
+
credentials = auth_instance.list_credentials(_get_oauth_base_dir())
|
| 785 |
|
| 786 |
if not credentials:
|
| 787 |
console.print(
|
|
|
|
| 798 |
try:
|
| 799 |
# Use auth class to export
|
| 800 |
env_path = auth_instance.export_credential_to_env(
|
| 801 |
+
cred_info["file_path"], _get_oauth_base_dir()
|
| 802 |
)
|
| 803 |
|
| 804 |
if env_path:
|
|
|
|
| 849 |
)
|
| 850 |
|
| 851 |
# List all credentials using auth class
|
| 852 |
+
credentials = auth_instance.list_credentials(_get_oauth_base_dir())
|
| 853 |
|
| 854 |
if not credentials:
|
| 855 |
console.print(
|
|
|
|
| 891 |
|
| 892 |
# Write combined file
|
| 893 |
combined_filename = f"{provider_name}_all_combined.env"
|
| 894 |
+
combined_filepath = _get_oauth_base_dir() / combined_filename
|
| 895 |
|
| 896 |
with open(combined_filepath, "w") as f:
|
| 897 |
f.write("\n".join(combined_lines))
|
|
|
|
| 941 |
except Exception:
|
| 942 |
continue # Skip providers that don't have auth classes
|
| 943 |
|
| 944 |
+
credentials = auth_instance.list_credentials(_get_oauth_base_dir())
|
| 945 |
|
| 946 |
if not credentials:
|
| 947 |
continue
|
|
|
|
| 984 |
|
| 985 |
# Write combined file
|
| 986 |
combined_filename = "all_providers_combined.env"
|
| 987 |
+
combined_filepath = _get_oauth_base_dir() / combined_filename
|
| 988 |
|
| 989 |
with open(combined_filepath, "w") as f:
|
| 990 |
f.write("\n".join(combined_lines))
|
|
@@ -1,9 +1,12 @@
|
|
| 1 |
import logging
|
| 2 |
import json
|
| 3 |
from logging.handlers import RotatingFileHandler
|
| 4 |
-
import
|
| 5 |
from datetime import datetime
|
|
|
|
|
|
|
| 6 |
from .error_handler import mask_credential
|
|
|
|
| 7 |
|
| 8 |
|
| 9 |
class JsonFormatter(logging.Formatter):
|
|
@@ -14,9 +17,37 @@ class JsonFormatter(logging.Formatter):
|
|
| 14 |
return json.dumps(record.msg)
|
| 15 |
|
| 16 |
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 20 |
logger = logging.getLogger("failure_logger")
|
| 21 |
logger.setLevel(logging.INFO)
|
| 22 |
logger.propagate = False
|
|
@@ -25,11 +56,10 @@ def setup_failure_logger():
|
|
| 25 |
logger.handlers.clear()
|
| 26 |
|
| 27 |
try:
|
| 28 |
-
|
| 29 |
-
os.makedirs(log_dir, exist_ok=True)
|
| 30 |
|
| 31 |
handler = RotatingFileHandler(
|
| 32 |
-
|
| 33 |
maxBytes=5 * 1024 * 1024, # 5 MB
|
| 34 |
backupCount=2,
|
| 35 |
)
|
|
@@ -43,8 +73,21 @@ def setup_failure_logger():
|
|
| 43 |
return logger
|
| 44 |
|
| 45 |
|
| 46 |
-
|
| 47 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 48 |
|
| 49 |
# Get the main library logger for concise, propagated messages
|
| 50 |
main_lib_logger = logging.getLogger("rotator_library")
|
|
@@ -174,7 +217,7 @@ def log_failure(
|
|
| 174 |
|
| 175 |
# Log to failure logger with resilience - if it fails, just continue
|
| 176 |
try:
|
| 177 |
-
|
| 178 |
except (OSError, IOError) as e:
|
| 179 |
# Log file write failed - log to console instead
|
| 180 |
logging.warning(f"Failed to write to failures.log: {e}")
|
|
|
|
| 1 |
import logging
|
| 2 |
import json
|
| 3 |
from logging.handlers import RotatingFileHandler
|
| 4 |
+
from pathlib import Path
|
| 5 |
from datetime import datetime
|
| 6 |
+
from typing import Optional, Union
|
| 7 |
+
|
| 8 |
from .error_handler import mask_credential
|
| 9 |
+
from .utils.paths import get_logs_dir
|
| 10 |
|
| 11 |
|
| 12 |
class JsonFormatter(logging.Formatter):
|
|
|
|
| 17 |
return json.dumps(record.msg)
|
| 18 |
|
| 19 |
|
| 20 |
+
# Module-level state for lazy initialization
|
| 21 |
+
_failure_logger: Optional[logging.Logger] = None
|
| 22 |
+
_configured_logs_dir: Optional[Path] = None
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
def configure_failure_logger(logs_dir: Optional[Union[Path, str]] = None) -> None:
|
| 26 |
+
"""
|
| 27 |
+
Configure the failure logger to use a specific logs directory.
|
| 28 |
+
|
| 29 |
+
Call this before first use if you want to override the default location.
|
| 30 |
+
If not called, the logger will use get_logs_dir() on first use.
|
| 31 |
+
|
| 32 |
+
Args:
|
| 33 |
+
logs_dir: Path to the logs directory. If None, uses get_logs_dir().
|
| 34 |
+
"""
|
| 35 |
+
global _configured_logs_dir, _failure_logger
|
| 36 |
+
_configured_logs_dir = Path(logs_dir) if logs_dir else None
|
| 37 |
+
# Reset logger so it gets reconfigured on next use
|
| 38 |
+
_failure_logger = None
|
| 39 |
+
|
| 40 |
+
|
| 41 |
+
def _setup_failure_logger(logs_dir: Path) -> logging.Logger:
|
| 42 |
+
"""
|
| 43 |
+
Sets up a dedicated JSON logger for writing detailed failure logs to a file.
|
| 44 |
+
|
| 45 |
+
Args:
|
| 46 |
+
logs_dir: Path to the logs directory.
|
| 47 |
+
|
| 48 |
+
Returns:
|
| 49 |
+
Configured logger instance.
|
| 50 |
+
"""
|
| 51 |
logger = logging.getLogger("failure_logger")
|
| 52 |
logger.setLevel(logging.INFO)
|
| 53 |
logger.propagate = False
|
|
|
|
| 56 |
logger.handlers.clear()
|
| 57 |
|
| 58 |
try:
|
| 59 |
+
logs_dir.mkdir(parents=True, exist_ok=True)
|
|
|
|
| 60 |
|
| 61 |
handler = RotatingFileHandler(
|
| 62 |
+
logs_dir / "failures.log",
|
| 63 |
maxBytes=5 * 1024 * 1024, # 5 MB
|
| 64 |
backupCount=2,
|
| 65 |
)
|
|
|
|
| 73 |
return logger
|
| 74 |
|
| 75 |
|
| 76 |
+
def get_failure_logger() -> logging.Logger:
|
| 77 |
+
"""
|
| 78 |
+
Get the failure logger, initializing it lazily if needed.
|
| 79 |
+
|
| 80 |
+
Returns:
|
| 81 |
+
The configured failure logger.
|
| 82 |
+
"""
|
| 83 |
+
global _failure_logger, _configured_logs_dir
|
| 84 |
+
|
| 85 |
+
if _failure_logger is None:
|
| 86 |
+
logs_dir = _configured_logs_dir if _configured_logs_dir else get_logs_dir()
|
| 87 |
+
_failure_logger = _setup_failure_logger(logs_dir)
|
| 88 |
+
|
| 89 |
+
return _failure_logger
|
| 90 |
+
|
| 91 |
|
| 92 |
# Get the main library logger for concise, propagated messages
|
| 93 |
main_lib_logger = logging.getLogger("rotator_library")
|
|
|
|
| 217 |
|
| 218 |
# Log to failure logger with resilience - if it fails, just continue
|
| 219 |
try:
|
| 220 |
+
get_failure_logger().error(detailed_log_data)
|
| 221 |
except (OSError, IOError) as e:
|
| 222 |
# Log file write failed - log to console instead
|
| 223 |
logging.warning(f"Failed to write to failures.log: {e}")
|
|
@@ -39,6 +39,7 @@ from .antigravity_auth_base import AntigravityAuthBase
|
|
| 39 |
from .provider_cache import ProviderCache
|
| 40 |
from ..model_definitions import ModelDefinitions
|
| 41 |
from ..timeout_config import TimeoutConfig
|
|
|
|
| 42 |
|
| 43 |
|
| 44 |
# =============================================================================
|
|
@@ -106,12 +107,23 @@ DEFAULT_SAFETY_SETTINGS = [
|
|
| 106 |
{"category": "HARM_CATEGORY_CIVIC_INTEGRITY", "threshold": "BLOCK_NONE"},
|
| 107 |
]
|
| 108 |
|
| 109 |
-
|
| 110 |
-
|
| 111 |
-
|
| 112 |
-
|
| 113 |
-
|
| 114 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 115 |
|
| 116 |
# Gemini 3 tool fix system instruction (prevents hallucination)
|
| 117 |
DEFAULT_GEMINI3_SYSTEM_INSTRUCTION = """<CRITICAL_TOOL_USAGE_INSTRUCTIONS>
|
|
@@ -426,7 +438,9 @@ class AntigravityFileLogger:
|
|
| 426 |
|
| 427 |
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S_%f")
|
| 428 |
safe_model = model_name.replace("/", "_").replace(":", "_")
|
| 429 |
-
self.log_dir =
|
|
|
|
|
|
|
| 430 |
|
| 431 |
try:
|
| 432 |
self.log_dir.mkdir(parents=True, exist_ok=True)
|
|
@@ -731,13 +745,13 @@ class AntigravityProvider(AntigravityAuthBase, ProviderInterface):
|
|
| 731 |
|
| 732 |
# Initialize caches using shared ProviderCache
|
| 733 |
self._signature_cache = ProviderCache(
|
| 734 |
-
|
| 735 |
memory_ttl,
|
| 736 |
disk_ttl,
|
| 737 |
env_prefix="ANTIGRAVITY_SIGNATURE",
|
| 738 |
)
|
| 739 |
self._thinking_cache = ProviderCache(
|
| 740 |
-
|
| 741 |
memory_ttl,
|
| 742 |
disk_ttl,
|
| 743 |
env_prefix="ANTIGRAVITY_THINKING",
|
|
|
|
| 39 |
from .provider_cache import ProviderCache
|
| 40 |
from ..model_definitions import ModelDefinitions
|
| 41 |
from ..timeout_config import TimeoutConfig
|
| 42 |
+
from ..utils.paths import get_logs_dir, get_cache_dir
|
| 43 |
|
| 44 |
|
| 45 |
# =============================================================================
|
|
|
|
| 107 |
{"category": "HARM_CATEGORY_CIVIC_INTEGRITY", "threshold": "BLOCK_NONE"},
|
| 108 |
]
|
| 109 |
|
| 110 |
+
|
| 111 |
+
# Directory paths - use centralized path management
|
| 112 |
+
def _get_antigravity_logs_dir():
|
| 113 |
+
return get_logs_dir() / "antigravity_logs"
|
| 114 |
+
|
| 115 |
+
|
| 116 |
+
def _get_antigravity_cache_dir():
|
| 117 |
+
return get_cache_dir(subdir="antigravity")
|
| 118 |
+
|
| 119 |
+
|
| 120 |
+
def _get_gemini3_signature_cache_file():
|
| 121 |
+
return _get_antigravity_cache_dir() / "gemini3_signatures.json"
|
| 122 |
+
|
| 123 |
+
|
| 124 |
+
def _get_claude_thinking_cache_file():
|
| 125 |
+
return _get_antigravity_cache_dir() / "claude_thinking.json"
|
| 126 |
+
|
| 127 |
|
| 128 |
# Gemini 3 tool fix system instruction (prevents hallucination)
|
| 129 |
DEFAULT_GEMINI3_SYSTEM_INSTRUCTION = """<CRITICAL_TOOL_USAGE_INSTRUCTIONS>
|
|
|
|
| 438 |
|
| 439 |
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S_%f")
|
| 440 |
safe_model = model_name.replace("/", "_").replace(":", "_")
|
| 441 |
+
self.log_dir = (
|
| 442 |
+
_get_antigravity_logs_dir() / f"{timestamp}_{safe_model}_{uuid.uuid4()}"
|
| 443 |
+
)
|
| 444 |
|
| 445 |
try:
|
| 446 |
self.log_dir.mkdir(parents=True, exist_ok=True)
|
|
|
|
| 745 |
|
| 746 |
# Initialize caches using shared ProviderCache
|
| 747 |
self._signature_cache = ProviderCache(
|
| 748 |
+
_get_gemini3_signature_cache_file(),
|
| 749 |
memory_ttl,
|
| 750 |
disk_ttl,
|
| 751 |
env_prefix="ANTIGRAVITY_SIGNATURE",
|
| 752 |
)
|
| 753 |
self._thinking_cache = ProviderCache(
|
| 754 |
+
_get_claude_thinking_cache_file(),
|
| 755 |
memory_ttl,
|
| 756 |
disk_ttl,
|
| 757 |
env_prefix="ANTIGRAVITY_THINKING",
|
|
@@ -12,6 +12,7 @@ from .gemini_auth_base import GeminiAuthBase
|
|
| 12 |
from .provider_cache import ProviderCache
|
| 13 |
from ..model_definitions import ModelDefinitions
|
| 14 |
from ..timeout_config import TimeoutConfig
|
|
|
|
| 15 |
import litellm
|
| 16 |
from litellm.exceptions import RateLimitError
|
| 17 |
from ..error_handler import extract_retry_after_from_body
|
|
@@ -22,8 +23,22 @@ from datetime import datetime
|
|
| 22 |
|
| 23 |
lib_logger = logging.getLogger("rotator_library")
|
| 24 |
|
| 25 |
-
|
| 26 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 27 |
|
| 28 |
|
| 29 |
class _GeminiCliFileLogger:
|
|
@@ -39,7 +54,7 @@ class _GeminiCliFileLogger:
|
|
| 39 |
# Sanitize model name for directory
|
| 40 |
safe_model_name = model_name.replace("/", "_").replace(":", "_")
|
| 41 |
self.log_dir = (
|
| 42 |
-
|
| 43 |
)
|
| 44 |
try:
|
| 45 |
self.log_dir.mkdir(parents=True, exist_ok=True)
|
|
@@ -103,12 +118,6 @@ HARDCODED_MODELS = [
|
|
| 103 |
"gemini-3-pro-preview",
|
| 104 |
]
|
| 105 |
|
| 106 |
-
# Cache directory for Gemini CLI
|
| 107 |
-
CACHE_DIR = (
|
| 108 |
-
Path(__file__).resolve().parent.parent.parent.parent / "cache" / "gemini_cli"
|
| 109 |
-
)
|
| 110 |
-
GEMINI3_SIGNATURE_CACHE_FILE = CACHE_DIR / "gemini3_signatures.json"
|
| 111 |
-
|
| 112 |
# Gemini 3 tool fix system instruction (prevents hallucination)
|
| 113 |
DEFAULT_GEMINI3_SYSTEM_INSTRUCTION = """<CRITICAL_TOOL_USAGE_INSTRUCTIONS>
|
| 114 |
You are operating in a CUSTOM ENVIRONMENT where tool definitions COMPLETELY DIFFER from your training data.
|
|
@@ -392,7 +401,7 @@ class GeminiCliProvider(GeminiAuthBase, ProviderInterface):
|
|
| 392 |
|
| 393 |
# Initialize signature cache for Gemini 3 thoughtSignatures
|
| 394 |
self._signature_cache = ProviderCache(
|
| 395 |
-
|
| 396 |
memory_ttl,
|
| 397 |
disk_ttl,
|
| 398 |
env_prefix="GEMINI_CLI_SIGNATURE",
|
|
|
|
| 12 |
from .provider_cache import ProviderCache
|
| 13 |
from ..model_definitions import ModelDefinitions
|
| 14 |
from ..timeout_config import TimeoutConfig
|
| 15 |
+
from ..utils.paths import get_logs_dir, get_cache_dir
|
| 16 |
import litellm
|
| 17 |
from litellm.exceptions import RateLimitError
|
| 18 |
from ..error_handler import extract_retry_after_from_body
|
|
|
|
| 23 |
|
| 24 |
lib_logger = logging.getLogger("rotator_library")
|
| 25 |
|
| 26 |
+
|
| 27 |
+
def _get_gemini_cli_logs_dir() -> Path:
|
| 28 |
+
"""Get the Gemini CLI logs directory."""
|
| 29 |
+
logs_dir = get_logs_dir() / "gemini_cli_logs"
|
| 30 |
+
logs_dir.mkdir(parents=True, exist_ok=True)
|
| 31 |
+
return logs_dir
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def _get_gemini_cli_cache_dir() -> Path:
|
| 35 |
+
"""Get the Gemini CLI cache directory."""
|
| 36 |
+
return get_cache_dir(subdir="gemini_cli")
|
| 37 |
+
|
| 38 |
+
|
| 39 |
+
def _get_gemini3_signature_cache_file() -> Path:
|
| 40 |
+
"""Get the Gemini 3 signature cache file path."""
|
| 41 |
+
return _get_gemini_cli_cache_dir() / "gemini3_signatures.json"
|
| 42 |
|
| 43 |
|
| 44 |
class _GeminiCliFileLogger:
|
|
|
|
| 54 |
# Sanitize model name for directory
|
| 55 |
safe_model_name = model_name.replace("/", "_").replace(":", "_")
|
| 56 |
self.log_dir = (
|
| 57 |
+
_get_gemini_cli_logs_dir() / f"{timestamp}_{safe_model_name}_{request_id}"
|
| 58 |
)
|
| 59 |
try:
|
| 60 |
self.log_dir.mkdir(parents=True, exist_ok=True)
|
|
|
|
| 118 |
"gemini-3-pro-preview",
|
| 119 |
]
|
| 120 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 121 |
# Gemini 3 tool fix system instruction (prevents hallucination)
|
| 122 |
DEFAULT_GEMINI3_SYSTEM_INSTRUCTION = """<CRITICAL_TOOL_USAGE_INSTRUCTIONS>
|
| 123 |
You are operating in a CUSTOM ENVIRONMENT where tool definitions COMPLETELY DIFFER from your training data.
|
|
|
|
| 401 |
|
| 402 |
# Initialize signature cache for Gemini 3 thoughtSignatures
|
| 403 |
self._signature_cache = ProviderCache(
|
| 404 |
+
_get_gemini3_signature_cache_file(),
|
| 405 |
memory_ttl,
|
| 406 |
disk_ttl,
|
| 407 |
env_prefix="GEMINI_CLI_SIGNATURE",
|
|
@@ -11,6 +11,7 @@ from .provider_interface import ProviderInterface
|
|
| 11 |
from .iflow_auth_base import IFlowAuthBase
|
| 12 |
from ..model_definitions import ModelDefinitions
|
| 13 |
from ..timeout_config import TimeoutConfig
|
|
|
|
| 14 |
import litellm
|
| 15 |
from litellm.exceptions import RateLimitError, AuthenticationError
|
| 16 |
from pathlib import Path
|
|
@@ -19,8 +20,12 @@ from datetime import datetime
|
|
| 19 |
|
| 20 |
lib_logger = logging.getLogger("rotator_library")
|
| 21 |
|
| 22 |
-
|
| 23 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 24 |
|
| 25 |
|
| 26 |
class _IFlowFileLogger:
|
|
@@ -35,7 +40,9 @@ class _IFlowFileLogger:
|
|
| 35 |
request_id = str(uuid.uuid4())
|
| 36 |
# Sanitize model name for directory
|
| 37 |
safe_model_name = model_name.replace("/", "_").replace(":", "_")
|
| 38 |
-
self.log_dir =
|
|
|
|
|
|
|
| 39 |
try:
|
| 40 |
self.log_dir.mkdir(parents=True, exist_ok=True)
|
| 41 |
except Exception as e:
|
|
|
|
| 11 |
from .iflow_auth_base import IFlowAuthBase
|
| 12 |
from ..model_definitions import ModelDefinitions
|
| 13 |
from ..timeout_config import TimeoutConfig
|
| 14 |
+
from ..utils.paths import get_logs_dir
|
| 15 |
import litellm
|
| 16 |
from litellm.exceptions import RateLimitError, AuthenticationError
|
| 17 |
from pathlib import Path
|
|
|
|
| 20 |
|
| 21 |
lib_logger = logging.getLogger("rotator_library")
|
| 22 |
|
| 23 |
+
|
| 24 |
+
def _get_iflow_logs_dir() -> Path:
|
| 25 |
+
"""Get the iFlow logs directory."""
|
| 26 |
+
logs_dir = get_logs_dir() / "iflow_logs"
|
| 27 |
+
logs_dir.mkdir(parents=True, exist_ok=True)
|
| 28 |
+
return logs_dir
|
| 29 |
|
| 30 |
|
| 31 |
class _IFlowFileLogger:
|
|
|
|
| 40 |
request_id = str(uuid.uuid4())
|
| 41 |
# Sanitize model name for directory
|
| 42 |
safe_model_name = model_name.replace("/", "_").replace(":", "_")
|
| 43 |
+
self.log_dir = (
|
| 44 |
+
_get_iflow_logs_dir() / f"{timestamp}_{safe_model_name}_{request_id}"
|
| 45 |
+
)
|
| 46 |
try:
|
| 47 |
self.log_dir.mkdir(parents=True, exist_ok=True)
|
| 48 |
except Exception as e:
|
|
@@ -11,6 +11,7 @@ from .provider_interface import ProviderInterface
|
|
| 11 |
from .qwen_auth_base import QwenAuthBase
|
| 12 |
from ..model_definitions import ModelDefinitions
|
| 13 |
from ..timeout_config import TimeoutConfig
|
|
|
|
| 14 |
import litellm
|
| 15 |
from litellm.exceptions import RateLimitError, AuthenticationError
|
| 16 |
from pathlib import Path
|
|
@@ -19,8 +20,12 @@ from datetime import datetime
|
|
| 19 |
|
| 20 |
lib_logger = logging.getLogger("rotator_library")
|
| 21 |
|
| 22 |
-
|
| 23 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
| 24 |
|
| 25 |
|
| 26 |
class _QwenCodeFileLogger:
|
|
@@ -36,7 +41,7 @@ class _QwenCodeFileLogger:
|
|
| 36 |
# Sanitize model name for directory
|
| 37 |
safe_model_name = model_name.replace("/", "_").replace(":", "_")
|
| 38 |
self.log_dir = (
|
| 39 |
-
|
| 40 |
)
|
| 41 |
try:
|
| 42 |
self.log_dir.mkdir(parents=True, exist_ok=True)
|
|
|
|
| 11 |
from .qwen_auth_base import QwenAuthBase
|
| 12 |
from ..model_definitions import ModelDefinitions
|
| 13 |
from ..timeout_config import TimeoutConfig
|
| 14 |
+
from ..utils.paths import get_logs_dir
|
| 15 |
import litellm
|
| 16 |
from litellm.exceptions import RateLimitError, AuthenticationError
|
| 17 |
from pathlib import Path
|
|
|
|
| 20 |
|
| 21 |
lib_logger = logging.getLogger("rotator_library")
|
| 22 |
|
| 23 |
+
|
| 24 |
+
def _get_qwen_code_logs_dir() -> Path:
|
| 25 |
+
"""Get the Qwen Code logs directory."""
|
| 26 |
+
logs_dir = get_logs_dir() / "qwen_code_logs"
|
| 27 |
+
logs_dir.mkdir(parents=True, exist_ok=True)
|
| 28 |
+
return logs_dir
|
| 29 |
|
| 30 |
|
| 31 |
class _QwenCodeFileLogger:
|
|
|
|
| 41 |
# Sanitize model name for directory
|
| 42 |
safe_model_name = model_name.replace("/", "_").replace(":", "_")
|
| 43 |
self.log_dir = (
|
| 44 |
+
_get_qwen_code_logs_dir() / f"{timestamp}_{safe_model_name}_{request_id}"
|
| 45 |
)
|
| 46 |
try:
|
| 47 |
self.log_dir.mkdir(parents=True, exist_ok=True)
|
|
@@ -5,13 +5,15 @@ import logging
|
|
| 5 |
import asyncio
|
| 6 |
import random
|
| 7 |
from datetime import date, datetime, timezone, time as dt_time
|
| 8 |
-
from
|
|
|
|
| 9 |
import aiofiles
|
| 10 |
import litellm
|
| 11 |
|
| 12 |
from .error_handler import ClassifiedError, NoAvailableKeysError, mask_credential
|
| 13 |
from .providers import PROVIDER_PLUGINS
|
| 14 |
from .utils.resilient_io import ResilientStateWriter
|
|
|
|
| 15 |
|
| 16 |
lib_logger = logging.getLogger("rotator_library")
|
| 17 |
lib_logger.propagate = False
|
|
@@ -51,7 +53,7 @@ class UsageManager:
|
|
| 51 |
|
| 52 |
def __init__(
|
| 53 |
self,
|
| 54 |
-
file_path: str =
|
| 55 |
daily_reset_time_utc: Optional[str] = "03:00",
|
| 56 |
rotation_tolerance: float = 0.0,
|
| 57 |
provider_rotation_modes: Optional[Dict[str, str]] = None,
|
|
@@ -66,7 +68,8 @@ class UsageManager:
|
|
| 66 |
Initialize the UsageManager.
|
| 67 |
|
| 68 |
Args:
|
| 69 |
-
file_path: Path to the usage data JSON file
|
|
|
|
| 70 |
daily_reset_time_utc: Time in UTC when daily stats should reset (HH:MM format)
|
| 71 |
rotation_tolerance: Tolerance for weighted random credential rotation.
|
| 72 |
- 0.0: Deterministic, least-used credential always selected
|
|
@@ -86,7 +89,14 @@ class UsageManager:
|
|
| 86 |
Used in sequential mode when priority not in priority_multipliers.
|
| 87 |
Example: {"antigravity": 2}
|
| 88 |
"""
|
| 89 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 90 |
self.rotation_tolerance = rotation_tolerance
|
| 91 |
self.provider_rotation_modes = provider_rotation_modes or {}
|
| 92 |
self.provider_plugins = provider_plugins or PROVIDER_PLUGINS
|
|
|
|
| 5 |
import asyncio
|
| 6 |
import random
|
| 7 |
from datetime import date, datetime, timezone, time as dt_time
|
| 8 |
+
from pathlib import Path
|
| 9 |
+
from typing import Any, Dict, List, Optional, Set, Tuple, Union
|
| 10 |
import aiofiles
|
| 11 |
import litellm
|
| 12 |
|
| 13 |
from .error_handler import ClassifiedError, NoAvailableKeysError, mask_credential
|
| 14 |
from .providers import PROVIDER_PLUGINS
|
| 15 |
from .utils.resilient_io import ResilientStateWriter
|
| 16 |
+
from .utils.paths import get_data_file
|
| 17 |
|
| 18 |
lib_logger = logging.getLogger("rotator_library")
|
| 19 |
lib_logger.propagate = False
|
|
|
|
| 53 |
|
| 54 |
def __init__(
|
| 55 |
self,
|
| 56 |
+
file_path: Optional[Union[str, Path]] = None,
|
| 57 |
daily_reset_time_utc: Optional[str] = "03:00",
|
| 58 |
rotation_tolerance: float = 0.0,
|
| 59 |
provider_rotation_modes: Optional[Dict[str, str]] = None,
|
|
|
|
| 68 |
Initialize the UsageManager.
|
| 69 |
|
| 70 |
Args:
|
| 71 |
+
file_path: Path to the usage data JSON file. If None, uses get_data_file("key_usage.json").
|
| 72 |
+
Can be absolute Path, relative Path, or string.
|
| 73 |
daily_reset_time_utc: Time in UTC when daily stats should reset (HH:MM format)
|
| 74 |
rotation_tolerance: Tolerance for weighted random credential rotation.
|
| 75 |
- 0.0: Deterministic, least-used credential always selected
|
|
|
|
| 89 |
Used in sequential mode when priority not in priority_multipliers.
|
| 90 |
Example: {"antigravity": 2}
|
| 91 |
"""
|
| 92 |
+
# Resolve file_path - use default if not provided
|
| 93 |
+
if file_path is None:
|
| 94 |
+
self.file_path = str(get_data_file("key_usage.json"))
|
| 95 |
+
elif isinstance(file_path, Path):
|
| 96 |
+
self.file_path = str(file_path)
|
| 97 |
+
else:
|
| 98 |
+
# String path - could be relative or absolute
|
| 99 |
+
self.file_path = file_path
|
| 100 |
self.rotation_tolerance = rotation_tolerance
|
| 101 |
self.provider_rotation_modes = provider_rotation_modes or {}
|
| 102 |
self.provider_plugins = provider_plugins or PROVIDER_PLUGINS
|
|
@@ -1,6 +1,13 @@
|
|
| 1 |
# src/rotator_library/utils/__init__.py
|
| 2 |
|
| 3 |
from .headless_detection import is_headless_environment
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 4 |
from .reauth_coordinator import get_reauth_coordinator, ReauthCoordinator
|
| 5 |
from .resilient_io import (
|
| 6 |
BufferedWriteRegistry,
|
|
@@ -12,6 +19,11 @@ from .resilient_io import (
|
|
| 12 |
|
| 13 |
__all__ = [
|
| 14 |
"is_headless_environment",
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 15 |
"get_reauth_coordinator",
|
| 16 |
"ReauthCoordinator",
|
| 17 |
"BufferedWriteRegistry",
|
|
|
|
| 1 |
# src/rotator_library/utils/__init__.py
|
| 2 |
|
| 3 |
from .headless_detection import is_headless_environment
|
| 4 |
+
from .paths import (
|
| 5 |
+
get_default_root,
|
| 6 |
+
get_logs_dir,
|
| 7 |
+
get_cache_dir,
|
| 8 |
+
get_oauth_dir,
|
| 9 |
+
get_data_file,
|
| 10 |
+
)
|
| 11 |
from .reauth_coordinator import get_reauth_coordinator, ReauthCoordinator
|
| 12 |
from .resilient_io import (
|
| 13 |
BufferedWriteRegistry,
|
|
|
|
| 19 |
|
| 20 |
__all__ = [
|
| 21 |
"is_headless_environment",
|
| 22 |
+
"get_default_root",
|
| 23 |
+
"get_logs_dir",
|
| 24 |
+
"get_cache_dir",
|
| 25 |
+
"get_oauth_dir",
|
| 26 |
+
"get_data_file",
|
| 27 |
"get_reauth_coordinator",
|
| 28 |
"ReauthCoordinator",
|
| 29 |
"BufferedWriteRegistry",
|
|
@@ -0,0 +1,99 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# src/rotator_library/utils/paths.py
|
| 2 |
+
"""
|
| 3 |
+
Centralized path management for the rotator library.
|
| 4 |
+
|
| 5 |
+
Supports two runtime modes:
|
| 6 |
+
1. PyInstaller EXE -> files in the directory containing the executable
|
| 7 |
+
2. Script/Library -> files in the current working directory (overridable)
|
| 8 |
+
|
| 9 |
+
Library users can override by passing `data_dir` to RotatingClient.
|
| 10 |
+
"""
|
| 11 |
+
|
| 12 |
+
import sys
|
| 13 |
+
from pathlib import Path
|
| 14 |
+
from typing import Optional, Union
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
def get_default_root() -> Path:
    """
    Resolve the root directory used for all data files.

    - PyInstaller EXE mode: the folder that contains the executable
    - Script/library mode: the current working directory

    Returns:
        Path to the root directory
    """
    frozen = getattr(sys, "frozen", False)
    if not frozen:
        # Plain script or library usage: anchor all data files at the CWD.
        return Path.cwd()
    # PyInstaller sets sys.frozen; sys.executable then points at the bundle.
    return Path(sys.executable).parent
|
| 32 |
+
|
| 33 |
+
|
| 34 |
+
def get_logs_dir(root: Optional[Union[Path, str]] = None) -> Path:
    """
    Get the logs directory, creating it (and any missing parents) if needed.

    Args:
        root: Optional root directory. If None, uses get_default_root().

    Returns:
        Path to the logs directory
    """
    base = Path(root) if root else get_default_root()
    logs_dir = base / "logs"
    # parents=True keeps this consistent with get_cache_dir and avoids a
    # FileNotFoundError when a caller-supplied root does not exist yet
    # (e.g. a data_dir override pointing at a fresh location).
    logs_dir.mkdir(parents=True, exist_ok=True)
    return logs_dir
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
def get_cache_dir(
    root: Optional[Union[Path, str]] = None, subdir: Optional[str] = None
) -> Path:
    """
    Resolve (and create) the cache directory, optionally a named subdirectory.

    Args:
        root: Optional root directory. If None, uses get_default_root().
        subdir: Optional subdirectory name (e.g., "gemini_cli", "antigravity")

    Returns:
        Path to the cache directory (or subdirectory)
    """
    base = get_default_root() if not root else Path(root)
    # Append the provider subdirectory only when one was requested.
    target = base / "cache" / subdir if subdir else base / "cache"
    target.mkdir(parents=True, exist_ok=True)
    return target
|
| 69 |
+
|
| 70 |
+
|
| 71 |
+
def get_oauth_dir(root: Optional[Union[Path, str]] = None) -> Path:
    """
    Get the OAuth credentials directory, creating it (and any missing
    parents) if needed.

    Args:
        root: Optional root directory. If None, uses get_default_root().

    Returns:
        Path to the oauth_creds directory
    """
    base = Path(root) if root else get_default_root()
    oauth_dir = base / "oauth_creds"
    # parents=True keeps this consistent with get_cache_dir and avoids a
    # FileNotFoundError when a caller-supplied root does not exist yet.
    oauth_dir.mkdir(parents=True, exist_ok=True)
    return oauth_dir
|
| 85 |
+
|
| 86 |
+
|
| 87 |
+
def get_data_file(filename: str, root: Optional[Union[Path, str]] = None) -> Path:
    """
    Build the path to a data file inside the root directory.

    The file itself is never created here — only the path is computed.

    Args:
        filename: Name of the file (e.g., "key_usage.json", ".env")
        root: Optional root directory. If None, uses get_default_root().

    Returns:
        Path to the file (does not create the file)
    """
    if root:
        return Path(root) / filename
    return get_default_root() / filename
|