Mirrowel committed on
Commit
f439788
·
1 Parent(s): 92211ea

refactor(auth): 🔨 consolidate OAuth credential management into base classes

Browse files

This commit refactors the OAuth credential setup and export functionality by moving all credential management logic from the credential_tool into the provider auth base classes. This follows the established pattern where auth classes own their credential lifecycle.

Key changes:
- Moved setup_credential(), build_env_lines(), export_credential_to_env(), list_credentials(), and delete_credential() methods from credential_tool.py into google_oauth_base.py, qwen_auth_base.py, and iflow_auth_base.py
- Introduced CredentialSetupResult dataclass for standardized return values from setup operations
- Added _post_auth_discovery() hook in google_oauth_base.py, with implementations in gemini_auth_base.py and antigravity_auth_base.py to perform tier/project discovery immediately after OAuth authentication
- Moved _discover_project_id() and _persist_project_metadata() from provider classes into their respective auth base classes (gemini_auth_base.py and antigravity_auth_base.py)
- Simplified credential_tool.py export functions to delegate to auth class methods instead of implementing logic inline
- Removed duplicate helper functions (_build_env_export_content, _get_credential_number_from_filename, etc.) that are now handled by auth classes
- Removed unused imports (re, time from credential_tool.py; asyncio from provider files)

This consolidation:
- Improves code organization by keeping credential management logic with authentication logic
- Eliminates code duplication across export functions
- Makes it easier for future OAuth providers to implement credential management by inheriting base functionality
- Ensures tier/project discovery happens during authentication rather than on first API request, improving user experience

BREAKING CHANGE: The internal API for credential management has changed. External code that directly imported helper functions like _build_env_export_content() from credential_tool.py will need to use the new auth class methods instead (e.g., auth_instance.build_env_lines()).

src/rotator_library/credential_tool.py CHANGED
@@ -3,12 +3,11 @@
3
  import asyncio
4
  import json
5
  import os
6
- import re
7
  import time
8
  from pathlib import Path
9
  from dotenv import set_key, get_key
10
 
11
- # NOTE: Heavy imports (provider_factory, PROVIDER_PLUGINS) are deferred
12
  # to avoid 6-7 second delay before showing loading screen
13
  from rich.console import Console
14
  from rich.panel import Panel
@@ -26,12 +25,14 @@ console = Console()
26
  _provider_factory = None
27
  _provider_plugins = None
28
 
 
29
  def _ensure_providers_loaded():
30
  """Lazy load provider modules only when needed"""
31
  global _provider_factory, _provider_plugins
32
  if _provider_factory is None:
33
  from . import provider_factory as pf
34
  from .providers import PROVIDER_PLUGINS as pp
 
35
  _provider_factory = pf
36
  _provider_plugins = pp
37
  return _provider_factory, _provider_plugins
@@ -39,100 +40,35 @@ def _ensure_providers_loaded():
39
 
40
  def clear_screen():
41
  """
42
- Cross-platform terminal clear that works robustly on both
43
  classic Windows conhost and modern terminals (Windows Terminal, Linux, Mac).
44
-
45
  Uses native OS commands instead of ANSI escape sequences:
46
  - Windows (conhost & Windows Terminal): cls
47
  - Unix-like systems (Linux, Mac): clear
48
  """
49
- os.system('cls' if os.name == 'nt' else 'clear')
50
 
51
 
52
- def _get_credential_number_from_filename(filename: str) -> int:
53
- """
54
- Extract credential number from filename like 'provider_oauth_1.json' -> 1
55
- """
56
- match = re.search(r'_oauth_(\d+)\.json$', filename)
57
- if match:
58
- return int(match.group(1))
59
- return 1
60
-
61
-
62
- def _build_env_export_content(
63
- provider_prefix: str,
64
- cred_number: int,
65
- creds: dict,
66
- email: str,
67
- extra_fields: dict = None,
68
- include_client_creds: bool = True
69
- ) -> tuple[list[str], str]:
70
- """
71
- Build .env content for OAuth credential export with numbered format.
72
- Exports all fields from the JSON file as a 1-to-1 mirror.
73
-
74
- Args:
75
- provider_prefix: Environment variable prefix (e.g., "ANTIGRAVITY", "GEMINI_CLI")
76
- cred_number: Credential number for this export (1, 2, 3, etc.)
77
- creds: The credential dictionary loaded from JSON
78
- email: User email for comments
79
- extra_fields: Optional dict of additional fields to include
80
- include_client_creds: Whether to include client_id/secret (Google OAuth providers)
81
-
82
- Returns:
83
- Tuple of (env_lines list, numbered_prefix string for display)
84
- """
85
- # Use numbered format: PROVIDER_N_ACCESS_TOKEN
86
- numbered_prefix = f"{provider_prefix}_{cred_number}"
87
-
88
- env_lines = [
89
- f"# {provider_prefix} Credential #{cred_number} for: {email}",
90
- f"# Exported from: {provider_prefix.lower()}_oauth_{cred_number}.json",
91
- f"# Generated at: {time.strftime('%Y-%m-%d %H:%M:%S')}",
92
- f"# ",
93
- f"# To combine multiple credentials into one .env file, copy these lines",
94
- f"# and ensure each credential has a unique number (1, 2, 3, etc.)",
95
- "",
96
- f"{numbered_prefix}_ACCESS_TOKEN={creds.get('access_token', '')}",
97
- f"{numbered_prefix}_REFRESH_TOKEN={creds.get('refresh_token', '')}",
98
- f"{numbered_prefix}_SCOPE={creds.get('scope', '')}",
99
- f"{numbered_prefix}_TOKEN_TYPE={creds.get('token_type', 'Bearer')}",
100
- f"{numbered_prefix}_ID_TOKEN={creds.get('id_token', '')}",
101
- f"{numbered_prefix}_EXPIRY_DATE={creds.get('expiry_date', 0)}",
102
- ]
103
-
104
- if include_client_creds:
105
- env_lines.extend([
106
- f"{numbered_prefix}_CLIENT_ID={creds.get('client_id', '')}",
107
- f"{numbered_prefix}_CLIENT_SECRET={creds.get('client_secret', '')}",
108
- f"{numbered_prefix}_TOKEN_URI={creds.get('token_uri', 'https://oauth2.googleapis.com/token')}",
109
- f"{numbered_prefix}_UNIVERSE_DOMAIN={creds.get('universe_domain', 'googleapis.com')}",
110
- ])
111
-
112
- env_lines.append(f"{numbered_prefix}_EMAIL={email}")
113
-
114
- # Add extra provider-specific fields
115
- if extra_fields:
116
- for key, value in extra_fields.items():
117
- if value: # Only add non-empty values
118
- env_lines.append(f"{numbered_prefix}_{key}={value}")
119
-
120
- return env_lines, numbered_prefix
121
-
122
  def ensure_env_defaults():
123
  """
124
  Ensures the .env file exists and contains essential default values like PROXY_API_KEY.
125
  """
126
  if not ENV_FILE.is_file():
127
  ENV_FILE.touch()
128
- console.print(f"Creating a new [bold yellow]{ENV_FILE.name}[/bold yellow] file...")
 
 
129
 
130
  # Check for PROXY_API_KEY, similar to setup_env.bat
131
  if get_key(str(ENV_FILE), "PROXY_API_KEY") is None:
132
  default_key = "VerysecretKey"
133
- console.print(f"Adding default [bold cyan]PROXY_API_KEY[/bold cyan] to [bold yellow]{ENV_FILE.name}[/bold yellow]...")
 
 
134
  set_key(str(ENV_FILE), "PROXY_API_KEY", default_key)
135
 
 
136
  async def setup_api_key():
137
  """
138
  Interactively sets up a new API key for a provider.
@@ -144,41 +80,74 @@ async def setup_api_key():
144
 
145
  # Verified list of LiteLLM providers with their friendly names and API key variables
146
  LITELLM_PROVIDERS = {
147
- "OpenAI": "OPENAI_API_KEY", "Anthropic": "ANTHROPIC_API_KEY",
148
- "Google AI Studio (Gemini)": "GEMINI_API_KEY", "Azure OpenAI": "AZURE_API_KEY",
149
- "Vertex AI": "GOOGLE_API_KEY", "AWS Bedrock": "AWS_ACCESS_KEY_ID",
150
- "Cohere": "COHERE_API_KEY", "Chutes": "CHUTES_API_KEY",
 
 
 
 
151
  "Mistral AI": "MISTRAL_API_KEY",
152
- "Codestral (Mistral)": "CODESTRAL_API_KEY", "Groq": "GROQ_API_KEY",
153
- "Perplexity": "PERPLEXITYAI_API_KEY", "xAI": "XAI_API_KEY",
154
- "Together AI": "TOGETHERAI_API_KEY", "Fireworks AI": "FIREWORKS_AI_API_KEY",
155
- "Replicate": "REPLICATE_API_KEY", "Hugging Face": "HUGGINGFACE_API_KEY",
156
- "Anyscale": "ANYSCALE_API_KEY", "NVIDIA NIM": "NVIDIA_NIM_API_KEY",
157
- "Deepseek": "DEEPSEEK_API_KEY", "AI21": "AI21_API_KEY",
158
- "Cerebras": "CEREBRAS_API_KEY", "Moonshot": "MOONSHOT_API_KEY",
159
- "Ollama": "OLLAMA_API_KEY", "Xinference": "XINFERENCE_API_KEY",
160
- "Infinity": "INFINITY_API_KEY", "OpenRouter": "OPENROUTER_API_KEY",
161
- "Deepinfra": "DEEPINFRA_API_KEY", "Cloudflare": "CLOUDFLARE_API_KEY",
162
- "Baseten": "BASETEN_API_KEY", "Modal": "MODAL_API_KEY",
163
- "Databricks": "DATABRICKS_API_KEY", "AWS SageMaker": "AWS_ACCESS_KEY_ID",
164
- "IBM watsonx.ai": "WATSONX_APIKEY", "Predibase": "PREDIBASE_API_KEY",
165
- "Clarifai": "CLARIFAI_API_KEY", "NLP Cloud": "NLP_CLOUD_API_KEY",
166
- "Voyage AI": "VOYAGE_API_KEY", "Jina AI": "JINA_API_KEY",
167
- "Hyperbolic": "HYPERBOLIC_API_KEY", "Morph": "MORPH_API_KEY",
168
- "Lambda AI": "LAMBDA_API_KEY", "Novita AI": "NOVITA_API_KEY",
169
- "Aleph Alpha": "ALEPH_ALPHA_API_KEY", "SambaNova": "SAMBANOVA_API_KEY",
170
- "FriendliAI": "FRIENDLI_TOKEN", "Galadriel": "GALADRIEL_API_KEY",
171
- "CompactifAI": "COMPACTIFAI_API_KEY", "Lemonade": "LEMONADE_API_KEY",
172
- "GradientAI": "GRADIENTAI_API_KEY", "Featherless AI": "FEATHERLESS_AI_API_KEY",
173
- "Nebius AI Studio": "NEBIUS_API_KEY", "Dashscope (Qwen)": "DASHSCOPE_API_KEY",
174
- "Bytez": "BYTEZ_API_KEY", "Oracle OCI": "OCI_API_KEY",
175
- "DataRobot": "DATAROBOT_API_KEY", "OVHCloud": "OVHCLOUD_API_KEY",
176
- "Volcengine": "VOLCENGINE_API_KEY", "Snowflake": "SNOWFLAKE_API_KEY",
177
- "Nscale": "NSCALE_API_KEY", "Recraft": "RECRAFT_API_KEY",
178
- "v0": "V0_API_KEY", "Vercel": "VERCEL_AI_GATEWAY_API_KEY",
179
- "Topaz": "TOPAZ_API_KEY", "ElevenLabs": "ELEVENLABS_API_KEY",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
180
  "Deepgram": "DEEPGRAM_API_KEY",
181
- "GitHub Models": "GITHUB_TOKEN", "GitHub Copilot": "GITHUB_COPILOT_API_KEY",
 
182
  }
183
 
184
  # Discover custom providers and add them to the list
@@ -186,37 +155,37 @@ async def setup_api_key():
186
  # qwen_code API key support is a fallback
187
  # iflow API key support is a feature
188
  _, PROVIDER_PLUGINS = _ensure_providers_loaded()
189
-
190
  # Build a set of environment variables already in LITELLM_PROVIDERS
191
  # to avoid duplicates based on the actual API key names
192
  litellm_env_vars = set(LITELLM_PROVIDERS.values())
193
-
194
  # Providers to exclude from API key list
195
  exclude_providers = {
196
- 'gemini_cli', # OAuth-only
197
- 'antigravity', # OAuth-only
198
- 'qwen_code', # API key is fallback, OAuth is primary - don't advertise
199
- 'openai_compatible', # Base class, not a real provider
200
  }
201
-
202
  discovered_providers = {}
203
  for provider_key in PROVIDER_PLUGINS.keys():
204
  if provider_key in exclude_providers:
205
  continue
206
-
207
  # Create environment variable name
208
  env_var = provider_key.upper() + "_API_KEY"
209
-
210
  # Check if this env var already exists in LITELLM_PROVIDERS
211
  # This catches duplicates like GEMINI_API_KEY, MISTRAL_API_KEY, etc.
212
  if env_var in litellm_env_vars:
213
  # Already in LITELLM_PROVIDERS with better name, skip this one
214
  continue
215
-
216
  # Create display name for this custom provider
217
- display_name = provider_key.replace('_', ' ').title()
218
  discovered_providers[display_name] = env_var
219
-
220
  # LITELLM_PROVIDERS takes precedence (comes first in merge)
221
  combined_providers = {**LITELLM_PROVIDERS, **discovered_providers}
222
  provider_display_list = sorted(combined_providers.keys())
@@ -231,15 +200,19 @@ async def setup_api_key():
231
  else:
232
  provider_text.append(f" {i + 1}. {provider_name}\n")
233
 
234
- console.print(Panel(provider_text, title="Available Providers for API Key", style="bold blue"))
 
 
235
 
236
  choice = Prompt.ask(
237
- Text.from_markup("[bold]Please select a provider or type [red]'b'[/red] to go back[/bold]"),
 
 
238
  choices=[str(i + 1) for i in range(len(provider_display_list))] + ["b"],
239
- show_choices=False
240
  )
241
 
242
- if choice.lower() == 'b':
243
  return
244
 
245
  try:
@@ -256,54 +229,81 @@ async def setup_api_key():
256
  for line in f:
257
  line = line.strip()
258
  if line.startswith(api_var_base) and "=" in line:
259
- existing_key_name, _, existing_key_value = line.partition("=")
 
 
260
  if existing_key_value == api_key:
261
- warning_text = Text.from_markup(f"This API key already exists as [bold yellow]'{existing_key_name}'[/bold yellow]. Overwriting...")
262
- console.print(Panel(warning_text, style="bold yellow", title="Updating API Key"))
 
 
 
 
 
 
 
 
263
 
264
  set_key(str(ENV_FILE), existing_key_name, api_key)
265
 
266
- success_text = Text.from_markup(f"Successfully updated existing key [bold yellow]'{existing_key_name}'[/bold yellow].")
267
- console.print(Panel(success_text, style="bold green", title="Success"))
 
 
 
 
 
 
 
 
268
  return
269
 
270
  # Special handling for AWS
271
  if display_name in ["AWS Bedrock", "AWS SageMaker"]:
272
- console.print(Panel(
273
- Text.from_markup(
274
- "This provider requires both an Access Key ID and a Secret Access Key.\n"
275
- f"The key you entered will be saved as [bold yellow]{api_var_base}_1[/bold yellow].\n"
276
- "Please manually add the [bold cyan]AWS_SECRET_ACCESS_KEY_1[/bold cyan] to your .env file."
277
- ),
278
- title="[bold yellow]Additional Step Required[/bold yellow]",
279
- border_style="yellow"
280
- ))
 
 
281
 
282
  key_index = 1
283
  while True:
284
  key_name = f"{api_var_base}_{key_index}"
285
  if ENV_FILE.is_file():
286
- with open(ENV_FILE, "r") as f:
287
  if not any(line.startswith(f"{key_name}=") for line in f):
288
  break
289
  else:
290
  break
291
  key_index += 1
292
-
293
  key_name = f"{api_var_base}_{key_index}"
294
  set_key(str(ENV_FILE), key_name, api_key)
295
-
296
- success_text = Text.from_markup(f"Successfully added {display_name} API key as [bold yellow]'{key_name}'[/bold yellow].")
 
 
297
  console.print(Panel(success_text, style="bold green", title="Success"))
298
 
299
  else:
300
  console.print("[bold red]Invalid choice. Please try again.[/bold red]")
301
  except ValueError:
302
- console.print("[bold red]Invalid input. Please enter a number or 'b'.[/bold red]")
 
 
 
303
 
304
  async def setup_new_credential(provider_name: str):
305
  """
306
  Interactively sets up a new OAuth credential for a given provider.
 
 
307
  """
308
  try:
309
  provider_factory, _ = _ensure_providers_loaded()
@@ -315,668 +315,602 @@ async def setup_new_credential(provider_name: str):
315
  "gemini_cli": "Gemini CLI (OAuth)",
316
  "qwen_code": "Qwen Code (OAuth - also supports API keys)",
317
  "iflow": "iFlow (OAuth - also supports API keys)",
318
- "antigravity": "Antigravity (OAuth)"
319
  }
320
- display_name = oauth_friendly_names.get(provider_name, provider_name.replace('_', ' ').title())
321
-
322
- # Pass provider metadata to auth classes for better display
323
- temp_creds = {
324
- "_proxy_metadata": {
325
- "provider_name": provider_name,
326
- "display_name": display_name
327
- }
328
- }
329
- initialized_creds = await auth_instance.initialize_token(temp_creds)
330
-
331
- user_info = await auth_instance.get_user_info(initialized_creds)
332
- email = user_info.get("email")
333
 
334
- if not email:
335
- console.print(Panel(f"Could not retrieve a unique identifier for {provider_name}. Aborting.", style="bold red", title="Error"))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
336
  return
337
 
338
- for cred_file in OAUTH_BASE_DIR.glob(f"{provider_name}_oauth_*.json"):
339
- with open(cred_file, 'r') as f:
340
- existing_creds = json.load(f)
341
-
342
- metadata = existing_creds.get("_proxy_metadata", {})
343
- if metadata.get("email") == email:
344
- warning_text = Text.from_markup(f"Found existing credential for [bold cyan]'{email}'[/bold cyan] at [bold yellow]'{cred_file.name}'[/bold yellow]. Overwriting...")
345
- console.print(Panel(warning_text, style="bold yellow", title="Updating Credential"))
 
 
 
346
 
347
- # Overwrite the existing file in-place
348
- with open(cred_file, 'w') as f:
349
- json.dump(initialized_creds, f, indent=2)
 
 
350
 
351
- success_text = Text.from_markup(f"Successfully updated credential at [bold yellow]'{cred_file.name}'[/bold yellow] for user [bold cyan]'{email}'[/bold cyan].")
352
- console.print(Panel(success_text, style="bold green", title="Success"))
353
- return
354
-
355
- existing_files = list(OAUTH_BASE_DIR.glob(f"{provider_name}_oauth_*.json"))
356
- next_num = 1
357
- if existing_files:
358
- nums = [int(re.search(r'_(\d+)\.json$', f.name).group(1)) for f in existing_files if re.search(r'_(\d+)\.json$', f.name)]
359
- if nums:
360
- next_num = max(nums) + 1
361
-
362
- new_filename = f"{provider_name}_oauth_{next_num}.json"
363
- new_filepath = OAUTH_BASE_DIR / new_filename
364
-
365
- with open(new_filepath, 'w') as f:
366
- json.dump(initialized_creds, f, indent=2)
367
-
368
- success_text = Text.from_markup(f"Successfully created new credential at [bold yellow]'{new_filepath.name}'[/bold yellow] for user [bold cyan]'{email}'[/bold cyan].")
369
  console.print(Panel(success_text, style="bold green", title="Success"))
370
 
371
  except Exception as e:
372
- console.print(Panel(f"An error occurred during setup for {provider_name}: {e}", style="bold red", title="Error"))
 
 
 
 
 
 
373
 
374
 
375
  async def export_gemini_cli_to_env():
376
  """
377
  Export a Gemini CLI credential JSON file to .env format.
378
- Uses numbered format (GEMINI_CLI_1_*, GEMINI_CLI_2_*) for multiple credential support.
379
  """
380
- console.print(Panel("[bold cyan]Export Gemini CLI Credential to .env[/bold cyan]", expand=False))
 
 
 
 
 
 
 
 
 
381
 
382
- # Find all gemini_cli credentials
383
- gemini_cli_files = sorted(list(OAUTH_BASE_DIR.glob("gemini_cli_oauth_*.json")))
384
 
385
- if not gemini_cli_files:
386
- console.print(Panel("No Gemini CLI credentials found. Please add one first using 'Add OAuth Credential'.",
387
- style="bold red", title="No Credentials"))
 
 
 
 
 
388
  return
389
 
390
  # Display available credentials
391
  cred_text = Text()
392
- for i, cred_file in enumerate(gemini_cli_files):
393
- try:
394
- with open(cred_file, 'r') as f:
395
- creds = json.load(f)
396
- email = creds.get("_proxy_metadata", {}).get("email", "unknown")
397
- cred_text.append(f" {i + 1}. {cred_file.name} ({email})\n")
398
- except Exception as e:
399
- cred_text.append(f" {i + 1}. {cred_file.name} (error reading: {e})\n")
400
 
401
- console.print(Panel(cred_text, title="Available Gemini CLI Credentials", style="bold blue"))
 
 
402
 
403
  choice = Prompt.ask(
404
- Text.from_markup("[bold]Please select a credential to export or type [red]'b'[/red] to go back[/bold]"),
405
- choices=[str(i + 1) for i in range(len(gemini_cli_files))] + ["b"],
406
- show_choices=False
 
 
407
  )
408
 
409
- if choice.lower() == 'b':
410
  return
411
 
412
  try:
413
  choice_index = int(choice) - 1
414
- if 0 <= choice_index < len(gemini_cli_files):
415
- cred_file = gemini_cli_files[choice_index]
416
-
417
- # Load the credential
418
- with open(cred_file, 'r') as f:
419
- creds = json.load(f)
420
 
421
- # Extract metadata
422
- email = creds.get("_proxy_metadata", {}).get("email", "unknown")
423
- project_id = creds.get("_proxy_metadata", {}).get("project_id", "")
424
- tier = creds.get("_proxy_metadata", {}).get("tier", "")
425
-
426
- # Get credential number from filename
427
- cred_number = _get_credential_number_from_filename(cred_file.name)
428
-
429
- # Generate .env file name with credential number
430
- safe_email = email.replace("@", "_at_").replace(".", "_")
431
- env_filename = f"gemini_cli_{cred_number}_{safe_email}.env"
432
- env_filepath = OAUTH_BASE_DIR / env_filename
433
-
434
- # Build extra fields
435
- extra_fields = {}
436
- if project_id:
437
- extra_fields["PROJECT_ID"] = project_id
438
- if tier:
439
- extra_fields["TIER"] = tier
440
-
441
- # Build .env content using helper
442
- env_lines, numbered_prefix = _build_env_export_content(
443
- provider_prefix="GEMINI_CLI",
444
- cred_number=cred_number,
445
- creds=creds,
446
- email=email,
447
- extra_fields=extra_fields,
448
- include_client_creds=True
449
  )
450
 
451
- # Write to .env file
452
- with open(env_filepath, 'w') as f:
453
- f.write('\n'.join(env_lines))
454
-
455
- success_text = Text.from_markup(
456
- f"Successfully exported credential to [bold yellow]'{env_filepath}'[/bold yellow]\n\n"
457
- f"[bold]Environment variable prefix:[/bold] [cyan]{numbered_prefix}_*[/cyan]\n\n"
458
- f"[bold]To use this credential:[/bold]\n"
459
- f"1. Copy the contents to your main .env file, OR\n"
460
- f"2. Source it: [bold cyan]source {env_filepath.name}[/bold cyan] (Linux/Mac)\n"
461
- f"3. Or on Windows: [bold cyan]Get-Content {env_filepath.name} | ForEach-Object {{ $_ -replace '^([^#].*)$', 'set $1' }} | cmd[/bold cyan]\n\n"
462
- f"[bold]To combine multiple credentials:[/bold]\n"
463
- f"Copy lines from multiple .env files into one file.\n"
464
- f"Each credential uses a unique number ({numbered_prefix}_*)."
465
- )
466
- console.print(Panel(success_text, style="bold green", title="Success"))
 
 
 
 
467
  else:
468
  console.print("[bold red]Invalid choice. Please try again.[/bold red]")
469
  except ValueError:
470
- console.print("[bold red]Invalid input. Please enter a number or 'b'.[/bold red]")
 
 
471
  except Exception as e:
472
- console.print(Panel(f"An error occurred during export: {e}", style="bold red", title="Error"))
 
 
 
 
473
 
474
 
475
  async def export_qwen_code_to_env():
476
  """
477
  Export a Qwen Code credential JSON file to .env format.
478
- Generates one .env file per credential.
479
  """
480
- console.print(Panel("[bold cyan]Export Qwen Code Credential to .env[/bold cyan]", expand=False))
 
 
 
 
481
 
482
- # Find all qwen_code credentials
483
- qwen_code_files = list(OAUTH_BASE_DIR.glob("qwen_code_oauth_*.json"))
 
 
484
 
485
- if not qwen_code_files:
486
- console.print(Panel("No Qwen Code credentials found. Please add one first using 'Add OAuth Credential'.",
487
- style="bold red", title="No Credentials"))
 
 
 
 
 
 
 
 
488
  return
489
 
490
  # Display available credentials
491
  cred_text = Text()
492
- for i, cred_file in enumerate(qwen_code_files):
493
- try:
494
- with open(cred_file, 'r') as f:
495
- creds = json.load(f)
496
- email = creds.get("_proxy_metadata", {}).get("email", "unknown")
497
- cred_text.append(f" {i + 1}. {cred_file.name} ({email})\n")
498
- except Exception as e:
499
- cred_text.append(f" {i + 1}. {cred_file.name} (error reading: {e})\n")
500
 
501
- console.print(Panel(cred_text, title="Available Qwen Code Credentials", style="bold blue"))
 
 
502
 
503
  choice = Prompt.ask(
504
- Text.from_markup("[bold]Please select a credential to export or type [red]'b'[/red] to go back[/bold]"),
505
- choices=[str(i + 1) for i in range(len(qwen_code_files))] + ["b"],
506
- show_choices=False
 
 
507
  )
508
 
509
- if choice.lower() == 'b':
510
  return
511
 
512
  try:
513
  choice_index = int(choice) - 1
514
- if 0 <= choice_index < len(qwen_code_files):
515
- cred_file = qwen_code_files[choice_index]
516
-
517
- # Load the credential
518
- with open(cred_file, 'r') as f:
519
- creds = json.load(f)
520
 
521
- # Extract metadata
522
- email = creds.get("_proxy_metadata", {}).get("email", "unknown")
523
-
524
- # Get credential number from filename
525
- cred_number = _get_credential_number_from_filename(cred_file.name)
526
-
527
- # Generate .env file name with credential number
528
- safe_email = email.replace("@", "_at_").replace(".", "_")
529
- env_filename = f"qwen_code_{cred_number}_{safe_email}.env"
530
- env_filepath = OAUTH_BASE_DIR / env_filename
531
-
532
- # Use numbered format: QWEN_CODE_N_*
533
- numbered_prefix = f"QWEN_CODE_{cred_number}"
534
-
535
- # Build .env content (Qwen has different structure)
536
- env_lines = [
537
- f"# QWEN_CODE Credential #{cred_number} for: {email}",
538
- f"# Generated at: {time.strftime('%Y-%m-%d %H:%M:%S')}",
539
- f"# ",
540
- f"# To combine multiple credentials into one .env file, copy these lines",
541
- f"# and ensure each credential has a unique number (1, 2, 3, etc.)",
542
- "",
543
- f"{numbered_prefix}_ACCESS_TOKEN={creds.get('access_token', '')}",
544
- f"{numbered_prefix}_REFRESH_TOKEN={creds.get('refresh_token', '')}",
545
- f"{numbered_prefix}_EXPIRY_DATE={creds.get('expiry_date', 0)}",
546
- f"{numbered_prefix}_RESOURCE_URL={creds.get('resource_url', 'https://portal.qwen.ai/v1')}",
547
- f"{numbered_prefix}_EMAIL={email}",
548
- ]
549
-
550
- # Write to .env file
551
- with open(env_filepath, 'w') as f:
552
- f.write('\n'.join(env_lines))
553
-
554
- success_text = Text.from_markup(
555
- f"Successfully exported credential to [bold yellow]'{env_filepath}'[/bold yellow]\n\n"
556
- f"[bold]Environment variable prefix:[/bold] [cyan]{numbered_prefix}_*[/cyan]\n\n"
557
- f"[bold]To use this credential:[/bold]\n"
558
- f"1. Copy the contents to your main .env file, OR\n"
559
- f"2. Source it: [bold cyan]source {env_filepath.name}[/bold cyan] (Linux/Mac)\n\n"
560
- f"[bold]To combine multiple credentials:[/bold]\n"
561
- f"Copy lines from multiple .env files into one file.\n"
562
- f"Each credential uses a unique number ({numbered_prefix}_*)."
563
  )
564
- console.print(Panel(success_text, style="bold green", title="Success"))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
565
  else:
566
  console.print("[bold red]Invalid choice. Please try again.[/bold red]")
567
  except ValueError:
568
- console.print("[bold red]Invalid input. Please enter a number or 'b'.[/bold red]")
 
 
569
  except Exception as e:
570
- console.print(Panel(f"An error occurred during export: {e}", style="bold red", title="Error"))
 
 
 
 
571
 
572
 
573
  async def export_iflow_to_env():
574
  """
575
  Export an iFlow credential JSON file to .env format.
576
- Uses numbered format (IFLOW_1_*, IFLOW_2_*) for multiple credential support.
577
  """
578
- console.print(Panel("[bold cyan]Export iFlow Credential to .env[/bold cyan]", expand=False))
 
 
 
 
 
 
 
579
 
580
- # Find all iflow credentials
581
- iflow_files = sorted(list(OAUTH_BASE_DIR.glob("iflow_oauth_*.json")))
582
 
583
- if not iflow_files:
584
- console.print(Panel("No iFlow credentials found. Please add one first using 'Add OAuth Credential'.",
585
- style="bold red", title="No Credentials"))
 
 
 
 
 
586
  return
587
 
588
  # Display available credentials
589
  cred_text = Text()
590
- for i, cred_file in enumerate(iflow_files):
591
- try:
592
- with open(cred_file, 'r') as f:
593
- creds = json.load(f)
594
- email = creds.get("_proxy_metadata", {}).get("email", "unknown")
595
- cred_text.append(f" {i + 1}. {cred_file.name} ({email})\n")
596
- except Exception as e:
597
- cred_text.append(f" {i + 1}. {cred_file.name} (error reading: {e})\n")
598
 
599
- console.print(Panel(cred_text, title="Available iFlow Credentials", style="bold blue"))
 
 
600
 
601
  choice = Prompt.ask(
602
- Text.from_markup("[bold]Please select a credential to export or type [red]'b'[/red] to go back[/bold]"),
603
- choices=[str(i + 1) for i in range(len(iflow_files))] + ["b"],
604
- show_choices=False
 
 
605
  )
606
 
607
- if choice.lower() == 'b':
608
  return
609
 
610
  try:
611
  choice_index = int(choice) - 1
612
- if 0 <= choice_index < len(iflow_files):
613
- cred_file = iflow_files[choice_index]
614
-
615
- # Load the credential
616
- with open(cred_file, 'r') as f:
617
- creds = json.load(f)
618
-
619
- # Extract metadata
620
- email = creds.get("_proxy_metadata", {}).get("email", "unknown")
621
-
622
- # Get credential number from filename
623
- cred_number = _get_credential_number_from_filename(cred_file.name)
624
-
625
- # Generate .env file name with credential number
626
- safe_email = email.replace("@", "_at_").replace(".", "_")
627
- env_filename = f"iflow_{cred_number}_{safe_email}.env"
628
- env_filepath = OAUTH_BASE_DIR / env_filename
629
-
630
- # Use numbered format: IFLOW_N_*
631
- numbered_prefix = f"IFLOW_{cred_number}"
632
-
633
- # Build .env content (iFlow has different structure with API key)
634
- env_lines = [
635
- f"# IFLOW Credential #{cred_number} for: {email}",
636
- f"# Generated at: {time.strftime('%Y-%m-%d %H:%M:%S')}",
637
- f"# ",
638
- f"# To combine multiple credentials into one .env file, copy these lines",
639
- f"# and ensure each credential has a unique number (1, 2, 3, etc.)",
640
- "",
641
- f"{numbered_prefix}_ACCESS_TOKEN={creds.get('access_token', '')}",
642
- f"{numbered_prefix}_REFRESH_TOKEN={creds.get('refresh_token', '')}",
643
- f"{numbered_prefix}_API_KEY={creds.get('api_key', '')}",
644
- f"{numbered_prefix}_EXPIRY_DATE={creds.get('expiry_date', '')}",
645
- f"{numbered_prefix}_EMAIL={email}",
646
- f"{numbered_prefix}_TOKEN_TYPE={creds.get('token_type', 'Bearer')}",
647
- f"{numbered_prefix}_SCOPE={creds.get('scope', 'read write')}",
648
- ]
649
-
650
- # Write to .env file
651
- with open(env_filepath, 'w') as f:
652
- f.write('\n'.join(env_lines))
653
 
654
- success_text = Text.from_markup(
655
- f"Successfully exported credential to [bold yellow]'{env_filepath}'[/bold yellow]\n\n"
656
- f"[bold]Environment variable prefix:[/bold] [cyan]{numbered_prefix}_*[/cyan]\n\n"
657
- f"[bold]To use this credential:[/bold]\n"
658
- f"1. Copy the contents to your main .env file, OR\n"
659
- f"2. Source it: [bold cyan]source {env_filepath.name}[/bold cyan] (Linux/Mac)\n\n"
660
- f"[bold]To combine multiple credentials:[/bold]\n"
661
- f"Copy lines from multiple .env files into one file.\n"
662
- f"Each credential uses a unique number ({numbered_prefix}_*)."
663
  )
664
- console.print(Panel(success_text, style="bold green", title="Success"))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
665
  else:
666
  console.print("[bold red]Invalid choice. Please try again.[/bold red]")
667
  except ValueError:
668
- console.print("[bold red]Invalid input. Please enter a number or 'b'.[/bold red]")
 
 
669
  except Exception as e:
670
- console.print(Panel(f"An error occurred during export: {e}", style="bold red", title="Error"))
 
 
 
 
671
 
672
 
673
  async def export_antigravity_to_env():
674
  """
675
  Export an Antigravity credential JSON file to .env format.
676
- Uses numbered format (ANTIGRAVITY_1_*, ANTIGRAVITY_2_*) for multiple credential support.
677
  """
678
- console.print(Panel("[bold cyan]Export Antigravity Credential to .env[/bold cyan]", expand=False))
 
 
 
 
 
 
 
 
 
679
 
680
- # Find all antigravity credentials
681
- antigravity_files = sorted(list(OAUTH_BASE_DIR.glob("antigravity_oauth_*.json")))
682
 
683
- if not antigravity_files:
684
- console.print(Panel("No Antigravity credentials found. Please add one first using 'Add OAuth Credential'.",
685
- style="bold red", title="No Credentials"))
 
 
 
 
 
686
  return
687
 
688
  # Display available credentials
689
  cred_text = Text()
690
- for i, cred_file in enumerate(antigravity_files):
691
- try:
692
- with open(cred_file, 'r') as f:
693
- creds = json.load(f)
694
- email = creds.get("_proxy_metadata", {}).get("email", "unknown")
695
- cred_text.append(f" {i + 1}. {cred_file.name} ({email})\n")
696
- except Exception as e:
697
- cred_text.append(f" {i + 1}. {cred_file.name} (error reading: {e})\n")
698
 
699
- console.print(Panel(cred_text, title="Available Antigravity Credentials", style="bold blue"))
 
 
700
 
701
  choice = Prompt.ask(
702
- Text.from_markup("[bold]Please select a credential to export or type [red]'b'[/red] to go back[/bold]"),
703
- choices=[str(i + 1) for i in range(len(antigravity_files))] + ["b"],
704
- show_choices=False
 
 
705
  )
706
 
707
- if choice.lower() == 'b':
708
  return
709
 
710
  try:
711
  choice_index = int(choice) - 1
712
- if 0 <= choice_index < len(antigravity_files):
713
- cred_file = antigravity_files[choice_index]
714
-
715
- # Load the credential
716
- with open(cred_file, 'r') as f:
717
- creds = json.load(f)
718
 
719
- # Extract metadata
720
- email = creds.get("_proxy_metadata", {}).get("email", "unknown")
721
-
722
- # Get credential number from filename
723
- cred_number = _get_credential_number_from_filename(cred_file.name)
724
-
725
- # Generate .env file name with credential number
726
- safe_email = email.replace("@", "_at_").replace(".", "_")
727
- env_filename = f"antigravity_{cred_number}_{safe_email}.env"
728
- env_filepath = OAUTH_BASE_DIR / env_filename
729
-
730
- # Build .env content using helper
731
- env_lines, numbered_prefix = _build_env_export_content(
732
- provider_prefix="ANTIGRAVITY",
733
- cred_number=cred_number,
734
- creds=creds,
735
- email=email,
736
- extra_fields=None,
737
- include_client_creds=True
738
  )
739
 
740
- # Write to .env file
741
- with open(env_filepath, 'w') as f:
742
- f.write('\n'.join(env_lines))
743
-
744
- success_text = Text.from_markup(
745
- f"Successfully exported credential to [bold yellow]'{env_filepath}'[/bold yellow]\n\n"
746
- f"[bold]Environment variable prefix:[/bold] [cyan]{numbered_prefix}_*[/cyan]\n\n"
747
- f"[bold]To use this credential:[/bold]\n"
748
- f"1. Copy the contents to your main .env file, OR\n"
749
- f"2. Source it: [bold cyan]source {env_filepath.name}[/bold cyan] (Linux/Mac)\n"
750
- f"3. Or on Windows: [bold cyan]Get-Content {env_filepath.name} | ForEach-Object {{ $_ -replace '^([^#].*)$', 'set $1' }} | cmd[/bold cyan]\n\n"
751
- f"[bold]To combine multiple credentials:[/bold]\n"
752
- f"Copy lines from multiple .env files into one file.\n"
753
- f"Each credential uses a unique number ({numbered_prefix}_*)."
754
- )
755
- console.print(Panel(success_text, style="bold green", title="Success"))
 
 
 
 
756
  else:
757
  console.print("[bold red]Invalid choice. Please try again.[/bold red]")
758
  except ValueError:
759
- console.print("[bold red]Invalid input. Please enter a number or 'b'.[/bold red]")
 
 
760
  except Exception as e:
761
- console.print(Panel(f"An error occurred during export: {e}", style="bold red", title="Error"))
762
-
763
-
764
- def _build_gemini_cli_env_lines(creds: dict, cred_number: int) -> list[str]:
765
- """Build .env lines for a Gemini CLI credential."""
766
- email = creds.get("_proxy_metadata", {}).get("email", "unknown")
767
- project_id = creds.get("_proxy_metadata", {}).get("project_id", "")
768
- tier = creds.get("_proxy_metadata", {}).get("tier", "")
769
-
770
- extra_fields = {}
771
- if project_id:
772
- extra_fields["PROJECT_ID"] = project_id
773
- if tier:
774
- extra_fields["TIER"] = tier
775
-
776
- env_lines, _ = _build_env_export_content(
777
- provider_prefix="GEMINI_CLI",
778
- cred_number=cred_number,
779
- creds=creds,
780
- email=email,
781
- extra_fields=extra_fields,
782
- include_client_creds=True
783
- )
784
- return env_lines
785
-
786
-
787
- def _build_qwen_code_env_lines(creds: dict, cred_number: int) -> list[str]:
788
- """Build .env lines for a Qwen Code credential."""
789
- email = creds.get("_proxy_metadata", {}).get("email", "unknown")
790
- numbered_prefix = f"QWEN_CODE_{cred_number}"
791
-
792
- env_lines = [
793
- f"# QWEN_CODE Credential #{cred_number} for: {email}",
794
- f"# Generated at: {time.strftime('%Y-%m-%d %H:%M:%S')}",
795
- "",
796
- f"{numbered_prefix}_ACCESS_TOKEN={creds.get('access_token', '')}",
797
- f"{numbered_prefix}_REFRESH_TOKEN={creds.get('refresh_token', '')}",
798
- f"{numbered_prefix}_EXPIRY_DATE={creds.get('expiry_date', 0)}",
799
- f"{numbered_prefix}_RESOURCE_URL={creds.get('resource_url', 'https://portal.qwen.ai/v1')}",
800
- f"{numbered_prefix}_EMAIL={email}",
801
- ]
802
- return env_lines
803
-
804
-
805
- def _build_iflow_env_lines(creds: dict, cred_number: int) -> list[str]:
806
- """Build .env lines for an iFlow credential."""
807
- email = creds.get("_proxy_metadata", {}).get("email", "unknown")
808
- numbered_prefix = f"IFLOW_{cred_number}"
809
-
810
- env_lines = [
811
- f"# IFLOW Credential #{cred_number} for: {email}",
812
- f"# Generated at: {time.strftime('%Y-%m-%d %H:%M:%S')}",
813
- "",
814
- f"{numbered_prefix}_ACCESS_TOKEN={creds.get('access_token', '')}",
815
- f"{numbered_prefix}_REFRESH_TOKEN={creds.get('refresh_token', '')}",
816
- f"{numbered_prefix}_API_KEY={creds.get('api_key', '')}",
817
- f"{numbered_prefix}_EXPIRY_DATE={creds.get('expiry_date', '')}",
818
- f"{numbered_prefix}_EMAIL={email}",
819
- f"{numbered_prefix}_TOKEN_TYPE={creds.get('token_type', 'Bearer')}",
820
- f"{numbered_prefix}_SCOPE={creds.get('scope', 'read write')}",
821
- ]
822
- return env_lines
823
-
824
-
825
- def _build_antigravity_env_lines(creds: dict, cred_number: int) -> list[str]:
826
- """Build .env lines for an Antigravity credential."""
827
- email = creds.get("_proxy_metadata", {}).get("email", "unknown")
828
-
829
- env_lines, _ = _build_env_export_content(
830
- provider_prefix="ANTIGRAVITY",
831
- cred_number=cred_number,
832
- creds=creds,
833
- email=email,
834
- extra_fields=None,
835
- include_client_creds=True
836
- )
837
- return env_lines
838
 
839
 
840
  async def export_all_provider_credentials(provider_name: str):
841
  """
842
  Export all credentials for a specific provider to individual .env files.
 
843
  """
844
- provider_config = {
845
- "gemini_cli": ("GEMINI_CLI", _build_gemini_cli_env_lines),
846
- "qwen_code": ("QWEN_CODE", _build_qwen_code_env_lines),
847
- "iflow": ("IFLOW", _build_iflow_env_lines),
848
- "antigravity": ("ANTIGRAVITY", _build_antigravity_env_lines),
849
- }
850
-
851
- if provider_name not in provider_config:
852
  console.print(f"[bold red]Unknown provider: {provider_name}[/bold red]")
853
  return
854
-
855
- prefix, build_func = provider_config[provider_name]
856
- display_name = prefix.replace("_", " ").title()
857
-
858
- console.print(Panel(f"[bold cyan]Export All {display_name} Credentials[/bold cyan]", expand=False))
859
-
860
- # Find all credentials for this provider
861
- cred_files = sorted(list(OAUTH_BASE_DIR.glob(f"{provider_name}_oauth_*.json")))
862
-
863
- if not cred_files:
864
- console.print(Panel(f"No {display_name} credentials found.", style="bold red", title="No Credentials"))
 
 
 
 
 
 
 
 
 
 
865
  return
866
-
867
  exported_count = 0
868
- for cred_file in cred_files:
869
  try:
870
- with open(cred_file, 'r') as f:
871
- creds = json.load(f)
872
-
873
- email = creds.get("_proxy_metadata", {}).get("email", "unknown")
874
- cred_number = _get_credential_number_from_filename(cred_file.name)
875
-
876
- # Generate .env file name
877
- safe_email = email.replace("@", "_at_").replace(".", "_")
878
- env_filename = f"{provider_name}_{cred_number}_{safe_email}.env"
879
- env_filepath = OAUTH_BASE_DIR / env_filename
880
-
881
- # Build and write .env content
882
- env_lines = build_func(creds, cred_number)
883
- with open(env_filepath, 'w') as f:
884
- f.write('\n'.join(env_lines))
885
-
886
- console.print(f" ✓ Exported [cyan]{cred_file.name}[/cyan] → [yellow]{env_filename}[/yellow]")
887
- exported_count += 1
888
-
889
  except Exception as e:
890
- console.print(f" ✗ Failed to export {cred_file.name}: {e}")
891
-
892
- console.print(Panel(
893
- f"Successfully exported {exported_count}/{len(cred_files)} {display_name} credentials to individual .env files.",
894
- style="bold green", title="Export Complete"
895
- ))
 
 
 
 
 
896
 
897
 
898
  async def combine_provider_credentials(provider_name: str):
899
  """
900
  Combine all credentials for a specific provider into a single .env file.
 
901
  """
902
- provider_config = {
903
- "gemini_cli": ("GEMINI_CLI", _build_gemini_cli_env_lines),
904
- "qwen_code": ("QWEN_CODE", _build_qwen_code_env_lines),
905
- "iflow": ("IFLOW", _build_iflow_env_lines),
906
- "antigravity": ("ANTIGRAVITY", _build_antigravity_env_lines),
907
- }
908
-
909
- if provider_name not in provider_config:
910
  console.print(f"[bold red]Unknown provider: {provider_name}[/bold red]")
911
  return
912
-
913
- prefix, build_func = provider_config[provider_name]
914
- display_name = prefix.replace("_", " ").title()
915
-
916
- console.print(Panel(f"[bold cyan]Combine All {display_name} Credentials[/bold cyan]", expand=False))
917
-
918
- # Find all credentials for this provider
919
- cred_files = sorted(list(OAUTH_BASE_DIR.glob(f"{provider_name}_oauth_*.json")))
920
-
921
- if not cred_files:
922
- console.print(Panel(f"No {display_name} credentials found.", style="bold red", title="No Credentials"))
 
 
 
 
 
 
 
 
 
 
923
  return
924
-
925
  combined_lines = [
926
  f"# Combined {display_name} Credentials",
927
  f"# Generated at: {time.strftime('%Y-%m-%d %H:%M:%S')}",
928
- f"# Total credentials: {len(cred_files)}",
929
  "#",
930
  "# Copy all lines below into your main .env file",
931
  "",
932
  ]
933
-
934
  combined_count = 0
935
- for cred_file in cred_files:
936
  try:
937
- with open(cred_file, 'r') as f:
 
938
  creds = json.load(f)
939
-
940
- cred_number = _get_credential_number_from_filename(cred_file.name)
941
- env_lines = build_func(creds, cred_number)
942
-
943
  combined_lines.extend(env_lines)
944
  combined_lines.append("") # Blank line between credentials
945
  combined_count += 1
946
-
947
  except Exception as e:
948
- console.print(f" ✗ Failed to process {cred_file.name}: {e}")
949
-
 
 
950
  # Write combined file
951
  combined_filename = f"{provider_name}_all_combined.env"
952
  combined_filepath = OAUTH_BASE_DIR / combined_filename
953
-
954
- with open(combined_filepath, 'w') as f:
955
- f.write('\n'.join(combined_lines))
956
-
957
- console.print(Panel(
958
- Text.from_markup(
959
- f"Successfully combined {combined_count} {display_name} credentials into:\n"
960
- f"[bold yellow]{combined_filepath}[/bold yellow]\n\n"
961
- f"[bold]To use:[/bold] Copy the contents into your main .env file."
962
- ),
963
- style="bold green", title="Combine Complete"
964
- ))
 
 
 
965
 
966
 
967
  async def combine_all_credentials():
968
  """
969
  Combine ALL credentials from ALL providers into a single .env file.
 
970
  """
971
- console.print(Panel("[bold cyan]Combine All Provider Credentials[/bold cyan]", expand=False))
972
-
973
- provider_config = {
974
- "gemini_cli": ("GEMINI_CLI", _build_gemini_cli_env_lines),
975
- "qwen_code": ("QWEN_CODE", _build_qwen_code_env_lines),
976
- "iflow": ("IFLOW", _build_iflow_env_lines),
977
- "antigravity": ("ANTIGRAVITY", _build_antigravity_env_lines),
978
- }
979
-
980
  combined_lines = [
981
  "# Combined All Provider Credentials",
982
  f"# Generated at: {time.strftime('%Y-%m-%d %H:%M:%S')}",
@@ -984,63 +918,83 @@ async def combine_all_credentials():
984
  "# Copy all lines below into your main .env file",
985
  "",
986
  ]
987
-
988
  total_count = 0
989
  provider_counts = {}
990
-
991
- for provider_name, (prefix, build_func) in provider_config.items():
992
- cred_files = sorted(list(OAUTH_BASE_DIR.glob(f"{provider_name}_oauth_*.json")))
993
-
994
- if not cred_files:
 
 
 
 
 
 
995
  continue
996
-
997
- display_name = prefix.replace("_", " ").title()
998
  combined_lines.append(f"# ===== {display_name} Credentials =====")
999
  combined_lines.append("")
1000
-
1001
  provider_count = 0
1002
- for cred_file in cred_files:
1003
  try:
1004
- with open(cred_file, 'r') as f:
 
1005
  creds = json.load(f)
1006
-
1007
- cred_number = _get_credential_number_from_filename(cred_file.name)
1008
- env_lines = build_func(creds, cred_number)
1009
-
1010
  combined_lines.extend(env_lines)
1011
  combined_lines.append("")
1012
  provider_count += 1
1013
  total_count += 1
1014
-
1015
  except Exception as e:
1016
- console.print(f" ✗ Failed to process {cred_file.name}: {e}")
1017
-
 
 
1018
  provider_counts[display_name] = provider_count
1019
-
1020
  if total_count == 0:
1021
- console.print(Panel("No credentials found to combine.", style="bold red", title="No Credentials"))
 
 
 
 
 
 
1022
  return
1023
-
1024
  # Write combined file
1025
  combined_filename = "all_providers_combined.env"
1026
  combined_filepath = OAUTH_BASE_DIR / combined_filename
1027
-
1028
- with open(combined_filepath, 'w') as f:
1029
- f.write('\n'.join(combined_lines))
1030
-
1031
  # Build summary
1032
- summary_lines = [f" • {name}: {count} credential(s)" for name, count in provider_counts.items()]
 
 
1033
  summary = "\n".join(summary_lines)
1034
-
1035
- console.print(Panel(
1036
- Text.from_markup(
1037
- f"Successfully combined {total_count} credentials from {len(provider_counts)} providers:\n"
1038
- f"{summary}\n\n"
1039
- f"[bold]Output file:[/bold] [yellow]{combined_filepath}[/yellow]\n\n"
1040
- f"[bold]To use:[/bold] Copy the contents into your main .env file."
1041
- ),
1042
- style="bold green", title="Combine Complete"
1043
- ))
 
 
 
1044
 
1045
 
1046
  async def export_credentials_submenu():
@@ -1049,40 +1003,65 @@ async def export_credentials_submenu():
1049
  """
1050
  while True:
1051
  clear_screen()
1052
- console.print(Panel("[bold cyan]Export Credentials to .env[/bold cyan]", title="--- API Key Proxy ---", expand=False))
1053
-
1054
- console.print(Panel(
1055
- Text.from_markup(
1056
- "[bold]Individual Exports:[/bold]\n"
1057
- "1. Export Gemini CLI credential\n"
1058
- "2. Export Qwen Code credential\n"
1059
- "3. Export iFlow credential\n"
1060
- "4. Export Antigravity credential\n"
1061
- "\n"
1062
- "[bold]Bulk Exports (per provider):[/bold]\n"
1063
- "5. Export ALL Gemini CLI credentials\n"
1064
- "6. Export ALL Qwen Code credentials\n"
1065
- "7. Export ALL iFlow credentials\n"
1066
- "8. Export ALL Antigravity credentials\n"
1067
- "\n"
1068
- "[bold]Combine Credentials:[/bold]\n"
1069
- "9. Combine all Gemini CLI into one file\n"
1070
- "10. Combine all Qwen Code into one file\n"
1071
- "11. Combine all iFlow into one file\n"
1072
- "12. Combine all Antigravity into one file\n"
1073
- "13. Combine ALL providers into one file"
1074
- ),
1075
- title="Choose export option",
1076
- style="bold blue"
1077
- ))
 
 
 
 
 
 
 
 
1078
 
1079
  export_choice = Prompt.ask(
1080
- Text.from_markup("[bold]Please select an option or type [red]'b'[/red] to go back[/bold]"),
1081
- choices=["1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "b"],
1082
- show_choices=False
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1083
  )
1084
 
1085
- if export_choice.lower() == 'b':
1086
  break
1087
 
1088
  # Individual exports
@@ -1146,39 +1125,53 @@ async def export_credentials_submenu():
1146
  async def main(clear_on_start=True):
1147
  """
1148
  An interactive CLI tool to add new credentials.
1149
-
1150
  Args:
1151
- clear_on_start: If False, skip initial screen clear (used when called from launcher
1152
  to preserve the loading screen)
1153
  """
1154
  ensure_env_defaults()
1155
-
1156
  # Only show header if we're clearing (standalone mode)
1157
  if clear_on_start:
1158
- console.print(Panel("[bold cyan]Interactive Credential Setup[/bold cyan]", title="--- API Key Proxy ---", expand=False))
1159
-
 
 
 
 
 
 
1160
  while True:
1161
  # Clear screen between menu selections for cleaner UX
1162
  clear_screen()
1163
- console.print(Panel("[bold cyan]Interactive Credential Setup[/bold cyan]", title="--- API Key Proxy ---", expand=False))
1164
-
1165
- console.print(Panel(
1166
- Text.from_markup(
1167
- "1. Add OAuth Credential\n"
1168
- "2. Add API Key\n"
1169
- "3. Export Credentials"
1170
- ),
1171
- title="Choose credential type",
1172
- style="bold blue"
1173
- ))
 
 
 
 
 
 
1174
 
1175
  setup_type = Prompt.ask(
1176
- Text.from_markup("[bold]Please select an option or type [red]'q'[/red] to quit[/bold]"),
 
 
1177
  choices=["1", "2", "3", "q"],
1178
- show_choices=False
1179
  )
1180
 
1181
- if setup_type.lower() == 'q':
1182
  break
1183
 
1184
  if setup_type == "1":
@@ -1190,69 +1183,88 @@ async def main(clear_on_start=True):
1190
  "iflow": "iFlow (OAuth - also supports API keys)",
1191
  "antigravity": "Antigravity (OAuth)",
1192
  }
1193
-
1194
  provider_text = Text()
1195
  for i, provider in enumerate(available_providers):
1196
- display_name = oauth_friendly_names.get(provider, provider.replace('_', ' ').title())
 
 
1197
  provider_text.append(f" {i + 1}. {display_name}\n")
1198
-
1199
- console.print(Panel(provider_text, title="Available Providers for OAuth", style="bold blue"))
 
 
 
 
 
 
1200
 
1201
  choice = Prompt.ask(
1202
- Text.from_markup("[bold]Please select a provider or type [red]'b'[/red] to go back[/bold]"),
 
 
1203
  choices=[str(i + 1) for i in range(len(available_providers))] + ["b"],
1204
- show_choices=False
1205
  )
1206
 
1207
- if choice.lower() == 'b':
1208
  continue
1209
-
1210
  try:
1211
  choice_index = int(choice) - 1
1212
  if 0 <= choice_index < len(available_providers):
1213
  provider_name = available_providers[choice_index]
1214
- display_name = oauth_friendly_names.get(provider_name, provider_name.replace('_', ' ').title())
1215
- console.print(f"\nStarting OAuth setup for [bold cyan]{display_name}[/bold cyan]...")
 
 
 
 
1216
  await setup_new_credential(provider_name)
1217
  # Don't clear after OAuth - user needs to see full flow
1218
  console.print("\n[dim]Press Enter to return to main menu...[/dim]")
1219
  input()
1220
  else:
1221
- console.print("[bold red]Invalid choice. Please try again.[/bold red]")
 
 
1222
  await asyncio.sleep(1.5)
1223
  except ValueError:
1224
- console.print("[bold red]Invalid input. Please enter a number or 'b'.[/bold red]")
 
 
1225
  await asyncio.sleep(1.5)
1226
 
1227
  elif setup_type == "2":
1228
  await setup_api_key()
1229
- #console.print("\n[dim]Press Enter to return to main menu...[/dim]")
1230
- #input()
1231
 
1232
  elif setup_type == "3":
1233
  await export_credentials_submenu()
1234
 
 
1235
  def run_credential_tool(from_launcher=False):
1236
  """
1237
  Entry point for credential tool.
1238
-
1239
  Args:
1240
  from_launcher: If True, skip loading screen (launcher already showed it)
1241
  """
1242
  # Check if we need to show loading screen
1243
  if not from_launcher:
1244
  # Standalone mode - show full loading UI
1245
- os.system('cls' if os.name == 'nt' else 'clear')
1246
-
1247
  _start_time = time.time()
1248
-
1249
  # Phase 1: Show initial message
1250
  print("━" * 70)
1251
  print("Interactive Credential Setup Tool")
1252
  print("GitHub: https://github.com/Mirrowel/LLM-API-Key-Proxy")
1253
  print("━" * 70)
1254
  print("Loading credential management components...")
1255
-
1256
  # Phase 2: Load dependencies with spinner
1257
  with console.status("Loading authentication providers...", spinner="dots"):
1258
  _ensure_providers_loaded()
@@ -1261,14 +1273,16 @@ def run_credential_tool(from_launcher=False):
1261
  with console.status("Initializing credential tool...", spinner="dots"):
1262
  time.sleep(0.2) # Brief pause for UI consistency
1263
  console.print("✓ Credential tool initialized")
1264
-
1265
  _elapsed = time.time() - _start_time
1266
  _, PROVIDER_PLUGINS = _ensure_providers_loaded()
1267
- print(f"✓ Tool ready in {_elapsed:.2f}s ({len(PROVIDER_PLUGINS)} providers available)")
1268
-
 
 
1269
  # Small delay to let user see the ready message
1270
  time.sleep(0.5)
1271
-
1272
  # Run the main async event loop
1273
  # If from launcher, don't clear screen at start to preserve loading messages
1274
  try:
 
3
  import asyncio
4
  import json
5
  import os
 
6
  import time
7
  from pathlib import Path
8
  from dotenv import set_key, get_key
9
 
10
+ # NOTE: Heavy imports (provider_factory, PROVIDER_PLUGINS) are deferred
11
  # to avoid 6-7 second delay before showing loading screen
12
  from rich.console import Console
13
  from rich.panel import Panel
 
25
  _provider_factory = None
26
  _provider_plugins = None
27
 
28
+
29
  def _ensure_providers_loaded():
30
  """Lazy load provider modules only when needed"""
31
  global _provider_factory, _provider_plugins
32
  if _provider_factory is None:
33
  from . import provider_factory as pf
34
  from .providers import PROVIDER_PLUGINS as pp
35
+
36
  _provider_factory = pf
37
  _provider_plugins = pp
38
  return _provider_factory, _provider_plugins
 
40
 
41
  def clear_screen():
42
  """
43
+ Cross-platform terminal clear that works robustly on both
44
  classic Windows conhost and modern terminals (Windows Terminal, Linux, Mac).
45
+
46
  Uses native OS commands instead of ANSI escape sequences:
47
  - Windows (conhost & Windows Terminal): cls
48
  - Unix-like systems (Linux, Mac): clear
49
  """
50
+ os.system("cls" if os.name == "nt" else "clear")
51
 
52
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
53
  def ensure_env_defaults():
54
  """
55
  Ensures the .env file exists and contains essential default values like PROXY_API_KEY.
56
  """
57
  if not ENV_FILE.is_file():
58
  ENV_FILE.touch()
59
+ console.print(
60
+ f"Creating a new [bold yellow]{ENV_FILE.name}[/bold yellow] file..."
61
+ )
62
 
63
  # Check for PROXY_API_KEY, similar to setup_env.bat
64
  if get_key(str(ENV_FILE), "PROXY_API_KEY") is None:
65
  default_key = "VerysecretKey"
66
+ console.print(
67
+ f"Adding default [bold cyan]PROXY_API_KEY[/bold cyan] to [bold yellow]{ENV_FILE.name}[/bold yellow]..."
68
+ )
69
  set_key(str(ENV_FILE), "PROXY_API_KEY", default_key)
70
 
71
+
72
  async def setup_api_key():
73
  """
74
  Interactively sets up a new API key for a provider.
 
80
 
81
  # Verified list of LiteLLM providers with their friendly names and API key variables
82
  LITELLM_PROVIDERS = {
83
+ "OpenAI": "OPENAI_API_KEY",
84
+ "Anthropic": "ANTHROPIC_API_KEY",
85
+ "Google AI Studio (Gemini)": "GEMINI_API_KEY",
86
+ "Azure OpenAI": "AZURE_API_KEY",
87
+ "Vertex AI": "GOOGLE_API_KEY",
88
+ "AWS Bedrock": "AWS_ACCESS_KEY_ID",
89
+ "Cohere": "COHERE_API_KEY",
90
+ "Chutes": "CHUTES_API_KEY",
91
  "Mistral AI": "MISTRAL_API_KEY",
92
+ "Codestral (Mistral)": "CODESTRAL_API_KEY",
93
+ "Groq": "GROQ_API_KEY",
94
+ "Perplexity": "PERPLEXITYAI_API_KEY",
95
+ "xAI": "XAI_API_KEY",
96
+ "Together AI": "TOGETHERAI_API_KEY",
97
+ "Fireworks AI": "FIREWORKS_AI_API_KEY",
98
+ "Replicate": "REPLICATE_API_KEY",
99
+ "Hugging Face": "HUGGINGFACE_API_KEY",
100
+ "Anyscale": "ANYSCALE_API_KEY",
101
+ "NVIDIA NIM": "NVIDIA_NIM_API_KEY",
102
+ "Deepseek": "DEEPSEEK_API_KEY",
103
+ "AI21": "AI21_API_KEY",
104
+ "Cerebras": "CEREBRAS_API_KEY",
105
+ "Moonshot": "MOONSHOT_API_KEY",
106
+ "Ollama": "OLLAMA_API_KEY",
107
+ "Xinference": "XINFERENCE_API_KEY",
108
+ "Infinity": "INFINITY_API_KEY",
109
+ "OpenRouter": "OPENROUTER_API_KEY",
110
+ "Deepinfra": "DEEPINFRA_API_KEY",
111
+ "Cloudflare": "CLOUDFLARE_API_KEY",
112
+ "Baseten": "BASETEN_API_KEY",
113
+ "Modal": "MODAL_API_KEY",
114
+ "Databricks": "DATABRICKS_API_KEY",
115
+ "AWS SageMaker": "AWS_ACCESS_KEY_ID",
116
+ "IBM watsonx.ai": "WATSONX_APIKEY",
117
+ "Predibase": "PREDIBASE_API_KEY",
118
+ "Clarifai": "CLARIFAI_API_KEY",
119
+ "NLP Cloud": "NLP_CLOUD_API_KEY",
120
+ "Voyage AI": "VOYAGE_API_KEY",
121
+ "Jina AI": "JINA_API_KEY",
122
+ "Hyperbolic": "HYPERBOLIC_API_KEY",
123
+ "Morph": "MORPH_API_KEY",
124
+ "Lambda AI": "LAMBDA_API_KEY",
125
+ "Novita AI": "NOVITA_API_KEY",
126
+ "Aleph Alpha": "ALEPH_ALPHA_API_KEY",
127
+ "SambaNova": "SAMBANOVA_API_KEY",
128
+ "FriendliAI": "FRIENDLI_TOKEN",
129
+ "Galadriel": "GALADRIEL_API_KEY",
130
+ "CompactifAI": "COMPACTIFAI_API_KEY",
131
+ "Lemonade": "LEMONADE_API_KEY",
132
+ "GradientAI": "GRADIENTAI_API_KEY",
133
+ "Featherless AI": "FEATHERLESS_AI_API_KEY",
134
+ "Nebius AI Studio": "NEBIUS_API_KEY",
135
+ "Dashscope (Qwen)": "DASHSCOPE_API_KEY",
136
+ "Bytez": "BYTEZ_API_KEY",
137
+ "Oracle OCI": "OCI_API_KEY",
138
+ "DataRobot": "DATAROBOT_API_KEY",
139
+ "OVHCloud": "OVHCLOUD_API_KEY",
140
+ "Volcengine": "VOLCENGINE_API_KEY",
141
+ "Snowflake": "SNOWFLAKE_API_KEY",
142
+ "Nscale": "NSCALE_API_KEY",
143
+ "Recraft": "RECRAFT_API_KEY",
144
+ "v0": "V0_API_KEY",
145
+ "Vercel": "VERCEL_AI_GATEWAY_API_KEY",
146
+ "Topaz": "TOPAZ_API_KEY",
147
+ "ElevenLabs": "ELEVENLABS_API_KEY",
148
  "Deepgram": "DEEPGRAM_API_KEY",
149
+ "GitHub Models": "GITHUB_TOKEN",
150
+ "GitHub Copilot": "GITHUB_COPILOT_API_KEY",
151
  }
152
 
153
  # Discover custom providers and add them to the list
 
155
  # qwen_code API key support is a fallback
156
  # iflow API key support is a feature
157
  _, PROVIDER_PLUGINS = _ensure_providers_loaded()
158
+
159
  # Build a set of environment variables already in LITELLM_PROVIDERS
160
  # to avoid duplicates based on the actual API key names
161
  litellm_env_vars = set(LITELLM_PROVIDERS.values())
162
+
163
  # Providers to exclude from API key list
164
  exclude_providers = {
165
+ "gemini_cli", # OAuth-only
166
+ "antigravity", # OAuth-only
167
+ "qwen_code", # API key is fallback, OAuth is primary - don't advertise
168
+ "openai_compatible", # Base class, not a real provider
169
  }
170
+
171
  discovered_providers = {}
172
  for provider_key in PROVIDER_PLUGINS.keys():
173
  if provider_key in exclude_providers:
174
  continue
175
+
176
  # Create environment variable name
177
  env_var = provider_key.upper() + "_API_KEY"
178
+
179
  # Check if this env var already exists in LITELLM_PROVIDERS
180
  # This catches duplicates like GEMINI_API_KEY, MISTRAL_API_KEY, etc.
181
  if env_var in litellm_env_vars:
182
  # Already in LITELLM_PROVIDERS with better name, skip this one
183
  continue
184
+
185
  # Create display name for this custom provider
186
+ display_name = provider_key.replace("_", " ").title()
187
  discovered_providers[display_name] = env_var
188
+
189
  # LITELLM_PROVIDERS takes precedence (comes first in merge)
190
  combined_providers = {**LITELLM_PROVIDERS, **discovered_providers}
191
  provider_display_list = sorted(combined_providers.keys())
 
200
  else:
201
  provider_text.append(f" {i + 1}. {provider_name}\n")
202
 
203
+ console.print(
204
+ Panel(provider_text, title="Available Providers for API Key", style="bold blue")
205
+ )
206
 
207
  choice = Prompt.ask(
208
+ Text.from_markup(
209
+ "[bold]Please select a provider or type [red]'b'[/red] to go back[/bold]"
210
+ ),
211
  choices=[str(i + 1) for i in range(len(provider_display_list))] + ["b"],
212
+ show_choices=False,
213
  )
214
 
215
+ if choice.lower() == "b":
216
  return
217
 
218
  try:
 
229
  for line in f:
230
  line = line.strip()
231
  if line.startswith(api_var_base) and "=" in line:
232
+ existing_key_name, _, existing_key_value = line.partition(
233
+ "="
234
+ )
235
  if existing_key_value == api_key:
236
+ warning_text = Text.from_markup(
237
+ f"This API key already exists as [bold yellow]'{existing_key_name}'[/bold yellow]. Overwriting..."
238
+ )
239
+ console.print(
240
+ Panel(
241
+ warning_text,
242
+ style="bold yellow",
243
+ title="Updating API Key",
244
+ )
245
+ )
246
 
247
  set_key(str(ENV_FILE), existing_key_name, api_key)
248
 
249
+ success_text = Text.from_markup(
250
+ f"Successfully updated existing key [bold yellow]'{existing_key_name}'[/bold yellow]."
251
+ )
252
+ console.print(
253
+ Panel(
254
+ success_text,
255
+ style="bold green",
256
+ title="Success",
257
+ )
258
+ )
259
  return
260
 
261
  # Special handling for AWS
262
  if display_name in ["AWS Bedrock", "AWS SageMaker"]:
263
+ console.print(
264
+ Panel(
265
+ Text.from_markup(
266
+ "This provider requires both an Access Key ID and a Secret Access Key.\n"
267
+ f"The key you entered will be saved as [bold yellow]{api_var_base}_1[/bold yellow].\n"
268
+ "Please manually add the [bold cyan]AWS_SECRET_ACCESS_KEY_1[/bold cyan] to your .env file."
269
+ ),
270
+ title="[bold yellow]Additional Step Required[/bold yellow]",
271
+ border_style="yellow",
272
+ )
273
+ )
274
 
275
  key_index = 1
276
  while True:
277
  key_name = f"{api_var_base}_{key_index}"
278
  if ENV_FILE.is_file():
279
+ with open(ENV_FILE, "r") as f:
280
  if not any(line.startswith(f"{key_name}=") for line in f):
281
  break
282
  else:
283
  break
284
  key_index += 1
285
+
286
  key_name = f"{api_var_base}_{key_index}"
287
  set_key(str(ENV_FILE), key_name, api_key)
288
+
289
+ success_text = Text.from_markup(
290
+ f"Successfully added {display_name} API key as [bold yellow]'{key_name}'[/bold yellow]."
291
+ )
292
  console.print(Panel(success_text, style="bold green", title="Success"))
293
 
294
  else:
295
  console.print("[bold red]Invalid choice. Please try again.[/bold red]")
296
  except ValueError:
297
+ console.print(
298
+ "[bold red]Invalid input. Please enter a number or 'b'.[/bold red]"
299
+ )
300
+
301
 
302
  async def setup_new_credential(provider_name: str):
303
  """
304
  Interactively sets up a new OAuth credential for a given provider.
305
+
306
+ Delegates all credential management logic to the auth class's setup_credential() method.
307
  """
308
  try:
309
  provider_factory, _ = _ensure_providers_loaded()
 
315
  "gemini_cli": "Gemini CLI (OAuth)",
316
  "qwen_code": "Qwen Code (OAuth - also supports API keys)",
317
  "iflow": "iFlow (OAuth - also supports API keys)",
318
+ "antigravity": "Antigravity (OAuth)",
319
  }
320
+ display_name = oauth_friendly_names.get(
321
+ provider_name, provider_name.replace("_", " ").title()
322
+ )
 
 
 
 
 
 
 
 
 
 
323
 
324
+ # Call the auth class's setup_credential() method which handles the entire flow:
325
+ # - OAuth authentication
326
+ # - Email extraction for deduplication
327
+ # - File path determination (new or existing)
328
+ # - Credential file saving
329
+ # - Post-auth discovery (tier/project for Google OAuth providers)
330
+ result = await auth_instance.setup_credential(OAUTH_BASE_DIR)
331
+
332
+ if not result.success:
333
+ console.print(
334
+ Panel(
335
+ f"Credential setup failed: {result.error}",
336
+ style="bold red",
337
+ title="Error",
338
+ )
339
+ )
340
  return
341
 
342
+ # Display success message with details
343
+ if result.is_update:
344
+ success_text = Text.from_markup(
345
+ f"Successfully updated credential at [bold yellow]'{Path(result.file_path).name}'[/bold yellow] "
346
+ f"for user [bold cyan]'{result.email}'[/bold cyan]."
347
+ )
348
+ else:
349
+ success_text = Text.from_markup(
350
+ f"Successfully created new credential at [bold yellow]'{Path(result.file_path).name}'[/bold yellow] "
351
+ f"for user [bold cyan]'{result.email}'[/bold cyan]."
352
+ )
353
 
354
+ # Add tier/project info if available (Google OAuth providers)
355
+ if hasattr(result, "tier") and result.tier:
356
+ success_text.append(f"\nTier: {result.tier}")
357
+ if hasattr(result, "project_id") and result.project_id:
358
+ success_text.append(f"\nProject: {result.project_id}")
359
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
360
  console.print(Panel(success_text, style="bold green", title="Success"))
361
 
362
  except Exception as e:
363
+ console.print(
364
+ Panel(
365
+ f"An error occurred during setup for {provider_name}: {e}",
366
+ style="bold red",
367
+ title="Error",
368
+ )
369
+ )
370
 
371
 
372
  async def export_gemini_cli_to_env():
373
  """
374
  Export a Gemini CLI credential JSON file to .env format.
375
+ Uses the auth class's build_env_lines() and list_credentials() methods.
376
  """
377
+ console.print(
378
+ Panel(
379
+ "[bold cyan]Export Gemini CLI Credential to .env[/bold cyan]", expand=False
380
+ )
381
+ )
382
+
383
+ # Get auth instance for this provider
384
+ provider_factory, _ = _ensure_providers_loaded()
385
+ auth_class = provider_factory.get_provider_auth_class("gemini_cli")
386
+ auth_instance = auth_class()
387
 
388
+ # List available credentials using auth class
389
+ credentials = auth_instance.list_credentials(OAUTH_BASE_DIR)
390
 
391
+ if not credentials:
392
+ console.print(
393
+ Panel(
394
+ "No Gemini CLI credentials found. Please add one first using 'Add OAuth Credential'.",
395
+ style="bold red",
396
+ title="No Credentials",
397
+ )
398
+ )
399
  return
400
 
401
  # Display available credentials
402
  cred_text = Text()
403
+ for i, cred_info in enumerate(credentials):
404
+ cred_text.append(
405
+ f" {i + 1}. {Path(cred_info['file_path']).name} ({cred_info['email']})\n"
406
+ )
 
 
 
 
407
 
408
+ console.print(
409
+ Panel(cred_text, title="Available Gemini CLI Credentials", style="bold blue")
410
+ )
411
 
412
  choice = Prompt.ask(
413
+ Text.from_markup(
414
+ "[bold]Please select a credential to export or type [red]'b'[/red] to go back[/bold]"
415
+ ),
416
+ choices=[str(i + 1) for i in range(len(credentials))] + ["b"],
417
+ show_choices=False,
418
  )
419
 
420
+ if choice.lower() == "b":
421
  return
422
 
423
  try:
424
  choice_index = int(choice) - 1
425
+ if 0 <= choice_index < len(credentials):
426
+ cred_info = credentials[choice_index]
 
 
 
 
427
 
428
+ # Use auth class to export
429
+ env_path = auth_instance.export_credential_to_env(
430
+ cred_info["file_path"], OAUTH_BASE_DIR
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
431
  )
432
 
433
+ if env_path:
434
+ numbered_prefix = f"GEMINI_CLI_{cred_info['number']}"
435
+ success_text = Text.from_markup(
436
+ f"Successfully exported credential to [bold yellow]'{Path(env_path).name}'[/bold yellow]\n\n"
437
+ f"[bold]Environment variable prefix:[/bold] [cyan]{numbered_prefix}_*[/cyan]\n\n"
438
+ f"[bold]To use this credential:[/bold]\n"
439
+ f"1. Copy the contents to your main .env file, OR\n"
440
+ f"2. Source it: [bold cyan]source {Path(env_path).name}[/bold cyan] (Linux/Mac)\n"
441
+ f"3. Or on Windows: [bold cyan]Get-Content {Path(env_path).name} | ForEach-Object {{ $_ -replace '^([^#].*)$', 'set $1' }} | cmd[/bold cyan]\n\n"
442
+ f"[bold]To combine multiple credentials:[/bold]\n"
443
+ f"Copy lines from multiple .env files into one file.\n"
444
+ f"Each credential uses a unique number ({numbered_prefix}_*)."
445
+ )
446
+ console.print(Panel(success_text, style="bold green", title="Success"))
447
+ else:
448
+ console.print(
449
+ Panel(
450
+ "Failed to export credential", style="bold red", title="Error"
451
+ )
452
+ )
453
  else:
454
  console.print("[bold red]Invalid choice. Please try again.[/bold red]")
455
  except ValueError:
456
+ console.print(
457
+ "[bold red]Invalid input. Please enter a number or 'b'.[/bold red]"
458
+ )
459
  except Exception as e:
460
+ console.print(
461
+ Panel(
462
+ f"An error occurred during export: {e}", style="bold red", title="Error"
463
+ )
464
+ )
465
 
466
 
467
  async def export_qwen_code_to_env():
468
  """
469
  Export a Qwen Code credential JSON file to .env format.
470
+ Uses the auth class's build_env_lines() and list_credentials() methods.
471
  """
472
+ console.print(
473
+ Panel(
474
+ "[bold cyan]Export Qwen Code Credential to .env[/bold cyan]", expand=False
475
+ )
476
+ )
477
 
478
+ # Get auth instance for this provider
479
+ provider_factory, _ = _ensure_providers_loaded()
480
+ auth_class = provider_factory.get_provider_auth_class("qwen_code")
481
+ auth_instance = auth_class()
482
 
483
+ # List available credentials using auth class
484
+ credentials = auth_instance.list_credentials(OAUTH_BASE_DIR)
485
+
486
+ if not credentials:
487
+ console.print(
488
+ Panel(
489
+ "No Qwen Code credentials found. Please add one first using 'Add OAuth Credential'.",
490
+ style="bold red",
491
+ title="No Credentials",
492
+ )
493
+ )
494
  return
495
 
496
  # Display available credentials
497
  cred_text = Text()
498
+ for i, cred_info in enumerate(credentials):
499
+ cred_text.append(
500
+ f" {i + 1}. {Path(cred_info['file_path']).name} ({cred_info['email']})\n"
501
+ )
 
 
 
 
502
 
503
+ console.print(
504
+ Panel(cred_text, title="Available Qwen Code Credentials", style="bold blue")
505
+ )
506
 
507
  choice = Prompt.ask(
508
+ Text.from_markup(
509
+ "[bold]Please select a credential to export or type [red]'b'[/red] to go back[/bold]"
510
+ ),
511
+ choices=[str(i + 1) for i in range(len(credentials))] + ["b"],
512
+ show_choices=False,
513
  )
514
 
515
+ if choice.lower() == "b":
516
  return
517
 
518
  try:
519
  choice_index = int(choice) - 1
520
+ if 0 <= choice_index < len(credentials):
521
+ cred_info = credentials[choice_index]
 
 
 
 
522
 
523
+ # Use auth class to export
524
+ env_path = auth_instance.export_credential_to_env(
525
+ cred_info["file_path"], OAUTH_BASE_DIR
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
526
  )
527
+
528
+ if env_path:
529
+ numbered_prefix = f"QWEN_CODE_{cred_info['number']}"
530
+ success_text = Text.from_markup(
531
+ f"Successfully exported credential to [bold yellow]'{Path(env_path).name}'[/bold yellow]\n\n"
532
+ f"[bold]Environment variable prefix:[/bold] [cyan]{numbered_prefix}_*[/cyan]\n\n"
533
+ f"[bold]To use this credential:[/bold]\n"
534
+ f"1. Copy the contents to your main .env file, OR\n"
535
+ f"2. Source it: [bold cyan]source {Path(env_path).name}[/bold cyan] (Linux/Mac)\n\n"
536
+ f"[bold]To combine multiple credentials:[/bold]\n"
537
+ f"Copy lines from multiple .env files into one file.\n"
538
+ f"Each credential uses a unique number ({numbered_prefix}_*)."
539
+ )
540
+ console.print(Panel(success_text, style="bold green", title="Success"))
541
+ else:
542
+ console.print(
543
+ Panel(
544
+ "Failed to export credential", style="bold red", title="Error"
545
+ )
546
+ )
547
  else:
548
  console.print("[bold red]Invalid choice. Please try again.[/bold red]")
549
  except ValueError:
550
+ console.print(
551
+ "[bold red]Invalid input. Please enter a number or 'b'.[/bold red]"
552
+ )
553
  except Exception as e:
554
+ console.print(
555
+ Panel(
556
+ f"An error occurred during export: {e}", style="bold red", title="Error"
557
+ )
558
+ )
559
 
560
 
561
  async def export_iflow_to_env():
562
  """
563
  Export an iFlow credential JSON file to .env format.
564
+ Uses the auth class's build_env_lines() and list_credentials() methods.
565
  """
566
+ console.print(
567
+ Panel("[bold cyan]Export iFlow Credential to .env[/bold cyan]", expand=False)
568
+ )
569
+
570
+ # Get auth instance for this provider
571
+ provider_factory, _ = _ensure_providers_loaded()
572
+ auth_class = provider_factory.get_provider_auth_class("iflow")
573
+ auth_instance = auth_class()
574
 
575
+ # List available credentials using auth class
576
+ credentials = auth_instance.list_credentials(OAUTH_BASE_DIR)
577
 
578
+ if not credentials:
579
+ console.print(
580
+ Panel(
581
+ "No iFlow credentials found. Please add one first using 'Add OAuth Credential'.",
582
+ style="bold red",
583
+ title="No Credentials",
584
+ )
585
+ )
586
  return
587
 
588
  # Display available credentials
589
  cred_text = Text()
590
+ for i, cred_info in enumerate(credentials):
591
+ cred_text.append(
592
+ f" {i + 1}. {Path(cred_info['file_path']).name} ({cred_info['email']})\n"
593
+ )
 
 
 
 
594
 
595
+ console.print(
596
+ Panel(cred_text, title="Available iFlow Credentials", style="bold blue")
597
+ )
598
 
599
  choice = Prompt.ask(
600
+ Text.from_markup(
601
+ "[bold]Please select a credential to export or type [red]'b'[/red] to go back[/bold]"
602
+ ),
603
+ choices=[str(i + 1) for i in range(len(credentials))] + ["b"],
604
+ show_choices=False,
605
  )
606
 
607
+ if choice.lower() == "b":
608
  return
609
 
610
  try:
611
  choice_index = int(choice) - 1
612
+ if 0 <= choice_index < len(credentials):
613
+ cred_info = credentials[choice_index]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
614
 
615
+ # Use auth class to export
616
+ env_path = auth_instance.export_credential_to_env(
617
+ cred_info["file_path"], OAUTH_BASE_DIR
 
 
 
 
 
 
618
  )
619
+
620
+ if env_path:
621
+ numbered_prefix = f"IFLOW_{cred_info['number']}"
622
+ success_text = Text.from_markup(
623
+ f"Successfully exported credential to [bold yellow]'{Path(env_path).name}'[/bold yellow]\n\n"
624
+ f"[bold]Environment variable prefix:[/bold] [cyan]{numbered_prefix}_*[/cyan]\n\n"
625
+ f"[bold]To use this credential:[/bold]\n"
626
+ f"1. Copy the contents to your main .env file, OR\n"
627
+ f"2. Source it: [bold cyan]source {Path(env_path).name}[/bold cyan] (Linux/Mac)\n\n"
628
+ f"[bold]To combine multiple credentials:[/bold]\n"
629
+ f"Copy lines from multiple .env files into one file.\n"
630
+ f"Each credential uses a unique number ({numbered_prefix}_*)."
631
+ )
632
+ console.print(Panel(success_text, style="bold green", title="Success"))
633
+ else:
634
+ console.print(
635
+ Panel(
636
+ "Failed to export credential", style="bold red", title="Error"
637
+ )
638
+ )
639
  else:
640
  console.print("[bold red]Invalid choice. Please try again.[/bold red]")
641
  except ValueError:
642
+ console.print(
643
+ "[bold red]Invalid input. Please enter a number or 'b'.[/bold red]"
644
+ )
645
  except Exception as e:
646
+ console.print(
647
+ Panel(
648
+ f"An error occurred during export: {e}", style="bold red", title="Error"
649
+ )
650
+ )
651
 
652
 
653
  async def export_antigravity_to_env():
654
  """
655
  Export an Antigravity credential JSON file to .env format.
656
+ Uses the auth class's build_env_lines() and list_credentials() methods.
657
  """
658
+ console.print(
659
+ Panel(
660
+ "[bold cyan]Export Antigravity Credential to .env[/bold cyan]", expand=False
661
+ )
662
+ )
663
+
664
+ # Get auth instance for this provider
665
+ provider_factory, _ = _ensure_providers_loaded()
666
+ auth_class = provider_factory.get_provider_auth_class("antigravity")
667
+ auth_instance = auth_class()
668
 
669
+ # List available credentials using auth class
670
+ credentials = auth_instance.list_credentials(OAUTH_BASE_DIR)
671
 
672
+ if not credentials:
673
+ console.print(
674
+ Panel(
675
+ "No Antigravity credentials found. Please add one first using 'Add OAuth Credential'.",
676
+ style="bold red",
677
+ title="No Credentials",
678
+ )
679
+ )
680
  return
681
 
682
  # Display available credentials
683
  cred_text = Text()
684
+ for i, cred_info in enumerate(credentials):
685
+ cred_text.append(
686
+ f" {i + 1}. {Path(cred_info['file_path']).name} ({cred_info['email']})\n"
687
+ )
 
 
 
 
688
 
689
+ console.print(
690
+ Panel(cred_text, title="Available Antigravity Credentials", style="bold blue")
691
+ )
692
 
693
  choice = Prompt.ask(
694
+ Text.from_markup(
695
+ "[bold]Please select a credential to export or type [red]'b'[/red] to go back[/bold]"
696
+ ),
697
+ choices=[str(i + 1) for i in range(len(credentials))] + ["b"],
698
+ show_choices=False,
699
  )
700
 
701
+ if choice.lower() == "b":
702
  return
703
 
704
  try:
705
  choice_index = int(choice) - 1
706
+ if 0 <= choice_index < len(credentials):
707
+ cred_info = credentials[choice_index]
 
 
 
 
708
 
709
+ # Use auth class to export
710
+ env_path = auth_instance.export_credential_to_env(
711
+ cred_info["file_path"], OAUTH_BASE_DIR
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
712
  )
713
 
714
+ if env_path:
715
+ numbered_prefix = f"ANTIGRAVITY_{cred_info['number']}"
716
+ success_text = Text.from_markup(
717
+ f"Successfully exported credential to [bold yellow]'{Path(env_path).name}'[/bold yellow]\n\n"
718
+ f"[bold]Environment variable prefix:[/bold] [cyan]{numbered_prefix}_*[/cyan]\n\n"
719
+ f"[bold]To use this credential:[/bold]\n"
720
+ f"1. Copy the contents to your main .env file, OR\n"
721
+ f"2. Source it: [bold cyan]source {Path(env_path).name}[/bold cyan] (Linux/Mac)\n"
722
+ f"3. Or on Windows: [bold cyan]Get-Content {Path(env_path).name} | ForEach-Object {{ $_ -replace '^([^#].*)$', 'set $1' }} | cmd[/bold cyan]\n\n"
723
+ f"[bold]To combine multiple credentials:[/bold]\n"
724
+ f"Copy lines from multiple .env files into one file.\n"
725
+ f"Each credential uses a unique number ({numbered_prefix}_*)."
726
+ )
727
+ console.print(Panel(success_text, style="bold green", title="Success"))
728
+ else:
729
+ console.print(
730
+ Panel(
731
+ "Failed to export credential", style="bold red", title="Error"
732
+ )
733
+ )
734
  else:
735
  console.print("[bold red]Invalid choice. Please try again.[/bold red]")
736
  except ValueError:
737
+ console.print(
738
+ "[bold red]Invalid input. Please enter a number or 'b'.[/bold red]"
739
+ )
740
  except Exception as e:
741
+ console.print(
742
+ Panel(
743
+ f"An error occurred during export: {e}", style="bold red", title="Error"
744
+ )
745
+ )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
746
 
747
 
748
  async def export_all_provider_credentials(provider_name: str):
749
  """
750
  Export all credentials for a specific provider to individual .env files.
751
+ Uses the auth class's list_credentials() and export_credential_to_env() methods.
752
  """
753
+ # Get auth instance for this provider
754
+ provider_factory, _ = _ensure_providers_loaded()
755
+ try:
756
+ auth_class = provider_factory.get_provider_auth_class(provider_name)
757
+ auth_instance = auth_class()
758
+ except Exception:
 
 
759
  console.print(f"[bold red]Unknown provider: {provider_name}[/bold red]")
760
  return
761
+
762
+ display_name = provider_name.replace("_", " ").title()
763
+
764
+ console.print(
765
+ Panel(
766
+ f"[bold cyan]Export All {display_name} Credentials[/bold cyan]",
767
+ expand=False,
768
+ )
769
+ )
770
+
771
+ # List all credentials using auth class
772
+ credentials = auth_instance.list_credentials(OAUTH_BASE_DIR)
773
+
774
+ if not credentials:
775
+ console.print(
776
+ Panel(
777
+ f"No {display_name} credentials found.",
778
+ style="bold red",
779
+ title="No Credentials",
780
+ )
781
+ )
782
  return
783
+
784
  exported_count = 0
785
+ for cred_info in credentials:
786
  try:
787
+ # Use auth class to export
788
+ env_path = auth_instance.export_credential_to_env(
789
+ cred_info["file_path"], OAUTH_BASE_DIR
790
+ )
791
+
792
+ if env_path:
793
+ console.print(
794
+ f" ✓ Exported [cyan]{Path(cred_info['file_path']).name}[/cyan] → [yellow]{Path(env_path).name}[/yellow]"
795
+ )
796
+ exported_count += 1
797
+ else:
798
+ console.print(
799
+ f" ✗ Failed to export {Path(cred_info['file_path']).name}"
800
+ )
801
+
 
 
 
 
802
  except Exception as e:
803
+ console.print(
804
+ f" ✗ Failed to export {Path(cred_info['file_path']).name}: {e}"
805
+ )
806
+
807
+ console.print(
808
+ Panel(
809
+ f"Successfully exported {exported_count}/{len(credentials)} {display_name} credentials to individual .env files.",
810
+ style="bold green",
811
+ title="Export Complete",
812
+ )
813
+ )
814
 
815
 
816
  async def combine_provider_credentials(provider_name: str):
817
  """
818
  Combine all credentials for a specific provider into a single .env file.
819
+ Uses the auth class's list_credentials() and build_env_lines() methods.
820
  """
821
+ # Get auth instance for this provider
822
+ provider_factory, _ = _ensure_providers_loaded()
823
+ try:
824
+ auth_class = provider_factory.get_provider_auth_class(provider_name)
825
+ auth_instance = auth_class()
826
+ except Exception:
 
 
827
  console.print(f"[bold red]Unknown provider: {provider_name}[/bold red]")
828
  return
829
+
830
+ display_name = provider_name.replace("_", " ").title()
831
+
832
+ console.print(
833
+ Panel(
834
+ f"[bold cyan]Combine All {display_name} Credentials[/bold cyan]",
835
+ expand=False,
836
+ )
837
+ )
838
+
839
+ # List all credentials using auth class
840
+ credentials = auth_instance.list_credentials(OAUTH_BASE_DIR)
841
+
842
+ if not credentials:
843
+ console.print(
844
+ Panel(
845
+ f"No {display_name} credentials found.",
846
+ style="bold red",
847
+ title="No Credentials",
848
+ )
849
+ )
850
  return
851
+
852
  combined_lines = [
853
  f"# Combined {display_name} Credentials",
854
  f"# Generated at: {time.strftime('%Y-%m-%d %H:%M:%S')}",
855
+ f"# Total credentials: {len(credentials)}",
856
  "#",
857
  "# Copy all lines below into your main .env file",
858
  "",
859
  ]
860
+
861
  combined_count = 0
862
+ for cred_info in credentials:
863
  try:
864
+ # Load credential file
865
+ with open(cred_info["file_path"], "r") as f:
866
  creds = json.load(f)
867
+
868
+ # Use auth class to build env lines
869
+ env_lines = auth_instance.build_env_lines(creds, cred_info["number"])
870
+
871
  combined_lines.extend(env_lines)
872
  combined_lines.append("") # Blank line between credentials
873
  combined_count += 1
874
+
875
  except Exception as e:
876
+ console.print(
877
+ f" ✗ Failed to process {Path(cred_info['file_path']).name}: {e}"
878
+ )
879
+
880
  # Write combined file
881
  combined_filename = f"{provider_name}_all_combined.env"
882
  combined_filepath = OAUTH_BASE_DIR / combined_filename
883
+
884
+ with open(combined_filepath, "w") as f:
885
+ f.write("\n".join(combined_lines))
886
+
887
+ console.print(
888
+ Panel(
889
+ Text.from_markup(
890
+ f"Successfully combined {combined_count} {display_name} credentials into:\n"
891
+ f"[bold yellow]{combined_filepath}[/bold yellow]\n\n"
892
+ f"[bold]To use:[/bold] Copy the contents into your main .env file."
893
+ ),
894
+ style="bold green",
895
+ title="Combine Complete",
896
+ )
897
+ )
898
 
899
 
900
  async def combine_all_credentials():
901
  """
902
  Combine ALL credentials from ALL providers into a single .env file.
903
+ Uses auth class list_credentials() and build_env_lines() methods.
904
  """
905
+ console.print(
906
+ Panel("[bold cyan]Combine All Provider Credentials[/bold cyan]", expand=False)
907
+ )
908
+
909
+ # List of providers that support OAuth credentials
910
+ oauth_providers = ["gemini_cli", "qwen_code", "iflow", "antigravity"]
911
+
912
+ provider_factory, _ = _ensure_providers_loaded()
913
+
914
  combined_lines = [
915
  "# Combined All Provider Credentials",
916
  f"# Generated at: {time.strftime('%Y-%m-%d %H:%M:%S')}",
 
918
  "# Copy all lines below into your main .env file",
919
  "",
920
  ]
921
+
922
  total_count = 0
923
  provider_counts = {}
924
+
925
+ for provider_name in oauth_providers:
926
+ try:
927
+ auth_class = provider_factory.get_provider_auth_class(provider_name)
928
+ auth_instance = auth_class()
929
+ except Exception:
930
+ continue # Skip providers that don't have auth classes
931
+
932
+ credentials = auth_instance.list_credentials(OAUTH_BASE_DIR)
933
+
934
+ if not credentials:
935
  continue
936
+
937
+ display_name = provider_name.replace("_", " ").title()
938
  combined_lines.append(f"# ===== {display_name} Credentials =====")
939
  combined_lines.append("")
940
+
941
  provider_count = 0
942
+ for cred_info in credentials:
943
  try:
944
+ # Load credential file
945
+ with open(cred_info["file_path"], "r") as f:
946
  creds = json.load(f)
947
+
948
+ # Use auth class to build env lines
949
+ env_lines = auth_instance.build_env_lines(creds, cred_info["number"])
950
+
951
  combined_lines.extend(env_lines)
952
  combined_lines.append("")
953
  provider_count += 1
954
  total_count += 1
955
+
956
  except Exception as e:
957
+ console.print(
958
+ f" ✗ Failed to process {Path(cred_info['file_path']).name}: {e}"
959
+ )
960
+
961
  provider_counts[display_name] = provider_count
962
+
963
  if total_count == 0:
964
+ console.print(
965
+ Panel(
966
+ "No credentials found to combine.",
967
+ style="bold red",
968
+ title="No Credentials",
969
+ )
970
+ )
971
  return
972
+
973
  # Write combined file
974
  combined_filename = "all_providers_combined.env"
975
  combined_filepath = OAUTH_BASE_DIR / combined_filename
976
+
977
+ with open(combined_filepath, "w") as f:
978
+ f.write("\n".join(combined_lines))
979
+
980
  # Build summary
981
+ summary_lines = [
982
+ f" • {name}: {count} credential(s)" for name, count in provider_counts.items()
983
+ ]
984
  summary = "\n".join(summary_lines)
985
+
986
+ console.print(
987
+ Panel(
988
+ Text.from_markup(
989
+ f"Successfully combined {total_count} credentials from {len(provider_counts)} providers:\n"
990
+ f"{summary}\n\n"
991
+ f"[bold]Output file:[/bold] [yellow]{combined_filepath}[/yellow]\n\n"
992
+ f"[bold]To use:[/bold] Copy the contents into your main .env file."
993
+ ),
994
+ style="bold green",
995
+ title="Combine Complete",
996
+ )
997
+ )
998
 
999
 
1000
  async def export_credentials_submenu():
 
1003
  """
1004
  while True:
1005
  clear_screen()
1006
+ console.print(
1007
+ Panel(
1008
+ "[bold cyan]Export Credentials to .env[/bold cyan]",
1009
+ title="--- API Key Proxy ---",
1010
+ expand=False,
1011
+ )
1012
+ )
1013
+
1014
+ console.print(
1015
+ Panel(
1016
+ Text.from_markup(
1017
+ "[bold]Individual Exports:[/bold]\n"
1018
+ "1. Export Gemini CLI credential\n"
1019
+ "2. Export Qwen Code credential\n"
1020
+ "3. Export iFlow credential\n"
1021
+ "4. Export Antigravity credential\n"
1022
+ "\n"
1023
+ "[bold]Bulk Exports (per provider):[/bold]\n"
1024
+ "5. Export ALL Gemini CLI credentials\n"
1025
+ "6. Export ALL Qwen Code credentials\n"
1026
+ "7. Export ALL iFlow credentials\n"
1027
+ "8. Export ALL Antigravity credentials\n"
1028
+ "\n"
1029
+ "[bold]Combine Credentials:[/bold]\n"
1030
+ "9. Combine all Gemini CLI into one file\n"
1031
+ "10. Combine all Qwen Code into one file\n"
1032
+ "11. Combine all iFlow into one file\n"
1033
+ "12. Combine all Antigravity into one file\n"
1034
+ "13. Combine ALL providers into one file"
1035
+ ),
1036
+ title="Choose export option",
1037
+ style="bold blue",
1038
+ )
1039
+ )
1040
 
1041
  export_choice = Prompt.ask(
1042
+ Text.from_markup(
1043
+ "[bold]Please select an option or type [red]'b'[/red] to go back[/bold]"
1044
+ ),
1045
+ choices=[
1046
+ "1",
1047
+ "2",
1048
+ "3",
1049
+ "4",
1050
+ "5",
1051
+ "6",
1052
+ "7",
1053
+ "8",
1054
+ "9",
1055
+ "10",
1056
+ "11",
1057
+ "12",
1058
+ "13",
1059
+ "b",
1060
+ ],
1061
+ show_choices=False,
1062
  )
1063
 
1064
+ if export_choice.lower() == "b":
1065
  break
1066
 
1067
  # Individual exports
 
1125
  async def main(clear_on_start=True):
1126
  """
1127
  An interactive CLI tool to add new credentials.
1128
+
1129
  Args:
1130
+ clear_on_start: If False, skip initial screen clear (used when called from launcher
1131
  to preserve the loading screen)
1132
  """
1133
  ensure_env_defaults()
1134
+
1135
  # Only show header if we're clearing (standalone mode)
1136
  if clear_on_start:
1137
+ console.print(
1138
+ Panel(
1139
+ "[bold cyan]Interactive Credential Setup[/bold cyan]",
1140
+ title="--- API Key Proxy ---",
1141
+ expand=False,
1142
+ )
1143
+ )
1144
+
1145
  while True:
1146
  # Clear screen between menu selections for cleaner UX
1147
  clear_screen()
1148
+ console.print(
1149
+ Panel(
1150
+ "[bold cyan]Interactive Credential Setup[/bold cyan]",
1151
+ title="--- API Key Proxy ---",
1152
+ expand=False,
1153
+ )
1154
+ )
1155
+
1156
+ console.print(
1157
+ Panel(
1158
+ Text.from_markup(
1159
+ "1. Add OAuth Credential\n2. Add API Key\n3. Export Credentials"
1160
+ ),
1161
+ title="Choose credential type",
1162
+ style="bold blue",
1163
+ )
1164
+ )
1165
 
1166
  setup_type = Prompt.ask(
1167
+ Text.from_markup(
1168
+ "[bold]Please select an option or type [red]'q'[/red] to quit[/bold]"
1169
+ ),
1170
  choices=["1", "2", "3", "q"],
1171
+ show_choices=False,
1172
  )
1173
 
1174
+ if setup_type.lower() == "q":
1175
  break
1176
 
1177
  if setup_type == "1":
 
1183
  "iflow": "iFlow (OAuth - also supports API keys)",
1184
  "antigravity": "Antigravity (OAuth)",
1185
  }
1186
+
1187
  provider_text = Text()
1188
  for i, provider in enumerate(available_providers):
1189
+ display_name = oauth_friendly_names.get(
1190
+ provider, provider.replace("_", " ").title()
1191
+ )
1192
  provider_text.append(f" {i + 1}. {display_name}\n")
1193
+
1194
+ console.print(
1195
+ Panel(
1196
+ provider_text,
1197
+ title="Available Providers for OAuth",
1198
+ style="bold blue",
1199
+ )
1200
+ )
1201
 
1202
  choice = Prompt.ask(
1203
+ Text.from_markup(
1204
+ "[bold]Please select a provider or type [red]'b'[/red] to go back[/bold]"
1205
+ ),
1206
  choices=[str(i + 1) for i in range(len(available_providers))] + ["b"],
1207
+ show_choices=False,
1208
  )
1209
 
1210
+ if choice.lower() == "b":
1211
  continue
1212
+
1213
  try:
1214
  choice_index = int(choice) - 1
1215
  if 0 <= choice_index < len(available_providers):
1216
  provider_name = available_providers[choice_index]
1217
+ display_name = oauth_friendly_names.get(
1218
+ provider_name, provider_name.replace("_", " ").title()
1219
+ )
1220
+ console.print(
1221
+ f"\nStarting OAuth setup for [bold cyan]{display_name}[/bold cyan]..."
1222
+ )
1223
  await setup_new_credential(provider_name)
1224
  # Don't clear after OAuth - user needs to see full flow
1225
  console.print("\n[dim]Press Enter to return to main menu...[/dim]")
1226
  input()
1227
  else:
1228
+ console.print(
1229
+ "[bold red]Invalid choice. Please try again.[/bold red]"
1230
+ )
1231
  await asyncio.sleep(1.5)
1232
  except ValueError:
1233
+ console.print(
1234
+ "[bold red]Invalid input. Please enter a number or 'b'.[/bold red]"
1235
+ )
1236
  await asyncio.sleep(1.5)
1237
 
1238
  elif setup_type == "2":
1239
  await setup_api_key()
1240
+ # console.print("\n[dim]Press Enter to return to main menu...[/dim]")
1241
+ # input()
1242
 
1243
  elif setup_type == "3":
1244
  await export_credentials_submenu()
1245
 
1246
+
1247
  def run_credential_tool(from_launcher=False):
1248
  """
1249
  Entry point for credential tool.
1250
+
1251
  Args:
1252
  from_launcher: If True, skip loading screen (launcher already showed it)
1253
  """
1254
  # Check if we need to show loading screen
1255
  if not from_launcher:
1256
  # Standalone mode - show full loading UI
1257
+ os.system("cls" if os.name == "nt" else "clear")
1258
+
1259
  _start_time = time.time()
1260
+
1261
  # Phase 1: Show initial message
1262
  print("━" * 70)
1263
  print("Interactive Credential Setup Tool")
1264
  print("GitHub: https://github.com/Mirrowel/LLM-API-Key-Proxy")
1265
  print("━" * 70)
1266
  print("Loading credential management components...")
1267
+
1268
  # Phase 2: Load dependencies with spinner
1269
  with console.status("Loading authentication providers...", spinner="dots"):
1270
  _ensure_providers_loaded()
 
1273
  with console.status("Initializing credential tool...", spinner="dots"):
1274
  time.sleep(0.2) # Brief pause for UI consistency
1275
  console.print("✓ Credential tool initialized")
1276
+
1277
  _elapsed = time.time() - _start_time
1278
  _, PROVIDER_PLUGINS = _ensure_providers_loaded()
1279
+ print(
1280
+ f"✓ Tool ready in {_elapsed:.2f}s ({len(PROVIDER_PLUGINS)} providers available)"
1281
+ )
1282
+
1283
  # Small delay to let user see the ready message
1284
  time.sleep(0.5)
1285
+
1286
  # Run the main async event loop
1287
  # If from launcher, don't clear screen at start to preserve loading messages
1288
  try:
src/rotator_library/providers/antigravity_auth_base.py CHANGED
@@ -1,16 +1,36 @@
1
  # src/rotator_library/providers/antigravity_auth_base.py
2
 
 
 
 
 
 
 
 
 
 
3
  from .google_oauth_base import GoogleOAuthBase
4
 
 
 
 
 
 
 
5
  class AntigravityAuthBase(GoogleOAuthBase):
6
  """
7
  Antigravity OAuth2 authentication implementation.
8
-
9
  Inherits all OAuth functionality from GoogleOAuthBase with Antigravity-specific configuration.
10
  Uses Antigravity's OAuth credentials and includes additional scopes for cclog and experimentsandconfigs.
 
 
 
11
  """
12
-
13
- CLIENT_ID = "1071006060591-tmhssin2h21lcre235vtolojh4g403ep.apps.googleusercontent.com"
 
 
14
  CLIENT_SECRET = "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"
15
  OAUTH_SCOPES = [
16
  "https://www.googleapis.com/auth/cloud-platform",
@@ -22,3 +42,600 @@ class AntigravityAuthBase(GoogleOAuthBase):
22
  ENV_PREFIX = "ANTIGRAVITY"
23
  CALLBACK_PORT = 51121
24
  CALLBACK_PATH = "/oauthcallback"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  # src/rotator_library/providers/antigravity_auth_base.py
2
 
3
+ import asyncio
4
+ import json
5
+ import logging
6
+ import os
7
+ from pathlib import Path
8
+ from typing import Any, Dict, Optional, List
9
+
10
+ import httpx
11
+
12
  from .google_oauth_base import GoogleOAuthBase
13
 
14
+ lib_logger = logging.getLogger("rotator_library")
15
+
16
+ # Code Assist endpoint for project discovery
17
+ CODE_ASSIST_ENDPOINT = "https://cloudcode-pa.googleapis.com/v1internal"
18
+
19
+
20
  class AntigravityAuthBase(GoogleOAuthBase):
21
  """
22
  Antigravity OAuth2 authentication implementation.
23
+
24
  Inherits all OAuth functionality from GoogleOAuthBase with Antigravity-specific configuration.
25
  Uses Antigravity's OAuth credentials and includes additional scopes for cclog and experimentsandconfigs.
26
+
27
+ Also provides project/tier discovery functionality that runs during authentication,
28
+ ensuring credentials have their tier and project_id cached before any API requests.
29
  """
30
+
31
+ CLIENT_ID = (
32
+ "1071006060591-tmhssin2h21lcre235vtolojh4g403ep.apps.googleusercontent.com"
33
+ )
34
  CLIENT_SECRET = "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"
35
  OAUTH_SCOPES = [
36
  "https://www.googleapis.com/auth/cloud-platform",
 
42
  ENV_PREFIX = "ANTIGRAVITY"
43
  CALLBACK_PORT = 51121
44
  CALLBACK_PATH = "/oauthcallback"
45
+
46
+ def __init__(self):
47
+ super().__init__()
48
+ # Project and tier caches - shared between auth base and provider
49
+ self.project_id_cache: Dict[str, str] = {}
50
+ self.project_tier_cache: Dict[str, str] = {}
51
+
52
+ # =========================================================================
53
+ # POST-AUTH DISCOVERY HOOK
54
+ # =========================================================================
55
+
56
+ async def _post_auth_discovery(
57
+ self, credential_path: str, access_token: str
58
+ ) -> None:
59
+ """
60
+ Discover and cache tier/project information immediately after OAuth authentication.
61
+
62
+ This is called by GoogleOAuthBase._perform_interactive_oauth() after successful auth,
63
+ ensuring tier and project_id are cached during the authentication flow rather than
64
+ waiting for the first API request.
65
+
66
+ Args:
67
+ credential_path: Path to the credential file
68
+ access_token: The newly obtained access token
69
+ """
70
+ lib_logger.debug(
71
+ f"Starting post-auth discovery for Antigravity credential: {Path(credential_path).name}"
72
+ )
73
+
74
+ # Skip if already discovered (shouldn't happen during fresh auth, but be defensive)
75
+ if (
76
+ credential_path in self.project_id_cache
77
+ and credential_path in self.project_tier_cache
78
+ ):
79
+ lib_logger.debug(
80
+ f"Tier and project already cached for {Path(credential_path).name}, skipping discovery"
81
+ )
82
+ return
83
+
84
+ # Call _discover_project_id which handles tier/project discovery and persistence
85
+ # Pass empty litellm_params since we're in auth context (no model-specific overrides)
86
+ project_id = await self._discover_project_id(
87
+ credential_path, access_token, litellm_params={}
88
+ )
89
+
90
+ tier = self.project_tier_cache.get(credential_path, "unknown")
91
+ lib_logger.info(
92
+ f"Post-auth discovery complete for {Path(credential_path).name}: "
93
+ f"tier={tier}, project={project_id}"
94
+ )
95
+
96
+ # =========================================================================
97
+ # PROJECT ID DISCOVERY
98
+ # =========================================================================
99
+
100
+ async def _discover_project_id(
101
+ self, credential_path: str, access_token: str, litellm_params: Dict[str, Any]
102
+ ) -> str:
103
+ """
104
+ Discovers the Google Cloud Project ID, with caching and onboarding for new accounts.
105
+
106
+ This follows the official Gemini CLI discovery flow adapted for Antigravity:
107
+ 1. Check in-memory cache
108
+ 2. Check configured project_id override (litellm_params or env var)
109
+ 3. Check persisted project_id in credential file
110
+ 4. Call loadCodeAssist to check if user is already known (has currentTier)
111
+ - If currentTier exists AND cloudaicompanionProject returned: use server's project
112
+ - If no currentTier: user needs onboarding
113
+ 5. Onboard user (FREE tier: pass cloudaicompanionProject=None for server-managed)
114
+ 6. Fallback to GCP Resource Manager project listing
115
+
116
+ Note: Unlike GeminiCli, Antigravity doesn't use tier-based credential prioritization,
117
+ but we still cache tier info for debugging and consistency.
118
+ """
119
+ lib_logger.debug(
120
+ f"Starting Antigravity project discovery for credential: {credential_path}"
121
+ )
122
+
123
+ # Check in-memory cache first
124
+ if credential_path in self.project_id_cache:
125
+ cached_project = self.project_id_cache[credential_path]
126
+ lib_logger.debug(f"Using cached project ID: {cached_project}")
127
+ return cached_project
128
+
129
+ # Check for configured project ID override (from litellm_params or env var)
130
+ configured_project_id = (
131
+ litellm_params.get("project_id")
132
+ or os.getenv("ANTIGRAVITY_PROJECT_ID")
133
+ or os.getenv("GOOGLE_CLOUD_PROJECT")
134
+ )
135
+ if configured_project_id:
136
+ lib_logger.debug(
137
+ f"Found configured project_id override: {configured_project_id}"
138
+ )
139
+
140
+ # Load credentials from file to check for persisted project_id and tier
141
+ # Skip for env:// paths (environment-based credentials don't persist to files)
142
+ credential_index = self._parse_env_credential_path(credential_path)
143
+ if credential_index is None:
144
+ # Only try to load from file if it's not an env:// path
145
+ try:
146
+ with open(credential_path, "r") as f:
147
+ creds = json.load(f)
148
+
149
+ metadata = creds.get("_proxy_metadata", {})
150
+ persisted_project_id = metadata.get("project_id")
151
+ persisted_tier = metadata.get("tier")
152
+
153
+ if persisted_project_id:
154
+ lib_logger.info(
155
+ f"Loaded persisted project ID from credential file: {persisted_project_id}"
156
+ )
157
+ self.project_id_cache[credential_path] = persisted_project_id
158
+
159
+ # Also load tier if available (for debugging/logging purposes)
160
+ if persisted_tier:
161
+ self.project_tier_cache[credential_path] = persisted_tier
162
+ lib_logger.debug(f"Loaded persisted tier: {persisted_tier}")
163
+
164
+ return persisted_project_id
165
+ except (FileNotFoundError, json.JSONDecodeError, KeyError) as e:
166
+ lib_logger.debug(f"Could not load persisted project ID from file: {e}")
167
+
168
+ lib_logger.debug(
169
+ "No cached or configured project ID found, initiating discovery..."
170
+ )
171
+ headers = {
172
+ "Authorization": f"Bearer {access_token}",
173
+ "Content-Type": "application/json",
174
+ }
175
+
176
+ discovered_project_id = None
177
+ discovered_tier = None
178
+
179
+ async with httpx.AsyncClient() as client:
180
+ # 1. Try discovery endpoint with loadCodeAssist
181
+ lib_logger.debug(
182
+ "Attempting project discovery via Code Assist loadCodeAssist endpoint..."
183
+ )
184
+ try:
185
+ # Build metadata - include duetProject only if we have a configured project
186
+ core_client_metadata = {
187
+ "ideType": "IDE_UNSPECIFIED",
188
+ "platform": "PLATFORM_UNSPECIFIED",
189
+ "pluginType": "GEMINI",
190
+ }
191
+ if configured_project_id:
192
+ core_client_metadata["duetProject"] = configured_project_id
193
+
194
+ # Build load request - pass configured_project_id if available, otherwise None
195
+ load_request = {
196
+ "cloudaicompanionProject": configured_project_id, # Can be None
197
+ "metadata": core_client_metadata,
198
+ }
199
+
200
+ lib_logger.debug(
201
+ f"Sending loadCodeAssist request with cloudaicompanionProject={configured_project_id}"
202
+ )
203
+ response = await client.post(
204
+ f"{CODE_ASSIST_ENDPOINT}:loadCodeAssist",
205
+ headers=headers,
206
+ json=load_request,
207
+ timeout=20,
208
+ )
209
+ response.raise_for_status()
210
+ data = response.json()
211
+
212
+ # Log full response for debugging
213
+ lib_logger.debug(
214
+ f"loadCodeAssist full response keys: {list(data.keys())}"
215
+ )
216
+
217
+ # Extract tier information
218
+ allowed_tiers = data.get("allowedTiers", [])
219
+ current_tier = data.get("currentTier")
220
+
221
+ lib_logger.debug(f"=== Tier Information ===")
222
+ lib_logger.debug(f"currentTier: {current_tier}")
223
+ lib_logger.debug(f"allowedTiers count: {len(allowed_tiers)}")
224
+ for i, tier in enumerate(allowed_tiers):
225
+ tier_id = tier.get("id", "unknown")
226
+ is_default = tier.get("isDefault", False)
227
+ user_defined = tier.get("userDefinedCloudaicompanionProject", False)
228
+ lib_logger.debug(
229
+ f" Tier {i + 1}: id={tier_id}, isDefault={is_default}, userDefinedProject={user_defined}"
230
+ )
231
+ lib_logger.debug(f"========================")
232
+
233
+ # Determine the current tier ID
234
+ current_tier_id = None
235
+ if current_tier:
236
+ current_tier_id = current_tier.get("id")
237
+ lib_logger.debug(f"User has currentTier: {current_tier_id}")
238
+
239
+ # Check if user is already known to server (has currentTier)
240
+ if current_tier_id:
241
+ # User is already onboarded - check for project from server
242
+ server_project = data.get("cloudaicompanionProject")
243
+
244
+ # Check if this tier requires user-defined project (paid tiers)
245
+ requires_user_project = any(
246
+ t.get("id") == current_tier_id
247
+ and t.get("userDefinedCloudaicompanionProject", False)
248
+ for t in allowed_tiers
249
+ )
250
+ is_free_tier = current_tier_id == "free-tier"
251
+
252
+ if server_project:
253
+ # Server returned a project - use it (server wins)
254
+ project_id = server_project
255
+ lib_logger.debug(f"Server returned project: {project_id}")
256
+ elif configured_project_id:
257
+ # No server project but we have configured one - use it
258
+ project_id = configured_project_id
259
+ lib_logger.debug(
260
+ f"No server project, using configured: {project_id}"
261
+ )
262
+ elif is_free_tier:
263
+ # Free tier user without server project - try onboarding
264
+ lib_logger.debug(
265
+ "Free tier user with currentTier but no project - will try onboarding"
266
+ )
267
+ project_id = None
268
+ elif requires_user_project:
269
+ # Paid tier requires a project ID to be set
270
+ raise ValueError(
271
+ f"Paid tier '{current_tier_id}' requires setting ANTIGRAVITY_PROJECT_ID environment variable."
272
+ )
273
+ else:
274
+ # Unknown tier without project - proceed to onboarding
275
+ lib_logger.warning(
276
+ f"Tier '{current_tier_id}' has no project and none configured - will try onboarding"
277
+ )
278
+ project_id = None
279
+
280
+ if project_id:
281
+ # Cache tier info
282
+ self.project_tier_cache[credential_path] = current_tier_id
283
+ discovered_tier = current_tier_id
284
+
285
+ # Log appropriately based on tier
286
+ is_paid = current_tier_id and current_tier_id not in [
287
+ "free-tier",
288
+ "legacy-tier",
289
+ "unknown",
290
+ ]
291
+ if is_paid:
292
+ lib_logger.info(
293
+ f"Using Antigravity paid tier '{current_tier_id}' with project: {project_id}"
294
+ )
295
+ else:
296
+ lib_logger.info(
297
+ f"Discovered Antigravity project ID via loadCodeAssist: {project_id}"
298
+ )
299
+
300
+ self.project_id_cache[credential_path] = project_id
301
+ discovered_project_id = project_id
302
+
303
+ # Persist to credential file
304
+ await self._persist_project_metadata(
305
+ credential_path, project_id, discovered_tier
306
+ )
307
+
308
+ return project_id
309
+
310
+ # 2. User needs onboarding - no currentTier or no project found
311
+ lib_logger.info(
312
+ "No existing Antigravity session found (no currentTier), attempting to onboard user..."
313
+ )
314
+
315
+ # Determine which tier to onboard with
316
+ onboard_tier = None
317
+ for tier in allowed_tiers:
318
+ if tier.get("isDefault"):
319
+ onboard_tier = tier
320
+ break
321
+
322
+ # Fallback to legacy tier if no default
323
+ if not onboard_tier and allowed_tiers:
324
+ for tier in allowed_tiers:
325
+ if tier.get("id") == "legacy-tier":
326
+ onboard_tier = tier
327
+ break
328
+ if not onboard_tier:
329
+ onboard_tier = allowed_tiers[0]
330
+
331
+ if not onboard_tier:
332
+ raise ValueError("No onboarding tiers available from server")
333
+
334
+ tier_id = onboard_tier.get("id", "free-tier")
335
+ requires_user_project = onboard_tier.get(
336
+ "userDefinedCloudaicompanionProject", False
337
+ )
338
+
339
+ lib_logger.debug(
340
+ f"Onboarding with tier: {tier_id}, requiresUserProject: {requires_user_project}"
341
+ )
342
+
343
+ # Build onboard request based on tier type
344
+ # FREE tier: cloudaicompanionProject = None (server-managed)
345
+ # PAID tier: cloudaicompanionProject = configured_project_id
346
+ is_free_tier = tier_id == "free-tier"
347
+
348
+ if is_free_tier:
349
+ # Free tier uses server-managed project
350
+ onboard_request = {
351
+ "tierId": tier_id,
352
+ "cloudaicompanionProject": None, # Server will create/manage
353
+ "metadata": core_client_metadata,
354
+ }
355
+ lib_logger.debug(
356
+ "Free tier onboarding: using server-managed project"
357
+ )
358
+ else:
359
+ # Paid/legacy tier requires user-provided project
360
+ if not configured_project_id and requires_user_project:
361
+ raise ValueError(
362
+ f"Tier '{tier_id}' requires setting ANTIGRAVITY_PROJECT_ID environment variable."
363
+ )
364
+ onboard_request = {
365
+ "tierId": tier_id,
366
+ "cloudaicompanionProject": configured_project_id,
367
+ "metadata": {
368
+ **core_client_metadata,
369
+ "duetProject": configured_project_id,
370
+ }
371
+ if configured_project_id
372
+ else core_client_metadata,
373
+ }
374
+ lib_logger.debug(
375
+ f"Paid tier onboarding: using project {configured_project_id}"
376
+ )
377
+
378
+ lib_logger.debug("Initiating onboardUser request...")
379
+ lro_response = await client.post(
380
+ f"{CODE_ASSIST_ENDPOINT}:onboardUser",
381
+ headers=headers,
382
+ json=onboard_request,
383
+ timeout=30,
384
+ )
385
+ lro_response.raise_for_status()
386
+ lro_data = lro_response.json()
387
+ lib_logger.debug(
388
+ f"Initial onboarding response: done={lro_data.get('done')}"
389
+ )
390
+
391
+ # Poll for onboarding completion (up to 5 minutes)
392
+ for i in range(150): # 150 × 2s = 5 minutes
393
+ if lro_data.get("done"):
394
+ lib_logger.debug(
395
+ f"Onboarding completed after {i} polling attempts"
396
+ )
397
+ break
398
+ await asyncio.sleep(2)
399
+ if (i + 1) % 15 == 0: # Log every 30 seconds
400
+ lib_logger.info(
401
+ f"Still waiting for onboarding completion... ({(i + 1) * 2}s elapsed)"
402
+ )
403
+ lib_logger.debug(
404
+ f"Polling onboarding status... (Attempt {i + 1}/150)"
405
+ )
406
+ lro_response = await client.post(
407
+ f"{CODE_ASSIST_ENDPOINT}:onboardUser",
408
+ headers=headers,
409
+ json=onboard_request,
410
+ timeout=30,
411
+ )
412
+ lro_response.raise_for_status()
413
+ lro_data = lro_response.json()
414
+
415
+ if not lro_data.get("done"):
416
+ lib_logger.error("Onboarding process timed out after 5 minutes")
417
+ raise ValueError(
418
+ "Onboarding process timed out after 5 minutes. Please try again or contact support."
419
+ )
420
+
421
+ # Extract project ID from LRO response
422
+ # Note: onboardUser returns response.cloudaicompanionProject as an object with .id
423
+ lro_response_data = lro_data.get("response", {})
424
+ lro_project_obj = lro_response_data.get("cloudaicompanionProject", {})
425
+ project_id = (
426
+ lro_project_obj.get("id")
427
+ if isinstance(lro_project_obj, dict)
428
+ else None
429
+ )
430
+
431
+ # Fallback to configured project if LRO didn't return one
432
+ if not project_id and configured_project_id:
433
+ project_id = configured_project_id
434
+ lib_logger.debug(
435
+ f"LRO didn't return project, using configured: {project_id}"
436
+ )
437
+
438
+ if not project_id:
439
+ lib_logger.error(
440
+ "Onboarding completed but no project ID in response and none configured"
441
+ )
442
+ raise ValueError(
443
+ "Onboarding completed, but no project ID was returned. "
444
+ "For paid tiers, set ANTIGRAVITY_PROJECT_ID environment variable."
445
+ )
446
+
447
+ lib_logger.debug(
448
+ f"Successfully extracted project ID from onboarding response: {project_id}"
449
+ )
450
+
451
+ # Cache tier info
452
+ self.project_tier_cache[credential_path] = tier_id
453
+ discovered_tier = tier_id
454
+ lib_logger.debug(f"Cached tier information: {tier_id}")
455
+
456
+ # Log concise message based on tier
457
+ is_paid = tier_id and tier_id not in ["free-tier", "legacy-tier"]
458
+ if is_paid:
459
+ lib_logger.info(
460
+ f"Using Antigravity paid tier '{tier_id}' with project: {project_id}"
461
+ )
462
+ else:
463
+ lib_logger.info(
464
+ f"Successfully onboarded user and discovered project ID: {project_id}"
465
+ )
466
+
467
+ self.project_id_cache[credential_path] = project_id
468
+ discovered_project_id = project_id
469
+
470
+ # Persist to credential file
471
+ await self._persist_project_metadata(
472
+ credential_path, project_id, discovered_tier
473
+ )
474
+
475
+ return project_id
476
+
477
+ except httpx.HTTPStatusError as e:
478
+ error_body = ""
479
+ try:
480
+ error_body = e.response.text
481
+ except Exception:
482
+ pass
483
+ if e.response.status_code == 403:
484
+ lib_logger.error(
485
+ f"Antigravity Code Assist API access denied (403). Response: {error_body}"
486
+ )
487
+ lib_logger.error(
488
+ "Possible causes: 1) cloudaicompanion.googleapis.com API not enabled, 2) Wrong project ID for paid tier, 3) Account lacks permissions"
489
+ )
490
+ elif e.response.status_code == 404:
491
+ lib_logger.warning(
492
+ f"Antigravity Code Assist endpoint not found (404). Falling back to project listing."
493
+ )
494
+ elif e.response.status_code == 412:
495
+ # Precondition Failed - often means wrong project for free tier onboarding
496
+ lib_logger.error(
497
+ f"Precondition failed (412): {error_body}. This may mean the project ID is incompatible with the selected tier."
498
+ )
499
+ else:
500
+ lib_logger.warning(
501
+ f"Antigravity onboarding/discovery failed with status {e.response.status_code}: {error_body}. Falling back to project listing."
502
+ )
503
+ except httpx.RequestError as e:
504
+ lib_logger.warning(
505
+ f"Antigravity onboarding/discovery network error: {e}. Falling back to project listing."
506
+ )
507
+
508
+ # 3. Fallback to listing all available GCP projects (last resort)
509
+ lib_logger.debug(
510
+ "Attempting to discover project via GCP Resource Manager API..."
511
+ )
512
+ try:
513
+ async with httpx.AsyncClient() as client:
514
+ lib_logger.debug(
515
+ "Querying Cloud Resource Manager for available projects..."
516
+ )
517
+ response = await client.get(
518
+ "https://cloudresourcemanager.googleapis.com/v1/projects",
519
+ headers=headers,
520
+ timeout=20,
521
+ )
522
+ response.raise_for_status()
523
+ projects = response.json().get("projects", [])
524
+ lib_logger.debug(f"Found {len(projects)} total projects")
525
+ active_projects = [
526
+ p for p in projects if p.get("lifecycleState") == "ACTIVE"
527
+ ]
528
+ lib_logger.debug(f"Found {len(active_projects)} active projects")
529
+
530
+ if not projects:
531
+ lib_logger.error(
532
+ "No GCP projects found for this account. Please create a project in Google Cloud Console."
533
+ )
534
+ elif not active_projects:
535
+ lib_logger.error(
536
+ "No active GCP projects found. Please activate a project in Google Cloud Console."
537
+ )
538
+ else:
539
+ project_id = active_projects[0]["projectId"]
540
+ lib_logger.info(
541
+ f"Discovered Antigravity project ID from active projects list: {project_id}"
542
+ )
543
+ lib_logger.debug(
544
+ f"Selected first active project: {project_id} (out of {len(active_projects)} active projects)"
545
+ )
546
+ self.project_id_cache[credential_path] = project_id
547
+ discovered_project_id = project_id
548
+
549
+ # Persist to credential file (no tier info from resource manager)
550
+ await self._persist_project_metadata(
551
+ credential_path, project_id, None
552
+ )
553
+
554
+ return project_id
555
+ except httpx.HTTPStatusError as e:
556
+ if e.response.status_code == 403:
557
+ lib_logger.error(
558
+ "Failed to list GCP projects due to a 403 Forbidden error. The Cloud Resource Manager API may not be enabled, or your account lacks the 'resourcemanager.projects.list' permission."
559
+ )
560
+ else:
561
+ lib_logger.error(
562
+ f"Failed to list GCP projects with status {e.response.status_code}: {e}"
563
+ )
564
+ except httpx.RequestError as e:
565
+ lib_logger.error(f"Network error while listing GCP projects: {e}")
566
+
567
+ raise ValueError(
568
+ "Could not auto-discover Antigravity project ID. Possible causes:\n"
569
+ " 1. The cloudaicompanion.googleapis.com API is not enabled (enable it in Google Cloud Console)\n"
570
+ " 2. No active GCP projects exist for this account (create one in Google Cloud Console)\n"
571
+ " 3. Account lacks necessary permissions\n"
572
+ "To manually specify a project, set ANTIGRAVITY_PROJECT_ID in your .env file."
573
+ )
574
+
575
+ async def _persist_project_metadata(
576
+ self, credential_path: str, project_id: str, tier: Optional[str]
577
+ ):
578
+ """Persists project ID and tier to the credential file for faster future startups."""
579
+ # Skip persistence for env:// paths (environment-based credentials)
580
+ credential_index = self._parse_env_credential_path(credential_path)
581
+ if credential_index is not None:
582
+ lib_logger.debug(
583
+ f"Skipping project metadata persistence for env:// credential path: {credential_path}"
584
+ )
585
+ return
586
+
587
+ try:
588
+ # Load current credentials
589
+ with open(credential_path, "r") as f:
590
+ creds = json.load(f)
591
+
592
+ # Update metadata
593
+ if "_proxy_metadata" not in creds:
594
+ creds["_proxy_metadata"] = {}
595
+
596
+ creds["_proxy_metadata"]["project_id"] = project_id
597
+ if tier:
598
+ creds["_proxy_metadata"]["tier"] = tier
599
+
600
+ # Save back using the existing save method (handles atomic writes and permissions)
601
+ await self._save_credentials(credential_path, creds)
602
+
603
+ lib_logger.debug(
604
+ f"Persisted project_id and tier to credential file: {credential_path}"
605
+ )
606
+ except Exception as e:
607
+ lib_logger.warning(
608
+ f"Failed to persist project metadata to credential file: {e}"
609
+ )
610
+ # Non-fatal - just means slower startup next time
611
+
612
+ # =========================================================================
613
+ # CREDENTIAL MANAGEMENT OVERRIDES
614
+ # =========================================================================
615
+
616
+ def _get_provider_file_prefix(self) -> str:
617
+ """Return the file prefix for Antigravity credentials."""
618
+ return "antigravity"
619
+
620
+ def build_env_lines(self, creds: Dict[str, Any], cred_number: int) -> List[str]:
621
+ """
622
+ Generate .env file lines for an Antigravity credential.
623
+
624
+ Includes tier and project_id from _proxy_metadata.
625
+ """
626
+ # Get base lines from parent class
627
+ lines = super().build_env_lines(creds, cred_number)
628
+
629
+ # Add Antigravity-specific fields (tier and project_id)
630
+ metadata = creds.get("_proxy_metadata", {})
631
+ prefix = f"{self.ENV_PREFIX}_{cred_number}"
632
+
633
+ project_id = metadata.get("project_id", "")
634
+ tier = metadata.get("tier", "")
635
+
636
+ if project_id:
637
+ lines.append(f"{prefix}_PROJECT_ID={project_id}")
638
+ if tier:
639
+ lines.append(f"{prefix}_TIER={tier}")
640
+
641
+ return lines
src/rotator_library/providers/antigravity_provider.py CHANGED
@@ -718,12 +718,7 @@ class AntigravityProvider(AntigravityAuthBase, ProviderInterface):
718
  def __init__(self):
719
  super().__init__()
720
  self.model_definitions = ModelDefinitions()
721
- self.project_id_cache: Dict[
722
- str, str
723
- ] = {} # Cache project ID per credential path
724
- self.project_tier_cache: Dict[
725
- str, str
726
- ] = {} # Cache project tier per credential path (for debugging)
727
 
728
  # Base URL management
729
  self._base_url_index = 0
@@ -931,6 +926,8 @@ class AntigravityProvider(AntigravityAuthBase, ProviderInterface):
931
 
932
  return loaded
933
 
 
 
934
  # =========================================================================
935
  # MODEL UTILITIES
936
  # =========================================================================
@@ -1007,524 +1004,7 @@ class AntigravityProvider(AntigravityAuthBase, ProviderInterface):
1007
 
1008
  return "thinking_" + "_".join(key_parts) if key_parts else None
1009
 
1010
- # =========================================================================
1011
- # PROJECT ID DISCOVERY
1012
- # =========================================================================
1013
-
1014
- async def _discover_project_id(
1015
- self, credential_path: str, access_token: str, litellm_params: Dict[str, Any]
1016
- ) -> str:
1017
- """
1018
- Discovers the Google Cloud Project ID, with caching and onboarding for new accounts.
1019
-
1020
- This follows the official Gemini CLI discovery flow adapted for Antigravity:
1021
- 1. Check in-memory cache
1022
- 2. Check configured project_id override (litellm_params or env var)
1023
- 3. Check persisted project_id in credential file
1024
- 4. Call loadCodeAssist to check if user is already known (has currentTier)
1025
- - If currentTier exists AND cloudaicompanionProject returned: use server's project
1026
- - If no currentTier: user needs onboarding
1027
- 5. Onboard user (FREE tier: pass cloudaicompanionProject=None for server-managed)
1028
- 6. Fallback to GCP Resource Manager project listing
1029
-
1030
- Note: Unlike GeminiCli, Antigravity doesn't use tier-based credential prioritization,
1031
- but we still cache tier info for debugging and consistency.
1032
- """
1033
- lib_logger.debug(
1034
- f"Starting Antigravity project discovery for credential: {credential_path}"
1035
- )
1036
-
1037
- # Check in-memory cache first
1038
- if credential_path in self.project_id_cache:
1039
- cached_project = self.project_id_cache[credential_path]
1040
- lib_logger.debug(f"Using cached project ID: {cached_project}")
1041
- return cached_project
1042
-
1043
- # Check for configured project ID override (from litellm_params or env var)
1044
- configured_project_id = (
1045
- litellm_params.get("project_id")
1046
- or os.getenv("ANTIGRAVITY_PROJECT_ID")
1047
- or os.getenv("GOOGLE_CLOUD_PROJECT")
1048
- )
1049
- if configured_project_id:
1050
- lib_logger.debug(
1051
- f"Found configured project_id override: {configured_project_id}"
1052
- )
1053
-
1054
- # Load credentials from file to check for persisted project_id and tier
1055
- # Skip for env:// paths (environment-based credentials don't persist to files)
1056
- credential_index = self._parse_env_credential_path(credential_path)
1057
- if credential_index is None:
1058
- # Only try to load from file if it's not an env:// path
1059
- try:
1060
- with open(credential_path, "r") as f:
1061
- creds = json.load(f)
1062
-
1063
- metadata = creds.get("_proxy_metadata", {})
1064
- persisted_project_id = metadata.get("project_id")
1065
- persisted_tier = metadata.get("tier")
1066
-
1067
- if persisted_project_id:
1068
- lib_logger.info(
1069
- f"Loaded persisted project ID from credential file: {persisted_project_id}"
1070
- )
1071
- self.project_id_cache[credential_path] = persisted_project_id
1072
-
1073
- # Also load tier if available (for debugging/logging purposes)
1074
- if persisted_tier:
1075
- self.project_tier_cache[credential_path] = persisted_tier
1076
- lib_logger.debug(f"Loaded persisted tier: {persisted_tier}")
1077
-
1078
- return persisted_project_id
1079
- except (FileNotFoundError, json.JSONDecodeError, KeyError) as e:
1080
- lib_logger.debug(f"Could not load persisted project ID from file: {e}")
1081
-
1082
- lib_logger.debug(
1083
- "No cached or configured project ID found, initiating discovery..."
1084
- )
1085
- headers = {
1086
- "Authorization": f"Bearer {access_token}",
1087
- "Content-Type": "application/json",
1088
- }
1089
-
1090
- discovered_project_id = None
1091
- discovered_tier = None
1092
-
1093
- # Use production endpoint for loadCodeAssist (more reliable than sandbox URLs)
1094
- code_assist_endpoint = "https://cloudcode-pa.googleapis.com/v1internal"
1095
-
1096
- async with httpx.AsyncClient() as client:
1097
- # 1. Try discovery endpoint with loadCodeAssist
1098
- lib_logger.debug(
1099
- "Attempting project discovery via Code Assist loadCodeAssist endpoint..."
1100
- )
1101
- try:
1102
- # Build metadata - include duetProject only if we have a configured project
1103
- core_client_metadata = {
1104
- "ideType": "IDE_UNSPECIFIED",
1105
- "platform": "PLATFORM_UNSPECIFIED",
1106
- "pluginType": "GEMINI",
1107
- }
1108
- if configured_project_id:
1109
- core_client_metadata["duetProject"] = configured_project_id
1110
-
1111
- # Build load request - pass configured_project_id if available, otherwise None
1112
- load_request = {
1113
- "cloudaicompanionProject": configured_project_id, # Can be None
1114
- "metadata": core_client_metadata,
1115
- }
1116
-
1117
- lib_logger.debug(
1118
- f"Sending loadCodeAssist request with cloudaicompanionProject={configured_project_id}"
1119
- )
1120
- response = await client.post(
1121
- f"{code_assist_endpoint}:loadCodeAssist",
1122
- headers=headers,
1123
- json=load_request,
1124
- timeout=20,
1125
- )
1126
- response.raise_for_status()
1127
- data = response.json()
1128
-
1129
- # Log full response for debugging
1130
- lib_logger.debug(
1131
- f"loadCodeAssist full response keys: {list(data.keys())}"
1132
- )
1133
-
1134
- # Extract tier information
1135
- allowed_tiers = data.get("allowedTiers", [])
1136
- current_tier = data.get("currentTier")
1137
-
1138
- lib_logger.debug(f"=== Tier Information ===")
1139
- lib_logger.debug(f"currentTier: {current_tier}")
1140
- lib_logger.debug(f"allowedTiers count: {len(allowed_tiers)}")
1141
- for i, tier in enumerate(allowed_tiers):
1142
- tier_id = tier.get("id", "unknown")
1143
- is_default = tier.get("isDefault", False)
1144
- user_defined = tier.get("userDefinedCloudaicompanionProject", False)
1145
- lib_logger.debug(
1146
- f" Tier {i + 1}: id={tier_id}, isDefault={is_default}, userDefinedProject={user_defined}"
1147
- )
1148
- lib_logger.debug(f"========================")
1149
-
1150
- # Determine the current tier ID
1151
- current_tier_id = None
1152
- if current_tier:
1153
- current_tier_id = current_tier.get("id")
1154
- lib_logger.debug(f"User has currentTier: {current_tier_id}")
1155
-
1156
- # Check if user is already known to server (has currentTier)
1157
- if current_tier_id:
1158
- # User is already onboarded - check for project from server
1159
- server_project = data.get("cloudaicompanionProject")
1160
-
1161
- # Check if this tier requires user-defined project (paid tiers)
1162
- requires_user_project = any(
1163
- t.get("id") == current_tier_id
1164
- and t.get("userDefinedCloudaicompanionProject", False)
1165
- for t in allowed_tiers
1166
- )
1167
- is_free_tier = current_tier_id == "free-tier"
1168
-
1169
- if server_project:
1170
- # Server returned a project - use it (server wins)
1171
- project_id = server_project
1172
- lib_logger.debug(f"Server returned project: {project_id}")
1173
- elif configured_project_id:
1174
- # No server project but we have configured one - use it
1175
- project_id = configured_project_id
1176
- lib_logger.debug(
1177
- f"No server project, using configured: {project_id}"
1178
- )
1179
- elif is_free_tier:
1180
- # Free tier user without server project - try onboarding
1181
- lib_logger.debug(
1182
- "Free tier user with currentTier but no project - will try onboarding"
1183
- )
1184
- project_id = None
1185
- elif requires_user_project:
1186
- # Paid tier requires a project ID to be set
1187
- raise ValueError(
1188
- f"Paid tier '{current_tier_id}' requires setting ANTIGRAVITY_PROJECT_ID environment variable."
1189
- )
1190
- else:
1191
- # Unknown tier without project - proceed to onboarding
1192
- lib_logger.warning(
1193
- f"Tier '{current_tier_id}' has no project and none configured - will try onboarding"
1194
- )
1195
- project_id = None
1196
-
1197
- if project_id:
1198
- # Cache tier info
1199
- self.project_tier_cache[credential_path] = current_tier_id
1200
- discovered_tier = current_tier_id
1201
-
1202
- # Log appropriately based on tier
1203
- is_paid = current_tier_id and current_tier_id not in [
1204
- "free-tier",
1205
- "legacy-tier",
1206
- "unknown",
1207
- ]
1208
- if is_paid:
1209
- lib_logger.info(
1210
- f"Using Antigravity paid tier '{current_tier_id}' with project: {project_id}"
1211
- )
1212
- else:
1213
- lib_logger.info(
1214
- f"Discovered Antigravity project ID via loadCodeAssist: {project_id}"
1215
- )
1216
-
1217
- self.project_id_cache[credential_path] = project_id
1218
- discovered_project_id = project_id
1219
-
1220
- # Persist to credential file
1221
- await self._persist_project_metadata(
1222
- credential_path, project_id, discovered_tier
1223
- )
1224
-
1225
- return project_id
1226
-
1227
- # 2. User needs onboarding - no currentTier or no project found
1228
- lib_logger.info(
1229
- "No existing Antigravity session found (no currentTier), attempting to onboard user..."
1230
- )
1231
-
1232
- # Determine which tier to onboard with
1233
- onboard_tier = None
1234
- for tier in allowed_tiers:
1235
- if tier.get("isDefault"):
1236
- onboard_tier = tier
1237
- break
1238
-
1239
- # Fallback to legacy tier if no default
1240
- if not onboard_tier and allowed_tiers:
1241
- for tier in allowed_tiers:
1242
- if tier.get("id") == "legacy-tier":
1243
- onboard_tier = tier
1244
- break
1245
- if not onboard_tier:
1246
- onboard_tier = allowed_tiers[0]
1247
-
1248
- if not onboard_tier:
1249
- raise ValueError("No onboarding tiers available from server")
1250
-
1251
- tier_id = onboard_tier.get("id", "free-tier")
1252
- requires_user_project = onboard_tier.get(
1253
- "userDefinedCloudaicompanionProject", False
1254
- )
1255
-
1256
- lib_logger.debug(
1257
- f"Onboarding with tier: {tier_id}, requiresUserProject: {requires_user_project}"
1258
- )
1259
-
1260
- # Build onboard request based on tier type
1261
- # FREE tier: cloudaicompanionProject = None (server-managed)
1262
- # PAID tier: cloudaicompanionProject = configured_project_id
1263
- is_free_tier = tier_id == "free-tier"
1264
-
1265
- if is_free_tier:
1266
- # Free tier uses server-managed project
1267
- onboard_request = {
1268
- "tierId": tier_id,
1269
- "cloudaicompanionProject": None, # Server will create/manage
1270
- "metadata": core_client_metadata,
1271
- }
1272
- lib_logger.debug(
1273
- "Free tier onboarding: using server-managed project"
1274
- )
1275
- else:
1276
- # Paid/legacy tier requires user-provided project
1277
- if not configured_project_id and requires_user_project:
1278
- raise ValueError(
1279
- f"Tier '{tier_id}' requires setting ANTIGRAVITY_PROJECT_ID environment variable."
1280
- )
1281
- onboard_request = {
1282
- "tierId": tier_id,
1283
- "cloudaicompanionProject": configured_project_id,
1284
- "metadata": {
1285
- **core_client_metadata,
1286
- "duetProject": configured_project_id,
1287
- }
1288
- if configured_project_id
1289
- else core_client_metadata,
1290
- }
1291
- lib_logger.debug(
1292
- f"Paid tier onboarding: using project {configured_project_id}"
1293
- )
1294
-
1295
- lib_logger.debug("Initiating onboardUser request...")
1296
- lro_response = await client.post(
1297
- f"{code_assist_endpoint}:onboardUser",
1298
- headers=headers,
1299
- json=onboard_request,
1300
- timeout=30,
1301
- )
1302
- lro_response.raise_for_status()
1303
- lro_data = lro_response.json()
1304
- lib_logger.debug(
1305
- f"Initial onboarding response: done={lro_data.get('done')}"
1306
- )
1307
-
1308
- # Poll for onboarding completion (up to 5 minutes)
1309
- for i in range(150): # 150 × 2s = 5 minutes
1310
- if lro_data.get("done"):
1311
- lib_logger.debug(
1312
- f"Onboarding completed after {i} polling attempts"
1313
- )
1314
- break
1315
- await asyncio.sleep(2)
1316
- if (i + 1) % 15 == 0: # Log every 30 seconds
1317
- lib_logger.info(
1318
- f"Still waiting for onboarding completion... ({(i + 1) * 2}s elapsed)"
1319
- )
1320
- lib_logger.debug(
1321
- f"Polling onboarding status... (Attempt {i + 1}/150)"
1322
- )
1323
- lro_response = await client.post(
1324
- f"{code_assist_endpoint}:onboardUser",
1325
- headers=headers,
1326
- json=onboard_request,
1327
- timeout=30,
1328
- )
1329
- lro_response.raise_for_status()
1330
- lro_data = lro_response.json()
1331
-
1332
- if not lro_data.get("done"):
1333
- lib_logger.error("Onboarding process timed out after 5 minutes")
1334
- raise ValueError(
1335
- "Onboarding process timed out after 5 minutes. Please try again or contact support."
1336
- )
1337
-
1338
- # Extract project ID from LRO response
1339
- # Note: onboardUser returns response.cloudaicompanionProject as an object with .id
1340
- lro_response_data = lro_data.get("response", {})
1341
- lro_project_obj = lro_response_data.get("cloudaicompanionProject", {})
1342
- project_id = (
1343
- lro_project_obj.get("id")
1344
- if isinstance(lro_project_obj, dict)
1345
- else None
1346
- )
1347
-
1348
- # Fallback to configured project if LRO didn't return one
1349
- if not project_id and configured_project_id:
1350
- project_id = configured_project_id
1351
- lib_logger.debug(
1352
- f"LRO didn't return project, using configured: {project_id}"
1353
- )
1354
-
1355
- if not project_id:
1356
- lib_logger.error(
1357
- "Onboarding completed but no project ID in response and none configured"
1358
- )
1359
- raise ValueError(
1360
- "Onboarding completed, but no project ID was returned. "
1361
- "For paid tiers, set ANTIGRAVITY_PROJECT_ID environment variable."
1362
- )
1363
-
1364
- lib_logger.debug(
1365
- f"Successfully extracted project ID from onboarding response: {project_id}"
1366
- )
1367
-
1368
- # Cache tier info
1369
- self.project_tier_cache[credential_path] = tier_id
1370
- discovered_tier = tier_id
1371
- lib_logger.debug(f"Cached tier information: {tier_id}")
1372
-
1373
- # Log concise message based on tier
1374
- is_paid = tier_id and tier_id not in ["free-tier", "legacy-tier"]
1375
- if is_paid:
1376
- lib_logger.info(
1377
- f"Using Antigravity paid tier '{tier_id}' with project: {project_id}"
1378
- )
1379
- else:
1380
- lib_logger.info(
1381
- f"Successfully onboarded user and discovered project ID: {project_id}"
1382
- )
1383
-
1384
- self.project_id_cache[credential_path] = project_id
1385
- discovered_project_id = project_id
1386
-
1387
- # Persist to credential file
1388
- await self._persist_project_metadata(
1389
- credential_path, project_id, discovered_tier
1390
- )
1391
-
1392
- return project_id
1393
-
1394
- except httpx.HTTPStatusError as e:
1395
- error_body = ""
1396
- try:
1397
- error_body = e.response.text
1398
- except Exception:
1399
- pass
1400
- if e.response.status_code == 403:
1401
- lib_logger.error(
1402
- f"Antigravity Code Assist API access denied (403). Response: {error_body}"
1403
- )
1404
- lib_logger.error(
1405
- "Possible causes: 1) cloudaicompanion.googleapis.com API not enabled, 2) Wrong project ID for paid tier, 3) Account lacks permissions"
1406
- )
1407
- elif e.response.status_code == 404:
1408
- lib_logger.warning(
1409
- f"Antigravity Code Assist endpoint not found (404). Falling back to project listing."
1410
- )
1411
- elif e.response.status_code == 412:
1412
- # Precondition Failed - often means wrong project for free tier onboarding
1413
- lib_logger.error(
1414
- f"Precondition failed (412): {error_body}. This may mean the project ID is incompatible with the selected tier."
1415
- )
1416
- else:
1417
- lib_logger.warning(
1418
- f"Antigravity onboarding/discovery failed with status {e.response.status_code}: {error_body}. Falling back to project listing."
1419
- )
1420
- except httpx.RequestError as e:
1421
- lib_logger.warning(
1422
- f"Antigravity onboarding/discovery network error: {e}. Falling back to project listing."
1423
- )
1424
-
1425
- # 3. Fallback to listing all available GCP projects (last resort)
1426
- lib_logger.debug(
1427
- "Attempting to discover project via GCP Resource Manager API..."
1428
- )
1429
- try:
1430
- async with httpx.AsyncClient() as client:
1431
- lib_logger.debug(
1432
- "Querying Cloud Resource Manager for available projects..."
1433
- )
1434
- response = await client.get(
1435
- "https://cloudresourcemanager.googleapis.com/v1/projects",
1436
- headers=headers,
1437
- timeout=20,
1438
- )
1439
- response.raise_for_status()
1440
- projects = response.json().get("projects", [])
1441
- lib_logger.debug(f"Found {len(projects)} total projects")
1442
- active_projects = [
1443
- p for p in projects if p.get("lifecycleState") == "ACTIVE"
1444
- ]
1445
- lib_logger.debug(f"Found {len(active_projects)} active projects")
1446
-
1447
- if not projects:
1448
- lib_logger.error(
1449
- "No GCP projects found for this account. Please create a project in Google Cloud Console."
1450
- )
1451
- elif not active_projects:
1452
- lib_logger.error(
1453
- "No active GCP projects found. Please activate a project in Google Cloud Console."
1454
- )
1455
- else:
1456
- project_id = active_projects[0]["projectId"]
1457
- lib_logger.info(
1458
- f"Discovered Antigravity project ID from active projects list: {project_id}"
1459
- )
1460
- lib_logger.debug(
1461
- f"Selected first active project: {project_id} (out of {len(active_projects)} active projects)"
1462
- )
1463
- self.project_id_cache[credential_path] = project_id
1464
- discovered_project_id = project_id
1465
-
1466
- # Persist to credential file (no tier info from resource manager)
1467
- await self._persist_project_metadata(
1468
- credential_path, project_id, None
1469
- )
1470
-
1471
- return project_id
1472
- except httpx.HTTPStatusError as e:
1473
- if e.response.status_code == 403:
1474
- lib_logger.error(
1475
- "Failed to list GCP projects due to a 403 Forbidden error. The Cloud Resource Manager API may not be enabled, or your account lacks the 'resourcemanager.projects.list' permission."
1476
- )
1477
- else:
1478
- lib_logger.error(
1479
- f"Failed to list GCP projects with status {e.response.status_code}: {e}"
1480
- )
1481
- except httpx.RequestError as e:
1482
- lib_logger.error(f"Network error while listing GCP projects: {e}")
1483
-
1484
- raise ValueError(
1485
- "Could not auto-discover Antigravity project ID. Possible causes:\n"
1486
- " 1. The cloudaicompanion.googleapis.com API is not enabled (enable it in Google Cloud Console)\n"
1487
- " 2. No active GCP projects exist for this account (create one in Google Cloud Console)\n"
1488
- " 3. Account lacks necessary permissions\n"
1489
- "To manually specify a project, set ANTIGRAVITY_PROJECT_ID in your .env file."
1490
- )
1491
-
1492
- async def _persist_project_metadata(
1493
- self, credential_path: str, project_id: str, tier: Optional[str]
1494
- ):
1495
- """Persists project ID and tier to the credential file for faster future startups."""
1496
- # Skip persistence for env:// paths (environment-based credentials)
1497
- credential_index = self._parse_env_credential_path(credential_path)
1498
- if credential_index is not None:
1499
- lib_logger.debug(
1500
- f"Skipping project metadata persistence for env:// credential path: {credential_path}"
1501
- )
1502
- return
1503
-
1504
- try:
1505
- # Load current credentials
1506
- with open(credential_path, "r") as f:
1507
- creds = json.load(f)
1508
-
1509
- # Update metadata
1510
- if "_proxy_metadata" not in creds:
1511
- creds["_proxy_metadata"] = {}
1512
-
1513
- creds["_proxy_metadata"]["project_id"] = project_id
1514
- if tier:
1515
- creds["_proxy_metadata"]["tier"] = tier
1516
-
1517
- # Save back using the existing save method (handles atomic writes and permissions)
1518
- await self._save_credentials(credential_path, creds)
1519
-
1520
- lib_logger.debug(
1521
- f"Persisted project_id and tier to credential file: {credential_path}"
1522
- )
1523
- except Exception as e:
1524
- lib_logger.warning(
1525
- f"Failed to persist project metadata to credential file: {e}"
1526
- )
1527
- # Non-fatal - just means slower startup next time
1528
 
1529
  # =========================================================================
1530
  # THINKING MODE SANITIZATION
@@ -2559,9 +2039,9 @@ class AntigravityProvider(AntigravityAuthBase, ProviderInterface):
2559
  f"Ignoring duplicate - this may indicate malformed conversation history."
2560
  )
2561
  continue
2562
- #lib_logger.debug(
2563
  # f"[Grouping] Collected response for ID: {resp_id}"
2564
- #)
2565
  collected_responses[resp_id] = resp
2566
 
2567
  # Try to satisfy pending groups (newest first)
@@ -2576,10 +2056,10 @@ class AntigravityProvider(AntigravityAuthBase, ProviderInterface):
2576
  collected_responses.pop(gid) for gid in group_ids
2577
  ]
2578
  new_contents.append({"parts": group_responses, "role": "user"})
2579
- #lib_logger.debug(
2580
  # f"[Grouping] Satisfied group with {len(group_responses)} responses: "
2581
  # f"ids={group_ids}"
2582
- #)
2583
  pending_groups.pop(i)
2584
  break
2585
  continue
@@ -2599,10 +2079,10 @@ class AntigravityProvider(AntigravityAuthBase, ProviderInterface):
2599
  ]
2600
 
2601
  if call_ids:
2602
- #lib_logger.debug(
2603
  # f"[Grouping] Created pending group expecting {len(call_ids)} responses: "
2604
  # f"ids={call_ids}, names={func_names}"
2605
- #)
2606
  pending_groups.append(
2607
  {
2608
  "ids": call_ids,
 
718
  def __init__(self):
719
  super().__init__()
720
  self.model_definitions = ModelDefinitions()
721
+ # NOTE: project_id_cache and project_tier_cache are inherited from AntigravityAuthBase
 
 
 
 
 
722
 
723
  # Base URL management
724
  self._base_url_index = 0
 
926
 
927
  return loaded
928
 
929
+ # NOTE: _post_auth_discovery() is inherited from AntigravityAuthBase
930
+
931
  # =========================================================================
932
  # MODEL UTILITIES
933
  # =========================================================================
 
1004
 
1005
  return "thinking_" + "_".join(key_parts) if key_parts else None
1006
 
1007
+ # NOTE: _discover_project_id() and _persist_project_metadata() are inherited from AntigravityAuthBase
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1008
 
1009
  # =========================================================================
1010
  # THINKING MODE SANITIZATION
 
2039
  f"Ignoring duplicate - this may indicate malformed conversation history."
2040
  )
2041
  continue
2042
+ # lib_logger.debug(
2043
  # f"[Grouping] Collected response for ID: {resp_id}"
2044
+ # )
2045
  collected_responses[resp_id] = resp
2046
 
2047
  # Try to satisfy pending groups (newest first)
 
2056
  collected_responses.pop(gid) for gid in group_ids
2057
  ]
2058
  new_contents.append({"parts": group_responses, "role": "user"})
2059
+ # lib_logger.debug(
2060
  # f"[Grouping] Satisfied group with {len(group_responses)} responses: "
2061
  # f"ids={group_ids}"
2062
+ # )
2063
  pending_groups.pop(i)
2064
  break
2065
  continue
 
2079
  ]
2080
 
2081
  if call_ids:
2082
+ # lib_logger.debug(
2083
  # f"[Grouping] Created pending group expecting {len(call_ids)} responses: "
2084
  # f"ids={call_ids}, names={func_names}"
2085
+ # )
2086
  pending_groups.append(
2087
  {
2088
  "ids": call_ids,
src/rotator_library/providers/gemini_auth_base.py CHANGED
@@ -1,15 +1,35 @@
1
  # src/rotator_library/providers/gemini_auth_base.py
2
 
 
 
 
 
 
 
 
 
 
3
  from .google_oauth_base import GoogleOAuthBase
4
 
 
 
 
 
 
 
5
  class GeminiAuthBase(GoogleOAuthBase):
6
  """
7
  Gemini CLI OAuth2 authentication implementation.
8
-
9
  Inherits all OAuth functionality from GoogleOAuthBase with Gemini-specific configuration.
 
 
 
10
  """
11
-
12
- CLIENT_ID = "681255809395-oo8ft2oprdrnp9e3aqf6av3hmdib135j.apps.googleusercontent.com"
 
 
13
  CLIENT_SECRET = "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"
14
  OAUTH_SCOPES = [
15
  "https://www.googleapis.com/auth/cloud-platform",
@@ -18,4 +38,606 @@ class GeminiAuthBase(GoogleOAuthBase):
18
  ]
19
  ENV_PREFIX = "GEMINI_CLI"
20
  CALLBACK_PORT = 8085
21
- CALLBACK_PATH = "/oauth2callback"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  # src/rotator_library/providers/gemini_auth_base.py
2
 
3
+ import asyncio
4
+ import json
5
+ import logging
6
+ import os
7
+ from pathlib import Path
8
+ from typing import Any, Dict, Optional, List
9
+
10
+ import httpx
11
+
12
  from .google_oauth_base import GoogleOAuthBase
13
 
14
+ lib_logger = logging.getLogger("rotator_library")
15
+
16
+ # Code Assist endpoint for project discovery
17
+ CODE_ASSIST_ENDPOINT = "https://cloudcode-pa.googleapis.com/v1internal"
18
+
19
+
20
  class GeminiAuthBase(GoogleOAuthBase):
21
  """
22
  Gemini CLI OAuth2 authentication implementation.
23
+
24
  Inherits all OAuth functionality from GoogleOAuthBase with Gemini-specific configuration.
25
+
26
+ Also provides project/tier discovery functionality that runs during authentication,
27
+ ensuring credentials have their tier and project_id cached before any API requests.
28
  """
29
+
30
+ CLIENT_ID = (
31
+ "681255809395-oo8ft2oprdrnp9e3aqf6av3hmdib135j.apps.googleusercontent.com"
32
+ )
33
  CLIENT_SECRET = "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"
34
  OAUTH_SCOPES = [
35
  "https://www.googleapis.com/auth/cloud-platform",
 
38
  ]
39
  ENV_PREFIX = "GEMINI_CLI"
40
  CALLBACK_PORT = 8085
41
+ CALLBACK_PATH = "/oauth2callback"
42
+
43
+ def __init__(self):
44
+ super().__init__()
45
+ # Project and tier caches - shared between auth base and provider
46
+ self.project_id_cache: Dict[str, str] = {}
47
+ self.project_tier_cache: Dict[str, str] = {}
48
+
49
+ # =========================================================================
50
+ # POST-AUTH DISCOVERY HOOK
51
+ # =========================================================================
52
+
53
+ async def _post_auth_discovery(
54
+ self, credential_path: str, access_token: str
55
+ ) -> None:
56
+ """
57
+ Discover and cache tier/project information immediately after OAuth authentication.
58
+
59
+ This is called by GoogleOAuthBase._perform_interactive_oauth() after successful auth,
60
+ ensuring tier and project_id are cached during the authentication flow rather than
61
+ waiting for the first API request.
62
+
63
+ Args:
64
+ credential_path: Path to the credential file
65
+ access_token: The newly obtained access token
66
+ """
67
+ lib_logger.debug(
68
+ f"Starting post-auth discovery for GeminiCli credential: {Path(credential_path).name}"
69
+ )
70
+
71
+ # Skip if already discovered (shouldn't happen during fresh auth, but be defensive)
72
+ if (
73
+ credential_path in self.project_id_cache
74
+ and credential_path in self.project_tier_cache
75
+ ):
76
+ lib_logger.debug(
77
+ f"Tier and project already cached for {Path(credential_path).name}, skipping discovery"
78
+ )
79
+ return
80
+
81
+ # Call _discover_project_id which handles tier/project discovery and persistence
82
+ # Pass empty litellm_params since we're in auth context (no model-specific overrides)
83
+ project_id = await self._discover_project_id(
84
+ credential_path, access_token, litellm_params={}
85
+ )
86
+
87
+ tier = self.project_tier_cache.get(credential_path, "unknown")
88
+ lib_logger.info(
89
+ f"Post-auth discovery complete for {Path(credential_path).name}: "
90
+ f"tier={tier}, project={project_id}"
91
+ )
92
+
93
+ # =========================================================================
94
+ # PROJECT ID DISCOVERY
95
+ # =========================================================================
96
+
97
+ async def _discover_project_id(
98
+ self, credential_path: str, access_token: str, litellm_params: Dict[str, Any]
99
+ ) -> str:
100
+ """
101
+ Discovers the Google Cloud Project ID, with caching and onboarding for new accounts.
102
+
103
+ This follows the official Gemini CLI discovery flow:
104
+ 1. Check in-memory cache
105
+ 2. Check configured project_id override (litellm_params or env var)
106
+ 3. Check persisted project_id in credential file
107
+ 4. Call loadCodeAssist to check if user is already known (has currentTier)
108
+ - If currentTier exists AND cloudaicompanionProject returned: use server's project
109
+ - If currentTier exists but NO cloudaicompanionProject: use configured project_id (paid tier requires this)
110
+ - If no currentTier: user needs onboarding
111
+ 5. Onboard user based on tier:
112
+ - FREE tier: pass cloudaicompanionProject=None (server-managed)
113
+ - PAID tier: pass cloudaicompanionProject=configured_project_id
114
+ 6. Fallback to GCP Resource Manager project listing
115
+ """
116
+ lib_logger.debug(
117
+ f"Starting project discovery for credential: {credential_path}"
118
+ )
119
+
120
+ # Check in-memory cache first
121
+ if credential_path in self.project_id_cache:
122
+ cached_project = self.project_id_cache[credential_path]
123
+ lib_logger.debug(f"Using cached project ID: {cached_project}")
124
+ return cached_project
125
+
126
+ # Check for configured project ID override (from litellm_params or env var)
127
+ # This is REQUIRED for paid tier users per the official CLI behavior
128
+ configured_project_id = litellm_params.get("project_id") or os.getenv(
129
+ "GEMINI_CLI_PROJECT_ID"
130
+ )
131
+ if configured_project_id:
132
+ lib_logger.debug(
133
+ f"Found configured project_id override: {configured_project_id}"
134
+ )
135
+
136
+ # Load credentials from file to check for persisted project_id and tier
137
+ # Skip for env:// paths (environment-based credentials don't persist to files)
138
+ credential_index = self._parse_env_credential_path(credential_path)
139
+ if credential_index is None:
140
+ # Only try to load from file if it's not an env:// path
141
+ try:
142
+ with open(credential_path, "r") as f:
143
+ creds = json.load(f)
144
+
145
+ metadata = creds.get("_proxy_metadata", {})
146
+ persisted_project_id = metadata.get("project_id")
147
+ persisted_tier = metadata.get("tier")
148
+
149
+ if persisted_project_id:
150
+ lib_logger.info(
151
+ f"Loaded persisted project ID from credential file: {persisted_project_id}"
152
+ )
153
+ self.project_id_cache[credential_path] = persisted_project_id
154
+
155
+ # Also load tier if available
156
+ if persisted_tier:
157
+ self.project_tier_cache[credential_path] = persisted_tier
158
+ lib_logger.debug(f"Loaded persisted tier: {persisted_tier}")
159
+
160
+ return persisted_project_id
161
+ except (FileNotFoundError, json.JSONDecodeError, KeyError) as e:
162
+ lib_logger.debug(f"Could not load persisted project ID from file: {e}")
163
+
164
+ lib_logger.debug(
165
+ "No cached or configured project ID found, initiating discovery..."
166
+ )
167
+ headers = {
168
+ "Authorization": f"Bearer {access_token}",
169
+ "Content-Type": "application/json",
170
+ }
171
+
172
+ discovered_project_id = None
173
+ discovered_tier = None
174
+
175
+ async with httpx.AsyncClient() as client:
176
+ # 1. Try discovery endpoint with loadCodeAssist
177
+ lib_logger.debug(
178
+ "Attempting project discovery via Code Assist loadCodeAssist endpoint..."
179
+ )
180
+ try:
181
+ # Build metadata - include duetProject only if we have a configured project
182
+ core_client_metadata = {
183
+ "ideType": "IDE_UNSPECIFIED",
184
+ "platform": "PLATFORM_UNSPECIFIED",
185
+ "pluginType": "GEMINI",
186
+ }
187
+ if configured_project_id:
188
+ core_client_metadata["duetProject"] = configured_project_id
189
+
190
+ # Build load request - pass configured_project_id if available, otherwise None
191
+ load_request = {
192
+ "cloudaicompanionProject": configured_project_id, # Can be None
193
+ "metadata": core_client_metadata,
194
+ }
195
+
196
+ lib_logger.debug(
197
+ f"Sending loadCodeAssist request with cloudaicompanionProject={configured_project_id}"
198
+ )
199
+ response = await client.post(
200
+ f"{CODE_ASSIST_ENDPOINT}:loadCodeAssist",
201
+ headers=headers,
202
+ json=load_request,
203
+ timeout=20,
204
+ )
205
+ response.raise_for_status()
206
+ data = response.json()
207
+
208
+ # Log full response for debugging
209
+ lib_logger.debug(
210
+ f"loadCodeAssist full response keys: {list(data.keys())}"
211
+ )
212
+
213
+ # Extract and log ALL tier information for debugging
214
+ allowed_tiers = data.get("allowedTiers", [])
215
+ current_tier = data.get("currentTier")
216
+
217
+ lib_logger.debug(f"=== Tier Information ===")
218
+ lib_logger.debug(f"currentTier: {current_tier}")
219
+ lib_logger.debug(f"allowedTiers count: {len(allowed_tiers)}")
220
+ for i, tier in enumerate(allowed_tiers):
221
+ tier_id = tier.get("id", "unknown")
222
+ is_default = tier.get("isDefault", False)
223
+ user_defined = tier.get("userDefinedCloudaicompanionProject", False)
224
+ lib_logger.debug(
225
+ f" Tier {i + 1}: id={tier_id}, isDefault={is_default}, userDefinedProject={user_defined}"
226
+ )
227
+ lib_logger.debug(f"========================")
228
+
229
+ # Determine the current tier ID
230
+ current_tier_id = None
231
+ if current_tier:
232
+ current_tier_id = current_tier.get("id")
233
+ lib_logger.debug(f"User has currentTier: {current_tier_id}")
234
+
235
+ # Check if user is already known to server (has currentTier)
236
+ if current_tier_id:
237
+ # User is already onboarded - check for project from server
238
+ server_project = data.get("cloudaicompanionProject")
239
+
240
+ # Check if this tier requires user-defined project (paid tiers)
241
+ requires_user_project = any(
242
+ t.get("id") == current_tier_id
243
+ and t.get("userDefinedCloudaicompanionProject", False)
244
+ for t in allowed_tiers
245
+ )
246
+ is_free_tier = current_tier_id == "free-tier"
247
+
248
+ if server_project:
249
+ # Server returned a project - use it (server wins)
250
+ # This is the normal case for FREE tier users
251
+ project_id = server_project
252
+ lib_logger.debug(f"Server returned project: {project_id}")
253
+ elif configured_project_id:
254
+ # No server project but we have configured one - use it
255
+ # This is the PAID TIER case where server doesn't return a project
256
+ project_id = configured_project_id
257
+ lib_logger.debug(
258
+ f"No server project, using configured: {project_id}"
259
+ )
260
+ elif is_free_tier:
261
+ # Free tier user without server project - this shouldn't happen normally
262
+ # but let's not fail, just proceed to onboarding
263
+ lib_logger.debug(
264
+ "Free tier user with currentTier but no project - will try onboarding"
265
+ )
266
+ project_id = None
267
+ elif requires_user_project:
268
+ # Paid tier requires a project ID to be set
269
+ raise ValueError(
270
+ f"Paid tier '{current_tier_id}' requires setting GEMINI_CLI_PROJECT_ID environment variable. "
271
+ "See https://goo.gle/gemini-cli-auth-docs#workspace-gca"
272
+ )
273
+ else:
274
+ # Unknown tier without project - proceed carefully
275
+ lib_logger.warning(
276
+ f"Tier '{current_tier_id}' has no project and none configured - will try onboarding"
277
+ )
278
+ project_id = None
279
+
280
+ if project_id:
281
+ # Cache tier info
282
+ self.project_tier_cache[credential_path] = current_tier_id
283
+ discovered_tier = current_tier_id
284
+
285
+ # Log appropriately based on tier
286
+ is_paid = current_tier_id and current_tier_id not in [
287
+ "free-tier",
288
+ "legacy-tier",
289
+ "unknown",
290
+ ]
291
+ if is_paid:
292
+ lib_logger.info(
293
+ f"Using Gemini paid tier '{current_tier_id}' with project: {project_id}"
294
+ )
295
+ else:
296
+ lib_logger.info(
297
+ f"Discovered Gemini project ID via loadCodeAssist: {project_id}"
298
+ )
299
+
300
+ self.project_id_cache[credential_path] = project_id
301
+ discovered_project_id = project_id
302
+
303
+ # Persist to credential file
304
+ await self._persist_project_metadata(
305
+ credential_path, project_id, discovered_tier
306
+ )
307
+
308
+ return project_id
309
+
310
+ # 2. User needs onboarding - no currentTier
311
+ lib_logger.info(
312
+ "No existing Gemini session found (no currentTier), attempting to onboard user..."
313
+ )
314
+
315
+ # Determine which tier to onboard with
316
+ onboard_tier = None
317
+ for tier in allowed_tiers:
318
+ if tier.get("isDefault"):
319
+ onboard_tier = tier
320
+ break
321
+
322
+ # Fallback to LEGACY tier if no default (requires user project)
323
+ if not onboard_tier and allowed_tiers:
324
+ # Look for legacy-tier as fallback
325
+ for tier in allowed_tiers:
326
+ if tier.get("id") == "legacy-tier":
327
+ onboard_tier = tier
328
+ break
329
+ # If still no tier, use first available
330
+ if not onboard_tier:
331
+ onboard_tier = allowed_tiers[0]
332
+
333
+ if not onboard_tier:
334
+ raise ValueError("No onboarding tiers available from server")
335
+
336
+ tier_id = onboard_tier.get("id", "free-tier")
337
+ requires_user_project = onboard_tier.get(
338
+ "userDefinedCloudaicompanionProject", False
339
+ )
340
+
341
+ lib_logger.debug(
342
+ f"Onboarding with tier: {tier_id}, requiresUserProject: {requires_user_project}"
343
+ )
344
+
345
+ # Build onboard request based on tier type (following official CLI logic)
346
+ # FREE tier: cloudaicompanionProject = None (server-managed)
347
+ # PAID tier: cloudaicompanionProject = configured_project_id (user must provide)
348
+ is_free_tier = tier_id == "free-tier"
349
+
350
+ if is_free_tier:
351
+ # Free tier uses server-managed project
352
+ onboard_request = {
353
+ "tierId": tier_id,
354
+ "cloudaicompanionProject": None, # Server will create/manage
355
+ "metadata": core_client_metadata,
356
+ }
357
+ lib_logger.debug(
358
+ "Free tier onboarding: using server-managed project"
359
+ )
360
+ else:
361
+ # Paid/legacy tier requires user-provided project
362
+ if not configured_project_id and requires_user_project:
363
+ raise ValueError(
364
+ f"Tier '{tier_id}' requires setting GEMINI_CLI_PROJECT_ID environment variable. "
365
+ "See https://goo.gle/gemini-cli-auth-docs#workspace-gca"
366
+ )
367
+ onboard_request = {
368
+ "tierId": tier_id,
369
+ "cloudaicompanionProject": configured_project_id,
370
+ "metadata": {
371
+ **core_client_metadata,
372
+ "duetProject": configured_project_id,
373
+ }
374
+ if configured_project_id
375
+ else core_client_metadata,
376
+ }
377
+ lib_logger.debug(
378
+ f"Paid tier onboarding: using project {configured_project_id}"
379
+ )
380
+
381
+ lib_logger.debug("Initiating onboardUser request...")
382
+ lro_response = await client.post(
383
+ f"{CODE_ASSIST_ENDPOINT}:onboardUser",
384
+ headers=headers,
385
+ json=onboard_request,
386
+ timeout=30,
387
+ )
388
+ lro_response.raise_for_status()
389
+ lro_data = lro_response.json()
390
+ lib_logger.debug(
391
+ f"Initial onboarding response: done={lro_data.get('done')}"
392
+ )
393
+
394
+ for i in range(150): # Poll for up to 5 minutes (150 × 2s)
395
+ if lro_data.get("done"):
396
+ lib_logger.debug(
397
+ f"Onboarding completed after {i} polling attempts"
398
+ )
399
+ break
400
+ await asyncio.sleep(2)
401
+ if (i + 1) % 15 == 0: # Log every 30 seconds
402
+ lib_logger.info(
403
+ f"Still waiting for onboarding completion... ({(i + 1) * 2}s elapsed)"
404
+ )
405
+ lib_logger.debug(
406
+ f"Polling onboarding status... (Attempt {i + 1}/150)"
407
+ )
408
+ lro_response = await client.post(
409
+ f"{CODE_ASSIST_ENDPOINT}:onboardUser",
410
+ headers=headers,
411
+ json=onboard_request,
412
+ timeout=30,
413
+ )
414
+ lro_response.raise_for_status()
415
+ lro_data = lro_response.json()
416
+
417
+ if not lro_data.get("done"):
418
+ lib_logger.error("Onboarding process timed out after 5 minutes")
419
+ raise ValueError(
420
+ "Onboarding process timed out after 5 minutes. Please try again or contact support."
421
+ )
422
+
423
+ # Extract project ID from LRO response
424
+ # Note: onboardUser returns response.cloudaicompanionProject as an object with .id
425
+ lro_response_data = lro_data.get("response", {})
426
+ lro_project_obj = lro_response_data.get("cloudaicompanionProject", {})
427
+ project_id = (
428
+ lro_project_obj.get("id")
429
+ if isinstance(lro_project_obj, dict)
430
+ else None
431
+ )
432
+
433
+ # Fallback to configured project if LRO didn't return one
434
+ if not project_id and configured_project_id:
435
+ project_id = configured_project_id
436
+ lib_logger.debug(
437
+ f"LRO didn't return project, using configured: {project_id}"
438
+ )
439
+
440
+ if not project_id:
441
+ lib_logger.error(
442
+ "Onboarding completed but no project ID in response and none configured"
443
+ )
444
+ raise ValueError(
445
+ "Onboarding completed, but no project ID was returned. "
446
+ "For paid tiers, set GEMINI_CLI_PROJECT_ID environment variable."
447
+ )
448
+
449
+ lib_logger.debug(
450
+ f"Successfully extracted project ID from onboarding response: {project_id}"
451
+ )
452
+
453
+ # Cache tier info
454
+ self.project_tier_cache[credential_path] = tier_id
455
+ discovered_tier = tier_id
456
+ lib_logger.debug(f"Cached tier information: {tier_id}")
457
+
458
+ # Log concise message for paid projects
459
+ is_paid = tier_id and tier_id not in ["free-tier", "legacy-tier"]
460
+ if is_paid:
461
+ lib_logger.info(
462
+ f"Using Gemini paid tier '{tier_id}' with project: {project_id}"
463
+ )
464
+ else:
465
+ lib_logger.info(
466
+ f"Successfully onboarded user and discovered project ID: {project_id}"
467
+ )
468
+
469
+ self.project_id_cache[credential_path] = project_id
470
+ discovered_project_id = project_id
471
+
472
+ # Persist to credential file
473
+ await self._persist_project_metadata(
474
+ credential_path, project_id, discovered_tier
475
+ )
476
+
477
+ return project_id
478
+
479
+ except httpx.HTTPStatusError as e:
480
+ error_body = ""
481
+ try:
482
+ error_body = e.response.text
483
+ except Exception:
484
+ pass
485
+ if e.response.status_code == 403:
486
+ lib_logger.error(
487
+ f"Gemini Code Assist API access denied (403). Response: {error_body}"
488
+ )
489
+ lib_logger.error(
490
+ "Possible causes: 1) cloudaicompanion.googleapis.com API not enabled, 2) Wrong project ID for paid tier, 3) Account lacks permissions"
491
+ )
492
+ elif e.response.status_code == 404:
493
+ lib_logger.warning(
494
+ f"Gemini Code Assist endpoint not found (404). Falling back to project listing."
495
+ )
496
+ elif e.response.status_code == 412:
497
+ # Precondition Failed - often means wrong project for free tier onboarding
498
+ lib_logger.error(
499
+ f"Precondition failed (412): {error_body}. This may mean the project ID is incompatible with the selected tier."
500
+ )
501
+ else:
502
+ lib_logger.warning(
503
+ f"Gemini onboarding/discovery failed with status {e.response.status_code}: {error_body}. Falling back to project listing."
504
+ )
505
+ except httpx.RequestError as e:
506
+ lib_logger.warning(
507
+ f"Gemini onboarding/discovery network error: {e}. Falling back to project listing."
508
+ )
509
+
510
+ # 3. Fallback to listing all available GCP projects (last resort)
511
+ lib_logger.debug(
512
+ "Attempting to discover project via GCP Resource Manager API..."
513
+ )
514
+ try:
515
+ async with httpx.AsyncClient() as client:
516
+ lib_logger.debug(
517
+ "Querying Cloud Resource Manager for available projects..."
518
+ )
519
+ response = await client.get(
520
+ "https://cloudresourcemanager.googleapis.com/v1/projects",
521
+ headers=headers,
522
+ timeout=20,
523
+ )
524
+ response.raise_for_status()
525
+ projects = response.json().get("projects", [])
526
+ lib_logger.debug(f"Found {len(projects)} total projects")
527
+ active_projects = [
528
+ p for p in projects if p.get("lifecycleState") == "ACTIVE"
529
+ ]
530
+ lib_logger.debug(f"Found {len(active_projects)} active projects")
531
+
532
+ if not projects:
533
+ lib_logger.error(
534
+ "No GCP projects found for this account. Please create a project in Google Cloud Console."
535
+ )
536
+ elif not active_projects:
537
+ lib_logger.error(
538
+ "No active GCP projects found. Please activate a project in Google Cloud Console."
539
+ )
540
+ else:
541
+ project_id = active_projects[0]["projectId"]
542
+ lib_logger.info(
543
+ f"Discovered Gemini project ID from active projects list: {project_id}"
544
+ )
545
+ lib_logger.debug(
546
+ f"Selected first active project: {project_id} (out of {len(active_projects)} active projects)"
547
+ )
548
+ self.project_id_cache[credential_path] = project_id
549
+ discovered_project_id = project_id
550
+
551
+ # Persist to credential file (no tier info from resource manager)
552
+ await self._persist_project_metadata(
553
+ credential_path, project_id, None
554
+ )
555
+
556
+ return project_id
557
+ except httpx.HTTPStatusError as e:
558
+ if e.response.status_code == 403:
559
+ lib_logger.error(
560
+ "Failed to list GCP projects due to a 403 Forbidden error. The Cloud Resource Manager API may not be enabled, or your account lacks the 'resourcemanager.projects.list' permission."
561
+ )
562
+ else:
563
+ lib_logger.error(
564
+ f"Failed to list GCP projects with status {e.response.status_code}: {e}"
565
+ )
566
+ except httpx.RequestError as e:
567
+ lib_logger.error(f"Network error while listing GCP projects: {e}")
568
+
569
+ raise ValueError(
570
+ "Could not auto-discover Gemini project ID. Possible causes:\n"
571
+ " 1. The cloudaicompanion.googleapis.com API is not enabled (enable it in Google Cloud Console)\n"
572
+ " 2. No active GCP projects exist for this account (create one in Google Cloud Console)\n"
573
+ " 3. Account lacks necessary permissions\n"
574
+ "To manually specify a project, set GEMINI_CLI_PROJECT_ID in your .env file."
575
+ )
576
+
577
+ async def _persist_project_metadata(
578
+ self, credential_path: str, project_id: str, tier: Optional[str]
579
+ ):
580
+ """Persists project ID and tier to the credential file for faster future startups."""
581
+ # Skip persistence for env:// paths (environment-based credentials)
582
+ credential_index = self._parse_env_credential_path(credential_path)
583
+ if credential_index is not None:
584
+ lib_logger.debug(
585
+ f"Skipping project metadata persistence for env:// credential path: {credential_path}"
586
+ )
587
+ return
588
+
589
+ try:
590
+ # Load current credentials
591
+ with open(credential_path, "r") as f:
592
+ creds = json.load(f)
593
+
594
+ # Update metadata
595
+ if "_proxy_metadata" not in creds:
596
+ creds["_proxy_metadata"] = {}
597
+
598
+ creds["_proxy_metadata"]["project_id"] = project_id
599
+ if tier:
600
+ creds["_proxy_metadata"]["tier"] = tier
601
+
602
+ # Save back using the existing save method (handles atomic writes and permissions)
603
+ await self._save_credentials(credential_path, creds)
604
+
605
+ lib_logger.debug(
606
+ f"Persisted project_id and tier to credential file: {credential_path}"
607
+ )
608
+ except Exception as e:
609
+ lib_logger.warning(
610
+ f"Failed to persist project metadata to credential file: {e}"
611
+ )
612
+ # Non-fatal - just means slower startup next time
613
+
614
+ # =========================================================================
615
+ # CREDENTIAL MANAGEMENT OVERRIDES
616
+ # =========================================================================
617
+
618
+ def _get_provider_file_prefix(self) -> str:
619
+ """Return the file prefix for Gemini CLI credentials."""
620
+ return "gemini_cli"
621
+
622
+ def build_env_lines(self, creds: Dict[str, Any], cred_number: int) -> List[str]:
623
+ """
624
+ Generate .env file lines for a Gemini CLI credential.
625
+
626
+ Includes tier and project_id from _proxy_metadata.
627
+ """
628
+ # Get base lines from parent class
629
+ lines = super().build_env_lines(creds, cred_number)
630
+
631
+ # Add Gemini-specific fields (tier and project_id)
632
+ metadata = creds.get("_proxy_metadata", {})
633
+ prefix = f"{self.ENV_PREFIX}_{cred_number}"
634
+
635
+ project_id = metadata.get("project_id", "")
636
+ tier = metadata.get("tier", "")
637
+
638
+ if project_id:
639
+ lines.append(f"{prefix}_PROJECT_ID={project_id}")
640
+ if tier:
641
+ lines.append(f"{prefix}_TIER={tier}")
642
+
643
+ return lines
src/rotator_library/providers/gemini_cli_provider.py CHANGED
@@ -383,12 +383,7 @@ class GeminiCliProvider(GeminiAuthBase, ProviderInterface):
383
  def __init__(self):
384
  super().__init__()
385
  self.model_definitions = ModelDefinitions()
386
- self.project_id_cache: Dict[
387
- str, str
388
- ] = {} # Cache project ID per credential path
389
- self.project_tier_cache: Dict[
390
- str, str
391
- ] = {} # Cache project tier per credential path
392
 
393
  # Gemini 3 configuration from environment
394
  memory_ttl = _env_int("GEMINI_CLI_SIGNATURE_CACHE_TTL", 3600)
@@ -580,6 +575,8 @@ class GeminiCliProvider(GeminiAuthBase, ProviderInterface):
580
 
581
  return loaded
582
 
 
 
583
  # =========================================================================
584
  # MODEL UTILITIES
585
  # =========================================================================
@@ -595,520 +592,7 @@ class GeminiCliProvider(GeminiAuthBase, ProviderInterface):
595
  return name[len(self._gemini3_tool_prefix) :]
596
  return name
597
 
598
- async def _discover_project_id(
599
- self, credential_path: str, access_token: str, litellm_params: Dict[str, Any]
600
- ) -> str:
601
- """
602
- Discovers the Google Cloud Project ID, with caching and onboarding for new accounts.
603
-
604
- This follows the official Gemini CLI discovery flow:
605
- 1. Check in-memory cache
606
- 2. Check configured project_id override (litellm_params or env var)
607
- 3. Check persisted project_id in credential file
608
- 4. Call loadCodeAssist to check if user is already known (has currentTier)
609
- - If currentTier exists AND cloudaicompanionProject returned: use server's project
610
- - If currentTier exists but NO cloudaicompanionProject: use configured project_id (paid tier requires this)
611
- - If no currentTier: user needs onboarding
612
- 5. Onboard user based on tier:
613
- - FREE tier: pass cloudaicompanionProject=None (server-managed)
614
- - PAID tier: pass cloudaicompanionProject=configured_project_id
615
- 6. Fallback to GCP Resource Manager project listing
616
- """
617
- lib_logger.debug(
618
- f"Starting project discovery for credential: {credential_path}"
619
- )
620
-
621
- # Check in-memory cache first
622
- if credential_path in self.project_id_cache:
623
- cached_project = self.project_id_cache[credential_path]
624
- lib_logger.debug(f"Using cached project ID: {cached_project}")
625
- return cached_project
626
-
627
- # Check for configured project ID override (from litellm_params or env var)
628
- # This is REQUIRED for paid tier users per the official CLI behavior
629
- configured_project_id = litellm_params.get("project_id")
630
- if configured_project_id:
631
- lib_logger.debug(
632
- f"Found configured project_id override: {configured_project_id}"
633
- )
634
-
635
- # Load credentials from file to check for persisted project_id and tier
636
- # Skip for env:// paths (environment-based credentials don't persist to files)
637
- credential_index = self._parse_env_credential_path(credential_path)
638
- if credential_index is None:
639
- # Only try to load from file if it's not an env:// path
640
- try:
641
- with open(credential_path, "r") as f:
642
- creds = json.load(f)
643
-
644
- metadata = creds.get("_proxy_metadata", {})
645
- persisted_project_id = metadata.get("project_id")
646
- persisted_tier = metadata.get("tier")
647
-
648
- if persisted_project_id:
649
- lib_logger.info(
650
- f"Loaded persisted project ID from credential file: {persisted_project_id}"
651
- )
652
- self.project_id_cache[credential_path] = persisted_project_id
653
-
654
- # Also load tier if available
655
- if persisted_tier:
656
- self.project_tier_cache[credential_path] = persisted_tier
657
- lib_logger.debug(f"Loaded persisted tier: {persisted_tier}")
658
-
659
- return persisted_project_id
660
- except (FileNotFoundError, json.JSONDecodeError, KeyError) as e:
661
- lib_logger.debug(f"Could not load persisted project ID from file: {e}")
662
-
663
- lib_logger.debug(
664
- "No cached or configured project ID found, initiating discovery..."
665
- )
666
- headers = {
667
- "Authorization": f"Bearer {access_token}",
668
- "Content-Type": "application/json",
669
- }
670
-
671
- discovered_project_id = None
672
- discovered_tier = None
673
-
674
- async with httpx.AsyncClient() as client:
675
- # 1. Try discovery endpoint with loadCodeAssist
676
- lib_logger.debug(
677
- "Attempting project discovery via Code Assist loadCodeAssist endpoint..."
678
- )
679
- try:
680
- # Build metadata - include duetProject only if we have a configured project
681
- core_client_metadata = {
682
- "ideType": "IDE_UNSPECIFIED",
683
- "platform": "PLATFORM_UNSPECIFIED",
684
- "pluginType": "GEMINI",
685
- }
686
- if configured_project_id:
687
- core_client_metadata["duetProject"] = configured_project_id
688
-
689
- # Build load request - pass configured_project_id if available, otherwise None
690
- load_request = {
691
- "cloudaicompanionProject": configured_project_id, # Can be None
692
- "metadata": core_client_metadata,
693
- }
694
-
695
- lib_logger.debug(
696
- f"Sending loadCodeAssist request with cloudaicompanionProject={configured_project_id}"
697
- )
698
- response = await client.post(
699
- f"{CODE_ASSIST_ENDPOINT}:loadCodeAssist",
700
- headers=headers,
701
- json=load_request,
702
- timeout=20,
703
- )
704
- response.raise_for_status()
705
- data = response.json()
706
-
707
- # Log full response for debugging
708
- lib_logger.debug(
709
- f"loadCodeAssist full response keys: {list(data.keys())}"
710
- )
711
-
712
- # Extract and log ALL tier information for debugging
713
- allowed_tiers = data.get("allowedTiers", [])
714
- current_tier = data.get("currentTier")
715
-
716
- lib_logger.debug(f"=== Tier Information ===")
717
- lib_logger.debug(f"currentTier: {current_tier}")
718
- lib_logger.debug(f"allowedTiers count: {len(allowed_tiers)}")
719
- for i, tier in enumerate(allowed_tiers):
720
- tier_id = tier.get("id", "unknown")
721
- is_default = tier.get("isDefault", False)
722
- user_defined = tier.get("userDefinedCloudaicompanionProject", False)
723
- lib_logger.debug(
724
- f" Tier {i + 1}: id={tier_id}, isDefault={is_default}, userDefinedProject={user_defined}"
725
- )
726
- lib_logger.debug(f"========================")
727
-
728
- # Determine the current tier ID
729
- current_tier_id = None
730
- if current_tier:
731
- current_tier_id = current_tier.get("id")
732
- lib_logger.debug(f"User has currentTier: {current_tier_id}")
733
-
734
- # Check if user is already known to server (has currentTier)
735
- if current_tier_id:
736
- # User is already onboarded - check for project from server
737
- server_project = data.get("cloudaicompanionProject")
738
-
739
- # Check if this tier requires user-defined project (paid tiers)
740
- requires_user_project = any(
741
- t.get("id") == current_tier_id
742
- and t.get("userDefinedCloudaicompanionProject", False)
743
- for t in allowed_tiers
744
- )
745
- is_free_tier = current_tier_id == "free-tier"
746
-
747
- if server_project:
748
- # Server returned a project - use it (server wins)
749
- # This is the normal case for FREE tier users
750
- project_id = server_project
751
- lib_logger.debug(f"Server returned project: {project_id}")
752
- elif configured_project_id:
753
- # No server project but we have configured one - use it
754
- # This is the PAID TIER case where server doesn't return a project
755
- project_id = configured_project_id
756
- lib_logger.debug(
757
- f"No server project, using configured: {project_id}"
758
- )
759
- elif is_free_tier:
760
- # Free tier user without server project - this shouldn't happen normally
761
- # but let's not fail, just proceed to onboarding
762
- lib_logger.debug(
763
- "Free tier user with currentTier but no project - will try onboarding"
764
- )
765
- project_id = None
766
- elif requires_user_project:
767
- # Paid tier requires a project ID to be set
768
- raise ValueError(
769
- f"Paid tier '{current_tier_id}' requires setting GEMINI_CLI_PROJECT_ID environment variable. "
770
- "See https://goo.gle/gemini-cli-auth-docs#workspace-gca"
771
- )
772
- else:
773
- # Unknown tier without project - proceed carefully
774
- lib_logger.warning(
775
- f"Tier '{current_tier_id}' has no project and none configured - will try onboarding"
776
- )
777
- project_id = None
778
-
779
- if project_id:
780
- # Cache tier info
781
- self.project_tier_cache[credential_path] = current_tier_id
782
- discovered_tier = current_tier_id
783
-
784
- # Log appropriately based on tier
785
- is_paid = current_tier_id and current_tier_id not in [
786
- "free-tier",
787
- "legacy-tier",
788
- "unknown",
789
- ]
790
- if is_paid:
791
- lib_logger.info(
792
- f"Using Gemini paid tier '{current_tier_id}' with project: {project_id}"
793
- )
794
- else:
795
- lib_logger.info(
796
- f"Discovered Gemini project ID via loadCodeAssist: {project_id}"
797
- )
798
-
799
- self.project_id_cache[credential_path] = project_id
800
- discovered_project_id = project_id
801
-
802
- # Persist to credential file
803
- await self._persist_project_metadata(
804
- credential_path, project_id, discovered_tier
805
- )
806
-
807
- return project_id
808
-
809
- # 2. User needs onboarding - no currentTier
810
- lib_logger.info(
811
- "No existing Gemini session found (no currentTier), attempting to onboard user..."
812
- )
813
-
814
- # Determine which tier to onboard with
815
- onboard_tier = None
816
- for tier in allowed_tiers:
817
- if tier.get("isDefault"):
818
- onboard_tier = tier
819
- break
820
-
821
- # Fallback to LEGACY tier if no default (requires user project)
822
- if not onboard_tier and allowed_tiers:
823
- # Look for legacy-tier as fallback
824
- for tier in allowed_tiers:
825
- if tier.get("id") == "legacy-tier":
826
- onboard_tier = tier
827
- break
828
- # If still no tier, use first available
829
- if not onboard_tier:
830
- onboard_tier = allowed_tiers[0]
831
-
832
- if not onboard_tier:
833
- raise ValueError("No onboarding tiers available from server")
834
-
835
- tier_id = onboard_tier.get("id", "free-tier")
836
- requires_user_project = onboard_tier.get(
837
- "userDefinedCloudaicompanionProject", False
838
- )
839
-
840
- lib_logger.debug(
841
- f"Onboarding with tier: {tier_id}, requiresUserProject: {requires_user_project}"
842
- )
843
-
844
- # Build onboard request based on tier type (following official CLI logic)
845
- # FREE tier: cloudaicompanionProject = None (server-managed)
846
- # PAID tier: cloudaicompanionProject = configured_project_id (user must provide)
847
- is_free_tier = tier_id == "free-tier"
848
-
849
- if is_free_tier:
850
- # Free tier uses server-managed project
851
- onboard_request = {
852
- "tierId": tier_id,
853
- "cloudaicompanionProject": None, # Server will create/manage
854
- "metadata": core_client_metadata,
855
- }
856
- lib_logger.debug(
857
- "Free tier onboarding: using server-managed project"
858
- )
859
- else:
860
- # Paid/legacy tier requires user-provided project
861
- if not configured_project_id and requires_user_project:
862
- raise ValueError(
863
- f"Tier '{tier_id}' requires setting GEMINI_CLI_PROJECT_ID environment variable. "
864
- "See https://goo.gle/gemini-cli-auth-docs#workspace-gca"
865
- )
866
- onboard_request = {
867
- "tierId": tier_id,
868
- "cloudaicompanionProject": configured_project_id,
869
- "metadata": {
870
- **core_client_metadata,
871
- "duetProject": configured_project_id,
872
- }
873
- if configured_project_id
874
- else core_client_metadata,
875
- }
876
- lib_logger.debug(
877
- f"Paid tier onboarding: using project {configured_project_id}"
878
- )
879
-
880
- lib_logger.debug("Initiating onboardUser request...")
881
- lro_response = await client.post(
882
- f"{CODE_ASSIST_ENDPOINT}:onboardUser",
883
- headers=headers,
884
- json=onboard_request,
885
- timeout=30,
886
- )
887
- lro_response.raise_for_status()
888
- lro_data = lro_response.json()
889
- lib_logger.debug(
890
- f"Initial onboarding response: done={lro_data.get('done')}"
891
- )
892
-
893
- for i in range(150): # Poll for up to 5 minutes (150 × 2s)
894
- if lro_data.get("done"):
895
- lib_logger.debug(
896
- f"Onboarding completed after {i} polling attempts"
897
- )
898
- break
899
- await asyncio.sleep(2)
900
- if (i + 1) % 15 == 0: # Log every 30 seconds
901
- lib_logger.info(
902
- f"Still waiting for onboarding completion... ({(i + 1) * 2}s elapsed)"
903
- )
904
- lib_logger.debug(
905
- f"Polling onboarding status... (Attempt {i + 1}/150)"
906
- )
907
- lro_response = await client.post(
908
- f"{CODE_ASSIST_ENDPOINT}:onboardUser",
909
- headers=headers,
910
- json=onboard_request,
911
- timeout=30,
912
- )
913
- lro_response.raise_for_status()
914
- lro_data = lro_response.json()
915
-
916
- if not lro_data.get("done"):
917
- lib_logger.error("Onboarding process timed out after 5 minutes")
918
- raise ValueError(
919
- "Onboarding process timed out after 5 minutes. Please try again or contact support."
920
- )
921
-
922
- # Extract project ID from LRO response
923
- # Note: onboardUser returns response.cloudaicompanionProject as an object with .id
924
- lro_response_data = lro_data.get("response", {})
925
- lro_project_obj = lro_response_data.get("cloudaicompanionProject", {})
926
- project_id = (
927
- lro_project_obj.get("id")
928
- if isinstance(lro_project_obj, dict)
929
- else None
930
- )
931
-
932
- # Fallback to configured project if LRO didn't return one
933
- if not project_id and configured_project_id:
934
- project_id = configured_project_id
935
- lib_logger.debug(
936
- f"LRO didn't return project, using configured: {project_id}"
937
- )
938
-
939
- if not project_id:
940
- lib_logger.error(
941
- "Onboarding completed but no project ID in response and none configured"
942
- )
943
- raise ValueError(
944
- "Onboarding completed, but no project ID was returned. "
945
- "For paid tiers, set GEMINI_CLI_PROJECT_ID environment variable."
946
- )
947
-
948
- lib_logger.debug(
949
- f"Successfully extracted project ID from onboarding response: {project_id}"
950
- )
951
-
952
- # Cache tier info
953
- self.project_tier_cache[credential_path] = tier_id
954
- discovered_tier = tier_id
955
- lib_logger.debug(f"Cached tier information: {tier_id}")
956
-
957
- # Log concise message for paid projects
958
- is_paid = tier_id and tier_id not in ["free-tier", "legacy-tier"]
959
- if is_paid:
960
- lib_logger.info(
961
- f"Using Gemini paid tier '{tier_id}' with project: {project_id}"
962
- )
963
- else:
964
- lib_logger.info(
965
- f"Successfully onboarded user and discovered project ID: {project_id}"
966
- )
967
-
968
- self.project_id_cache[credential_path] = project_id
969
- discovered_project_id = project_id
970
-
971
- # Persist to credential file
972
- await self._persist_project_metadata(
973
- credential_path, project_id, discovered_tier
974
- )
975
-
976
- return project_id
977
-
978
- except httpx.HTTPStatusError as e:
979
- error_body = ""
980
- try:
981
- error_body = e.response.text
982
- except Exception:
983
- pass
984
- if e.response.status_code == 403:
985
- lib_logger.error(
986
- f"Gemini Code Assist API access denied (403). Response: {error_body}"
987
- )
988
- lib_logger.error(
989
- "Possible causes: 1) cloudaicompanion.googleapis.com API not enabled, 2) Wrong project ID for paid tier, 3) Account lacks permissions"
990
- )
991
- elif e.response.status_code == 404:
992
- lib_logger.warning(
993
- f"Gemini Code Assist endpoint not found (404). Falling back to project listing."
994
- )
995
- elif e.response.status_code == 412:
996
- # Precondition Failed - often means wrong project for free tier onboarding
997
- lib_logger.error(
998
- f"Precondition failed (412): {error_body}. This may mean the project ID is incompatible with the selected tier."
999
- )
1000
- else:
1001
- lib_logger.warning(
1002
- f"Gemini onboarding/discovery failed with status {e.response.status_code}: {error_body}. Falling back to project listing."
1003
- )
1004
- except httpx.RequestError as e:
1005
- lib_logger.warning(
1006
- f"Gemini onboarding/discovery network error: {e}. Falling back to project listing."
1007
- )
1008
-
1009
- # 3. Fallback to listing all available GCP projects (last resort)
1010
- lib_logger.debug(
1011
- "Attempting to discover project via GCP Resource Manager API..."
1012
- )
1013
- try:
1014
- async with httpx.AsyncClient() as client:
1015
- lib_logger.debug(
1016
- "Querying Cloud Resource Manager for available projects..."
1017
- )
1018
- response = await client.get(
1019
- "https://cloudresourcemanager.googleapis.com/v1/projects",
1020
- headers=headers,
1021
- timeout=20,
1022
- )
1023
- response.raise_for_status()
1024
- projects = response.json().get("projects", [])
1025
- lib_logger.debug(f"Found {len(projects)} total projects")
1026
- active_projects = [
1027
- p for p in projects if p.get("lifecycleState") == "ACTIVE"
1028
- ]
1029
- lib_logger.debug(f"Found {len(active_projects)} active projects")
1030
-
1031
- if not projects:
1032
- lib_logger.error(
1033
- "No GCP projects found for this account. Please create a project in Google Cloud Console."
1034
- )
1035
- elif not active_projects:
1036
- lib_logger.error(
1037
- "No active GCP projects found. Please activate a project in Google Cloud Console."
1038
- )
1039
- else:
1040
- project_id = active_projects[0]["projectId"]
1041
- lib_logger.info(
1042
- f"Discovered Gemini project ID from active projects list: {project_id}"
1043
- )
1044
- lib_logger.debug(
1045
- f"Selected first active project: {project_id} (out of {len(active_projects)} active projects)"
1046
- )
1047
- self.project_id_cache[credential_path] = project_id
1048
- discovered_project_id = project_id
1049
-
1050
- # [NEW] Persist to credential file (no tier info from resource manager)
1051
- await self._persist_project_metadata(
1052
- credential_path, project_id, None
1053
- )
1054
-
1055
- return project_id
1056
- except httpx.HTTPStatusError as e:
1057
- if e.response.status_code == 403:
1058
- lib_logger.error(
1059
- "Failed to list GCP projects due to a 403 Forbidden error. The Cloud Resource Manager API may not be enabled, or your account lacks the 'resourcemanager.projects.list' permission."
1060
- )
1061
- else:
1062
- lib_logger.error(
1063
- f"Failed to list GCP projects with status {e.response.status_code}: {e}"
1064
- )
1065
- except httpx.RequestError as e:
1066
- lib_logger.error(f"Network error while listing GCP projects: {e}")
1067
-
1068
- raise ValueError(
1069
- "Could not auto-discover Gemini project ID. Possible causes:\n"
1070
- " 1. The cloudaicompanion.googleapis.com API is not enabled (enable it in Google Cloud Console)\n"
1071
- " 2. No active GCP projects exist for this account (create one in Google Cloud Console)\n"
1072
- " 3. Account lacks necessary permissions\n"
1073
- "To manually specify a project, set GEMINI_CLI_PROJECT_ID in your .env file."
1074
- )
1075
-
1076
- async def _persist_project_metadata(
1077
- self, credential_path: str, project_id: str, tier: Optional[str]
1078
- ):
1079
- """Persists project ID and tier to the credential file for faster future startups."""
1080
- # Skip persistence for env:// paths (environment-based credentials)
1081
- credential_index = self._parse_env_credential_path(credential_path)
1082
- if credential_index is not None:
1083
- lib_logger.debug(
1084
- f"Skipping project metadata persistence for env:// credential path: {credential_path}"
1085
- )
1086
- return
1087
-
1088
- try:
1089
- # Load current credentials
1090
- with open(credential_path, "r") as f:
1091
- creds = json.load(f)
1092
-
1093
- # Update metadata
1094
- if "_proxy_metadata" not in creds:
1095
- creds["_proxy_metadata"] = {}
1096
-
1097
- creds["_proxy_metadata"]["project_id"] = project_id
1098
- if tier:
1099
- creds["_proxy_metadata"]["tier"] = tier
1100
-
1101
- # Save back using the existing save method (handles atomic writes and permissions)
1102
- await self._save_credentials(credential_path, creds)
1103
-
1104
- lib_logger.debug(
1105
- f"Persisted project_id and tier to credential file: {credential_path}"
1106
- )
1107
- except Exception as e:
1108
- lib_logger.warning(
1109
- f"Failed to persist project metadata to credential file: {e}"
1110
- )
1111
- # Non-fatal - just means slower startup next time
1112
 
1113
  def _check_mixed_tier_warning(self):
1114
  """Check if mixed free/paid tier credentials are loaded and emit warning."""
 
383
  def __init__(self):
384
  super().__init__()
385
  self.model_definitions = ModelDefinitions()
386
+ # NOTE: project_id_cache and project_tier_cache are inherited from GeminiAuthBase
 
 
 
 
 
387
 
388
  # Gemini 3 configuration from environment
389
  memory_ttl = _env_int("GEMINI_CLI_SIGNATURE_CACHE_TTL", 3600)
 
575
 
576
  return loaded
577
 
578
+ # NOTE: _post_auth_discovery() is inherited from GeminiAuthBase
579
+
580
  # =========================================================================
581
  # MODEL UTILITIES
582
  # =========================================================================
 
592
  return name[len(self._gemini3_tool_prefix) :]
593
  return name
594
 
595
+ # NOTE: _discover_project_id() and _persist_project_metadata() are inherited from GeminiAuthBase
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
596
 
597
  def _check_mixed_tier_warning(self):
598
  """Check if mixed free/paid tier credentials are loaded and emit warning."""
src/rotator_library/providers/google_oauth_base.py CHANGED
@@ -1,14 +1,17 @@
1
  # src/rotator_library/providers/google_oauth_base.py
2
 
3
  import os
 
4
  import webbrowser
5
- from typing import Union, Optional
 
6
  import json
7
  import time
8
  import asyncio
9
  import logging
10
  from pathlib import Path
11
  from typing import Dict, Any
 
12
 
13
  import httpx
14
  from rich.console import Console
@@ -25,6 +28,24 @@ lib_logger = logging.getLogger("rotator_library")
25
  console = Console()
26
 
27
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
28
  class GoogleOAuthBase:
29
  """
30
  Base class for Google OAuth2 authentication providers.
@@ -840,6 +861,18 @@ class GoogleOAuthBase:
840
  lib_logger.info(
841
  f"{self.ENV_PREFIX} OAuth initialized successfully for '{display_name}'."
842
  )
 
 
 
 
 
 
 
 
 
 
 
 
843
  return new_creds
844
 
845
  async def initialize_token(
@@ -945,6 +978,23 @@ class GoogleOAuthBase:
945
  return {"Authorization": f"Bearer {cached['access_token']}"}
946
  raise
947
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
948
  async def get_user_info(
949
  self, creds_or_path: Union[Dict[str, Any], str]
950
  ) -> Dict[str, Any]:
@@ -976,3 +1026,372 @@ class GoogleOAuthBase:
976
  if path:
977
  await self._save_credentials(path, creds)
978
  return {"email": user_info.get("email")}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  # src/rotator_library/providers/google_oauth_base.py
2
 
3
  import os
4
+ import re
5
  import webbrowser
6
+ from dataclasses import dataclass, field
7
+ from typing import Union, Optional, List
8
  import json
9
  import time
10
  import asyncio
11
  import logging
12
  from pathlib import Path
13
  from typing import Dict, Any
14
+ from glob import glob
15
 
16
  import httpx
17
  from rich.console import Console
 
28
  console = Console()
29
 
30
 
31
+ @dataclass
32
+ class CredentialSetupResult:
33
+ """
34
+ Standardized result structure for credential setup operations.
35
+
36
+ Used by all auth classes to return consistent setup results to the credential tool.
37
+ """
38
+
39
+ success: bool
40
+ file_path: Optional[str] = None
41
+ email: Optional[str] = None
42
+ tier: Optional[str] = None
43
+ project_id: Optional[str] = None
44
+ is_update: bool = False
45
+ error: Optional[str] = None
46
+ credentials: Optional[Dict[str, Any]] = field(default=None, repr=False)
47
+
48
+
49
  class GoogleOAuthBase:
50
  """
51
  Base class for Google OAuth2 authentication providers.
 
861
  lib_logger.info(
862
  f"{self.ENV_PREFIX} OAuth initialized successfully for '{display_name}'."
863
  )
864
+
865
+ # Perform post-auth discovery (tier, project, etc.) while we have a fresh token
866
+ if path:
867
+ try:
868
+ await self._post_auth_discovery(path, new_creds["access_token"])
869
+ except Exception as e:
870
+ # Don't fail auth if discovery fails - it can be retried on first request
871
+ lib_logger.warning(
872
+ f"Post-auth discovery failed for '{display_name}': {e}. "
873
+ "Tier/project will be discovered on first request."
874
+ )
875
+
876
  return new_creds
877
 
878
  async def initialize_token(
 
978
  return {"Authorization": f"Bearer {cached['access_token']}"}
979
  raise
980
 
981
+ async def _post_auth_discovery(
982
+ self, credential_path: str, access_token: str
983
+ ) -> None:
984
+ """
985
+ Hook for subclasses to perform post-authentication discovery.
986
+
987
+ Called after successful OAuth authentication (both initial and re-auth).
988
+ Subclasses can override this to discover and cache tier/project information
989
+ during the authentication flow rather than waiting for the first API request.
990
+
991
+ Args:
992
+ credential_path: Path to the credential file
993
+ access_token: The newly obtained access token
994
+ """
995
+ # Default implementation does nothing - subclasses can override
996
+ pass
997
+
998
  async def get_user_info(
999
  self, creds_or_path: Union[Dict[str, Any], str]
1000
  ) -> Dict[str, Any]:
 
1026
  if path:
1027
  await self._save_credentials(path, creds)
1028
  return {"email": user_info.get("email")}
1029
+
1030
+ # =========================================================================
1031
+ # CREDENTIAL MANAGEMENT METHODS
1032
+ # =========================================================================
1033
+
1034
+ def _get_provider_file_prefix(self) -> str:
1035
+ """
1036
+ Get the file name prefix for this provider's credential files.
1037
+
1038
+ Override in subclasses if the prefix differs from ENV_PREFIX.
1039
+ Default: lowercase ENV_PREFIX with underscores (e.g., "gemini_cli")
1040
+ """
1041
+ return self.ENV_PREFIX.lower()
1042
+
1043
+ def _get_oauth_base_dir(self) -> Path:
1044
+ """
1045
+ Get the base directory for OAuth credential files.
1046
+
1047
+ Can be overridden to customize credential storage location.
1048
+ """
1049
+ return Path.cwd() / "oauth_creds"
1050
+
1051
+ def _find_existing_credential_by_email(
1052
+ self, email: str, base_dir: Optional[Path] = None
1053
+ ) -> Optional[Path]:
1054
+ """
1055
+ Find an existing credential file for the given email.
1056
+
1057
+ Args:
1058
+ email: Email address to search for
1059
+ base_dir: Directory to search in (defaults to oauth_creds)
1060
+
1061
+ Returns:
1062
+ Path to existing credential file, or None if not found
1063
+ """
1064
+ if base_dir is None:
1065
+ base_dir = self._get_oauth_base_dir()
1066
+
1067
+ prefix = self._get_provider_file_prefix()
1068
+ pattern = str(base_dir / f"{prefix}_oauth_*.json")
1069
+
1070
+ for cred_file in glob(pattern):
1071
+ try:
1072
+ with open(cred_file, "r") as f:
1073
+ creds = json.load(f)
1074
+ existing_email = creds.get("_proxy_metadata", {}).get("email")
1075
+ if existing_email == email:
1076
+ return Path(cred_file)
1077
+ except (json.JSONDecodeError, IOError) as e:
1078
+ lib_logger.debug(f"Could not read credential file {cred_file}: {e}")
1079
+ continue
1080
+
1081
+ return None
1082
+
1083
+ def _get_next_credential_number(self, base_dir: Optional[Path] = None) -> int:
1084
+ """
1085
+ Get the next available credential number for new credential files.
1086
+
1087
+ Args:
1088
+ base_dir: Directory to scan (defaults to oauth_creds)
1089
+
1090
+ Returns:
1091
+ Next available credential number (1, 2, 3, etc.)
1092
+ """
1093
+ if base_dir is None:
1094
+ base_dir = self._get_oauth_base_dir()
1095
+
1096
+ prefix = self._get_provider_file_prefix()
1097
+ pattern = str(base_dir / f"{prefix}_oauth_*.json")
1098
+
1099
+ existing_numbers = []
1100
+ for cred_file in glob(pattern):
1101
+ match = re.search(r"_oauth_(\d+)\.json$", cred_file)
1102
+ if match:
1103
+ existing_numbers.append(int(match.group(1)))
1104
+
1105
+ if not existing_numbers:
1106
+ return 1
1107
+ return max(existing_numbers) + 1
1108
+
1109
+ def _build_credential_path(
1110
+ self, base_dir: Optional[Path] = None, number: Optional[int] = None
1111
+ ) -> Path:
1112
+ """
1113
+ Build a path for a new credential file.
1114
+
1115
+ Args:
1116
+ base_dir: Directory for credential files (defaults to oauth_creds)
1117
+ number: Credential number (auto-determined if None)
1118
+
1119
+ Returns:
1120
+ Path for the new credential file
1121
+ """
1122
+ if base_dir is None:
1123
+ base_dir = self._get_oauth_base_dir()
1124
+
1125
+ if number is None:
1126
+ number = self._get_next_credential_number(base_dir)
1127
+
1128
+ prefix = self._get_provider_file_prefix()
1129
+ filename = f"{prefix}_oauth_{number}.json"
1130
+ return base_dir / filename
1131
+
1132
+ async def setup_credential(
1133
+ self, base_dir: Optional[Path] = None
1134
+ ) -> CredentialSetupResult:
1135
+ """
1136
+ Complete credential setup flow: OAuth -> save -> discovery.
1137
+
1138
+ This is the main entry point for setting up new credentials.
1139
+ Handles the entire lifecycle:
1140
+ 1. Perform OAuth authentication
1141
+ 2. Get user info (email) for deduplication
1142
+ 3. Find existing credential or create new file path
1143
+ 4. Save credentials to file
1144
+ 5. Perform post-auth discovery (tier/project for Google OAuth)
1145
+
1146
+ Args:
1147
+ base_dir: Directory for credential files (defaults to oauth_creds)
1148
+
1149
+ Returns:
1150
+ CredentialSetupResult with status and details
1151
+ """
1152
+ if base_dir is None:
1153
+ base_dir = self._get_oauth_base_dir()
1154
+
1155
+ # Ensure directory exists
1156
+ base_dir.mkdir(exist_ok=True)
1157
+
1158
+ try:
1159
+ # Step 1: Perform OAuth authentication (returns credentials dict)
1160
+ temp_creds = {
1161
+ "_proxy_metadata": {"display_name": f"new {self.ENV_PREFIX} credential"}
1162
+ }
1163
+ new_creds = await self.initialize_token(temp_creds)
1164
+
1165
+ # Step 2: Get user info for deduplication
1166
+ user_info = await self.get_user_info(new_creds)
1167
+ email = user_info.get("email")
1168
+
1169
+ if not email:
1170
+ return CredentialSetupResult(
1171
+ success=False, error="Could not retrieve email from OAuth response"
1172
+ )
1173
+
1174
+ # Step 3: Check for existing credential with same email
1175
+ existing_path = self._find_existing_credential_by_email(email, base_dir)
1176
+ is_update = existing_path is not None
1177
+
1178
+ if is_update:
1179
+ file_path = existing_path
1180
+ lib_logger.info(
1181
+ f"Found existing credential for {email}, updating {file_path.name}"
1182
+ )
1183
+ else:
1184
+ file_path = self._build_credential_path(base_dir)
1185
+ lib_logger.info(
1186
+ f"Creating new credential for {email} at {file_path.name}"
1187
+ )
1188
+
1189
+ # Step 4: Save credentials to file
1190
+ await self._save_credentials(str(file_path), new_creds)
1191
+
1192
+ # Step 5: Perform post-auth discovery (tier, project_id)
1193
+ # This is already called in _perform_interactive_oauth, but we call it again
1194
+ # in case credentials were loaded from existing token
1195
+ tier = None
1196
+ project_id = None
1197
+ try:
1198
+ await self._post_auth_discovery(
1199
+ str(file_path), new_creds["access_token"]
1200
+ )
1201
+ # Reload credentials to get discovered metadata
1202
+ with open(file_path, "r") as f:
1203
+ updated_creds = json.load(f)
1204
+ tier = updated_creds.get("_proxy_metadata", {}).get("tier")
1205
+ project_id = updated_creds.get("_proxy_metadata", {}).get("project_id")
1206
+ new_creds = updated_creds
1207
+ except Exception as e:
1208
+ lib_logger.warning(
1209
+ f"Post-auth discovery failed: {e}. Tier/project will be discovered on first request."
1210
+ )
1211
+
1212
+ return CredentialSetupResult(
1213
+ success=True,
1214
+ file_path=str(file_path),
1215
+ email=email,
1216
+ tier=tier,
1217
+ project_id=project_id,
1218
+ is_update=is_update,
1219
+ credentials=new_creds,
1220
+ )
1221
+
1222
+ except Exception as e:
1223
+ lib_logger.error(f"Credential setup failed: {e}")
1224
+ return CredentialSetupResult(success=False, error=str(e))
1225
+
1226
+ def build_env_lines(self, creds: Dict[str, Any], cred_number: int) -> List[str]:
1227
+ """
1228
+ Generate .env file lines for a credential.
1229
+
1230
+ Subclasses should override to include provider-specific fields
1231
+ (e.g., tier, project_id for Google OAuth providers).
1232
+
1233
+ Args:
1234
+ creds: Credential dictionary loaded from JSON
1235
+ cred_number: Credential number (1, 2, 3, etc.)
1236
+
1237
+ Returns:
1238
+ List of .env file lines
1239
+ """
1240
+ email = creds.get("_proxy_metadata", {}).get("email", "unknown")
1241
+ prefix = f"{self.ENV_PREFIX}_{cred_number}"
1242
+
1243
+ lines = [
1244
+ f"# {self.ENV_PREFIX} Credential #{cred_number} for: {email}",
1245
+ f"# Exported from: {self._get_provider_file_prefix()}_oauth_{cred_number}.json",
1246
+ f"# Generated at: {time.strftime('%Y-%m-%d %H:%M:%S')}",
1247
+ "#",
1248
+ "# To combine multiple credentials into one .env file, copy these lines",
1249
+ "# and ensure each credential has a unique number (1, 2, 3, etc.)",
1250
+ "",
1251
+ f"{prefix}_ACCESS_TOKEN={creds.get('access_token', '')}",
1252
+ f"{prefix}_REFRESH_TOKEN={creds.get('refresh_token', '')}",
1253
+ f"{prefix}_SCOPE={creds.get('scope', '')}",
1254
+ f"{prefix}_TOKEN_TYPE={creds.get('token_type', 'Bearer')}",
1255
+ f"{prefix}_ID_TOKEN={creds.get('id_token', '')}",
1256
+ f"{prefix}_EXPIRY_DATE={creds.get('expiry_date', 0)}",
1257
+ f"{prefix}_CLIENT_ID={creds.get('client_id', '')}",
1258
+ f"{prefix}_CLIENT_SECRET={creds.get('client_secret', '')}",
1259
+ f"{prefix}_TOKEN_URI={creds.get('token_uri', 'https://oauth2.googleapis.com/token')}",
1260
+ f"{prefix}_UNIVERSE_DOMAIN={creds.get('universe_domain', 'googleapis.com')}",
1261
+ f"{prefix}_EMAIL={email}",
1262
+ ]
1263
+
1264
+ return lines
1265
+
1266
+ def export_credential_to_env(
1267
+ self, credential_path: str, output_dir: Optional[Path] = None
1268
+ ) -> Optional[str]:
1269
+ """
1270
+ Export a credential file to .env format.
1271
+
1272
+ Args:
1273
+ credential_path: Path to the credential JSON file
1274
+ output_dir: Directory for output .env file (defaults to same as credential)
1275
+
1276
+ Returns:
1277
+ Path to the exported .env file, or None on error
1278
+ """
1279
+ try:
1280
+ cred_path = Path(credential_path)
1281
+
1282
+ # Load credential
1283
+ with open(cred_path, "r") as f:
1284
+ creds = json.load(f)
1285
+
1286
+ # Extract metadata
1287
+ email = creds.get("_proxy_metadata", {}).get("email", "unknown")
1288
+
1289
+ # Get credential number from filename
1290
+ match = re.search(r"_oauth_(\d+)\.json$", cred_path.name)
1291
+ cred_number = int(match.group(1)) if match else 1
1292
+
1293
+ # Build output path
1294
+ if output_dir is None:
1295
+ output_dir = cred_path.parent
1296
+
1297
+ safe_email = email.replace("@", "_at_").replace(".", "_")
1298
+ prefix = self._get_provider_file_prefix()
1299
+ env_filename = f"{prefix}_{cred_number}_{safe_email}.env"
1300
+ env_path = output_dir / env_filename
1301
+
1302
+ # Build and write content
1303
+ env_lines = self.build_env_lines(creds, cred_number)
1304
+ with open(env_path, "w") as f:
1305
+ f.write("\n".join(env_lines))
1306
+
1307
+ lib_logger.info(f"Exported credential to {env_path}")
1308
+ return str(env_path)
1309
+
1310
+ except Exception as e:
1311
+ lib_logger.error(f"Failed to export credential: {e}")
1312
+ return None
1313
+
1314
+ def list_credentials(self, base_dir: Optional[Path] = None) -> List[Dict[str, Any]]:
1315
+ """
1316
+ List all credential files for this provider.
1317
+
1318
+ Args:
1319
+ base_dir: Directory to search (defaults to oauth_creds)
1320
+
1321
+ Returns:
1322
+ List of dicts with credential info:
1323
+ - file_path: Path to credential file
1324
+ - email: User email
1325
+ - tier: Tier info (if available)
1326
+ - project_id: Project ID (if available)
1327
+ - number: Credential number
1328
+ """
1329
+ if base_dir is None:
1330
+ base_dir = self._get_oauth_base_dir()
1331
+
1332
+ prefix = self._get_provider_file_prefix()
1333
+ pattern = str(base_dir / f"{prefix}_oauth_*.json")
1334
+
1335
+ credentials = []
1336
+ for cred_file in sorted(glob(pattern)):
1337
+ try:
1338
+ with open(cred_file, "r") as f:
1339
+ creds = json.load(f)
1340
+
1341
+ metadata = creds.get("_proxy_metadata", {})
1342
+
1343
+ # Extract number from filename
1344
+ match = re.search(r"_oauth_(\d+)\.json$", cred_file)
1345
+ number = int(match.group(1)) if match else 0
1346
+
1347
+ credentials.append(
1348
+ {
1349
+ "file_path": cred_file,
1350
+ "email": metadata.get("email", "unknown"),
1351
+ "tier": metadata.get("tier"),
1352
+ "project_id": metadata.get("project_id"),
1353
+ "number": number,
1354
+ }
1355
+ )
1356
+ except Exception as e:
1357
+ lib_logger.debug(f"Could not read credential file {cred_file}: {e}")
1358
+ continue
1359
+
1360
+ return credentials
1361
+
1362
+ def delete_credential(self, credential_path: str) -> bool:
1363
+ """
1364
+ Delete a credential file.
1365
+
1366
+ Args:
1367
+ credential_path: Path to the credential file
1368
+
1369
+ Returns:
1370
+ True if deleted successfully, False otherwise
1371
+ """
1372
+ try:
1373
+ cred_path = Path(credential_path)
1374
+
1375
+ # Validate that it's one of our credential files
1376
+ prefix = self._get_provider_file_prefix()
1377
+ if not cred_path.name.startswith(f"{prefix}_oauth_"):
1378
+ lib_logger.error(
1379
+ f"File {cred_path.name} does not appear to be a {self.ENV_PREFIX} credential"
1380
+ )
1381
+ return False
1382
+
1383
+ if not cred_path.exists():
1384
+ lib_logger.warning(f"Credential file does not exist: {credential_path}")
1385
+ return False
1386
+
1387
+ # Remove from cache if present
1388
+ self._credentials_cache.pop(credential_path, None)
1389
+
1390
+ # Delete the file
1391
+ cred_path.unlink()
1392
+ lib_logger.info(f"Deleted credential file: {credential_path}")
1393
+ return True
1394
+
1395
+ except Exception as e:
1396
+ lib_logger.error(f"Failed to delete credential: {e}")
1397
+ return False
src/rotator_library/providers/iflow_auth_base.py CHANGED
@@ -9,8 +9,11 @@ import logging
9
  import webbrowser
10
  import socket
11
  import os
 
 
12
  from pathlib import Path
13
- from typing import Dict, Any, Tuple, Union, Optional
 
14
  from urllib.parse import urlencode, parse_qs, urlparse
15
 
16
  import httpx
@@ -40,6 +43,20 @@ IFLOW_CLIENT_SECRET = "4Z3YjXycVsQvyGF1etiNlIBB4RsqSDtW"
40
  CALLBACK_PORT = 11451
41
 
42
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
43
  def get_callback_port() -> int:
44
  """
45
  Get the OAuth callback port, checking environment variable first.
@@ -1165,3 +1182,261 @@ class IFlowAuthBase:
1165
  except Exception as e:
1166
  lib_logger.error(f"Failed to get iFlow user info from credentials: {e}")
1167
  return {"email": None}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
  import webbrowser
10
  import socket
11
  import os
12
+ import re
13
+ from dataclasses import dataclass, field
14
  from pathlib import Path
15
+ from glob import glob
16
+ from typing import Dict, Any, Tuple, Union, Optional, List
17
  from urllib.parse import urlencode, parse_qs, urlparse
18
 
19
  import httpx
 
43
  CALLBACK_PORT = 11451
44
 
45
 
46
+ @dataclass
47
+ class IFlowCredentialSetupResult:
48
+ """
49
+ Standardized result structure for iFlow credential setup operations.
50
+ """
51
+
52
+ success: bool
53
+ file_path: Optional[str] = None
54
+ email: Optional[str] = None
55
+ is_update: bool = False
56
+ error: Optional[str] = None
57
+ credentials: Optional[Dict[str, Any]] = field(default=None, repr=False)
58
+
59
+
60
  def get_callback_port() -> int:
61
  """
62
  Get the OAuth callback port, checking environment variable first.
 
1182
  except Exception as e:
1183
  lib_logger.error(f"Failed to get iFlow user info from credentials: {e}")
1184
  return {"email": None}
1185
+
1186
+ # =========================================================================
1187
+ # CREDENTIAL MANAGEMENT METHODS
1188
+ # =========================================================================
1189
+
1190
+ def _get_provider_file_prefix(self) -> str:
1191
+ """Return the file prefix for iFlow credentials."""
1192
+ return "iflow"
1193
+
1194
+ def _get_oauth_base_dir(self) -> Path:
1195
+ """Get the base directory for OAuth credential files."""
1196
+ return Path.cwd() / "oauth_creds"
1197
+
1198
+ def _find_existing_credential_by_email(
1199
+ self, email: str, base_dir: Optional[Path] = None
1200
+ ) -> Optional[Path]:
1201
+ """Find an existing credential file for the given email."""
1202
+ if base_dir is None:
1203
+ base_dir = self._get_oauth_base_dir()
1204
+
1205
+ prefix = self._get_provider_file_prefix()
1206
+ pattern = str(base_dir / f"{prefix}_oauth_*.json")
1207
+
1208
+ for cred_file in glob(pattern):
1209
+ try:
1210
+ with open(cred_file, "r") as f:
1211
+ creds = json.load(f)
1212
+ existing_email = creds.get("email") or creds.get(
1213
+ "_proxy_metadata", {}
1214
+ ).get("email")
1215
+ if existing_email == email:
1216
+ return Path(cred_file)
1217
+ except (json.JSONDecodeError, IOError) as e:
1218
+ lib_logger.debug(f"Could not read credential file {cred_file}: {e}")
1219
+ continue
1220
+
1221
+ return None
1222
+
1223
+ def _get_next_credential_number(self, base_dir: Optional[Path] = None) -> int:
1224
+ """Get the next available credential number."""
1225
+ if base_dir is None:
1226
+ base_dir = self._get_oauth_base_dir()
1227
+
1228
+ prefix = self._get_provider_file_prefix()
1229
+ pattern = str(base_dir / f"{prefix}_oauth_*.json")
1230
+
1231
+ existing_numbers = []
1232
+ for cred_file in glob(pattern):
1233
+ match = re.search(r"_oauth_(\d+)\.json$", cred_file)
1234
+ if match:
1235
+ existing_numbers.append(int(match.group(1)))
1236
+
1237
+ if not existing_numbers:
1238
+ return 1
1239
+ return max(existing_numbers) + 1
1240
+
1241
+ def _build_credential_path(
1242
+ self, base_dir: Optional[Path] = None, number: Optional[int] = None
1243
+ ) -> Path:
1244
+ """Build a path for a new credential file."""
1245
+ if base_dir is None:
1246
+ base_dir = self._get_oauth_base_dir()
1247
+
1248
+ if number is None:
1249
+ number = self._get_next_credential_number(base_dir)
1250
+
1251
+ prefix = self._get_provider_file_prefix()
1252
+ filename = f"{prefix}_oauth_{number}.json"
1253
+ return base_dir / filename
1254
+
1255
+ async def setup_credential(
1256
+ self, base_dir: Optional[Path] = None
1257
+ ) -> IFlowCredentialSetupResult:
1258
+ """
1259
+ Complete credential setup flow: OAuth -> save.
1260
+
1261
+ This is the main entry point for setting up new credentials.
1262
+ """
1263
+ if base_dir is None:
1264
+ base_dir = self._get_oauth_base_dir()
1265
+
1266
+ # Ensure directory exists
1267
+ base_dir.mkdir(exist_ok=True)
1268
+
1269
+ try:
1270
+ # Step 1: Perform OAuth authentication
1271
+ temp_creds = {"_proxy_metadata": {"display_name": "new iFlow credential"}}
1272
+ new_creds = await self.initialize_token(temp_creds)
1273
+
1274
+ # Step 2: Get user info for deduplication
1275
+ email = new_creds.get("email") or new_creds.get("_proxy_metadata", {}).get(
1276
+ "email"
1277
+ )
1278
+
1279
+ if not email:
1280
+ return IFlowCredentialSetupResult(
1281
+ success=False, error="Could not retrieve email from OAuth response"
1282
+ )
1283
+
1284
+ # Step 3: Check for existing credential with same email
1285
+ existing_path = self._find_existing_credential_by_email(email, base_dir)
1286
+ is_update = existing_path is not None
1287
+
1288
+ if is_update:
1289
+ file_path = existing_path
1290
+ lib_logger.info(
1291
+ f"Found existing credential for {email}, updating {file_path.name}"
1292
+ )
1293
+ else:
1294
+ file_path = self._build_credential_path(base_dir)
1295
+ lib_logger.info(
1296
+ f"Creating new credential for {email} at {file_path.name}"
1297
+ )
1298
+
1299
+ # Step 4: Save credentials to file
1300
+ await self._save_credentials(str(file_path), new_creds)
1301
+
1302
+ return IFlowCredentialSetupResult(
1303
+ success=True,
1304
+ file_path=str(file_path),
1305
+ email=email,
1306
+ is_update=is_update,
1307
+ credentials=new_creds,
1308
+ )
1309
+
1310
+ except Exception as e:
1311
+ lib_logger.error(f"Credential setup failed: {e}")
1312
+ return IFlowCredentialSetupResult(success=False, error=str(e))
1313
+
1314
+ def build_env_lines(self, creds: Dict[str, Any], cred_number: int) -> List[str]:
1315
+ """Generate .env file lines for an iFlow credential."""
1316
+ email = creds.get("email") or creds.get("_proxy_metadata", {}).get(
1317
+ "email", "unknown"
1318
+ )
1319
+ prefix = f"IFLOW_{cred_number}"
1320
+
1321
+ lines = [
1322
+ f"# IFLOW Credential #{cred_number} for: {email}",
1323
+ f"# Exported from: iflow_oauth_{cred_number}.json",
1324
+ f"# Generated at: {time.strftime('%Y-%m-%d %H:%M:%S')}",
1325
+ "#",
1326
+ "# To combine multiple credentials into one .env file, copy these lines",
1327
+ "# and ensure each credential has a unique number (1, 2, 3, etc.)",
1328
+ "",
1329
+ f"{prefix}_ACCESS_TOKEN={creds.get('access_token', '')}",
1330
+ f"{prefix}_REFRESH_TOKEN={creds.get('refresh_token', '')}",
1331
+ f"{prefix}_API_KEY={creds.get('api_key', '')}",
1332
+ f"{prefix}_EXPIRY_DATE={creds.get('expiry_date', '')}",
1333
+ f"{prefix}_EMAIL={email}",
1334
+ f"{prefix}_TOKEN_TYPE={creds.get('token_type', 'Bearer')}",
1335
+ f"{prefix}_SCOPE={creds.get('scope', 'read write')}",
1336
+ ]
1337
+
1338
+ return lines
1339
+
1340
+ def export_credential_to_env(
1341
+ self, credential_path: str, output_dir: Optional[Path] = None
1342
+ ) -> Optional[str]:
1343
+ """Export a credential file to .env format."""
1344
+ try:
1345
+ cred_path = Path(credential_path)
1346
+
1347
+ # Load credential
1348
+ with open(cred_path, "r") as f:
1349
+ creds = json.load(f)
1350
+
1351
+ # Extract metadata
1352
+ email = creds.get("email") or creds.get("_proxy_metadata", {}).get(
1353
+ "email", "unknown"
1354
+ )
1355
+
1356
+ # Get credential number from filename
1357
+ match = re.search(r"_oauth_(\d+)\.json$", cred_path.name)
1358
+ cred_number = int(match.group(1)) if match else 1
1359
+
1360
+ # Build output path
1361
+ if output_dir is None:
1362
+ output_dir = cred_path.parent
1363
+
1364
+ safe_email = email.replace("@", "_at_").replace(".", "_")
1365
+ env_filename = f"iflow_{cred_number}_{safe_email}.env"
1366
+ env_path = output_dir / env_filename
1367
+
1368
+ # Build and write content
1369
+ env_lines = self.build_env_lines(creds, cred_number)
1370
+ with open(env_path, "w") as f:
1371
+ f.write("\n".join(env_lines))
1372
+
1373
+ lib_logger.info(f"Exported credential to {env_path}")
1374
+ return str(env_path)
1375
+
1376
+ except Exception as e:
1377
+ lib_logger.error(f"Failed to export credential: {e}")
1378
+ return None
1379
+
1380
+ def list_credentials(self, base_dir: Optional[Path] = None) -> List[Dict[str, Any]]:
1381
+ """List all iFlow credential files."""
1382
+ if base_dir is None:
1383
+ base_dir = self._get_oauth_base_dir()
1384
+
1385
+ prefix = self._get_provider_file_prefix()
1386
+ pattern = str(base_dir / f"{prefix}_oauth_*.json")
1387
+
1388
+ credentials = []
1389
+ for cred_file in sorted(glob(pattern)):
1390
+ try:
1391
+ with open(cred_file, "r") as f:
1392
+ creds = json.load(f)
1393
+
1394
+ email = creds.get("email") or creds.get("_proxy_metadata", {}).get(
1395
+ "email", "unknown"
1396
+ )
1397
+
1398
+ # Extract number from filename
1399
+ match = re.search(r"_oauth_(\d+)\.json$", cred_file)
1400
+ number = int(match.group(1)) if match else 0
1401
+
1402
+ credentials.append(
1403
+ {
1404
+ "file_path": cred_file,
1405
+ "email": email,
1406
+ "number": number,
1407
+ }
1408
+ )
1409
+ except Exception as e:
1410
+ lib_logger.debug(f"Could not read credential file {cred_file}: {e}")
1411
+ continue
1412
+
1413
+ return credentials
1414
+
1415
+ def delete_credential(self, credential_path: str) -> bool:
1416
+ """Delete a credential file."""
1417
+ try:
1418
+ cred_path = Path(credential_path)
1419
+
1420
+ # Validate that it's one of our credential files
1421
+ prefix = self._get_provider_file_prefix()
1422
+ if not cred_path.name.startswith(f"{prefix}_oauth_"):
1423
+ lib_logger.error(
1424
+ f"File {cred_path.name} does not appear to be an iFlow credential"
1425
+ )
1426
+ return False
1427
+
1428
+ if not cred_path.exists():
1429
+ lib_logger.warning(f"Credential file does not exist: {credential_path}")
1430
+ return False
1431
+
1432
+ # Remove from cache if present
1433
+ self._credentials_cache.pop(credential_path, None)
1434
+
1435
+ # Delete the file
1436
+ cred_path.unlink()
1437
+ lib_logger.info(f"Deleted credential file: {credential_path}")
1438
+ return True
1439
+
1440
+ except Exception as e:
1441
+ lib_logger.error(f"Failed to delete credential: {e}")
1442
+ return False
src/rotator_library/providers/qwen_auth_base.py CHANGED
@@ -9,8 +9,11 @@ import asyncio
9
  import logging
10
  import webbrowser
11
  import os
 
 
12
  from pathlib import Path
13
- from typing import Dict, Any, Tuple, Union, Optional
 
14
 
15
  import httpx
16
  from rich.console import Console
@@ -35,6 +38,20 @@ REFRESH_EXPIRY_BUFFER_SECONDS = 3 * 60 * 60 # 3 hours buffer before expiry
35
  console = Console()
36
 
37
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
38
  class QwenAuthBase:
39
  def __init__(self):
40
  self._credentials_cache: Dict[str, Dict[str, Any]] = {}
@@ -928,3 +945,251 @@ class QwenAuthBase:
928
  except Exception as e:
929
  lib_logger.error(f"Failed to get Qwen user info from credentials: {e}")
930
  return {"email": None}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9
  import logging
10
  import webbrowser
11
  import os
12
+ import re
13
+ from dataclasses import dataclass, field
14
  from pathlib import Path
15
+ from glob import glob
16
+ from typing import Dict, Any, Tuple, Union, Optional, List
17
 
18
  import httpx
19
  from rich.console import Console
 
38
  console = Console()
39
 
40
 
41
@dataclass
class QwenCredentialSetupResult:
    """
    Standardized result structure for Qwen credential setup operations.

    Returned by ``QwenAuthBase.setup_credential``; ``success`` decides which
    of the remaining fields are meaningful (``error`` on failure, the rest
    on success).
    """

    # True when OAuth completed and the credential file was written.
    success: bool
    # Absolute/relative path of the credential JSON that was saved.
    file_path: Optional[str] = None
    # Account email extracted from the OAuth response metadata.
    email: Optional[str] = None
    # True when an existing file for the same email was overwritten.
    is_update: bool = False
    # Human-readable failure reason; set only when success is False.
    error: Optional[str] = None
    # Full credential payload; repr=False keeps tokens out of logs/repr.
    credentials: Optional[Dict[str, Any]] = field(default=None, repr=False)
53
+
54
+
55
  class QwenAuthBase:
56
  def __init__(self):
57
  self._credentials_cache: Dict[str, Dict[str, Any]] = {}
 
945
  except Exception as e:
946
  lib_logger.error(f"Failed to get Qwen user info from credentials: {e}")
947
  return {"email": None}
948
+
949
+ # =========================================================================
950
+ # CREDENTIAL MANAGEMENT METHODS
951
+ # =========================================================================
952
+
953
+ def _get_provider_file_prefix(self) -> str:
954
+ """Return the file prefix for Qwen credentials."""
955
+ return "qwen_code"
956
+
957
+ def _get_oauth_base_dir(self) -> Path:
958
+ """Get the base directory for OAuth credential files."""
959
+ return Path.cwd() / "oauth_creds"
960
+
961
+ def _find_existing_credential_by_email(
962
+ self, email: str, base_dir: Optional[Path] = None
963
+ ) -> Optional[Path]:
964
+ """Find an existing credential file for the given email."""
965
+ if base_dir is None:
966
+ base_dir = self._get_oauth_base_dir()
967
+
968
+ prefix = self._get_provider_file_prefix()
969
+ pattern = str(base_dir / f"{prefix}_oauth_*.json")
970
+
971
+ for cred_file in glob(pattern):
972
+ try:
973
+ with open(cred_file, "r") as f:
974
+ creds = json.load(f)
975
+ existing_email = creds.get("_proxy_metadata", {}).get("email")
976
+ if existing_email == email:
977
+ return Path(cred_file)
978
+ except (json.JSONDecodeError, IOError) as e:
979
+ lib_logger.debug(f"Could not read credential file {cred_file}: {e}")
980
+ continue
981
+
982
+ return None
983
+
984
+ def _get_next_credential_number(self, base_dir: Optional[Path] = None) -> int:
985
+ """Get the next available credential number."""
986
+ if base_dir is None:
987
+ base_dir = self._get_oauth_base_dir()
988
+
989
+ prefix = self._get_provider_file_prefix()
990
+ pattern = str(base_dir / f"{prefix}_oauth_*.json")
991
+
992
+ existing_numbers = []
993
+ for cred_file in glob(pattern):
994
+ match = re.search(r"_oauth_(\d+)\.json$", cred_file)
995
+ if match:
996
+ existing_numbers.append(int(match.group(1)))
997
+
998
+ if not existing_numbers:
999
+ return 1
1000
+ return max(existing_numbers) + 1
1001
+
1002
+ def _build_credential_path(
1003
+ self, base_dir: Optional[Path] = None, number: Optional[int] = None
1004
+ ) -> Path:
1005
+ """Build a path for a new credential file."""
1006
+ if base_dir is None:
1007
+ base_dir = self._get_oauth_base_dir()
1008
+
1009
+ if number is None:
1010
+ number = self._get_next_credential_number(base_dir)
1011
+
1012
+ prefix = self._get_provider_file_prefix()
1013
+ filename = f"{prefix}_oauth_{number}.json"
1014
+ return base_dir / filename
1015
+
1016
async def setup_credential(
    self, base_dir: Optional[Path] = None
) -> QwenCredentialSetupResult:
    """
    Complete credential setup flow: OAuth -> save.

    This is the main entry point for setting up new credentials.

    Args:
        base_dir: Directory for credential files; defaults to
            ``self._get_oauth_base_dir()``.

    Returns:
        QwenCredentialSetupResult with the saved file path, account email
        and whether an existing credential was updated in place; on any
        failure ``success`` is False and ``error`` holds the reason.
    """
    if base_dir is None:
        base_dir = self._get_oauth_base_dir()

    # parents=True: a caller-supplied nested directory must not raise
    # FileNotFoundError (exist_ok alone only tolerates the final component).
    base_dir.mkdir(parents=True, exist_ok=True)

    try:
        # Step 1: Perform OAuth authentication
        temp_creds = {
            "_proxy_metadata": {"display_name": "new Qwen Code credential"}
        }
        new_creds = await self.initialize_token(temp_creds)

        # Step 2: Get user info for deduplication
        email = new_creds.get("_proxy_metadata", {}).get("email")

        if not email:
            return QwenCredentialSetupResult(
                success=False, error="Could not retrieve email from OAuth response"
            )

        # Step 3: Reuse the existing file when this account was saved before
        existing_path = self._find_existing_credential_by_email(email, base_dir)
        is_update = existing_path is not None

        if is_update:
            file_path = existing_path
            lib_logger.info(
                f"Found existing credential for {email}, updating {file_path.name}"
            )
        else:
            file_path = self._build_credential_path(base_dir)
            lib_logger.info(
                f"Creating new credential for {email} at {file_path.name}"
            )

        # Step 4: Save credentials to file
        await self._save_credentials(str(file_path), new_creds)

        return QwenCredentialSetupResult(
            success=True,
            file_path=str(file_path),
            email=email,
            is_update=is_update,
            credentials=new_creds,
        )

    except Exception as e:
        lib_logger.error(f"Credential setup failed: {e}")
        return QwenCredentialSetupResult(success=False, error=str(e))
1074
+
1075
def build_env_lines(self, creds: Dict[str, Any], cred_number: int) -> List[str]:
    """Render a Qwen credential as the lines of a standalone .env snippet."""
    email = creds.get("_proxy_metadata", {}).get("email", "unknown")
    var_prefix = f"QWEN_CODE_{cred_number}"

    header = [
        f"# QWEN_CODE Credential #{cred_number} for: {email}",
        f"# Exported from: qwen_code_oauth_{cred_number}.json",
        f"# Generated at: {time.strftime('%Y-%m-%d %H:%M:%S')}",
        "#",
        "# To combine multiple credentials into one .env file, copy these lines",
        "# and ensure each credential has a unique number (1, 2, 3, etc.)",
        "",
    ]
    values = [
        f"{var_prefix}_ACCESS_TOKEN={creds.get('access_token', '')}",
        f"{var_prefix}_REFRESH_TOKEN={creds.get('refresh_token', '')}",
        f"{var_prefix}_EXPIRY_DATE={creds.get('expiry_date', 0)}",
        f"{var_prefix}_RESOURCE_URL={creds.get('resource_url', 'https://portal.qwen.ai/v1')}",
        f"{var_prefix}_EMAIL={email}",
    ]
    return header + values
1096
+
1097
def export_credential_to_env(
    self, credential_path: str, output_dir: Optional[Path] = None
) -> Optional[str]:
    """Write a credential out as a .env snippet; return its path or None on error."""
    try:
        source = Path(credential_path)

        with open(source, "r") as handle:
            creds = json.load(handle)

        email = creds.get("_proxy_metadata", {}).get("email", "unknown")

        # The credential number comes from the source filename; default to 1.
        number_match = re.search(r"_oauth_(\d+)\.json$", source.name)
        cred_number = int(number_match.group(1)) if number_match else 1

        target_dir = output_dir if output_dir is not None else source.parent
        safe_email = email.replace("@", "_at_").replace(".", "_")
        env_path = target_dir / f"qwen_code_{cred_number}_{safe_email}.env"

        with open(env_path, "w") as handle:
            handle.write("\n".join(self.build_env_lines(creds, cred_number)))

        lib_logger.info(f"Exported credential to {env_path}")
        return str(env_path)

    except Exception as e:
        lib_logger.error(f"Failed to export credential: {e}")
        return None
1134
+
1135
def list_credentials(self, base_dir: Optional[Path] = None) -> List[Dict[str, Any]]:
    """Enumerate stored Qwen credential files as {file_path, email, number} dicts."""
    search_dir = base_dir if base_dir is not None else self._get_oauth_base_dir()
    pattern = str(search_dir / f"{self._get_provider_file_prefix()}_oauth_*.json")

    found: List[Dict[str, Any]] = []
    for cred_file in sorted(glob(pattern)):
        # Whole body stays inside try so any malformed file is skipped, not fatal.
        try:
            with open(cred_file, "r") as handle:
                creds = json.load(handle)

            metadata = creds.get("_proxy_metadata", {})
            number_match = re.search(r"_oauth_(\d+)\.json$", cred_file)
            found.append(
                {
                    "file_path": cred_file,
                    "email": metadata.get("email", "unknown"),
                    "number": int(number_match.group(1)) if number_match else 0,
                }
            )
        except Exception as e:
            lib_logger.debug(f"Could not read credential file {cred_file}: {e}")
            continue

    return found
1167
+
1168
def delete_credential(self, credential_path: str) -> bool:
    """Remove a stored Qwen credential file and forget any cached copy.

    Returns True when the file was deleted, False on validation failure
    or any filesystem error.
    """
    try:
        target = Path(credential_path)
        expected_prefix = f"{self._get_provider_file_prefix()}_oauth_"

        # Refuse to touch files that were not produced by this provider.
        if not target.name.startswith(expected_prefix):
            lib_logger.error(
                f"File {target.name} does not appear to be a Qwen Code credential"
            )
            return False

        if not target.exists():
            lib_logger.warning(f"Credential file does not exist: {credential_path}")
            return False

        # Drop the in-memory copy before removing the on-disk file.
        self._credentials_cache.pop(credential_path, None)

        target.unlink()
        lib_logger.info(f"Deleted credential file: {credential_path}")
        return True

    except Exception as e:
        lib_logger.error(f"Failed to delete credential: {e}")
        return False