cstr committed on
Commit
f3548e7
·
verified ·
1 Parent(s): e9afc1d

Update format_transplant.py

Browse files
Files changed (1) hide show
  1. format_transplant.py +10 -6
format_transplant.py CHANGED
@@ -374,8 +374,8 @@ PROVIDER_DEFAULTS: Dict[str, Dict[str, Any]] = {
374
  "ollama": {
375
  "base_url": "http://localhost:11434/api",
376
  "env": "OLLAMA_API_KEY",
377
- "model": "ministral-3b-instruct-2512-q4_K_M",
378
- "fallbacks": ["cas/llama-3.2-3b-instruct:latest", "llama3.2", "mistral", "phi3"],
379
  "batch_size": 15
380
  },
381
  }
@@ -408,11 +408,15 @@ def llm_config_from_args(
408
  """Build an LLMConfig from CLI/UI inputs, filling defaults from PROVIDER_DEFAULTS."""
409
  import os
410
  defaults = PROVIDER_DEFAULTS.get(provider_str, {})
411
- resolved_key = api_key or os.getenv(defaults.get("env", ""), "")
412
- if not resolved_key:
 
 
 
 
413
  raise ValueError(
414
  f"No API key for provider '{provider_str}'. "
415
- f"Set env var {defaults.get('env', '?')} or pass --llm-key."
416
  )
417
 
418
  # Handle 'auto' or 'default' markers from UI/CLI
@@ -423,7 +427,7 @@ def llm_config_from_args(
423
  return LLMConfig(
424
  provider=LLMProvider(provider_str),
425
  model=resolved_model or defaults.get("model", ""),
426
- api_key=resolved_key,
427
  base_url=defaults.get("base_url"),
428
  para_batch_size=defaults.get("batch_size", 15),
429
  fallback_models=defaults.get("fallbacks", []),
 
374
  "ollama": {
375
  "base_url": "http://localhost:11434/api",
376
  "env": "OLLAMA_API_KEY",
377
+ "model": "ollama.com/library/ministral-3:3b-instruct-2512-q4_K_M",
378
+ "fallbacks": ["cas/llama-3.2-3b-instruct:latest", "llama3.2:latest", "mistral:latest", "phi3:latest"],
379
  "batch_size": 15
380
  },
381
  }
 
408
  """Build an LLMConfig from CLI/UI inputs, filling defaults from PROVIDER_DEFAULTS."""
409
  import os
410
  defaults = PROVIDER_DEFAULTS.get(provider_str, {})
411
+
412
+ # Resolve key: from args, then env, then fallback to empty for Ollama
413
+ env_var = defaults.get("env", "")
414
+ resolved_key = api_key or os.getenv(env_var, "")
415
+
416
+ if not resolved_key and provider_str != "ollama":
417
  raise ValueError(
418
  f"No API key for provider '{provider_str}'. "
419
+ f"Set env var {env_var or '?'} or pass --llm-key."
420
  )
421
 
422
  # Handle 'auto' or 'default' markers from UI/CLI
 
427
  return LLMConfig(
428
  provider=LLMProvider(provider_str),
429
  model=resolved_model or defaults.get("model", ""),
430
+ api_key=resolved_key or "no-key-needed",
431
  base_url=defaults.get("base_url"),
432
  para_batch_size=defaults.get("batch_size", 15),
433
  fallback_models=defaults.get("fallbacks", []),