Fancellu committed on
Commit
256b43c
·
verified ·
1 Parent(s): 353b1bc

Using gemini 3 flash now

Browse files
Files changed (1) hide show
  1. utils/call_llm.py +1 -1
utils/call_llm.py CHANGED
@@ -68,7 +68,7 @@ def call_llm(prompt: str, use_cache: bool = True) -> str:
68
  api_key=os.getenv("GEMINI_API_KEY", ""),
69
  )
70
  # pro was too slow and janky
71
- model = os.getenv("GEMINI_MODEL", "gemini-2.5-flash")
72
  # model = os.getenv("GEMINI_MODEL", "gemini-2.5-flash-preview-04-17")
73
 
74
  response = client.models.generate_content(model=model, contents=[prompt])
 
68
  api_key=os.getenv("GEMINI_API_KEY", ""),
69
  )
70
  # pro was too slow and janky
71
+ model = os.getenv("GEMINI_MODEL", "gemini-3-flash-preview")
72
  # model = os.getenv("GEMINI_MODEL", "gemini-2.5-flash-preview-04-17")
73
 
74
  response = client.models.generate_content(model=model, contents=[prompt])