NavyDevilDoc committed on
Commit
4f369a1
·
verified ·
1 Parent(s): 13a7929

Update src/llm_client.py

Browse files
Files changed (1) hide show
  1. src/llm_client.py +1 -1
src/llm_client.py CHANGED
@@ -27,7 +27,7 @@ def ask_llm(query, context, mode="Executive Summary", model_provider="Gemini"):
27
 
28
  # NEW SYNTAX: Call generate_content via the 'models' attribute
29
  response = client.models.generate_content(
30
- model='gemini-1.5-flash', # or 'gemini-2.0-flash' if available to you
31
  contents=full_prompt,
32
  config=types.GenerateContentConfig(
33
  system_instruction=system_instruction,
 
27
 
28
  # NEW SYNTAX: Call generate_content via the 'models' attribute
29
  response = client.models.generate_content(
30
+ model='gemini-2.0-flash', # upgraded from 'gemini-1.5-flash'
31
  contents=full_prompt,
32
  config=types.GenerateContentConfig(
33
  system_instruction=system_instruction,