cuongnguyen0610 committed on
Commit
3bc23ba
·
verified ·
1 Parent(s): 0e73920

Upload 2 files

Browse files
Files changed (2) hide show
  1. agent.py +10 -4
  2. requirements.txt +9 -7
agent.py CHANGED
@@ -1,4 +1,5 @@
1
- from llama_index.llms.openai import OpenAI
 
2
  from llama_index.tools.wikipedia.base import WikipediaToolSpec
3
  from llama_index.core.llms import ChatMessage
4
  from llama_index.core.agent import ReActAgent
@@ -69,10 +70,15 @@ class CuongBasicAgent:
69
  Below is the current conversation consisting of interleaving human and assistant messages.
70
  """
71
  react_system_prompt = PromptTemplate(system_prompt)
72
- #llm = DeepInfraLLM(
73
- # model="meta-llama/Llama-4-Maverick-17B-128E-Instruct-FP8",api_key=os.getenv("DEEPINFRA_API_KEY"))
 
 
 
 
 
 
74
 
75
- llm = OpenAI(model='Gemini-2.5', api_key=os.getenv("GEMINI_API_KEY"))
76
  agent = ReActAgent.from_tools(
77
  llm=llm,
78
  tools=WikipediaToolSpec().to_tool_list() + TavilyToolSpec(api_key=os.getenv('TAVILY_API_KEY')).to_tool_list(),
 
1
+ from llama_index.llms.openai import OpenAI # Keep if you might switch back to OpenAI
2
+ from llama_index.llms.gemini import Gemini # Import the Gemini LLM
3
  from llama_index.tools.wikipedia.base import WikipediaToolSpec
4
  from llama_index.core.llms import ChatMessage
5
  from llama_index.core.agent import ReActAgent
 
70
  Below is the current conversation consisting of interleaving human and assistant messages.
71
  """
72
  react_system_prompt = PromptTemplate(system_prompt)
73
+
74
+ # --- CHANGE HERE: Use Gemini directly ---
75
+ # Make sure you have your GOOGLE_API_KEY set as an environment variable
76
+ llm = Gemini(model='gemini-pro', api_key=os.getenv("GOOGLE_API_KEY"))
77
+ # 'gemini-pro' is a commonly used model for general tasks.
78
+ # You might also use 'gemini-1.5-pro' for larger contexts.
79
+ # Check the official Google Gemini API documentation for available model names.
80
+ # --- END CHANGE ---
81
 
 
82
  agent = ReActAgent.from_tools(
83
  llm=llm,
84
  tools=WikipediaToolSpec().to_tool_list() + TavilyToolSpec(api_key=os.getenv('TAVILY_API_KEY')).to_tool_list(),
requirements.txt CHANGED
@@ -1,7 +1,9 @@
1
- gradio
2
- requests
3
- pandas
4
- llama-index-llms-openai
5
- llama-index-tools-wikipedia
6
- llama-index-llms-deepinfra
7
- llama-index-tools-tavily_research
 
 
 
1
+ gradio>=4.0.0
2
+ requests>=2.31.0
3
+ pandas>=2.0.0
4
+ llama-index-core>=0.10.0 # Fundamental LlamaIndex package
5
+ llama-index-llms-openai>=0.1.0 # For OpenAI API (or Gemini via OpenAI compatibility)
6
+ llama-index-llms-gemini>=0.1.0 # Crucial if you are directly using llama_index.llms.gemini.Gemini
7
+ llama-index-llms-deepinfra>=0.1.0 # If you plan to use DeepInfra models
8
+ llama-index-tools-wikipedia>=0.1.0
9
+ llama-index-tools-tavily-research>=0.1.0