e-candeloro committed on
Commit
2cc34e0
·
1 Parent(s): 1c80084

changed to python 3.10

Browse files
Files changed (5) hide show
  1. .python-version +1 -1
  2. agent.py +6 -8
  3. pyproject.toml +2 -2
  4. requirements.txt +0 -1
  5. uv.lock +0 -0
.python-version CHANGED
@@ -1 +1 @@
1
- 3.11
 
1
+ 3.10
agent.py CHANGED
@@ -1,5 +1,4 @@
1
  import os
2
- import json
3
  from pathlib import Path
4
  from typing import Annotated, Optional, TypedDict, List
5
 
@@ -11,17 +10,14 @@ from langchain_core.messages import (
11
  HumanMessage,
12
  SystemMessage,
13
  )
14
- from langchain_core.tools import tool
15
- from langchain_google_genai import ChatGoogleGenerativeAI
16
  from langchain_groq import ChatGroq
17
  from langgraph.graph import START, StateGraph
18
  from langgraph.graph.message import add_messages
19
  from langgraph.prebuilt import ToolNode, tools_condition
20
 
21
  from tools import get_tools
22
- from langchain.globals import set_verbose
23
 
24
- set_verbose(True)
25
 
26
  # ---------------------- helpers ------------------------------------------------
27
  ENCODING = tiktoken.get_encoding("cl100k_base")
@@ -36,14 +32,16 @@ def build_graph():
36
  load_dotenv()
37
 
38
  base_prompt = PromptTemplate(
39
- template=Path(__file__).with_name("base_prompt.txt").read_text("utf-8"),
 
40
  input_variables=["tools", "file_info"],
41
  )
42
 
43
  TOOLS = get_tools()
44
 
45
  llm = ChatGroq(
46
- model="qwen/qwen3-32b", # valid Groq model id # was llama3-8b-8192
 
47
  temperature=0,
48
  api_key=os.getenv("GROQ_API_KEY"),
49
  )
@@ -100,7 +98,7 @@ if __name__ == "__main__":
100
  {
101
  "messages": [
102
  HumanMessage(
103
- content="Search the surname of the equine veterinarian mentioned in 1.E Exercises from the chemistry materials licensed by Marisa Alviar-Agnew & Henry Agnew under the CK-12 license in LibreText's Introductory Chemistry materials as compiled 08/21/2023?"
104
  )
105
  ],
106
  "input_file": "",
 
1
  import os
 
2
  from pathlib import Path
3
  from typing import Annotated, Optional, TypedDict, List
4
 
 
10
  HumanMessage,
11
  SystemMessage,
12
  )
13
+
 
14
  from langchain_groq import ChatGroq
15
  from langgraph.graph import START, StateGraph
16
  from langgraph.graph.message import add_messages
17
  from langgraph.prebuilt import ToolNode, tools_condition
18
 
19
  from tools import get_tools
 
20
 
 
21
 
22
  # ---------------------- helpers ------------------------------------------------
23
  ENCODING = tiktoken.get_encoding("cl100k_base")
 
32
  load_dotenv()
33
 
34
  base_prompt = PromptTemplate(
35
+ template=Path(__file__).with_name(
36
+ "base_prompt.txt").read_text("utf-8"),
37
  input_variables=["tools", "file_info"],
38
  )
39
 
40
  TOOLS = get_tools()
41
 
42
  llm = ChatGroq(
43
+ # valid Groq model id - less powerful but cheaper alternative: llama3-8b-8192
44
+ model="qwen/qwen3-32b",
45
  temperature=0,
46
  api_key=os.getenv("GROQ_API_KEY"),
47
  )
 
98
  {
99
  "messages": [
100
  HumanMessage(
101
+ content="What is the capital of France? "
102
  )
103
  ],
104
  "input_file": "",
pyproject.toml CHANGED
@@ -1,9 +1,9 @@
1
  [project]
2
  name = "hf-agent-gaia-30"
3
  version = "0.1.0"
4
- description = "Add your description here"
5
  readme = "README.md"
6
- requires-python = ">=3.11"
7
 
8
  dependencies = [
9
  "arxiv>=2.2.0",
 
1
  [project]
2
  name = "hf-agent-gaia-30"
3
  version = "0.1.0"
4
+ description = ""
5
  readme = "README.md"
6
+ requires-python = ">=3.10"
7
 
8
  dependencies = [
9
  "arxiv>=2.2.0",
requirements.txt CHANGED
@@ -14,7 +14,6 @@ langchain-huggingface,
14
  langchain-ollama,
15
  langchain-tavily,
16
  langgraph,
17
- langmem,
18
  matplotlib,
19
  networkx,
20
  numpy,
 
14
  langchain-ollama,
15
  langchain-tavily,
16
  langgraph,
 
17
  matplotlib,
18
  networkx,
19
  numpy,
uv.lock CHANGED
The diff for this file is too large to render. See raw diff