GLECO committed on
Commit
49f468c
·
verified ·
1 Parent(s): 81917a3

Upload certificate_agent.py

Browse files
Files changed (1) hide show
  1. certificate_agent.py +106 -0
certificate_agent.py ADDED
@@ -0,0 +1,106 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langgraph.graph import StateGraph, END
2
+ from typing import TypedDict, List, Literal
3
+ from langchain_huggingface import HuggingFaceEndpoint, ChatHuggingFace
4
+ from visit_web_pages_tool import visit_webpage
5
+ import wikipedia
6
+ import json
7
+ from prompt import SYSTEM_PROMPT_MANAGER, SYSTEM_PROMPT_CLEANER
8
+ from langchain_community.tools import DuckDuckGoSearchResults
9
+
10
class GraphState(TypedDict):
    """State object shared by every node of the agent graph.

    Attributes:
        history: Running chat log — a list of system/human/assistant/tool
            messages that each node reads from and appends to.
    """
    history: List
13
# Remote HF inference endpoint: Qwen 2.5 72B Instruct used as the backbone
# model for the agents. max_new_tokens is generous to allow long tool output
# summaries in a single turn.
llm = HuggingFaceEndpoint(
    repo_id="Qwen/Qwen2.5-72B-Instruct",
    task='text-generation',
    max_new_tokens=4096
)
# Chat-style wrapper around the raw endpoint; this is the "manager" agent
# that drives the graph below.
manager_agent = ChatHuggingFace(llm=llm)
# NOTE(review): a second "cleaner" agent was sketched but is currently unused.
#cleaner_agent = ChatHuggingFace(llm=llm)
20
+
21
def llm_call(state: GraphState) -> GraphState:
    """Run one manager-agent turn.

    Sends the full conversation history to the manager agent and appends
    the model's reply to the history, mutating ``state`` in place.

    Args:
        state: Current graph state; ``state['history']`` is the chat log.

    Returns:
        The same state object with the model's answer appended.
    """
    history = state['history']
    print(history)  # debug: show exactly what is sent to the model
    history.append(manager_agent.invoke(history))
    return state
29
+
30
def tool_call(state: GraphState) -> GraphState:
    """Execute the tool requested by the model's last message.

    The last history entry is expected to carry a JSON payload with an
    ``action`` key. Only ``web_search`` is supported: the query is looked
    up on Wikipedia and the first matching page is fetched as markdown.
    Any other action — or an empty search result — produces an explanatory
    tool message instead of raising, so the graph can keep running.

    Args:
        state: Current graph state; mutated in place.

    Returns:
        The same state with exactly one tool message appended.
    """
    # Parse the model's last reply, which must be a JSON action payload.
    json_last_answer = json.loads(state['history'][-1].content)

    if json_last_answer['action'] == 'web_search':
        result_search = wikipedia.search(json_last_answer['query'])
        if result_search:
            # Fetch the top Wikipedia hit and convert the page to markdown.
            content = visit_webpage(wikipedia.page(result_search[0]).url)
        else:
            # Guard: wikipedia.search can return no hits; report it rather
            # than crashing with an IndexError on result_search[0].
            content = 'No results found for this query'
    else:
        content = 'Invalid tool call'

    # Single append/return point instead of the duplicated branches.
    state['history'].append(
        {'role': 'tool', 'content': content, 'tool_call_id': 'blablabla'}
    )
    return state
48
+
49
def force_final_answer(state: GraphState) -> GraphState:
    """Nudge the agent to commit to a final answer.

    Appends a human-role message asking the model to turn its
    intermediate answer into the final one; the graph then loops
    back to ``llm_call``.
    """
    nudge = 'Now provide the final answer based on the intermediate answer'
    state['history'].append({'role': 'human', 'content': nudge})
    return state
54
+
55
def router_edge_tool(state: GraphState) -> Literal["force_final_answer", "tool_call", "end"]:
    """Decide where the graph goes after an LLM turn.

    Parses the model's last message as a JSON action payload and maps it
    to the next node: ``intermediate_answer`` routes to the
    force_final_answer node, ``web_search`` to the tool_call node, and
    anything else terminates the graph.

    Returns:
        One of ``"force_final_answer"``, ``"tool_call"`` or ``"end"``.
        (Fixed: the original annotation listed ``"llm_call"``, which is
        never returned, and omitted ``"force_final_answer"``.)
    """
    json_last_answer = json.loads(state['history'][-1].content)
    action = json_last_answer['action']
    if action == "intermediate_answer":
        return "force_final_answer"
    elif action == 'web_search':
        return "tool_call"
    else:
        return "end"
63
+
64
+
65
# Assemble the agent graph:
#
#   llm_call --(router_edge_tool)--> tool_call | force_final_answer | END
#   tool_call ---------------------> llm_call   (feed tool output back)
#   force_final_answer ------------> llm_call   (ask for the final answer)
my_graph_build = StateGraph(GraphState)
my_graph_build.add_node("llm_call", llm_call)
my_graph_build.add_node("tool_call", tool_call)
my_graph_build.add_node("force_final_answer", force_final_answer)
# Map router_edge_tool's return value to the next node; "end" terminates.
my_graph_build.add_conditional_edges("llm_call", router_edge_tool, {"force_final_answer": "force_final_answer", "tool_call": "tool_call", "end": END})
my_graph_build.add_edge("tool_call", "llm_call")
my_graph_build.add_edge("force_final_answer", "llm_call")
my_graph_build.set_entry_point("llm_call")
# Compile into a runnable graph; invoke with an initial GraphState.
my_graph = my_graph_build.compile()
74
+
75
# Sample benchmark-style questions, each wrapped in a fresh initial state
# (system prompt + one human question) for manual testing of the graph.
init_state = GraphState(history=[
    {'role': 'system', 'content': SYSTEM_PROMPT_MANAGER},
    {'role': 'human', 'content': 'How many studio albums were published by Mercedes Sosa between 2000 and 2009 (included)?'}
])

init_state_2 = GraphState(history=[
    {'role': 'system', 'content': SYSTEM_PROMPT_MANAGER},
    {'role': 'human', 'content': 'Who did the actor who played Ray in the Polish-language version of Everybody Loves Raymond play in Magda M.? Give only the first name.'}
])

init_state_3 = GraphState(history=[
    {'role': 'system', 'content': SYSTEM_PROMPT_MANAGER},
    {'role': 'human', 'content': 'What is the first name of the only Malko Competition recipient from the 20th Century (after 1977) whose nationality on record is a country that no longer exists?'}
])

# Reversed-text question: the prompt must be read right-to-left.
init_state_4 = GraphState(history=[
    {'role': 'system', 'content': SYSTEM_PROMPT_MANAGER},
    {'role': 'human', 'content': '.rewsna eht sa \"tfel\" drow eht fo etisoppo eht etirw ,ecnetnes siht dnatsrednu uoy fI'}
])

init_state_5 = GraphState(history=[
    {'role': 'system', 'content': SYSTEM_PROMPT_MANAGER},
    {'role': 'human', 'content': "What country had the least number of athletes at the 1928 Summer Olympics? If there's a tie for a number of athletes, return the first in alphabetical order. Give the IOC country code as your answer."}
])
# Ad-hoc check of the raw manager agent (bypassing the graph); kept for debugging.
"""
print(manager_agent.invoke([
    {'role': 'system', 'content': SYSTEM_PROMPT_MANAGER},
    {'role': 'human', 'content': 'What is the first name of the only Malko Competition recipient from the 20th Century (after 1977) whose nationality on record is a country that no longer exists?'}
]))
"""
# Example invocations of the compiled graph; uncomment to run a question.
#print(my_graph.invoke(init_state_2))
#print(my_graph.invoke(init_state_5))