mafzaal committed on
Commit
cd5a9cf
·
1 Parent(s): 4fdbfaa

Refactor prompt retrieval to use a configurable source defined in config; improve code clarity

Browse files
Files changed (2) hide show
  1. config.py +3 -0
  2. handlers/chainlit_handlers.py +2 -1
config.py CHANGED
@@ -27,6 +27,9 @@ Your job is to:
27
 
28
  Be thorough, objective, and focus on providing high-quality information."""
29
 
 
 
 
30
  # Tool configurations
31
  MAX_TAVILY_SEARCH_RESULTS = 5
32
  MAX_ARXIV_SEARCH_RESULTS = 5
 
27
 
28
  Be thorough, objective, and focus on providing high-quality information."""
29
 
30
+ # LangChain Hub prompt source
31
+ PROMPT_HUB_SOURCE = "mafzaal/openai-functions-agent"
32
+
33
  # Tool configurations
34
  MAX_TAVILY_SEARCH_RESULTS = 5
35
  MAX_ARXIV_SEARCH_RESULTS = 5
handlers/chainlit_handlers.py CHANGED
@@ -35,7 +35,8 @@ async def on_chat_start():
35
  toolkit = ResearchToolkit()
36
 
37
  # Initialize the agent with research tools
38
- prompt = hub.pull("hwchase17/openai-functions-agent")
 
39
 
40
  # Add our custom system prompt
41
  prompt = prompt.partial(
 
35
  toolkit = ResearchToolkit()
36
 
37
  # Initialize the agent with research tools
38
+ # Get prompt from hub using configurable source from config
39
+ prompt = hub.pull(config.PROMPT_HUB_SOURCE)
40
 
41
  # Add our custom system prompt
42
  prompt = prompt.partial(