pratikmurali commited on
Commit
e521af9
·
0 Parent(s):

Simple LangGraph Agent Initial Commit

Browse files
Files changed (16) hide show
  1. .chainlit/config.toml +120 -0
  2. .chainlit/translations/en-US.json +214 -0
  3. .dockerignore +35 -0
  4. .gitignore +10 -0
  5. .python-version +1 -0
  6. Dockerfile +30 -0
  7. README.md +59 -0
  8. chainlit.md +14 -0
  9. chainlit.yaml +10 -0
  10. config.py +32 -0
  11. graph.py +102 -0
  12. main.py +139 -0
  13. pyproject.toml +20 -0
  14. requirements.txt +606 -0
  15. tools.py +53 -0
  16. uv.lock +0 -0
.chainlit/config.toml ADDED
@@ -0,0 +1,120 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [project]
2
+ # Whether to enable telemetry (default: true). No personal data is collected.
3
+ enable_telemetry = true
4
+
5
+
6
+ # List of environment variables to be provided by each user to use the app.
7
+ user_env = []
8
+
9
+ # Duration (in seconds) during which the session is saved when the connection is lost
10
+ session_timeout = 3600
11
+
12
+ # Duration (in seconds) of the user session expiry
13
+ user_session_timeout = 1296000 # 15 days
14
+
15
+ # Enable third parties caching (e.g., LangChain cache)
16
+ cache = false
17
+
18
+ # Authorized origins
19
+ allow_origins = ["*"]
20
+
21
+ [features]
22
+ # Process and display HTML in messages. This can be a security risk (see https://stackoverflow.com/questions/19603097/why-is-it-dangerous-to-render-user-generated-html-or-javascript)
23
+ unsafe_allow_html = false
24
+
25
+ # Process and display mathematical expressions. This can clash with "$" characters in messages.
26
+ latex = false
27
+
28
+ # Autoscroll new user messages at the top of the window
29
+ user_message_autoscroll = true
30
+
31
+ # Automatically tag threads with the current chat profile (if a chat profile is used)
32
+ auto_tag_thread = true
33
+
34
+ # Allow users to edit their own messages
35
+ edit_message = true
36
+
37
+ # Authorize users to spontaneously upload files with messages
38
+ [features.spontaneous_file_upload]
39
+ enabled = true
40
+ # Define accepted file types using MIME types
41
+ # Examples:
42
+ # 1. For specific file types:
43
+ # accept = ["image/jpeg", "image/png", "application/pdf"]
44
+ # 2. For all files of certain type:
45
+ # accept = ["image/*", "audio/*", "video/*"]
46
+ # 3. For specific file extensions:
47
+ # accept = { "application/octet-stream" = [".xyz", ".pdb"] }
48
+ # Note: Using "*/*" is not recommended as it may cause browser warnings
49
+ accept = ["*/*"]
50
+ max_files = 20
51
+ max_size_mb = 500
52
+
53
+ [features.audio]
54
+ # Sample rate of the audio
55
+ sample_rate = 24000
56
+
57
+ [features.mcp.sse]
58
+ enabled = true
59
+
60
+ [features.mcp.stdio]
61
+ enabled = true
62
+ # Only the executables in the allow list can be used for MCP stdio server.
63
+ # Only need the base name of the executable, e.g. "npx", not "/usr/bin/npx".
64
+ # Please don't comment this line for now, we need it to parse the executable name.
65
+ allowed_executables = [ "npx", "uvx" ]
66
+
67
+ [UI]
68
+ # Name of the assistant.
69
+ name = "Assistant"
70
+
71
+ # default_theme = "dark"
72
+
73
+ # layout = "wide"
74
+
75
+ # default_sidebar_state = "open"
76
+
77
+ # Description of the assistant. This is used for HTML tags.
78
+ # description = ""
79
+
80
+ # Chain of Thought (CoT) display mode. Can be "hidden", "tool_call" or "full".
81
+ cot = "full"
82
+
83
+ # Specify a CSS file that can be used to customize the user interface.
84
+ # The CSS file can be served from the public directory or via an external link.
85
+ # custom_css = "/public/test.css"
86
+
87
+ # Specify additional attributes for a custom CSS file
88
+ # custom_css_attributes = "media=\"print\""
89
+
90
+ # Specify a JavaScript file that can be used to customize the user interface.
91
+ # The JavaScript file can be served from the public directory.
92
+ # custom_js = "/public/test.js"
93
+
94
+ # Specify additional attributes for custom JS file
95
+ # custom_js_attributes = "async type = \"module\""
96
+
97
+ # Custom login page image, relative to public directory or external URL
98
+ # login_page_image = "/public/custom-background.jpg"
99
+
100
+ # Custom login page image filter (Tailwind internal filters, no dark/light variants)
101
+ # login_page_image_filter = "brightness-50 grayscale"
102
+ # login_page_image_dark_filter = "contrast-200 blur-sm"
103
+
104
+ # Specify a custom meta image url.
105
+ # custom_meta_image_url = "https://chainlit-cloud.s3.eu-west-3.amazonaws.com/logo/chainlit_banner.png"
106
+
107
+ # Specify a custom build directory for the frontend.
108
+ # This can be used to customize the frontend code.
109
+ # Be careful: If this is a relative path, it should not start with a slash.
110
+ # custom_build = "./public/build"
111
+
112
+ # Specify optional one or more custom links in the header.
113
+ # [[UI.header_links]]
114
+ # name = "Issues"
115
+ # display_name = "Report Issue"
116
+ # icon_url = "https://avatars.githubusercontent.com/u/128686189?s=200&v=4"
117
+ # url = "https://github.com/Chainlit/chainlit/issues"
118
+
119
+ [meta]
120
+ generated_by = "2.5.5"
.chainlit/translations/en-US.json ADDED
@@ -0,0 +1,214 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "common": {
3
+ "actions": {
4
+ "cancel": "Cancel",
5
+ "confirm": "Confirm",
6
+ "continue": "Continue",
7
+ "goBack": "Go Back",
8
+ "reset": "Reset",
9
+ "submit": "Submit"
10
+ },
11
+ "status": {
12
+ "loading": "Loading...",
13
+ "error": {
14
+ "default": "An error occurred",
15
+ "serverConnection": "Could not reach the server"
16
+ }
17
+ }
18
+ },
19
+ "auth": {
20
+ "login": {
21
+ "title": "Login to access the app",
22
+ "form": {
23
+ "email": {
24
+ "label": "Email address",
25
+ "required": "email is a required field"
26
+ },
27
+ "password": {
28
+ "label": "Password",
29
+ "required": "password is a required field"
30
+ },
31
+ "actions": {
32
+ "signin": "Sign In"
33
+ },
34
+ "alternativeText": {
35
+ "or": "OR"
36
+ }
37
+ },
38
+ "errors": {
39
+ "default": "Unable to sign in",
40
+ "signin": "Try signing in with a different account",
41
+ "oauthSignin": "Try signing in with a different account",
42
+ "redirectUriMismatch": "The redirect URI is not matching the oauth app configuration",
43
+ "oauthCallback": "Try signing in with a different account",
44
+ "oauthCreateAccount": "Try signing in with a different account",
45
+ "emailCreateAccount": "Try signing in with a different account",
46
+ "callback": "Try signing in with a different account",
47
+ "oauthAccountNotLinked": "To confirm your identity, sign in with the same account you used originally",
48
+ "emailSignin": "The e-mail could not be sent",
49
+ "emailVerify": "Please verify your email, a new email has been sent",
50
+ "credentialsSignin": "Sign in failed. Check the details you provided are correct",
51
+ "sessionRequired": "Please sign in to access this page"
52
+ }
53
+ },
54
+ "provider": {
55
+ "continue": "Continue with {{provider}}"
56
+ }
57
+ },
58
+ "chat": {
59
+ "input": {
60
+ "placeholder": "Type your message here...",
61
+ "actions": {
62
+ "send": "Send message",
63
+ "stop": "Stop Task",
64
+ "attachFiles": "Attach files"
65
+ }
66
+ },
67
+ "speech": {
68
+ "start": "Start recording",
69
+ "stop": "Stop recording",
70
+ "connecting": "Connecting"
71
+ },
72
+ "fileUpload": {
73
+ "dragDrop": "Drag and drop files here",
74
+ "browse": "Browse Files",
75
+ "sizeLimit": "Limit:",
76
+ "errors": {
77
+ "failed": "Failed to upload",
78
+ "cancelled": "Cancelled upload of"
79
+ }
80
+ },
81
+ "messages": {
82
+ "status": {
83
+ "using": "Using",
84
+ "used": "Used"
85
+ },
86
+ "actions": {
87
+ "copy": {
88
+ "button": "Copy to clipboard",
89
+ "success": "Copied!"
90
+ }
91
+ },
92
+ "feedback": {
93
+ "positive": "Helpful",
94
+ "negative": "Not helpful",
95
+ "edit": "Edit feedback",
96
+ "dialog": {
97
+ "title": "Add a comment",
98
+ "submit": "Submit feedback"
99
+ },
100
+ "status": {
101
+ "updating": "Updating",
102
+ "updated": "Feedback updated"
103
+ }
104
+ }
105
+ },
106
+ "history": {
107
+ "title": "Last Inputs",
108
+ "empty": "Such empty...",
109
+ "show": "Show history"
110
+ },
111
+ "settings": {
112
+ "title": "Settings panel"
113
+ },
114
+ "watermark": "Built with"
115
+ },
116
+ "threadHistory": {
117
+ "sidebar": {
118
+ "title": "Past Chats",
119
+ "filters": {
120
+ "search": "Search",
121
+ "placeholder": "Search conversations..."
122
+ },
123
+ "timeframes": {
124
+ "today": "Today",
125
+ "yesterday": "Yesterday",
126
+ "previous7days": "Previous 7 days",
127
+ "previous30days": "Previous 30 days"
128
+ },
129
+ "empty": "No threads found",
130
+ "actions": {
131
+ "close": "Close sidebar",
132
+ "open": "Open sidebar"
133
+ }
134
+ },
135
+ "thread": {
136
+ "untitled": "Untitled Conversation",
137
+ "menu": {
138
+ "rename": "Rename",
139
+ "delete": "Delete"
140
+ },
141
+ "actions": {
142
+ "delete": {
143
+ "title": "Confirm deletion",
144
+ "description": "This will delete the thread as well as its messages and elements. This action cannot be undone",
145
+ "success": "Chat deleted",
146
+ "inProgress": "Deleting chat"
147
+ },
148
+ "rename": {
149
+ "title": "Rename Thread",
150
+ "description": "Enter a new name for this thread",
151
+ "form": {
152
+ "name": {
153
+ "label": "Name",
154
+ "placeholder": "Enter new name"
155
+ }
156
+ },
157
+ "success": "Thread renamed!",
158
+ "inProgress": "Renaming thread"
159
+ }
160
+ }
161
+ }
162
+ },
163
+ "navigation": {
164
+ "header": {
165
+ "chat": "Chat",
166
+ "readme": "Readme",
167
+ "theme": {
168
+ "light": "Light Theme",
169
+ "dark": "Dark Theme",
170
+ "system": "Follow System"
171
+ }
172
+ },
173
+ "newChat": {
174
+ "button": "New Chat",
175
+ "dialog": {
176
+ "title": "Create New Chat",
177
+ "description": "This will clear your current chat history. Are you sure you want to continue?",
178
+ "tooltip": "New Chat"
179
+ }
180
+ },
181
+ "user": {
182
+ "menu": {
183
+ "settings": "Settings",
184
+ "settingsKey": "S",
185
+ "apiKeys": "API Keys",
186
+ "logout": "Logout"
187
+ }
188
+ }
189
+ },
190
+ "apiKeys": {
191
+ "title": "Required API Keys",
192
+ "description": "To use this app, the following API keys are required. The keys are stored on your device's local storage.",
193
+ "success": {
194
+ "saved": "Saved successfully"
195
+ }
196
+ },
197
+ "alerts": {
198
+ "info": "Info",
199
+ "note": "Note",
200
+ "tip": "Tip",
201
+ "important": "Important",
202
+ "warning": "Warning",
203
+ "caution": "Caution",
204
+ "debug": "Debug",
205
+ "example": "Example",
206
+ "success": "Success",
207
+ "help": "Help",
208
+ "idea": "Idea",
209
+ "pending": "Pending",
210
+ "security": "Security",
211
+ "beta": "Beta",
212
+ "best-practice": "Best Practice"
213
+ }
214
+ }
.dockerignore ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Version control
2
+ .git
3
+ .gitignore
4
+
5
+ # Virtual environments
6
+ .venv
7
+ venv
8
+ env
9
+
10
+ # Python cache files
11
+ __pycache__/
12
+ *.py[cod]
13
+ *$py.class
14
+ .pytest_cache
15
+ .coverage
16
+ htmlcov/
17
+
18
+ # Build directories
19
+ dist/
20
+ build/
21
+ *.egg-info/
22
+
23
+ # IDE directories
24
+ .idea/
25
+ .vscode/
26
+
27
+ # Environment files (security - don't include real .env in production)
28
+ # .env
29
+
30
+ # Logs
31
+ logs/
32
+ *.log
33
+
34
+ # Chainlit cache
35
+ .chainlit/cache
.gitignore ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ # Python-generated files
2
+ __pycache__/
3
+ *.py[oc]
4
+ build/
5
+ dist/
6
+ wheels/
7
+ *.egg-info
8
+
9
+ # Virtual environments
10
+ .venv
.python-version ADDED
@@ -0,0 +1 @@
 
 
1
+ 3.11
Dockerfile ADDED
@@ -0,0 +1,30 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Get a distribution that has uv already installed
2
+ FROM ghcr.io/astral-sh/uv:python3.11-bookworm-slim
3
+
4
+ # Add user - this is the user that will run the app
5
+ # If you do not set user, the app will run as root (undesirable)
6
+ RUN useradd -m -u 1000 user
7
+ USER user
8
+
9
+ # Set the home directory and path
10
+ ENV HOME=/home/user \
11
+ PATH=/home/user/.local/bin:$PATH
12
+
13
+ # Set the working directory
14
+ WORKDIR $HOME/app
15
+
16
+ # Copy the app to the container
17
+ COPY --chown=user . $HOME/app/
18
+
19
+ # Install the dependencies
20
+ # RUN uv sync --frozen
21
+ RUN uv sync
22
+
23
+ # Set the port for Hugging Face
24
+ ENV PORT=7860
25
+ # Expose the port Chainlit runs on
26
+ EXPOSE 7860
27
+
28
+ # Command to run the application
29
+ # Run the app
30
+ CMD ["uv", "run", "chainlit", "run", "main.py", "--host", "0.0.0.0", "--port", "7860"]
README.md ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # LangGraph Tool-Calling Agent
2
+
3
+ A research assistant powered by LangGraph and Chainlit that can search the web and query arXiv papers to answer questions.
4
+
5
+ ## Features
6
+
7
+ - Multi-tool agent with web search capabilities (Tavily and DuckDuckGo)
8
+ - Academic research with arXiv integration
9
+ - Interactive Chainlit web interface
10
+ - Streaming responses with real-time tool usage visibility
11
+
12
+ ## Local Development
13
+
14
+ 1. Clone the repository
15
+ 2. Create a virtual environment: `python -m venv .venv`
16
+ 3. Activate the virtual environment:
17
+ - Windows: `.venv\Scripts\activate`
18
+ - macOS/Linux: `source .venv/bin/activate`
19
+ 4. Install dependencies: `pip install -r requirements.txt`
20
+ 5. Create a `.env` file with your API keys:
21
+ ```
22
+ OPENAI_API_KEY=your_openai_key_here
23
+ TAVILY_API_KEY=your_tavily_key_here
24
+ LANGCHAIN_API_KEY=your_langchain_key_here
25
+ LANGCHAIN_TRACING_V2=true
26
+ LANGCHAIN_PROJECT=tool-calling-agent
27
+ ```
28
+ 6. Run the application: `chainlit run main.py`
29
+ 7. Open your browser to http://localhost:8501
30
+
31
+ ## Deploying to Hugging Face Spaces
32
+
33
+ 1. Create a new Space on Hugging Face (https://huggingface.co/new-space)
34
+ 2. Choose "Docker" as the Space SDK
35
+ 3. Clone your Space repository
36
+ 4. Copy your project files to the cloned repository
37
+ 5. Add your API keys as repository secrets in the Space settings
38
+ 6. Push your changes to Hugging Face
39
+ 7. Your app will build and deploy automatically
40
+
41
+ ### Environment Variables for Hugging Face
42
+
43
+ Make sure to add these environment variables in your Hugging Face Space settings:
44
+
45
+ - `OPENAI_API_KEY`
46
+ - `TAVILY_API_KEY`
47
+ - `LANGCHAIN_API_KEY`
48
+ - `LANGCHAIN_TRACING_V2`
49
+ - `LANGCHAIN_PROJECT`
50
+
51
+ ## Project Structure
52
+
53
+ - `main.py` - Entry point and Chainlit handler
54
+ - `config.py` - Configuration management
55
+ - `tools.py` - Tool definitions and setup
56
+ - `graph.py` - LangGraph agent implementation
57
+ - `chainlit.yaml` - Chainlit configuration
58
+ - `Dockerfile` - Container definition for deployment
59
+
chainlit.md ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Welcome to Chainlit! 🚀🤖
2
+
3
+ Hi there, Developer! 👋 We're excited to have you on board. Chainlit is a powerful tool designed to help you prototype, debug and share applications built on top of LLMs.
4
+
5
+ ## Useful Links 🔗
6
+
7
+ - **Documentation:** Get started with our comprehensive [Chainlit Documentation](https://docs.chainlit.io) 📚
8
+ - **Discord Community:** Join our friendly [Chainlit Discord](https://discord.gg/k73SQ3FyUh) to ask questions, share your projects, and connect with other developers! 💬
9
+
10
+ We can't wait to see what you create with Chainlit! Happy coding! 💻😊
11
+
12
+ ## Welcome screen
13
+
14
+ To modify the welcome screen, edit the `chainlit.md` file at the root of your project. If you do not want a welcome screen, just leave this file empty.
chainlit.yaml ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ # chainlit.yaml
2
+ chainlit_server:
3
+ port: 8501
4
+ cors:
5
+ allowed_origins: ["*"]
6
+
7
+ ui:
8
+ name: "LangGraph Agent"
9
+ default_expand_messages: true
10
+ theme: light
config.py ADDED
@@ -0,0 +1,32 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
"""
Configuration module for the LangGraph application.
"""
import os
from uuid import uuid4
from typing import Dict, Any


def load_config() -> Dict[str, Any]:
    """
    Assemble the application configuration from environment variables.

    Secrets (OpenAI / Tavily / LangChain keys) come from the environment;
    model and search settings are fixed defaults.

    Returns:
        Dict[str, Any]: The populated configuration dictionary.

    Raises:
        ValueError: If a required API key is missing from the environment.
    """
    # A fresh 8-char run id keeps each LangSmith project name unique.
    run_id = uuid4().hex[:8]

    settings: Dict[str, Any] = {
        "openai_api_key": os.getenv("OPENAI_API_KEY"),
        "tavily_api_key": os.getenv("TAVILY_API_KEY"),
        "langchain_api_key": os.getenv("LANGCHAIN_API_KEY"),
        "langchain_tracing_v2": os.getenv("LANGCHAIN_TRACING_V2"),
        "langchain_project": f"AIE6 - HW5 Advanced Build - {run_id}",
        "model_name": "gpt-4o-mini",
        "temperature": 0,
        "tavily_max_results": 5,
    }

    # Fail fast when a mandatory secret is absent.
    for required in ("openai_api_key", "tavily_api_key"):
        if not settings[required]:
            raise ValueError(f"Missing required environment variable for {required}")

    return settings
graph.py ADDED
@@ -0,0 +1,102 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Module for creating and configuring the LangGraph agent workflow.
3
+ """
4
+ from typing import List, Dict, Any
5
+
6
+ from langsmith import traceable
7
+ from pydantic import BaseModel
8
+ from typing import Annotated
9
+
10
+ from langgraph.graph import StateGraph, END
11
+ from langgraph.graph.message import add_messages
12
+ from langgraph.prebuilt import ToolNode
13
+ from langchain_openai import ChatOpenAI
14
+
15
class AgentState(BaseModel):
    """Conversation state carried between graph nodes.

    ``messages`` accumulates the chat transcript; the ``add_messages``
    reducer appends node outputs instead of overwriting the list.
    """

    # Reducer-annotated so LangGraph merges partial updates into history.
    messages: Annotated[list, add_messages]
20
+
21
@traceable(
    metadata={
        "model_name": "gpt-4o-mini",
        "model_provider": "ChatOpenAI",
    }
)
def call_model(state: AgentState) -> Dict[str, Any]:
    """
    Node function that calls the LLM to generate a response.

    Args:
        state: Current state containing the message history.

    Returns:
        A partial state update ({"messages": [...]}); the ``add_messages``
        reducer appends the response to the existing history.
    """
    # The tool-bound model is registered by create_agent_graph() and
    # fetched here so the compiled graph never has to serialize it.
    model = get_model()
    response = model.invoke(state.messages)
    return {"messages": [response]}
45
+
46
def should_continue(state: AgentState) -> str:
    """
    Route the graph after the agent node.

    Args:
        state: Current agent state.

    Returns:
        "action" when the latest message requests tool calls,
        otherwise END to finish the run.
    """
    newest = state.messages[-1]
    # Pending tool calls mean the model wants a tool executed first.
    return "action" if newest.tool_calls else END
62
+
63
def create_agent_graph(tools: List, model: ChatOpenAI) -> StateGraph:
    """
    Build and compile the LangGraph agent workflow.

    Args:
        tools: LangChain tools the agent may invoke.
        model: ChatOpenAI model (with the tools already bound).

    Returns:
        The compiled state graph.
    """
    # Stash the model at module level so call_model can reach it without
    # the compiled graph having to serialize the model object itself.
    global _model
    _model = model

    workflow = StateGraph(AgentState)

    # "agent" asks the LLM; "action" executes any requested tools.
    workflow.add_node("agent", call_model)
    workflow.add_node("action", ToolNode(tools))

    # Start with the LLM, feed tool output back into it, and let
    # should_continue decide when the loop is done.
    workflow.set_entry_point("agent")
    workflow.add_edge("action", "agent")
    workflow.add_conditional_edges("agent", should_continue)

    return workflow.compile()
100
+
101
def get_model():
    """
    Return the model registered by create_agent_graph().

    Returns:
        The tool-bound ChatOpenAI model stored in this module's globals.

    Raises:
        RuntimeError: If called before create_agent_graph() stored a model
            (previously this surfaced as an opaque KeyError).
    """
    model = globals().get("_model")
    if model is None:
        raise RuntimeError("No model registered; call create_agent_graph() first.")
    return model
main.py ADDED
@@ -0,0 +1,139 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Standard library
import os
import re

# Third-party
import chainlit as cl
from dotenv import load_dotenv
from langchain_core.messages import HumanMessage
from langchain_core.runnables.config import RunnableConfig

# Local modules
from config import load_config
from graph import create_agent_graph
from tools import create_tools

# Build the agent once at import time so every Chainlit session shares
# the same compiled graph.
load_dotenv()
config = load_config()
tools, model = create_tools(config)
graph = create_agent_graph(tools, model)
19
+
20
def clean_content(content):
    """Normalize agent/tool output for display in the chat UI.

    Args:
        content: Raw text to normalize (may be falsy).

    Returns:
        The cleaned string; "" for empty/None input.
    """
    if not content:
        return ""

    # Turn escaped sequences back into real characters.
    text = content.replace('\\n', '\n').replace('\\\\', '\\')

    # Wrap bare URLs as markdown links (skip ones already preceded by
    # a bracket, parenthesis, or quote).
    text = re.sub(r'(?<![\[\("])https?://[^\s\'"]+', r'[Link](\g<0>)', text)

    # Strip stray backslashes from markdown links so they render.
    text = re.sub(r'\\*\[([^\]]+)\]\\*\(([^)]+)\)', r'[\1](\2)', text)

    # Collapse runs of 3+ newlines down to a single blank line.
    return re.sub(r'\n{3,}', '\n\n', text)
48
+
49
@cl.on_chat_start
async def on_chat_start():
    """Initialize session state and greet the user."""
    # Keep a handle to the shared compiled graph in this session.
    cl.user_session.set("graph", graph)

    greeting = (
        "👋 Hello! I'm a research assistant powered by LangGraph. "
        "I can search the web using Tavily and query arXiv papers. "
        "How can I help you today?"
    )
    await cl.Message(content=greeting).send()
56
+
57
@cl.on_message
async def on_message(message: cl.Message):
    """Run the LangGraph agent on a user message and stream the results."""
    # Compiled graph stored for this session by on_chat_start.
    agent = cl.user_session.get("graph")

    # Seed the graph with the user's message.
    inputs = {"messages": [HumanMessage(content=message.content)]}

    # LangChain callback handler lets Chainlit trace intermediate steps.
    run_config = RunnableConfig(callbacks=[cl.LangchainCallbackHandler()])

    # Show a placeholder while the first chunk is on its way.
    thinking = cl.Message(content="🤔 Thinking...")
    await thinking.send()

    # The assistant's answer is streamed into this message.
    reply = cl.Message(content="")
    await reply.send()

    # Drop the placeholder once streaming is about to begin.
    await thinking.remove()

    announced_tool = None
    async for chunk in agent.astream(inputs, stream_mode="values", config=run_config):
        if not ("messages" in chunk and chunk["messages"]):
            continue
        latest = chunk["messages"][-1]

        # Stream any text content into the reply message.
        if hasattr(latest, "content") and latest.content:
            await reply.stream_token(clean_content(latest.content))

        # Announce a tool invocation, once per distinct tool.
        if hasattr(latest, "tool_calls") and latest.tool_calls:
            invoked = latest.tool_calls[0].get('name', '')
            if invoked != announced_tool:
                announced_tool = invoked
                banner = f"🔧 Using {invoked.replace('_', ' ').title()}"
                await cl.Message(content=banner, author="Agent").send()

        # Tool result messages carry a "name"; show a trimmed preview.
        if hasattr(latest, "name") and latest.name:
            result_tool = latest.name
            preview = latest.content
            # Truncate before cleaning, matching the display budget.
            if len(preview) > 300:
                preview = preview[:297] + "..."
            preview = clean_content(preview)

            # Pick an icon by tool family.
            lowered = result_tool.lower()
            if "arxiv" in lowered:
                icon = "📚"
            elif "search" in lowered or "tavily" in lowered:
                icon = "🌐"
            else:
                icon = "🔍"

            header = f"{icon} {result_tool.replace('_', ' ').title()} Results:"
            await cl.Message(content=f"{header}\n\n{preview}", author="Tool").send()
137
if __name__ == "__main__":
    # Direct execution is not supported; Chainlit must host the app.
    print("Please run this app with Chainlit: chainlit run main.py")
pyproject.toml ADDED
@@ -0,0 +1,20 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [project]
2
+ name = "tool-calling-agent"
3
+ version = "0.1.0"
4
+ description = "A LangGraph tool-calling research agent with a Chainlit web UI"
5
+ readme = "README.md"
6
+ requires-python = ">=3.11, <3.12"
7
+ dependencies = [
8
+ "arxiv>=2.2.0",
9
+ "chainlit>=2.5.5",
10
+ "duckduckgo-search>=8.0.1",
11
+ "langchain>=0.3.23",
12
+ "langchain-community>=0.3.21",
13
+ "langchain-core>=0.3.52",
14
+ "langchain-openai>=0.3.13",
15
+ "langgraph>=0.3.30",
16
+ "langgraph-prebuilt>=0.1.8",
17
+ "pydantic>=2.11.3",
18
+ "python-dotenv>=1.1.0",
19
+ "tavily-python>=0.5.4",
20
+ ]
requirements.txt ADDED
@@ -0,0 +1,606 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file was autogenerated by uv via the following command:
2
+ # uv pip compile pyproject.toml -o requirements.txt
3
+ aiofiles==24.1.0
4
+ # via chainlit
5
+ aiohappyeyeballs==2.6.1
6
+ # via aiohttp
7
+ aiohttp==3.11.16
8
+ # via
9
+ # langchain-community
10
+ # traceloop-sdk
11
+ aiosignal==1.3.2
12
+ # via aiohttp
13
+ annotated-types==0.7.0
14
+ # via pydantic
15
+ anthropic==0.49.0
16
+ # via opentelemetry-instrumentation-bedrock
17
+ anyio==4.9.0
18
+ # via
19
+ # anthropic
20
+ # asyncer
21
+ # httpx
22
+ # mcp
23
+ # openai
24
+ # sse-starlette
25
+ # starlette
26
+ # watchfiles
27
+ arxiv==2.2.0
28
+ # via tool-calling-agent (pyproject.toml)
29
+ asyncer==0.0.7
30
+ # via chainlit
31
+ attrs==25.3.0
32
+ # via aiohttp
33
+ backoff==2.2.1
34
+ # via posthog
35
+ bidict==0.23.1
36
+ # via python-socketio
37
+ certifi==2025.1.31
38
+ # via
39
+ # httpcore
40
+ # httpx
41
+ # requests
42
+ chainlit==2.5.5
43
+ # via tool-calling-agent (pyproject.toml)
44
+ charset-normalizer==3.4.1
45
+ # via requests
46
+ chevron==0.14.0
47
+ # via literalai
48
+ click==8.1.8
49
+ # via
50
+ # chainlit
51
+ # duckduckgo-search
52
+ # uvicorn
53
+ colorama==0.4.6
54
+ # via traceloop-sdk
55
+ dataclasses-json==0.6.7
56
+ # via
57
+ # chainlit
58
+ # langchain-community
59
+ deprecated==1.2.18
60
+ # via
61
+ # opentelemetry-api
62
+ # opentelemetry-exporter-otlp-proto-grpc
63
+ # opentelemetry-exporter-otlp-proto-http
64
+ # opentelemetry-semantic-conventions
65
+ # traceloop-sdk
66
+ distro==1.9.0
67
+ # via
68
+ # anthropic
69
+ # openai
70
+ # posthog
71
+ duckduckgo-search==8.0.1
72
+ # via tool-calling-agent (pyproject.toml)
73
+ fastapi==0.115.12
74
+ # via chainlit
75
+ feedparser==6.0.11
76
+ # via arxiv
77
+ filelock==3.18.0
78
+ # via huggingface-hub
79
+ filetype==1.2.0
80
+ # via chainlit
81
+ frozenlist==1.6.0
82
+ # via
83
+ # aiohttp
84
+ # aiosignal
85
+ fsspec==2025.3.2
86
+ # via huggingface-hub
87
+ googleapis-common-protos==1.70.0
88
+ # via
89
+ # opentelemetry-exporter-otlp-proto-grpc
90
+ # opentelemetry-exporter-otlp-proto-http
91
+ grpcio==1.71.0
92
+ # via opentelemetry-exporter-otlp-proto-grpc
93
+ h11==0.14.0
94
+ # via
95
+ # httpcore
96
+ # uvicorn
97
+ # wsproto
98
+ httpcore==1.0.8
99
+ # via httpx
100
+ httpx==0.28.1
101
+ # via
102
+ # anthropic
103
+ # chainlit
104
+ # langgraph-sdk
105
+ # langsmith
106
+ # literalai
107
+ # mcp
108
+ # openai
109
+ # tavily-python
110
+ httpx-sse==0.4.0
111
+ # via
112
+ # langchain-community
113
+ # mcp
114
+ huggingface-hub==0.30.2
115
+ # via tokenizers
116
+ idna==3.10
117
+ # via
118
+ # anyio
119
+ # httpx
120
+ # requests
121
+ # yarl
122
+ importlib-metadata==8.6.1
123
+ # via opentelemetry-api
124
+ inflection==0.5.1
125
+ # via opentelemetry-instrumentation-llamaindex
126
+ jinja2==3.1.6
127
+ # via traceloop-sdk
128
+ jiter==0.9.0
129
+ # via
130
+ # anthropic
131
+ # openai
132
+ jsonpatch==1.33
133
+ # via langchain-core
134
+ jsonpointer==3.0.0
135
+ # via jsonpatch
136
+ langchain==0.3.23
137
+ # via
138
+ # tool-calling-agent (pyproject.toml)
139
+ # langchain-community
140
+ langchain-community==0.3.21
141
+ # via tool-calling-agent (pyproject.toml)
142
+ langchain-core==0.3.54
143
+ # via
144
+ # tool-calling-agent (pyproject.toml)
145
+ # langchain
146
+ # langchain-community
147
+ # langchain-openai
148
+ # langchain-text-splitters
149
+ # langgraph
150
+ # langgraph-checkpoint
151
+ # langgraph-prebuilt
152
+ langchain-openai==0.3.14
153
+ # via tool-calling-agent (pyproject.toml)
154
+ langchain-text-splitters==0.3.8
155
+ # via langchain
156
+ langgraph==0.3.31
157
+ # via tool-calling-agent (pyproject.toml)
158
+ langgraph-checkpoint==2.0.24
159
+ # via
160
+ # langgraph
161
+ # langgraph-prebuilt
162
+ langgraph-prebuilt==0.1.8
163
+ # via
164
+ # tool-calling-agent (pyproject.toml)
165
+ # langgraph
166
+ langgraph-sdk==0.1.61
167
+ # via langgraph
168
+ langsmith==0.3.32
169
+ # via
170
+ # langchain
171
+ # langchain-community
172
+ # langchain-core
173
+ lazify==0.4.0
174
+ # via chainlit
175
+ literalai==0.1.201
176
+ # via chainlit
177
+ lxml==5.3.2
178
+ # via duckduckgo-search
179
+ markupsafe==3.0.2
180
+ # via jinja2
181
+ marshmallow==3.26.1
182
+ # via dataclasses-json
183
+ mcp==1.6.0
184
+ # via chainlit
185
+ monotonic==1.6
186
+ # via posthog
187
+ multidict==6.4.3
188
+ # via
189
+ # aiohttp
190
+ # yarl
191
+ mypy-extensions==1.0.0
192
+ # via typing-inspect
193
+ nest-asyncio==1.6.0
194
+ # via chainlit
195
+ numpy==2.2.4
196
+ # via langchain-community
197
+ openai==1.75.0
198
+ # via langchain-openai
199
+ opentelemetry-api==1.31.1
200
+ # via
201
+ # opentelemetry-exporter-otlp-proto-grpc
202
+ # opentelemetry-exporter-otlp-proto-http
203
+ # opentelemetry-instrumentation
204
+ # opentelemetry-instrumentation-alephalpha
205
+ # opentelemetry-instrumentation-anthropic
206
+ # opentelemetry-instrumentation-bedrock
207
+ # opentelemetry-instrumentation-chromadb
208
+ # opentelemetry-instrumentation-cohere
209
+ # opentelemetry-instrumentation-crewai
210
+ # opentelemetry-instrumentation-google-generativeai
211
+ # opentelemetry-instrumentation-groq
212
+ # opentelemetry-instrumentation-haystack
213
+ # opentelemetry-instrumentation-lancedb
214
+ # opentelemetry-instrumentation-langchain
215
+ # opentelemetry-instrumentation-llamaindex
216
+ # opentelemetry-instrumentation-logging
217
+ # opentelemetry-instrumentation-marqo
218
+ # opentelemetry-instrumentation-milvus
219
+ # opentelemetry-instrumentation-mistralai
220
+ # opentelemetry-instrumentation-ollama
221
+ # opentelemetry-instrumentation-openai
222
+ # opentelemetry-instrumentation-pinecone
223
+ # opentelemetry-instrumentation-qdrant
224
+ # opentelemetry-instrumentation-replicate
225
+ # opentelemetry-instrumentation-requests
226
+ # opentelemetry-instrumentation-sagemaker
227
+ # opentelemetry-instrumentation-sqlalchemy
228
+ # opentelemetry-instrumentation-threading
229
+ # opentelemetry-instrumentation-together
230
+ # opentelemetry-instrumentation-transformers
231
+ # opentelemetry-instrumentation-urllib3
232
+ # opentelemetry-instrumentation-vertexai
233
+ # opentelemetry-instrumentation-watsonx
234
+ # opentelemetry-instrumentation-weaviate
235
+ # opentelemetry-sdk
236
+ # opentelemetry-semantic-conventions
237
+ # traceloop-sdk
238
+ # uptrace
239
+ opentelemetry-exporter-otlp==1.31.1
240
+ # via uptrace
241
+ opentelemetry-exporter-otlp-proto-common==1.31.1
242
+ # via
243
+ # opentelemetry-exporter-otlp-proto-grpc
244
+ # opentelemetry-exporter-otlp-proto-http
245
+ opentelemetry-exporter-otlp-proto-grpc==1.31.1
246
+ # via
247
+ # opentelemetry-exporter-otlp
248
+ # traceloop-sdk
249
+ opentelemetry-exporter-otlp-proto-http==1.31.1
250
+ # via
251
+ # opentelemetry-exporter-otlp
252
+ # traceloop-sdk
253
+ opentelemetry-instrumentation==0.52b1
254
+ # via
255
+ # opentelemetry-instrumentation-alephalpha
256
+ # opentelemetry-instrumentation-anthropic
257
+ # opentelemetry-instrumentation-bedrock
258
+ # opentelemetry-instrumentation-chromadb
259
+ # opentelemetry-instrumentation-cohere
260
+ # opentelemetry-instrumentation-crewai
261
+ # opentelemetry-instrumentation-google-generativeai
262
+ # opentelemetry-instrumentation-groq
263
+ # opentelemetry-instrumentation-haystack
264
+ # opentelemetry-instrumentation-lancedb
265
+ # opentelemetry-instrumentation-langchain
266
+ # opentelemetry-instrumentation-llamaindex
267
+ # opentelemetry-instrumentation-logging
268
+ # opentelemetry-instrumentation-marqo
269
+ # opentelemetry-instrumentation-milvus
270
+ # opentelemetry-instrumentation-mistralai
271
+ # opentelemetry-instrumentation-ollama
272
+ # opentelemetry-instrumentation-openai
273
+ # opentelemetry-instrumentation-pinecone
274
+ # opentelemetry-instrumentation-qdrant
275
+ # opentelemetry-instrumentation-replicate
276
+ # opentelemetry-instrumentation-requests
277
+ # opentelemetry-instrumentation-sagemaker
278
+ # opentelemetry-instrumentation-sqlalchemy
279
+ # opentelemetry-instrumentation-threading
280
+ # opentelemetry-instrumentation-together
281
+ # opentelemetry-instrumentation-transformers
282
+ # opentelemetry-instrumentation-urllib3
283
+ # opentelemetry-instrumentation-vertexai
284
+ # opentelemetry-instrumentation-watsonx
285
+ # opentelemetry-instrumentation-weaviate
286
+ # uptrace
287
+ opentelemetry-instrumentation-alephalpha==0.39.2
288
+ # via traceloop-sdk
289
+ opentelemetry-instrumentation-anthropic==0.39.2
290
+ # via traceloop-sdk
291
+ opentelemetry-instrumentation-bedrock==0.39.2
292
+ # via traceloop-sdk
293
+ opentelemetry-instrumentation-chromadb==0.39.2
294
+ # via traceloop-sdk
295
+ opentelemetry-instrumentation-cohere==0.39.2
296
+ # via traceloop-sdk
297
+ opentelemetry-instrumentation-crewai==0.39.2
298
+ # via traceloop-sdk
299
+ opentelemetry-instrumentation-google-generativeai==0.39.2
300
+ # via traceloop-sdk
301
+ opentelemetry-instrumentation-groq==0.39.2
302
+ # via traceloop-sdk
303
+ opentelemetry-instrumentation-haystack==0.39.2
304
+ # via traceloop-sdk
305
+ opentelemetry-instrumentation-lancedb==0.39.2
306
+ # via traceloop-sdk
307
+ opentelemetry-instrumentation-langchain==0.39.2
308
+ # via traceloop-sdk
309
+ opentelemetry-instrumentation-llamaindex==0.39.2
310
+ # via traceloop-sdk
311
+ opentelemetry-instrumentation-logging==0.52b1
312
+ # via traceloop-sdk
313
+ opentelemetry-instrumentation-marqo==0.39.2
314
+ # via traceloop-sdk
315
+ opentelemetry-instrumentation-milvus==0.39.2
316
+ # via traceloop-sdk
317
+ opentelemetry-instrumentation-mistralai==0.39.2
318
+ # via traceloop-sdk
319
+ opentelemetry-instrumentation-ollama==0.39.2
320
+ # via traceloop-sdk
321
+ opentelemetry-instrumentation-openai==0.39.2
322
+ # via traceloop-sdk
323
+ opentelemetry-instrumentation-pinecone==0.39.2
324
+ # via traceloop-sdk
325
+ opentelemetry-instrumentation-qdrant==0.39.2
326
+ # via traceloop-sdk
327
+ opentelemetry-instrumentation-replicate==0.39.2
328
+ # via traceloop-sdk
329
+ opentelemetry-instrumentation-requests==0.52b1
330
+ # via traceloop-sdk
331
+ opentelemetry-instrumentation-sagemaker==0.39.2
332
+ # via traceloop-sdk
333
+ opentelemetry-instrumentation-sqlalchemy==0.52b1
334
+ # via traceloop-sdk
335
+ opentelemetry-instrumentation-threading==0.52b1
336
+ # via traceloop-sdk
337
+ opentelemetry-instrumentation-together==0.39.2
338
+ # via traceloop-sdk
339
+ opentelemetry-instrumentation-transformers==0.39.2
340
+ # via traceloop-sdk
341
+ opentelemetry-instrumentation-urllib3==0.52b1
342
+ # via traceloop-sdk
343
+ opentelemetry-instrumentation-vertexai==0.39.2
344
+ # via traceloop-sdk
345
+ opentelemetry-instrumentation-watsonx==0.39.2
346
+ # via traceloop-sdk
347
+ opentelemetry-instrumentation-weaviate==0.39.2
348
+ # via traceloop-sdk
349
+ opentelemetry-proto==1.31.1
350
+ # via
351
+ # opentelemetry-exporter-otlp-proto-common
352
+ # opentelemetry-exporter-otlp-proto-grpc
353
+ # opentelemetry-exporter-otlp-proto-http
354
+ opentelemetry-sdk==1.31.1
355
+ # via
356
+ # opentelemetry-exporter-otlp-proto-grpc
357
+ # opentelemetry-exporter-otlp-proto-http
358
+ # traceloop-sdk
359
+ # uptrace
360
+ opentelemetry-semantic-conventions==0.52b1
361
+ # via
362
+ # opentelemetry-instrumentation
363
+ # opentelemetry-instrumentation-alephalpha
364
+ # opentelemetry-instrumentation-anthropic
365
+ # opentelemetry-instrumentation-bedrock
366
+ # opentelemetry-instrumentation-chromadb
367
+ # opentelemetry-instrumentation-cohere
368
+ # opentelemetry-instrumentation-crewai
369
+ # opentelemetry-instrumentation-google-generativeai
370
+ # opentelemetry-instrumentation-groq
371
+ # opentelemetry-instrumentation-haystack
372
+ # opentelemetry-instrumentation-lancedb
373
+ # opentelemetry-instrumentation-langchain
374
+ # opentelemetry-instrumentation-llamaindex
375
+ # opentelemetry-instrumentation-marqo
376
+ # opentelemetry-instrumentation-milvus
377
+ # opentelemetry-instrumentation-mistralai
378
+ # opentelemetry-instrumentation-ollama
379
+ # opentelemetry-instrumentation-openai
380
+ # opentelemetry-instrumentation-pinecone
381
+ # opentelemetry-instrumentation-qdrant
382
+ # opentelemetry-instrumentation-replicate
383
+ # opentelemetry-instrumentation-requests
384
+ # opentelemetry-instrumentation-sagemaker
385
+ # opentelemetry-instrumentation-sqlalchemy
386
+ # opentelemetry-instrumentation-together
387
+ # opentelemetry-instrumentation-transformers
388
+ # opentelemetry-instrumentation-urllib3
389
+ # opentelemetry-instrumentation-vertexai
390
+ # opentelemetry-instrumentation-watsonx
391
+ # opentelemetry-instrumentation-weaviate
392
+ # opentelemetry-sdk
393
+ opentelemetry-semantic-conventions-ai==0.4.3
394
+ # via
395
+ # opentelemetry-instrumentation-alephalpha
396
+ # opentelemetry-instrumentation-anthropic
397
+ # opentelemetry-instrumentation-bedrock
398
+ # opentelemetry-instrumentation-chromadb
399
+ # opentelemetry-instrumentation-cohere
400
+ # opentelemetry-instrumentation-crewai
401
+ # opentelemetry-instrumentation-google-generativeai
402
+ # opentelemetry-instrumentation-groq
403
+ # opentelemetry-instrumentation-haystack
404
+ # opentelemetry-instrumentation-lancedb
405
+ # opentelemetry-instrumentation-langchain
406
+ # opentelemetry-instrumentation-llamaindex
407
+ # opentelemetry-instrumentation-marqo
408
+ # opentelemetry-instrumentation-milvus
409
+ # opentelemetry-instrumentation-mistralai
410
+ # opentelemetry-instrumentation-ollama
411
+ # opentelemetry-instrumentation-openai
412
+ # opentelemetry-instrumentation-pinecone
413
+ # opentelemetry-instrumentation-qdrant
414
+ # opentelemetry-instrumentation-replicate
415
+ # opentelemetry-instrumentation-sagemaker
416
+ # opentelemetry-instrumentation-together
417
+ # opentelemetry-instrumentation-transformers
418
+ # opentelemetry-instrumentation-vertexai
419
+ # opentelemetry-instrumentation-watsonx
420
+ # opentelemetry-instrumentation-weaviate
421
+ # traceloop-sdk
422
+ opentelemetry-util-http==0.52b1
423
+ # via
424
+ # opentelemetry-instrumentation-requests
425
+ # opentelemetry-instrumentation-urllib3
426
+ orjson==3.10.16
427
+ # via
428
+ # langgraph-sdk
429
+ # langsmith
430
+ ormsgpack==1.9.1
431
+ # via langgraph-checkpoint
432
+ packaging==24.2
433
+ # via
434
+ # chainlit
435
+ # huggingface-hub
436
+ # langchain-core
437
+ # langsmith
438
+ # literalai
439
+ # marshmallow
440
+ # opentelemetry-instrumentation
441
+ # opentelemetry-instrumentation-sqlalchemy
442
+ posthog==3.25.0
443
+ # via traceloop-sdk
444
+ primp==0.15.0
445
+ # via duckduckgo-search
446
+ propcache==0.3.1
447
+ # via
448
+ # aiohttp
449
+ # yarl
450
+ protobuf==5.29.4
451
+ # via
452
+ # googleapis-common-protos
453
+ # opentelemetry-proto
454
+ pydantic==2.11.3
455
+ # via
456
+ # tool-calling-agent (pyproject.toml)
457
+ # anthropic
458
+ # chainlit
459
+ # fastapi
460
+ # langchain
461
+ # langchain-core
462
+ # langsmith
463
+ # literalai
464
+ # mcp
465
+ # openai
466
+ # pydantic-settings
467
+ # traceloop-sdk
468
+ pydantic-core==2.33.1
469
+ # via pydantic
470
+ pydantic-settings==2.9.1
471
+ # via
472
+ # langchain-community
473
+ # mcp
474
+ pyjwt==2.10.1
475
+ # via chainlit
476
+ python-dateutil==2.9.0.post0
477
+ # via posthog
478
+ python-dotenv==1.1.0
479
+ # via
480
+ # tool-calling-agent (pyproject.toml)
481
+ # chainlit
482
+ # pydantic-settings
483
+ python-engineio==4.12.0
484
+ # via python-socketio
485
+ python-multipart==0.0.18
486
+ # via chainlit
487
+ python-socketio==5.13.0
488
+ # via chainlit
489
+ pyyaml==6.0.2
490
+ # via
491
+ # huggingface-hub
492
+ # langchain
493
+ # langchain-community
494
+ # langchain-core
495
+ regex==2024.11.6
496
+ # via tiktoken
497
+ requests==2.32.3
498
+ # via
499
+ # arxiv
500
+ # huggingface-hub
501
+ # langchain
502
+ # langchain-community
503
+ # langsmith
504
+ # opentelemetry-exporter-otlp-proto-http
505
+ # posthog
506
+ # requests-toolbelt
507
+ # tavily-python
508
+ # tiktoken
509
+ requests-toolbelt==1.0.0
510
+ # via langsmith
511
+ sgmllib3k==1.0.0
512
+ # via feedparser
513
+ simple-websocket==1.1.0
514
+ # via python-engineio
515
+ six==1.17.0
516
+ # via
517
+ # posthog
518
+ # python-dateutil
519
+ sniffio==1.3.1
520
+ # via
521
+ # anthropic
522
+ # anyio
523
+ # openai
524
+ sqlalchemy==2.0.40
525
+ # via
526
+ # langchain
527
+ # langchain-community
528
+ sse-starlette==2.2.1
529
+ # via mcp
530
+ starlette==0.41.3
531
+ # via
532
+ # chainlit
533
+ # fastapi
534
+ # mcp
535
+ # sse-starlette
536
+ syncer==2.0.3
537
+ # via chainlit
538
+ tavily-python==0.5.4
539
+ # via tool-calling-agent (pyproject.toml)
540
+ tenacity==9.1.2
541
+ # via
542
+ # langchain-community
543
+ # langchain-core
544
+ # traceloop-sdk
545
+ tiktoken==0.9.0
546
+ # via
547
+ # langchain-openai
548
+ # opentelemetry-instrumentation-openai
549
+ # tavily-python
550
+ tokenizers==0.21.1
551
+ # via opentelemetry-instrumentation-bedrock
552
+ tomli==2.2.1
553
+ # via chainlit
554
+ tqdm==4.67.1
555
+ # via
556
+ # huggingface-hub
557
+ # openai
558
+ traceloop-sdk==0.39.2
559
+ # via literalai
560
+ typing-extensions==4.13.2
561
+ # via
562
+ # anthropic
563
+ # anyio
564
+ # fastapi
565
+ # huggingface-hub
566
+ # langchain-core
567
+ # openai
568
+ # opentelemetry-sdk
569
+ # pydantic
570
+ # pydantic-core
571
+ # sqlalchemy
572
+ # typing-inspect
573
+ # typing-inspection
574
+ typing-inspect==0.9.0
575
+ # via dataclasses-json
576
+ typing-inspection==0.4.0
577
+ # via
578
+ # pydantic
579
+ # pydantic-settings
580
+ uptrace==1.31.0
581
+ # via chainlit
582
+ urllib3==2.4.0
583
+ # via requests
584
+ uvicorn==0.34.1
585
+ # via
586
+ # chainlit
587
+ # mcp
588
+ watchfiles==0.20.0
589
+ # via chainlit
590
+ wrapt==1.17.2
591
+ # via
592
+ # deprecated
593
+ # opentelemetry-instrumentation
594
+ # opentelemetry-instrumentation-sqlalchemy
595
+ # opentelemetry-instrumentation-threading
596
+ # opentelemetry-instrumentation-urllib3
597
+ wsproto==1.2.0
598
+ # via simple-websocket
599
+ xxhash==3.5.0
600
+ # via langgraph
601
+ yarl==1.20.0
602
+ # via aiohttp
603
+ zipp==3.21.0
604
+ # via importlib-metadata
605
+ zstandard==0.23.0
606
+ # via langsmith
tools.py ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ """
2
+ Module for configuring and creating tools for the LangGraph agent.
3
+ """
4
+ from typing import Dict, Any, List, Tuple
5
+
6
+ from langchain_community.tools.tavily_search import TavilySearchResults
7
+ from langchain_community.tools.arxiv.tool import ArxivQueryRun
8
+ from langchain_community.tools import DuckDuckGoSearchRun
9
+ from langchain_openai import ChatOpenAI
10
+
11
+
12
def create_tools(config: Dict[str, Any]) -> Tuple[List, ChatOpenAI]:
    """
    Build the agent's tool belt and a chat model with those tools bound.

    Args:
        config: Configuration dictionary supplying "tavily_max_results",
            "tavily_api_key", "model_name", "temperature", and
            "openai_api_key".

    Returns:
        Tuple containing:
        - List of tools (Tavily search, ArXiv lookup, DuckDuckGo search)
        - ChatOpenAI model with the tool belt bound to it
    """
    # Assemble the tool belt in one literal: Tavily (capped at the
    # configured result count), ArXiv paper lookup, and DuckDuckGo search.
    search_belt = [
        TavilySearchResults(
            max_results=config["tavily_max_results"],
            api_key=config["tavily_api_key"],
        ),
        ArxivQueryRun(),
        DuckDuckGoSearchRun(),
    ]

    # Chat model configured from the same dictionary.
    llm = ChatOpenAI(
        model=config["model_name"],
        temperature=config["temperature"],
        api_key=config["openai_api_key"],
    )

    # Binding exposes the tool schemas to the model so it can emit
    # tool calls; callers receive both the belt and the bound model.
    return search_belt, llm.bind_tools(search_belt)
uv.lock ADDED
The diff for this file is too large to render. See raw diff