AlexFoxalt committed on
Commit
71b0b4f
Β·
1 Parent(s): ff2b350

Init commit

Browse files
Files changed (7) hide show
  1. .idea/.gitignore +8 -0
  2. .python-version +1 -0
  3. Dockerfile +31 -0
  4. app.py +141 -0
  5. chainlit.md +14 -0
  6. pyproject.toml +17 -0
  7. uv.lock +0 -0
.idea/.gitignore ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ # Default ignored files
2
+ /shelf/
3
+ /workspace.xml
4
+ # Editor-based HTTP Client requests
5
+ /httpRequests/
6
+ # Datasource local storage ignored files
7
+ /dataSources/
8
+ /dataSources.local.xml
.python-version ADDED
@@ -0,0 +1 @@
 
 
1
+ 3.13
Dockerfile ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1

# Get a distribution that has uv already installed
FROM ghcr.io/astral-sh/uv:python3.13-bookworm-slim

# Add user - this is the user that will run the app
# If you do not set user, the app will run as root (undesirable)
RUN useradd -m -u 1000 user
USER user

# Set the home directory and path so user-local installs are on PATH
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH

# NOTE(review): presumably forces uvicorn's websocket backend so Chainlit's
# websocket transport works in the hosting environment — confirm before removing.
ENV UVICORN_WS_PROTOCOL=websockets


# Set the working directory
WORKDIR $HOME/app

# Copy the app to the container (owned by the non-root user)
COPY --chown=user . $HOME/app

# Install the dependencies
# NOTE(review): prefer `uv sync --frozen` for reproducible builds once uv.lock
# is committed — the unpinned sync below may resolve newer versions.
# RUN uv sync --frozen
RUN uv sync

# Expose the port the app listens on
EXPOSE 7860

# Run the app
CMD ["uv", "run", "chainlit", "run", "app.py", "--host", "0.0.0.0", "--port", "7860"]
app.py ADDED
@@ -0,0 +1,141 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from typing import TypedDict, Annotated
2
+
3
+ import chainlit as cl
4
+ from dotenv import load_dotenv
5
+ from langchain_community.tools.arxiv.tool import ArxivQueryRun
6
+ from langchain_community.tools.tavily_search import TavilySearchResults
7
+ from langchain_core.messages import HumanMessage, SystemMessage, ToolMessage
8
+ from langchain_community.tools import WikipediaQueryRun
9
+ from langchain_community.utilities import WikipediaAPIWrapper
10
+ from langchain_openai import ChatOpenAI
11
+ from langgraph.graph import StateGraph, END
12
+ from langgraph.graph.message import add_messages
13
+ from langgraph.prebuilt import ToolNode
14
+ from langchain.tools import tool
15
+
16
# Load API keys (OpenAI, Tavily, ...) from a local .env file into the environment.
load_dotenv()


# System prompt prepended to every conversation turn in `main` below.
# NOTE: this string is sent to the model at runtime — edit content deliberately.
system_prompt = """
You are a highly intelligent AI assistant designed to provide helpful, accurate, and well-structured responses to user queries.
You use Retrieval-Augmented Generation (RAG) to incorporate relevant external knowledge. Follow these rules:

1. **Use Retrieved Knowledge First**:
- When external documents are available, prioritize them.
- Summarize instead of copying text.
- If sources conflict, highlight differences.

2. **Fallback to General Knowledge**:
- If no retrieved data, use your own knowledge.
- If uncertain, clarify the limitation.

3. **Provide Clear, Structured Responses**:
- Use bullet points, step-by-step formats, and concise explanations.
- Provide citations where needed.

4. **Maintain Context & Engagement**:
- Keep track of prior conversation.
- If a query is unclear, ask for clarification.

5. **Security & Ethics**:
- Avoid harmful, illegal, or biased content.
- Never ask for personal information.
"""
44
+
45
+
46
class State(TypedDict):
    """LangGraph state schema: a message history shared between graph nodes."""

    # `add_messages` makes updates append to the list rather than replace it.
    messages: Annotated[list, add_messages]
48
+
49
+
50
def call_model(state):
    """Run the tool-bound chat model over the conversation so far.

    Returns a partial state update containing only the model's reply;
    ``add_messages`` on ``State.messages`` merges it into the history.
    """
    reply = model.invoke(state["messages"])
    return {"messages": [reply]}
54
+
55
+
56
def should_continue(state):
    """Route after the agent node.

    Returns the "action" node name when the newest message requests tool
    calls; otherwise returns the END sentinel to finish the graph run.
    """
    latest = state["messages"][-1]
    return "action" if latest.tool_calls else END
61
+
62
+
63
def parse_output(input_state):
    """Return the text content of the most recent message in the state.

    NOTE(review): not referenced elsewhere in this module — kept for API parity.
    """
    final_message = input_state["messages"][-1]
    return final_message.content
65
+
66
+
67
# Streaming chat model used by the agent node (tools are bound to it below).
model = ChatOpenAI(model_name="gpt-4o", temperature=0, streaming=True)

# NOTE: each *_desc string is the tool description the LLM reads when deciding
# which tool to call — these are runtime data, not comments.
wiki_desc = """
Use it only if the query is about history.
A wrapper around Wikipedia.
Useful for when you need to answer general questions about
people, places, companies, facts, historical events, or other subjects.
Input should be a search query.
"""
# Wikipedia lookup, capped to 1 result and 100 characters of content.
wiki = WikipediaQueryRun(
    description=wiki_desc,
    api_wrapper=WikipediaAPIWrapper(top_k_results=1, doc_content_chars_max=100),
)

tav_desc = """
Use it only if the query is about news.
A search engine optimized for comprehensive, accurate, and trusted results.
Useful for when you need to answer questions about current events.
Input should be a search query.
"""
# Tavily web search, capped to a single result.
tav = TavilySearchResults(description=tav_desc, max_results=1)

arx_desc = """
Use it only if the query is about science.
A wrapper around Arxiv.org
Useful for when you need to answer questions about Physics, Mathematics,
Computer Science, Quantitative Biology, Quantitative Finance, Statistics,
Electrical Engineering, and Economics from scientific articles on arxiv.org.
Input should be a search query.
"""
# arXiv paper search.
arx = ArxivQueryRun(description=arx_desc)
98
+
99
+
100
# Joke fallback tool. return_direct=True means its output is returned to the
# user verbatim, without a further model pass.
@tool("IdiotQueryRun", return_direct=True)
def idiot_query_run(query: str) -> str:
    """
    Use it only if the query is about strange or unreal things.
    A wrapper around memes from internet.
    Input should be a search query.
    """
    # NOTE: the docstring above doubles as the tool description shown to the
    # LLM — do not edit it casually. The reply ignores `query` by design.
    return "Don't believe everything you read in internet. (c)Albert Einstein"
108
+
109
+
110
# All tools the agent may call; ToolNode executes whichever one the model requests.
tool_belt = [wiki, tav, arx, idiot_query_run]

# Rebind so the model can emit tool calls for the belt above.
model = model.bind_tools(tool_belt)
tool_node = ToolNode(tool_belt)


# Graph wiring: agent -> ("action" when tool calls are present, looping back
# to agent after tools run; END otherwise).
uncompiled_graph = StateGraph(State)
uncompiled_graph.add_node("agent", call_model)
uncompiled_graph.add_node("action", tool_node)
uncompiled_graph.set_entry_point("agent")
uncompiled_graph.add_conditional_edges("agent", should_continue)
uncompiled_graph.add_edge("action", "agent")
graph = uncompiled_graph.compile()
123
+
124
+
125
@cl.on_chat_start
async def on_chat_start():
    """Greet the user when a new chat session opens."""
    await cl.Message(content="Hello! Please ask your questions.").send()
129
+
130
+
131
@cl.on_message
async def main(message):
    """Handle one user message: run the agent graph and stream the answer.

    Builds a fresh [system, human] message pair per turn, runs the compiled
    LangGraph agent, and streams the final answer to the UI character by
    character. When the answer was preceded by a tool result, the tool name
    is appended as attribution.
    """
    inputs = {"messages": [SystemMessage(system_prompt), HumanMessage(message.content)]}
    # Fix: use the async entry point so the Chainlit event loop is not blocked
    # while the graph (model + tool calls) runs; the original called the
    # synchronous graph.invoke() inside this async handler.
    result = await graph.ainvoke(inputs)
    messages = result["messages"]
    response = messages[-1].content
    # Guard the [-2] access so an unexpectedly short history cannot raise.
    if len(messages) >= 2 and isinstance(messages[-2], ToolMessage):
        response += f"\n\nThis response was generated using `{messages[-2].name}` tool"
    msg = cl.Message(content="")
    for token in response:
        await msg.stream_token(token)
    await msg.send()
chainlit.md ADDED
@@ -0,0 +1,14 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Welcome to Chainlit! πŸš€πŸ€–
2
+
3
+ Hi there, Developer! πŸ‘‹ We're excited to have you on board. Chainlit is a powerful tool designed to help you prototype, debug and share applications built on top of LLMs.
4
+
5
+ ## Useful Links πŸ”—
6
+
7
+ - **Documentation:** Get started with our comprehensive [Chainlit Documentation](https://docs.chainlit.io) πŸ“š
8
+ - **Discord Community:** Join our friendly [Chainlit Discord](https://discord.gg/k73SQ3FyUh) to ask questions, share your projects, and connect with other developers! πŸ’¬
9
+
10
+ We can't wait to see what you create with Chainlit! Happy coding! πŸ’»πŸ˜Š
11
+
12
+ ## Welcome screen
13
+
14
+ To modify the welcome screen, edit the `chainlit.md` file at the root of your project. If you do not want a welcome screen, just leave this file empty.
pyproject.toml ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [project]
2
+ name = "05-our-first-agent-with-langgraph"
3
+ version = "0.1.0"
4
+ description = "Add your description here"
5
+ readme = "README.md"
6
+ requires-python = ">=3.13"
7
+ dependencies = [
8
+ "arxiv>=2.1.3",
9
+ "chainlit>=2.1.0",
10
+ "duckduckgo-search==5.3.1b1",
11
+ "jupyter>=1.1.1",
12
+ "langchain>=0.3.15",
13
+ "langchain-community>=0.3.15",
14
+ "langchain-openai>=0.3.2",
15
+ "langgraph>=0.2.67",
16
+ "wikipedia>=1.4.0",
17
+ ]
uv.lock ADDED
The diff for this file is too large to render. See raw diff