from typing import Annotated, TypedDict
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import add_messages
from langgraph.prebuilt import ToolNode, tools_condition
from langchain_core.messages import BaseMessage, HumanMessage, AIMessage, SystemMessage
from langchain_openai import ChatOpenAI
from langsmith import traceable
import sqlite3
from datetime import datetime
from config import *
from tools import ALL_TOOLS
class ChatState(TypedDict):
    """Graph state: just the running message list.

    The `add_messages` reducer appends/merges messages returned by a node
    into the existing list instead of overwriting it.
    """
    messages: Annotated[list[BaseMessage], add_messages]
class MemoryManager:
    """Persist per-session chat history in a SQLite database.

    Each message is one row (session_id, role, content) with an
    auto-increment id and a server-side insertion timestamp.
    A fresh connection is opened per operation and always closed,
    even when the SQL raises.
    """

    def __init__(self, db_path: str = "chat_history.db"):
        self.db_path = db_path
        self._init_db()

    def _init_db(self):
        """Create the chat_history table if it does not already exist."""
        conn = sqlite3.connect(self.db_path)
        try:
            # `with conn` commits on success and rolls back on error.
            with conn:
                conn.execute("""
                    CREATE TABLE IF NOT EXISTS chat_history (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        session_id TEXT NOT NULL,
                        role TEXT NOT NULL,
                        content TEXT NOT NULL,
                        timestamp DATETIME DEFAULT CURRENT_TIMESTAMP
                    )
                """)
        finally:
            # sqlite3's transaction context manager does not close the
            # connection; close it explicitly so it never leaks.
            conn.close()

    @traceable(name="add_message_to_memory")
    def add_message(self, session_id: str, role: str, content: str):
        """Append one message to the given session's history."""
        conn = sqlite3.connect(self.db_path)
        try:
            with conn:
                conn.execute(
                    "INSERT INTO chat_history (session_id, role, content) VALUES (?, ?, ?)",
                    (session_id, role, content),
                )
        finally:
            conn.close()

    @traceable(name="get_recent_messages_from_memory")
    def get_recent_messages(self, session_id: str, limit: int = 5):
        """Return up to `limit` most recent messages, oldest first.

        Each item is a dict with "role" and "content" keys.
        """
        conn = sqlite3.connect(self.db_path)
        try:
            rows = conn.execute(
                """SELECT role, content FROM chat_history
                WHERE session_id = ?
                ORDER BY id DESC LIMIT ?""",
                (session_id, limit),
            ).fetchall()
        finally:
            conn.close()
        # The SELECT is newest-first; reverse to chronological order.
        return [{"role": role, "content": content} for role, content in reversed(rows)]

    @traceable(name="get_conversation_context")
    def get_context(self, session_id: str) -> str:
        """Format recent history as a plain-text block, or "" if empty."""
        recent = self.get_recent_messages(session_id, limit=5)
        if not recent:
            return ""
        recent_text = "\n".join(f"{m['role']}: {m['content']}" for m in recent)
        return f"Recent conversation:\n{recent_text}"
def create_agent():
    """Build and compile the basic chat agent.

    Returns a (compiled_graph, MemoryManager) pair. Features:
    memory, tool calling, LangSmith tracing, and token streaming.
    """
    # Groq's OpenAI-compatible endpoint, streaming enabled.
    base_llm = ChatOpenAI(
        api_key=GROQ_API_KEY,
        base_url="https://api.groq.com/openai/v1",
        model=GROQ_MODEL,
        temperature=0,
        streaming=True,
    )
    tool_llm = base_llm.bind_tools(ALL_TOOLS)
    memory = MemoryManager()

    @traceable(name="chat_node")
    def chat_node(state: ChatState):
        """Single LLM call over the accumulated messages — nothing else."""
        reply = tool_llm.invoke(state["messages"])
        return {"messages": [reply]}

    workflow = StateGraph(ChatState)
    workflow.add_node("chat", chat_node)
    workflow.add_node("tools", ToolNode(ALL_TOOLS))
    workflow.add_edge(START, "chat")
    # tools_condition routes to "tools" when the reply requests a tool
    # call, otherwise ends the run; tools always loop back to chat.
    workflow.add_conditional_edges("chat", tools_condition)
    workflow.add_edge("tools", "chat")
    return workflow.compile(), memory
if __name__ == "__main__":
    # Smoke check: build the agent and report what this variant supports.
    agent, mem = create_agent()
    for line in (
        " Basic Agent created successfully",
        " Features: Memory, Tool Calling, LangSmith Tracing",
        " No advanced features",
    ):
        print(line)