Spaces:
Runtime error
Runtime error
changes
Browse files- app/agents/context_agent.py +2 -2
- app/agents/memory_manager_agent.py +2 -2
- app/core/config.py +3 -0
- app/database/connection.py +1 -1
- app/graph.py +23 -13
- app/main.py +26 -6
- app/persistance/__init__.py +0 -0
- app/persistance/memory_store_checkpointer_config.py +10 -0
app/agents/context_agent.py
CHANGED
|
@@ -4,7 +4,7 @@ from langchain_groq import ChatGroq
|
|
| 4 |
from app.prompts.context_agent_prompt import context_agent_template
|
| 5 |
from app.tools.context_agent_tools import context_agent_tools
|
| 6 |
from typing import Any
|
| 7 |
-
from app.
|
| 8 |
|
| 9 |
context_agent = create_agent(
|
| 10 |
model=ChatGroq(
|
|
@@ -12,7 +12,7 @@ context_agent = create_agent(
|
|
| 12 |
temperature=0.1,
|
| 13 |
),
|
| 14 |
tools=context_agent_tools,
|
| 15 |
-
store=
|
| 16 |
middleware=[
|
| 17 |
ToolCallLimitMiddleware[Any,None](
|
| 18 |
tool_name="search_memory",
|
|
|
|
| 4 |
from app.prompts.context_agent_prompt import context_agent_template
|
| 5 |
from app.tools.context_agent_tools import context_agent_tools
|
| 6 |
from typing import Any
|
| 7 |
+
from app.persistance.memory_store_checkpointer_config import memory_store
|
| 8 |
|
| 9 |
context_agent = create_agent(
|
| 10 |
model=ChatGroq(
|
|
|
|
| 12 |
temperature=0.1,
|
| 13 |
),
|
| 14 |
tools=context_agent_tools,
|
| 15 |
+
store=memory_store,
|
| 16 |
middleware=[
|
| 17 |
ToolCallLimitMiddleware[Any,None](
|
| 18 |
tool_name="search_memory",
|
app/agents/memory_manager_agent.py
CHANGED
|
@@ -2,7 +2,7 @@ import types
|
|
| 2 |
from langchain_groq import ChatGroq
|
| 3 |
from langmem import create_memory_store_manager
|
| 4 |
from app.schemas.memory_agent_schema import EmailMemory
|
| 5 |
-
from app.
|
| 6 |
import os
|
| 7 |
from app.core.config import settings
|
| 8 |
|
|
@@ -51,7 +51,7 @@ memory_manager_agent = create_memory_store_manager(
|
|
| 51 |
model,
|
| 52 |
schemas=[EmailMemory],
|
| 53 |
namespace=namespace,
|
| 54 |
-
store=
|
| 55 |
instructions="Extract required info from incoming mail and its reply .",
|
| 56 |
enable_inserts=True,
|
| 57 |
enable_deletes=True,
|
|
|
|
| 2 |
from langchain_groq import ChatGroq
|
| 3 |
from langmem import create_memory_store_manager
|
| 4 |
from app.schemas.memory_agent_schema import EmailMemory
|
| 5 |
+
from app.persistance.memory_store_checkpointer_config import memory_store
|
| 6 |
import os
|
| 7 |
from app.core.config import settings
|
| 8 |
|
|
|
|
| 51 |
model,
|
| 52 |
schemas=[EmailMemory],
|
| 53 |
namespace=namespace,
|
| 54 |
+
store=memory_store,
|
| 55 |
instructions="Extract required info from incoming mail and its reply .",
|
| 56 |
enable_inserts=True,
|
| 57 |
enable_deletes=True,
|
app/core/config.py
CHANGED
|
@@ -8,6 +8,9 @@ class Settings(BaseSettings):
|
|
| 8 |
GROQ_API_KEY: str
|
| 9 |
DB_URL_FOR_CHECKPOINTER_STORE: str
|
| 10 |
|
|
|
|
|
|
|
|
|
|
| 11 |
DB_URL_FOR_SQL_AL:str
|
| 12 |
|
| 13 |
model_config = SettingsConfigDict(
|
|
|
|
| 8 |
GROQ_API_KEY: str
|
| 9 |
DB_URL_FOR_CHECKPOINTER_STORE: str
|
| 10 |
|
| 11 |
+
GMAIL_CREDENTIALS_PATH: str = "credentials.json"
|
| 12 |
+
GMAIL_TOKEN_PATH: str = "token.json"
|
| 13 |
+
|
| 14 |
DB_URL_FOR_SQL_AL:str
|
| 15 |
|
| 16 |
model_config = SettingsConfigDict(
|
app/database/connection.py
CHANGED
|
@@ -35,5 +35,5 @@ pool = ConnectionPool(
|
|
| 35 |
conninfo=DB_URL_FOR_CHECKPOINTER_STORE,
|
| 36 |
min_size=1,
|
| 37 |
max_size=10,
|
| 38 |
-
kwargs={"autocommit": True}
|
| 39 |
)
|
|
|
|
| 35 |
conninfo=DB_URL_FOR_CHECKPOINTER_STORE,
|
| 36 |
min_size=1,
|
| 37 |
max_size=10,
|
| 38 |
+
kwargs={"autocommit": True,"row_factory": dict}
|
| 39 |
)
|
app/graph.py
CHANGED
|
@@ -17,7 +17,8 @@ from app.nodes.check_token_count_node import *
|
|
| 17 |
from psycopg import OperationalError # Or sqlalchemy.exc.OperationalError depending on your driver
|
| 18 |
from app.tools.email_writing_agent_tools import email_writing_agent_tools
|
| 19 |
from IPython.display import Image, display
|
| 20 |
-
|
|
|
|
| 21 |
|
| 22 |
# Define a standard retry policy for database-heavy nodes
|
| 23 |
db_retry_policy = RetryPolicy(
|
|
@@ -126,20 +127,29 @@ builder.add_edge("archive_node", END)
|
|
| 126 |
|
| 127 |
|
| 128 |
|
| 129 |
-
graph=builder.compile()
|
| 130 |
|
| 131 |
-
|
| 132 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 133 |
|
| 134 |
display(graph)
|
| 135 |
|
| 136 |
|
| 137 |
-
try:
|
| 138 |
-
|
| 139 |
-
|
| 140 |
-
|
| 141 |
-
|
| 142 |
-
|
| 143 |
-
except Exception as e:
|
| 144 |
-
|
| 145 |
-
|
|
|
|
| 17 |
from psycopg import OperationalError # Or sqlalchemy.exc.OperationalError depending on your driver
|
| 18 |
from app.tools.email_writing_agent_tools import email_writing_agent_tools
|
| 19 |
from IPython.display import Image, display
|
| 20 |
+
from app.persistance.memory_store_checkpointer_config import memory_store, checkpointer
|
| 21 |
+
from langchain_google_community import GmailToolkit
|
| 22 |
|
| 23 |
# Define a standard retry policy for database-heavy nodes
|
| 24 |
db_retry_policy = RetryPolicy(
|
|
|
|
| 127 |
|
| 128 |
|
| 129 |
|
|
|
|
| 130 |
|
| 131 |
+
|
| 132 |
+
|
| 133 |
+
|
| 134 |
+
|
| 135 |
+
|
| 136 |
+
toolkit = GmailToolkit()
|
| 137 |
+
|
| 138 |
+
|
| 139 |
+
graph=builder.compile(checkpointer=checkpointer, store=memory_store)
|
| 140 |
+
|
| 141 |
+
|
| 142 |
+
|
| 143 |
|
| 144 |
display(graph)
|
| 145 |
|
| 146 |
|
| 147 |
+
# try:
|
| 148 |
+
# # This creates a PNG and saves it to your project folder
|
| 149 |
+
# graph_png = graph.get_graph().draw_mermaid_png()
|
| 150 |
+
# with open("graph.png", "wb") as f:
|
| 151 |
+
# f.write(graph_png)
|
| 152 |
+
# print("--- Graph image saved as 'graph.png' ---")
|
| 153 |
+
# except Exception as e:
|
| 154 |
+
# # This happens if you don't have the 'pypydot' or 'graphviz' dependencies
|
| 155 |
+
# print(f"Could not generate graph image: {e}")
|
app/main.py
CHANGED
|
@@ -1,13 +1,33 @@
|
|
| 1 |
from app.graph import graph
|
| 2 |
-
from app.
|
| 3 |
-
|
| 4 |
-
from
|
| 5 |
-
from app.utils.embeddings import remote_embeddings
|
| 6 |
|
|
|
|
|
|
|
| 7 |
|
| 8 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 9 |
|
| 10 |
-
|
| 11 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 12 |
|
| 13 |
|
|
|
|
| 1 |
from app.graph import graph
|
| 2 |
+
from app.state.state import EmailAgentState
|
| 3 |
+
import time
|
| 4 |
+
from psycopg import OperationalError
|
|
|
|
| 5 |
|
| 6 |
+
# Define your thread configuration
|
| 7 |
+
config = {"configurable": {"thread_id": "user_abc_123"}}
|
| 8 |
|
| 9 |
|
| 10 |
+
input_data: EmailAgentState = {
|
| 11 |
+
"user_email_id": "gaykaratharva7@gmail.com",
|
| 12 |
+
"user_id": 1,
|
| 13 |
+
"user_name": "Atharva",
|
| 14 |
+
"sender_email_id": "atharvagaykar36@gmail.com",
|
| 15 |
+
"sender_subject": "URGENT: Validation of Hybrid Phishing Detection Model & XGBoost Integration",
|
| 16 |
+
"sender_email_body": """Dear Atharva,\r\n\r\nI have completed the integration of the *AI-Driven Email Threat Detection*\r\npipeline. We are currently utilizing the fine-tuned DistilBERT model to\r\ngenerate semantic embeddings for incoming messages.\r\n\r\nTo ensure the system is correctly identifying malicious intent, I've\r\nprocessed a suspicious sample using our custom structural tokens: [SSUB],\r\n[SBODY], [LINK], and [PHONE]. This preserves the email's structural context\r\nwhile protecting sensitive data.\r\n[MODEL EVALUATION DATA]\r\n\r\n*1. Semantic Context:* The fine-tuned DistilBERT has mapped the input to a\r\nhigh-dimensional vector space. Initial checks suggest strong clustering\r\nwith known phishing signatures.\r\n\r\n*2. URL Feature Engineering:* Our hybrid pipeline extracted numerical\r\nindicators from the embedded links.\r\n\r\n -\r\n\r\n Subdomain count: 4\r\n -\r\n\r\n Suspicious keywords: 2\r\n -\r\n\r\n Special characters: @, -, .\r\n -\r\n\r\n Redirection detected: True\r\n\r\n*3. XGBoost Classification:* The combined feature set (DistilBERT\r\nembeddings + numerical URL features) has been passed to the XGBoost\r\nclassifier.\r\n\r\n -\r\n\r\n *Current Test Accuracy:* 99.35%\r\n\r\nFinal Confirmation Needed:\r\n\r\n 1.\r\n\r\n Should we deploy the current XGBoost weights to the:\r\n https://huggingface.co/spaces/Gaykar/ClassifyEmail\r\n 2.\r\n\r\n Do you want to review the Hybrid_model_preparation.ipynb logic before we\r\n push to the:\r\n https://github.com/Atharva-Gaykar/AI-Driven-Email-Threat-Detection\r\n 3.\r\n\r\n Are the [LINK] and [PHONE] placeholders correctly masking the PII\r\n (Personally Identifiable Information) according to the project spec?\r\n\r\nPlease provide your approval to proceed with the Docker deployment.\r\n\r\nBest regards,\r\n\r\nVinit Security AI Engineer\r\n"""
|
| 17 |
+
}
|
| 18 |
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
|
| 22 |
+
for i in range(3): # Try 3 times to account for Neon wake-up
|
| 23 |
+
try:
|
| 24 |
+
result = graph.invoke(input_data, config=config)
|
| 25 |
+
break
|
| 26 |
+
except OperationalError:
|
| 27 |
+
print("Waiting for Neon to wake up...")
|
| 28 |
+
time.sleep(5)
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
print(result)
|
| 32 |
|
| 33 |
|
app/persistance/__init__.py
ADDED
|
File without changes
|
app/persistance/memory_store_checkpointer_config.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from app.database.connection import pool
|
| 2 |
+
from langgraph.checkpoint.postgres import PostgresSaver
|
| 3 |
+
from langgraph.store.postgres import PostgresStore
|
| 4 |
+
from app.utils.embeddings import remote_embeddings
|
| 5 |
+
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
|
| 9 |
+
checkpointer = PostgresSaver(pool)
|
| 10 |
+
memory_store = PostgresStore(pool, index={"dims": 384, "embed": remote_embeddings})
|