# FoodDeliveryChatAgent / chat_agent.py
# Uploaded via huggingface_hub (revision 14dd252) by karora1804.
# NOTE: the original upload-page header lines were not valid Python;
# they are preserved here as comments so the module parses.
# import the packages required for API execution
import json
import os
import pandas as pd
import sqlite3
from langchain.agents import create_sql_agent, initialize_agent # Added initialize_agent
from langchain_core.messages import SystemMessage, HumanMessage
from langchain.agents.agent_types import AgentType
from langchain.sql_database import SQLDatabase
from langchain.agents.agent_toolkits import SQLDatabaseToolkit
from langchain import hub
from langchain.agents import load_tools
from langchain.agents import Tool
from pydantic import BaseModel, Field, ValidationError
from typing import List, Optional, Dict
import warnings
# warnings.filterwarnings("ignore", category=DeprecationWarning)
warnings.filterwarnings("ignore")
# The Groq API key is generated in the Groq portal and stored as a secret
# (e.g. Google Colab's userdata) rather than committed to source control.
# from google.colab import userdata
# os.environ['GROQ_API_KEY'] = userdata.get('GROQ_API_KEY')
from langchain_groq import ChatGroq # Import Groq LLM
# Read the Groq API key from the environment (set GROQ_API_KEY before running).
# SECURITY: a live key was previously hard-coded on this line and committed to
# the repository; that key must be considered leaked and should be revoked.
# os.environ.get returns None when the variable is unset, so ChatGroq will
# fail fast with a clear authentication error instead of using a leaked key.
groq_api_key = os.environ.get('GROQ_API_KEY')
# Create the chat LLM instance via Groq.
# Model temperature ranges 0-1.0; 0 makes output deterministic, which suits
# SQL generation and factual order lookups better than creative sampling.
# NOTE(review): earlier comments referenced llama-3.1-8b-instant and temp 0.2,
# but the configured values below are llama-4-scout and temperature 0.
llm = ChatGroq(
model="meta-llama/llama-4-scout-17b-16e-instruct", # Groq-hosted model id
temperature=0, # deterministic (no sampling randomness)
max_tokens=None, # no explicit cap on completion length
timeout=None, # rely on the client's default request timeout
max_retries=3, # retry transient API failures up to 3 times
groq_api_key=groq_api_key
)
# Human-readable description of the orders_new table. This text is injected
# into the system prompt (below) so the LLM knows the available columns
# without having to introspect the database itself. It is prompt text only —
# it is NOT used to create or validate the actual SQLite schema.
ORDERS_SCHEMA = """
Table: orders_new
Columns:
- order_id (TEXT)
- cust_id (TEXT)
- order_time (TEXT)
- order_status_std (TEXT)
- payment_status_std (TEXT)
- item_in_order (TEXT)
- preparing_eta (TEXT)
- prepared_time (TEXT)
- delivery_eta (TEXT)
- delivery_time (TEXT)
"""
# System prompt for the SQL agent: embeds the schema above and adds guardrails
# restricting the agent to read-only SELECT queries, refusing destructive
# statements, schema dumps, and requests without an order id.
system_message = f"""
You are an expert & secure SQLite assistant specialized in answering questions using the customer_orders.db SQLite database.
{ORDERS_SCHEMA}
Security Rules:
1. You are a read-only, helpful database assistant.
If the user mentions 'hacking' or 'unauthorized access', immediately refuse the request.
2. NEVER generate DROP, DELETE, UPDATE, INSERT, ALTER, or CREATE statements.
Your sole purpose is read-only querying.
3. Do not provide the full table schema, definition, or a list of all tables.
Only query tables relevant to the user's specific request.
4. Only generate safe, read-only SQL queries (SELECT).
5. Only run query when order id is given otherwise refuse gracefully.
"""
# Resolve the SQLite file relative to this script so it works from any CWD.
current_dir = os.path.dirname(os.path.abspath(__file__))
db_path = os.path.join(current_dir, "customer_orders.db")
db = SQLDatabase.from_uri(f"sqlite:///{db_path}")
# Wire the database and the LLM into LangChain's standard SQL toolkit.
toolkit = SQLDatabaseToolkit(db=db, llm=llm)
# Build the read-only SQL agent. Custom instructions are supplied through
# `prefix` (a plain string), the documented create_sql_agent hook for
# prepending system guidance to the agent prompt. The previous code passed
# system_message=SystemMessage(...), which is not a create_sql_agent
# parameter, so the security rules never reached the agent.
db_agent = create_sql_agent(
    llm=llm,
    toolkit=toolkit,
    verbose=False,
    prefix=system_message,
)
# Order lookup helper used by Order_Query_Tool.
def fetch_order_info(query: str) -> str:
    """Answer an order-related question by delegating to the SQL agent.

    The user's natural-language query is forwarded unchanged to the
    pre-configured db_agent; any failure is reported as an error string
    rather than raised, so the calling agent can surface it gracefully.
    """
    try:
        agent_reply = db_agent.invoke(query)
        return agent_reply["output"]
    except Exception as exc:
        return f"Error retrieving data: {str(exc)}"
# Expose fetch_order_info as a LangChain Tool so an agent can route
# order-status / delivery questions to the SQL agent by name.
order_query_tool = Tool(
name="Order_Query_Tool",
func=fetch_order_info,
description="Useful for when you need to answer questions about order status, delivery times, or specific order details."
)
def polish_response(raw_data: str) -> str:
    """Rewrite raw database output as a warm, professional customer reply.

    Sends the raw text through the LLM with a customer-service persona;
    facts are preserved, only tone and wording change.
    """
    persona = SystemMessage(content="You are a polite customer service representative. Take the provided raw data and turn it into a helpful, professional, and warm response. Do not change the facts.")
    payload = HumanMessage(content=f"Raw Data: {raw_data}")
    polished = llm.invoke([persona, payload])
    return polished.content
# Expose polish_response as a LangChain Tool so an agent can turn raw
# database output into a customer-friendly message as a final step.
answer_tool = Tool(
name="Answer_Polishing_Tool",
func=polish_response,
description="Useful for refining a technical or raw database response into a polite and formal customer message."
)