karora1804 committed on
Commit
4459161
·
verified ·
1 Parent(s): 0255904

Upload folder using huggingface_hub

Browse files
Files changed (6) hide show
  1. Dockerfile +9 -13
  2. app.py +7 -0
  3. chat_agent.py +134 -0
  4. chatbot_logic.py +144 -0
  5. customer_orders.db +0 -0
  6. requirements.txt +8 -3
Dockerfile CHANGED
# Use a minimal base image with Python 3.9 installed
FROM python:3.9-slim

# Set the working directory inside the container to /app
WORKDIR /app

# Copy only the dependency manifest first so the pip layer is cached
# and not rebuilt every time application code changes.
COPY requirements.txt ./

# Install Python dependencies listed in requirements.txt
RUN pip3 install -r requirements.txt

# Copy the rest of the application code into the container
COPY . .

# Document the port the Streamlit server listens on
EXPOSE 8501

# Define the command to run the Streamlit app on port 8501 and make it accessible externally
CMD ["streamlit", "run", "app.py", "--server.port=8501", "--server.address=0.0.0.0", "--server.enableXsrfProtection=false"]

# NOTE: Disable XSRF protection for easier external access in order to make batch predictions
app.py ADDED
@@ -0,0 +1,7 @@
 
 
 
 
 
 
 
 
1
import streamlit as st

st.title("Chat Input Demo")

# Render a chat box; the walrus operator binds the submitted text
# (or None/"" when nothing was entered, which skips the branch).
if message := st.chat_input("Ask something..."):
    st.write("HERE")
    st.write(f"You typed: {message}")
chat_agent.py ADDED
@@ -0,0 +1,134 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# import the packages required for API execution
import json
import os
import sqlite3
import warnings
from typing import Dict, List, Optional

import pandas as pd
from langchain import hub
from langchain.agents import Tool, create_sql_agent, initialize_agent, load_tools
from langchain.agents.agent_toolkits import SQLDatabaseToolkit
from langchain.agents.agent_types import AgentType
from langchain.sql_database import SQLDatabase
from langchain_core.messages import HumanMessage, SystemMessage
from langchain_groq import ChatGroq  # Groq-hosted LLM wrapper
from pydantic import BaseModel, Field, ValidationError

warnings.filterwarnings("ignore")

# Resolve the Groq API key.  The original code did an unconditional
# `from google.colab import userdata`, which raises ImportError anywhere
# outside Google Colab — including the Docker image this app ships in.
# Prefer the environment variable; fall back to Colab secrets only when
# that runtime is actually available.
groq_api_key = os.environ.get("GROQ_API_KEY")
if not groq_api_key:
    try:
        # Only importable inside a Colab runtime.
        from google.colab import userdata
        groq_api_key = userdata.get("GROQ_API_KEY")
    except ImportError:
        pass
if not groq_api_key:
    raise ValueError("Set the GROQ_API_KEY environment variable.")

# LLM instance with low temperature (deterministic output) for clear,
# accurate answers over long conversations.
llm = ChatGroq(
    model="meta-llama/llama-4-scout-17b-16e-instruct",
    temperature=0,       # low temperature -> deterministic responses
    max_tokens=None,
    timeout=None,
    max_retries=3,
    groq_api_key=groq_api_key,
)

# Human-readable schema description (for the LLM system prompt only).
ORDERS_SCHEMA = """
Table: orders_new

Columns:
- order_id (TEXT)
- cust_id (TEXT)
- order_time (TEXT)
- order_status_std (TEXT)
- payment_status_std (TEXT)
- item_in_order (TEXT)
- preparing_eta (TEXT)
- prepared_time (TEXT)
- delivery_eta (TEXT)
- delivery_time (TEXT)
"""

system_message = f"""
You are an expert & secure SQLite assistant specialized in answering questions using the customer_orders.db SQLite database.

{ORDERS_SCHEMA}

Security Rules:
1. You are a read-only, helpful database assistant.
If the user mentions 'hacking' or 'unauthorized access', immediately refuse the request.
2. NEVER generate DROP, DELETE, UPDATE, INSERT, ALTER, or CREATE statements.
Your sole purpose is read-only querying.
3. Do not provide the full table schema, definition, or a list of all tables.
Only query tables relevant to the user's specific request.
4. Only generate safe, read-only SQL queries (SELECT).
5. Only run query when order id is given otherwise refuse gracefully.
"""

# Locate the SQLite file next to this module so the app works no matter
# what the process working directory is.
current_dir = os.path.dirname(os.path.abspath(__file__))
db_path = os.path.join(current_dir, "customer_orders.db")
db = SQLDatabase.from_uri(f"sqlite:///{db_path}")

# Initialize the toolkit with the customer database and the LLM.
toolkit = SQLDatabaseToolkit(db=db, llm=llm)

# Create the SQL agent with the security-focused system message.
db_agent = create_sql_agent(
    llm=llm,
    toolkit=toolkit,
    verbose=False,
    system_message=SystemMessage(system_message),
)
102
+
103
# Order Query Tool: delegates the user's question to the SQL agent.
def fetch_order_info(query: str) -> str:
    """Fetches raw order details from the database using the SQL Agent."""
    try:
        # Pass the user's query straight to the pre-configured db_agent;
        # the key lookup stays inside the try so a malformed response is
        # also reported as an error string rather than raising.
        agent_result = db_agent.invoke(query)
        output = agent_result["output"]
    except Exception as e:
        return f"Error retrieving data: {str(e)}"
    return output


# Expose the fetcher as a LangChain Tool.
order_query_tool = Tool(
    name="Order_Query_Tool",
    func=fetch_order_info,
    description="Useful for when you need to answer questions about order status, delivery times, or specific order details.",
)
120
+
121
def polish_response(raw_data: str) -> str:
    """Rewrite raw database output as a warm, professional customer reply."""
    tone_instruction = SystemMessage(
        content="You are a polite customer service representative. Take the provided raw data and turn it into a helpful, professional, and warm response. Do not change the facts."
    )
    payload = HumanMessage(content=f"Raw Data: {raw_data}")
    # One LLM call turns the raw data into the customer-facing text.
    return llm.invoke([tone_instruction, payload]).content


# Expose the polisher as a LangChain Tool.
answer_tool = Tool(
    name="Answer_Polishing_Tool",
    func=polish_response,
    description="Useful for refining a technical or raw database response into a polite and formal customer message.",
)
chatbot_logic.py ADDED
@@ -0,0 +1,144 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
import os
import re
import json
from typing import Optional

import pandas as pd
from sqlalchemy import engine

from langchain_groq import ChatGroq
# BUG FIX: the names originally imported here (`Message`, `SysMessage`,
# `sqlagent`, `Buffer`) do not exist in these packages, so the module
# failed with ImportError at load time.  Alias the real classes under
# the original names so the rest of the module keeps working unchanged.
from langchain_core.messages import HumanMessage as Message, SystemMessage as SysMessage
from langchain.utilities import SQLDatabase
from langchain.agents import create_sql_agent as sqlagent
from langchain.memory import ConversationBufferMemory as Buffer

# The Groq API key must be provided via the environment; fail fast with
# a clear message otherwise.
GROQ_API = os.environ.get("GROQ_API")
if not GROQ_API:
    raise ValueError("Add GROQ_API as an environment variable.")

# Deterministic (temperature 0) LLM with a bounded response length.
model_name = "llama-3.3-70b-versatile"
llm = ChatGroq(
    model=model_name,
    temperature=0.0,
    max_tokens=512,
    api_key=GROQ_API,
)
24
+
25
+
26
# Path of the SQLite database shipped alongside this module.
db_path = "customer_orders.db"

# Open the database for LangChain's SQL tooling.
db = SQLDatabase.from_uri(f"sqlite:///{db_path}")

# Agent that answers questions by querying `db` through the LLM.
sql_agent = sqlagent(
    llm=llm,
    db=db,
    verbose=False,
)

# Conversation memory shared across turns; returns message objects so
# the history can be spliced directly into a prompt.
memory = Buffer(
    memory_key="chat_history",
    return_messages=True,
)
44
+
45
+
46
+
47
def findorder(text: str) -> Optional[str]:
    """Extract order id pattern (e.g. O12345) from user text."""
    # An order id is an 'O'/'o' followed by at least three digits,
    # delimited by word boundaries.
    match = re.search(r"\b([Oo]\d{3,})\b", text)
    if match:
        return match.group(1)
    return None
51
+
52
+
53
def llmgenerate(messages):
    """Call LLM safely, fallback if needed."""
    try:
        # Preferred path: batch-style generate API.
        generation = llm.generate([messages])
        text = generation.generations[0][0].text
    except Exception:
        # Fallback: single invoke; some response objects lack `.content`,
        # so degrade to their string form.
        reply = llm.invoke(messages)
        try:
            text = reply.content
        except Exception:
            text = str(reply)
    return text.strip()
65
+
66
+
67
def orderqtool(orderid: str) -> str:
    """Fetch order data using SQL agent (or fallback to raw DB).

    The order id is interpolated into a SQL string below, so it is first
    validated against the same pattern `findorder` extracts — defense in
    depth against SQL injection if this is ever called with raw input.
    """
    if not orderid:
        return "ERROR: No orderid provided."
    # Reject anything that is not a plain order id before building SQL.
    if not re.fullmatch(r"[Oo]\d{3,}", orderid):
        return f"ERROR: Invalid orderid '{orderid}'."
    try:
        query = f"SELECT * FROM orders WHERE orderid = '{orderid}';"
        raw_response = sql_agent.run(query)
        raw_text = str(raw_response)
    except Exception as e:
        # Agent failed — fall back to querying the database directly.
        try:
            rows = db.run(f"SELECT * FROM orders WHERE orderid = '{orderid}'")
            raw_text = json.dumps(rows, default=str, indent=2)
        except Exception as e2:
            raw_text = f"ERROR fetching order {orderid}: {e} / fallback: {e2}"
    return raw_text
82
+
83
+
84
def answer_tool(raw_order_context: str, user_question: str) -> str:
    """Turn raw order data into a short, polite customer-facing answer."""
    # Persona prompt (note: the two literals concatenate with no space,
    # exactly as in the original).
    system_prompt = SysMessage(
        content=(
            "You are a polite"
            "don't reveal sensitive details"
        )
    )

    user_prompt = Message(
        content=(
            f"Order context (raw):\n{raw_order_context}\n\n"
            f"Customer question:\n{user_question}\n\n"
            "Instructions:\n"
            "1) Answer in 2-4 sentences.\n"
            "2) If not found, ask politely for order id.\n"
        )
    )

    return llmgenerate([system_prompt, user_prompt])
105
+
106
+
107
+
108
def chatagent(uinput: str) -> dict:
    """
    Main agent entrypoint called by app.py
    Returns a dict with keys: uinput, orderid, mycontext, answer
    """
    result = {
        "uinput": uinput,
        "orderid": None,
        "mycontext": None,
        "answer": None,
    }

    # Prior turns, as message objects (empty list on the first turn).
    chat_history = memory.load_memory_variables({}).get("chat_history", [])

    # Try to pull an order id out of the user's text.
    found_id = findorder(uinput)
    result["orderid"] = found_id

    if found_id:
        # Order path: look the order up, then phrase a customer reply.
        context_text = orderqtool(found_id)
        result["mycontext"] = context_text
        result["answer"] = answer_tool(context_text, uinput)
    else:
        # General path: answer as a guarded assistant with history.
        guardrail = SysMessage(
            content=(
                "You are a food delivery assistant that helps users while strictly avoiding requests for sensitive information. "
            )
        )
        result["answer"] = llmgenerate(
            [guardrail, *chat_history, Message(content=uinput)]
        )

    # Persist this exchange so later turns see it in chat_history.
    memory.save_context({"input": uinput}, {"output": result["answer"]})

    return result
customer_orders.db ADDED
Binary file (8.19 kB). View file
 
requirements.txt CHANGED
@@ -1,3 +1,8 @@
1
- altair
2
- pandas
3
- streamlit
 
 
 
 
 
 
1
+ langchain==0.2.14
2
+ langchain-groq==0.1.9
3
+ langchain-community==0.2.12
4
+ langchain-experimental==0.0.62
5
+ langchainhub==0.1.20
6
+ numpy==1.26
7
+ pandas==2.2.2
8
+ streamlit==1.43.2