Syed Sadaqat Yar commited on
Commit
4d65ce5
·
1 Parent(s): 29dd5c3

feat: Deploy NEXUS E-commerce RAG Application

Browse files
Files changed (11) hide show
  1. .gitattributes +0 -35
  2. Dockerfile +0 -20
  3. README.md +71 -19
  4. agent_setup.py +28 -0
  5. app.py +76 -0
  6. rag_setup.py +43 -0
  7. requirements.txt +14 -3
  8. runner.py +12 -0
  9. runtime.txt +1 -0
  10. src/streamlit_app.py +0 -40
  11. tools.py +17 -0
.gitattributes DELETED
@@ -1,35 +0,0 @@
1
- *.7z filter=lfs diff=lfs merge=lfs -text
2
- *.arrow filter=lfs diff=lfs merge=lfs -text
3
- *.bin filter=lfs diff=lfs merge=lfs -text
4
- *.bz2 filter=lfs diff=lfs merge=lfs -text
5
- *.ckpt filter=lfs diff=lfs merge=lfs -text
6
- *.ftz filter=lfs diff=lfs merge=lfs -text
7
- *.gz filter=lfs diff=lfs merge=lfs -text
8
- *.h5 filter=lfs diff=lfs merge=lfs -text
9
- *.joblib filter=lfs diff=lfs merge=lfs -text
10
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
11
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
12
- *.model filter=lfs diff=lfs merge=lfs -text
13
- *.msgpack filter=lfs diff=lfs merge=lfs -text
14
- *.npy filter=lfs diff=lfs merge=lfs -text
15
- *.npz filter=lfs diff=lfs merge=lfs -text
16
- *.onnx filter=lfs diff=lfs merge=lfs -text
17
- *.ot filter=lfs diff=lfs merge=lfs -text
18
- *.parquet filter=lfs diff=lfs merge=lfs -text
19
- *.pb filter=lfs diff=lfs merge=lfs -text
20
- *.pickle filter=lfs diff=lfs merge=lfs -text
21
- *.pkl filter=lfs diff=lfs merge=lfs -text
22
- *.pt filter=lfs diff=lfs merge=lfs -text
23
- *.pth filter=lfs diff=lfs merge=lfs -text
24
- *.rar filter=lfs diff=lfs merge=lfs -text
25
- *.safetensors filter=lfs diff=lfs merge=lfs -text
26
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
27
- *.tar.* filter=lfs diff=lfs merge=lfs -text
28
- *.tar filter=lfs diff=lfs merge=lfs -text
29
- *.tflite filter=lfs diff=lfs merge=lfs -text
30
- *.tgz filter=lfs diff=lfs merge=lfs -text
31
- *.wasm filter=lfs diff=lfs merge=lfs -text
32
- *.xz filter=lfs diff=lfs merge=lfs -text
33
- *.zip filter=lfs diff=lfs merge=lfs -text
34
- *.zst filter=lfs diff=lfs merge=lfs -text
35
- *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
Dockerfile DELETED
@@ -1,20 +0,0 @@
1
- FROM python:3.13.5-slim
2
-
3
- WORKDIR /app
4
-
5
- RUN apt-get update && apt-get install -y \
6
- build-essential \
7
- curl \
8
- git \
9
- && rm -rf /var/lib/apt/lists/*
10
-
11
- COPY requirements.txt ./
12
- COPY src/ ./src/
13
-
14
- RUN pip3 install -r requirements.txt
15
-
16
- EXPOSE 8501
17
-
18
- HEALTHCHECK CMD curl --fail http://localhost:8501/_stcore/health
19
-
20
- ENTRYPOINT ["streamlit", "run", "src/streamlit_app.py", "--server.port=8501", "--server.address=0.0.0.0"]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
README.md CHANGED
@@ -1,19 +1,71 @@
1
- ---
2
- title: Nexus E Commerce
3
- emoji: 🚀
4
- colorFrom: red
5
- colorTo: red
6
- sdk: docker
7
- app_port: 8501
8
- tags:
9
- - streamlit
10
- pinned: false
11
- short_description: NEXUS E-Commerce Customer Support Chatbot
12
- ---
13
-
14
- # Welcome to Streamlit!
15
-
16
- Edit `/src/streamlit_app.py` to customize this app to your heart's desire. :heart:
17
-
18
- If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
19
- forums](https://discuss.streamlit.io).
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # NEXUS E-Commerce Customer Support Chatbot
2
+
3
+ A simple AI chatbot for e-commerce customer support using RAG (Retrieval-Augmented Generation).
4
+
5
+ ## What it does
6
+
7
+ This chatbot answers customer questions by searching through your product documentation and knowledge base. It uses AI models like Gemini or Groq to provide helpful responses in real-time.
8
+
9
+ **Note**: All data used in this project is AI-generated and fake - created for demonstration purposes only.
10
+
11
+ ## Features
12
+
13
+ - Chat interface built with Streamlit
14
+ - Supports multiple AI models (Gemini, Groq)
15
+ - Searches through PDF documents to find relevant answers
16
+ - Real-time streaming responses
17
+ - Easy to set up and customize
18
+
19
+ ## Getting Started
20
+
21
+ 1. Clone this repository
22
+ 2. Install the required packages:
23
+ ```bash
24
+ pip install -r requirements.txt
25
+ ```
26
+
27
+ 3. Create a `.env` file with your API keys:
28
+ ```
29
+ GROQ_API_KEY=your_key_here
30
+ GEMINI_API_KEY=your_key_here
31
+ ```
32
+
33
+ 4. Run the app:
34
+ ```bash
35
+ streamlit run app.py
36
+ ```
37
+
38
+ 5. Open your browser to `http://localhost:8501`
39
+
40
+ ## How it works
41
+
42
+ The system has a few main parts:
43
+
44
+ - **Document Processing**: Loads and splits PDF files into searchable chunks
45
+ - **Vector Search**: Creates embeddings to find relevant information quickly
46
+ - **AI Agent**: Uses the retrieved information to generate helpful responses
47
+ - **Chat Interface**: Simple web interface for conversations
48
+
49
+ ## Files
50
+
51
+ - `app.py` - Main Streamlit application
52
+ - `rag_setup.py` - Handles document loading and vector store creation
53
+ - `agent_setup.py` - Configures the AI agents
54
+ - `tools.py` - Search functionality
55
+ - `runner.py` - Processes queries and streams responses
56
+
57
+ ## Customization
58
+
59
+ You can easily:
60
+ - Add new AI models in the settings
61
+ - Update the knowledge base by adding new PDF documents
62
+ - Change the chat interface styling
63
+ - Adjust how documents are processed and searched
64
+
65
+ ## Requirements
66
+
67
+ - Python 3.8+
68
+ - API key for Gemini or Groq
69
+ - PDF documents for your knowledge base
70
+
71
+ That's it! The system is designed to be simple and straightforward to use.
agent_setup.py ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from agents import AsyncOpenAI,set_default_openai_client,set_default_openai_api,set_tracing_disabled,Agent
2
+
3
def set_sdk_client(api_key: str, base_url: str):
    """Configure the Agents SDK to talk to an OpenAI-compatible endpoint.

    Called with either the Gemini or the Groq base URL, depending on the
    provider the user picked in the UI. Tracing is disabled and the SDK is
    switched to the chat-completions API surface.
    """
    set_tracing_disabled(disabled=True)
    sdk_client = AsyncOpenAI(api_key=api_key, base_url=base_url)
    set_default_openai_client(sdk_client)
    set_default_openai_api("chat_completions")
11
+
12
+ # -----------------------------
13
+
14
+ def create_agent(model_name, search_tool):
15
+ """
16
+ Create an Agent with the specified model and search tool.
17
+ """
18
+ agent = Agent(
19
+ name="NexusCustomerSupport",
20
+ model=model_name,
21
+ instructions = (
22
+ "You are a helpful assistant specialized in answering questions strictly based on the NEXUS document. "
23
+ "Always use the search_docs tool to retrieve relevant information from the document before responding. "
24
+ "If the user’s query is outside the scope of the document, politely state that you cannot provide an answer."
25
+ ),
26
+ tools=[search_tool],
27
+ )
28
+ return agent
app.py ADDED
@@ -0,0 +1,76 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import asyncio
3
+ import os
4
+ from dotenv import load_dotenv
5
+ from rag_setup import load_and_split_pdf, build_vectorstore
6
+ from tools import create_search_tool
7
+ from agent_setup import create_agent, set_sdk_client
8
+ from runner import run_query
9
+
10
# Load environment variables from .env so default API keys are available
# when the user does not paste one in the sidebar.
load_dotenv()
groq_key = os.getenv("GROQ_API_KEY")      # may be None if not configured
gemini_key = os.getenv("GEMINI_API_KEY")  # may be None if not configured
14
+
15
+ # Streamlit UI
16
def main():
    """Render the NEXUS customer-support chat UI and handle one query cycle.

    Streamlit reruns this function on every interaction; only the retriever
    is cached in session_state, everything else is rebuilt per rerun.
    """
    st.set_page_config(page_title="NEXUS E-COMMERCE - Customer Support Chatbot", page_icon="🛒", layout="wide")

    # Light styling for the chat bubbles.
    st.markdown("""
    <style>
    .stChatMessage.user {background:`#dbeafe`;border-radius:12px;padding:8px;}
    .stChatMessage.assistant {background:`#f1f5f9`;border-radius:12px;padding:8px;}
    </style>
    """, unsafe_allow_html=True)

    st.title("🛒 NEXUS E-COMMERCE")
    st.subheader("🤖 Customer Support Chatbot")

    # Sidebar: Settings
    st.sidebar.header("⚙️ Settings")
    model_choice = st.sidebar.radio("Model Provider", ["Gemini", "Groq"])

    # Per-provider defaults: env-sourced key, OpenAI-compatible base URL, model id.
    DEFAULT_KEYS = {
        "Gemini": {"api_key": gemini_key, "base_url": "https://generativelanguage.googleapis.com/v1beta/openai/", "model": "gemini-2.5-flash"},
        "Groq": {"api_key": groq_key, "base_url": "https://api.groq.com/openai/v1", "model": "llama-3.3-70b-versatile"}
    }

    api_key = st.sidebar.text_input("🔑 Enter API Key (or leave blank for default)", type="password")
    model_name = st.sidebar.text_input("🧠 Model Name", value=DEFAULT_KEYS[model_choice]["model"])

    # Knowledge Base (persistent across app run): build the vector store once
    # and cache it in session_state so the PDF is not re-embedded per rerun.
    if "retriever" not in st.session_state:
        docs = load_and_split_pdf()
        st.session_state.retriever = build_vectorstore(docs)

    search_tool = create_search_tool(st.session_state.retriever)

    # Client + Agent: a pasted key overrides the provider default.
    # NOTE(review): if the env var is unset and the field is blank,
    # final_api_key is None — confirm downstream client handles that.
    final_api_key = api_key if api_key else DEFAULT_KEYS[model_choice]["api_key"]
    set_sdk_client(final_api_key, DEFAULT_KEYS[model_choice]["base_url"])
    agent = create_agent(model_name, search_tool)

    # Chat Interface (single-turn: history is not persisted across reruns).
    user_input = st.chat_input("💬 Ask something about NEXUS E-COMMERCE...")

    if user_input:
        with st.chat_message("user"):
            st.write(user_input)

        with st.chat_message("assistant"):
            response_container = st.empty()
            full_response_parts = []

            # Stream the agent's answer chunk-by-chunk into the placeholder.
            # NOTE(review): this requires run_query to be an async generator
            # yielding text deltas — verify against runner.run_query.
            async def fetch():
                async for chunk in run_query(agent, user_input):
                    full_response_parts.append(chunk)
                    response_container.write("".join(full_response_parts))

            asyncio.run(fetch())

if __name__ == "__main__":
    main()
rag_setup.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langchain_community.document_loaders import PyMuPDFLoader
2
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
3
+ from langchain_community.vectorstores import FAISS
4
+ from langchain_community.embeddings import HuggingFaceEmbeddings
5
+ from datasets import load_dataset
6
+ import tempfile
7
+ import shutil
8
+
9
def load_pdf():
    """Download the NEXUS PDF from the HF dataset and stage it in a temp file.

    Returns:
        str: filesystem path of the temporary ``.pdf`` copy. The file is
        created with ``delete=False`` so it survives past this call; it is
        never removed explicitly (the OS temp dir is left to handle it).
    """
    # Load dataset containing the knowledge-base PDF.
    dataset = load_dataset("sadaqatyar/NEXUS")

    # First training record holds the PDF — presumably a local file path
    # materialized by `datasets`; verify against the dataset schema.
    pdf_file = dataset["train"][0]["file"]

    # Copy into a temp file. Both handles are closed deterministically via
    # context managers — the original leaked the source file handle opened
    # inline in shutil.copyfileobj(open(...), ...).
    with tempfile.NamedTemporaryFile(delete=False, suffix='.pdf') as temp_pdf, \
            open(pdf_file, 'rb') as src:
        shutil.copyfileobj(src, temp_pdf)
        return temp_pdf.name
22
+
23
# Usage
# NOTE(review): this runs at import time — importing rag_setup downloads the
# dataset and stages the PDF as a side effect. Consider lazy initialization.
pdf_path = load_pdf()  # if it's a single PDF
25
+
26
def load_and_split_pdf(pdf_path=pdf_path):
    """Load the PDF at *pdf_path* and split its pages into retrieval chunks.

    Defaults to the module-level staged PDF. Chunks are ~3000 characters
    with a 100-character overlap, split preferentially at paragraph,
    line, sentence, then word boundaries.
    """
    pages = PyMuPDFLoader(pdf_path).load()
    chunker = RecursiveCharacterTextSplitter(
        chunk_size=3000,
        chunk_overlap=100,
        separators=["\n\n", "\n", ".", " "],
    )
    return chunker.split_documents(pages)
36
+
37
+
38
+
39
def build_vectorstore(docs):
    """Embed *docs* with MiniLM, index them in FAISS, and return a retriever."""
    embedder = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
    index = FAISS.from_documents(docs, embedder)
    return index.as_retriever()
requirements.txt CHANGED
@@ -1,3 +1,14 @@
1
- altair
2
- pandas
3
- streamlit
 
 
 
 
 
 
 
 
 
 
 
 
1
+ faiss-cpu>=1.12.0
2
+ ipykernel>=6.30.1
3
+ langchain>=0.3.27
4
+ langchain-community>=0.3.27
5
+ langchain-huggingface>=0.3.1
6
+ openai-agents>=0.2.9
7
+ pymupdf>=1.26.3
8
+ streamlit>=1.48.1
9
+ python-dotenv
10
+ openai
11
+ groq
12
+ google-generativeai
13
+ sentence-transformers>=5.1.0
14
+ datasets
runner.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from agents import Runner
2
+ from openai.types.responses import ResponseTextDeltaEvent
3
+
4
async def run_query(agent, query: str):
    """Run *query* against *agent* and stream back text deltas.

    Yields:
        str: each incremental text chunk produced by the model, so callers
        (the Streamlit UI iterates this with ``async for``) can render the
        response as it arrives.
    """
    response = Runner.run_streamed(agent, query)

    async for event in response.stream_events():
        # Only raw text-delta events carry user-visible output; skip tool
        # calls, lifecycle events, etc.
        if event.type == "raw_response_event" and isinstance(event.data, ResponseTextDeltaEvent):
            # Yield (don't print): app.py consumes this as an async generator.
            # The original printed to stdout and never yielded, making the
            # function a plain coroutine — `async for` over it raises
            # TypeError and the UI received no text.
            yield event.data.delta
runtime.txt ADDED
@@ -0,0 +1 @@
 
 
1
+ python-3.12
src/streamlit_app.py DELETED
@@ -1,40 +0,0 @@
1
- import altair as alt
2
- import numpy as np
3
- import pandas as pd
4
- import streamlit as st
5
-
6
- """
7
- # Welcome to Streamlit!
8
-
9
- Edit `/streamlit_app.py` to customize this app to your heart's desire :heart:.
10
- If you have any questions, checkout our [documentation](https://docs.streamlit.io) and [community
11
- forums](https://discuss.streamlit.io).
12
-
13
- In the meantime, below is an example of what you can do with just a few lines of code:
14
- """
15
-
16
- num_points = st.slider("Number of points in spiral", 1, 10000, 1100)
17
- num_turns = st.slider("Number of turns in spiral", 1, 300, 31)
18
-
19
- indices = np.linspace(0, 1, num_points)
20
- theta = 2 * np.pi * num_turns * indices
21
- radius = indices
22
-
23
- x = radius * np.cos(theta)
24
- y = radius * np.sin(theta)
25
-
26
- df = pd.DataFrame({
27
- "x": x,
28
- "y": y,
29
- "idx": indices,
30
- "rand": np.random.randn(num_points),
31
- })
32
-
33
- st.altair_chart(alt.Chart(df, height=700, width=700)
34
- .mark_point(filled=True)
35
- .encode(
36
- x=alt.X("x", axis=None),
37
- y=alt.Y("y", axis=None),
38
- color=alt.Color("idx", legend=None, scale=alt.Scale()),
39
- size=alt.Size("rand", legend=None, scale=alt.Scale(range=[1, 150])),
40
- ))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
tools.py ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from agents import function_tool
2
+
3
def create_search_tool(retriever):
    """Wrap *retriever* in a `search_docs` function tool for the agent."""

    @function_tool
    def search_docs(query: str) -> str:
        """Search the knowledge base for relevant information."""
        hits = retriever.get_relevant_documents(query)
        formatted = [
            f"[Result {idx}] (Page {hit.metadata.get('page', 'N/A')}, Source: {hit.metadata.get('source', 'N/A')})\n{hit.page_content}"
            for idx, hit in enumerate(hits, start=1)
        ]
        return "\n\n".join(formatted)

    # Hand the decorated tool back so it can be attached to the Agent.
    return search_docs