Spaces:
Sleeping
Sleeping
Tafazzul-Nadeeem
committed on
Commit
·
d705d38
1
Parent(s):
98143a3
RAG1
Browse files- agents/__init__.py +2 -0
- agents/rag_decision_agent.py +42 -0
- rag/retriever.py → agents/rag_retrieval_agent.py +0 -0
- app.py +11 -5
- rag/__init__.py +0 -1
agents/__init__.py
ADDED
|
@@ -0,0 +1,2 @@
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from .rag_decision_agent import rag_decision
|
| 2 |
+
from .rag_retrieval_agent import get_top_k
|
agents/rag_decision_agent.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from openai import OpenAI
|
| 2 |
+
import os
|
| 3 |
+
import re
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
def rag_decision(query):
    """
    Decide whether Retrieval-Augmented Generation (RAG) is needed for a query.

    Asks an OpenAI chat model to classify the user's query. The model is
    instructed to answer "RAG NEEDED" only when the query (or part of it)
    concerns the diagnostics lab's rates, discounts or offers — the only
    content currently indexed for retrieval — and "RAG NOT NEEDED" otherwise.

    Parameters
    ----------
    query : str
        The user's raw query text.

    Returns
    -------
    bool
        True if the model's answer contains "RAG NEEDED", False otherwise.
    """
    client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
    prompt = f"""
You are an agent of a Diagnostics Lab agentic ai Chatbot system.
Your task is to determine whether the user's query requires Retrieval-Augmented
Generation (RAG) or not. Currently the system has access to the rates and offers
of the services provided by the diagnostics lab only.
If the query or part of the query is about the rates, discounts and offers
respond with "RAG NEEDED" and nothing else
If the query is general and can be answered with your knowledge respond with
"RAG NOT NEEDED" and nothing else.

User's query: {query}
"""
    # Make the API call to OpenAI. A plain string content is equivalent to
    # the verbose [{"type": "text", ...}] form for text-only prompts.
    response = client.chat.completions.create(
        model="gpt-4o-mini",
        messages=[
            {
                "role": "user",
                "content": prompt,
            }
        ],
    )
    # Guard against a None content (possible per the API's typing) so
    # .strip() cannot raise AttributeError.
    decision = (response.choices[0].message.content or "").strip()
    # Word-boundary match: "RAG NOT NEEDED" does not contain the contiguous
    # phrase "rag needed", so it correctly yields False.
    return bool(re.search(r'\brag needed\b', decision, re.IGNORECASE))
|
rag/retriever.py → agents/rag_retrieval_agent.py
RENAMED
|
File without changes
|
app.py
CHANGED
|
@@ -4,11 +4,14 @@ import os
|
|
| 4 |
import base64
|
| 5 |
import time
|
| 6 |
import copy
|
| 7 |
-
|
| 8 |
from dotenv import load_dotenv
|
| 9 |
# Load environment variables from .env file
|
| 10 |
load_dotenv()
|
| 11 |
|
|
|
|
|
|
|
|
|
|
| 12 |
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
|
| 13 |
|
| 14 |
|
|
@@ -23,10 +26,13 @@ with gr.Blocks() as demo:
|
|
| 23 |
)
|
| 24 |
clear = gr.ClearButton([chat_input, chatbot])
|
| 25 |
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
|
|
|
|
|
|
|
|
|
| 30 |
results = get_top_k(query, k=k)
|
| 31 |
return results
|
| 32 |
|
|
|
|
| 4 |
import base64
|
| 5 |
import time
|
| 6 |
import copy
|
| 7 |
+
|
| 8 |
from dotenv import load_dotenv
|
| 9 |
# Load environment variables from .env file
|
| 10 |
load_dotenv()
|
| 11 |
|
| 12 |
+
from agents import rag_decision
|
| 13 |
+
from agents import get_top_k
|
| 14 |
+
|
| 15 |
client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
|
| 16 |
|
| 17 |
|
|
|
|
| 26 |
)
|
| 27 |
clear = gr.ClearButton([chat_input, chatbot])
|
| 28 |
|
| 29 |
+
# Agent1 - RAG Decision Agent (whether RAG is needed for the user's query)
def agent1_rag_decision(query):
    """Return the RAG-needed decision for *query* (delegates to rag_decision)."""
    return rag_decision(query)
|
| 33 |
+
|
| 34 |
+
# Agent2 - RAG Retrieval Agent (retrieve top k relevant documents)
def agent2_use_rag(query, k=3):
    """Return the top *k* documents relevant to *query* (delegates to get_top_k)."""
    return get_top_k(query, k=k)
|
| 38 |
|
rag/__init__.py
DELETED
|
@@ -1 +0,0 @@
|
|
| 1 |
-
from .retriever import get_top_k
|
|
|
|
|
|