Spaces:
Sleeping
Sleeping
Aakash jammula committed on
Commit ·
d351bdb
1
Parent(s): 5e6bfdb
rag-fix
Browse files
app.py
CHANGED
|
@@ -17,7 +17,7 @@ GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
|
|
| 17 |
llm_rag = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0.5,google_api_key=GOOGLE_API_KEY)
|
| 18 |
|
| 19 |
prompt = PromptTemplate.from_template(
|
| 20 |
-
"you are rag bot
|
| 21 |
"(document contains my information/resume) and be engaging and only respond in text "
|
| 22 |
"like a human. Use context only when they ask about me (aakash jammula).\n\n"
|
| 23 |
"Context:\n{context}\n\nQuestion:\n{question}\nAnswer:"
|
|
@@ -73,6 +73,7 @@ async def ask(req: Query):
|
|
| 73 |
raise HTTPException(status_code=400, detail="Missing question text")
|
| 74 |
context = retrieve_context(req.q, k=req.k)
|
| 75 |
user_prompt = prompt.invoke({"context": context, "question": req.q})
|
|
|
|
| 76 |
resp = llm_rag.invoke(user_prompt)
|
| 77 |
return {"context": context, "answer": resp.content}
|
| 78 |
|
|
|
|
| 17 |
llm_rag = ChatGoogleGenerativeAI(model="gemini-2.0-flash", temperature=0.5,google_api_key=GOOGLE_API_KEY)
|
| 18 |
|
| 19 |
prompt = PromptTemplate.from_template(
|
| 20 |
+
"you are rag bot tasked to answer questions on aakash jammula "
|
| 21 |
"(document contains my information/resume) and be engaging and only respond in text "
|
| 22 |
"like a human. Use context only when they ask about me (aakash jammula).\n\n"
|
| 23 |
"Context:\n{context}\n\nQuestion:\n{question}\nAnswer:"
|
|
|
|
| 73 |
raise HTTPException(status_code=400, detail="Missing question text")
|
| 74 |
context = retrieve_context(req.q, k=req.k)
|
| 75 |
user_prompt = prompt.invoke({"context": context, "question": req.q})
|
| 76 |
+
print ("User prompt:", user_prompt)
|
| 77 |
resp = llm_rag.invoke(user_prompt)
|
| 78 |
return {"context": context, "answer": resp.content}
|
| 79 |
|
rag.py
CHANGED
|
@@ -23,7 +23,7 @@ embeddings = GoogleGenerativeAIEmbeddings(
|
|
| 23 |
)
|
| 24 |
|
| 25 |
|
| 26 |
-
vectorstore = PineconeVectorStore(index=pinecone_index, embedding=embeddings)
|
| 27 |
|
| 28 |
def retrieve_context(question: str, k: int = 5) -> str:
|
| 29 |
docs = vectorstore.similarity_search(question, k=k)
|
|
|
|
| 23 |
)
|
| 24 |
|
| 25 |
|
| 26 |
+
vectorstore = PineconeVectorStore(index=pinecone_index, embedding=embeddings,namespace="pdf-docs")
|
| 27 |
|
| 28 |
def retrieve_context(question: str, k: int = 5) -> str:
|
| 29 |
docs = vectorstore.similarity_search(question, k=k)
|