Spaces:
Sleeping
Sleeping
suhail committed on
Commit ·
8119fcc
1
Parent(s): e012bf6
changes
Browse files
app/db/__pycache__/qdrant.cpython-313.pyc
CHANGED
|
Binary files a/app/db/__pycache__/qdrant.cpython-313.pyc and b/app/db/__pycache__/qdrant.cpython-313.pyc differ
|
|
|
app/db/qdrant.py
CHANGED
|
@@ -28,10 +28,13 @@ class QdrantDB:
|
|
| 28 |
raise ValueError("Missing Qdrant environment variables")
|
| 29 |
|
| 30 |
self._client = AsyncQdrantClient(
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
|
|
|
|
|
|
|
|
|
| 35 |
logger.info("QdrantDB client created")
|
| 36 |
|
| 37 |
return self._client
|
|
|
|
| 28 |
raise ValueError("Missing Qdrant environment variables")
|
| 29 |
|
| 30 |
self._client = AsyncQdrantClient(
|
| 31 |
+
url=self._url,
|
| 32 |
+
api_key=self._api_key,
|
| 33 |
+
prefer_grpc=False,
|
| 34 |
+
https=True,
|
| 35 |
+
verify=False # 🔥 LOCAL SSL FIX
|
| 36 |
+
)
|
| 37 |
+
|
| 38 |
logger.info("QdrantDB client created")
|
| 39 |
|
| 40 |
return self._client
|
app/rag/__pycache__/pipeline.cpython-313.pyc
CHANGED
|
Binary files a/app/rag/__pycache__/pipeline.cpython-313.pyc and b/app/rag/__pycache__/pipeline.cpython-313.pyc differ
|
|
|
app/rag/pipeline.py
CHANGED
|
@@ -31,15 +31,19 @@ class RAGPipeline:
|
|
| 31 |
payload = chunk.get('payload', {})
|
| 32 |
content = payload.get('content', '').strip()
|
| 33 |
|
| 34 |
-
if content:
|
| 35 |
-
context_texts.append(
|
|
|
|
|
|
|
|
|
|
| 36 |
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
|
|
|
|
| 43 |
|
| 44 |
# Remove duplicate sources
|
| 45 |
seen = set()
|
|
@@ -58,9 +62,10 @@ class RAGPipeline:
|
|
| 58 |
}
|
| 59 |
|
| 60 |
# Step 3: Combine context
|
| 61 |
-
|
|
|
|
| 62 |
|
| 63 |
-
# Step 4: Smart prompt for concise
|
| 64 |
system_prompt = "You are a helpful course assistant for Physical AI & Humanoid Robotics. Answer the user's question using ONLY the provided course book content. Be concise, clear, and to-the-point. Use bullet points if needed. Do not add extra information."
|
| 65 |
|
| 66 |
user_prompt = f"""Question: {question}
|
|
|
|
| 31 |
payload = chunk.get('payload', {})
|
| 32 |
content = payload.get('content', '').strip()
|
| 33 |
|
| 34 |
+
if len(content) > 120: # 🔥 minimum semantic length
|
| 35 |
+
context_texts.append(
|
| 36 |
+
f"[Source: {payload.get('chapter')}]\n{content}"
|
| 37 |
+
)
|
| 38 |
+
|
| 39 |
|
| 40 |
+
if len(content) > 120:
|
| 41 |
+
sources.append({
|
| 42 |
+
'source_file': payload.get('source_file', 'unknown'),
|
| 43 |
+
'chapter': payload.get('chapter', ''),
|
| 44 |
+
'section': payload.get('section', 'Unknown'),
|
| 45 |
+
'content': content[:500] + ("..." if len(content) > 500 else "")
|
| 46 |
+
})
|
| 47 |
|
| 48 |
# Remove duplicate sources
|
| 49 |
seen = set()
|
|
|
|
| 62 |
}
|
| 63 |
|
| 64 |
# Step 3: Combine context
|
| 65 |
+
context_texts = context_texts[:5] # top 5 only
|
| 66 |
+
combined_context = "\n\n---\n\n".join(context_texts)
|
| 67 |
|
| 68 |
+
# Step 4: Smart prompt for concise answer
|
| 69 |
system_prompt = "You are a helpful course assistant for Physical AI & Humanoid Robotics. Answer the user's question using ONLY the provided course book content. Be concise, clear, and to-the-point. Use bullet points if needed. Do not add extra information."
|
| 70 |
|
| 71 |
user_prompt = f"""Question: {question}
|