Update app.py
Browse files
app.py
CHANGED
|
@@ -21,12 +21,12 @@ def text_to_json(text):
|
|
| 21 |
# Function to restrict query results to the PDF dataset (returns relevant content)
|
| 22 |
def restrict_to_pdf_query(query, dataset):
|
| 23 |
relevant_content = []
|
| 24 |
-
|
| 25 |
-
|
| 26 |
for section in dataset["dataset"]:
|
| 27 |
section_content = section["content"].lower()
|
| 28 |
-
# Check if
|
| 29 |
-
if
|
| 30 |
relevant_content.append(section["content"])
|
| 31 |
|
| 32 |
return relevant_content if relevant_content else ["No relevant content found."]
|
|
@@ -85,11 +85,15 @@ if user_query:
|
|
| 85 |
|
| 86 |
# Use only the first chunk (you can modify this to iterate over chunks or dynamically choose a chunk)
|
| 87 |
if chunks:
|
|
|
|
|
|
|
|
|
|
|
|
|
| 88 |
chat_completion = client.chat.completions.create(
|
| 89 |
messages=[
|
| 90 |
{
|
| 91 |
"role": "user",
|
| 92 |
-
"content":
|
| 93 |
}
|
| 94 |
],
|
| 95 |
model="llama3-groq-70b-8192-tool-use-preview", # Updated model
|
|
|
|
# Function to restrict query results to the PDF dataset (returns relevant content)
def restrict_to_pdf_query(query, dataset):
    """Return the dataset sections relevant to *query*.

    Matching is case-insensitive. A section is relevant when it contains the
    full query as a phrase; if no section does, fall back to sections that
    contain at least one individual query word, so multi-word queries still
    surface partial matches instead of failing outright.

    Args:
        query: Free-text user query.
        dataset: Mapping with a "dataset" key holding a list of
            {"content": str} sections.

    Returns:
        A list of matching section contents (original casing preserved), or
        the single-element sentinel ["No relevant content found."] when
        nothing matches or the query is empty.
    """
    query_lower = query.lower().strip()
    sections = dataset["dataset"]

    # First pass: exact phrase match (the original behavior). The
    # `query_lower and` guard prevents an empty query from matching every
    # section ("" is a substring of everything).
    relevant_content = [
        section["content"]
        for section in sections
        if query_lower and query_lower in section["content"].lower()
    ]

    # Fallback: match on individual words, so e.g. "murder penalty" still
    # finds a section that mentions only "penalty".
    if not relevant_content:
        words = query_lower.split()
        relevant_content = [
            section["content"]
            for section in sections
            if any(word in section["content"].lower() for word in words)
        ]

    return relevant_content if relevant_content else ["No relevant content found."]
|
|
|
|
| 85 |
|
| 86 |
# Use only the first chunk (you can modify this to iterate over chunks or dynamically choose a chunk)
|
| 87 |
if chunks:
|
| 88 |
+
# Prepare a prompt that asks the model to act as an expert lawyer
|
| 89 |
+
prompt = f"""You are a Pakistani lawyer. Answer the following query based on the Pakistan Penal Code, explaining it in a professional and detailed manner, including references to specific sections of the code when applicable. If the information is found in the dataset, provide it accordingly. Query: "{user_query}"\nAnswer: {chunks[0]}"""
|
| 90 |
+
|
| 91 |
+
# Request answer from the model
|
| 92 |
chat_completion = client.chat.completions.create(
|
| 93 |
messages=[
|
| 94 |
{
|
| 95 |
"role": "user",
|
| 96 |
+
"content": prompt,
|
| 97 |
}
|
| 98 |
],
|
| 99 |
model="llama3-groq-70b-8192-tool-use-preview", # Updated model
|