Spaces:
Sleeping
Sleeping
SHAMIL SHAHBAZ AWAN
committed on
Update app.py
Browse files
app.py
CHANGED
|
@@ -178,11 +178,6 @@ if user_query:
|
|
| 178 |
# Retrieve the most relevant chunks based on the valid indices
|
| 179 |
retrieved_chunks = [chunks[idx] for idx in valid_indices]
|
| 180 |
|
| 181 |
-
# Display the retrieved chunks
|
| 182 |
-
st.subheader("Retrieved Chunks")
|
| 183 |
-
for chunk in retrieved_chunks:
|
| 184 |
-
st.write(chunk)
|
| 185 |
-
|
| 186 |
# Combine the retrieved chunks with the query and generate a response using Groq
|
| 187 |
combined_input = " ".join(retrieved_chunks) + user_query
|
| 188 |
|
|
@@ -196,7 +191,7 @@ if user_query:
|
|
| 196 |
model="llama3-8b-8192", # Specify the model you want to use
|
| 197 |
)
|
| 198 |
|
| 199 |
-
# Display the generated response
|
| 200 |
st.subheader("Generated Response")
|
| 201 |
st.write(chat_completion.choices[0].message.content)
|
| 202 |
except Exception as e:
|
|
|
|
| 178 |
# Retrieve the most relevant chunks based on the valid indices
|
| 179 |
retrieved_chunks = [chunks[idx] for idx in valid_indices]
|
| 180 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 181 |
# Combine the retrieved chunks with the query and generate a response using Groq
|
| 182 |
combined_input = " ".join(retrieved_chunks) + user_query
|
| 183 |
|
|
|
|
| 191 |
model="llama3-8b-8192", # Specify the model you want to use
|
| 192 |
)
|
| 193 |
|
| 194 |
+
# Display only the generated response
|
| 195 |
st.subheader("Generated Response")
|
| 196 |
st.write(chat_completion.choices[0].message.content)
|
| 197 |
except Exception as e:
|