Spaces:
Runtime error
Add feature to output citations after the model's output (beta)
Browse files
app.py
CHANGED
|
@@ -172,6 +172,7 @@ def chat_llama3_8b(message: str,
|
|
| 172 |
) -> str:
|
| 173 |
"""
|
| 174 |
Generate a streaming response using the llama3-8b model.
|
|
|
|
| 175 |
"""
|
| 176 |
# Get citations from vector store
|
| 177 |
citation = query_vector_store(vector_store, message, 4, 0.7)
|
|
@@ -214,7 +215,18 @@ def chat_llama3_8b(message: str,
|
|
| 214 |
outputs = []
|
| 215 |
for text in streamer:
|
| 216 |
outputs.append(text)
|
| 217 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 218 |
|
| 219 |
|
| 220 |
# Gradio block
|
|
|
|
| 172 |
) -> str:
|
| 173 |
"""
|
| 174 |
Generate a streaming response using the llama3-8b model.
|
| 175 |
+
Will display citations after the response if citations are available.
|
| 176 |
"""
|
| 177 |
# Get citations from vector store
|
| 178 |
citation = query_vector_store(vector_store, message, 4, 0.7)
|
|
|
|
| 215 |
outputs = []
|
| 216 |
for text in streamer:
|
| 217 |
outputs.append(text)
|
| 218 |
+
current_output = "".join(outputs)
|
| 219 |
+
|
| 220 |
+
# If we have citations, append them at the end
|
| 221 |
+
if citation and text == streamer[-1]: # On the last chunk
|
| 222 |
+
citation_display = "\n\nReferences:\n" + "\n".join(
|
| 223 |
+
f"[{i+1}] {cite.strip()}"
|
| 224 |
+
for i, cite in enumerate(citation.split('\n'))
|
| 225 |
+
if cite.strip()
|
| 226 |
+
)
|
| 227 |
+
current_output += citation_display
|
| 228 |
+
|
| 229 |
+
yield current_output
|
| 230 |
|
| 231 |
|
| 232 |
# Gradio block
|