Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -155,6 +155,23 @@ def format_citations_with_links(sources, uploaded_files):
|
|
| 155 |
|
| 156 |
return "".join(citations_html)
|
| 157 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 158 |
# ========================================
|
| 159 |
# MAIN GRADIO QUERY FUNCTION
|
| 160 |
# ========================================
|
|
|
|
| 155 |
|
| 156 |
return "".join(citations_html)
|
| 157 |
|
| 158 |
+
# ========================================
# Creating the llm with model
# ========================================
def create_llm_pipeline():
    """Create a text-generation LLM pipeline compatible with LangChain.

    Wraps microsoft/DialoGPT-medium in a LangChain ``HuggingFacePipeline``.

    Returns:
        HuggingFacePipeline: a LangChain-compatible LLM ready for chains.
    """
    return HuggingFacePipeline.from_model_id(
        model_id="microsoft/DialoGPT-medium",
        task="text-generation",
        # BUG FIX: `device_map` accepts "auto" / a placement mapping / None.
        # The integer -1 is the CPU sentinel for the separate `device`
        # argument of transformers.pipeline and is invalid as a device_map,
        # so the original raised on CPU-only machines. Fall back to None
        # (default CPU placement) when no GPU is available.
        device_map="auto" if torch.cuda.is_available() else None,
        pipeline_kwargs={
            "max_new_tokens": 200,
            "do_sample": True,
            "temperature": 0.7,
            # GPT-2-family tokenizers (DialoGPT) define no pad token.
            # Use the EOS id (50256) to silence the "pad_token_id not set"
            # warning; the original 0 is the real token "!" and could
            # corrupt padded/batched generations.
            "pad_token_id": 50256,
        },
    )
|
| 174 |
+
|
| 175 |
# ========================================
|
| 176 |
# MAIN GRADIO QUERY FUNCTION
|
| 177 |
# ========================================
|