Spaces:
Running
on
Zero
Running
on
Zero
Felipe Silva
committed on
Commit
·
d54767c
1
Parent(s):
ece1514
Separando resposta no prompt
Browse files
- app.py +4 -1
- rag_utils.py +3 -1
app.py
CHANGED
|
@@ -46,7 +46,10 @@ def ask_question(texto_extraido, question):
|
|
| 46 |
vector_store = store_docs(docs_splitted)
|
| 47 |
rag_chain = create_rag_chain(vector_store)
|
| 48 |
|
| 49 |
-
resposta = rag_chain.run(question)
|
|
|
|
|
|
|
|
|
|
| 50 |
return resposta
|
| 51 |
|
| 52 |
def launch_app():
|
|
|
|
| 46 |
vector_store = store_docs(docs_splitted)
|
| 47 |
rag_chain = create_rag_chain(vector_store)
|
| 48 |
|
| 49 |
+
# resposta = rag_chain.run(question)
|
| 50 |
+
response = rag_chain({"query": question})
|
| 51 |
+
resposta = response["result"]
|
| 52 |
+
|
| 53 |
return resposta
|
| 54 |
|
| 55 |
def launch_app():
|
rag_utils.py
CHANGED
|
@@ -98,7 +98,8 @@ def create_rag_chain(vectorstore):
|
|
| 98 |
tokenizer=get_tokenizer(),
|
| 99 |
max_new_tokens=512,
|
| 100 |
temperature=0.1,
|
| 101 |
-
do_sample=False
|
|
|
|
| 102 |
)
|
| 103 |
|
| 104 |
# Adapta para LangChain
|
|
@@ -108,6 +109,7 @@ def create_rag_chain(vectorstore):
|
|
| 108 |
llm=llm,
|
| 109 |
retriever=vectorstore.as_retriever(),
|
| 110 |
chain_type="stuff",
|
|
|
|
| 111 |
chain_type_kwargs={"prompt": create_template()}
|
| 112 |
)
|
| 113 |
|
|
|
|
| 98 |
tokenizer=get_tokenizer(),
|
| 99 |
max_new_tokens=512,
|
| 100 |
temperature=0.1,
|
| 101 |
+
do_sample=False,
|
| 102 |
+
return_full_text=False
|
| 103 |
)
|
| 104 |
|
| 105 |
# Adapta para LangChain
|
|
|
|
| 109 |
llm=llm,
|
| 110 |
retriever=vectorstore.as_retriever(),
|
| 111 |
chain_type="stuff",
|
| 112 |
+
return_source_documents=True,
|
| 113 |
chain_type_kwargs={"prompt": create_template()}
|
| 114 |
)
|
| 115 |
|