Create app.py
app.py ADDED
@@ -0,0 +1,45 @@
import gradio as gr
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, StorageContext, load_index_from_storage
from llama_index.core import PromptTemplate
import os

PERSIST_DIR = "./storage"
if not os.path.exists(PERSIST_DIR):
    # load the documents and create the index
    documents = SimpleDirectoryReader("data").load_data()
    index = VectorStoreIndex.from_documents(documents)
    # store it for later
    index.storage_context.persist(persist_dir=PERSIST_DIR)
else:
    # load the existing index
    storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
    index = load_index_from_storage(storage_context)

# Query Engine
query_engine = index.as_query_engine(response_mode="tree_summarize")

new_summary_tmpl_str = (
    "Context information is below.\n"
    "---------------------\n"
    "{context_str}\n"
    "---------------------\n"
    "Given the context information and not prior knowledge, "
    "as an employee specialized in customer service, your main role is to assist users by answering questions based on the context. "
    "You are here to ensure that users receive accurate and helpful responses to their inquiries, making their experience smooth and satisfactory. "
    "If you don't know something, tell the customer that you don't have the information yet and that it will be updated later. "
    "Answer in the same language as the query.\n"
    "Query: {query_str}\n"
    "Answer: "
)

new_summary_tmpl = PromptTemplate(new_summary_tmpl_str)
query_engine.update_prompts(
    {"response_synthesizer:summary_template": new_summary_tmpl}
)

def predict(user_prompt: str) -> str:
    related_doc = query_engine.query(user_prompt)
    return str(related_doc)  # query() returns a Response object; str() extracts the answer text


gr.Interface(fn=predict, inputs="text", outputs="text").launch()
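Note: app.py never configures an LLM or embedding model explicitly, so LlamaIndex falls back to its OpenAI defaults for both indexing and answering, which means the Space needs an OPENAI_API_KEY secret. If you want to make that dependency explicit (or swap providers), a minimal sketch would go near the top of app.py, before the index is built or loaded. This assumes the llama-index-llms-openai and llama-index-embeddings-openai packages are installed; the model names are only illustrative.

from llama_index.core import Settings
from llama_index.llms.openai import OpenAI
from llama_index.embeddings.openai import OpenAIEmbedding

# Assumption: OpenAI is the provider; OPENAI_API_KEY must be set as a Space secret.
Settings.llm = OpenAI(model="gpt-3.5-turbo")
Settings.embed_model = OpenAIEmbedding(model="text-embedding-3-small")

Setting these before the index is created or loaded matters because the same embedding model has to be used both when the documents are indexed into ./storage and when queries are embedded at answer time.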