# Hugging Face Spaces app (page status header: "Sleeping" — extraction residue)
| import gradio as gr | |
| from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, StorageContext, load_index_from_storage | |
| from llama_index.core import PromptTemplate | |
| import os | |
# Directory where the vector index is persisted between runs.
PERSIST_DIR = "./storage"

if os.path.exists(PERSIST_DIR):
    # A persisted index already exists: rehydrate it from disk.
    storage_context = StorageContext.from_defaults(persist_dir=PERSIST_DIR)
    index = load_index_from_storage(storage_context)
else:
    # First run: ingest the documents from ./data, build the index,
    # then persist it so subsequent runs can skip ingestion.
    documents = SimpleDirectoryReader("data").load_data()
    index = VectorStoreIndex.from_documents(documents)
    index.storage_context.persist(persist_dir=PERSIST_DIR)
# Query engine over the index; "compact" packs retrieved chunks into as few
# LLM calls as possible before synthesizing the answer.
query_engine = index.as_query_engine(response_mode="compact")

# Custom text-QA prompt: customer-service persona with a fixed three-part
# answer format (restate the question, answer it, offer follow-up).
# NOTE: the original concatenated these literals with no separating spaces
# (sentences ran together) and misspelled "language"; both fixed here.
new_query_tmpl_str = (
    "Context information is below.\n"
    "---------------------\n"
    "{context_str}\n"
    "---------------------\n"
    "Given the context information and not prior knowledge, "
    "as an employee specialized in customer service, your main role is to "
    "assist users by answering questions based on the context. "
    "You are here to ensure that users receive accurate and helpful "
    "responses to their inquiries, making their experience smooth and "
    "satisfactory. "
    "If you don't know something, tell the user that you don't have the "
    "information yet and that it will be updated later. "
    "Your answer should follow this format.\n"
    "-------------------------\n"
    "In this first section, you will summarize and reflect your "
    "understanding of the user's question. This step ensures you grasp "
    "exactly what the user is asking or seeking, preparing to address it "
    "appropriately in your response.\n"
    "In this section, you will provide a detailed answer to the user's "
    "question. This part includes relevant information, solutions, or "
    "suggestions related to the user's query, aiming to deliver the "
    "necessary insights in a clear and understandable manner.\n"
    "In this final section, you'll ask if the answer was satisfactory or "
    "if there's a need for further information or clarification. This is "
    "to ensure the quality of your response and offer an opportunity to "
    "address any additional queries the user might have.\n"
    "-------------------------\n"
    "The answer must be the same language as the query.\n"
    "Query: {query_str}\n"
    "Answer: "
)
# (variable name fixed: was misspelled "new_qeury_tmpl")
new_query_tmpl = PromptTemplate(new_query_tmpl_str)

# Swap llama-index's default text-QA prompt for the custom one.
query_engine.update_prompts(
    {"response_synthesizer:text_qa_template": new_query_tmpl}
)
def predict(user_prompt: str) -> str:
    """Answer ``user_prompt`` using the RAG query engine.

    Fix: the original returned the llama-index ``Response`` object even
    though the function is annotated ``-> str`` and is wired to Gradio's
    ``outputs="text"``. ``str(response)`` yields the synthesized answer
    text, matching both the annotation and the UI's expectation.
    """
    response = query_engine.query(user_prompt)
    return str(response)
# Expose the predictor as a minimal text-in / text-out web UI and serve it.
demo = gr.Interface(
    fn=predict,
    inputs="text",
    outputs="text",
    title="verychat customer service",
    description="CS with ChatGPT3.5 and llama-index",
)
demo.launch()