# Hugging Face Space: document-retrieval chat app (llama_index + Gradio).
# (The Space was previously showing "Runtime error" — see fixes below.)
from llama_index.core import VectorStoreIndex, get_response_synthesizer
from llama_index.core.retrievers import VectorIndexRetriever
from llama_index.core.query_engine import RetrieverQueryEngine
from llama_index.core.postprocessor import SimilarityPostprocessor
# NOTE(review): get_response_synthesizer, RetrieverQueryEngine,
# SimilarityPostprocessor and ServiceContext are imported but unused in the
# visible code — kept in case other code depends on them being loaded.
from llama_index.core import SimpleDirectoryReader, ServiceContext
import gradio as gr
import logging
import sys
import os

# Send llama_index / app logs to stdout so they show up in the Space logs.
logging.basicConfig(stream=sys.stdout, level=logging.INFO)
logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))

# os.environ values must be strings: assigning os.getenv(...) directly raises
# TypeError when the variable is unset (getenv returns None). Only re-set the
# key when it is actually present; OpenAI clients read it from the env anyway.
_api_key = os.getenv("OPENAI_API_KEY")
if _api_key is not None:
    os.environ["OPENAI_API_KEY"] = _api_key
def initialize_vector_store_index():
    """Build an in-memory vector index over the management documents.

    Loads every file under ``./document/management/`` with
    ``SimpleDirectoryReader`` and embeds them into a ``VectorStoreIndex``
    (embedding calls require OPENAI_API_KEY to be set).

    Returns:
        VectorStoreIndex: index over all loaded documents.

    Raises:
        ValueError: if the directory does not exist or contains no
            readable documents (raised by SimpleDirectoryReader).
    """
    documents = SimpleDirectoryReader("./document/management/").load_data()
    # build index
    index = VectorStoreIndex.from_documents(documents)
    return index
# Build the index once at startup and share a single retriever across all
# chat sessions — re-embedding the corpus per request would be very slow.
index = initialize_vector_store_index()

# configure retriever
# similarity_top_k=1: each query returns only the single closest chunk.
retriever = VectorIndexRetriever(
    index=index,
    similarity_top_k=1,
)
def get_response(text, history):
    """Chat handler: return the best-matching document chunk for a query.

    Args:
        text: the user's chat message (used as the retrieval query).
        history: prior chat turns supplied by gr.ChatInterface (unused).

    Returns:
        str: the top chunk's source metadata followed by its text, or a
        fallback message when retrieval finds nothing.
    """
    # For simplicity, we are only using the 'text' argument
    results = retriever.retrieve(text)
    # Guard the empty case: indexing results[0] unconditionally would raise
    # IndexError and crash the chat handler when no chunk matches.
    if not results:
        return "No matching documents found."
    top = results[0]
    return str(top.metadata) + "\n" + top.text
# Wire the retrieval handler into a chat UI and start the server.
# debug=True blocks and streams errors to the console; share=True also
# exposes a temporary public gradio.live URL.
chat_ui = gr.ChatInterface(get_response, analytics_enabled=True)
chat_ui.launch(debug=True, share=True)