# Spaces: Sleeping
# Sleeping
# (Hugging Face Spaces status banner captured during scraping — not part of the program.)
import asyncio
import os

import gradio as gr
import websockets
from llama_index import StorageContext, get_response_synthesizer
from llama_index.indices.document_summary import (
    DocumentSummaryIndexEmbeddingRetriever,
)
from llama_index.indices.loading import load_index_from_storage
from llama_index.llms import OpenAI
from llama_index.query_engine import RetrieverQueryEngine

# Fail fast at import time if the key is absent (raises KeyError).
# NOTE(review): the value is read but never used directly in this file —
# presumably llama_index/openai pick it up from the environment themselves.
os.environ["OPENAI_API_KEY"]

# Rebuild the storage context from the persisted index directory and load
# the previously built document-summary index from disk.
storage_context = StorageContext.from_defaults(persist_dir="index")
doc_summary_index = load_index_from_storage(storage_context)

# Embedding-based retriever over the document summaries (top 10 matches).
retriever = DocumentSummaryIndexEmbeddingRetriever(
    doc_summary_index,
    similarity_top_k=10,
)

# Response synthesizer that hierarchically summarizes retrieved content.
response_synthesizer = get_response_synthesizer(response_mode="tree_summarize")

# Query engine: retrieve with the summary retriever, then synthesize an answer.
query_engine = RetrieverQueryEngine(
    retriever=retriever,
    response_synthesizer=response_synthesizer,
)
def ecuador(question):
    """Answer *question* against the document-summary index.

    Returns a 3-tuple: (synthesized response, node id of the top-ranked
    result's source node, raw text of that top-ranked node).
    """
    response = query_engine.query(question)
    # NOTE(review): this performs a second retrieval on top of the one
    # query_engine already ran internally; kept for the source metadata.
    retrieved_nodes = retriever.retrieve(question)
    if not retrieved_nodes:
        # Guard: an empty result previously raised IndexError on [0].
        return response, "No source found", ""
    top = retrieved_nodes[0]
    source_name = top.node.source_node.node_id
    source_text = top.node.get_text()
    return response, source_name, source_text
async def handle_websocket(websocket):
    """Serve one websocket client.

    Each incoming message is treated as a question; the reply bundles the
    AI response with its source document name and text.
    """
    async for incoming in websocket:
        answer, doc_name, doc_text = ecuador(incoming)
        reply = f"{answer}\nSource Document: {doc_name}\nSource Text: {doc_text}"
        await websocket.send(reply)
async def main():
    """Start the websocket server on localhost:8080 and serve forever."""
    server = websockets.serve(handle_websocket, "localhost", 8080)
    async with server:
        # This Future never resolves, so the server runs until cancelled.
        await asyncio.Future()
# Gradio UI wired directly to ecuador(); live=True re-runs on every input change.
demo = gr.Interface(
    fn=ecuador,
    inputs=gr.Textbox(type = "text", label = "Ask a question"),
    outputs=[
        gr.Textbox(type = "text", label = "AI-generated response"),
        gr.Textbox(type = "text", label = "Source Document"),
        gr.Textbox(type = "text", label = "Source Text")
    ],
    live = True
)
# NOTE(review): launch() blocks the main thread by default, so the
# asyncio.run(main()) below (the websocket server) only starts after the
# Gradio app shuts down — confirm whether both were meant to run
# concurrently (e.g. launch(prevent_thread_lock=True)).
demo.launch()
asyncio.run(main())