# Spaces:
# Runtime error
# Runtime error
"""Gradio Q&A app over a persisted Chroma vector store (LangChain + OpenAI)."""

# Standard library
import os

# Third-party
import chromadb
import gradio as gr
import langchain
import openai
from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.memory import ConversationBufferMemory
from langchain.vectorstores import Chroma

# Local
from config import CHROMA_SETTINGS, PERSIST_DIRECTORY
from init import create_vectorstore
# Lazily-built singleton chain: module-level so the conversation memory
# survives across calls instead of being recreated (and wiped) per request.
_qa_chain = None


def query(question):
    """Answer *question* against the persisted Chroma vector store.

    The retrieval chain (embeddings, Chroma store, LLM, memory) is built
    once on first call and reused afterwards. The original rebuilt
    ``ConversationBufferMemory`` on every call, which discarded the chat
    history each turn and made the "conversational" chain stateless.

    Parameters
    ----------
    question : str
        The user's question, as passed in by the Gradio text input.

    Returns
    -------
    str
        The answer text produced by the chain.
    """
    global _qa_chain
    if _qa_chain is None:
        embeddings = OpenAIEmbeddings()
        db = Chroma(
            persist_directory=PERSIST_DIRECTORY,
            embedding_function=embeddings,
            client_settings=CHROMA_SETTINGS,
        )
        memory = ConversationBufferMemory(
            memory_key="chat_history", return_messages=True
        )
        _qa_chain = ConversationalRetrievalChain.from_llm(
            ChatOpenAI(model_name="gpt-3.5-turbo"),
            db.as_retriever(),
            memory=memory,
        )
    result = _qa_chain({"question": question})
    return result["answer"]
# Single-field text UI wired to the retrieval-QA entry point.
demo = gr.Interface(fn=query, inputs="text", outputs="text")

if __name__ == "__main__":
    # Guarded so importing this module doesn't index documents or start a
    # server as a side effect. Populate/refresh the Chroma store first,
    # then serve queries.
    create_vectorstore()
    demo.launch()