# TraderFyles AI Assistant — Gradio Space entry point (RAG over a persisted Chroma index).
# --- Environment & shared model setup ------------------------------------
# Standard library
import os

# Third-party
import gradio as gr
from dotenv import load_dotenv
from langchain.chains.question_answering import load_qa_chain
from langchain.prompts import PromptTemplate
from langchain_community.document_loaders import PyPDFLoader
from langchain_community.vectorstores import Chroma
from langchain_openai import ChatOpenAI, OpenAIEmbeddings
from langchain_text_splitters import CharacterTextSplitter

# Pull secrets from a local .env file so the API key never lives in code;
# this must run before os.getenv() below.
load_dotenv()
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")

# Shared clients: one embedding model (for retrieval) and one chat model
# (for answer generation), constructed once at import time.
embeddings = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY)
llm = ChatOpenAI(model="gpt-4-1106-preview", api_key=OPENAI_API_KEY)

# Directory where the pre-built Chroma index is persisted.
vectordb_path = "./vector_db"
def rag_bot(query):
    """Answer *query* with retrieval-augmented generation.

    Looks up the most similar documents in the persisted Chroma index, then
    runs a "stuff" QA chain that packs those documents into the prompt as
    context for the chat model.

    Args:
        query: The user's question, as a plain string.

    Returns:
        The model's answer text (the chain's ``"output_text"`` field).
    """
    print(f"Received query: {query}")
    template = """Please answer to human's input based on context. If possible, you should provide reference link with answer. The answer should be very politely, clear and short since it will be the response for client's query.
    Use the American English. If the input is not mentioned in context, output something like 'I don't know'.
    Context: {context}
    Human: {human_input}
    Your Response as Chatbot:"""
    prompt_s = PromptTemplate(
        input_variables=["human_input", "context"], template=template
    )
    # NOTE(review): the vector store is re-opened on every call; hoisting it
    # to module level would avoid reloading the index per query — confirm the
    # index is static before doing so.
    vectorstore = Chroma(
        persist_directory=os.path.join(vectordb_path), embedding_function=embeddings
    )
    docs = vectorstore.similarity_search(query)
    stuff_chain = load_qa_chain(llm, chain_type="stuff", prompt=prompt_s)
    # .invoke() replaces the deprecated Chain.__call__ API; its default
    # return_only_outputs=False matches the original call, so the result
    # dict still carries the inputs plus "output_text".
    output = stuff_chain.invoke({"input_documents": docs, "human_input": query})
    final_answer = output["output_text"]
    print(f"Final Answer ---> {final_answer}")
    return final_answer
def chat(query, chat_history):
    """Gradio ChatInterface callback: delegate to the RAG pipeline.

    ``chat_history`` is accepted only to satisfy the ChatInterface callback
    signature; each query is answered independently of prior turns.
    """
    return rag_bot(query)
if __name__ == "__main__":
    # Chat widget with custom avatars for the user and the bot.
    chat_window = gr.Chatbot(avatar_images=["user.jpg", "bot.png"], height=600)
    # Build the full chat UI around the RAG callback and serve it publicly.
    gr.ChatInterface(
        fn=chat,
        title="TraderFyles AI Assistant",
        multimodal=False,
        chatbot=chat_window,
    ).launch(debug=True, share=True)