# NOTE(review): removed non-Python residue that preceded the code (a
# "File size" banner, git commit hashes 79ed267/539d927/4d4469b, and a
# 1-64 line-number gutter — pager/diff artifacts accidentally pasted in).
# It was not part of the program and made the module unimportable.
from dotenv import load_dotenv
import os

# Load variables from a local .env file so the key lookup below succeeds.
load_dotenv()
OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")  # None if not configured

import gradio as gr
from langchain_community.document_loaders import PyPDFLoader  # NOTE(review): unused here — presumably used by the ingestion script; confirm before removing
from langchain_text_splitters import CharacterTextSplitter  # NOTE(review): unused here — same remark as above
from langchain_openai import OpenAIEmbeddings
from langchain_community.vectorstores import Chroma
from langchain_openai import ChatOpenAI
from langchain.prompts import PromptTemplate
from langchain.chains.question_answering import load_qa_chain

# Shared clients: embedding model for retrieval, chat model for generation.
embeddings = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY)
llm = ChatOpenAI(model="gpt-4-1106-preview", api_key=OPENAI_API_KEY)

# Directory where the Chroma vector store was persisted (built elsewhere).
vectordb_path = "./vector_db"

def rag_bot(query):
    """Answer a user query with retrieval-augmented generation.

    Retrieves the chunks most similar to *query* from the persisted Chroma
    store, then runs a "stuff"-type QA chain that packs those chunks into
    the prompt's ``{context}`` slot alongside the query.

    Args:
        query: The user's natural-language question.

    Returns:
        The LLM-generated answer string.
    """
    print(f"Received query: {query}")

    template = """Please answer to human's input based on context. If possible, you should provide reference link with answer. The answer should be very politely, clear and short since it will be the response for client's query.
    Use the American English. If the input is not mentioned in context, output something like 'I don't know'.
    Context: {context}
    Human: {human_input}
    Your Response as Chatbot:"""

    prompt_s = PromptTemplate(
        input_variables=["human_input", "context"], template=template
    )

    # Fix: the original re-opened the Chroma store from disk on every call.
    # Build it once and cache it on the function object — per-query behavior
    # is identical. (Also dropped the no-op single-argument os.path.join.)
    vectorstore = getattr(rag_bot, "_vectorstore", None)
    if vectorstore is None:
        vectorstore = Chroma(
            persist_directory=vectordb_path, embedding_function=embeddings
        )
        rag_bot._vectorstore = vectorstore

    # Top-k most similar chunks (library default k) for the query.
    docs = vectorstore.similarity_search(query)

    stuff_chain = load_qa_chain(llm, chain_type="stuff", prompt=prompt_s)

    output = stuff_chain(
        {"input_documents": docs, "human_input": query}, return_only_outputs=False
    )

    final_answer = output["output_text"]
    print(f"Final Answer ---> {final_answer}")

    return final_answer

def chat(query, chat_history):
    """Gradio ChatInterface callback: answer *query* via the RAG pipeline.

    *chat_history* is accepted to satisfy the callback signature but is
    ignored — every query is answered statelessly.
    """
    return rag_bot(query)

if __name__ == "__main__":
    # Assemble the chat UI: custom user/bot avatars, fixed height,
    # text-only input; then serve with a public share link for debugging.
    ui = gr.ChatInterface(
        fn=chat,
        chatbot=gr.Chatbot(avatar_images=["user.jpg", "bot.png"], height=600),
        multimodal=False,
        title="TraderFyles AI Assistant",
    )
    ui.launch(debug=True, share=True)