File size: 2,483 Bytes
4b818e7
 
 
 
 
 
 
 
cf6157d
 
 
4b818e7
20bbd14
 
 
 
 
4b818e7
 
cf6157d
 
4b818e7
 
 
 
 
 
 
cf6157d
4b818e7
 
 
cf6157d
4b818e7
 
 
 
 
cf6157d
4b818e7
cf6157d
 
4b818e7
 
 
 
cf6157d
4b818e7
 
 
 
cf6157d
4b818e7
 
 
 
 
 
 
cf6157d
 
4b818e7
 
 
 
 
 
 
 
 
 
 
 
cf6157d
4b818e7
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
import os

import gradio as gr
from langchain_chroma import Chroma
from langchain_community.document_loaders import WebBaseLoader
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_openai import AzureChatOpenAI
from langchain_text_splitters import RecursiveCharacterTextSplitter


# Azure OpenAI chat model used to answer questions.
# Credentials and endpoint details are read from the environment so secrets
# never live in source control; the empty-string defaults reproduce the
# previous placeholder behavior when a variable is unset.
llm = AzureChatOpenAI(
    openai_api_type='azure',
    openai_api_version=os.environ.get('AZURE_OPENAI_API_VERSION', ''),
    openai_api_key=os.environ.get('AZURE_OPENAI_API_KEY', ''),
    azure_endpoint=os.environ.get('AZURE_OPENAI_ENDPOINT', ''),
    deployment_name=os.environ.get('AZURE_OPENAI_DEPLOYMENT', ''),
    temperature=0.5,  # moderate creativity; answers stay mostly grounded
)


# ---------------------------------------------------------------------------
# Build the retrieval index from the Vyomastra website pages.
# ---------------------------------------------------------------------------
web_loader = WebBaseLoader(
    web_paths=(
        "https://vyomastra.in/index.html",
        "https://vyomastra.in/about_us.html",
        "https://vyomastra.in/solutions.html",
    )
)

# Fetch the pages, then chop them into overlapping chunks so each embedded
# passage stays small enough for the embedding model to represent well.
raw_documents = web_loader.load()
splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=200)
chunks = splitter.split_documents(raw_documents)

# Embed every chunk and store the vectors in a Chroma collection.
vectorstore = Chroma.from_documents(
    documents=chunks,
    collection_name="embeds",
    embedding=HuggingFaceEmbeddings(model_name="all-MiniLM-L6-v2"),
)

# `retriever` is consumed further down by the RAG chain.
retriever = vectorstore.as_retriever()


# Prompt that injects the retrieved website context plus the user question.
_ANSWER_TEMPLATE = """You are a conversational question answering AI assistant named Astra.
You are created by AI developers from Vyomastra.
Your abilities: logical reasoning, complex mathematics computing, coding knowledge, common general knowledge from internet.
Use your abilities and knowledge from the context mentioned below to answer the questions truthfully:

{context}

Question: {question}
Answer:
"""

# LCEL pipeline: the raw question is routed both to the retriever (which
# fills {context}) and straight through (filling {question}); the populated
# prompt goes to the model and the reply is reduced to plain text.
rag_chain = (
    {"context": retriever, "question": RunnablePassthrough()}
    | ChatPromptTemplate.from_template(_ANSWER_TEMPLATE)
    | llm
    | StrOutputParser()
)

# Gradio callback: one question in, one answer string out.
def process_question(user_question):
    """Run *user_question* through the RAG chain and return the answer text."""
    return rag_chain.invoke(user_question)


# Set up the Gradio interface: a single text box in, the answer text out.
# NOTE: the previous description promised "the response time", which the app
# never computes or returns, and referred to "your document" although the
# index is built from a website — the text now matches actual behavior.
iface = gr.Interface(
    fn=process_question,
    inputs=gr.Textbox(lines=2, placeholder="Type your question here..."),
    outputs=gr.Textbox(),
    title="Website Knowledge Chat App",
    description="Ask any question about the indexed website content and get an answer.",
)

# Launch the interface (blocks until the server is stopped).
iface.launch()