vkthakur88 committed on
Commit
1ce8e5f
·
1 Parent(s): 466efa6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +25 -59
app.py CHANGED
@@ -1,73 +1,39 @@
1
- import langchain as lc
2
- import langchain.llms as lc_llms
3
- import langchain.embeddings as lc_embed
4
- import langchain.document_loaders as lcdl
5
- import langchain.text_splitter as lcts
6
- import langchain.vectorstores as lcvs
7
- import langchain.chains as lcchains
8
- import langchain.hub as lchub
9
-
10
  import os
11
- import gradio as gr
12
-
13
- hf_google_flan_t5_xxl = lc.HuggingFaceHub(
14
- repo_id = 'google/flan-t5-xxl'
15
- )
16
-
17
- hf_gpt2_xl = lc.HuggingFaceHub(repo_id = 'gpt2-xl')
18
-
19
- sentence_embeddings = lc_embed.HuggingFaceInferenceAPIEmbeddings(
20
- api_key = os.environ['HUGGINGFACEHUB_API_TOKEN'],
21
- model_name = 'sentence-transformers/all-MiniLM-L6-v2'
22
- )
23
 
24
- text_loader = lcdl.PyPDFLoader('Story.pdf')
25
-
26
- documents = text_loader.load()
27
-
28
- text_splitter = lcts.RecursiveCharacterTextSplitter(
29
- chunk_size = 1000,
30
- chunk_overlap = 200,
31
  )
32
 
33
- splits = text_splitter.split_documents(documents)
34
-
35
- db = lcvs.Chroma.from_documents(splits, sentence_embeddings)
36
 
37
- retriever = db.as_retriever(search_type="mmr")
38
 
39
- template = """
40
- You are an AI chatbot. Complete the conversation based on following context:
41
 
42
- -----------------------------
43
- Context : {context}
44
- -----------------------------
45
- Conversation History:
46
 
47
- {question}
48
- """
49
-
50
- qa_prompt = lc.PromptTemplate.from_template(template)
51
-
52
- qa_chain = lcchains.RetrievalQA.from_chain_type(
53
- llm = hf_google_flan_t5_xxl,
54
- retriever = retriever,
55
- return_source_documents = True,
56
- chain_type_kwargs={"prompt": qa_prompt}
57
- )
58
 
59
- def rag_interface(ques, history):
60
- prev_chat = " ".join([f"Human: {q} \nChatbot: {a} \n" for q, a in history])
61
- current_ques = f'\nHuman: {ques} \nChatbot:'
62
 
63
- query = prev_chat + current_ques
64
 
65
- result = qa_chain({
66
- 'query' : query
67
- })
68
 
69
- return result['result']
 
 
 
70
 
71
- demo = gr.ChatInterface(fn=rag_interface)
72
 
73
- demo.launch()
 
1
+ import huggingface_hub as hf_hub
2
+ import gradio as gr
 
 
 
 
 
 
 
3
  import os
 
 
 
 
 
 
 
 
 
 
 
 
4
 
5
# Hosted-inference client for the instruction-tuned Flan-T5 XXL model.
# NOTE(review): reads HUGGINGFACEHUB_API_TOKEN from the environment at import
# time and raises KeyError if it is unset — fails loudly before serving starts.
client = hf_hub.InferenceClient(
    model = 'google/flan-t5-xxl',
    token = os.environ['HUGGINGFACEHUB_API_TOKEN']
)
9
 
10
def create_prompt(question, history):
    """Render the prior conversation turns and the new question into one prompt.

    Args:
        question: The user's latest message.
        history: Iterable of (user_message, bot_message) pairs from earlier turns.

    Returns:
        The fully formatted prompt string, ending with "Chatbot :" so the
        model completes the assistant's next reply.
    """
    prompt_layout = '''You are a good chatbot that talks with human.

{history}

Human : {question}
Chatbot :'''

    turn_layout = '''
Human : {query}
Chatbot : {response}
'''

    # Assemble the transcript of earlier turns in order.
    transcript = ''.join(
        turn_layout.format(query = user_msg, response = bot_msg)
        for user_msg, bot_msg in history
    )

    return prompt_layout.format(history = transcript, question = question)
 
 
31
 
32
def chat_interface(question, history):
    """Gradio ChatInterface callback: format the conversation and query the model.

    Args:
        question: The user's latest message.
        history: Prior (user, bot) message pairs supplied by Gradio.

    Returns:
        The generated reply text from the hosted model.
    """
    # Build the full prompt, then hand it to the hosted text-generation endpoint.
    return client.text_generation(create_prompt(question, history))
36
 
37
# Wire the callback into Gradio's chat UI and start the web server.
# NOTE(review): runs at import time — importing this module launches the app.
chat_app = gr.ChatInterface(fn = chat_interface)

chat_app.launch()