gingun18 commited on
Commit
1c8a9a6
·
verified ·
1 Parent(s): 95daa4f

Upload SimpleChatbot.py

Browse files
Files changed (1) hide show
  1. SimpleChatbot.py +43 -0
SimpleChatbot.py ADDED
@@ -0,0 +1,43 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Q&A Chatbot
2
+ from langchain_community.llms import HuggingFaceEndpoint
3
+ from langchain.chains import LLMChain
4
+ from langchain.prompts import PromptTemplate
5
+
6
+ # from langchain.llms import OpenAI
7
+ # uncomment above line when I have credit in OpenAI
8
+ from dotenv import load_dotenv
9
+ load_dotenv() # take environment variables from .env
10
+
11
+ import streamlit as st
12
+ import os
13
+
14
+ ## Function to load AI model and get responses. Here I can incorporate prompt template also
15
+
16
def get_model_response(question,
                       repo_id="mistralai/Mistral-7B-Instruct-v0.2",
                       temperature=0.5,
                       max_length=128):
    """Send *question* to a hosted Hugging Face LLM and return the chain result.

    Parameters
    ----------
    question : str
        The user's question to be answered.
    repo_id : str, optional
        Hugging Face model repository to query (default: Mistral-7B-Instruct-v0.2).
    temperature : float, optional
        Sampling temperature forwarded to the endpoint (default: 0.5).
    max_length : int, optional
        Maximum generation length forwarded to the endpoint (default: 128).

    Returns
    -------
    dict
        The raw mapping returned by ``LLMChain.invoke`` — it echoes the input
        and carries the generated answer (typically under the ``"text"`` key).
    """
    # NOTE(review): the endpoint client is rebuilt on every call. That is fine
    # for a demo, but consider caching it (e.g. st.cache_resource) if this app
    # sees real traffic.
    llm = HuggingFaceEndpoint(
        repo_id=repo_id,
        max_length=max_length,
        temperature=temperature,
        token=os.getenv("HUGGINGFACEHUB_API_TOKEN"),
    )

    # llm = OpenAI(openai_api_key=os.getenv("OPENAI_API_KEY"), model_name="text-davinci-003", temperature=0.6, max_tokens=64)
    # uncomment above line and use it instead of HuggingfaceEndpoint when I have credit in OpenAI.

    # Simple Q&A prompt; the "Answer:" suffix nudges instruct models to reply
    # directly instead of continuing the question.
    template = """Question: {question}
Answer:"""
    prompt = PromptTemplate.from_template(template)
    llm_chain = LLMChain(prompt=prompt, llm=llm)
    response = llm_chain.invoke({"question": question})
    return response
28
+
29
+
30
## Initialize our StreamLit app
st.set_page_config(page_title="Simple Chatbot")

st.header("Langchain Application - Simple Chatbot")

# Renamed from `input` — that shadowed the Python builtin.
user_question = st.text_input("Input: ", key="input")

submit = st.button("Ask the question")

## If ask button is clicked
if submit:
    # Bug fix: the model was previously queried unconditionally on every
    # Streamlit rerun (even before the button was clicked, and even with an
    # empty question). Only call the endpoint on submit with a non-empty input.
    if user_question:
        response = get_model_response(user_question)
        st.subheader("The response is: ")
        st.write(response)
    else:
        st.warning("Please enter a question first.")