nvinay1803 committed on
Commit
00a3fbc
·
verified ·
1 Parent(s): 0251cf8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -39
app.py CHANGED
@@ -1,42 +1,3 @@
1
- # import streamlit as st
2
- # from langchain import HuggingFaceHub, PromptTemplate, LLMChain
3
-
4
- # st.set_page_config(page_title="LangChain Streamlit Demo", page_icon=":robot:")
5
- # st.title("LangChain Streamlit Demo")
6
-
7
-
8
- # llm = HuggingFaceHub(
9
- # repo_id="tiiuae/falcon-7b-instruct",
10
- # model_kwargs={"temperature": 0.6, "max_new_tokens": 2000}
11
- # )
12
-
13
- # template = """
14
- # You are an artificial intelligence assistant. The assistant gives helpful, detailed, and polite answers to the user's questions.
15
-
16
- # {question}
17
- # """
18
-
19
- # prompt = PromptTemplate(template=template, input_variables=["question"])
20
- # llm_chain = LLMChain(prompt=prompt, llm=llm, verbose=True)
21
-
22
-
23
-
24
- # def get_text():
25
- # st.sidebar.header("User Input")
26
- # input_text = st.sidebar.text_input("You: ",key="input")
27
- # return input_text
28
-
29
-
30
- # user_input=get_text()
31
- # submit=st.sidebar.button('Generate')
32
-
33
- # if submit:
34
- # generated_ans = llm_chain.invoke({"question": user_input})
35
- # st.subheader("Answer: ")
36
- # st.write(generated_ans)
37
-
38
-
39
-
40
  import torch
41
  import streamlit as st
42
  from streamlit_chat import message
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import torch
2
  import streamlit as st
3
  from streamlit_chat import message