Upload app.py
Browse files
app.py
CHANGED
|
@@ -1,21 +1,20 @@
|
|
| 1 |
from langchain import HuggingFaceHub
|
| 2 |
-
|
| 3 |
from dotenv import load_dotenv
|
|
|
|
| 4 |
import streamlit as st
|
| 5 |
|
| 6 |
-
# Load environment variables (e.g. API credentials) from a local .env file.
load_dotenv()

# LLM handle: google/flan-t5-large served through the HuggingFace Hub.
llm_huggingface = HuggingFaceHub(
    repo_id="google/flan-t5-large",
    model_kwargs={"temperature": 0.0, "max_length": 64},
)

# Streamlit app
st.set_page_config(page_title="Chatbot")
st.header('Langchain Application')
|
| 17 |
-
|
| 18 |
-
# Function to load HuggingFace model and get response
|
| 19 |
def get_huggingface_response(question):
    """Send *question* to the HuggingFace LLM and return its reply unchanged."""
    return llm_huggingface(question)
|
|
@@ -26,10 +25,9 @@ user_input = st.text_input("Input: ", key="input")
|
|
| 26 |
# Streamlit button
|
| 27 |
submit = st.button('Generate')
|
| 28 |
|
| 29 |
-
|
| 30 |
# On button press, run the model on the user's input and display the result.
if submit:
    answer = get_huggingface_response(user_input)
    st.subheader("The response is ")
    st.write(answer)
|
|
|
|
| 1 |
from langchain import HuggingFaceHub
|
| 2 |
+
|
| 3 |
from dotenv import load_dotenv
|
| 4 |
+
|
| 5 |
import streamlit as st
|
| 6 |
|
| 7 |
+
# Load environment variables from a local .env file.
load_dotenv()

# HuggingFace model: flan-t5-large (temperature 0.0, max_length 64).
llm_huggingface = HuggingFaceHub(
    repo_id="google/flan-t5-large",
    model_kwargs={"temperature": 0.0, "max_length": 64},
)

# Streamlit app
st.set_page_config(page_title="Chatbot")
st.header('Langchain Application')
|
| 16 |
|
| 17 |
+
# Function to get response
|
|
|
|
|
|
|
| 18 |
def get_huggingface_response(question):
    """Query the HuggingFace model with the user's question.

    Returns whatever the LLM call produces, unmodified.
    """
    reply = llm_huggingface(question)
    return reply
|
|
|
|
| 25 |
# Streamlit button
|
| 26 |
submit = st.button('Generate')
|
| 27 |
|
| 28 |
+
|
| 29 |
# When the Generate button is pressed, run the model and show its answer.
if submit:
    model_output = get_huggingface_response(user_input)
    st.subheader("The response is ")
    st.write(model_output)
|