Manith Marapperuma committed on
Commit
ffe9f2c
·
verified ·
1 Parent(s): 333c23e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -25
app.py CHANGED
@@ -1,30 +1,13 @@
1
- import os
2
  import streamlit as st
3
  from transformers import pipeline
4
 
5
- # Get the Hugging Face API token from the environment variable
6
- HF_API_TOKEN = os.getenv("HF_API_TOKEN")
7
 
8
- if not HF_API_TOKEN:
9
- st.error("Hugging Face API token not found. Please set the HF_API_TOKEN environment variable.")
10
- else:
11
- # Initialize the pipeline with the Mistral model and your API token
12
- generator = pipeline(model="bigscience/Mistral", model_kwargs={"use_auth_token": HF_API_TOKEN})
13
 
14
- # Streamlit app setup
15
- st.title('Mistral LLM Explorer with Transformers')
16
- st.write('Enter your prompt below to interact with the Mistral model.')
17
-
18
- # User input
19
- user_input = st.text_area("Prompt:")
20
-
21
- # When the user presses the 'Submit' button
22
- if st.button('Submit'):
23
- if user_input:
24
- with st.spinner('Generating response...'):
25
- # Generate response from the Mistral model
26
- results = generator(user_input, max_length=100, clean_up_tokenization_spaces=True)
27
- # Display the generated text
28
- st.text_area("Response:", value=results[0]['generated_text'], height=300)
29
- else:
30
- st.warning('Please enter a prompt.')
 
 
import streamlit as st
from transformers import pipeline


# Load the chatbot model once and reuse it across Streamlit reruns.
# Without caching, the model would be re-instantiated on every user
# interaction, since Streamlit re-executes the whole script each time.
#
# NOTE(review): the original used pipeline("conversational"), which expects a
# transformers.Conversation object and returns one — indexing its output with
# [0]['generated_text'] raises at runtime. A "text-generation" pipeline
# returns the [{'generated_text': ...}] shape this script actually consumes.
# TODO(review): the default text-generation checkpoint is gpt2, not Mistral;
# pass model="mistralai/Mistral-7B-Instruct-v0.2" (requires auth + suitable
# hardware) to match the app title.
@st.cache_resource
def load_chatbot():
    """Build and cache the text-generation pipeline."""
    return pipeline("text-generation")


chatbot = load_chatbot()

# Streamlit UI
st.title("Mistral Chatbot")

user_input = st.text_input("You: ", "")
if user_input:
    # text-generation returns a list of dicts: [{'generated_text': ...}]
    response = chatbot(user_input)[0]['generated_text']
    st.text_area("Mistral:", value=response, height=200)