Manith Marapperuma committed on
Commit
ad4d8e7
·
verified ·
1 Parent(s): ffe9f2c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +23 -6
app.py CHANGED
@@ -1,13 +1,30 @@
1
  import streamlit as st
2
  from transformers import pipeline
3
 
4
- # Load the chatbot model
5
- chatbot = pipeline("conversational")
6
 
7
- # Streamlit UI
 
 
 
 
 
8
  st.title("Mistral Chatbot")
 
9
 
10
- user_input = st.text_input("You: ", "")
11
  if user_input:
12
- response = chatbot(user_input)[0]['generated_text']
13
- st.text_area("Mistral:", value=response, height=200)
 
 
 
 
 
 
 
 
 
 
 
 
 
1
  import streamlit as st
2
  from transformers import pipeline
3
 
4
+ # Load the Mistral-7B model using pipeline
5
+ pipe = pipeline("text-generation", model="mistralai/Mistral-7B-Instruct-v0.2")
6
 
7
+ def generate_response(prompt):
8
+ """Generates a response using the Mistral-7B model."""
9
+ response = pipe(prompt, max_length=1000, num_return_sequences=1)[0]["generated_text"]
10
+ return response.strip()
11
+
12
+ # Streamlit app layout
13
  st.title("Mistral Chatbot")
14
+ user_input = st.text_input("Ask me anything!")
15
 
 
16
  if user_input:
17
+ response = generate_response(user_input)
18
+ st.write(f"Mistral: {response}")
19
+
20
+ # Deployment to Hugging Face Spaces (instructions included)
21
+ # 1. Create a Hugging Face account (if you don't have one)
22
+ # 2. Create a new Space from your account
23
+ # 3. Push your code to a Git repository (e.g., GitHub)
24
+ # 4. In your Space settings, connect your Git repository
25
+ # 5. Under "Model", select the Mistral-7B model you're using
26
+ # 6. Under "Environment", create a new environment with Python 3.7+
27
+ # 7. Under "Requirements", add "streamlit transformers" (separate lines)
28
+ # 8. Under "Start script", enter "streamlit run app.py" (replace app.py with your filename)
29
+ # 9. Deploy your Space!
30
+