duatanzeel commited on
Commit
f5c85c8
·
verified ·
1 Parent(s): f43edf2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +24 -7
app.py CHANGED
@@ -1,25 +1,42 @@
1
import streamlit as st
from transformers import pipeline

# Pre-change version of the app: load a Hugging Face text-generation
# pipeline and answer Arduino questions through a small Streamlit UI.

st.title("πŸ€–πŸ“Ÿ Arduino Expert Chatbot")
st.markdown("Get help with Arduino code, circuit diagrams, and projects.")

# Cache the pipeline so the (large) model is constructed only once
# per Streamlit session, not on every rerun.
@st.cache_resource
def load_model():
    return pipeline("text-generation", model="mistralai/Mixtral-8x7B-Instruct-v0.1")

model = load_model()

# User input
query = st.text_area("Ask your Arduino question here πŸ‘‡", height=150)

if st.button("Get Answer"):
    if query:
        with st.spinner("Thinking... πŸ€–"):
            response = model(query, max_length=512, do_sample=True, temperature=0.7)
            st.success(response[0]['generated_text'])
    else:
        st.warning("Please enter a question about your Arduino project.")
25
 
 
1
import streamlit as st
from transformers import pipeline
from huggingface_hub import login
import os

# Read the Hugging Face access token from the environment instead of
# hard-coding it in source — a token committed to version control is
# effectively leaked. Set it with e.g.:  export HF_TOKEN=hf_xxx
# (or via Streamlit / HF Spaces secrets). The original placeholder is
# kept as the default so behavior is unchanged when the var is unset.
HF_TOKEN = os.getenv("HF_TOKEN", "your_huggingface_access_token_here")

# Authenticate with the Hugging Face Hub so gated models (Mixtral) can
# be downloaded by the pipeline below.
login(HF_TOKEN)
11
+
12
# --- Page header: title and short description of what the bot helps with ---
st.title("πŸ€–πŸ“Ÿ Arduino Expert Chatbot")
st.markdown("Get help with Arduino code, circuit diagrams, and projects.")
15
 
16
# Load the model securely, once per session (st.cache_resource memoises it).
@st.cache_resource
def load_model():
    """Return a text-generation pipeline.

    Tries the gated Mixtral model first (needs a valid HF token with
    access); on any failure — bad token, no gated access, download
    error — falls back to the public GPT-2 model so the app stays usable.
    """
    try:
        return pipeline(
            "text-generation",
            model="mistralai/Mixtral-8x7B-Instruct-v0.1",
            token=HF_TOKEN,
        )
    except Exception as e:
        # Surface the underlying cause instead of discarding it, so the
        # user can tell a bad token from a network/access failure.
        st.error(f"🚫 Failed to load the model ({e}). Using fallback model (GPT-2).")
        return pipeline("text-generation", model="gpt2")  # fallback public model
24
 
25
# Build the model a single time for the session.
model = load_model()

# User Input
query = st.text_area("Ask your Arduino question here πŸ‘‡", height=150)

# Generate Answer
if st.button("Get Answer"):
    if not query.strip():
        # Guard clause: blank / whitespace-only input — nothing to answer.
        st.warning("Please enter a question about your Arduino project.")
    else:
        with st.spinner("Thinking... πŸ€–"):
            try:
                answer = model(query, max_length=512, do_sample=True, temperature=0.7)
                st.success(answer[0]['generated_text'])
            except Exception as e:
                st.error(f"❌ Error generating response: {e}")
42