Arduino-Master / app.py
duatanzeel's picture
Update app.py
3ad1da4 verified
raw
history blame
1.26 kB
import streamlit as st
from transformers import pipeline
from huggingface_hub import login
import os
# Hub token is optional: public models load anonymously, but login(None)
# raises, so only authenticate when the env var is actually set.
HF_TOKEN = os.getenv("HUGGINGFACEHUB_API_TOKEN")
if HF_TOKEN:
    login(HF_TOKEN)

st.title("πŸ€–πŸ“Ÿ Arduino Expert Chatbot")
st.markdown("Ask anything about Arduino: code, circuits, projects!")
@st.cache_resource
def load_model():
    """Load the chat model once per server process (cached by Streamlit).

    Returns a transformers text-generation pipeline for
    tiiuae/falcon-7b-instruct (NOTE: ~7B parameters — this needs a large
    GPU/RAM budget, it is not lightweight).  If loading fails for any
    reason (no network, missing weights, OOM), the error is logged, shown
    in the UI, and a small gpt2 pipeline is returned so the app stays up.
    """
    try:
        # token may be None here; transformers treats that as anonymous access.
        return pipeline(
            "text-generation",
            model="tiiuae/falcon-7b-instruct",
            token=HF_TOKEN,
        )
    except Exception as e:
        # Log server-side for debugging and surface a short message to the user,
        # then degrade gracefully instead of crashing the whole app.
        print(f"Model load failed: {e}")
        st.error("❌ Failed to load model.")
        return pipeline("text-generation", model="gpt2")
model = load_model()

# Single-turn Q&A UI: one text area, one button, answer rendered below.
query = st.text_area("Ask your Arduino question here πŸ‘‡", height=150)

if st.button("Get Answer"):
    if query.strip():
        with st.spinner("Thinking... πŸ€–"):
            try:
                # Instruction-style wrapper around the raw question.
                prompt = f"<s>[INST] {query} [/INST]"
                # return_full_text=False: by default the text-generation
                # pipeline echoes the prompt (including the [INST] markers)
                # at the start of generated_text; we only want the answer.
                response = model(
                    prompt,
                    max_new_tokens=512,
                    do_sample=True,
                    temperature=0.7,
                    return_full_text=False,
                )
                st.success(response[0]['generated_text'])
            except Exception as e:
                st.error(f"❌ Error generating response: {e}")
    else:
        st.warning("Please enter a valid question.")