"""Streamlit chatbot backed by the Hugging Face T5 model.

Run with:
    streamlit run app.py

Install dependencies once, before running (NOT at runtime — the original
script shelled out to ``os.system("pip install ...")`` on every start,
which is slow, insecure, and re-executed on every Streamlit rerun):

    pip install streamlit transformers torch torchvision sentencepiece
"""

import streamlit as st
from transformers import T5ForConditionalGeneration, T5Tokenizer


@st.cache_resource
def load_model():
    """Load and cache the T5 model and tokenizer.

    Streamlit re-executes the whole script on every widget interaction;
    ``st.cache_resource`` ensures the model weights are loaded only once
    per server process instead of on each rerun.

    Returns:
        tuple: ``(model, tokenizer)`` for the ``t5-base`` checkpoint.
    """
    model = T5ForConditionalGeneration.from_pretrained("t5-base")
    tokenizer = T5Tokenizer.from_pretrained("t5-base")
    return model, tokenizer


model, tokenizer = load_model()

# Streamlit app title and description
st.title("Hugging Face T5-based Chatbot")
st.write("This chatbot can answer various questions and perform math calculations.")

# User input text box
user_input = st.text_input("Ask a question or type a math calculation:")

# Only generate once the user has typed something (empty string is falsy)
if user_input:
    # '!' prefix kept from the original script as its chatbot marker.
    # NOTE(review): T5 conventionally uses task prefixes like "question: ..." —
    # confirm this prefix actually produces useful generations.
    input_text = "! " + user_input

    # Encode the user input as token ids (PyTorch tensor batch of 1)
    input_ids = tokenizer.encode(input_text, return_tensors="pt")

    # Generate the response with default generation settings
    with st.spinner("Generating response..."):
        output = model.generate(input_ids)

    # Decode the first (only) sequence, dropping <pad>/<eos> special tokens
    response_text = tokenizer.decode(output[0], skip_special_tokens=True)
    st.info("Response:")
    st.success(response_text)