|
|
import os
import subprocess
import sys

# NOTE(review): installing dependencies at runtime is fragile — a
# requirements.txt is the usual fix. The self-installing behavior is kept,
# but routed through the *current* interpreter (`sys.executable -m pip`)
# with an argument list and no shell, instead of os.system("pip install ..."),
# which can hit a different Python on PATH and is quoting/injection-prone.
# check=False mirrors os.system's original ignore-failures semantics.
for _pkgs in (
    ["transformers"],
    ["torch", "torchvision"],
    ["sentencepiece"],
    ["streamlit"],
):
    subprocess.run([sys.executable, "-m", "pip", "install", *_pkgs], check=False)
|
|
|
|
|
import streamlit as st |
|
|
from transformers import T5ForConditionalGeneration, T5Tokenizer |
|
|
|
|
|
|
|
|
@st.cache_resource
def _load_t5():
    """Load and cache the t5-base model and tokenizer.

    Streamlit re-runs the entire script on every user interaction, so
    without caching the multi-hundred-MB checkpoint would be reloaded
    on each keystroke/submit. ``st.cache_resource`` keeps one shared
    instance alive across reruns and sessions.

    Returns:
        tuple: (T5ForConditionalGeneration, T5Tokenizer)
    """
    model = T5ForConditionalGeneration.from_pretrained("t5-base")
    tokenizer = T5Tokenizer.from_pretrained("t5-base")
    return model, tokenizer


# Module-level names preserved so the rest of the script is unchanged.
model, tokenizer = _load_t5()
|
|
|
|
|
|
|
|
# --- Page chrome: title, description, and the single text-entry widget ---
APP_TITLE = "Hugging Face T5-based Chatbot"
APP_BLURB = "This chatbot can answer various questions and perform math calculations."
PROMPT_LABEL = "Ask a question or type a math calculation:"

st.title(APP_TITLE)
st.write(APP_BLURB)

# Free-text prompt; empty string until the user types something.
user_input = st.text_input(PROMPT_LABEL)
|
|
|
|
|
|
|
|
# Run inference only once the user has entered a non-empty prompt.
if user_input:
    # NOTE(review): the "! " prefix looks like a typo — T5 normally expects a
    # task prefix such as "question: "; confirm the intended prompt format.
    prompt_ids = tokenizer.encode("! " + user_input, return_tensors="pt")

    # Generation can take a while on CPU; show progress feedback meanwhile.
    with st.spinner("Generating response..."):
        generated = model.generate(prompt_ids)

    # Decode the first (and only) sequence, dropping pad/eos markers.
    answer = tokenizer.decode(generated[0], skip_special_tokens=True)

    st.info("Response:")
    st.success(answer)