# NOTE(review): Hugging Face Spaces build-log residue was pasted into this file
# (originally: "Spaces:" / "Build error" / "Build error"). Kept here as a
# comment so the module parses; the build error came from the markdown-table
# pipes that wrapped every code line below.
import streamlit as st
from transformers import AutoTokenizer
from transformers import AutoModelForSeq2SeqLM

# Hugging Face Hub repository holding the fine-tuned seq2seq checkpoint.
model_checkpoint = "abdulllah01/checkpoints"  # Replace with your actual checkpoint


@st.cache_resource
def _load_model(checkpoint: str):
    """Load and cache the tokenizer and model from the Hub.

    Streamlit reruns this whole script on every widget interaction;
    without caching, the model would be re-downloaded/re-loaded each time.
    """
    tokenizer = AutoTokenizer.from_pretrained(checkpoint)
    model = AutoModelForSeq2SeqLM.from_pretrained(checkpoint)
    return tokenizer, model


tokenizer, model = _load_model(model_checkpoint)

# Streamlit UI setup
st.title("Tech Support Bot")
st.write("Enter a context and ask a question related to Tech to get your problems solved!")

# Text area for context input
context = st.text_area("Context:", "")
# Text input for the question
question = st.text_input("Question:", "")

# Generate the answer only on demand, and only after validating the inputs —
# the original encoded the prompt at top level on every rerun, before the
# empty-input check.
if st.button("Get Answer"):
    if context and question:
        # T5-style prompt format the checkpoint was fine-tuned on.
        input_text = f"question: {question} context: {context}"
        input_ids = tokenizer.encode(input_text, return_tensors="pt")
        output_ids = model.generate(input_ids)
        answer = tokenizer.decode(output_ids[0], skip_special_tokens=True)
        st.write("**Answer:**", answer)
    else:
        st.write("Please enter both context and question.")