"""FuzzyLab chat application.

Streamlit front-end that loads a Hugging Face text-generation pipeline
(configured via config.json) and answers user messages, optionally
echoing back metadata about an uploaded attachment.
"""

import json
import os

import streamlit as st
from PIL import Image
from transformers import pipeline

# Load the configuration from config.json.
# Expected keys: hf_api_token, space_name, space_link, model_name,
# allowed_file_types, max_length.
with open('config.json') as f:
    config = json.load(f)

# Set the authentication token so transformers can access gated/private models.
os.environ["HF_API_TOKEN"] = config["hf_api_token"]

# Hugging Face Space metadata (read from config; kept for external use).
SPACE_NAME = config["space_name"]
SPACE_LINK = config["space_link"]


@st.cache_resource
def _load_model(name: str):
    """Build the text-generation pipeline once and reuse it across reruns.

    Streamlit re-executes the whole script on every widget interaction;
    without caching, the model would be re-downloaded/re-loaded each time.
    """
    return pipeline("text-generation", model=name)


# Load the model (cached — constructed only on the first run).
model_name = config["model_name"]
model = _load_model(model_name)

# Create the Streamlit app chrome.
st.title("FuzzyLab Chat Application")
st.write("Welcome to FuzzyLab's chat application!")

# Text input for the user's message.
user_input = st.text_input("Enter your message:")

# File uploader for attachments; allowed types come from config.
attachment = st.file_uploader("Upload attachment:", type=config["allowed_file_types"])

# Button that triggers the model's response.
if st.button("Send"):
    # If an attachment was uploaded, display its name and MIME type.
    if attachment is not None:
        attachment_name = attachment.name
        attachment_type = attachment.type
        st.write(f"Attachment: {attachment_name} ({attachment_type})")

    # Only query the model when the user actually typed something;
    # generating from an empty prompt produces meaningless output.
    if user_input:
        response = model(
            user_input,
            max_length=config["max_length"],
            return_full_text=False,
        )[0]["generated_text"]
        st.write("Model Response:")
        st.write(response)
    else:
        st.write("Please enter a message before sending.")