import os

import streamlit as st
from langchain.prompts import PromptTemplate
from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline

# Gated repository: authenticate with a Hugging Face token (HF_TOKEN /
# HUGGING_FACE_HUB_TOKEN env var in the Space, or `huggingface-cli login`).
# NOTE(review): the transformers-format Llama-2 weights are usually published
# under the "-hf" suffixed repo ("meta-llama/Llama-2-7b-chat-hf"); confirm
# this id actually loads with AutoModelForCausalLM.
MODEL_ID = "meta-llama/Llama-2-7b-chat"


@st.cache_resource(show_spinner="Loading Llama-2 model...")
def _load_generator():
    """Build the text-generation pipeline once per process.

    Streamlit reruns the whole script on every widget interaction; without
    `st.cache_resource` the multi-GB model would be re-downloaded/re-loaded
    on every button click.
    """
    # `token=True` is the current replacement for the deprecated
    # `use_auth_token=True` keyword.
    tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, token=True)
    model = AutoModelForCausalLM.from_pretrained(
        MODEL_ID,
        trust_remote_code=True,
        token=True,
    )
    return pipeline("text-generation", model=model, tokenizer=tokenizer)


def getLLMResponse(form_input, email_sender, email_recipient, email_style):
    """Generate an email for the given topic, sender, recipient, and style.

    Args:
        form_input: Topic / body description entered by the user.
        email_sender: Sender name substituted into the prompt.
        email_recipient: Recipient name substituted into the prompt.
        email_style: Writing style label (e.g. "Formal", "Neutral").

    Returns:
        The model's generated email text, with the echoed prompt removed.
    """
    # Template for building the PROMPT
    template = """ Write an email with {style} style and includes topic: {email_topic}.\n\nSender: {sender}\nRecipient: {recipient} \n\nEmail Text: """

    # Creating the final PROMPT
    prompt = PromptTemplate(
        input_variables=["style", "email_topic", "sender", "recipient"],
        template=template,
    )
    prompt_text = prompt.format(
        email_topic=form_input,
        sender=email_sender,
        recipient=email_recipient,
        style=email_style,
    )

    generator = _load_generator()
    # max_new_tokens bounds only the *generated* text; the previous
    # max_length=256 counted the prompt tokens too and could truncate the
    # reply to almost nothing for long topics. do_sample=True is required
    # for `temperature` to have any effect.
    response = generator(
        prompt_text,
        max_new_tokens=256,
        do_sample=True,
        temperature=0.7,
    )

    generated = response[0]["generated_text"]
    # The text-generation pipeline echoes the prompt at the start of its
    # output; return only the newly generated email body.
    if generated.startswith(prompt_text):
        return generated[len(prompt_text):]
    return generated


# ---------------- Streamlit application setup ----------------
st.set_page_config(
    page_title="Generate Emails",
    page_icon="📧",
    layout="centered",
    initial_sidebar_state="collapsed",
)

st.header("Generate Emails 📧")

form_input = st.text_area("Enter the email topic", height=275)

# Three columns to receive sender / recipient / style inputs from the user.
col1, col2, col3 = st.columns([10, 10, 5])
with col1:
    email_sender = st.text_input("Sender Name")
with col2:
    email_recipient = st.text_input("Recipient Name")
with col3:
    email_style = st.selectbox(
        "Writing Style",
        ("Formal", "Appreciating", "Not Satisfied", "Neutral"),
        index=0,
    )

submit = st.button("Generate")

# When the 'Generate' button is clicked, run the model and show the result.
if submit:
    if not form_input.strip():
        # Guard: an empty topic would just make the model complete the
        # bare template.
        st.warning("Please enter an email topic first.")
    else:
        with st.spinner("Generating email..."):
            response = getLLMResponse(
                form_input, email_sender, email_recipient, email_style
            )
        st.write(response)