# bloomscience5 / app.py
# NOTE: the lines below are Hugging Face Hub page chrome captured by the
# scrape ("Sunghokim's picture / Update app.py / 6494f47 verified / raw /
# history blame contribute delete / 993 Bytes") — kept as comments so the
# file is valid Python.
# Third-party dependencies: gradio for the web UI, transformers for the model.
import gradio as gr
from transformers import BloomForCausalLM, BloomTokenizerFast

# Load the pre-trained BLOOM model and tokenizer once at module import time.
# BUG FIX: transformers only ships a fast tokenizer for BLOOM
# (`BloomTokenizerFast`); the original `BloomTokenizer` name does not exist
# and raised ImportError before the app could start.
# NOTE(review): "bigscience/bloom" is the full 176B-parameter checkpoint
# (hundreds of GB) — verify the host can actually load it; a smaller
# variant such as "bigscience/bloom-560m" is the usual choice for demos.
model = BloomForCausalLM.from_pretrained("bigscience/bloom")
tokenizer = BloomTokenizerFast.from_pretrained("bigscience/bloom")
def generate_text(prompt):
    """Generate a continuation of *prompt* with the BLOOM model.

    Args:
        prompt: Input text to continue.

    Returns:
        The decoded output sequence (prompt included), capped at a total
        length of 100 tokens, with special tokens stripped.
    """
    # Encode the prompt into PyTorch tensors.
    encoded = tokenizer(prompt, return_tensors="pt")
    # Run generation, passing the attention mask through explicitly.
    generated_ids = model.generate(
        encoded["input_ids"],
        attention_mask=encoded["attention_mask"],
        max_length=100,
    )
    # Decode the first (and only) returned sequence back into plain text.
    return tokenizer.decode(generated_ids[0], skip_special_tokens=True)
# Create a Gradio interface for text generation
gr_interface = gr.Interface(
fn=generate_text,
inputs=gr.Textbox(label="Input Text"),
outputs=gr.Textbox(label="Generated Text"),
title="BLOOM Text Generation",
description="Generate text using the BigScience/BLOOM model"
)
# Launch the Gradio interface
gr_interface.launch()