HamidBekam's picture
Create app.py
4f825a8
raw
history blame contribute delete
795 Bytes
import gradio as gr
from transformers import pipeline, AutoModelForCausalLM, AutoTokenizer
# --- Model setup -------------------------------------------------------
# Pull the GPT-2 checkpoint from the Hugging Face Hub: the tokenizer maps
# text to token ids, the causal-LM head produces continuations.
model_name = "gpt2"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

# Wrap both in a ready-to-call text-generation pipeline used by the UI below.
generator = pipeline(
    'text-generation',
    model=model,
    tokenizer=tokenizer,
)
# Define Gradio interface
def generate_text(prompt):
    """Return a GPT-2 continuation of *prompt*.

    Args:
        prompt: Seed text the model continues from.

    Returns:
        The generated string (the prompt followed by the continuation).
    """
    # NOTE: max_length counts the prompt's tokens too, so long prompts
    # leave little room for new text — TODO consider max_new_tokens.
    outputs = generator(prompt, max_length=50)
    # The pipeline returns a list of candidate dicts; take the first.
    return outputs[0]['generated_text']
# --- Web UI ------------------------------------------------------------
# Single text box in, single text box out, wired to the generator above.
iface = gr.Interface(
    fn=generate_text,
    inputs="text",
    outputs="text",
    title="Text Generation with Hugging Face and Gradio",
    description="Enter prompt to generate text.",
)

# Start the Gradio server (blocks until shut down).
iface.launch()