# grah / app.py — Gradio text-generation demo.
# (Provenance: uploaded by scp4950, commit 9ed466e, 652 bytes. The Hugging
# Face Hub page chrome that was scraped into this file has been converted
# to this comment so the module is valid Python.)
import gradio as gr
from transformers import pipeline

# Model to load. NOTE(review): 'meta-llama/Llama-2-7b' is a *gated* repo —
# loading requires accepted access on the Hugging Face Hub plus an auth
# token. Swap in an open checkpoint such as 'gpt2' or 'gpt2-medium' to run
# without credentials.
model_name = 'meta-llama/Llama-2-7b'

# Build the pipeline once at import time. The task is named explicitly
# instead of relying on auto-inference from the model's config.
generator = pipeline('text-generation', model=model_name)
def generate_text(prompt):
    """Generate a completion for *prompt* and return it as a string.

    Uses ``max_new_tokens`` rather than ``max_length``: ``max_length``
    counts the prompt's own tokens, so a long prompt could leave little
    or no room for generated text. With ``max_new_tokens`` the model
    always produces up to 100 fresh tokens.

    Args:
        prompt: Input text to continue.

    Returns:
        The generated text (the pipeline's first candidate, which
        includes the prompt by default for text-generation pipelines).
    """
    return generator(prompt, max_new_tokens=100)[0]['generated_text']
# Gradio interface setup.
iface = gr.Interface(
    fn=generate_text,
    inputs=gr.Textbox(),
    outputs=gr.Textbox(),
    title="Meta-Llama Chat",
    description="Enter a prompt to chat with the Meta-Llama model.",
    # NOTE: live=True was removed — it re-runs the function on every
    # keystroke, which is far too expensive for LLM inference. The model
    # now runs only when the user submits the prompt.
)

# Launch the Gradio interface.
iface.launch()