jgs-430
Add Gradio app to run model inference
1d1af7e
raw
history blame contribute delete
556 Bytes
import gradio as gr
from transformers import pipeline
# Load your model from Hugging Face Hub
# Module-level singleton: downloaded/instantiated once at import time and
# reused by every call to predict() below. First run fetches the model
# weights over the network, so startup may be slow.
pipe = pipeline("text-generation", model="AgileGenAI/JIRA-story-point-increment-predictor")
# Define inference function
def predict(prompt):
    """Run text generation on *prompt* and return the generated text.

    Generates up to 100 new tokens. The pipeline returns a list of
    candidate dicts; only the first candidate's "generated_text" field
    is returned.
    """
    outputs = pipe(prompt, max_new_tokens=100)
    first_candidate = outputs[0]
    return first_candidate["generated_text"]
# Create a simple Gradio interface
# Wire the predict function into a minimal single-textbox UI.
demo = gr.Interface(
    fn=predict,
    title="JIRA Story Point Increment Predictor",
    inputs=gr.Textbox(label="Enter prompt"),
    outputs=gr.Textbox(label="Predicted output"),
)

# Start the Gradio server (Spaces picks this up as the app entry point).
demo.launch()