granite / app.py
srisuriyas's picture
Update app.py
d7b61e4 verified
raw
history blame contribute delete
666 Bytes
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
# Instruction-tuned Granite 3.1 2B model on the Hugging Face Hub.
model_id = "ibm-granite/granite-3.1-2b-instruct"  # Hugging Face model ID

tokenizer = AutoTokenizer.from_pretrained(model_id)
# device_map="auto" shards/places weights on GPU when available (needs accelerate).
model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto")

pipe = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    # max_new_tokens bounds only the completion. The original max_length=512
    # counted the prompt as well, so a long RAG prompt could leave little or
    # no budget for the answer.
    max_new_tokens=512,
    # temperature only takes effect when sampling is enabled; without
    # do_sample=True decoding is greedy and temperature=0.7 was silently
    # ignored (transformers emits a warning and drops it).
    do_sample=True,
    temperature=0.7,
)
def generate_answer(prompt: str) -> str:
    """Generate a completion for *prompt* and return only the new text.

    return_full_text=False stops the text-generation pipeline from echoing
    the prompt back at the start of "generated_text" (the default includes
    it), which otherwise duplicates the user's question in the UI output.
    """
    result = pipe(prompt, return_full_text=False)[0]["generated_text"]
    return result.strip()
# Wire the generator into a minimal Gradio text-in / text-out interface
# and start the web server.
app_title = "Granite 3.1 2B Instruct - RAG Answering"
demo = gr.Interface(
    generate_answer,  # fn
    "text",           # inputs: single textbox
    "text",           # outputs: single textbox
    title=app_title,
)
demo.launch()