gpttest / app.py
AdityaQB's picture
Create app.py
4b84f2a verified
raw
history blame
641 Bytes
from functools import lru_cache

import gradio as gr
from transformers import AutoTokenizer, TFAutoModelForCausalLM
@lru_cache(maxsize=1)
def _load_gpt2():
    """Load and cache the GPT-2 tokenizer and TF model.

    Loading from the hub is expensive; the original code repeated it on
    every request.  lru_cache makes it a one-time cost per process.
    """
    tokenizer = AutoTokenizer.from_pretrained("gpt2")
    model = TFAutoModelForCausalLM.from_pretrained("gpt2")
    # GPT-2 has no pad token; reuse EOS so generate() can pad.
    model.config.pad_token_id = model.config.eos_token_id
    return tokenizer, model


def get_response(name):
    """Return a short GPT-2 continuation of the user-supplied prompt.

    Bug fix: the original ignored *name* and always completed the
    hard-coded prompt "TensorFlow is", so the UI textbox had no effect.
    An empty/blank input still falls back to that prompt, preserving the
    old behavior for the empty case.

    Args:
        name: Prompt text from the Gradio textbox.

    Returns:
        The decoded prompt plus up to 20 sampled continuation tokens.
    """
    prompt = name or "TensorFlow is"  # keep legacy output for empty input
    tokenizer, model = _load_gpt2()
    inputs = tokenizer([prompt], return_tensors="tf")
    generated = model.generate(
        **inputs,
        do_sample=True,
        seed=(42, 0),  # TF stateless-seed pair -> reproducible sampling
        max_new_tokens=20,
        temperature=0.1,  # near-greedy; low randomness
    )
    return tokenizer.decode(generated[0])
# Wire the generator into a minimal text-in / text-out Gradio UI and serve it.
iface = gr.Interface(
    fn=get_response,
    inputs="text",
    outputs="text",
)
iface.launch()