|
|
import gradio as gr |
|
|
from transformers import AutoModelForCausalLM, AutoTokenizer |
|
|
|
|
|
|
|
|
tokenizer = AutoTokenizer.from_pretrained("ytu-ce-cosmos/turkish-gpt2") |
|
|
model = AutoModelForCausalLM.from_pretrained("ytu-ce-cosmos/turkish-gpt2") |
|
|
|
|
|
def dusun(prompt, max_length=150): |
|
|
inputs = tokenizer(prompt, return_tensors="pt") |
|
|
outputs = model.generate(**inputs, max_length=max_length, do_sample=True, temperature=0.8) |
|
|
return tokenizer.decode(outputs[0], skip_special_tokens=True) |
|
|
|
|
|
demo = gr.Interface( |
|
|
fn=dusun, |
|
|
inputs=[ |
|
|
gr.Textbox(label="Türkçe Prompt", lines=3), |
|
|
gr.Slider(50, 200, value=100, label="Uzunluk") |
|
|
], |
|
|
outputs=gr.Textbox(label="Üretilen Metin", lines=10), |
|
|
title="🇹🇷 Türkçe GPT-2", |
|
|
examples=[ |
|
|
["Bir varmış bir yokmuş"], |
|
|
["Yapay zeka geleceği"], |
|
|
] |
|
|
) |
|
|
|
|
|
demo.launch() |