import torch
from transformers import pipeline
# Pick GPU 0 when CUDA is available, otherwise run on CPU (-1 per HF convention).
device_id = 0 if torch.cuda.is_available() else -1

# Text-generation pipeline for the Arabic poetry model; the tokenizer is
# loaded from the same checkpoint.
gen = pipeline(
    "text-generation",
    model="iko-01/ARABIC_poetry2",
    tokenizer="iko-01/ARABIC_poetry2",
    device=device_id,
)
# Seed prompts fed to the generator, one poem per prompt.
# FIX: the second entry was split across two physical lines in the original,
# which is a SyntaxError (an unterminated string literal). Rejoined here with
# no separator — presumably a line-wrap inside a single word; confirm against
# the intended Arabic text.
prompts = [
    "ูุง ููุทููู",
    "ููุฏู ุณูุนูุชู",
    "ุญููู ุงุดุชุฏูู",
    "ูุง ุฐุงูุฑุฉู",
    "ูููุจู",
    "ูู ุงููููู",
]
# Generate one sampled continuation per prompt and collect prompt/output pairs.
# Sampling setup: top-k=50, nucleus top-p=0.95, temperature 0.8, up to 150 new tokens.
results = [
    {
        "prompt": prompt,
        "output": gen(
            prompt,
            max_new_tokens=150,
            do_sample=True,
            top_k=50,
            top_p=0.95,
            temperature=0.8,
            num_return_sequences=1,
        )[0]["generated_text"],
    }
    for prompt in prompts
]
# Dump every prompt/output pair to stdout, separated by a dashed rule.
for entry in results:
    print("PROMPT:", entry["prompt"])
    print("OUTPUT:", entry["output"])
    print("-" * 40)