vortexa64 committed on
Commit
8518a3f
·
verified ·
1 Parent(s): abe64fa

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -11
app.py CHANGED
@@ -1,8 +1,7 @@
1
  import gradio as gr
2
- from transformers import AutoTokenizer, AutoModelForCausalLM
3
  import torch
4
 
5
- # Load GPT-2 Bahasa Indonesia (contoh: IndoGPT2 dari cahya)
6
  model_name = "cahya/gpt2-small-indonesian-522M"
7
  tokenizer = AutoTokenizer.from_pretrained(model_name)
8
  model = AutoModelForCausalLM.from_pretrained(model_name)
@@ -12,22 +11,19 @@ def generate_response(prompt):
12
  with torch.no_grad():
13
  output = model.generate(
14
  input_ids,
15
- max_length=100,
16
  do_sample=True,
17
  top_k=50,
18
  top_p=0.95,
19
  temperature=0.9,
20
- pad_token_id=tokenizer.eos_token_id,
21
  )
22
  return tokenizer.decode(output[0], skip_special_tokens=True)
23
 
24
- # Gradio UI
25
- iface = gr.Interface(
26
  fn=generate_response,
27
- inputs=gr.Textbox(lines=4, placeholder="Ketik sesuatu yang nakal... 🤭"),
28
  outputs="text",
29
  title="🧠 AI Nakal Bahasa Indonesia",
30
- description="Powered by GPT-2 Indo | Coba ketik rayuan atau pertanyaan nakal 😳💕"
31
- )
32
-
33
- iface.launch()
 
1
  import gradio as gr
2
+ from transformers import AutoModelForCausalLM, AutoTokenizer
3
  import torch
4
 
 
5
  model_name = "cahya/gpt2-small-indonesian-522M"
6
  tokenizer = AutoTokenizer.from_pretrained(model_name)
7
  model = AutoModelForCausalLM.from_pretrained(model_name)
 
11
  with torch.no_grad():
12
  output = model.generate(
13
  input_ids,
14
+ max_length=80,
15
  do_sample=True,
16
  top_k=50,
17
  top_p=0.95,
18
  temperature=0.9,
19
+ pad_token_id=tokenizer.eos_token_id
20
  )
21
  return tokenizer.decode(output[0], skip_special_tokens=True)
22
 
23
+ gr.Interface(
 
24
  fn=generate_response,
25
+ inputs=gr.Textbox(lines=4, placeholder="Tulis rayuanmu di sini... 🤭"),
26
  outputs="text",
27
  title="🧠 AI Nakal Bahasa Indonesia",
28
+ description="Powered by GPT-2 Indo Coba tanya yang manja, romantis, atau... 🌚"
29
+ ).launch()