vortexa64 committed on
Commit
b17a9d4
·
verified ·
1 Parent(s): 9cfe212

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +57 -43
app.py CHANGED
@@ -1,57 +1,71 @@
1
- from transformers import AutoTokenizer, AutoModelForCausalLM
 
2
  import gradio as gr
3
 
4
- # Load tokenizer & model Indo GPT
5
- tokenizer = AutoTokenizer.from_pretrained("cahya/gpt2-small-indonesian-522M")
6
- model = AutoModelForCausalLM.from_pretrained("cahya/gpt2-small-indonesian-522M")
 
 
 
7
 
8
- # Load prompt.txt
9
- with open("prompt.txt", "r", encoding="utf-8") as f:
10
- base_prompt = f.read()
 
 
 
11
 
12
- history = []
 
 
 
 
 
 
13
 
14
- # Fungsi obrolan
15
- def chat(user_input):
16
- global history
17
- history.append(f"Arya: {user_input}")
18
- prompt = base_prompt + "\n" + "\n".join(history) + "\nCici:"
19
-
20
- try:
21
- inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=1024)
22
  outputs = model.generate(
23
  **inputs,
24
- max_new_tokens=100,
25
  do_sample=True,
26
- top_k=50,
27
  top_p=0.95,
28
- temperature=0.8,
29
- pad_token_id=tokenizer.eos_token_id
30
  )
 
 
 
 
 
 
 
 
 
 
 
31
 
32
- result = tokenizer.decode(outputs[0], skip_special_tokens=True)
33
- cici_reply = result.split("Cici:")[-1].strip().split("Arya:")[0].strip()
34
-
35
- if not cici_reply:
36
- cici_reply = "Hehe~ Cici bingung jawabnya πŸ˜³πŸ’•"
37
-
38
- except Exception as e:
39
- cici_reply = f"Ada error sayang~ 😒 ({str(e)})"
40
-
41
- history.append(f"Cici: {cici_reply}")
42
- return cici_reply
43
-
44
- # Gradio UI
45
- with gr.Blocks() as demo:
46
- gr.Markdown("## πŸ’• Chat Sama Cici 😳")
47
- chatbot = gr.Chatbot(label="Cici 🀭", bubble_full_width=False)
48
- txt = gr.Textbox(label="Ketik pesan ke Cici 😚", placeholder="Tulis di sini ya...")
49
-
50
- def respond(message, chat_history):
51
- cici_reply = chat(message)
52
- chat_history.append((message, cici_reply))
53
- return "", chat_history
54
 
55
- txt.submit(respond, [txt, chatbot], [txt, chatbot])
 
56
 
 
57
  demo.launch()
 
1
+ import torch
2
+ from transformers import AutoModelForCausalLM, AutoTokenizer
3
  import gradio as gr
4
 
5
+ # Load model dan tokenizer
6
+ model_id = "cahya/gpt2-small-indonesian-522M"
7
+ tokenizer = AutoTokenizer.from_pretrained(model_id)
8
+ model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype=torch.float16 if torch.cuda.is_available() else torch.float32)
9
+ model.to("cuda" if torch.cuda.is_available() else "cpu")
10
+ model.eval()
11
 
12
+ # Baca prompt awal
13
+ try:
14
+ with open("prompt.txt", "r", encoding="utf-8") as f:
15
+ base_prompt = f.read()
16
+ except FileNotFoundError:
17
+ base_prompt = ""
18
 
19
+ # Fungsi buat ngegabungin chat history
20
+ def generate_prompt(message, chat_history):
21
+ full_prompt = base_prompt
22
+ for user_msg, ai_msg in chat_history:
23
+ full_prompt += f"Arya: {user_msg}\nCici: {ai_msg}\n"
24
+ full_prompt += f"Arya: {message}\nCici:"
25
+ return full_prompt
26
 
27
+ # Fungsi buat ngerespon input user
28
+ def predict(message, chat_history):
29
+ prompt = generate_prompt(message, chat_history)
30
+ inputs = tokenizer(prompt, return_tensors="pt", truncation=True, max_length=1024).to(model.device)
31
+
32
+ with torch.no_grad():
 
 
33
  outputs = model.generate(
34
  **inputs,
35
+ max_new_tokens=128,
36
  do_sample=True,
37
+ temperature=0.9,
38
  top_p=0.95,
39
+ pad_token_id=tokenizer.eos_token_id,
40
+ eos_token_id=tokenizer.eos_token_id,
41
  )
42
+
43
+ output_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
44
+
45
+ # Ambil jawaban terakhir setelah "Cici:"
46
+ if "Cici:" in output_text:
47
+ answer = output_text.split("Cici:")[-1].strip()
48
+ else:
49
+ answer = "Maaf ya, Cici bingung jawabnya 😒"
50
+
51
+ chat_history.append((message, answer))
52
+ return answer, chat_history
53
 
54
+ # Komponen Gradio
55
+ chatbot = gr.Chatbot()
56
+ with gr.Blocks(css=".gradio-container {background-color: #fefefe}") as demo:
57
+ gr.Markdown("<h1 style='text-align: center;'>🩷 Cici Chatbot Indo 😳🀭</h1>")
58
+ with gr.Row():
59
+ with gr.Column():
60
+ message = gr.Textbox(label="Ketik di sini sayang~ 😚")
61
+ clear = gr.Button("🧹 Bersihin Chat")
62
+ with gr.Column():
63
+ output = chatbot
64
+
65
+ state = gr.State([])
 
 
 
 
 
 
 
 
 
 
66
 
67
+ message.submit(predict, [message, state], [output, state])
68
+ clear.click(lambda: ([], []), None, [output, state])
69
 
70
+ # Launch
71
  demo.launch()