dewper committed on
Commit
6081e28
·
verified ·
1 Parent(s): 9b487e2

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +49 -0
app.py ADDED
@@ -0,0 +1,49 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

# Base model and the location of the fine-tuned adapter on Hugging Face.
base_model_id = "microsoft/phi-2"
# NOTE(review): placeholder repo id — must be replaced with a real adapter
# repo before this app can start.
adapter_model_id = "username_kamu/Deeper-Logic-Phi2"  # Replace with your repo

# Load the tokenizer and reuse the EOS token as the padding token.
tokenizer = AutoTokenizer.from_pretrained(base_model_id)
tokenizer.pad_token = tokenizer.eos_token

# Load the base model in float16; device_map="auto" lets the library place
# the weights on whatever device is available (presumably a GPU when present).
model = AutoModelForCausalLM.from_pretrained(
    base_model_id,
    torch_dtype=torch.float16,
    device_map="auto",
    trust_remote_code=True
)

# Merge in the fine-tuned PEFT adapter on top of the base model.
model = PeftModel.from_pretrained(model, adapter_model_id)
23
+
24
def predict(message, history):
    """Chat callback for ``gr.ChatInterface``.

    Parameters
    ----------
    message : str
        The latest user message.
    history : list
        Prior chat turns supplied by Gradio. Unused: each reply is
        generated from the current message alone, without context.

    Returns
    -------
    str
        The generated reply, with the prompt scaffolding stripped.
    """
    # Instruct/Output prompt format; the split on "Output:" below relies
    # on this exact scaffolding.
    prompt = f"Instruct: {message}\nOutput:"
    # BUG FIX: the original hard-coded .to("cuda"), which crashes on
    # CPU-only hardware. model.device tracks wherever device_map="auto"
    # actually placed the model, so this works on both CPU and GPU.
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)

    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            max_new_tokens=200,
            temperature=0.7,
            do_sample=True,
            pad_token_id=tokenizer.eos_token_id
        )

    # Decode the full sequence and keep only the text after "Output:".
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return response.split("Output:")[-1].strip()
39
+
40
# Build the chat UI: ChatInterface wires predict() into a chat widget.
demo = gr.ChatInterface(
    fn=predict,
    title="Deeper-Logic AI",
    description="Asisten Riset & Produktivitas Berbasis Phi-2 (Fine-tuned)",
    theme="soft"
)

# Standard entry-point guard: start the Gradio server only when run directly.
if __name__ == "__main__":
    demo.launch()