ZENLLC committed on
Commit
6b9d1a2
·
verified ·
1 Parent(s): 9bf9697

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +48 -0
app.py ADDED
@@ -0,0 +1,48 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline

# 1-line swap = your own open-weights model here -----------------------------
MODEL_NAME = "microsoft/DialoGPT-small" # 356 MB; no HF token required
# -----------------------------------------------------------------------------

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)

# DialoGPT is a GPT-2-family model and ships without a pad token; text
# generation with sampling warns ("pad_token_id not set") or mis-pads
# unless padding is explicitly mapped to end-of-sequence.
if tokenizer.pad_token_id is None:
    tokenizer.pad_token = tokenizer.eos_token

# Shared text-generation pipeline used by respond() below.
generator = pipeline(
    "text-generation",
    model=model,
    tokenizer=tokenizer,
    max_new_tokens=100,   # cap on NEW tokens generated per turn
    do_sample=True,       # stochastic decoding (nucleus sampling below)
    top_p=0.92,           # nucleus sampling: keep top 92% probability mass
    temperature=0.7,      # < 1.0 = less random, more focused replies
    pad_token_id=tokenizer.eos_token_id,  # explicit pad -> eos mapping
)
19
+
20
def respond(message, history):
    """Generate one bot reply for gr.ChatInterface.

    Args:
        message: str – the latest user message.
        history: list – prior turns accumulated by Gradio. Depending on the
            Gradio version this is either tuple format ([(user, bot), ...])
            or "messages" format ([{"role": ..., "content": ...}, ...]);
            both are handled.

    Returns:
        str – the bot reply only. gr.ChatInterface manages the history
        itself, so the callback must NOT return (reply, history) or mutate
        `history` (the original did both, which makes Gradio render the
        raw tuple as the bot message).
    """
    # ---- assemble prompt from chat history ----
    turns = []
    if history and isinstance(history[0], dict):
        # "messages" format: pair each user turn with the assistant answer.
        pending_user = None
        for entry in history:
            if entry.get("role") == "user":
                pending_user = entry.get("content", "")
            elif entry.get("role") == "assistant" and pending_user is not None:
                turns.append(f"User: {pending_user}\nBot: {entry.get('content', '')}\n")
                pending_user = None
    else:
        # tuple format: [(user, bot), ...]
        for usr, bot in history:
            turns.append(f"User: {usr}\nBot: {bot}\n")
    prompt = "".join(turns) + f"User: {message}\nBot:"

    # ---- generate ----
    output = generator(prompt, num_return_sequences=1)[0]["generated_text"]
    # Keep only the newly generated text, truncated at the model's next
    # simulated "User:" turn; fall back to an ellipsis on empty output.
    reply = output[len(prompt):].split("User:")[0].strip() or "…"

    return reply
39
+
40
# UI configuration kept in one place so swapping title/theme is a
# single-dict edit; gr.ChatInterface wires `respond` to an input box,
# a chatbot pane, and per-session history automatically.
_ui_config = {
    "fn": respond,
    "title": "🤖 Simple DialoGPT Chatbot",
    "description": "100 % local model · no API key needed · swap MODEL_NAME to try other open-source LLMs.",
    "theme": "soft",
}
demo = gr.ChatInterface(**_ui_config)

# Start the local Gradio server only when executed as a script,
# not when imported as a module.
if __name__ == "__main__":
    demo.launch()