Austinsz-Warehouse committed on
Commit
593df94
·
verified ·
1 Parent(s): d4909e6

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +63 -0
app.py ADDED
@@ -0,0 +1,63 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# app.py — minimal Gradio chat UI around a HuggingFace causal-LM.
# NOTE: everything below runs at import time: the model is downloaded and
# loaded before the UI is built, so startup blocks until loading finishes.
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer
import torch

# -------------------------
# HuggingFace model to use
# -------------------------
MODEL_NAME = "tiiuae/falcon-7b-instruct" # you can change to any hosted model

# -------------------------
# Load model and tokenizer
# -------------------------
# NOTE(review): a 7B model is loaded in default dtype (fp32) — very heavy on
# CPU-only hosts; confirm the deployment target has a GPU or enough RAM.
print("Loading model...")
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME)
# Prefer GPU when available; the same `device` string is reused by the chat
# function when moving tokenized inputs.
device = "cuda" if torch.cuda.is_available() else "cpu"
model.to(device)

# -------------------------
# System prompt
# -------------------------
# Prepended verbatim to every generated prompt (see chat function below).
SYSTEM_PROMPT = (
    "You are a helpful, creative AI assistant. "
    "Your creator is Austin. Answer clearly and politely."
)
26
+
27
+ # -------------------------
28
+ # Chat function
29
+ # -------------------------
30
def chat_with_ai(user_input, history=None):
    """Generate one assistant reply, conditioned on the running chat history.

    Args:
        user_input: The user's latest message.
        history: List of ``(user, ai)`` message pairs from previous turns.
            Defaults to a fresh list per call (fixes the original shared
            mutable-default-argument bug of ``history=[]``).

    Returns:
        ``(history, history)`` — the updated pair list, returned twice so the
        Gradio click handler can feed both the ``gr.Chatbot`` display and the
        ``gr.State`` holding the conversation.
    """
    # Fix: never use a mutable default; Gradio passes the State value
    # explicitly, so this branch only matters for direct calls.
    if history is None:
        history = []

    # Rebuild the whole conversation as one flat prompt for the model to
    # continue, ending with an open "AI:" turn.
    turns = "".join(f"User: {u}\nAI: {r}\n" for u, r in history)
    full_prompt = f"{SYSTEM_PROMPT}\n{turns}User: {user_input}\nAI:"

    inputs = tokenizer(full_prompt, return_tensors="pt").to(device)
    outputs = model.generate(**inputs, max_new_tokens=200)
    response = tokenizer.decode(outputs[0], skip_special_tokens=True)
    # The decoded text contains the whole prompt; keep only the text after
    # the final "AI:" marker — the newly generated reply.
    response = response.split("AI:")[-1].strip()

    history.append((user_input, response))
    # BUG FIX: the click handler maps outputs to [chatbot, history_state].
    # gr.Chatbot needs the list of (user, ai) pairs — the original returned
    # the raw response string first, which breaks the chat display.
    return history, history
43
+
44
+ # -------------------------
45
+ # Build Gradio GUI
46
+ # -------------------------
47
+ with gr.Blocks() as demo:
48
+ gr.Markdown("# Austin's AI Chatbot")
49
+ gr.Markdown("This chatbot was created by **Austin**. Chat with it below!")
50
+
51
+ chatbot = gr.Chatbot()
52
+ user_input = gr.Textbox(placeholder="Type your message here...")
53
+ submit_btn = gr.Button("Send")
54
+ history_state = gr.State([])
55
+
56
+ submit_btn.click(
57
+ chat_with_ai,
58
+ inputs=[user_input, history_state],
59
+ outputs=[chatbot, history_state]
60
+ )
61
+
62
+ # Launch app
63
+ demo.launch()