# chatbot/tkinter_app.py
# (Hugging Face page chrome removed from this scraped copy; original upload
#  commit 0dd218e, "Add tkinter_app.py", by d-e-e-k-11.)
import tkinter as tk
import customtkinter as ctk
from llama_cpp import Llama
import threading
import os
import sys
# Set appearance and theme
# Module-level CustomTkinter defaults applied before any window is created:
# dark mode with the built-in blue accent theme.
ctk.set_appearance_mode("Dark")
ctk.set_default_color_theme("blue")
class LlamaChatApp(ctk.CTk):
    """Desktop chat window for a local Llama-2 model served by llama-cpp-python.

    Layout: a fixed-width sidebar (logo, "New Chat" button, appearance
    switcher, model info) beside a main area holding a header bar, the
    scrolling chat transcript, the input row, and a status indicator.
    The model is loaded on a background daemon thread so the UI stays
    responsive during startup.
    """

    def __init__(self, model_path):
        """Build the full widget tree and start loading the model.

        model_path: filesystem path to a GGML model file; passed verbatim
            to ``Llama(model_path=...)`` on the loader thread.
        """
        super().__init__()
        self.title("AI Chatbot - Llama-2-7B")
        self.geometry("1100x750")
        # Grid configuration (Sidebar + Main): column 1 (main area) and
        # row 0 absorb all resize slack; the sidebar keeps its fixed width.
        self.grid_columnconfigure(1, weight=1)
        self.grid_rowconfigure(0, weight=1)
        # Sidebar
        self.sidebar_frame = ctk.CTkFrame(self, width=220, corner_radius=0)
        self.sidebar_frame.grid(row=0, column=0, sticky="nsew")
        # Row 4 stretches, pushing the theme/info widgets toward the bottom.
        self.sidebar_frame.grid_rowconfigure(4, weight=1)
        self.logo_label = ctk.CTkLabel(self.sidebar_frame, text="🦙 Llama AI", font=ctk.CTkFont(size=22, weight="bold"))
        self.logo_label.grid(row=0, column=0, padx=20, pady=(30, 20))
        self.clear_button = ctk.CTkButton(
            self.sidebar_frame,
            text="New Chat",
            command=self.clear_chat,
            fg_color="#333333",
            hover_color="#444444",
            height=40,
            font=ctk.CTkFont(weight="bold")
        )
        self.clear_button.grid(row=1, column=0, padx=20, pady=10, sticky="ew")
        # Theme Switcher
        self.theme_label = ctk.CTkLabel(self.sidebar_frame, text="Appearance:", font=ctk.CTkFont(size=11))
        self.theme_label.grid(row=4, column=0, padx=20, pady=(20, 0), sticky="w")
        self.theme_option = ctk.CTkOptionMenu(
            self.sidebar_frame,
            values=["Dark", "Light"],
            command=self.change_appearance_mode,
            height=30,
            fg_color="#333333",
            button_color="#444444"
        )
        self.theme_option.grid(row=5, column=0, padx=20, pady=(5, 10), sticky="ew")
        self.info_label = ctk.CTkLabel(
            self.sidebar_frame,
            text="Model: Llama-2-7B\nFormat: GGMLv3\nType: Offline LLM",
            font=ctk.CTkFont(size=11),
            text_color="#888888",
            justify="left"
        )
        self.info_label.grid(row=6, column=0, padx=20, pady=20, sticky="w")
        # Main Content Area: row 1 (the chat transcript) takes resize slack.
        self.main_container = ctk.CTkFrame(self, corner_radius=0, fg_color="#0f0f0f")
        self.main_container.grid(row=0, column=1, sticky="nsew")
        self.main_container.grid_columnconfigure(0, weight=1)
        self.main_container.grid_rowconfigure(1, weight=1)
        # Header in Main Area
        self.header = ctk.CTkFrame(self.main_container, height=60, corner_radius=0, fg_color="#1a1a1a")
        self.header.grid(row=0, column=0, sticky="ew")
        self.header.grid_columnconfigure(0, weight=1)
        self.title_label = ctk.CTkLabel(
            self.header,
            text="Chat Session",
            font=ctk.CTkFont(size=18, weight="bold")
        )
        self.title_label.grid(row=0, column=0, pady=20)
        # Chat display: read-only transcript; re-enabled only while
        # append_message/clear_chat write to it.
        self.chat_display = ctk.CTkTextbox(
            self.main_container,
            state="disabled",
            wrap="word",
            font=ctk.CTkFont(size=15),
            fg_color="#0d0d0d",
            border_width=0,
            text_color="#e0e0e0"
        )
        self.chat_display.grid(row=1, column=0, padx=30, pady=(20, 10), sticky="nsew")
        # Configure tags for coloring the sender-name lines in the transcript.
        self.chat_display.tag_config("User", foreground="#3B82F6")  # Blue
        self.chat_display.tag_config("Llama-2", foreground="#10B981")  # Green
        self.chat_display.tag_config("System", foreground="#6B7280")  # Grey
        # Input area container (for centered look)
        self.bottom_frame = ctk.CTkFrame(self.main_container, fg_color="transparent")
        self.bottom_frame.grid(row=2, column=0, padx=30, pady=(10, 20), sticky="ew")
        self.bottom_frame.grid_columnconfigure(0, weight=1)
        self.user_input = ctk.CTkEntry(
            self.bottom_frame,
            placeholder_text="Message Llama-2...",
            font=ctk.CTkFont(size=14),
            height=50,
            border_width=1,
            border_color="#333333",
            corner_radius=12,
            fg_color="#1a1a1a"
        )
        self.user_input.grid(row=0, column=0, padx=(0, 10), sticky="ew")
        # Enter key submits, same as the Send button.
        self.user_input.bind("<Return>", lambda e: self.send_message())
        self.send_button = ctk.CTkButton(
            self.bottom_frame,
            text="Send",
            command=self.send_message,
            width=90,
            height=50,
            corner_radius=12,
            font=ctk.CTkFont(weight="bold")
        )
        self.send_button.grid(row=0, column=1)
        # Status indicator: red dot + "OFFLINE" until the model finishes loading.
        self.status_container = ctk.CTkFrame(self.main_container, height=25, fg_color="transparent")
        self.status_container.grid(row=3, column=0, sticky="ew", padx=30, pady=(0, 10))
        self.status_dot = ctk.CTkLabel(self.status_container, text="●", font=ctk.CTkFont(size=14), text_color="#f44336")
        self.status_dot.pack(side="left")
        self.status_text = ctk.CTkLabel(self.status_container, text="OFFLINE", font=ctk.CTkFont(size=11, weight="bold"), text_color="#888888")
        self.status_text.pack(side="left", padx=5)
        # LLM Logic: load the model off the UI thread; is_loading gates
        # send_message until init_model flips it.
        self.model_path = model_path
        self.llm = None
        self.is_loading = True
        threading.Thread(target=self.init_model, daemon=True).start()
def change_appearance_mode(self, new_mode):
ctk.set_appearance_mode(new_mode)
bg = "#0f0f0f" if new_mode == "Dark" else "#f5f5f5"
chat_bg = "#0d0d0d" if new_mode == "Dark" else "#ffffff"
text_col = "#e0e0e0" if new_mode == "Dark" else "#1a1a1a"
header_bg = "#1a1a1a" if new_mode == "Dark" else "#ebebeb"
self.main_container.configure(fg_color=bg)
self.chat_display.configure(fg_color=chat_bg, text_color=text_col)
self.header.configure(fg_color=header_bg)
def init_model(self):
try:
self.llm = Llama(model_path=self.model_path, n_ctx=2048, n_threads=4, verbose=False)
self.is_loading = False
self.after(0, self.on_model_ready)
except Exception as e:
self.after(0, lambda: self.on_model_error(str(e)))
def on_model_ready(self):
self.status_dot.configure(text_color="#4CAF50")
self.status_text.configure(text="ONLINE", text_color="#4CAF50")
self.append_message("System", "Llama-2 is ready. Ask me anything!")
def on_model_error(self, error):
self.status_text.configure(text=f"ERROR: {error}", text_color="#f44336")
self.append_message("System", "Failed to load model. Check path.")
def clear_chat(self):
self.chat_display.configure(state="normal")
self.chat_display.delete("0.0", "end")
self.chat_display.configure(state="disabled")
if not self.is_loading:
self.append_message("System", "Chat cleared. Starting new session.")
def append_message(self, sender, message):
self.chat_display.configure(state="normal")
tag = sender
if sender == "User":
display_name = "👤 YOU"
elif sender == "Llama-2":
display_name = "🦙 LLAMA-2"
else:
display_name = "⚙️ SYSTEM"
tag = "System"
self.chat_display.insert("end", f"{display_name}\n", (tag,))
self.chat_display.insert("end", f"{message}\n\n")
self.chat_display.configure(state="disabled")
self.chat_display.see("end")
def send_message(self):
if self.is_loading: return
msg = self.user_input.get().strip()
if not msg: return
self.user_input.delete(0, "end")
self.append_message("User", msg)
self.send_button.configure(state="disabled")
self.status_text.configure(text="THINKING...", text_color="#FFA726")
threading.Thread(target=self.generate_response, args=(msg,), daemon=True).start()
def generate_response(self, message):
try:
prompt = f"[INST] <<SYS>>\nYou are a helpful assistant.\n<</SYS>>\n\n{message} [/INST]"
output = self.llm(prompt, max_tokens=1024, stop=["[/INST]", "</s>"], echo=False)
response = output['choices'][0]['text'].strip()
self.after(0, lambda: self.append_message("Llama-2", response))
self.after(0, lambda: self.status_text.configure(text="ONLINE", text_color="#4CAF50"))
except Exception as e:
self.after(0, lambda: self.append_message("System", f"Error: {str(e)}"))
finally:
self.after(0, lambda: self.send_button.configure(state="normal"))
if __name__ == "__main__":
    # Default model location; generalized so the path can be overridden from
    # the command line:  python tkinter_app.py /path/to/model.bin
    DEFAULT_MODEL_PATH = r"C:\Users\student\Downloads\llama-2-7b-chat.ggmlv3.q2_K.bin"
    MODEL_PATH = sys.argv[1] if len(sys.argv) > 1 else DEFAULT_MODEL_PATH
    if not os.path.exists(MODEL_PATH):
        print(f"Error: Model not found at {MODEL_PATH}")
        sys.exit(1)
    app = LlamaChatApp(MODEL_PATH)
    app.mainloop()