"""K-pop chatbot web app: a Gradio chat UI backed by the Hugging Face Inference API."""

import gradio as gr
from huggingface_hub import InferenceClient

# Hosted inference client for the chat model.
client = InferenceClient("microsoft/phi-4")


def respond(message, history):
    """Generate an assistant reply to *message* given the prior chat *history*.

    Parameters
    ----------
    message : str
        The latest user message.
    history : list[dict] | None
        Prior turns as OpenAI-style {"role": ..., "content": ...} dicts
        (this is the shape gr.ChatInterface supplies with type="messages").

    Returns
    -------
    str
        The model's reply text, stripped of surrounding whitespace.
    """
    messages = [{"role": "system",
                 "content": "You are a friendly kpop expert chatbot."}]
    if history:
        messages.extend(history)
    messages.append({"role": "user", "content": message})
    # NOTE(review): max_tokens=10000 is generous — confirm it stays within
    # the endpoint's context window for long conversations.
    response = client.chat_completion(messages, max_tokens=10000)
    return response['choices'][0]['message']['content'].strip()


title = "Kpop chatbot"
topics = """
Welcome to Kpop song Hunters!!
"""
disclaimer = ""

custom_css = """
.gradio-container { background-color: #FFF5F2 !important; }
.gradio-chat { background-color: #A6B28B !important; }
.chat-interface { background-color: #F5C9B0 !important; }
.chat-message { background-color: #F9F6F3 !important; }
"""

# Single app definition.  The original file built and launched TWO separate
# apps; the first `demo.launch()` blocks, so the sidebar layout below never
# ran.  Only the intended sidebar layout is kept, with the custom CSS applied.
with gr.Blocks(css=custom_css) as chatbot:
    with gr.Row():
        with gr.Column(scale=1):   # left column: sidebar
            gr.Markdown(title)     # heading
            gr.Markdown(topics)    # welcome text
        with gr.Column(scale=2):   # right column: main chat area
            gr.ChatInterface(
                fn=respond,
                type="messages"
            )
    with gr.Row():
        gr.Markdown(disclaimer)    # footer disclaimer text

if __name__ == "__main__":
    chatbot.launch()