|
|
|
|
|
""" |
|
|
Main entry point for Hugging Face Spaces deployment |
|
|
""" |
|
|
|
|
|
import os
import random
import sys
from pathlib import Path

import gradio as gr
|
|
|
|
|
|
|
|
# Make modules under the sibling "app" directory importable without a package
# install (NOTE(review): assumes an app/ folder ships next to this file).
sys.path.append(str(Path(__file__).parent / "app"))
|
|
|
|
|
def create_simple_chat_interface():
    """Build the demo (mock-mode) Gradio chat interface.

    The interface renders a chatbot, a message box with Send/Clear buttons,
    and model-settings widgets (temperature, top-p, max tokens, system
    message).  In this demo version the settings are displayed but not wired
    to the handler: every user message simply receives a random canned reply.

    Returns:
        gr.Blocks: the assembled interface, ready to be ``launch()``-ed.
    """

    def send_message(message, history):
        """Append *message* and a random canned reply to *history*.

        Args:
            message: raw text from the input textbox.
            history: chatbot history as a list of ``[user, assistant]`` pairs.

        Returns:
            Tuple of (updated history, ``""`` to clear the input box).
        """
        # Ignore empty / whitespace-only submissions; leave history untouched.
        if not message.strip():
            return history, ""

        # Append the user turn with a placeholder assistant slot.
        history.append([message, None])

        responses = [
            "Hello! I'm a helpful AI assistant. How can I help you today?",
            "That's an interesting question! Let me think about that.",
            "I'd be happy to help you with that.",
            "Thanks for your message! I'm here to assist you.",
            "Great question! Here's what I can tell you about that.",
        ]
        # Fill in the assistant slot of the pair appended above.
        history[-1][1] = random.choice(responses)

        return history, ""

    def clear_chat():
        """Reset the chatbot to an empty history."""
        return []

    with gr.Blocks(
        css="""
        .chat-container {
            max-height: 600px;
            overflow-y: auto;
            border: 1px solid #ddd;
            border-radius: 10px;
            padding: 20px;
            background: white;
        }
        .user-message {
            background-color: #007bff;
            color: white;
            padding: 10px 15px;
            border-radius: 18px;
            margin: 10px 0;
            max-width: 80%;
            margin-left: auto;
        }
        .assistant-message {
            background-color: #f8f9fa;
            color: #333;
            padding: 10px 15px;
            border-radius: 18px;
            margin: 10px 0;
            max-width: 80%;
            margin-right: auto;
        }
        """,
        title="LLM Chat Interface"
    ) as interface:

        gr.Markdown("# 🤖 LLM Chat Interface")
        gr.Markdown("Chat with your local LLM model using a beautiful web interface.")

        chatbot = gr.Chatbot(
            value=[],
            label="Chat History",
            height=400,
            elem_classes=["chat-container"]
        )

        with gr.Row():
            message_input = gr.Textbox(
                placeholder="Type your message here...",
                label="Message",
                lines=3,
                scale=4,
            )
            # `scale` must be an integer — Gradio 4.x rejects floats
            # (the original 0.3 would raise at build time).
            send_btn = gr.Button("Send", variant="primary", scale=1)

        clear_btn = gr.Button("Clear Chat", variant="secondary")

        with gr.Row():
            with gr.Column(scale=2):
                gr.Markdown("### ⚙️ Model Settings")

                # NOTE: the sliders/system message below are not consumed by
                # the mock handler; they become inputs once a real model is
                # wired in.
                temperature_slider = gr.Slider(
                    minimum=0.0,
                    maximum=2.0,
                    value=0.7,
                    step=0.1,
                    label="Temperature",
                    info="Controls randomness (0 = deterministic, 2 = very random)",
                )

                top_p_slider = gr.Slider(
                    minimum=0.0,
                    maximum=1.0,
                    value=0.9,
                    step=0.1,
                    label="Top-p",
                    info="Controls diversity via nucleus sampling",
                )

                max_tokens_slider = gr.Slider(
                    minimum=50,
                    maximum=2048,
                    value=512,
                    step=50,
                    label="Max Tokens",
                    info="Maximum number of tokens to generate",
                )

                system_message = gr.Textbox(
                    placeholder="You are a helpful AI assistant.",
                    label="System Message",
                    lines=3,
                    info="Optional system message to set the assistant's behavior",
                )

                model_status = gr.Markdown(
                    "**Model Status:** ✅ Ready (Mock Mode)\n"
                    "**Model Type:** Simple Chat Interface\n"
                    "**Note:** This is a demo version. Add your model files to enable full LLM functionality."
                )

        # Wire up both the Send button and Enter-to-submit to the same handler.
        send_btn.click(
            fn=send_message,
            inputs=[message_input, chatbot],
            outputs=[chatbot, message_input],
        )

        message_input.submit(
            fn=send_message,
            inputs=[message_input, chatbot],
            outputs=[chatbot, message_input],
        )

        clear_btn.click(fn=clear_chat, outputs=[chatbot])

    return interface
|
|
|
|
|
def main():
    """Build the chat interface and serve it.

    On any failure during construction or launch, print the error and
    exit the process with status 1.
    """
    try:
        app = create_simple_chat_interface()
        app.launch(share=False, show_error=True, quiet=False)
    except Exception as exc:
        # Top-level boundary: report the failure and signal a non-zero exit
        # so the hosting environment can detect the crash.
        print(f"Error launching interface: {exc}")
        sys.exit(1)
|
|
|
|
|
# Launch the app only when this file is executed directly (not on import).
if __name__ == "__main__":
    main()
|
|
|