#!/usr/bin/env python3
"""
Main entry point for Hugging Face Spaces deployment
"""
import os
import random
import sys
from pathlib import Path

import gradio as gr
# Make the local "app" package importable regardless of the working
# directory the script is launched from (HF Spaces runs it from the repo root).
sys.path.append(str(Path(__file__).parent / "app"))
def create_simple_chat_interface():
    """Build the Gradio Blocks chat UI with mock (canned) responses.

    Returns:
        gr.Blocks: the assembled interface, ready for ``.launch()``.

    Notes:
        No model is loaded — replies are picked at random from a fixed
        list. The settings widgets (temperature, top-p, max tokens,
        system message) are rendered but not wired to any handler yet.
    """

    def send_message(message, history):
        """Append the user turn and a mock assistant reply.

        Args:
            message: raw text from the input box.
            history: chatbot history as ``[user, assistant]`` pairs.

        Returns:
            tuple: (updated history, ``""`` to clear the input box).
        """
        # Ignore empty / whitespace-only submissions.
        if not message.strip():
            return history, ""

        # Add the user turn with the assistant slot left open.
        history.append([message, None])

        # Mock response generation — swap in a real model call later.
        responses = [
            "Hello! I'm a helpful AI assistant. How can I help you today?",
            "That's an interesting question! Let me think about that.",
            "I'd be happy to help you with that.",
            "Thanks for your message! I'm here to assist you.",
            "Great question! Here's what I can tell you about that.",
        ]
        history[-1][1] = random.choice(responses)
        return history, ""

    def clear_chat():
        """Reset the chatbot to an empty history."""
        return []

    # Assemble the page layout.
    with gr.Blocks(
        css="""
    .chat-container {
        max-height: 600px;
        overflow-y: auto;
        border: 1px solid #ddd;
        border-radius: 10px;
        padding: 20px;
        background: white;
    }
    .user-message {
        background-color: #007bff;
        color: white;
        padding: 10px 15px;
        border-radius: 18px;
        margin: 10px 0;
        max-width: 80%;
        margin-left: auto;
    }
    .assistant-message {
        background-color: #f8f9fa;
        color: #333;
        padding: 10px 15px;
        border-radius: 18px;
        margin: 10px 0;
        max-width: 80%;
        margin-right: auto;
    }
    """,
        title="LLM Chat Interface",
    ) as interface:
        gr.Markdown("# 🤖 LLM Chat Interface")
        gr.Markdown("Chat with your local LLM model using a beautiful web interface.")

        # Chat display
        chatbot = gr.Chatbot(
            value=[],
            label="Chat History",
            height=400,
            elem_classes=["chat-container"],
        )

        # Input area
        with gr.Row():
            message_input = gr.Textbox(
                placeholder="Type your message here...",
                label="Message",
                lines=3,
                scale=4,
            )
            # NOTE: `scale` must be an integer in Gradio 4.x; the original
            # 0.3 is rejected there, so use the smallest valid weight.
            send_btn = gr.Button("Send", variant="primary", scale=1)

        # Clear button
        clear_btn = gr.Button("Clear Chat", variant="secondary")

        # Model settings section (display-only for now — not connected to
        # send_message; presumably to be wired up once a real model exists).
        with gr.Row():
            with gr.Column(scale=2):
                gr.Markdown("### ⚙️ Model Settings")
                temperature_slider = gr.Slider(
                    minimum=0.0,
                    maximum=2.0,
                    value=0.7,
                    step=0.1,
                    label="Temperature",
                    info="Controls randomness (0 = deterministic, 2 = very random)",
                )
                top_p_slider = gr.Slider(
                    minimum=0.0,
                    maximum=1.0,
                    value=0.9,
                    step=0.1,
                    label="Top-p",
                    info="Controls diversity via nucleus sampling",
                )
                max_tokens_slider = gr.Slider(
                    minimum=50,
                    maximum=2048,
                    value=512,
                    step=50,
                    label="Max Tokens",
                    info="Maximum number of tokens to generate",
                )

        # System message
        system_message = gr.Textbox(
            placeholder="You are a helpful AI assistant.",
            label="System Message",
            lines=3,
            info="Optional system message to set the assistant's behavior",
        )

        # Model status
        model_status = gr.Markdown(
            "**Model Status:** ✅ Ready (Mock Mode)\n"
            "**Model Type:** Simple Chat Interface\n"
            "**Note:** This is a demo version. Add your model files to enable full LLM functionality."
        )

        # Event handlers: button click and textbox Enter share one handler.
        send_btn.click(
            fn=send_message,
            inputs=[message_input, chatbot],
            outputs=[chatbot, message_input],
        )
        message_input.submit(
            fn=send_message,
            inputs=[message_input, chatbot],
            outputs=[chatbot, message_input],
        )
        clear_btn.click(fn=clear_chat, outputs=[chatbot])

    return interface
def main():
    """Create the chat interface and launch the Gradio server.

    Exits the process with status 1 if construction or launch fails.
    """
    try:
        interface = create_simple_chat_interface()
        # For HF Spaces, host/port are handled by the platform, so only
        # the behavioral flags are passed here.
        interface.launch(
            share=False, show_error=True, quiet=False  # HF Spaces handles sharing
        )
    except Exception as e:
        # Top-level boundary: report the failure on stderr (not stdout)
        # and exit non-zero so the platform sees the launch as failed.
        print(f"Error launching interface: {e}", file=sys.stderr)
        sys.exit(1)
# Launch only when executed as a script, not when imported as a module.
if __name__ == "__main__":
    main()