# DeepSeek Chatbot — Gradio demo app (HuggingFace Spaces)
| import gradio as gr | |
# Placeholder for model loading (adjust as needed for your specific models)
def load_model(model_name):
    """Return a stand-in callable for the named model.

    The callable simply echoes the model name and the input text.
    Replace this with real model-loading code when integrating actual models.
    """
    def respond(input_text):
        return f"Response from {model_name}: {input_text}"

    return respond
# Load the models (placeholder functions here)
_MODEL_NAMES = (
    "DeepSeek-R1-Distill-Qwen-32B",
    "DeepSeek-R1",
    "DeepSeek-R1-Zero",
)
deepseek_r1_distill, deepseek_r1, deepseek_r1_zero = map(load_model, _MODEL_NAMES)
# Define the optional parameters section
def create_optional_parameters():
    """Build the collapsible "optional parameters" controls.

    Returns a 4-tuple of Gradio components:
    (system_message, max_new_tokens, temperature, top_p).
    """
    with gr.Accordion("Optional Parameters (Click to Expand)", open=False):
        system_message = gr.Textbox(
            label="System Message",
            value="You are a friendly Chatbot created by ruslanmv.com",
            lines=2,
            interactive=True,
        )
        max_new_tokens = gr.Slider(
            minimum=1,
            maximum=4000,
            value=200,
            label="Max New Tokens",
            interactive=True,
        )
        temperature = gr.Slider(
            minimum=0.10,
            maximum=4.00,
            value=0.70,
            label="Temperature",
            interactive=True,
        )
        top_p = gr.Slider(
            minimum=0.10,
            maximum=1.00,
            value=0.90,
            label="Top-p (nucleus sampling)",
            interactive=True,
        )
    return system_message, max_new_tokens, temperature, top_p
# Define the main interface
def chat_interface(user_input, system_message, max_new_tokens, temperature, top_p):
    """Return a placeholder Markdown reply echoing the input and settings.

    Swap this body for a real model call to produce actual responses.
    """
    # Placeholder response - integrate with actual model here
    return (
        f"**System Message**: {system_message}\n"
        f"**Your Input**: {user_input}\n"
        "**Parameters Used**:\n"
        f"- Max New Tokens: {max_new_tokens}\n"
        f"- Temperature: {temperature}\n"
        f"- Top-p: {top_p}\n"
        "*Note: Actual model integration required for real responses*"
    )
| # Create the Gradio interface | |
| with gr.Blocks(css=""" | |
| .gradio-container { | |
| font-family: Arial, sans-serif; | |
| background-color: #f9f9f9; | |
| color: #333; | |
| padding: 20px; | |
| } | |
| .gr-button.primary { | |
| background-color: #4caf50; | |
| color: white; | |
| border: none; | |
| padding: 10px 20px; | |
| font-size: 16px; | |
| border-radius: 5px; | |
| cursor: pointer; | |
| } | |
| .gr-button.primary:hover { | |
| background-color: #45a049; | |
| } | |
| .gr-textbox textarea { | |
| font-size: 16px; | |
| } | |
| """) as demo: | |
| gr.Markdown(""" | |
| # DeepSeek Chatbot | |
| Welcome to the **DeepSeek Chatbot**! This AI-powered chatbot is designed to provide insightful responses. | |
| Created by [ruslanmv.com](https://ruslanmv.com/). | |
| """) | |
| with gr.Row(): | |
| with gr.Column(scale=3): | |
| user_input = gr.Textbox( | |
| label="Your Message", | |
| placeholder="Type your message here...", | |
| lines=4, | |
| interactive=True | |
| ) | |
| submit_button = gr.Button("Send", variant="primary") | |
| with gr.Column(scale=5): | |
| output = gr.Markdown(label="Chatbot Response") | |
| # Add the optional parameters section | |
| system_message, max_new_tokens, temperature, top_p = create_optional_parameters() | |
| # Link the submit button to the chat interface | |
| submit_button.click( | |
| chat_interface, | |
| inputs=[user_input, system_message, max_new_tokens, temperature, top_p], | |
| outputs=output | |
| ) | |
| # Launch the demo | |
| if __name__ == "__main__": | |
| demo.launch() |