feat: Introduce .gitignore and requirements.txt, rename bot_response to model_inference for clarity, and disable demo sharing option.
Browse files
- .gitignore +1 -0
- app.py +8 -8
- requirements.txt +1 -0
.gitignore
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
.gradio/certificate.pem
|
app.py
CHANGED
|
@@ -44,7 +44,7 @@ def save_feedback(model_name, history, feedback_data: gr.LikeData):
|
|
| 44 |
|
| 45 |
print(f"Feedback logged for {model_name}")
|
| 46 |
|
| 47 |
-
def bot_response(user_message, history, model_name, system_message, max_tokens, temperature, top_p, oauth_token: gr.OAuthToken | None, local_endpoint: str):
|
| 48 |
if not user_message or user_message.strip() == "":
|
| 49 |
yield history, ""
|
| 50 |
return
|
|
@@ -122,24 +122,24 @@ with gr.Blocks() as demo:
|
|
| 122 |
model_a_name = gr.Dropdown(MODELS, label="Model A", value=MODELS[0])
|
| 123 |
chatbot_a = gr.Chatbot(label="Model A Output")
|
| 124 |
msg_a = gr.Textbox(placeholder="Send message to Model A...", label="Model A Input")
|
| 125 |
-
btn_a = gr.Button("Send to Model A")
|
| 126 |
|
| 127 |
# --- Model B ---
|
| 128 |
with gr.Column():
|
| 129 |
model_b_name = gr.Dropdown(MODELS, label="Model B", value=MODELS[1])
|
| 130 |
chatbot_b = gr.Chatbot(label="Model B Output")
|
| 131 |
msg_b = gr.Textbox(placeholder="Send message to Model B...", label="Model B Input")
|
| 132 |
-
btn_b = gr.Button("Send to Model B")
|
| 133 |
|
| 134 |
# --- Bind Events ---
|
| 135 |
a_inputs = [msg_a, chatbot_a, model_a_name, system_msg, max_t, temp, top_p_val, local_endpoint_a]
|
| 136 |
-
msg_a.submit(bot_response, a_inputs, [chatbot_a, msg_a])
|
| 137 |
-
btn_a.click(bot_response, a_inputs, [chatbot_a, msg_a])
|
| 138 |
chatbot_a.like(save_feedback, [model_a_name, chatbot_a], None)
|
| 139 |
|
| 140 |
b_inputs = [msg_b, chatbot_b, model_b_name, system_msg, max_t, temp, top_p_val, local_endpoint_b]
|
| 141 |
-
msg_b.submit(bot_response, b_inputs, [chatbot_b, msg_b])
|
| 142 |
-
btn_b.click(bot_response, b_inputs, [chatbot_b, msg_b])
|
| 143 |
chatbot_b.like(save_feedback, [model_b_name, chatbot_b], None)
|
| 144 |
|
| 145 |
|
|
@@ -153,4 +153,4 @@ with gr.Blocks() as demo:
|
|
| 153 |
)
|
| 154 |
|
| 155 |
if __name__ == "__main__":
|
| 156 |
-
demo.launch(share=True)
|
|
|
|
| 44 |
|
| 45 |
print(f"Feedback logged for {model_name}")
|
| 46 |
|
| 47 |
+
def model_inference(user_message, history, model_name, system_message, max_tokens, temperature, top_p, oauth_token: gr.OAuthToken | None, local_endpoint: str):
|
| 48 |
if not user_message or user_message.strip() == "":
|
| 49 |
yield history, ""
|
| 50 |
return
|
|
|
|
| 122 |
model_a_name = gr.Dropdown(MODELS, label="Model A", value=MODELS[0])
|
| 123 |
chatbot_a = gr.Chatbot(label="Model A Output")
|
| 124 |
msg_a = gr.Textbox(placeholder="Send message to Model A...", label="Model A Input")
|
| 125 |
+
# btn_a = gr.Button("Send to Model A")
|
| 126 |
|
| 127 |
# --- Model B ---
|
| 128 |
with gr.Column():
|
| 129 |
model_b_name = gr.Dropdown(MODELS, label="Model B", value=MODELS[1])
|
| 130 |
chatbot_b = gr.Chatbot(label="Model B Output")
|
| 131 |
msg_b = gr.Textbox(placeholder="Send message to Model B...", label="Model B Input")
|
| 132 |
+
# btn_b = gr.Button("Send to Model B")
|
| 133 |
|
| 134 |
# --- Bind Events ---
|
| 135 |
a_inputs = [msg_a, chatbot_a, model_a_name, system_msg, max_t, temp, top_p_val, local_endpoint_a]
|
| 136 |
+
msg_a.submit(model_inference, a_inputs, [chatbot_a, msg_a])
|
| 137 |
+
# btn_a.click(model_inference, a_inputs, [chatbot_a, msg_a])
|
| 138 |
chatbot_a.like(save_feedback, [model_a_name, chatbot_a], None)
|
| 139 |
|
| 140 |
b_inputs = [msg_b, chatbot_b, model_b_name, system_msg, max_t, temp, top_p_val, local_endpoint_b]
|
| 141 |
+
msg_b.submit(model_inference, b_inputs, [chatbot_b, msg_b])
|
| 142 |
+
# btn_b.click(model_inference, b_inputs, [chatbot_b, msg_b])
|
| 143 |
chatbot_b.like(save_feedback, [model_b_name, chatbot_b], None)
|
| 144 |
|
| 145 |
|
|
|
|
| 153 |
)
|
| 154 |
|
| 155 |
if __name__ == "__main__":
|
| 156 |
+
demo.launch(share=False)
|
requirements.txt
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
gradio
|