Amossofer committed on
Commit
e5426fd
·
1 Parent(s): 592da02
Files changed (1) hide show
  1. app.py +12 -11
app.py CHANGED
@@ -1,18 +1,19 @@
1
  import gradio as gr
2
 
 
3
def generate(sysA, sysB, wa, wb, user_input):
    """Placeholder "blend": echo every input plus a canned response line.

    Stand-in for a real model call — the output simply lists the two system
    prompts, their weights, and the user message, followed by a fixed
    response marker.
    """
    lines = [
        f"System Prompt A: {sysA}",
        f"System Prompt B: {sysB}",
        f"Weight A: {wa}",
        f"Weight B: {wb}",
        f"User message: {user_input}",
        "",  # blank line separating the echo block from the response
        "=== Response ===",
        "Blended response based on weights.",
    ]
    return "\n".join(lines)
15
 
 
16
  with gr.Blocks() as demo:
17
  gr.Markdown("# Multi-System Prompt Chat Demo")
18
 
@@ -24,7 +25,7 @@ with gr.Blocks() as demo:
24
  wa = gr.Slider(-5.0, 5.0, value=1.0, step=0.1, label="Weight wA")
25
  wb = gr.Slider(-5.0, 5.0, value=1.0, step=0.1, label="Weight wB")
26
 
27
- user_input = gr.Textbox(label="User Message", placeholder="Type your message here...")
28
  output = gr.Textbox(label="Model Response", lines=10)
29
 
30
  submit_btn = gr.Button("Send")
 
1
  import gradio as gr
2
 
3
+
4
def generate(sysA, sysB, wa, wb, user_input):
    """Blend two system prompts by weight and echo the result.

    Each weight is rounded to the nearest integer (never below zero) and
    used as a repetition count for its system prompt; the repeated prompts
    and the user message form the "blended" prompt. No model is called —
    the function just returns the blended prompt prefixed with a header.
    """
    # Rounded repetition counts; negative weights contribute nothing.
    reps_a = max(0, int(round(wa)))
    reps_b = max(0, int(round(wb)))

    weighted = (sysA + " ") * reps_a + (sysB + " ") * reps_b
    blended_prompt = weighted + "\nUser: " + user_input

    # Demo stub: echo the blended prompt instead of querying a model.
    return f"Blended prompt sent to model:\n{blended_prompt}"
15
 
16
+
17
  with gr.Blocks() as demo:
18
  gr.Markdown("# Multi-System Prompt Chat Demo")
19
 
 
25
  wa = gr.Slider(-5.0, 5.0, value=1.0, step=0.1, label="Weight wA")
26
  wb = gr.Slider(-5.0, 5.0, value=1.0, step=0.1, label="Weight wB")
27
 
28
+ user_input = gr.Textbox(label="User Message", lines=2)
29
  output = gr.Textbox(label="Model Response", lines=10)
30
 
31
  submit_btn = gr.Button("Send")