aidn committed on
Commit
6a3a98b
·
verified ·
1 Parent(s): b604ffd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +17 -21
app.py CHANGED
@@ -2,7 +2,6 @@ import gradio as gr
2
  from huggingface_hub import InferenceClient
3
  import os
4
 
5
- # Liste der Ratsmitglieder
6
  COUNCIL_MEMBERS = {
7
  "Der Architekt": "meta-llama/Llama-3.3-70B-Instruct:cheapest",
8
  "Der Glitch": "deepseek-ai/DeepSeek-V3:cheapest",
@@ -30,50 +29,47 @@ def ask_model(model_id, system_prompt, user_input):
30
  return f"🚨 Error: {str(e)}"
31
 
32
  def run_council(user_prompt, rounds):
33
- # Gradio 5 Format: Liste von [User, Bot] Tuples
34
  history = []
35
  current_context = f"Die ursprüngliche Frage lautet: {user_prompt}\n\n"
36
 
37
  for r in range(int(rounds)):
38
  round_header = f"--- RUNDE {r+1} ---"
39
- # In Gradio 5: [User-Teil, Bot-Teil]
40
- history.append([None, f"## {round_header}"])
41
  yield history
42
 
43
  round_notes = ""
44
  for name, model_id in COUNCIL_MEMBERS.items():
45
- system_msg = f"Du bist {name} in einem Expertenrat. Diskutiere kurz und prägnant."
46
- if r > 0:
47
- system_msg += " Beziehe dich auf die vorherigen Argumente deiner Kollegen."
48
-
49
  answer = ask_model(model_id, system_msg, current_context)
50
  formatted_answer = f"**{name}**: {answer}"
51
 
52
- history.append([None, formatted_answer])
53
  round_notes += f"\n{formatted_answer}\n"
54
  yield history
55
 
56
  current_context += f"\nZusammenfassung Runde {r+1}:{round_notes}"
57
 
58
- # Finale Einigung
59
- final_res = ask_model("mistralai/Mixtral-8x7B-Instruct-v0.1", "Du bist der Moderator.", current_context + "Fasse alles final zusammen.")
60
- history.append([None, "### 🏆 FINALE ENTSCHEIDUNG"])
61
- history.append([None, final_res])
62
  yield history
63
 
64
- # Gradio 5 UI
65
- with gr.Blocks(theme=gr.themes.Soft()) as demo:
66
  gr.Markdown("# 🏛️ Der Subraum-Stammtisch")
67
- gr.Markdown("> Status: Initialisiere Prompt-Plenum auf Frequenz 0x42...")
68
 
69
  with gr.Row():
70
- input_text = gr.Textbox(label="Input-Vektor (Deine Frage)", placeholder="Sollten wir zum Mars fliegen?")
71
- rounds_slider = gr.Slider(minimum=1, maximum=3, value=1, step=1, label="Diskussionszyklen")
72
 
73
  start_btn = gr.Button("Protokoll starten", variant="primary")
74
- # KEIN type="messages" hier für Gradio 5
75
- chatbot = gr.Chatbot(label="Council Protokoll", height=600)
 
76
 
77
  start_btn.click(run_council, inputs=[input_text, rounds_slider], outputs=[chatbot])
78
 
79
- demo.launch()
 
 
2
  from huggingface_hub import InferenceClient
3
  import os
4
 
 
5
  COUNCIL_MEMBERS = {
6
  "Der Architekt": "meta-llama/Llama-3.3-70B-Instruct:cheapest",
7
  "Der Glitch": "deepseek-ai/DeepSeek-V3:cheapest",
 
29
  return f"🚨 Error: {str(e)}"
30
 
31
def run_council(user_prompt, rounds):
    """Run a multi-round panel discussion between the council models.

    Generator: yields the growing chat history after every new message so the
    Gradio UI streams updates. History entries use the Gradio 6 "messages"
    format — a list of ``{"role": ..., "content": ...}`` dicts.
    """
    history = []
    current_context = f"Die ursprüngliche Frage lautet: {user_prompt}\n\n"

    for round_index in range(int(rounds)):
        # Announce the round with a markdown header bubble.
        header = f"--- RUNDE {round_index + 1} ---"
        history.append({"role": "assistant", "content": f"## {header}"})
        yield history

        notes = ""
        for member_name, member_model in COUNCIL_MEMBERS.items():
            persona = f"Du bist {member_name} in einem Expertenrat. Antworte kurz."
            reply = ask_model(member_model, persona, current_context)
            labelled = f"**{member_name}**: {reply}"
            history.append({"role": "assistant", "content": labelled})
            notes += f"\n{labelled}\n"
            yield history

        # Feed this round's answers back so later rounds can react to them.
        current_context += f"\nZusammenfassung Runde {round_index + 1}:{notes}"

    # Closing verdict from a fixed moderator model, appended once after all rounds.
    verdict = ask_model("mistralai/Mixtral-8x7B-Instruct-v0.1", "Moderator.", current_context)
    history.append({"role": "assistant", "content": "### 🏆 FINALE ENTSCHEIDUNG"})
    history.append({"role": "assistant", "content": verdict})
    yield history
58
 
59
# --- Gradio 6 UI setup ---
with gr.Blocks() as demo:
    gr.Markdown("# 🏛️ Der Subraum-Stammtisch")

    with gr.Row():
        input_text = gr.Textbox(label="Input-Vektor", placeholder="Frage...")
        rounds_slider = gr.Slider(minimum=1, maximum=3, value=1, step=1, label="Diskussionszyklen")

    start_btn = gr.Button("Protokoll starten", variant="primary")

    # type="messages" matches the dict-based history produced by run_council.
    chatbot = gr.Chatbot(label="Council Protokoll", height=600, type="messages")

    # run_council is a generator, so the chatbot streams each yielded history.
    start_btn.click(run_council, inputs=[input_text, rounds_slider], outputs=[chatbot])

# NOTE(review): the author says a deprecation warning asked for theme= in
# launch(); historically the theme is a gr.Blocks(theme=...) constructor
# argument — confirm against the installed Gradio version.
demo.launch(theme=gr.themes.Soft())