aidn committed on
Commit
55e706d
·
verified ·
1 Parent(s): 3b1ed46

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -4
app.py CHANGED
@@ -2,7 +2,7 @@ import gradio as gr
2
  from huggingface_hub import InferenceClient
3
  import os
4
 
5
- # Die Ratsmitglieder - Sicher ohne Provider-Suffixe für maximale API-Kompatibilität
6
  COUNCIL_MEMBERS = {
7
  "Der Architekt": "meta-llama/Llama-3.3-70B-Instruct",
8
  "Der Glitch": "deepseek-ai/DeepSeek-V3",
@@ -10,7 +10,7 @@ COUNCIL_MEMBERS = {
10
  }
11
 
12
  # Ein garantierter Chat-Model-Moderator
13
- MODERATOR_MODEL = "meta-llama/Meta-Llama-3-8B-Instruct"
14
 
15
  client = InferenceClient(token=os.getenv("HF_TOKEN"))
16
 
@@ -27,7 +27,6 @@ def ask_model(model_id, system_prompt, user_input):
27
  max_tokens=500,
28
  stream=True
29
  ):
30
- # DER FIX: Wir prüfen, ob der Provider-Chunk wirklich Text enthält, bevor wir ihn lesen
31
  if hasattr(chunk, "choices") and chunk.choices and len(chunk.choices) > 0:
32
  response += chunk.choices[0].delta.content or ""
33
 
@@ -42,7 +41,7 @@ def run_council(user_prompt, rounds):
42
  current_context = f"Die zu diskutierende Frage lautet: {user_prompt}\n\n"
43
 
44
  for r in range(int(rounds)):
45
- round_header = f"### 🛰️ ZYKLUS {r+1} 🛰️"
46
  history.append({"role": "assistant", "content": round_header})
47
  yield history
48
 
 
2
  from huggingface_hub import InferenceClient
3
  import os
4
 
5
+ # Die Ratsmitglieder
6
  COUNCIL_MEMBERS = {
7
  "Der Architekt": "meta-llama/Llama-3.3-70B-Instruct",
8
  "Der Glitch": "deepseek-ai/DeepSeek-V3",
 
10
  }
11
 
12
  # Ein garantierter Chat-Model-Moderator
13
+ MODERATOR_MODEL = "openai/gpt-oss-120b:novita"
14
 
15
  client = InferenceClient(token=os.getenv("HF_TOKEN"))
16
 
 
27
  max_tokens=500,
28
  stream=True
29
  ):
 
30
  if hasattr(chunk, "choices") and chunk.choices and len(chunk.choices) > 0:
31
  response += chunk.choices[0].delta.content or ""
32
 
 
41
  current_context = f"Die zu diskutierende Frage lautet: {user_prompt}\n\n"
42
 
43
  for r in range(int(rounds)):
44
+ round_header = f"### ZYKLUS {r+1}"
45
  history.append({"role": "assistant", "content": round_header})
46
  yield history
47