0notexist0 committed on
Commit
74346c2
·
verified ·
1 Parent(s): a851b42

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +42 -12
app.py CHANGED
@@ -1,5 +1,5 @@
1
  """
2
- OpenRouter Chatbot – con storico conversazione
3
  Run: gradio app.py
4
  """
5
 
@@ -17,12 +17,12 @@ OPENROUTER_API_KEY = os.getenv(
17
  )
18
 
19
  # ------------------------------------------------------------------
20
- # Utility per ottenere l’elenco modelli
21
  # ------------------------------------------------------------------
22
  @functools.lru_cache(maxsize=1)
23
- def fetch_models() -> list[str]:
24
  """
25
- Restituisce la lista completa di modelli offerti da OpenRouter.
26
  """
27
  headers = {"Authorization": f"Bearer {OPENROUTER_API_KEY}"}
28
  try:
@@ -33,11 +33,26 @@ def fetch_models() -> list[str]:
33
  )
34
  resp.raise_for_status()
35
  data = resp.json()
36
- models = sorted(m["id"] for m in data["data"])
37
- return models
 
 
 
 
 
 
 
 
 
 
 
 
38
  except Exception as e:
39
- gr.Warning(f"Impossibile scaricare l’elenco modelli: {e}")
40
- return ["openai/gpt-4-turbo"] # fallback statico
 
 
 
41
 
42
  # ------------------------------------------------------------------
43
  # Funzione per formattare la cronologia
@@ -55,6 +70,11 @@ def format_history(history: list) -> str:
55
  # Funzione di chiamata al modello
56
  # ------------------------------------------------------------------
57
  def chat_with_openrouter(prompt: str, model: str, history: list):
 
 
 
 
 
58
  headers = {
59
  "Authorization": f"Bearer {OPENROUTER_API_KEY}",
60
  "Content-Type": "application/json"
@@ -86,10 +106,20 @@ def chat_with_openrouter(prompt: str, model: str, history: list):
86
  return history, format_history(history)
87
 
88
  # ------------------------------------------------------------------
89
- # Interfaccia Gradio con memoria
90
  # ------------------------------------------------------------------
91
  def build_interface():
92
- models = fetch_models()
 
 
 
 
 
 
 
 
 
 
93
 
94
  with gr.Blocks(title="NotExistChatter – Chat con memoria") as demo:
95
  gr.Markdown("## 🤖 project Adam – Chat dinamica")
@@ -97,8 +127,8 @@ def build_interface():
97
 
98
  with gr.Row():
99
  model_dropdown = gr.Dropdown(
100
- choices=models,
101
- value=models[0] if models else None,
102
  label="Modello",
103
  allow_custom_value=False,
104
  interactive=True
 
1
  """
2
+ OpenRouter Chatbot – con storico conversazione + gruppi modello
3
  Run: gradio app.py
4
  """
5
 
 
17
  )
18
 
19
  # ------------------------------------------------------------------
20
+ # Utility per ottenere l’elenco modelli raggruppati
21
  # ------------------------------------------------------------------
22
  @functools.lru_cache(maxsize=1)
23
+ def fetch_models_grouped() -> dict:
24
  """
25
+ Restituisce i modelli raggruppati tra 'con ragionamento' e 'senza ragionamento'.
26
  """
27
  headers = {"Authorization": f"Bearer {OPENROUTER_API_KEY}"}
28
  try:
 
33
  )
34
  resp.raise_for_status()
35
  data = resp.json()
36
+ reasoning_models = []
37
+ casual_models = []
38
+
39
+ for m in data["data"]:
40
+ model_id = m["id"].lower()
41
+ if any(key in model_id for key in ["gpt", "claude", "llama", "mistral", "mixtral", "gemini", "command-r", "qwen"]):
42
+ reasoning_models.append(m["id"])
43
+ else:
44
+ casual_models.append(m["id"])
45
+
46
+ return {
47
+ "🧠 Con Ragionamento": sorted(reasoning_models),
48
+ "⚡️ Generici / Casual": sorted(casual_models)
49
+ }
50
  except Exception as e:
51
+ gr.Warning(f"Impossibile ottenere modelli: {e}")
52
+ return {
53
+ "🧠 Con Ragionamento": ["openai/gpt-4-turbo"],
54
+ "⚡️ Generici / Casual": []
55
+ }
56
 
57
  # ------------------------------------------------------------------
58
  # Funzione per formattare la cronologia
 
70
  # Funzione di chiamata al modello
71
  # ------------------------------------------------------------------
72
  def chat_with_openrouter(prompt: str, model: str, history: list):
73
+ if model.startswith("🔷"):
74
+ msg = "⚠️ Seleziona un **modello valido**, non un'intestazione di gruppo."
75
+ history.append({"role": "assistant", "content": msg})
76
+ return history, format_history(history)
77
+
78
  headers = {
79
  "Authorization": f"Bearer {OPENROUTER_API_KEY}",
80
  "Content-Type": "application/json"
 
106
  return history, format_history(history)
107
 
108
  # ------------------------------------------------------------------
109
+ # Interfaccia Gradio con dropdown raggruppato
110
  # ------------------------------------------------------------------
111
  def build_interface():
112
+ models_grouped = fetch_models_grouped()
113
+
114
+ # Crea lista per dropdown con intestazioni simulate
115
+ grouped_choices = []
116
+ first_valid_model = None
117
+
118
+ for group_name, group_models in models_grouped.items():
119
+ grouped_choices.append(f"🔷 {group_name}")
120
+ grouped_choices.extend(group_models)
121
+ if not first_valid_model and group_models:
122
+ first_valid_model = group_models[0]
123
 
124
  with gr.Blocks(title="NotExistChatter – Chat con memoria") as demo:
125
  gr.Markdown("## 🤖 project Adam – Chat dinamica")
 
127
 
128
  with gr.Row():
129
  model_dropdown = gr.Dropdown(
130
+ choices=grouped_choices,
131
+ value=first_valid_model,
132
  label="Modello",
133
  allow_custom_value=False,
134
  interactive=True