0notexist0 committed on
Commit
57d1911
·
verified ·
1 Parent(s): 74346c2

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +40 -56
app.py CHANGED
@@ -1,5 +1,5 @@
1
  """
2
- OpenRouter Chatbot – con storico conversazione + gruppi modello
3
  Run: gradio app.py
4
  """
5
 
@@ -17,22 +17,16 @@ OPENROUTER_API_KEY = os.getenv(
17
  )
18
 
19
  # ------------------------------------------------------------------
20
- # Utility per ottenere l’elenco modelli raggruppati
21
  # ------------------------------------------------------------------
22
  @functools.lru_cache(maxsize=1)
23
  def fetch_models_grouped() -> dict:
24
- """
25
- Restituisce i modelli raggruppati tra 'con ragionamento' e 'senza ragionamento'.
26
- """
27
  headers = {"Authorization": f"Bearer {OPENROUTER_API_KEY}"}
28
  try:
29
- resp = requests.get(
30
- "https://openrouter.ai/api/v1/models",
31
- headers=headers,
32
- timeout=15
33
- )
34
  resp.raise_for_status()
35
  data = resp.json()
 
36
  reasoning_models = []
37
  casual_models = []
38
 
@@ -44,18 +38,17 @@ def fetch_models_grouped() -> dict:
44
  casual_models.append(m["id"])
45
 
46
  return {
47
- "🧠 Con Ragionamento": sorted(reasoning_models),
48
- "⚡️ Generici / Casual": sorted(casual_models)
49
  }
50
  except Exception as e:
51
- gr.Warning(f"Impossibile ottenere modelli: {e}")
52
  return {
53
- "🧠 Con Ragionamento": ["openai/gpt-4-turbo"],
54
- "⚡️ Generici / Casual": []
55
  }
56
 
57
  # ------------------------------------------------------------------
58
- # Funzione per formattare la cronologia
59
  # ------------------------------------------------------------------
60
  def format_history(history: list) -> str:
61
  output = ""
@@ -67,14 +60,9 @@ def format_history(history: list) -> str:
67
  return output.strip()
68
 
69
  # ------------------------------------------------------------------
70
- # Funzione di chiamata al modello
71
  # ------------------------------------------------------------------
72
- def chat_with_openrouter(prompt: str, model: str, history: list):
73
- if model.startswith("🔷"):
74
- msg = "⚠️ Seleziona un **modello valido**, non un'intestazione di gruppo."
75
- history.append({"role": "assistant", "content": msg})
76
- return history, format_history(history)
77
-
78
  headers = {
79
  "Authorization": f"Bearer {OPENROUTER_API_KEY}",
80
  "Content-Type": "application/json"
@@ -83,19 +71,14 @@ def chat_with_openrouter(prompt: str, model: str, history: list):
83
  history.append({"role": "user", "content": prompt})
84
 
85
  payload = {
86
- "model": model,
87
  "messages": history,
88
  "max_tokens": 4096,
89
  "temperature": 0.7,
90
  }
91
 
92
  try:
93
- resp = requests.post(
94
- "https://openrouter.ai/api/v1/chat/completions",
95
- headers=headers,
96
- json=payload,
97
- timeout=60
98
- )
99
  resp.raise_for_status()
100
  reply = resp.json()["choices"][0]["message"]["content"]
101
  history.append({"role": "assistant", "content": reply})
@@ -106,31 +89,26 @@ def chat_with_openrouter(prompt: str, model: str, history: list):
106
  return history, format_history(history)
107
 
108
  # ------------------------------------------------------------------
109
- # Interfaccia Gradio con dropdown raggruppato
110
  # ------------------------------------------------------------------
111
  def build_interface():
112
- models_grouped = fetch_models_grouped()
113
 
114
- # Crea lista per dropdown con intestazioni simulate
115
- grouped_choices = []
116
- first_valid_model = None
117
 
118
- for group_name, group_models in models_grouped.items():
119
- grouped_choices.append(f"🔷 {group_name}")
120
- grouped_choices.extend(group_models)
121
- if not first_valid_model and group_models:
122
- first_valid_model = group_models[0]
123
-
124
- with gr.Blocks(title="NotExistChatter – Chat con memoria") as demo:
125
- gr.Markdown("## 🤖 project Adam – Chat dinamica")
126
- gr.Markdown("Usa **qualsiasi modello** di OpenRouter con cronologia visibile.")
127
 
128
  with gr.Row():
129
- model_dropdown = gr.Dropdown(
130
- choices=grouped_choices,
131
- value=first_valid_model,
132
- label="Modello",
133
- allow_custom_value=False,
 
 
 
134
  interactive=True
135
  )
136
 
@@ -151,19 +129,25 @@ def build_interface():
151
  )
152
 
153
  send_btn = gr.Button("Invia", variant="primary")
154
-
155
- # Stato della cronologia
156
  chat_history = gr.State([])
157
 
 
 
 
 
 
 
 
 
158
  send_btn.click(
159
- fn=chat_with_openrouter,
160
- inputs=[prompt_box, model_dropdown, chat_history],
161
  outputs=[chat_history, output_box]
162
  )
163
 
164
  prompt_box.submit(
165
- fn=chat_with_openrouter,
166
- inputs=[prompt_box, model_dropdown, chat_history],
167
  outputs=[chat_history, output_box]
168
  )
169
 
@@ -176,7 +160,7 @@ def build_interface():
176
  return demo
177
 
178
  # ------------------------------------------------------------------
179
- # Avvio app
180
  # ------------------------------------------------------------------
181
  if __name__ == "__main__":
182
  build_interface().launch()
 
1
  """
2
+ OpenRouter Chatbot – con storico conversazione + modelli separati
3
  Run: gradio app.py
4
  """
5
 
 
17
  )
18
 
19
  # ------------------------------------------------------------------
20
+ # Ottieni modelli raggruppati
21
  # ------------------------------------------------------------------
22
  @functools.lru_cache(maxsize=1)
23
  def fetch_models_grouped() -> dict:
 
 
 
24
  headers = {"Authorization": f"Bearer {OPENROUTER_API_KEY}"}
25
  try:
26
+ resp = requests.get("https://openrouter.ai/api/v1/models", headers=headers, timeout=15)
 
 
 
 
27
  resp.raise_for_status()
28
  data = resp.json()
29
+
30
  reasoning_models = []
31
  casual_models = []
32
 
 
38
  casual_models.append(m["id"])
39
 
40
  return {
41
+ "reasoning": sorted(reasoning_models),
42
+ "casual": sorted(casual_models)
43
  }
44
  except Exception as e:
 
45
  return {
46
+ "reasoning": ["openai/gpt-4-turbo"],
47
+ "casual": []
48
  }
49
 
50
  # ------------------------------------------------------------------
51
+ # Format cronologia
52
  # ------------------------------------------------------------------
53
  def format_history(history: list) -> str:
54
  output = ""
 
60
  return output.strip()
61
 
62
  # ------------------------------------------------------------------
63
+ # Chat
64
  # ------------------------------------------------------------------
65
+ def chat_with_openrouter(prompt: str, selected_model: str, history: list):
 
 
 
 
 
66
  headers = {
67
  "Authorization": f"Bearer {OPENROUTER_API_KEY}",
68
  "Content-Type": "application/json"
 
71
  history.append({"role": "user", "content": prompt})
72
 
73
  payload = {
74
+ "model": selected_model,
75
  "messages": history,
76
  "max_tokens": 4096,
77
  "temperature": 0.7,
78
  }
79
 
80
  try:
81
+ resp = requests.post("https://openrouter.ai/api/v1/chat/completions", headers=headers, json=payload, timeout=60)
 
 
 
 
 
82
  resp.raise_for_status()
83
  reply = resp.json()["choices"][0]["message"]["content"]
84
  history.append({"role": "assistant", "content": reply})
 
89
  return history, format_history(history)
90
 
91
  # ------------------------------------------------------------------
92
+ # Interfaccia con dropdown separati
93
  # ------------------------------------------------------------------
94
  def build_interface():
95
+ grouped_models = fetch_models_grouped()
96
 
97
+ default_model = grouped_models["reasoning"][0] if grouped_models["reasoning"] else ""
 
 
98
 
99
+ with gr.Blocks(title="NotExistChatter Chat con modelli") as demo:
100
+ gr.Markdown("## 🤖 Project Adam – Chat dinamica con modelli OpenRouter")
101
+ gr.Markdown("Seleziona un modello da uno dei due gruppi.")
 
 
 
 
 
 
102
 
103
  with gr.Row():
104
+ reasoning_dropdown = gr.Dropdown(
105
+ choices=grouped_models["reasoning"],
106
+ label="🧠 Modelli con Ragionamento",
107
+ interactive=True
108
+ )
109
+ casual_dropdown = gr.Dropdown(
110
+ choices=grouped_models["casual"],
111
+ label="⚡️ Modelli Generici / Casual",
112
  interactive=True
113
  )
114
 
 
129
  )
130
 
131
  send_btn = gr.Button("Invia", variant="primary")
 
 
132
  chat_history = gr.State([])
133
 
134
+ # Seleziona modello valido (da uno dei due)
135
+ def resolve_model(prompt, reasoning_model, casual_model, history):
136
+ model = reasoning_model or casual_model
137
+ if not model:
138
+ history.append({"role": "assistant", "content": "⚠️ Seleziona almeno un modello."})
139
+ return history, format_history(history)
140
+ return chat_with_openrouter(prompt, model, history)
141
+
142
  send_btn.click(
143
+ fn=resolve_model,
144
+ inputs=[prompt_box, reasoning_dropdown, casual_dropdown, chat_history],
145
  outputs=[chat_history, output_box]
146
  )
147
 
148
  prompt_box.submit(
149
+ fn=resolve_model,
150
+ inputs=[prompt_box, reasoning_dropdown, casual_dropdown, chat_history],
151
  outputs=[chat_history, output_box]
152
  )
153
 
 
160
  return demo
161
 
162
  # ------------------------------------------------------------------
163
+ # Avvio
164
  # ------------------------------------------------------------------
165
  if __name__ == "__main__":
166
  build_interface().launch()