Madras1 committed on
Commit
cbf60d0
·
verified ·
1 Parent(s): 3d02fef

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -23
app.py CHANGED
@@ -52,14 +52,12 @@ def run_local_h200(messages):
52
  return local_tokenizer.decode(outputs[0][inputs.input_ids.shape[1]:], skip_special_tokens=True)
53
 
54
  def run_groq(messages, model_id):
55
- # Groq (Texto puro)
56
  for m in messages:
57
  if isinstance(m['content'], list):
58
  return "⚠️ Modelos Groq não veem imagens. Use Pixtral ou Gemini."
59
 
60
  if not groq_client: return "❌ Erro: Configure GROQ_API_KEY."
61
 
62
- # Limpa formato para OpenAI
63
  clean_msgs = []
64
  for m in messages:
65
  clean_msgs.append({"role": m['role'], "content": m['content']})
@@ -87,7 +85,6 @@ def run_mistral(messages, model_id):
87
  if item['type'] == 'text':
88
  new_content.append({"type": "text", "text": item['text']})
89
  elif item['type'] == 'image_url':
90
- # O router já mandou como data url ou path, vamos garantir
91
  url = item['image_url']['url']
92
  if url.startswith("data:image"):
93
  new_content.append({"type": "image_url", "image_url": url})
@@ -136,25 +133,19 @@ def run_gemini(messages, model_id):
136
  return response.text
137
  except Exception as e: return f"❌ Gemini Error ({model_id}): {e}"
138
 
139
- # --- ROTEADOR INTELIGENTE (Compatível Gradio 4) ---
140
  def router(message, history, model_selector):
141
- # Em Gradio 4 com multimodal=True, 'message' é um dict: {'text': '...', 'files': ['path']}
142
-
143
- # 1. Reconstrói histórico padronizado
144
  formatted_history = []
145
  for user_msg, bot_msg in history:
146
- # User (pode ter imagem antiga, mas Gradio 4 passa files separados no histórico visual)
147
- # Simplificando: Assumimos texto no histórico visual do Gradio 4
148
- if isinstance(user_msg, dict): # Caso raro no history
149
  formatted_history.append({"role": "user", "content": user_msg.get("text")})
150
  else:
151
  formatted_history.append({"role": "user", "content": str(user_msg)})
152
-
153
- # Bot
154
  if bot_msg:
155
  formatted_history.append({"role": "assistant", "content": str(bot_msg)})
156
 
157
- # 2. Formata a mensagem ATUAL (Onde a imagem está agora)
158
  current_content = []
159
  text_input = message.get("text", "")
160
  files = message.get("files", [])
@@ -163,18 +154,16 @@ def router(message, history, model_selector):
163
  current_content.append({"type": "text", "text": text_input})
164
 
165
  for file_path in files:
166
- # Adiciona imagem para processamento
167
  current_content.append({"type": "image_url", "image_url": {"url": file_path}})
168
 
169
- # Se não tem imagem, manda string simples (melhor compatibilidade)
170
  if not files:
171
  formatted_history.append({"role": "user", "content": text_input})
172
  else:
173
  formatted_history.append({"role": "user", "content": current_content})
174
 
175
- # 3. Roteamento
176
  if "Gemini" in model_selector:
177
- tid = "gemini-1.5-flash" # Default seguro
178
  if "3.0" in model_selector: tid = "gemini-3.0-pro-preview"
179
  elif "2.5 Pro" in model_selector: tid = "gemini-2.5-pro"
180
  elif "2.5 Flash" in model_selector: tid = "gemini-2.5-flash"
@@ -198,15 +187,15 @@ def router(message, history, model_selector):
198
  return "Modelo desconhecido."
199
 
200
  # --- INTERFACE ---
201
- with gr.Blocks(theme=gr.themes.Soft()) as demo:
202
- gr.Markdown("# 🔀 APIDOST V7: Vision Update (Stable)")
 
203
 
204
  with gr.Row():
205
  model_dropdown = gr.Dropdown(
206
  choices=[
207
  "✨ Google: Gemini 3.0 Pro (Experimental)",
208
  "✨ Google: Gemini 2.5 Flash",
209
- "✨ Google: Gemini 2.0 Flash (Exp)",
210
  "☁️ Groq: Llama 3.3 70B",
211
  "🇫🇷 Mistral: Pixtral Large (Vision) 🖼️",
212
  "🇫🇷 Mistral: Large 2512 (Dez/25)",
@@ -217,13 +206,11 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
217
  interactive=True
218
  )
219
 
220
- # REMOVIDO type="messages" (Isso corrige o erro!)
221
  chat = gr.ChatInterface(
222
  fn=router,
223
  additional_inputs=[model_dropdown],
224
- multimodal=True, # Isso ativa o upload de imagens no Gradio 4
225
  )
226
 
227
  if __name__ == "__main__":
228
- # Configuração de rede padrão
229
  demo.launch(server_name="0.0.0.0", server_port=7860)
 
52
  return local_tokenizer.decode(outputs[0][inputs.input_ids.shape[1]:], skip_special_tokens=True)
53
 
54
  def run_groq(messages, model_id):
 
55
  for m in messages:
56
  if isinstance(m['content'], list):
57
  return "⚠️ Modelos Groq não veem imagens. Use Pixtral ou Gemini."
58
 
59
  if not groq_client: return "❌ Erro: Configure GROQ_API_KEY."
60
 
 
61
  clean_msgs = []
62
  for m in messages:
63
  clean_msgs.append({"role": m['role'], "content": m['content']})
 
85
  if item['type'] == 'text':
86
  new_content.append({"type": "text", "text": item['text']})
87
  elif item['type'] == 'image_url':
 
88
  url = item['image_url']['url']
89
  if url.startswith("data:image"):
90
  new_content.append({"type": "image_url", "image_url": url})
 
133
  return response.text
134
  except Exception as e: return f"❌ Gemini Error ({model_id}): {e}"
135
 
136
+ # --- ROTEADOR ---
137
  def router(message, history, model_selector):
138
+ # Formatação de histórico blindada
 
 
139
  formatted_history = []
140
  for user_msg, bot_msg in history:
141
+ if isinstance(user_msg, dict):
 
 
142
  formatted_history.append({"role": "user", "content": user_msg.get("text")})
143
  else:
144
  formatted_history.append({"role": "user", "content": str(user_msg)})
 
 
145
  if bot_msg:
146
  formatted_history.append({"role": "assistant", "content": str(bot_msg)})
147
 
148
+ # Formatação da mensagem atual
149
  current_content = []
150
  text_input = message.get("text", "")
151
  files = message.get("files", [])
 
154
  current_content.append({"type": "text", "text": text_input})
155
 
156
  for file_path in files:
 
157
  current_content.append({"type": "image_url", "image_url": {"url": file_path}})
158
 
 
159
  if not files:
160
  formatted_history.append({"role": "user", "content": text_input})
161
  else:
162
  formatted_history.append({"role": "user", "content": current_content})
163
 
164
+ # Seleção de Modelo
165
  if "Gemini" in model_selector:
166
+ tid = "gemini-1.5-flash"
167
  if "3.0" in model_selector: tid = "gemini-3.0-pro-preview"
168
  elif "2.5 Pro" in model_selector: tid = "gemini-2.5-pro"
169
  elif "2.5 Flash" in model_selector: tid = "gemini-2.5-flash"
 
187
  return "Modelo desconhecido."
188
 
189
  # --- INTERFACE ---
190
+ # AQUI ESTAVA O ERRO: Removi 'theme=gr.themes.Soft()'
191
+ with gr.Blocks() as demo:
192
+ gr.Markdown("# 🔀 APIDOST V7 (Estável)")
193
 
194
  with gr.Row():
195
  model_dropdown = gr.Dropdown(
196
  choices=[
197
  "✨ Google: Gemini 3.0 Pro (Experimental)",
198
  "✨ Google: Gemini 2.5 Flash",
 
199
  "☁️ Groq: Llama 3.3 70B",
200
  "🇫🇷 Mistral: Pixtral Large (Vision) 🖼️",
201
  "🇫🇷 Mistral: Large 2512 (Dez/25)",
 
206
  interactive=True
207
  )
208
 
 
209
  chat = gr.ChatInterface(
210
  fn=router,
211
  additional_inputs=[model_dropdown],
212
+ multimodal=True,
213
  )
214
 
215
  if __name__ == "__main__":
 
216
  demo.launch(server_name="0.0.0.0", server_port=7860)