Madras1 committed on
Commit
03ddebd
·
verified ·
1 Parent(s): 9eb54fe

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +7 -10
app.py CHANGED
@@ -140,7 +140,7 @@ def router(message, history, model_selector, request: gr.Request):
140
  return f"⛔ LIMITADO: Aguarde para enviar mais mensagens."
141
 
142
  formatted_history = []
143
- # Proteção contra history=None ou formatos estranhos
144
  if history:
145
  for turn in history:
146
  if isinstance(turn, dict): formatted_history.append(turn)
@@ -191,7 +191,6 @@ def router(message, history, model_selector, request: gr.Request):
191
  with gr.Blocks() as demo:
192
  gr.Markdown("# 🔀 APIDOST v6 (Endpoint Fixed)")
193
 
194
- # Lista de Modelos Atualizada
195
  models_list = [
196
  "✨ Google: Gemini 3.0 Pro (Experimental)",
197
  "✨ Google: Gemini 2.5 Pro",
@@ -210,25 +209,23 @@ with gr.Blocks() as demo:
210
  with gr.Row():
211
  model_dropdown = gr.Dropdown(choices=models_list, value=models_list[-1], label="Cérebro", interactive=True)
212
 
213
- # 1. Interface de Chat VISUAL (para você testar no HuggingFace)
214
  chat = gr.ChatInterface(
215
  fn=router,
216
  additional_inputs=[model_dropdown],
217
  multimodal=True,
218
  )
219
 
220
- # 2. PONTE DE API INVISÍVEL (A SOLUÇÃO DO SEU PROBLEMA)
221
- # Isso cria explicitamente o endpoint "/chat" que o seu JavaScript está procurando.
222
- # Ele aceita 'message' (multimodal), 'history' (estado) e 'model_selector' (dropdown).
223
  api_bridge = gr.Interface(
224
  fn=router,
225
  inputs=[
226
- gr.MultimodalTextbox(label="message"), # O JS manda {text:..., files:...}
227
- gr.State(value=[], label="history"), # O JS pode mandar lista vazia []
228
- gr.Dropdown(choices=models_list, label="model_selector") # O JS manda a string do modelo
229
  ],
230
  outputs=[gr.Textbox(label="response")],
231
- api_name="chat" # <--- AQUI! Isso garante que activeClient.predict("/chat") funcione.
232
  )
233
 
234
  if __name__ == "__main__":
 
140
  return f"⛔ LIMITADO: Aguarde para enviar mais mensagens."
141
 
142
  formatted_history = []
143
+ # Proteção contra history=None
144
  if history:
145
  for turn in history:
146
  if isinstance(turn, dict): formatted_history.append(turn)
 
191
  with gr.Blocks() as demo:
192
  gr.Markdown("# 🔀 APIDOST v6 (Endpoint Fixed)")
193
 
 
194
  models_list = [
195
  "✨ Google: Gemini 3.0 Pro (Experimental)",
196
  "✨ Google: Gemini 2.5 Pro",
 
209
  with gr.Row():
210
  model_dropdown = gr.Dropdown(choices=models_list, value=models_list[-1], label="Cérebro", interactive=True)
211
 
 
212
  chat = gr.ChatInterface(
213
  fn=router,
214
  additional_inputs=[model_dropdown],
215
  multimodal=True,
216
  )
217
 
218
+ # AQUI ESTAVA O ERRO (CORRIGIDO)
219
+ # Removi 'label="history"' do gr.State
 
220
  api_bridge = gr.Interface(
221
  fn=router,
222
  inputs=[
223
+ gr.MultimodalTextbox(label="message"),
224
+ gr.State(value=[]), # <--- FIX: Sem label
225
+ gr.Dropdown(choices=models_list, label="model_selector")
226
  ],
227
  outputs=[gr.Textbox(label="response")],
228
+ api_name="chat"
229
  )
230
 
231
  if __name__ == "__main__":