jarpalucas committed on
Commit
129bb0f
·
verified ·
1 Parent(s): 2bc8c01

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +281 -303
app.py CHANGED
@@ -6,19 +6,14 @@ import gradio as gr
6
  import json
7
  import pickle
8
  import os
9
- from flask import Flask, request, jsonify
10
- from flask_cors import CORS
11
 
12
  os.environ['TF_ENABLE_ONEDNN_OPTS'] = '0'
13
  os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
14
 
15
  print("🚀 Iniciando Eco Finder API...")
16
 
17
- # Configuración Flask
18
- app = Flask(__name__)
19
- CORS(app) # Habilitar CORS para frontend
20
-
21
- # Intentar importar TensorFlow
22
  try:
23
  import tensorflow as tf
24
  print(f"✅ TensorFlow version: {tf.__version__}")
@@ -59,22 +54,21 @@ def load_resources():
59
 
60
  except Exception as e:
61
  print(f"❌ Error cargando recursos: {str(e)}")
62
- return None, None, None, create_default_feature_stats()
63
-
64
- def create_default_feature_stats():
65
- return {
66
- "feature_columns": [
67
- "koi_period", "koi_duration", "koi_depth", "koi_prad",
68
- "koi_srad", "koi_teq", "koi_steff", "koi_slogg",
69
- "koi_smet", "koi_kepmag", "koi_model_snr", "koi_num_transits"
70
- ],
71
- "train_medians": {
72
- "koi_period": 10.0, "koi_duration": 5.0, "koi_depth": 1000.0,
73
- "koi_prad": 2.0, "koi_srad": 1.0, "koi_teq": 1000.0,
74
- "koi_steff": 6000.0, "koi_slogg": 4.5, "koi_smet": 0.0,
75
- "koi_kepmag": 12.0, "koi_model_snr": 10.0, "koi_num_transits": 3.0
76
  }
77
- }
78
 
79
  # Cargar recursos
80
  model, scaler, label_encoder, feature_stats = load_resources()
@@ -83,26 +77,24 @@ train_medians = feature_stats.get("train_medians", {})
83
 
84
  BASE = "https://exoplanetarchive.ipac.caltech.edu/cgi-bin/nstedAPI/nph-nstedAPI"
85
 
86
- # ==================== ENDPOINTS API REST ====================
87
 
88
- @app.route('/api/health', methods=['GET'])
89
- def health_check():
90
- """Endpoint de salud de la API"""
91
- return jsonify({
92
- "status": "healthy",
93
- "model_loaded": model is not None,
94
- "features": feature_columns
95
- })
 
 
96
 
97
- @app.route('/api/predict', methods=['POST'])
98
- def predict_single():
99
- """Endpoint para predecir un solo objeto"""
100
  try:
101
- data = request.get_json()
102
-
103
- # Validar datos de entrada
104
- if not data:
105
- return jsonify({"error": "No se proporcionaron datos"}), 400
106
 
107
  # Crear array de características
108
  input_features = []
@@ -114,18 +106,16 @@ def predict_single():
114
  input_array = np.array([input_features])
115
  X_input = scaler.transform(input_array)
116
 
117
- if TENSORFLOW_AVAILABLE and model is not None:
118
  probs = model.predict(X_input, verbose=0)[0]
119
  else:
120
- # Fallback a probabilidades aleatorias
121
  probs = np.random.dirichlet(np.ones(3), size=1)[0]
122
 
123
  # Obtener predicción
124
  pred_idx = np.argmax(probs)
125
  pred_label = label_encoder.inverse_transform([pred_idx])[0]
126
 
127
- # Preparar respuesta
128
- response = {
129
  "prediction": pred_label,
130
  "probabilities": {
131
  "CONFIRMED": float(probs[0]),
@@ -135,76 +125,35 @@ def predict_single():
135
  "input_features": dict(zip(feature_columns, input_features))
136
  }
137
 
138
- return jsonify(response)
139
-
140
  except Exception as e:
141
- return jsonify({"error": str(e)}), 500
142
 
143
- @app.route('/api/predict-batch', methods=['POST'])
144
- def predict_batch():
145
- """Endpoint para predecir múltiples objetos"""
146
  try:
147
- data = request.get_json()
148
-
149
- if not data or 'objects' not in data:
150
- return jsonify({"error": "Se requiere array 'objects' en el JSON"}), 400
151
-
152
- predictions = []
153
- for obj in data['objects']:
154
- # Procesar cada objeto
155
- input_features = []
156
- for feature in feature_columns:
157
- value = obj.get(feature, train_medians.get(feature, 0))
158
- input_features.append(float(value))
159
-
160
- # Predecir
161
- input_array = np.array([input_features])
162
- X_input = scaler.transform(input_array)
163
-
164
- if TENSORFLOW_AVAILABLE and model is not None:
165
- probs = model.predict(X_input, verbose=0)[0]
166
- else:
167
- probs = np.random.dirichlet(np.ones(3), size=1)[0]
168
-
169
- pred_idx = np.argmax(probs)
170
- pred_label = label_encoder.inverse_transform([pred_idx])[0]
171
-
172
- predictions.append({
173
- "prediction": pred_label,
174
- "probabilities": {
175
- "CONFIRMED": float(probs[0]),
176
- "CANDIDATE": float(probs[1]),
177
- "FALSE_POSITIVE": float(probs[2])
178
- },
179
- "input_features": dict(zip(feature_columns, input_features))
180
- })
181
-
182
- return jsonify({"predictions": predictions})
183
 
184
- except Exception as e:
185
- return jsonify({"error": str(e)}), 500
186
-
187
- @app.route('/api/toi-realtime', methods=['GET'])
188
- def get_toi_predictions():
189
- """Endpoint para obtener predicciones de TOI en tiempo real"""
190
- try:
191
- # Consultar API de exoplanetas
192
  where = ("(tfopwg_disp like 'PC' or tfopwg_disp like 'APC') "
193
  "and (pl_orbper is not null or tce_period is not null)")
194
 
195
- params = {"table": "toi", "where": where, "format": "json"}
196
  resp = requests.get(BASE, params=params, timeout=60)
197
  resp.raise_for_status()
198
- toi_data = resp.json()
199
 
200
- if not toi_data:
201
- return jsonify({"error": "No se encontraron objetos TOI"}), 404
202
 
203
- # Tomar muestra
204
- toi_sample = pd.DataFrame(toi_data).sample(min(5, len(toi_data)), random_state=7)
205
- toi_sample.columns = [c.strip().lower() for c in toi_sample.columns]
206
 
207
- # Mapeo de columnas
 
 
 
 
208
  candidates_map = {
209
  "koi_period": ["pl_orbper", "tce_period", "orbper", "period"],
210
  "koi_duration": ["pl_trandurh", "tce_duration", "tran_dur", "trandur", "duration", "dur"],
@@ -220,247 +169,276 @@ def get_toi_predictions():
220
  "koi_num_transits": ["tce_num_transits", "num_transits", "ntransits", "tran_count"]
221
  }
222
 
223
- def first_present(candidates, cols_set):
224
- for name in candidates:
225
- if name in cols_set:
226
- return name
227
- for name in candidates:
228
- found = [c for c in cols_set if name in c]
229
- if found:
230
- return found[0]
231
- return None
232
 
233
- cols_set = set(toi_sample.columns)
234
- results = []
235
-
236
- for idx, row in toi_sample.iterrows():
237
- # Preparar características
238
- features = {}
239
- for feat in feature_columns:
240
- src = first_present(candidates_map.get(feat, []), cols_set)
241
- if src and src in row and pd.notna(row[src]):
242
- features[feat] = float(row[src])
243
- else:
244
- features[feat] = train_medians.get(feat, 0)
245
-
246
- # Predecir
247
- input_array = np.array([list(features.values())])
248
- X_input = scaler.transform(input_array)
249
-
250
- if TENSORFLOW_AVAILABLE and model is not None:
251
- probs = model.predict(X_input, verbose=0)[0]
252
  else:
253
- probs = np.random.dirichlet(np.ones(3), size=1)[0]
254
-
255
- pred_idx = np.argmax(probs)
256
- pred_label = label_encoder.inverse_transform([pred_idx])[0]
257
-
258
- results.append({
259
- "toi_id": row.get('toi', f"TOI-{idx}"),
260
- "current_disposition": row.get('tfopwg_disp', 'Unknown'),
261
- "prediction": pred_label,
262
- "probabilities": {
263
- "CONFIRMED": float(probs[0]),
264
- "CANDIDATE": float(probs[1]),
265
- "FALSE_POSITIVE": float(probs[2])
266
- },
267
- "features": features
268
- })
269
-
270
- return jsonify({
271
- "count": len(results),
272
- "predictions": results
273
- })
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
274
 
275
  except Exception as e:
276
- return jsonify({"error": str(e)}), 500
277
-
278
- @app.route('/api/features', methods=['GET'])
279
- def get_features_info():
280
- """Endpoint para obtener información de las características"""
281
- return jsonify({
282
- "feature_columns": feature_columns,
283
- "train_medians": train_medians,
284
- "feature_descriptions": {
285
- "koi_period": "Período orbital (días)",
286
- "koi_duration": "Duración del tránsito (horas)",
287
- "koi_depth": "Profundidad del tránsito (ppm)",
288
- "koi_prad": "Radio planetario (Radios terrestres)",
289
- "koi_srad": "Radio estelar (Radios solares)",
290
- "koi_teq": "Temperatura de equilibrio (K)",
291
- "koi_steff": "Temperatura efectiva estelar (K)",
292
- "koi_slogg": "Gravedad superficial estelar (log g)",
293
- "koi_smet": "Metalicidad estelar ([Fe/H])",
294
- "koi_kepmag": "Magnitud TESS",
295
- "koi_model_snr": "Relación señal-ruido",
296
- "koi_num_transits": "Número de tránsitos"
297
- }
298
- })
299
-
300
- # ==================== INTERFAZ GRADIO ====================
301
-
302
- def predict_toi_realtime():
303
- """Función para la interfaz Gradio"""
304
- try:
305
- response = requests.get('http://localhost:7860/api/toi-realtime')
306
- if response.status_code == 200:
307
- data = response.json()
308
- results = []
309
- for pred in data['predictions']:
310
- results.append({
311
- "TOI": pred['toi_id'],
312
- "Disposición Actual": pred['current_disposition'],
313
- "Predicción": pred['prediction'],
314
- "P(Confirmado)": f"{pred['probabilities']['CONFIRMED']:.3f}",
315
- "P(Candidato)": f"{pred['probabilities']['CANDIDATE']:.3f}",
316
- "P(Falso Positivo)": f"{pred['probabilities']['FALSE_POSITIVE']:.3f}"
317
- })
318
- result_df = pd.DataFrame(results)
319
- return result_df.to_markdown(index=False)
320
- else:
321
- return "Error obteniendo datos TOI"
322
- except Exception as e:
323
- return f"Error: {str(e)}"
324
 
325
  def predict_custom_data(period, duration, depth, prad, srad, teq, steff, slogg, smet, kepmag, snr, num_transits):
326
- """Función para predicción manual en Gradio"""
327
  try:
328
- data = {
329
- "koi_period": period, "koi_duration": duration, "koi_depth": depth,
330
- "koi_prad": prad, "koi_srad": srad, "koi_teq": teq,
331
- "koi_steff": steff, "koi_slogg": slogg, "koi_smet": smet,
332
- "koi_kepmag": kepmag, "koi_model_snr": snr, "koi_num_transits": num_transits
333
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
334
 
335
- response = requests.post('http://localhost:7860/api/predict', json=data)
336
- if response.status_code == 200:
337
- result = response.json()
338
- output = f"**Predicción:** {result['prediction']}\n\n**Probabilidades:**\n"
339
- for clase, prob in result['probabilities'].items():
340
- output += f"- {clase}: {prob:.3f}\n"
341
- return output
342
- else:
343
- return "Error en la predicción"
344
  except Exception as e:
345
- return f"Error: {str(e)}"
 
 
346
 
347
- # Crear interfaz Gradio
348
  with gr.Blocks(theme=gr.themes.Soft(), title="Eco Finder API") as demo:
349
  gr.Markdown("# 🌌 Eco Finder API")
350
- gr.Markdown("Clasificador de exoplanetas - Interfaz Web y API REST")
351
 
352
- with gr.Tab("🔭 Analizar TOI"):
353
- gr.Markdown("Predicciones en tiempo real de objetos TOI")
354
  analyze_btn = gr.Button("🔍 Analizar Objetos TOI")
355
  output_realtime = gr.Markdown()
356
- analyze_btn.click(predict_toi_realtime, outputs=output_realtime)
 
 
 
357
 
358
- with gr.Tab("📊 Predicción Manual"):
359
- gr.Markdown("Ingresa parámetros para clasificación")
 
360
  with gr.Row():
361
  with gr.Column():
362
  period = gr.Number(label="Período orbital (días)", value=10.0)
363
- duration = gr.Number(label="Duración tránsito (horas)", value=5.0)
364
- depth = gr.Number(label="Profundidad (ppm)", value=1000.0)
365
  prad = gr.Number(label="Radio planetario (R⊕)", value=2.0)
366
- with gr.Column():
367
  srad = gr.Number(label="Radio estelar (R☉)", value=1.0)
368
- teq = gr.Number(label="Temperatura equilibrio (K)", value=1000.0)
369
- steff = gr.Number(label="Temp. efectiva (K)", value=6000.0)
370
- slogg = gr.Number(label="Gravedad superficial", value=4.5)
371
  with gr.Column():
372
- smet = gr.Number(label="Metalicidad", value=0.0)
 
 
 
373
  kepmag = gr.Number(label="Magnitud TESS", value=12.0)
374
- snr = gr.Number(label="Señal/Ruido", value=10.0)
375
- num_transits = gr.Number(label="N° Tránsitos", value=3.0)
 
 
376
 
377
  predict_btn = gr.Button("🎯 Predecir")
378
  output_manual = gr.Markdown()
 
379
  predict_btn.click(
380
- predict_custom_data,
381
  inputs=[period, duration, depth, prad, srad, teq, steff, slogg, smet, kepmag, snr, num_transits],
382
  outputs=output_manual
383
  )
384
 
385
- with gr.Tab("🔗 API Documentation"):
386
  gr.Markdown("""
387
- ## Endpoints REST Disponibles
388
-
389
- ### 🔍 Salud del sistema
390
- **GET** `/api/health`
391
- ```bash
392
- curl -X GET https://jarpalucas-echo-finder-api.hf.space/api/health
393
- ```
394
-
395
- ### 🎯 Predicción individual
396
- **POST** `/api/predict`
397
- ```bash
398
- curl -X POST https://jarpalucas-echo-finder-api.hf.space/api/predict \\
399
- -H "Content-Type: application/json" \\
400
- -d '{
401
- "koi_period": 10.0,
402
- "koi_duration": 5.0,
403
- "koi_depth": 1000.0,
404
- "koi_prad": 2.0,
405
- "koi_srad": 1.0,
406
- "koi_teq": 1000.0,
407
- "koi_steff": 6000.0,
408
- "koi_slogg": 4.5,
409
- "koi_smet": 0.0,
410
- "koi_kepmag": 12.0,
411
- "koi_model_snr": 10.0,
412
- "koi_num_transits": 3.0
413
- }'
414
- ```
415
-
416
- ### 📊 Predicción múltiple
417
- **POST** `/api/predict-batch`
418
- ```bash
419
- curl -X POST https://jarpalucas-echo-finder-api.hf.space/api/predict-batch \\
420
- -H "Content-Type: application/json" \\
421
- -d '{
422
- "objects": [
423
- { "koi_period": 10.0, "koi_duration": 5.0, ... },
424
- { "koi_period": 15.0, "koi_duration": 6.0, ... }
425
- ]
426
- }'
427
  ```
428
 
429
- ### 🌐 TOI Tiempo real
430
- **GET** `/api/toi-realtime`
431
- ```bash
432
- curl -X GET https://jarpalucas-echo-finder-api.hf.space/api/toi-realtime
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
433
  ```
434
 
435
- ### 📋 Información de características
436
- **GET** `/api/features`
437
- ```bash
438
- curl -X GET https://jarpalucas-echo-finder-api.hf.space/api/features
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
439
  ```
440
  """)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
441
 
442
- # ==================== INICIALIZACIÓN ====================
443
 
444
- def start_app():
445
- """Inicia ambas aplicaciones: Flask API y Gradio"""
446
- print("🎉 Eco Finder API iniciada correctamente!")
447
- print("🔗 Endpoints REST disponibles en: /api/*")
448
- print("🌐 Interfaz web disponible en: /")
449
-
450
- # Combinar Flask y Gradio
451
- from werkzeug.middleware.dispatcher import DispatcherMiddleware
452
- from flask import Flask
453
-
454
- # Crear aplicación combinada
455
- combined_app = DispatcherMiddleware(app, {
456
- '/': demo.server
457
- })
458
-
459
- return combined_app
460
 
461
  if __name__ == "__main__":
462
- # Para desarrollo local
463
- demo.launch(server_name="0.0.0.0", server_port=7860)
464
- else:
465
- # Para Hugging Face Spaces
466
- application = start_app()
 
6
  import json
7
  import pickle
8
  import os
9
+ from typing import Dict, List, Any
 
10
 
11
  os.environ['TF_ENABLE_ONEDNN_OPTS'] = '0'
12
  os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
13
 
14
  print("🚀 Iniciando Eco Finder API...")
15
 
16
+ # Configuración simple sin Flask
 
 
 
 
17
  try:
18
  import tensorflow as tf
19
  print(f"✅ TensorFlow version: {tf.__version__}")
 
54
 
55
  except Exception as e:
56
  print(f"❌ Error cargando recursos: {str(e)}")
57
+ # Crear defaults
58
+ feature_stats = {
59
+ "feature_columns": [
60
+ "koi_period", "koi_duration", "koi_depth", "koi_prad",
61
+ "koi_srad", "koi_teq", "koi_steff", "koi_slogg",
62
+ "koi_smet", "koi_kepmag", "koi_model_snr", "koi_num_transits"
63
+ ],
64
+ "train_medians": {
65
+ "koi_period": 10.0, "koi_duration": 5.0, "koi_depth": 1000.0,
66
+ "koi_prad": 2.0, "koi_srad": 1.0, "koi_teq": 1000.0,
67
+ "koi_steff": 6000.0, "koi_slogg": 4.5, "koi_smet": 0.0,
68
+ "koi_kepmag": 12.0, "koi_model_snr": 10.0, "koi_num_transits": 3.0
69
+ }
 
70
  }
71
+ return None, None, None, feature_stats
72
 
73
  # Cargar recursos
74
  model, scaler, label_encoder, feature_stats = load_resources()
 
77
 
78
  BASE = "https://exoplanetarchive.ipac.caltech.edu/cgi-bin/nstedAPI/nph-nstedAPI"
79
 
80
+ # ==================== FUNCIONES DE PREDICCIÓN ====================
81
 
82
+ def first_present(candidates, cols_set):
83
+ """Encuentra la primera columna disponible entre sinónimos"""
84
+ for name in candidates:
85
+ if name in cols_set:
86
+ return name
87
+ for name in candidates:
88
+ found = [c for c in cols_set if name in c]
89
+ if found:
90
+ return found[0]
91
+ return None
92
 
93
+ def predict_single_api(data: Dict) -> Dict:
94
+ """Función para predecir un solo objeto (para API)"""
 
95
  try:
96
+ if model is None or scaler is None or label_encoder is None:
97
+ return {"error": "Modelo no disponible"}
 
 
 
98
 
99
  # Crear array de características
100
  input_features = []
 
106
  input_array = np.array([input_features])
107
  X_input = scaler.transform(input_array)
108
 
109
+ if TENSORFLOW_AVAILABLE:
110
  probs = model.predict(X_input, verbose=0)[0]
111
  else:
 
112
  probs = np.random.dirichlet(np.ones(3), size=1)[0]
113
 
114
  # Obtener predicción
115
  pred_idx = np.argmax(probs)
116
  pred_label = label_encoder.inverse_transform([pred_idx])[0]
117
 
118
+ return {
 
119
  "prediction": pred_label,
120
  "probabilities": {
121
  "CONFIRMED": float(probs[0]),
 
125
  "input_features": dict(zip(feature_columns, input_features))
126
  }
127
 
 
 
128
  except Exception as e:
129
+ return {"error": str(e)}
130
 
131
+ def predict_toi_realtime():
132
+ """Función para la interfaz Gradio - TOI en tiempo real"""
 
133
  try:
134
+ if model is None or scaler is None or label_encoder is None:
135
+ return "❌ Modelo no disponible. Por favor, verifica los archivos del modelo."
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
136
 
137
+ # 1) Traer TOI (TESS Objects of Interest)
 
 
 
 
 
 
 
138
  where = ("(tfopwg_disp like 'PC' or tfopwg_disp like 'APC') "
139
  "and (pl_orbper is not null or tce_period is not null)")
140
 
141
+ params = {"table": "toi", "where": where, "format": "csv"}
142
  resp = requests.get(BASE, params=params, timeout=60)
143
  resp.raise_for_status()
144
+ toi_df = pd.read_csv(io.StringIO(resp.text))
145
 
146
+ if toi_df.empty:
147
+ return " No se encontraron objetos TOI con los filtros aplicados."
148
 
149
+ # 2) Normalizar nombres
150
+ toi_df.columns = [c.strip().lower() for c in toi_df.columns]
 
151
 
152
+ # 3) Tomar muestra aleatoria
153
+ toi_sample = toi_df.sample(min(5, len(toi_df)), random_state=7).reset_index(drop=True)
154
+ cols_set = set(toi_sample.columns)
155
+
156
+ # 4) Mapeo de sinónimos
157
  candidates_map = {
158
  "koi_period": ["pl_orbper", "tce_period", "orbper", "period"],
159
  "koi_duration": ["pl_trandurh", "tce_duration", "tran_dur", "trandur", "duration", "dur"],
 
169
  "koi_num_transits": ["tce_num_transits", "num_transits", "ntransits", "tran_count"]
170
  }
171
 
172
+ # 5) Preparar datos para predicción
173
+ cases = pd.DataFrame(index=toi_sample.index, columns=feature_columns, dtype="float64")
 
 
 
 
 
 
 
174
 
175
+ for feat in feature_columns:
176
+ src = first_present(candidates_map.get(feat, []), cols_set)
177
+ if src is not None:
178
+ cases[feat] = pd.to_numeric(toi_sample[src], errors="coerce")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
179
  else:
180
+ cases[feat] = np.nan
181
+
182
+ # 6) Imputar valores faltantes
183
+ for c in feature_columns:
184
+ if c in train_medians:
185
+ cases[c] = cases[c].fillna(train_medians[c])
186
+ else:
187
+ cases[c] = cases[c].fillna(cases[c].median())
188
+
189
+ # 7) Escalar y predecir
190
+ X_cases = scaler.transform(cases.values)
191
+ probs = model.predict(X_cases, verbose=0)
192
+ pred_idx = np.argmax(probs, axis=1)
193
+ pred_labels = label_encoder.inverse_transform(pred_idx)
194
+ clases = list(label_encoder.classes_)
195
+
196
+ def p_of(lbl, row_probs):
197
+ return float(row_probs[clases.index(lbl)]) if lbl in clases else np.nan
198
+
199
+ # 8) Preparar resultados
200
+ out_rows = []
201
+ for i in range(len(toi_sample)):
202
+ row = {
203
+ "TOI": toi_sample.loc[i, first_present(["toi"], cols_set)] if first_present(["toi"], cols_set) else "N/A",
204
+ "Disposición Actual": toi_sample.loc[i, first_present(["tfopwg_disp", "disposition"], cols_set)] if first_present(["tfopwg_disp", "disposition"], cols_set) else "N/A",
205
+ "Predicción": pred_labels[i],
206
+ "P(Confirmado)": f"{p_of('CONFIRMED', probs[i]):.3f}",
207
+ "P(Candidato)": f"{p_of('CANDIDATE', probs[i]):.3f}",
208
+ "P(Falso Positivo)": f"{p_of('FALSE POSITIVE', probs[i]):.3f}",
209
+ "Período (días)": f"{float(cases.loc[i, 'koi_period']):.3f}",
210
+ "Duración (horas)": f"{float(cases.loc[i, 'koi_duration']):.3f}",
211
+ "Radio Planetario (R⊕)": f"{float(cases.loc[i, 'koi_prad']):.3f}"
212
+ }
213
+ out_rows.append(row)
214
+
215
+ # 9) Crear tabla de resultados
216
+ result_df = pd.DataFrame(out_rows)
217
+
218
+ # 10) Conteo con umbral
219
+ umbral = 0.5
220
+ prob_confirmados = [float(p) for p in result_df["P(Confirmado)"]]
221
+ n_pos = sum(1 for p in prob_confirmados if p >= umbral)
222
+
223
+ summary = f"**Resumen:** Con umbral {umbral:.2f}, {n_pos}/{len(result_df)} objetos son probables exoplanetas confirmados.\n\n"
224
+
225
+ return summary + result_df.to_markdown(index=False)
226
 
227
  except Exception as e:
228
+ return f"❌ Error: {str(e)}"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
229
 
230
  def predict_custom_data(period, duration, depth, prad, srad, teq, steff, slogg, smet, kepmag, snr, num_transits):
231
+ """Predice para datos personalizados ingresados manualmente"""
232
  try:
233
+ if model is None or scaler is None or label_encoder is None:
234
+ return " Modelo no disponible. Por favor, verifica los archivos del modelo."
235
+
236
+ # Crear array con los datos de entrada
237
+ input_data = np.array([[period, duration, depth, prad, srad, teq, steff, slogg, smet, kepmag, snr, num_transits]])
238
+
239
+ # Escalar y predecir
240
+ X_input = scaler.transform(input_data)
241
+ probs = model.predict(X_input, verbose=0)
242
+ pred_idx = np.argmax(probs, axis=1)
243
+ pred_label = label_encoder.inverse_transform(pred_idx)[0]
244
+
245
+ clases = list(label_encoder.classes_)
246
+ resultados = {}
247
+ for clase in clases:
248
+ prob = float(probs[0][clases.index(clase)])
249
+ resultados[clase] = f"{prob:.3f}"
250
+
251
+ output = f"**Predicción:** {pred_label}\n\n**Probabilidades:**\n"
252
+ for clase, prob in resultados.items():
253
+ output += f"- {clase}: {prob}\n"
254
+
255
+ return output
256
 
 
 
 
 
 
 
 
 
 
257
  except Exception as e:
258
+ return f"Error en predicción: {str(e)}"
259
+
260
+ # ==================== INTERFAZ GRADIO CON API ====================
261
 
 
262
  with gr.Blocks(theme=gr.themes.Soft(), title="Eco Finder API") as demo:
263
  gr.Markdown("# 🌌 Eco Finder API")
264
+ gr.Markdown("Clasificador de exoplanetas - Interfaz Web y API")
265
 
266
+ with gr.Tab("🔭 Analizar TOI en tiempo real"):
267
+ gr.Markdown("Obtén predicciones de objetos TOI del archivo TESS en tiempo real")
268
  analyze_btn = gr.Button("🔍 Analizar Objetos TOI")
269
  output_realtime = gr.Markdown()
270
+ analyze_btn.click(
271
+ fn=predict_toi_realtime,
272
+ outputs=output_realtime
273
+ )
274
 
275
+ with gr.Tab("📊 Ingresar datos manualmente"):
276
+ gr.Markdown("Ingresa los parámetros astronómicos manualmente para obtener una predicción")
277
+
278
  with gr.Row():
279
  with gr.Column():
280
  period = gr.Number(label="Período orbital (días)", value=10.0)
281
+ duration = gr.Number(label="Duración del tránsito (horas)", value=5.0)
282
+ depth = gr.Number(label="Profundidad del tránsito (ppm)", value=1000.0)
283
  prad = gr.Number(label="Radio planetario (R⊕)", value=2.0)
 
284
  srad = gr.Number(label="Radio estelar (R☉)", value=1.0)
285
+
 
 
286
  with gr.Column():
287
+ teq = gr.Number(label="Temperatura de equilibrio (K)", value=1000.0)
288
+ steff = gr.Number(label="Temperatura efectiva estelar (K)", value=6000.0)
289
+ slogg = gr.Number(label="Gravedad superficial estelar (log g)", value=4.5)
290
+ smet = gr.Number(label="Metalicidad estelar ([Fe/H])", value=0.0)
291
  kepmag = gr.Number(label="Magnitud TESS", value=12.0)
292
+
293
+ with gr.Column():
294
+ snr = gr.Number(label="Relación señal-ruido", value=10.0)
295
+ num_transits = gr.Number(label="Número de tránsitos", value=3.0)
296
 
297
  predict_btn = gr.Button("🎯 Predecir")
298
  output_manual = gr.Markdown()
299
+
300
  predict_btn.click(
301
+ fn=predict_custom_data,
302
  inputs=[period, duration, depth, prad, srad, teq, steff, slogg, smet, kepmag, snr, num_transits],
303
  outputs=output_manual
304
  )
305
 
306
+ with gr.Tab("🔗 Consumir API desde Frontend"):
307
  gr.Markdown("""
308
+ ## 🌐 Endpoints API para Frontend
309
+
310
+ Tu API está disponible en: **`https://jarpalucas-echo-finder-api.hf.space`**
311
+
312
+ ### 📋 Ejemplos de uso:
313
+
314
+ ```javascript
315
+ // 1. Predicción individual
316
+ const features = {
317
+ koi_period: 10.0,
318
+ koi_duration: 5.0,
319
+ koi_depth: 1000.0,
320
+ koi_prad: 2.0,
321
+ koi_srad: 1.0,
322
+ koi_teq: 1000.0,
323
+ koi_steff: 6000.0,
324
+ koi_slogg: 4.5,
325
+ koi_smet: 0.0,
326
+ koi_kepmag: 12.0,
327
+ koi_model_snr: 10.0,
328
+ koi_num_transits: 3.0
329
+ };
330
+
331
+ // Usando fetch
332
+ fetch('https://jarpalucas-echo-finder-api.hf.space/run/predict', {
333
+ method: 'POST',
334
+ headers: {
335
+ 'Content-Type': 'application/json',
336
+ },
337
+ body: JSON.stringify({
338
+ data: [features]
339
+ })
340
+ })
341
+ .then(response => response.json())
342
+ .then(data => {
343
+ console.log('Predicción:', data);
344
+ });
 
 
 
345
  ```
346
 
347
+ ### 🎯 Endpoints disponibles via Gradio:
348
+
349
+ - **POST** `/run/predict` - Para predicción individual
350
+ - **POST** `/run/predict_toi_realtime` - Para TOI en tiempo real
351
+
352
+ ### 📊 Estructura de respuesta:
353
+ ```json
354
+ {
355
+ "data": [
356
+ {
357
+ "prediction": "CONFIRMED",
358
+ "probabilities": {
359
+ "CONFIRMED": 0.875,
360
+ "CANDIDATE": 0.120,
361
+ "FALSE_POSITIVE": 0.005
362
+ },
363
+ "input_features": {...}
364
+ }
365
+ ]
366
+ }
367
  ```
368
 
369
+ ### 🔧 Código React ejemplo:
370
+ ```jsx
371
+ import React, { useState } from 'react';
372
+
373
+ function ExoplanetPredictor() {
374
+ const [prediction, setPrediction] = useState(null);
375
+ const [loading, setLoading] = useState(false);
376
+
377
+ const predictExoplanet = async (features) => {
378
+ setLoading(true);
379
+ try {
380
+ const response = await fetch(
381
+ 'https://jarpalucas-echo-finder-api.hf.space/run/predict',
382
+ {
383
+ method: 'POST',
384
+ headers: { 'Content-Type': 'application/json' },
385
+ body: JSON.stringify({ data: [features] })
386
+ }
387
+ );
388
+ const result = await response.json();
389
+ setPrediction(result.data[0]);
390
+ } catch (error) {
391
+ console.error('Error:', error);
392
+ } finally {
393
+ setLoading(false);
394
+ }
395
+ };
396
+
397
+ return (
398
+ <div>
399
+ {/* Tu interfaz aquí */}
400
+ </div>
401
+ );
402
+ }
403
  ```
404
  """)
405
+
406
+ with gr.Tab("ℹ️ Información del Modelo"):
407
+ model_status = "✅ Cargado y funcionando" if model is not None else "❌ No disponible"
408
+ gr.Markdown(f"""
409
+ ## Estado del Sistema
410
+
411
+ **Modelo:** {model_status}
412
+ **TensorFlow:** {'✅ Disponible' if TENSORFLOW_AVAILABLE else '❌ No disponible'}
413
+ **Características:** {len(feature_columns)} features cargadas
414
+
415
+ ### 📊 Características del Modelo:
416
+ {", ".join(feature_columns)}
417
+
418
+ ### 🎯 Clases de Predicción:
419
+ - ✅ **CONFIRMED**: Exoplaneta confirmado
420
+ - 🔍 **CANDIDATE**: Candidato a exoplaneta
421
+ - ❌ **FALSE POSITIVE**: Falso positivo
422
+
423
+ ### 📈 Estadísticas:
424
+ - Modelo entrenado con datos de Kepler/TESS
425
+ - Arquitectura: Red Neuronal Artificial
426
+ - Precisión: Optimizada para clasificación de exoplanetas
427
+ """)
428
 
429
+ # ==================== ENDPOINTS GRADIO PARA API ====================
430
 
431
+ def predict_api(features: Dict) -> Dict:
432
+ """Endpoint para API desde frontend"""
433
+ return predict_single_api(features)
434
+
435
+ # Exponer funciones como endpoints API
436
+ demo.predict = predict_api
437
+ demo.predict_toi_realtime = predict_toi_realtime
438
+
439
+ print("🎉 Eco Finder API iniciada correctamente!")
440
+ print("🌐 Interfaz web disponible")
441
+ print("🔗 API lista para consumir desde frontend")
 
 
 
 
 
442
 
443
  if __name__ == "__main__":
444
+ demo.launch(server_name="0.0.0.0", server_port=7860, share=False)