ArthurGamaJorge committed on
Commit 17c039f · 1 Parent(s): 99eb447

Keep only the connection via the README

.gitattributes DELETED
@@ -1,38 +0,0 @@
- pip install git-filter-repo
- *.7z filter=lfs diff=lfs merge=lfs -text
- *.arrow filter=lfs diff=lfs merge=lfs -text
- *.bin filter=lfs diff=lfs merge=lfs -text
- *.bz2 filter=lfs diff=lfs merge=lfs -text
- *.ckpt filter=lfs diff=lfs merge=lfs -text
- *.ftz filter=lfs diff=lfs merge=lfs -text
- *.gz filter=lfs diff=lfs merge=lfs -text
- *.h5 filter=lfs diff=lfs merge=lfs -text
- *.joblib filter=lfs diff=lfs merge=lfs -text
- *.lfs.* filter=lfs diff=lfs merge=lfs -text
- *.mlmodel filter=lfs diff=lfs merge=lfs -text
- *.model filter=lfs diff=lfs merge=lfs -text
- *.msgpack filter=lfs diff=lfs merge=lfs -text
- *.npy filter=lfs diff=lfs merge=lfs -text
- *.npz filter=lfs diff=lfs merge=lfs -text
- *.onnx filter=lfs diff=lfs merge=lfs -text
- *.ot filter=lfs diff=lfs merge=lfs -text
- *.parquet filter=lfs diff=lfs merge=lfs -text
- *.pb filter=lfs diff=lfs merge=lfs -text
- *.pickle filter=lfs diff=lfs merge=lfs -text
- *.pkl filter=lfs diff=lfs merge=lfs -text
- *.pt filter=lfs diff=lfs merge=lfs -text
- *.pth filter=lfs diff=lfs merge=lfs -text
- *.rar filter=lfs diff=lfs merge=lfs -text
- *.safetensors filter=lfs diff=lfs merge=lfs -text
- saved_model/**/* filter=lfs diff=lfs merge=lfs -text
- *.tar.* filter=lfs diff=lfs merge=lfs -text
- *.tar filter=lfs diff=lfs merge=lfs -text
- *.tflite filter=lfs diff=lfs merge=lfs -text
- *.tgz filter=lfs diff=lfs merge=lfs -text
- *.wasm filter=lfs diff=lfs merge=lfs -text
- *.xz filter=lfs diff=lfs merge=lfs -text
- *.zip filter=lfs diff=lfs merge=lfs -text
- *.zst filter=lfs diff=lfs merge=lfs -text
- *tfevents* filter=lfs diff=lfs merge=lfs -text
- models/checkpoints/*.keras filter=lfs diff=lfs merge=lfs -text
- *.keras filter=lfs diff=lfs merge=lfs -text
 
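The deleted rules above routed matching paths through Git LFS instead of regular Git storage (the stray `pip install git-filter-repo` on the first line is a shell command, not an attribute rule, and looks accidentally pasted). A minimal sketch of how such glob rules map onto paths, assuming fnmatch-style matching as a rough stand-in for .gitattributes semantics; the pattern subset and helper are illustrative:

```python
from fnmatch import fnmatch

# Illustrative subset of the deleted LFS patterns.
LFS_PATTERNS = ["*.keras", "*.pt", "*.pkl", "*.parquet"]

def routed_to_lfs(path: str) -> bool:
    # Bare globs in .gitattributes match against the basename.
    name = path.rsplit("/", 1)[-1]
    return any(fnmatch(name, pattern) for pattern in LFS_PATTERNS)

print(routed_to_lfs("models/DetectsmallTest1.pt"))  # True: weights go to LFS
print(routed_to_lfs("app.py"))                      # False: plain Git object
```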
Dockerfile DELETED
@@ -1,20 +0,0 @@
- FROM python:3.13.3
-
- RUN useradd -m -u 1000 user
-
- RUN apt-get update && apt-get install -y \
-     python3 \
-     python3-pip \
-     libgl1
-
- USER user
- ENV PATH="/home/user/.local/bin:$PATH"
-
- COPY . /app
-
- WORKDIR /app
-
- RUN pip install --no-cache-dir --upgrade -r requirements.txt
-
- COPY --chown=user . /app
- CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
 
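The deleted Dockerfile served the FastAPI app below with uvicorn on port 7860. A minimal smoke test against a locally running container; the base URL assumes a local run, and `requests` is a client-side package that was not part of the deleted requirements.txt:

```python
import requests  # client-side only; not pinned in the deleted requirements.txt

BASE_URL = "http://localhost:7860"  # port taken from the Dockerfile CMD

# The root route in app.py (below) returns a simple health payload.
response = requests.get(f"{BASE_URL}/")
response.raise_for_status()
print(response.json())  # expected shape: {"status": "ok", "message": ...}
```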
app.py DELETED
@@ -1,84 +0,0 @@
- import uvicorn
- from fastapi import Body, FastAPI, UploadFile, File, Response
- from fastapi.responses import JSONResponse
- from fastapi.middleware.cors import CORSMiddleware
- import traceback
- import numpy as np
- import json
-
- from detect import DengueDetector
- from predict import DenguePredictor
-
- def default_json_serializer(obj):
-     if isinstance(obj, np.integer):
-         return int(obj)
-     elif isinstance(obj, np.floating):
-         return float(obj)
-     elif isinstance(obj, np.ndarray):
-         return obj.tolist()
-     raise TypeError(f"Object of type {obj.__class__.__name__} is not JSON serializable")
-
- detector: DengueDetector = None
- predictor: DenguePredictor = None
-
- app = FastAPI()
-
- # --- Create a startup event to load the models ---
- @app.on_event("startup")
- async def startup_event():
-     global detector, predictor
-     print("Running startup event: loading the AI modules...")
-     detector = DengueDetector()
-     predictor = DenguePredictor()
-     print("AI modules loaded successfully. API ready.")
-
- # --- CORS ---
- origins = ["https://previdengue.vercel.app", "http://localhost:3000", "*"]
- app.add_middleware(
-     CORSMiddleware,
-     allow_origins=origins,
-     allow_credentials=True,
-     allow_methods=["*"],
-     allow_headers=["*"]
- )
-
- # --- Routes ---
- @app.get("/")
- def health_check():
-     return {"status": "ok", "message": "Dengue API running!"}
-
- @app.post("/detect/")
- async def detect(file: UploadFile = File(...)):
-     if detector is None:
-         return JSONResponse(status_code=503, content={"error": "Detector has not been initialized yet."})
-     try:
-         content = await file.read()
-         result = detector.detect_image(content)
-         return JSONResponse(content=result)
-     except Exception as e:
-         return JSONResponse(status_code=500, content={"error": str(e)})
-
-
- @app.post("/predict/")
- async def predict_dengue_route(payload: dict = Body(...)):
-     if predictor is None:
-         return JSONResponse(status_code=503, content={"error": "Predictor has not been initialized yet."})
-     try:
-         ibge_code_str = payload.get("ibge_code")
-         if ibge_code_str is None:
-             raise ValueError("The 'ibge_code' field is required.")
-
-         ibge_code = int(ibge_code_str)
-         result = predictor.predict(ibge_code)
-
-         json_content = json.dumps(result, default=default_json_serializer)
-
-         return Response(content=json_content, media_type="application/json")
-
-     except Exception as e:
-         tb_str = traceback.format_exc()
-         print(tb_str)
-         return JSONResponse(status_code=500, content={
-             "error": str(e),
-             "traceback": tb_str
-         })
 
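The deleted app exposed two POST routes: /detect/ takes a multipart image upload and /predict/ takes a JSON body whose 'ibge_code' field is required. A hedged client sketch; the sample image path is hypothetical and the IBGE code is only an example:

```python
import requests  # client-side only; not part of the deleted app

BASE_URL = "http://localhost:7860"

# /detect/ expects a multipart upload under the field name "file".
with open("aerial_tile.jpg", "rb") as f:  # hypothetical sample image
    detect_resp = requests.post(f"{BASE_URL}/detect/", files={"file": f})
print(detect_resp.json().get("intensity_score"))

# /predict/ expects JSON; the server casts 'ibge_code' to int and validates it.
predict_resp = requests.post(
    f"{BASE_URL}/predict/",
    json={"ibge_code": 3550308},  # example: IBGE code for São Paulo
)
print(predict_resp.json().get("municipality_name"))
```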
detect.py DELETED
@@ -1,54 +0,0 @@
- from collections import Counter
- import numpy as np
- from PIL import Image
- from io import BytesIO
- from ultralytics import YOLO
-
- class DengueDetector:
-     def __init__(self, model_path="./models/DetectsmallTest1.pt"):
-         self.model = YOLO(model_path)
-         self.names = self.model.names
-
-     def calculate_intensity(self, objects):
-         weights = {"piscina": 9, "caixa_agua": 4, "carro": 1}
-         score = sum(weights.get(obj["class"], 0) for obj in objects)
-         return score
-
-     def detect_image(self, image_bytes):
-         # Load the image from memory
-         img = Image.open(BytesIO(image_bytes)).convert("RGB")
-         img_np = np.array(img)  # YOLO accepts an np.array directly
-         height, width = img_np.shape[:2]
-
-         # Detect objects
-         results = self.model(img_np)
-         result = results[0]
-         boxes = result.boxes
-         class_ids = boxes.cls.tolist()
-         confidences = boxes.conf.tolist()
-         class_names = [self.names[int(cls)] for cls in class_ids]
-         counts = Counter(class_names)
-
-         # Build the list of detections
-         detections = []
-         for i in range(len(boxes)):
-             x1, y1, x2, y2 = map(float, boxes.xyxy[i])
-             conf = float(confidences[i])
-             cls_id = int(class_ids[i])
-             detections.append({
-                 "class": self.names[cls_id],
-                 "confidence": round(conf, 4),
-                 "box": {
-                     "x1": x1, "y1": y1, "x2": x2, "y2": y2,
-                     "original_width": width, "original_height": height
-                 }
-             })
-
-         intensity_score = self.calculate_intensity(detections)
-
-         return {
-             "total": len(class_ids),
-             "contagem": counts,
-             "objetos": detections,
-             "intensity_score": intensity_score
-         }
 
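DengueDetector can also be exercised without the API layer. A minimal sketch, assuming the LFS weights at ./models/DetectsmallTest1.pt were pulled locally; the image filename is hypothetical:

```python
from detect import DengueDetector  # the module deleted above

detector = DengueDetector()  # loads ./models/DetectsmallTest1.pt by default

with open("aerial_tile.jpg", "rb") as f:  # hypothetical sample image
    result = detector.detect_image(f.read())

# "contagem" holds the per-class counts; "objetos" the raw detections.
print(result["total"], result["intensity_score"])
for obj in result["objetos"]:
    print(obj["class"], obj["confidence"], obj["box"])
```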
models/DetectsmallTest1.pt DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:3b4b91b6b7d56edae2d369cb6b7306690a9864a1a51d2bf4f1bd6d2877ccf79c
- size 22508451
 
models/checkpoints/model_checkpoint_best_city.keras DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:3b2f243729883b59055234a45b00f16892e60e5b948003ec3c27dc4d022e88a5
- size 2505166
 
models/checkpoints/test_checkpoint1.keras DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:f3864073b783dad22b1833e95531c6d78df30556af7131d258aa91e7b0fdc5cc
- size 421650
 
models/scalers/scaler_dyn_global.pkl DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:dce234dbd10d12bc0535d23089905f873dc98235550b601e70b702a8665d682f
- size 907
 
models/scalers/scaler_static_global.pkl DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:9699ae64d444fc4453c6524ad3d4d44f7ca09e8bf4f3439c9f19a82f3745e266
- size 743
 
models/scalers/scaler_target_global.pkl DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:981c50a1405169907c95f8e20555949cca362d672f7ee48992e9c31760f85fd8
- size 799
 
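The six model and scaler files above were Git LFS pointer files rather than the binaries themselves: three lines giving the spec version, the SHA-256 object id, and the byte size of the real blob. A small sketch that parses one (pointer text copied from the first entry above):

```python
# Parse a Git LFS pointer file like the ones deleted above.
POINTER = """\
version https://git-lfs.github.com/spec/v1
oid sha256:3b4b91b6b7d56edae2d369cb6b7306690a9864a1a51d2bf4f1bd6d2877ccf79c
size 22508451
"""

fields = dict(line.split(" ", 1) for line in POINTER.strip().splitlines())
print(fields["oid"])        # content address of the ~22.5 MB weights blob
print(int(fields["size"]))  # size in bytes of the tracked file
```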
predict.py DELETED
@@ -1,204 +0,0 @@
- import os
- import numpy as np
- import pandas as pd
- import joblib
- import warnings
- from pathlib import Path
- from datetime import timedelta
- import tensorflow as tf
- import matplotlib.pyplot as plt
- import base64
- from io import BytesIO
- from huggingface_hub import hf_hub_download
-
- warnings.filterwarnings('ignore')
- plt.style.use('seaborn-v0_8-darkgrid')
-
- # --- CHANGE: Definition of the custom loss required to load the model ---
- def asymmetric_mse(y_true, y_pred):
-     penalty_factor = 5.0
-     error = y_true - y_pred
-     penalty = tf.where(error > 0, penalty_factor, 1.0)
-     loss = tf.square(error) * penalty
-     return tf.reduce_mean(loss)
-
- class DenguePredictor:
-     def __init__(self, project_root=None):
-         self.project_root = Path(project_root) if project_root else Path(__file__).resolve().parent
-         # --- CHANGE: Model constants aligned with the final training run ---
-         self.sequence_length = 12
-         self.horizon = 8
-         self.year_min_train = 2014
-         self.year_max_train = 2025
-         self.dynamic_features = [
-             "numero_casos", "casos_velocidade", "casos_aceleracao", "casos_mm_4_semanas",
-             "T2M", "T2M_MAX", "T2M_MIN", "PRECTOTCORR", "RH2M", "ALLSKY_SFC_SW_DWN",
-             "week_sin", "week_cos", "year_norm"
-         ]
-         self.static_features = ["latitude", "longitude"]
-         self.feature_names_pt = {
-             "numero_casos": "Nº de Casos de Dengue", "T2M": "Temperatura Média (°C)",
-             "PRECTOTCORR": "Precipitação (mm)"
-         }
-         self.load_assets()
-
-     def load_assets(self):
-         print("INFO: Loading all AI assets (model, scalers, data)...")
-         AI_ASSETS_DIR = self.project_root / "models"
-
-         # --- CHANGE: Download the file from Hugging Face to the local path ---
-         INFERENCE_PATH = hf_hub_download(
-             repo_id='previdengue/predict_inference_data',
-             filename='inference_data.parquet',
-             repo_type='dataset',
-             token=os.environ.get('HF_TOKEN')  # Authentication with the access token
-         )
-
-         SCALER_DIR = AI_ASSETS_DIR / "scalers"
-         MODEL_PATH = AI_ASSETS_DIR / "checkpoints" / "model_checkpoint_best_city.keras"
-
-         # --- CHANGE: Load the GLOBAL scalers ---
-         self.scaler_dyn = joblib.load(SCALER_DIR / "scaler_dyn_global.pkl")
-         self.scaler_static = joblib.load(SCALER_DIR / "scaler_static_global.pkl")
-         self.scaler_target = joblib.load(SCALER_DIR / "scaler_target_global.pkl")
-
-         # Read the inference data from the downloaded file
-         df_master = pd.read_parquet(INFERENCE_PATH)
-         df_master['codigo_ibge'] = df_master['codigo_ibge'].astype(int)
-         df_master['date'] = pd.to_datetime(df_master['ano'].astype(str) + df_master['semana'].astype(str) + '0', format='%Y%W%w', errors='coerce')
-         df_master = df_master.sort_values(by=['codigo_ibge', 'date']).reset_index(drop=True)
-         self.df_master = df_master
-         self.municipios = df_master[['codigo_ibge', 'municipio']].drop_duplicates().sort_values('codigo_ibge')
-
-         # --- CHANGE: Load the model with the custom loss ---
-         self.model = tf.keras.models.load_model(MODEL_PATH, custom_objects={'asymmetric_mse': asymmetric_mse})
-         print("INFO: AI assets loaded successfully.")
-
-     def plot_to_base64(self, fig):
-         buf = BytesIO()
-         fig.savefig(buf, format='png', bbox_inches='tight', facecolor='#18181b')
-         buf.seek(0)
-         img_str = base64.b64encode(buf.read()).decode('utf-8')
-         plt.close(fig)
-         return img_str
-
-     def predict(self, ibge_code: int):
-         df_mun = self.df_master[self.df_master['codigo_ibge'] == ibge_code].copy()
-         if df_mun.empty or len(df_mun) < self.sequence_length:
-             raise ValueError(f"No data or insufficient history for municipality {ibge_code}")
-
-         municipio_name = self.municipios[self.municipios['codigo_ibge'] == ibge_code].iloc[0]['municipio']
-
-         # 1. Take the last sequence of complete historical data
-         last_complete_sequence = df_mun.dropna(subset=['numero_casos']).tail(self.sequence_length).copy()
-         if len(last_complete_sequence) < self.sequence_length:
-             raise ValueError(f"Insufficient history of known cases for {ibge_code}")
-
-         # 2. Feature engineering on the input sequence
-         last_complete_sequence['casos_velocidade'] = last_complete_sequence['numero_casos'].diff().fillna(0)
-         last_complete_sequence['casos_aceleracao'] = last_complete_sequence['casos_velocidade'].diff().fillna(0)
-         last_complete_sequence['casos_mm_4_semanas'] = last_complete_sequence['numero_casos'].rolling(4, min_periods=1).mean()
-         last_complete_sequence['week_sin'] = np.sin(2 * np.pi * last_complete_sequence['semana'] / 52)
-         last_complete_sequence['week_cos'] = np.cos(2 * np.pi * last_complete_sequence['semana'] / 52)
-         last_complete_sequence['year_norm'] = (last_complete_sequence['ano'] - self.year_min_train) / (self.year_max_train - self.year_min_train)
-
-         # 3. Prepare the model inputs
-         dynamic_input_raw = last_complete_sequence[self.dynamic_features].values
-         static_input_raw = last_complete_sequence[self.static_features].iloc[-1].values.reshape(1, -1)
-
-         dynamic_input_scaled = self.scaler_dyn.transform(dynamic_input_raw).reshape(1, self.sequence_length, -1)
-         static_input_scaled = self.scaler_static.transform(static_input_raw)
-
-         # 4. Run the prediction (single shot)
-         predictions_scaled = self.model.predict([dynamic_input_scaled, static_input_scaled], verbose=0)
-         pred_casos_scaled = predictions_scaled[0]  # First model output
-
-         # 5. Invert the transformation to get the real case counts
-         pred_casos_log = self.scaler_target.inverse_transform(pred_casos_scaled.reshape(1, -1))
-         pred_casos_real = np.expm1(pred_casos_log).flatten()
-
-         predictions_final = [max(0, round(case)) for case in pred_casos_real]
-
-         # 6. Format the response
-         last_real_date = last_complete_sequence['date'].iloc[-1]
-         predicted_data = [{
-             "date": (last_real_date + timedelta(weeks=i + 1)).strftime('%Y-%m-%d'),
-             "predicted_cases": cases
-         } for i, cases in enumerate(predictions_final)]
-
-         historic_data = [{
-             "date": row['date'].strftime('%Y-%m-%d'),
-             "cases": int(row["numero_casos"]) if pd.notna(row["numero_casos"]) else None
-         } for _, row in df_mun.tail(52).iterrows()]
-
-         # Insight generation (lag analysis)
-         lag_plot_b64, strategic_summary, tipping_points = self.generate_lag_insights(df_mun)
-
-         return {
-             "municipality_name": municipio_name,
-             "historic_data": historic_data,
-             "predicted_data": predicted_data,
-             "insights": {
-                 "lag_analysis_plot_base64": lag_plot_b64,
-                 "strategic_summary": strategic_summary,
-                 "tipping_points": tipping_points
-             }
-         }
-
-     def generate_lag_insights(self, df_mun):
-         # Rename the columns for the analysis and the plots
-         df_analysis = df_mun.rename(columns={"T2M": "Temperatura Média (°C)", "PRECTOTCORR": "Precipitação (mm)"})
-         max_lag = 12
-         cases_col_name = 'numero_casos'
-         lag_features = ['Temperatura Média (°C)', 'Precipitação (mm)']
-         lag_correlations = {}
-
-         # Compute each feature's correlation with cases at different lags
-         for col in lag_features:
-             # Make sure the column exists before using it
-             if col in df_analysis.columns:
-                 corrs = [df_analysis[cases_col_name].corr(df_analysis[col].shift(lag)) for lag in range(1, max_lag + 1)]
-                 lag_correlations[col] = corrs
-
-         # Create the figure for the plot
-         fig, ax = plt.subplots(figsize=(10, 6), facecolor='#18181b')
-         ax.set_facecolor('#18181b')
-
-         # Plot the correlations
-         for feature_name, corrs in lag_correlations.items():
-             ax.plot(range(1, max_lag + 1), corrs, marker='o', linestyle='-', label=feature_name)
-
-         # Style the plot (labels are in Portuguese for the front-end)
-         ax.set_title('Análise de Defasagem (Lag)', color='white')
-         ax.set_xlabel('Defasagem (Semanas)', color='white')
-         ax.set_ylabel('Correlação com Casos', color='white')
-         ax.tick_params(colors='white')
-         ax.legend(facecolor='#27272a', edgecolor='gray', labelcolor='white')
-         ax.grid(True, which='both', linestyle='--', linewidth=0.5, color='#444')
-
-         # Convert the plot to base64 to send in the API response
-         lag_plot_b64 = self.plot_to_base64(fig)
-
-         # Find the correlation peak for each feature
-         lag_peaks = {
-             feature: (np.argmax(np.abs(corrs)) + 1) if corrs and not all(pd.isna(corrs)) else 'N/A'
-             for feature, corrs in lag_correlations.items()
-         }
-         temp_lag = lag_peaks.get('Temperatura Média (°C)', 'N/A')
-         rain_lag = lag_peaks.get('Precipitação (mm)', 'N/A')
-
-         # Build the strategic summary (user-facing text, served in Portuguese)
-         summary = (
-             f"A IA identifica a **Temperatura** e a **Precipitação** como os principais gatilhos climáticos. "
-             f"O impacto da temperatura tende a ser máximo após **{temp_lag} semanas**, enquanto o da chuva ocorre após **{rain_lag} semanas**. "
-             "Ações preventivas devem ser intensificadas nesta janela após eventos climáticos extremos."
-         )
-
-         # Build the key points (tipping points, user-facing text in Portuguese)
-         tipping_points = [
-             {"factor": "Temperatura", "value": f"Impacto máximo em {temp_lag} semanas"},
-             {"factor": "Precipitação", "value": f"Impacto máximo em {rain_lag} semanas"},
-             {"factor": "Umidade", "value": "Aumenta a sobrevida do mosquito adulto"},
-         ]
-
-         return lag_plot_b64, summary, tipping_points
 
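The custom asymmetric_mse above weights under-prediction five-fold (error > 0 means y_true exceeded y_pred), nudging the forecast to err on the high side. A small numeric check of that behavior; importing the function alone does not load the model, though instantiating DenguePredictor additionally needs the LFS assets and an HF_TOKEN:

```python
import tensorflow as tf

from predict import asymmetric_mse  # module-level import; no model download

y_true = tf.constant([10.0])

# Under-prediction: error = +2, penalty 5.0 -> loss = 5 * 2**2 = 20
under = asymmetric_mse(y_true, tf.constant([8.0]))
# Over-prediction: error = -2, penalty 1.0 -> loss = 1 * 2**2 = 4
over = asymmetric_mse(y_true, tf.constant([12.0]))

print(float(under), float(over))  # 20.0 4.0
```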
requirements.txt DELETED
@@ -1,16 +0,0 @@
- fastapi==0.115.12
- uvicorn==0.34.0
- pillow==11.1.0
- opencv-python-headless
- numpy==2.1.1
- ultralytics==8.3.105
- torch==2.6.0
- python-multipart
- joblib==1.5.1
- pandas==2.2.3
- matplotlib==3.10.3
- tensorflow==2.20.0
- epiweeks==2.3.0
- scikit-learn==1.6.1
- fastparquet
- huggingface_hub