Spaces:
Sleeping
Sleeping
Merge pull request #30 from Diaure/feature/db_schema_setup
Browse files- App/database.py +22 -12
- App/model.py +51 -0
- App/predict.py +27 -2
- App/schemas.py +0 -1
- README.md +121 -22
- poetry.lock +93 -1
- pyproject.toml +3 -1
- requirements.txt +1 -0
- scripts/create_tables.py +6 -0
- scripts/insert_dataset.py +1 -4
App/database.py
CHANGED
|
@@ -1,20 +1,30 @@
|
|
| 1 |
import os
|
| 2 |
from dotenv import load_dotenv
|
| 3 |
from sqlalchemy import create_engine
|
| 4 |
-
from sqlalchemy.orm import sessionmaker
|
| 5 |
|
| 6 |
load_dotenv()
|
| 7 |
|
| 8 |
-
|
| 9 |
-
|
| 10 |
-
|
| 11 |
-
DB_PORT = os.getenv("DB_PORT")
|
| 12 |
-
DB_NAME = os.getenv("DB_NAME")
|
| 13 |
|
| 14 |
-
|
| 15 |
-
f"postgresql+psycopg2://{DB_USER}:{DB_PASSWORD}"
|
| 16 |
-
f"@{DB_HOST}:{DB_PORT}/{DB_NAME}"
|
| 17 |
-
)
|
| 18 |
|
| 19 |
-
|
| 20 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
import os
|
| 2 |
from dotenv import load_dotenv
|
| 3 |
from sqlalchemy import create_engine
|
| 4 |
+
from sqlalchemy.orm import sessionmaker, declarative_base
|
| 5 |
|
| 6 |
load_dotenv()
|
| 7 |
|
| 8 |
+
# Détection si on est en CI (GitHub Actions) ou en test
|
| 9 |
+
IS_CI = os.getenv("CI") == "true"
|
| 10 |
+
IS_PYTEST = "pytest" in os.getenv("PYTHONPATH", "") or os.getenv("PYTEST_CURRENT_TEST") is not None
|
|
|
|
|
|
|
| 11 |
|
| 12 |
+
SKIP_DB = IS_CI or IS_PYTEST
|
|
|
|
|
|
|
|
|
|
| 13 |
|
| 14 |
+
DB_USER = os.getenv("DB_USER", "postgres")
|
| 15 |
+
DB_PASSWORD = os.getenv("DB_PASSWORD", "password")
|
| 16 |
+
DB_HOST = os.getenv("DB_HOST", "localhost")
|
| 17 |
+
DB_PORT = os.getenv("DB_PORT", "5432")
|
| 18 |
+
DB_NAME = os.getenv("DB_NAME", "test_db")
|
| 19 |
+
|
| 20 |
+
DATABASE_URL = (f"postgresql+psycopg2://{DB_USER}:{DB_PASSWORD}"f"@{DB_HOST}:{DB_PORT}/{DB_NAME}")
|
| 21 |
+
|
| 22 |
+
Base = declarative_base()
|
| 23 |
+
|
| 24 |
+
if not SKIP_DB:
|
| 25 |
+
engine = create_engine(DATABASE_URL)
|
| 26 |
+
SessionLocal = sessionmaker(autocommit = False, autoflush = False, bind = engine)
|
| 27 |
+
|
| 28 |
+
else:
|
| 29 |
+
engine = None
|
| 30 |
+
SessionLocal = None
|
App/model.py
ADDED
|
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from sqlalchemy import Column, Integer, String, Float, Boolean, DateTime, ForeignKey
|
| 2 |
+
from sqlalchemy.sql import func
|
| 3 |
+
from App.database import Base
|
| 4 |
+
|
| 5 |
+
class Input(Base):
|
| 6 |
+
__tablename__ = "inputs"
|
| 7 |
+
|
| 8 |
+
id = Column(Integer, primary_key=True, index=True)
|
| 9 |
+
genre = Column(String)
|
| 10 |
+
statut_marital = Column(String)
|
| 11 |
+
departement = Column(String)
|
| 12 |
+
poste = Column(String)
|
| 13 |
+
domaine_etude = Column(String)
|
| 14 |
+
frequence_deplacement = Column(String)
|
| 15 |
+
heure_supplementaires = Column(Boolean)
|
| 16 |
+
evolution_cat_evol = Column(String)
|
| 17 |
+
categorie_employe = Column(String)
|
| 18 |
+
satisfaction_employee_nature_travail = Column(Integer)
|
| 19 |
+
nombre_participation_pee = Column(Integer)
|
| 20 |
+
ecart_note_evaluation = Column(Integer)
|
| 21 |
+
revenu_mensuel = Column(Integer)
|
| 22 |
+
distance_domicile_travail = Column(Integer)
|
| 23 |
+
satisfaction_globale = Column(Float)
|
| 24 |
+
niveau_education = Column(Integer)
|
| 25 |
+
note_evaluation_actuelle = Column(Integer)
|
| 26 |
+
satisfaction_employee_equipe = Column(Integer)
|
| 27 |
+
age = Column(Integer)
|
| 28 |
+
revenu_par_annee_experience_interne = Column(Integer)
|
| 29 |
+
satisfaction_employee_equilibre_pro_perso = Column(Integer)
|
| 30 |
+
nombre_experiences_precedentes = Column(Integer)
|
| 31 |
+
annees_dans_l_entreprise = Column(Integer)
|
| 32 |
+
nb_formations_suivies = Column(Integer)
|
| 33 |
+
revenu_par_annee_experience_totale = Column(Integer)
|
| 34 |
+
ratio_sans_promotion = Column(Integer)
|
| 35 |
+
satisfaction_employee_environnement = Column(Integer)
|
| 36 |
+
exp_hors_entreprise = Column(Integer)
|
| 37 |
+
mobilite_promotion = Column(Integer)
|
| 38 |
+
annees_depuis_la_derniere_promotion = Column(Integer)
|
| 39 |
+
|
| 40 |
+
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
| 41 |
+
|
| 42 |
+
class Predictions(Base):
|
| 43 |
+
__tablename__ = "predictions"
|
| 44 |
+
id = Column(Integer, primary_key=True, index=True)
|
| 45 |
+
input_id = Column(Integer, ForeignKey("inputs.id"))
|
| 46 |
+
|
| 47 |
+
prediction_label = Column(String)
|
| 48 |
+
prediction_proba = Column(Float)
|
| 49 |
+
model_version = Column(String)
|
| 50 |
+
|
| 51 |
+
created_at = Column(DateTime(timezone=True), server_default=func.now())
|
App/predict.py
CHANGED
|
@@ -4,6 +4,9 @@ from App.schemas import EmployeeFeatures
|
|
| 4 |
import json
|
| 5 |
from pathlib import Path
|
| 6 |
from huggingface_hub import hf_hub_download
|
|
|
|
|
|
|
|
|
|
| 7 |
|
| 8 |
MODEL_REPO = "Diaure/xgb_model"
|
| 9 |
|
|
@@ -43,7 +46,29 @@ def predict_employee(data: dict):
|
|
| 43 |
pred = model.predict(df)[0]
|
| 44 |
proba = model.predict_proba(df)[0][1]
|
| 45 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 46 |
return {
|
| 47 |
"Prediction": classes_mapping[str(pred)],
|
| 48 |
-
"Probabilite_depart": float(proba)
|
| 49 |
-
}
|
|
|
|
| 4 |
import json
|
| 5 |
from pathlib import Path
|
| 6 |
from huggingface_hub import hf_hub_download
|
| 7 |
+
from sqlalchemy.orm import Session
|
| 8 |
+
from App.database import SessionLocal
|
| 9 |
+
from App.model import Input, Predictions
|
| 10 |
|
| 11 |
MODEL_REPO = "Diaure/xgb_model"
|
| 12 |
|
|
|
|
| 46 |
pred = model.predict(df)[0]
|
| 47 |
proba = model.predict_proba(df)[0][1]
|
| 48 |
|
| 49 |
+
db: Session = SessionLocal() if SessionLocal is not None else None
|
| 50 |
+
|
| 51 |
+
if db is not None:
|
| 52 |
+
try:
|
| 53 |
+
# enregistrer les inputs: à chaque appel de POST/predict, on stocke d'abord les entrées de l'utilisateur
|
| 54 |
+
input_row = Input(**data)
|
| 55 |
+
db.add(input_row)
|
| 56 |
+
db.commit()
|
| 57 |
+
db.refresh(input_row)
|
| 58 |
+
|
| 59 |
+
# puis on récupère les ids générés automatiquement et enregistre les prédictions liés aux ids
|
| 60 |
+
pred_row = Predictions(input_id = input_row.id, prediction_label = classes_mapping[str(pred)], prediction_proba = float(proba), model_version = "v1")
|
| 61 |
+
db.add(pred_row)
|
| 62 |
+
db.commit()
|
| 63 |
+
|
| 64 |
+
except Exception as e:
|
| 65 |
+
print("🔥 ERREUR DB :", e)
|
| 66 |
+
raise e
|
| 67 |
+
|
| 68 |
+
finally:
|
| 69 |
+
db.close()
|
| 70 |
+
|
| 71 |
+
# puis on renvoie la réponse API
|
| 72 |
return {
|
| 73 |
"Prediction": classes_mapping[str(pred)],
|
| 74 |
+
"Probabilite_depart": float(proba)}
|
|
|
App/schemas.py
CHANGED
|
@@ -10,7 +10,6 @@ class EmployeeFeatures(BaseModel):
|
|
| 10 |
heure_supplementaires: bool
|
| 11 |
evolution_cat_evol: str
|
| 12 |
categorie_employe: str
|
| 13 |
-
|
| 14 |
satisfaction_employee_nature_travail: int
|
| 15 |
nombre_participation_pee: int
|
| 16 |
ecart_note_evaluation: int
|
|
|
|
| 10 |
heure_supplementaires: bool
|
| 11 |
evolution_cat_evol: str
|
| 12 |
categorie_employe: str
|
|
|
|
| 13 |
satisfaction_employee_nature_travail: int
|
| 14 |
nombre_participation_pee: int
|
| 15 |
ecart_note_evaluation: int
|
README.md
CHANGED
|
@@ -15,9 +15,7 @@ pinned: false
|
|
| 15 |
opérationnels et accessibles via une API performante.
|
| 16 |
|
| 17 |
Ce projet correspond à un **Proof of Concept (POC)** visant à déployer un modèle de machine
|
| 18 |
-
learning en production en appliquant les bonnes pratiques d’ingénierie logicielle
|
| 19 |
-
versionnage, tests, base de données et automatisation.
|
| 20 |
-
|
| 21 |
|
| 22 |
|
| 23 |
## Objectifs du projet
|
|
@@ -29,7 +27,7 @@ versionnage, tests, base de données et automatisation.
|
|
| 29 |
|
| 30 |
|
| 31 |
## Périmètre fonctionnel
|
| 32 |
-
Le projet inclut
|
| 33 |
- Une API développée avec **FastAPI**
|
| 34 |
- L’exposition d’un modèle de machine learning via des endpoints REST
|
| 35 |
- Une base de données **PostgreSQL** pour stocker les entrées/sorties du modèle
|
|
@@ -91,7 +89,7 @@ L’API est déployée publiquement sur Hugging Face Spaces.
|
|
| 91 |
|
| 92 |
- URL de l’API :
|
| 93 |
https://diaure-futurisys-ml-api.hf.space
|
| 94 |
-
- Documentation interactive (Swagger UI)
|
| 95 |
https://diaure-futurisys-ml-api.hf.space/docs. Elle permet de:
|
| 96 |
- visualiser les endpoints
|
| 97 |
- tester directement l’endpoint `/predict`
|
|
@@ -100,12 +98,12 @@ https://diaure-futurisys-ml-api.hf.space/docs. Ele permet de:
|
|
| 100 |
### `Endpoint principal`
|
| 101 |
`POST /predict`
|
| 102 |
|
| 103 |
-
Cet endpoint reçoit les caractéristiques d’un employé et retourne
|
| 104 |
|
| 105 |
- une prédiction lisible ("Reste" ou "Part")
|
| 106 |
- la probabilité associée au départ
|
| 107 |
|
| 108 |
-
Exemple de réponse
|
| 109 |
```json
|
| 110 |
{
|
| 111 |
"Prediction": "Part",
|
|
@@ -121,20 +119,115 @@ L’API expose un endpoint principal de prédiction.
|
|
| 121 |
|
| 122 |
**POST /predict**
|
| 123 |
- Description : retourne une prédiction de départ d’un employé
|
| 124 |
-
- Validation des données
|
| 125 |
-
- Réponses possibles
|
| 126 |
-
- 200
|
| 127 |
-
- 422
|
| 128 |
|
| 129 |
-
##
|
| 130 |
-
|
| 131 |
-
|
| 132 |
-
|
| 133 |
-
-
|
| 134 |
-
-
|
| 135 |
-
-
|
| 136 |
-
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 137 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 138 |
|
| 139 |
|
| 140 |
## Structure du projet
|
|
@@ -143,7 +236,9 @@ futurisys_ml-api/
|
|
| 143 |
├── github/workflows
|
| 144 |
│ ├── ci.yml # Description des évènements déclenchant les tests
|
| 145 |
├── app/ # Code applicatif principal
|
|
|
|
| 146 |
│ ├── main.py # Point d’entrée de l’API
|
|
|
|
| 147 |
│ ├── predict.py # Application du modèle
|
| 148 |
│ ├── schemas.py # Validation des données (Pydantic)
|
| 149 |
│ ── model/ # Elements du modèle
|
|
@@ -151,10 +246,14 @@ futurisys_ml-api/
|
|
| 151 |
│ ├── modele_final_xgb.joblib # Modèle final avec hyperparamètres
|
| 152 |
│ ├── preprocesseur_fitted.joblib # Pipeline entrainé
|
| 153 |
|
|
| 154 |
-
├── scripts/
|
| 155 |
-
├──
|
| 156 |
-
│ ├──
|
|
|
|
|
|
|
|
|
|
| 157 |
|
|
|
|
|
| 158 |
├── .gitignore # Nettoyage du dépôt
|
| 159 |
├── Dockerfile # Reproduction du dépôt
|
| 160 |
├── poetry.lock # Nettoyage du dépôt
|
|
|
|
| 15 |
opérationnels et accessibles via une API performante.
|
| 16 |
|
| 17 |
Ce projet correspond à un **Proof of Concept (POC)** visant à déployer un modèle de machine
|
| 18 |
+
learning en production en appliquant les bonnes pratiques d’ingénierie logicielle: versionnage, tests, base de données et automatisation.
|
|
|
|
|
|
|
| 19 |
|
| 20 |
|
| 21 |
## Objectifs du projet
|
|
|
|
| 27 |
|
| 28 |
|
| 29 |
## Périmètre fonctionnel
|
| 30 |
+
Le projet inclut:
|
| 31 |
- Une API développée avec **FastAPI**
|
| 32 |
- L’exposition d’un modèle de machine learning via des endpoints REST
|
| 33 |
- Une base de données **PostgreSQL** pour stocker les entrées/sorties du modèle
|
|
|
|
| 89 |
|
| 90 |
- URL de l’API :
|
| 91 |
https://diaure-futurisys-ml-api.hf.space
|
| 92 |
+
- Documentation interactive (Swagger UI):
|
| 93 |
https://diaure-futurisys-ml-api.hf.space/docs. Elle permet de:
|
| 94 |
- visualiser les endpoints
|
| 95 |
- tester directement l’endpoint `/predict`
|
|
|
|
| 98 |
### `Endpoint principal`
|
| 99 |
`POST /predict`
|
| 100 |
|
| 101 |
+
Cet endpoint reçoit les caractéristiques d’un employé et retourne:
|
| 102 |
|
| 103 |
- une prédiction lisible ("Reste" ou "Part")
|
| 104 |
- la probabilité associée au départ
|
| 105 |
|
| 106 |
+
Exemple de réponse:
|
| 107 |
```json
|
| 108 |
{
|
| 109 |
"Prediction": "Part",
|
|
|
|
| 119 |
|
| 120 |
**POST /predict**
|
| 121 |
- Description : retourne une prédiction de départ d’un employé
|
| 122 |
+
- Validation des données: Pydantic
|
| 123 |
+
- Réponses possibles:
|
| 124 |
+
- 200: prédiction valide
|
| 125 |
+
- 422: données invalides
|
| 126 |
|
| 127 |
+
## Base de données et traçabilité des prédictions
|
| 128 |
+
### `Objectifs`
|
| 129 |
+
|
| 130 |
+
L’intégration d’une base de données PostgreSQL permet d’inscrire le projet dans une logique MLOps et de répondre à plusieurs objectifs clés:
|
| 131 |
+
- assurer la traçabilité complète des prédictions du modèle
|
| 132 |
+
- conserver l’historique des données d’entrée utilisateur
|
| 133 |
+
- stocker les résultats de prédiction (label, probabilité, version du modèle)
|
| 134 |
+
- préparer une architecture compatible avec un déploiement en production.
|
| 135 |
+
|
| 136 |
+
### `Méthodologie utilisée`
|
| 137 |
+
- **PostgreSQL** a été retenu pour:
|
| 138 |
+
- sa robustesse et sa fiabilité
|
| 139 |
+
- sa compatibilité native avec SQLAlchemy
|
| 140 |
+
- son usage courant en environnement professionnel
|
| 141 |
+
|
| 142 |
+
- **SQLAlchemy** est utilisé comme couche d’abstraction:
|
| 143 |
+
- gestion centralisée de la connexion à la base
|
| 144 |
+
- cohérence entre le schéma Python et la base SQL
|
| 145 |
+
|
| 146 |
+
Les identifiants de connexion sont stockés dans des variables d’environnement (`.env`) afin d’éviter toute exposition de secrets dans le dépôt Git.
|
| 147 |
+
|
| 148 |
+
### `Modélisation de la base de données`
|
| 149 |
+
La base de données repose sur trois tables distinctes, chacune ayant un rôle précis.
|
| 150 |
+
1. `employees_dataset - Dataset de référence`
|
| 151 |
+
Il contient le dataset final nettoyé et préparé lors de l'entraînement du modèle en incluant l'ensemble des **32 features** du modèle. Il sert de:
|
| 152 |
+
- référence de schéma
|
| 153 |
+
- source de validation
|
| 154 |
+
- base documentaire du modèle
|
| 155 |
+
|
| 156 |
+
C'est une table qui n'est jamais alimentée par l'utilisateur.
|
| 157 |
+
|
| 158 |
+
```python
|
| 159 |
+
load_dotenv()
|
| 160 |
+
|
| 161 |
+
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
|
| 162 |
+
csv_path = os.path.join(BASE_DIR, "dataset_final.csv")
|
| 163 |
+
|
| 164 |
+
df = pd.read_csv(csv_path, encoding="latin-1")
|
| 165 |
|
| 166 |
+
DB_USER = os.getenv("DB_USER")
|
| 167 |
+
DB_PASSWORD = os.getenv("DB_PASSWORD")
|
| 168 |
+
DB_HOST = os.getenv("DB_HOST")
|
| 169 |
+
DB_PORT = os.getenv("DB_PORT")
|
| 170 |
+
DB_NAME = os.getenv("DB_NAME")
|
| 171 |
+
|
| 172 |
+
DATABASE_URL = (f"postgresql+psycopg2://{DB_USER}:{DB_PASSWORD}"f"@{DB_HOST}:{DB_PORT}/{DB_NAME}")
|
| 173 |
+
|
| 174 |
+
engine = create_engine(DATABASE_URL)
|
| 175 |
+
|
| 176 |
+
df.to_sql("employees_dataset", engine, if_exists="replace", index=False)
|
| 177 |
+
```
|
| 178 |
+
|
| 179 |
+
2. `inputs - Entrées utilisateur`
|
| 180 |
+
- Enregistre chaque requête utilisateur envoyée à l'endpoint `/predict`
|
| 181 |
+
- Contient exactement les features attendues par le modèle
|
| 182 |
+
- Structure strictement alignée avec le schéma Pydantic (`EmployeeFeatures`)
|
| 183 |
+
- Permet:
|
| 184 |
+
- l'audit des predictions
|
| 185 |
+
- l'analyse à posteriori
|
| 186 |
+
- la reproductibilité des résultats.
|
| 187 |
+
```python
|
| 188 |
+
class Input(Base):
|
| 189 |
+
__tablename__ = "inputs"
|
| 190 |
+
|
| 191 |
+
id = Column(Integer, primary_key=True, index=True)
|
| 192 |
+
genre = Column(String)
|
| 193 |
+
statut_marital = Column(String)
|
| 194 |
+
departement = Column(String)
|
| 195 |
+
poste = Column(String)
|
| 196 |
+
```
|
| 197 |
+
|
| 198 |
+
3. `predictions - Résultats du modèle`
|
| 199 |
+
- Contient:
|
| 200 |
+
- le label de prédiction
|
| 201 |
+
- la probabilité associée
|
| 202 |
+
- Reliée à `inputs` via une clé étrangère
|
| 203 |
+
- Garantit une traçabilité complète.
|
| 204 |
+
```python
|
| 205 |
+
class Predictions(Base):
|
| 206 |
+
__tablename__ = "predictions"
|
| 207 |
+
id = Column(Integer, primary_key=True, index=True)
|
| 208 |
+
input_id = Column(Integer, ForeignKey("inputs.id"))
|
| 209 |
+
|
| 210 |
+
prediction_label = Column(String)
|
| 211 |
+
prediction_proba = Column(Float)
|
| 212 |
+
model_version = Column(String)
|
| 213 |
+
```
|
| 214 |
+
|
| 215 |
+
### `Interaction API <> Base de données`
|
| 216 |
+
Lors d’un appel à l’endpoint `POST /predict`:
|
| 217 |
+
- les données utilisateur sont validées via **Pydantic**
|
| 218 |
+
- les entrées sont enregistrées dans la table **inputs**
|
| 219 |
+
- le modèle est exécuté
|
| 220 |
+
- la prédiction est enregistrée dans la table **predictions**
|
| 221 |
+
- la réponse est retournée à l’utilisateur.
|
| 222 |
+
|
| 223 |
+
## Stack technique
|
| 224 |
+
- **Langage**: Python
|
| 225 |
+
- **API**: FastAPI
|
| 226 |
+
- **Machine Learning**: scikit-learn
|
| 227 |
+
- **Base de données**: PostgreSQL
|
| 228 |
+
- **Tests**: Pytest, pytest-cov
|
| 229 |
+
- **CI/CD**: GitHub Actions
|
| 230 |
+
- **Versionnage**: Git / GitHub
|
| 231 |
|
| 232 |
|
| 233 |
## Structure du projet
|
|
|
|
| 236 |
├── github/workflows
|
| 237 |
│ ├── ci.yml # Description des évènements déclenchant les tests
|
| 238 |
├── app/ # Code applicatif principal
|
| 239 |
+
│ ├── database.py # Point de connexion à la base PostgreSQL
|
| 240 |
│ ├── main.py # Point d’entrée de l’API
|
| 241 |
+
│ ├── model.py # Définition des tables de la database
|
| 242 |
│ ├── predict.py # Application du modèle
|
| 243 |
│ ├── schemas.py # Validation des données (Pydantic)
|
| 244 |
│ ── model/ # Elements du modèle
|
|
|
|
| 246 |
│ ├── modele_final_xgb.joblib # Modèle final avec hyperparamètres
|
| 247 |
│ ├── preprocesseur_fitted.joblib # Pipeline entrainé
|
| 248 |
|
|
| 249 |
+
├── scripts/ # Scripts bd (BD, données)
|
| 250 |
+
│ ├── create_tables.py # Création des tables définies dans model.py
|
| 251 |
+
│ ├── dataset_final.csv # Data final
|
| 252 |
+
│ ├── insert_dataset.py # Code chargement de la table dataset_final
|
| 253 |
+
├── tests/ # Tests unitaires, fonctionnels
|
| 254 |
+
│ ├── test_api.py # Test automatisé de l'API via Pytest
|
| 255 |
|
|
| 256 |
+
├── .env # Stockage des variables sensibles et de configuration
|
| 257 |
├── .gitignore # Nettoyage du dépôt
|
| 258 |
├── Dockerfile # Reproduction du dépôt
|
| 259 |
├── poetry.lock # Nettoyage du dépôt
|
poetry.lock
CHANGED
|
@@ -1882,6 +1882,83 @@ files = [
|
|
| 1882 |
dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "psleak", "pylint", "pyperf", "pypinfo", "pytest", "pytest-cov", "pytest-instafail", "pytest-xdist", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "validate-pyproject[all]", "virtualenv", "vulture", "wheel"]
|
| 1883 |
test = ["psleak", "pytest", "pytest-instafail", "pytest-xdist", "setuptools"]
|
| 1884 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1885 |
[[package]]
|
| 1886 |
name = "ptyprocess"
|
| 1887 |
version = "0.7.0"
|
|
@@ -2146,6 +2223,21 @@ files = [
|
|
| 2146 |
[package.dependencies]
|
| 2147 |
six = ">=1.5"
|
| 2148 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 2149 |
[[package]]
|
| 2150 |
name = "pytz"
|
| 2151 |
version = "2025.2"
|
|
@@ -2925,4 +3017,4 @@ scikit-learn = ["scikit-learn"]
|
|
| 2925 |
[metadata]
|
| 2926 |
lock-version = "2.1"
|
| 2927 |
python-versions = "3.11.9"
|
| 2928 |
-
content-hash = "
|
|
|
|
| 1882 |
dev = ["abi3audit", "black", "check-manifest", "coverage", "packaging", "psleak", "pylint", "pyperf", "pypinfo", "pytest", "pytest-cov", "pytest-instafail", "pytest-xdist", "requests", "rstcheck", "ruff", "setuptools", "sphinx", "sphinx_rtd_theme", "toml-sort", "twine", "validate-pyproject[all]", "virtualenv", "vulture", "wheel"]
|
| 1883 |
test = ["psleak", "pytest", "pytest-instafail", "pytest-xdist", "setuptools"]
|
| 1884 |
|
| 1885 |
+
[[package]]
|
| 1886 |
+
name = "psycopg2-binary"
|
| 1887 |
+
version = "2.9.11"
|
| 1888 |
+
description = "psycopg2 - Python-PostgreSQL Database Adapter"
|
| 1889 |
+
optional = false
|
| 1890 |
+
python-versions = ">=3.9"
|
| 1891 |
+
groups = ["main"]
|
| 1892 |
+
files = [
|
| 1893 |
+
{file = "psycopg2-binary-2.9.11.tar.gz", hash = "sha256:b6aed9e096bf63f9e75edf2581aa9a7e7186d97ab5c177aa6c87797cd591236c"},
|
| 1894 |
+
{file = "psycopg2_binary-2.9.11-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d6fe6b47d0b42ce1c9f1fa3e35bb365011ca22e39db37074458f27921dca40f2"},
|
| 1895 |
+
{file = "psycopg2_binary-2.9.11-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a6c0e4262e089516603a09474ee13eabf09cb65c332277e39af68f6233911087"},
|
| 1896 |
+
{file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c47676e5b485393f069b4d7a811267d3168ce46f988fa602658b8bb901e9e64d"},
|
| 1897 |
+
{file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a28d8c01a7b27a1e3265b11250ba7557e5f72b5ee9e5f3a2fa8d2949c29bf5d2"},
|
| 1898 |
+
{file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5f3f2732cf504a1aa9e9609d02f79bea1067d99edf844ab92c247bbca143303b"},
|
| 1899 |
+
{file = "psycopg2_binary-2.9.11-cp310-cp310-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:865f9945ed1b3950d968ec4690ce68c55019d79e4497366d36e090327ce7db14"},
|
| 1900 |
+
{file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:91537a8df2bde69b1c1db01d6d944c831ca793952e4f57892600e96cee95f2cd"},
|
| 1901 |
+
{file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4dca1f356a67ecb68c81a7bc7809f1569ad9e152ce7fd02c2f2036862ca9f66b"},
|
| 1902 |
+
{file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:0da4de5c1ac69d94ed4364b6cbe7190c1a70d325f112ba783d83f8440285f152"},
|
| 1903 |
+
{file = "psycopg2_binary-2.9.11-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37d8412565a7267f7d79e29ab66876e55cb5e8e7b3bbf94f8206f6795f8f7e7e"},
|
| 1904 |
+
{file = "psycopg2_binary-2.9.11-cp310-cp310-win_amd64.whl", hash = "sha256:c665f01ec8ab273a61c62beeb8cce3014c214429ced8a308ca1fc410ecac3a39"},
|
| 1905 |
+
{file = "psycopg2_binary-2.9.11-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:0e8480afd62362d0a6a27dd09e4ca2def6fa50ed3a4e7c09165266106b2ffa10"},
|
| 1906 |
+
{file = "psycopg2_binary-2.9.11-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:763c93ef1df3da6d1a90f86ea7f3f806dc06b21c198fa87c3c25504abec9404a"},
|
| 1907 |
+
{file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2e164359396576a3cc701ba8af4751ae68a07235d7a380c631184a611220d9a4"},
|
| 1908 |
+
{file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d57c9c387660b8893093459738b6abddbb30a7eab058b77b0d0d1c7d521ddfd7"},
|
| 1909 |
+
{file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2c226ef95eb2250974bf6fa7a842082b31f68385c4f3268370e3f3870e7859ee"},
|
| 1910 |
+
{file = "psycopg2_binary-2.9.11-cp311-cp311-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a311f1edc9967723d3511ea7d2708e2c3592e3405677bf53d5c7246753591fbb"},
|
| 1911 |
+
{file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ebb415404821b6d1c47353ebe9c8645967a5235e6d88f914147e7fd411419e6f"},
|
| 1912 |
+
{file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f07c9c4a5093258a03b28fab9b4f151aa376989e7f35f855088234e656ee6a94"},
|
| 1913 |
+
{file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:00ce1830d971f43b667abe4a56e42c1e2d594b32da4802e44a73bacacb25535f"},
|
| 1914 |
+
{file = "psycopg2_binary-2.9.11-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:cffe9d7697ae7456649617e8bb8d7a45afb71cd13f7ab22af3e5c61f04840908"},
|
| 1915 |
+
{file = "psycopg2_binary-2.9.11-cp311-cp311-win_amd64.whl", hash = "sha256:304fd7b7f97eef30e91b8f7e720b3db75fee010b520e434ea35ed1ff22501d03"},
|
| 1916 |
+
{file = "psycopg2_binary-2.9.11-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be9b840ac0525a283a96b556616f5b4820e0526addb8dcf6525a0fa162730be4"},
|
| 1917 |
+
{file = "psycopg2_binary-2.9.11-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f090b7ddd13ca842ebfe301cd587a76a4cf0913b1e429eb92c1be5dbeb1a19bc"},
|
| 1918 |
+
{file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ab8905b5dcb05bf3fb22e0cf90e10f469563486ffb6a96569e51f897c750a76a"},
|
| 1919 |
+
{file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:bf940cd7e7fec19181fdbc29d76911741153d51cab52e5c21165f3262125685e"},
|
| 1920 |
+
{file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fa0f693d3c68ae925966f0b14b8edda71696608039f4ed61b1fe9ffa468d16db"},
|
| 1921 |
+
{file = "psycopg2_binary-2.9.11-cp312-cp312-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a1cf393f1cdaf6a9b57c0a719a1068ba1069f022a59b8b1fe44b006745b59757"},
|
| 1922 |
+
{file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ef7a6beb4beaa62f88592ccc65df20328029d721db309cb3250b0aae0fa146c3"},
|
| 1923 |
+
{file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:31b32c457a6025e74d233957cc9736742ac5a6cb196c6b68499f6bb51390bd6a"},
|
| 1924 |
+
{file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:edcb3aeb11cb4bf13a2af3c53a15b3d612edeb6409047ea0b5d6a21a9d744b34"},
|
| 1925 |
+
{file = "psycopg2_binary-2.9.11-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:62b6d93d7c0b61a1dd6197d208ab613eb7dcfdcca0a49c42ceb082257991de9d"},
|
| 1926 |
+
{file = "psycopg2_binary-2.9.11-cp312-cp312-win_amd64.whl", hash = "sha256:b33fabeb1fde21180479b2d4667e994de7bbf0eec22832ba5d9b5e4cf65b6c6d"},
|
| 1927 |
+
{file = "psycopg2_binary-2.9.11-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b8fb3db325435d34235b044b199e56cdf9ff41223a4b9752e8576465170bb38c"},
|
| 1928 |
+
{file = "psycopg2_binary-2.9.11-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:366df99e710a2acd90efed3764bb1e28df6c675d33a7fb40df9b7281694432ee"},
|
| 1929 |
+
{file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8c55b385daa2f92cb64b12ec4536c66954ac53654c7f15a203578da4e78105c0"},
|
| 1930 |
+
{file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c0377174bf1dd416993d16edc15357f6eb17ac998244cca19bc67cdc0e2e5766"},
|
| 1931 |
+
{file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5c6ff3335ce08c75afaed19e08699e8aacf95d4a260b495a4a8545244fe2ceb3"},
|
| 1932 |
+
{file = "psycopg2_binary-2.9.11-cp313-cp313-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:84011ba3109e06ac412f95399b704d3d6950e386b7994475b231cf61eec2fc1f"},
|
| 1933 |
+
{file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ba34475ceb08cccbdd98f6b46916917ae6eeb92b5ae111df10b544c3a4621dc4"},
|
| 1934 |
+
{file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:b31e90fdd0f968c2de3b26ab014314fe814225b6c324f770952f7d38abf17e3c"},
|
| 1935 |
+
{file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:d526864e0f67f74937a8fce859bd56c979f5e2ec57ca7c627f5f1071ef7fee60"},
|
| 1936 |
+
{file = "psycopg2_binary-2.9.11-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04195548662fa544626c8ea0f06561eb6203f1984ba5b4562764fbeb4c3d14b1"},
|
| 1937 |
+
{file = "psycopg2_binary-2.9.11-cp313-cp313-win_amd64.whl", hash = "sha256:efff12b432179443f54e230fdf60de1f6cc726b6c832db8701227d089310e8aa"},
|
| 1938 |
+
{file = "psycopg2_binary-2.9.11-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:92e3b669236327083a2e33ccfa0d320dd01b9803b3e14dd986a4fc54aa00f4e1"},
|
| 1939 |
+
{file = "psycopg2_binary-2.9.11-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e0deeb03da539fa3577fcb0b3f2554a97f7e5477c246098dbb18091a4a01c16f"},
|
| 1940 |
+
{file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9b52a3f9bb540a3e4ec0f6ba6d31339727b2950c9772850d6545b7eae0b9d7c5"},
|
| 1941 |
+
{file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:db4fd476874ccfdbb630a54426964959e58da4c61c9feba73e6094d51303d7d8"},
|
| 1942 |
+
{file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:47f212c1d3be608a12937cc131bd85502954398aaa1320cb4c14421a0ffccf4c"},
|
| 1943 |
+
{file = "psycopg2_binary-2.9.11-cp314-cp314-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e35b7abae2b0adab776add56111df1735ccc71406e56203515e228a8dc07089f"},
|
| 1944 |
+
{file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:fcf21be3ce5f5659daefd2b3b3b6e4727b028221ddc94e6c1523425579664747"},
|
| 1945 |
+
{file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:9bd81e64e8de111237737b29d68039b9c813bdf520156af36d26819c9a979e5f"},
|
| 1946 |
+
{file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:32770a4d666fbdafab017086655bcddab791d7cb260a16679cc5a7338b64343b"},
|
| 1947 |
+
{file = "psycopg2_binary-2.9.11-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:c3cb3a676873d7506825221045bd70e0427c905b9c8ee8d6acd70cfcbd6e576d"},
|
| 1948 |
+
{file = "psycopg2_binary-2.9.11-cp314-cp314-win_amd64.whl", hash = "sha256:4012c9c954dfaccd28f94e84ab9f94e12df76b4afb22331b1f0d3154893a6316"},
|
| 1949 |
+
{file = "psycopg2_binary-2.9.11-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:20e7fb94e20b03dcc783f76c0865f9da39559dcc0c28dd1a3fce0d01902a6b9c"},
|
| 1950 |
+
{file = "psycopg2_binary-2.9.11-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4bdab48575b6f870f465b397c38f1b415520e9879fdf10a53ee4f49dcbdf8a21"},
|
| 1951 |
+
{file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:9d3a9edcfbe77a3ed4bc72836d466dfce4174beb79eda79ea155cc77237ed9e8"},
|
| 1952 |
+
{file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:44fc5c2b8fa871ce7f0023f619f1349a0aa03a0857f2c96fbc01c657dcbbdb49"},
|
| 1953 |
+
{file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:9c55460033867b4622cda1b6872edf445809535144152e5d14941ef591980edf"},
|
| 1954 |
+
{file = "psycopg2_binary-2.9.11-cp39-cp39-manylinux_2_38_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:2d11098a83cca92deaeaed3d58cfd150d49b3b06ee0d0852be466bf87596899e"},
|
| 1955 |
+
{file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:691c807d94aecfbc76a14e1408847d59ff5b5906a04a23e12a89007672b9e819"},
|
| 1956 |
+
{file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:8b81627b691f29c4c30a8f322546ad039c40c328373b11dff7490a3e1b517855"},
|
| 1957 |
+
{file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:b637d6d941209e8d96a072d7977238eea128046effbf37d1d8b2c0764750017d"},
|
| 1958 |
+
{file = "psycopg2_binary-2.9.11-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:41360b01c140c2a03d346cec3280cf8a71aa07d94f3b1509fa0161c366af66b4"},
|
| 1959 |
+
{file = "psycopg2_binary-2.9.11-cp39-cp39-win_amd64.whl", hash = "sha256:875039274f8a2361e5207857899706da840768e2a775bf8c65e82f60b197df02"},
|
| 1960 |
+
]
|
| 1961 |
+
|
| 1962 |
[[package]]
|
| 1963 |
name = "ptyprocess"
|
| 1964 |
version = "0.7.0"
|
|
|
|
| 2223 |
[package.dependencies]
|
| 2224 |
six = ">=1.5"
|
| 2225 |
|
| 2226 |
+
[[package]]
|
| 2227 |
+
name = "python-dotenv"
|
| 2228 |
+
version = "1.2.1"
|
| 2229 |
+
description = "Read key-value pairs from a .env file and set them as environment variables"
|
| 2230 |
+
optional = false
|
| 2231 |
+
python-versions = ">=3.9"
|
| 2232 |
+
groups = ["main"]
|
| 2233 |
+
files = [
|
| 2234 |
+
{file = "python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61"},
|
| 2235 |
+
{file = "python_dotenv-1.2.1.tar.gz", hash = "sha256:42667e897e16ab0d66954af0e60a9caa94f0fd4ecf3aaf6d2d260eec1aa36ad6"},
|
| 2236 |
+
]
|
| 2237 |
+
|
| 2238 |
+
[package.extras]
|
| 2239 |
+
cli = ["click (>=5.0)"]
|
| 2240 |
+
|
| 2241 |
[[package]]
|
| 2242 |
name = "pytz"
|
| 2243 |
version = "2025.2"
|
|
|
|
| 3017 |
[metadata]
|
| 3018 |
lock-version = "2.1"
|
| 3019 |
python-versions = "3.11.9"
|
| 3020 |
+
content-hash = "4e865f02d97ed8361a2eae0ad8e4f5e6e91092ffd20634c88693ba4ae861215e"
|
pyproject.toml
CHANGED
|
@@ -24,7 +24,9 @@ dependencies = [
|
|
| 24 |
"ipykernel>=6.25,<7.0",
|
| 25 |
"huggingface-hub ==1.3.1",
|
| 26 |
"fastapi ==0.115.0",
|
| 27 |
-
"uvicorn ==0.30.1"
|
|
|
|
|
|
|
| 28 |
]
|
| 29 |
|
| 30 |
[build-system]
|
|
|
|
| 24 |
"ipykernel>=6.25,<7.0",
|
| 25 |
"huggingface-hub ==1.3.1",
|
| 26 |
"fastapi ==0.115.0",
|
| 27 |
+
"uvicorn ==0.30.1",
|
| 28 |
+
"python-dotenv ==1.2.1",
|
| 29 |
+
"psycopg2-binary ==2.9.11"
|
| 30 |
]
|
| 31 |
|
| 32 |
[build-system]
|
requirements.txt
CHANGED
|
@@ -13,3 +13,4 @@ pandas==2.2.2
|
|
| 13 |
scikit-learn==1.4.2
|
| 14 |
xgboost ==2.0.3
|
| 15 |
huggingface-hub ==1.3.1
|
|
|
|
|
|
| 13 |
scikit-learn==1.4.2
|
| 14 |
xgboost ==2.0.3
|
| 15 |
huggingface-hub ==1.3.1
|
| 16 |
+
python-dotenv ==1.2.1
|
scripts/create_tables.py
ADDED
|
@@ -0,0 +1,6 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from App.database import engine
|
| 2 |
+
from App.database import Base
|
| 3 |
+
|
| 4 |
+
Base.metadata.create_all(bind=engine)
|
| 5 |
+
|
| 6 |
+
print("Tables créées avec succès")
|
scripts/insert_dataset.py
CHANGED
|
@@ -16,10 +16,7 @@ DB_HOST = os.getenv("DB_HOST")
|
|
| 16 |
DB_PORT = os.getenv("DB_PORT")
|
| 17 |
DB_NAME = os.getenv("DB_NAME")
|
| 18 |
|
| 19 |
-
DATABASE_URL = (
|
| 20 |
-
f"postgresql+psycopg2://{DB_USER}:{DB_PASSWORD}"
|
| 21 |
-
f"@{DB_HOST}:{DB_PORT}/{DB_NAME}"
|
| 22 |
-
)
|
| 23 |
|
| 24 |
engine = create_engine(DATABASE_URL)
|
| 25 |
|
|
|
|
| 16 |
DB_PORT = os.getenv("DB_PORT")
|
| 17 |
DB_NAME = os.getenv("DB_NAME")
|
| 18 |
|
| 19 |
+
DATABASE_URL = (f"postgresql+psycopg2://{DB_USER}:{DB_PASSWORD}"f"@{DB_HOST}:{DB_PORT}/{DB_NAME}")
|
|
|
|
|
|
|
|
|
|
| 20 |
|
| 21 |
engine = create_engine(DATABASE_URL)
|
| 22 |
|