Alquilar78 committed on
Commit
c4d546e
·
1 Parent(s): b9d6ffb

Modification Config et Squelette

Browse files
.dockerignore ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ # Dossiers d'environnement virtuel
2
+ .venv/
3
+ env/
4
+ venv/
5
+ .env/
6
+
7
+ # Cache Python
8
+ __pycache__/
9
+ *.pyc
10
+ .pytest_cache/
.gitignore ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
+ venv/
2
+ env/
3
+ __pycache__/
4
+ *.pyc
Dockerfile CHANGED
@@ -8,10 +8,11 @@ WORKDIR /app
8
  COPY . /app
9
 
10
  # Install any needed packages specified in requirements.txt
11
- RUN pip install pandas
 
12
 
13
  # Make a volume mount point for the input/output CSV files
14
  VOLUME ["/app/input_data.csv", "/app/output_data.csv"]
15
 
16
  # Run the application (by default, run the main ETL process)
17
- CMD ["python", "etl_process.py"]
 
8
  COPY . /app
9
 
10
  # Install any needed packages specified in requirements.txt
11
+ RUN pip install --no-cache-dir --upgrade pip \
12
+ && pip install --no-cache-dir -r requirements.txt
13
 
14
  # Make a volume mount point for the input/output CSV files
15
  VOLUME ["/app/input_data.csv", "/app/output_data.csv"]
16
 
17
  # Run the application (by default, run the main ETL process)
18
+ CMD ["python", "main.py"]
data/output_data.csv ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ id,name,age,city,salary,tax,net_salary
2
+ 1,John Doe,28.0,New York,70000,7000.0,63000.0
3
+ 2,Jane Smith,34.0,Los Angeles,80000,8000.0,72000.0
4
+ 3,Bob Johnson,45.0,Chicago,90000,9000.0,81000.0
5
+ 4,Alice Williams,29.0,San Francisco,85000,8500.0,76500.0
6
+ 6,Eve Davis,38.0,Boston,95000,9500.0,85500.0
jenkins/Jenkinsfile CHANGED
@@ -2,8 +2,8 @@ pipeline {
2
  agent any
3
 
4
  environment {
5
- TEST_IMAGE = 'paycare-tests'
6
- ETL_IMAGE = 'paycare-etl'
7
  AWS_ACCESS_KEY_ID = credentials('aws-access-key')
8
  AWS_SECRET_ACCESS_KEY = credentials('aws-secret-key')
9
  AWS_DEFAULT_REGION = 'eu-central-1'
@@ -32,24 +32,24 @@ pipeline {
32
  --env AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} \
33
  --env AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} \
34
  --env AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION} \
35
- paycare-tests
36
  '''
37
  }
38
  }
39
 
40
- stage('Build ETL Container') {
41
  steps {
42
- sh 'docker build -t ${ETL_IMAGE} .'
43
  }
44
  }
45
 
46
- stage('Run ETL in Docker') {
47
  steps {
48
  script {
49
  sh '''
50
  docker run --rm \
51
  -v ${WORKSPACE}/data:/app/data \
52
- ${ETL_IMAGE}
53
  '''
54
 
55
  sh 'ls -l ${WORKSPACE}/data'
@@ -61,11 +61,11 @@ pipeline {
61
 
62
  post {
63
  success {
64
- echo '✅ ETL Pipeline completed successfully!'
65
  archiveArtifacts artifacts: 'data/output_data.csv', fingerprint: true
66
  }
67
  failure {
68
- echo '❌ ETL Pipeline failed.'
69
  }
70
  }
71
  }
 
2
  agent any
3
 
4
  environment {
5
+ TEST_IMAGE = 'air-quality-tests'
6
+ APP_IMAGE = 'air-quality-app'
7
  AWS_ACCESS_KEY_ID = credentials('aws-access-key')
8
  AWS_SECRET_ACCESS_KEY = credentials('aws-secret-key')
9
  AWS_DEFAULT_REGION = 'eu-central-1'
 
32
  --env AWS_ACCESS_KEY_ID=${AWS_ACCESS_KEY_ID} \
33
  --env AWS_SECRET_ACCESS_KEY=${AWS_SECRET_ACCESS_KEY} \
34
  --env AWS_DEFAULT_REGION=${AWS_DEFAULT_REGION} \
35
+ ${TEST_IMAGE}
36
  '''
37
  }
38
  }
39
 
40
+ stage('Build App Container') {
41
  steps {
42
+ sh 'docker build -t ${APP_IMAGE} .'
43
  }
44
  }
45
 
46
+ stage('Run App in Docker') {
47
  steps {
48
  script {
49
  sh '''
50
  docker run --rm \
51
  -v ${WORKSPACE}/data:/app/data \
52
+ ${APP_IMAGE}
53
  '''
54
 
55
  sh 'ls -l ${WORKSPACE}/data'
 
61
 
62
  post {
63
  success {
64
+ echo '✅ App Pipeline completed successfully!'
65
  archiveArtifacts artifacts: 'data/output_data.csv', fingerprint: true
66
  }
67
  failure {
68
+ echo '❌ App Pipeline failed.'
69
  }
70
  }
71
  }
etl_process.py → main.py RENAMED
File without changes
tests/.dockerignore ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ # Dossiers d'environnement virtuel
2
+ .venv/
3
+ env/
4
+ venv/
5
+ .env/
6
+
7
+ # Cache Python
8
+ __pycache__/
9
+ *.pyc
10
+ .pytest_cache/
tests/Dockerfile CHANGED
@@ -6,12 +6,13 @@ WORKDIR /app
6
 
7
  # Copier uniquement les fichiers nécessaires
8
  COPY tests/requirements.txt requirements.txt
9
- RUN pip install --no-cache-dir --upgrade pip && pip install --no-cache-dir -r requirements.txt
 
10
 
11
  # Copier le code source et les tests (avec le bon chemin)
12
- COPY etl_process.py /app/
13
- COPY tests/test_etl.py tests/upload_s3.py /app/
14
 
15
  # Exécuter les tests Pytest au démarrage
16
  # CMD ["pytest", "test_etl.py", "--junitxml=results.xml"]
17
- CMD pytest test_etl.py --junitxml=results.xml && python upload_s3.py
 
6
 
7
  # Copier uniquement les fichiers nécessaires
8
  COPY tests/requirements.txt requirements.txt
9
+ RUN pip install --no-cache-dir --upgrade pip \
10
+ && pip install --no-cache-dir -r requirements.txt
11
 
12
  # Copier le code source et les tests (avec le bon chemin)
13
+ COPY main.py /app/
14
+ COPY tests/test_main.py tests/upload_s3.py /app/
15
 
16
  # Exécuter les tests Pytest au démarrage
17
  # CMD ["pytest", "test_etl.py", "--junitxml=results.xml"]
18
+ CMD pytest test_main.py --junitxml=results.xml && python upload_s3.py
tests/__init__.py ADDED
File without changes
tests/requirements.txt CHANGED
@@ -1,4 +1,4 @@
1
- pandas==2.0.3
2
- numpy==1.24.4
3
  pytest
4
  boto3
 
1
+ pandas
2
+ numpy
3
  pytest
4
  boto3
tests/{test_etl.py → test_main.py} RENAMED
@@ -1,7 +1,7 @@
1
  import pytest
2
  import pandas as pd
3
  from io import StringIO
4
- from etl_process import extract_data, transform_data, load_data
5
 
6
 
7
  # Test for data extraction
 
1
  import pytest
2
  import pandas as pd
3
  from io import StringIO
4
+ from main import extract_data, transform_data, load_data
5
 
6
 
7
  # Test for data extraction