Inder-26 committed on
Commit
da68771
·
1 Parent(s): c1c191a

Training pipeline completed and FastAPI app created

Browse files
app.py ADDED
@@ -0,0 +1,98 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
"""FastAPI service exposing training and prediction endpoints for the
network-security (phishing-URL) ML pipeline."""

import os
import sys

import certifi
import pandas as pd
import pymongo
from dotenv import load_dotenv
from fastapi import FastAPI, File, Request, UploadFile
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import Response
from fastapi.templating import Jinja2Templates
from starlette.responses import RedirectResponse
from uvicorn import run as app_run

from networksecurity.constant.training_pipeline import (
    DATA_INGESTION_COLLECTION_NAME,
    DATA_INGESTION_DATABASE_NAME,
)
from networksecurity.exception.exception import NetworkSecurityException
from networksecurity.logging.logger import logging
from networksecurity.pipeline.training_pipeline import TraningPipeline
from networksecurity.utils.main_utils.utils import load_object
from networksecurity.utils.ml_utils.model.estimator import NetworkModel

# Load MONGODB_URL_KEY from a local .env file.
load_dotenv()
mongo_db_url = os.getenv("MONGODB_URL_KEY")
# SECURITY: never print/log the connection URL -- it embeds credentials.

# certifi's CA bundle is needed for TLS-verified connections (e.g. Atlas).
ca = certifi.where()
client = pymongo.MongoClient(mongo_db_url, tlsCAFile=ca)
database = client[DATA_INGESTION_DATABASE_NAME]
collection = database[DATA_INGESTION_COLLECTION_NAME]

app = FastAPI(title="Network Security Prediction API")

# CORS: wide-open origins -- development setting; tighten for production.
origins = ["*"]
app.add_middleware(
    CORSMiddleware,
    allow_origins=origins,
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

templates = Jinja2Templates(directory="templates")
@app.get("/")
async def index():
    """Send visitors of the bare root URL to the interactive API docs."""
    docs = RedirectResponse(url="/docs")
    return docs
@app.get("/train", tags=["train"])
async def train_route():
    """Run the full training pipeline synchronously.

    Returns a plain-text success message; failures are logged and
    re-raised as NetworkSecurityException.
    """
    try:
        pipeline = TraningPipeline()
        pipeline.run_pipeline()
    except Exception as err:
        logging.error(f"Training failed: {err}")
        raise NetworkSecurityException(err, sys)
    return Response(content="Training successful !!")
@app.post("/predict")
async def predict_route(request: Request, file: UploadFile = File(...)):
    """Score an uploaded CSV of URL features and render the results.

    The CSV must contain the feature columns the saved preprocessor/model
    expect. Predictions are appended as a ``prediction_column`` column,
    persisted to ``prediction_output/output.csv``, and returned as an
    HTML table rendered through the Jinja2 template.

    Raises NetworkSecurityException on any failure (bad CSV, missing
    model artifacts, prediction errors).
    """
    try:
        df = pd.read_csv(file.file)
        logging.info(f"Uploaded file shape: {df.shape}")
        logging.info(f"Uploaded columns: {df.columns.tolist()}")

        # Artifacts are loaded per request so the newest trained model is
        # always served; cache at module level if latency becomes an issue.
        preprocessor = load_object("final_model/preprocessor.pkl")
        final_model = load_object("final_model/model.pkl")
        network_model = NetworkModel(
            preprocessor=preprocessor,
            model=final_model,
        )

        y_pred = network_model.predict(df)
        df["prediction_column"] = y_pred
        # Use logging instead of bare print() for debug/trace output.
        logging.info(f"Predictions: {df['prediction_column'].tolist()}")

        os.makedirs("prediction_output", exist_ok=True)
        df.to_csv("prediction_output/output.csv", index=False)
        table_html = df.to_html(classes='table table-striped')

        # NOTE(review): this commit adds templates/predict.html but renders
        # "table.html" -- confirm that template actually exists in templates/.
        return templates.TemplateResponse("table.html", {"request": request, "table": table_html})

    except Exception as e:
        logging.error(f"Prediction failed: {e}")
        raise NetworkSecurityException(e, sys)
if __name__ == "__main__":
    # Serve the ASGI app via uvicorn when this file is run as a script.
    app_run(app, host="localhost", port=8000)
confusion_matrix.png CHANGED
main.py CHANGED
@@ -6,12 +6,12 @@ from networksecurity.components.model_trainer import ModelTrainer,ModelTrainerCo
6
  from networksecurity.exception.exception import NetworkSecurityException
7
  from networksecurity.logging.logger import logging
8
  from networksecurity.entity.config_entity import DataIngestionConfig,DataValidationConfig,DataTransformationConfig
9
- from networksecurity.entity.config_entity import TraningPipelineConfig
10
  import sys
11
 
12
  if __name__ == "__main__":
13
  try:
14
- traningpipelineconfig=TraningPipelineConfig()
15
  dataingestionconfig=DataIngestionConfig(traningpipelineconfig)
16
  data_ingestion=DataIngestion(dataingestionconfig)
17
  logging.info("Initiate the data ingestion")
 
6
  from networksecurity.exception.exception import NetworkSecurityException
7
  from networksecurity.logging.logger import logging
8
  from networksecurity.entity.config_entity import DataIngestionConfig,DataValidationConfig,DataTransformationConfig
9
+ from networksecurity.entity.config_entity import TrainingPipelineConfig
10
  import sys
11
 
12
  if __name__ == "__main__":
13
  try:
14
+ traningpipelineconfig=TrainingPipelineConfig()
15
  dataingestionconfig=DataIngestionConfig(traningpipelineconfig)
16
  data_ingestion=DataIngestion(dataingestionconfig)
17
  logging.info("Initiate the data ingestion")
networksecurity/components/model_trainer.py CHANGED
@@ -181,7 +181,7 @@ class ModelTrainer:
181
 
182
  # ---------- Best model selection ----------
183
  best_model_name = max(model_scores, key=model_scores.get)
184
- best_model = models[best_model_name]
185
 
186
  logging.info(
187
  f"Best Model: {best_model_name} | "
 
181
 
182
  # ---------- Best model selection ----------
183
  best_model_name = max(model_scores, key=model_scores.get)
184
+ best_model = model_report[best_model_name]["model"]
185
 
186
  logging.info(
187
  f"Best Model: {best_model_name} | "
networksecurity/entity/config_entity.py CHANGED
@@ -6,7 +6,7 @@ print(training_pipeline.PIPELINE_NAME)
6
  print(training_pipeline.ARTIFACT_DIR)
7
 
8
 
9
- class TraningPipelineConfig:
10
  def __init__(self,timestamp=datetime.now()):
11
  timestamp=timestamp.strftime("%m_%d_%Y_%H_M_%S")
12
  self.pipeline_name = training_pipeline.PIPELINE_NAME
@@ -15,7 +15,7 @@ class TraningPipelineConfig:
15
  self.timestamp: str = timestamp
16
 
17
  class DataIngestionConfig:
18
- def __init__(self,training_pipeline_config: TraningPipelineConfig):
19
  self.data_ingestion_dir:str = os.path.join(
20
  training_pipeline_config.artifact_dir,training_pipeline.DATA_INGESTION_DIR_NAME
21
  )
@@ -34,7 +34,7 @@ class DataIngestionConfig:
34
 
35
 
36
  class DataValidationConfig:
37
- def __init__(self, training_pipeline_config: TraningPipelineConfig):
38
  # Main data validation directory
39
  self.data_validation_dir: str = os.path.join(
40
  training_pipeline_config.artifact_dir,
@@ -85,7 +85,7 @@ class DataValidationConfig:
85
  )
86
 
87
  class DataTransformationConfig:
88
- def __init__(self, training_pipeline_config: TraningPipelineConfig):
89
  self.data_transformation_dir: str = os.path.join(
90
  training_pipeline_config.artifact_dir,
91
  training_pipeline.DATA_TRANSFORMATION_DIR_NAME
@@ -110,7 +110,7 @@ class DataTransformationConfig:
110
  )
111
 
112
  class ModelTrainerConfig:
113
- def __init__(self, training_pipeline_config: TraningPipelineConfig):
114
  self.model_trainer_dir: str = os.path.join(
115
  training_pipeline_config.artifact_dir, training_pipeline.MODEL_TRAINER_DIR_NAME
116
  )
 
6
  print(training_pipeline.ARTIFACT_DIR)
7
 
8
 
9
+ class TrainingPipelineConfig:
10
  def __init__(self,timestamp=datetime.now()):
11
  timestamp=timestamp.strftime("%m_%d_%Y_%H_M_%S")
12
  self.pipeline_name = training_pipeline.PIPELINE_NAME
 
15
  self.timestamp: str = timestamp
16
 
17
  class DataIngestionConfig:
18
+ def __init__(self,training_pipeline_config: TrainingPipelineConfig):
19
  self.data_ingestion_dir:str = os.path.join(
20
  training_pipeline_config.artifact_dir,training_pipeline.DATA_INGESTION_DIR_NAME
21
  )
 
34
 
35
 
36
  class DataValidationConfig:
37
+ def __init__(self, training_pipeline_config: TrainingPipelineConfig):
38
  # Main data validation directory
39
  self.data_validation_dir: str = os.path.join(
40
  training_pipeline_config.artifact_dir,
 
85
  )
86
 
87
  class DataTransformationConfig:
88
+ def __init__(self, training_pipeline_config: TrainingPipelineConfig):
89
  self.data_transformation_dir: str = os.path.join(
90
  training_pipeline_config.artifact_dir,
91
  training_pipeline.DATA_TRANSFORMATION_DIR_NAME
 
110
  )
111
 
112
  class ModelTrainerConfig:
113
+ def __init__(self, training_pipeline_config: TrainingPipelineConfig):
114
  self.model_trainer_dir: str = os.path.join(
115
  training_pipeline_config.artifact_dir, training_pipeline.MODEL_TRAINER_DIR_NAME
116
  )
networksecurity/pipeline/batch_prediction.py ADDED
File without changes
networksecurity/pipeline/training_pipeline.py ADDED
@@ -0,0 +1,104 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os, sys
2
+ from networksecurity.exception.exception import NetworkSecurityException
3
+ from networksecurity.logging.logger import logging
4
+
5
+ from networksecurity.components.data_ingestion import DataIngestion
6
+ from networksecurity.components.data_validation import DataValidation
7
+ from networksecurity.components.data_transformation import DataTransformation
8
+ from networksecurity.components.model_trainer import ModelTrainer
9
+
10
+ from networksecurity.entity.config_entity import (
11
+ TrainingPipelineConfig,
12
+ DataIngestionConfig,
13
+ DataValidationConfig,
14
+ DataTransformationConfig,
15
+ ModelTrainerConfig,
16
+ )
17
+
18
+ from networksecurity.entity.artifact_entity import (
19
+ DataIngestionArtifact,
20
+ DataValidationArtifact,
21
+ DataTransformationArtifact,
22
+ ModelTrainerArtifact,
23
+ )
24
+
class TraningPipeline:
    """End-to-end training orchestrator.

    Runs ingestion -> validation -> transformation -> model training,
    threading each stage's artifact into the next. Every stage builds its
    config from the shared pipeline config and wraps failures in
    NetworkSecurityException.

    NOTE: the class name keeps the original spelling ("Traning") because
    callers import it under this name.
    """

    def __init__(self):
        try:
            self.training_pipeline_config = TrainingPipelineConfig()
        except Exception as exc:
            raise NetworkSecurityException(exc, sys)

    def start_data_ingestion(self) -> DataIngestionArtifact:
        """Run the data-ingestion stage and return its artifact."""
        try:
            logging.info("Starting data ingestion")
            self.data_ingestion_config = DataIngestionConfig(
                training_pipeline_config=self.training_pipeline_config
            )
            stage = DataIngestion(data_ingestion_config=self.data_ingestion_config)
            artifact = stage.initiate_data_ingestion()
            logging.info(f"Data ingestion completed and artifact {artifact}")
            return artifact
        except Exception as exc:
            raise NetworkSecurityException(exc, sys)

    def start_data_validation(
        self, data_ingestion_artifact: DataIngestionArtifact
    ) -> DataValidationArtifact:
        """Run the data-validation stage on the ingestion output."""
        try:
            logging.info("Starting data validation")
            self.data_validation_config = DataValidationConfig(
                training_pipeline_config=self.training_pipeline_config
            )
            stage = DataValidation(
                data_validation_config=self.data_validation_config,
                data_ingestion_artifact=data_ingestion_artifact,
            )
            artifact = stage.initiate_data_validation()
            logging.info(f"Data validation completed and artifact {artifact}")
            return artifact
        except Exception as exc:
            raise NetworkSecurityException(exc, sys)

    def start_data_transformation(
        self, data_validation_artifact: DataValidationArtifact
    ) -> DataTransformationArtifact:
        """Run the data-transformation stage on the validation output."""
        try:
            logging.info("Starting data transformation")
            self.data_transformation_config = DataTransformationConfig(
                training_pipeline_config=self.training_pipeline_config
            )
            stage = DataTransformation(
                data_transformation_config=self.data_transformation_config,
                data_validation_artifact=data_validation_artifact,
            )
            artifact = stage.initiate_data_transformation()
            logging.info(f"Data transformation completed and artifact {artifact}")
            return artifact
        except Exception as exc:
            raise NetworkSecurityException(exc, sys)

    def start_model_trainer(
        self, data_transformation_artifact: DataTransformationArtifact
    ) -> ModelTrainerArtifact:
        """Train the model on the transformed data and return its artifact."""
        try:
            logging.info("Starting model trainer")
            self.model_trainer_config = ModelTrainerConfig(
                training_pipeline_config=self.training_pipeline_config
            )
            stage = ModelTrainer(
                model_trainer_config=self.model_trainer_config,
                data_transformation_artifact=data_transformation_artifact,
            )
            artifact = stage.initiate_model_trainer()
            logging.info(f"Model trainer completed and artifact {artifact}")
            return artifact
        except Exception as exc:
            raise NetworkSecurityException(exc, sys)

    def run_pipeline(self) -> ModelTrainerArtifact:
        """Execute all stages in order and return the final trainer artifact."""
        try:
            ingestion = self.start_data_ingestion()
            validation = self.start_data_validation(data_ingestion_artifact=ingestion)
            transformation = self.start_data_transformation(
                data_validation_artifact=validation
            )
            trainer = self.start_model_trainer(
                data_transformation_artifact=transformation
            )
            logging.info("Training pipeline completed successfully")
            return trainer
        except Exception as exc:
            raise NetworkSecurityException(exc, sys)
precision_recall_curve.png CHANGED
prediction_output/output.csv ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ having_IP_Address,URL_Length,Shortining_Service,having_At_Symbol,double_slash_redirecting,Prefix_Suffix,having_Sub_Domain,SSLfinal_State,Domain_registeration_length,Favicon,port,HTTPS_token,Request_URL,URL_of_Anchor,Links_in_tags,SFH,Submitting_to_email,Abnormal_URL,Redirect,on_mouseover,RightClick,popUpWidnow,Iframe,age_of_domain,DNSRecord,web_traffic,Page_Rank,Google_Index,Links_pointing_to_page,Statistical_report,prediction
2
+ 1,-1,1,1,1,-1,1,1,1,1,1,-1,-1,1,1,0,1,1,0,1,1,1,1,1,1,-1,-1,1,1,1,1.0
3
+ -1,-1,-1,1,-1,1,1,1,-1,1,1,-1,1,1,0,1,1,-1,0,1,1,1,1,-1,-1,1,1,-1,1,1,1.0
4
+ -1,-1,-1,1,-1,-1,0,0,1,1,1,-1,-1,-1,1,-1,1,-1,1,1,1,1,1,1,1,-1,-1,1,0,1,0.0
5
+ -1,-1,1,1,1,-1,1,1,1,1,1,1,-1,0,1,1,1,1,0,1,1,1,1,1,1,1,-1,1,1,1,1.0
6
+ 1,1,1,1,1,-1,0,-1,-1,1,1,1,1,0,0,1,1,1,0,1,1,1,1,-1,1,1,-1,1,0,1,1.0
7
+ 1,-1,1,1,1,1,0,-1,-1,1,1,1,1,0,0,-1,1,1,0,1,1,1,1,1,1,1,-1,1,0,1,1.0
8
+ 1,1,1,1,1,-1,0,1,-1,-1,1,1,1,0,-1,1,1,1,0,-1,1,-1,1,-1,1,1,-1,1,0,1,1.0
9
+ 1,-1,1,1,1,-1,-1,-1,1,1,1,1,-1,-1,0,-1,1,1,0,1,1,1,1,-1,-1,-1,-1,1,0,1,0.0
10
+ 1,-1,1,-1,1,-1,-1,1,1,-1,-1,1,-1,0,0,0,-1,1,0,-1,1,-1,-1,1,1,1,-1,1,0,1,1.0
11
+ 1,1,1,1,1,-1,0,-1,-1,1,1,1,1,-1,0,1,1,1,0,1,1,1,1,-1,-1,-1,-1,1,0,1,0.0
12
+ 1,-1,1,1,1,-1,-1,1,1,1,1,1,-1,0,-1,-1,1,1,0,1,1,1,1,1,1,1,-1,1,0,1,1.0
13
+ -1,1,-1,1,1,-1,0,1,-1,1,1,1,1,0,1,-1,1,1,0,1,1,1,1,1,1,1,-1,1,0,1,0.0
requirements.txt CHANGED
@@ -10,4 +10,7 @@ pyaml
10
  mlflow
11
  dagshub
12
  seaborn
 
 
 
13
  #-e .
 
10
  mlflow
11
  dagshub
12
  seaborn
13
+ fastapi
14
+ uvicorn
15
+ python-multipart
16
  #-e .
roc_curve.png CHANGED
templates/predict.html ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
<!DOCTYPE html>
<html lang="en">
<head>
    <!-- Rendered by the /predict endpoint; `table` is pre-built HTML from
         pandas DataFrame.to_html, hence the `| safe` filter below. -->
    <meta charset="utf-8">
    <title>Predicted Data</title>
    <style>
        table {
            border-collapse: collapse;
            width: 100%;
        }
        th, td {
            border: 1px solid black;
            padding: 8px;
            text-align: left;
        }
    </style>
</head>
<body>
    <h2>Predicted Data</h2>
    {{ table | safe }}
</body>
</html>
valid_data/test.csv ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ having_IP_Address,URL_Length,Shortining_Service,having_At_Symbol,double_slash_redirecting,Prefix_Suffix,having_Sub_Domain,SSLfinal_State,Domain_registeration_length,Favicon,port,HTTPS_token,Request_URL,URL_of_Anchor,Links_in_tags,SFH,Submitting_to_email,Abnormal_URL,Redirect,on_mouseover,RightClick,popUpWidnow,Iframe,age_of_domain,DNSRecord,web_traffic,Page_Rank,Google_Index,Links_pointing_to_page,Statistical_report
2
+ 1,-1,1,1,1,-1,1,1,1,1,1,-1,-1,1,1,0,1,1,0,1,1,1,1,1,1,-1,-1,1,1,1
3
+ -1,-1,-1,1,-1,1,1,1,-1,1,1,-1,1,1,0,1,1,-1,0,1,1,1,1,-1,-1,1,1,-1,1,1
4
+ -1,-1,-1,1,-1,-1,0,0,1,1,1,-1,-1,-1,1,-1,1,-1,1,1,1,1,1,1,1,-1,-1,1,0,1
5
+ -1,-1,1,1,1,-1,1,1,1,1,1,1,-1,0,1,1,1,1,0,1,1,1,1,1,1,1,-1,1,1,1
6
+ 1,1,1,1,1,-1,0,-1,-1,1,1,1,1,0,0,1,1,1,0,1,1,1,1,-1,1,1,-1,1,0,1
7
+ 1,-1,1,1,1,1,0,-1,-1,1,1,1,1,0,0,-1,1,1,0,1,1,1,1,1,1,1,-1,1,0,1
8
+ 1,1,1,1,1,-1,0,1,-1,-1,1,1,1,0,-1,1,1,1,0,-1,1,-1,1,-1,1,1,-1,1,0,1
9
+ 1,-1,1,1,1,-1,-1,-1,1,1,1,1,-1,-1,0,-1,1,1,0,1,1,1,1,-1,-1,-1,-1,1,0,1
10
+ 1,-1,1,-1,1,-1,-1,1,1,-1,-1,1,-1,0,0,0,-1,1,0,-1,1,-1,-1,1,1,1,-1,1,0,1
11
+ 1,1,1,1,1,-1,0,-1,-1,1,1,1,1,-1,0,1,1,1,0,1,1,1,1,-1,-1,-1,-1,1,0,1
12
+ 1,-1,1,1,1,-1,-1,1,1,1,1,1,-1,0,-1,-1,1,1,0,1,1,1,1,1,1,1,-1,1,0,1
13
+ -1,1,-1,1,1,-1,0,1,-1,1,1,1,1,0,1,-1,1,1,0,1,1,1,1,1,1,1,-1,1,0,1