Pranav216 committed on
Commit
d4d8d76
·
1 Parent(s): 520ef41

Variable rename

Browse files
Files changed (2) hide show
  1. frontend.py +5 -5
  2. main.py +4 -4
frontend.py CHANGED
@@ -2,7 +2,7 @@ import gradio as gr
2
  import main
3
  import numpy as np
4
  import pandas as pd
5
- from main import clf_rf, clf_log, accuracy_score_rf, accuracy_score_lr, breier_score_rf, breier_score_lr, roc_rf, roc_lr
6
 
7
  def eda(Graphs):
8
  match Graphs:
@@ -125,8 +125,8 @@ def metrics(Algorithms):
125
  value = df
126
  )
127
  df_acc = gr.DataFrame(
128
- headers = ['Accuracy Score', 'Breier Score', 'ROC Score'],
129
- value = [list([accuracy_score_rf, breier_score_rf, roc_rf])],
130
  )
131
  return df_clf, df_acc
132
 
@@ -138,8 +138,8 @@ def metrics(Algorithms):
138
  value = df
139
  )
140
  df_acc = gr.DataFrame(
141
- headers = ['Accuracy Score', 'Breier Score', 'ROC Score'],
142
- value = [list([accuracy_score_lr, breier_score_lr, roc_lr])],
143
  )
144
  return df_clf, df_acc
145
 
 
2
  import main
3
  import numpy as np
4
  import pandas as pd
5
+ from main import clf_rf, clf_log, accuracy_score_rf, accuracy_score_lr, brier_score_rf, brier_score_lr, roc_rf, roc_lr
6
 
7
  def eda(Graphs):
8
  match Graphs:
 
125
  value = df
126
  )
127
  df_acc = gr.DataFrame(
128
+ headers = ['Accuracy Score', 'Brier Score', 'ROC Score'],
129
+ value = [list([accuracy_score_rf, brier_score_rf, roc_rf])],
130
  )
131
  return df_clf, df_acc
132
 
 
138
  value = df
139
  )
140
  df_acc = gr.DataFrame(
141
+ headers = ['Accuracy Score', 'Brier Score', 'ROC Score'],
142
+ value = [list([accuracy_score_lr, brier_score_lr, roc_lr])],
143
  )
144
  return df_clf, df_acc
145
 
main.py CHANGED
@@ -71,12 +71,12 @@ display = ConfusionMatrixDisplay(confusion_matrix=cm, display_labels=rf_clf.clas
71
  display.plot(cmap=plt.cm.Blues)
72
  plt.savefig("graphs/OutputGraphs/ConfusionMatrixRandomForest.png")
73
 
74
- breier_score_rf = brier_score_loss(y_test, y_rf_pred)
75
  accuracy_score_rf = accuracy_score(y_test, y_rf_pred)
76
  roc_rf = roc_auc_score(y_test, y_rf_pred)
77
 
78
  print("Brier Score Loss:")
79
- print(breier_score_rf)
80
  print("Accuracy Score:")
81
  print(accuracy_score_rf)
82
  print("ROC AUC Score:")
@@ -108,12 +108,12 @@ display = ConfusionMatrixDisplay(confusion_matrix=cm, display_labels=logistic.cl
108
  display.plot(cmap=plt.cm.Blues)
109
  plt.savefig("graphs/OutputGraphs/ConfusionMatrixLogistic.png")
110
 
111
- breier_score_lr = brier_score_loss(y_test, y_log_pred)
112
  accuracy_score_lr = accuracy_score(y_test, y_log_pred)
113
  roc_lr = roc_auc_score(y_test, y_log_pred)
114
 
115
  print("Brier Score Loss:")
116
- print(breier_score_lr)
117
  print("Accuracy Score:")
118
  print(accuracy_score_lr)
119
  print("ROC AUC Score:")
 
71
  display.plot(cmap=plt.cm.Blues)
72
  plt.savefig("graphs/OutputGraphs/ConfusionMatrixRandomForest.png")
73
 
74
+ brier_score_rf = brier_score_loss(y_test, y_rf_pred)
75
  accuracy_score_rf = accuracy_score(y_test, y_rf_pred)
76
  roc_rf = roc_auc_score(y_test, y_rf_pred)
77
 
78
  print("Brier Score Loss:")
79
+ print(brier_score_rf)
80
  print("Accuracy Score:")
81
  print(accuracy_score_rf)
82
  print("ROC AUC Score:")
 
108
  display.plot(cmap=plt.cm.Blues)
109
  plt.savefig("graphs/OutputGraphs/ConfusionMatrixLogistic.png")
110
 
111
+ brier_score_lr = brier_score_loss(y_test, y_log_pred)
112
  accuracy_score_lr = accuracy_score(y_test, y_log_pred)
113
  roc_lr = roc_auc_score(y_test, y_log_pred)
114
 
115
  print("Brier Score Loss:")
116
+ print(brier_score_lr)
117
  print("Accuracy Score:")
118
  print(accuracy_score_lr)
119
  print("ROC AUC Score:")