github-actions committed on
Commit
cdb8e3e
·
1 Parent(s): 30a2ab4

Update model artifacts

Browse files
src/__pycache__/__init__.cpython-39.pyc CHANGED
Binary files a/src/__pycache__/__init__.cpython-39.pyc and b/src/__pycache__/__init__.cpython-39.pyc differ
 
src/__pycache__/preprocessing.cpython-39.pyc CHANGED
Binary files a/src/__pycache__/preprocessing.cpython-39.pyc and b/src/__pycache__/preprocessing.cpython-39.pyc differ
 
src/evaluate.py CHANGED
@@ -1,4 +1,3 @@
1
- # src/evaluate.py
2
  import joblib
3
  import pandas as pd
4
  from sklearn.metrics import classification_report, confusion_matrix
@@ -27,7 +26,8 @@ def evaluate():
27
  y_pred = model.predict(X)
28
 
29
  report = classification_report(y_true, y_pred, output_dict=True)
30
- cm = confusion_matrix(y_true, y_pred).tolist()
 
31
 
32
  os.makedirs("model", exist_ok=True)
33
 
@@ -35,12 +35,13 @@ def evaluate():
35
  with open("model/eval_report.json", "w") as f:
36
  json.dump({"report": report, "confusion_matrix": cm}, f, indent=2)
37
 
38
- # summary
 
39
  summary = {
40
  "accuracy": report["accuracy"],
41
- "precision_judi": report["judi"]["precision"],
42
- "recall_judi": report["judi"]["recall"],
43
- "f1_judi": report["judi"]["f1-score"]
44
  }
45
  with open("model/metrics_summary.json", "w") as f:
46
  json.dump(summary, f, indent=2)
@@ -48,7 +49,7 @@ def evaluate():
48
  # confusion matrix heatmap
49
  labels = sorted(list(set(y_true)))
50
  plt.figure(figsize=(6,4))
51
- sns.heatmap(cm, annot=True, fmt="d", cmap="Blues",
52
  xticklabels=labels, yticklabels=labels)
53
  plt.xlabel("Predicted")
54
  plt.ylabel("True")
 
 
1
  import joblib
2
  import pandas as pd
3
  from sklearn.metrics import classification_report, confusion_matrix
 
26
  y_pred = model.predict(X)
27
 
28
  report = classification_report(y_true, y_pred, output_dict=True)
29
+ cm_array = confusion_matrix(y_true, y_pred)
30
+ cm = cm_array.tolist()
31
 
32
  os.makedirs("model", exist_ok=True)
33
 
 
35
  with open("model/eval_report.json", "w") as f:
36
  json.dump({"report": report, "confusion_matrix": cm}, f, indent=2)
37
 
38
+ # pilih label target dinamis
39
+ target_label = "judi" if "judi" in report else list(report.keys())[0]
40
  summary = {
41
  "accuracy": report["accuracy"],
42
+ f"precision_{target_label}": report[target_label]["precision"],
43
+ f"recall_{target_label}": report[target_label]["recall"],
44
+ f"f1_{target_label}": report[target_label]["f1-score"]
45
  }
46
  with open("model/metrics_summary.json", "w") as f:
47
  json.dump(summary, f, indent=2)
 
49
  # confusion matrix heatmap
50
  labels = sorted(list(set(y_true)))
51
  plt.figure(figsize=(6,4))
52
+ sns.heatmap(cm_array, annot=True, fmt="d", cmap="Blues",
53
  xticklabels=labels, yticklabels=labels)
54
  plt.xlabel("Predicted")
55
  plt.ylabel("True")
vectorizer.joblib CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:f14229d548760c078cee8e16d53a57e5bad027f153c2e1467d58a7ffdddd4866
3
  size 2966
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ddcf5f27dd00caccc9f0bc88dfd5c94cf7639bc48bdde8bf61a40e250e9dfae3
3
  size 2966