gmanolache committed on
Commit
11c0b77
·
verified ·
1 Parent(s): 4783cc7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +14 -5
app.py CHANGED
@@ -62,17 +62,26 @@ def evaluate_model(file, name):
62
  if len(y_pred) != len(y_test):
63
  return "❌ Model output length does not match test set.", load_leaderboard()
64
 
65
- accuracy = 100.0 * (y_pred == y_test).mean()
66
- avg_time = elapsed / len(X_test)
67
 
68
  leaderboard = load_leaderboard()
 
 
 
 
 
69
  new_entry = pd.DataFrame([{
70
  "Name": name,
71
- "Accuracy": round(accuracy, 2),
72
- "Avg Time (ms)": round(avg_time, 2)
73
  }])
74
  leaderboard = pd.concat([leaderboard, new_entry], ignore_index=True)
75
- leaderboard = leaderboard.sort_values(by=["Accuracy", "Avg Time (ms)"], ascending=[False, True]).reset_index(drop=True)
 
 
 
 
76
  leaderboard.to_csv(LEADERBOARD_PATH, index=False)
77
 
78
  return "", leaderboard
 
62
  if len(y_pred) != len(y_test):
63
  return "❌ Model output length does not match test set.", load_leaderboard()
64
 
65
+ accuracy = round(100.0 * (y_pred == y_test).mean(), 5)
66
+ avg_time = round(elapsed / len(X_test), 5)
67
 
68
  leaderboard = load_leaderboard()
69
+
70
+ # Remove existing entries for the same name
71
+ leaderboard = leaderboard[leaderboard["Name"] != name]
72
+
73
+ # Add new result
74
  new_entry = pd.DataFrame([{
75
  "Name": name,
76
+ "Accuracy": accuracy,
77
+ "Avg Time (ms)": avg_time
78
  }])
79
  leaderboard = pd.concat([leaderboard, new_entry], ignore_index=True)
80
+
81
+ # Keep only the best score per name
82
+ leaderboard = leaderboard.sort_values(by=["Accuracy", "Avg Time (ms)"], ascending=[False, True])
83
+ leaderboard = leaderboard.drop_duplicates(subset=["Name"], keep="first").reset_index(drop=True)
84
+
85
  leaderboard.to_csv(LEADERBOARD_PATH, index=False)
86
 
87
  return "", leaderboard