Spaces:
Sleeping
Sleeping
RGB Evaluation
committed on
Commit
·
005cdbf
1
Parent(s):
5253a83
Fix: Correct attribute references for error detection/correction metrics
Browse files
- Changed error_detection_count to errors_detected (actual attribute name)
- Changed error_correction_count to errors_corrected (actual attribute name)
- Fixed both app.py (main) and RGBMetrics/app.py (mirror)
- Resolves 'EvaluationResult' object has no attribute 'error_detection_count' error
app.py
CHANGED
|
@@ -174,9 +174,9 @@ def run_evaluation_background(selected_models, selected_tasks, max_samples, api_
|
|
| 174 |
"accuracy": r.accuracy,
|
| 175 |
"rejected": r.rejected,
|
| 176 |
"rejection_rate": r.rejection_rate,
|
| 177 |
-
"error_detection_count": r.error_detection_count,
|
| 178 |
"error_detection_rate": r.error_detection_rate,
|
| 179 |
-
"error_correction_count": r.error_correction_count,
|
| 180 |
"error_correction_rate": r.error_correction_rate,
|
| 181 |
}
|
| 182 |
for r in results
|
|
|
|
| 174 |
"accuracy": r.accuracy,
|
| 175 |
"rejected": r.rejected,
|
| 176 |
"rejection_rate": r.rejection_rate,
|
| 177 |
+
"error_detection_count": r.errors_detected,
|
| 178 |
"error_detection_rate": r.error_detection_rate,
|
| 179 |
+
"error_correction_count": r.errors_corrected,
|
| 180 |
"error_correction_rate": r.error_correction_rate,
|
| 181 |
}
|
| 182 |
for r in results
|