cyberosa
committed on
Commit
·
ca32d33
1
Parent(s):
75bb2cf
adding new dataset for errors by mech
Browse files- errors_by_mech.parquet +3 -0
- scripts/tools_metrics.py +39 -0
errors_by_mech.parquet
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:0259ed7b72c96f1382056604a8bb934ad9ad5baa286a43a2beda8d94a11c3d54
|
| 3 |
+
size 5474
|
scripts/tools_metrics.py
CHANGED
|
@@ -75,6 +75,35 @@ def prepare_tools(tools: pd.DataFrame) -> pd.DataFrame:
|
|
| 75 |
return tools
|
| 76 |
|
| 77 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 78 |
def compute_tools_based_datasets():
|
| 79 |
try:
|
| 80 |
tools_df = pd.read_parquet(TMP_DIR / "tools.parquet")
|
|
@@ -104,6 +133,16 @@ def compute_tools_based_datasets():
|
|
| 104 |
winning_df = get_tool_winning_rate_by_market(tools_df, inc_tools=INC_TOOLS)
|
| 105 |
winning_df.to_parquet(ROOT_DIR / "winning_df.parquet", index=False)
|
| 106 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 107 |
|
| 108 |
if __name__ == "__main__":
|
| 109 |
compute_tools_based_datasets()
|
|
|
|
| 75 |
return tools
|
| 76 |
|
| 77 |
|
| 78 |
+
def get_errors_by_mech_address(
    tools_df: pd.DataFrame, inc_tools: List[str]
) -> pd.DataFrame:
    """Weekly distribution of tool errors per mech address.

    Counts requests grouped by (week, mech address, error code), labels each
    group as ``"error"``/``"non_error"``, and attaches each group's share of
    the mech's weekly request total as a percentage.

    :param tools_df: tools data; must contain the columns
        ``tool``, ``request_month_year_week``, ``mech_address`` and ``error``.
    :param inc_tools: tool names to include in the computation.
    :return: one row per (week, mech_address, error) with ``requests``,
        ``error_cat``, ``total_requests`` and ``errors_percentage`` columns.
    """
    # Keep only the rows for the tools we track.
    included = tools_df[tools_df["tool"].isin(inc_tools)]

    # Request count per (week, mech, error-code) triple.
    per_error = (
        included.groupby(
            ["request_month_year_week", "mech_address", "error"], sort=False
        )
        .size()
        .reset_index(name="requests")
    )
    # error == 0 marks a successful request; everything else is an error.
    per_error["error_cat"] = per_error["error"].eq(0).map(
        {True: "non_error", False: "error"}
    )

    # Weekly request totals per mech — the denominator for the percentage.
    per_mech_totals = (
        included.groupby(["request_month_year_week", "mech_address"], sort=False)
        .size()
        .reset_index(name="total_requests")
    )

    merged = per_error.merge(
        per_mech_totals, on=["request_month_year_week", "mech_address"]
    )
    merged["errors_percentage"] = (
        merged["requests"] / merged["total_requests"]
    ) * 100
    return merged
|
| 105 |
+
|
| 106 |
+
|
| 107 |
def compute_tools_based_datasets():
|
| 108 |
try:
|
| 109 |
tools_df = pd.read_parquet(TMP_DIR / "tools.parquet")
|
|
|
|
| 133 |
winning_df = get_tool_winning_rate_by_market(tools_df, inc_tools=INC_TOOLS)
|
| 134 |
winning_df.to_parquet(ROOT_DIR / "winning_df.parquet", index=False)
|
| 135 |
|
| 136 |
+
# errors by mech address
|
| 137 |
+
try:
|
| 138 |
+
tools_df = pd.read_parquet(TMP_DIR / "tools.parquet")
|
| 139 |
+
tools_df = prepare_tools(tools_df)
|
| 140 |
+
except Exception as e:
|
| 141 |
+
print(f"Error reading tools parquet file {e}")
|
| 142 |
+
return None
|
| 143 |
+
errors_by_mech = get_errors_by_mech_address(tools_df=tools_df, inc_tools=INC_TOOLS)
|
| 144 |
+
errors_by_mech.to_parquet(ROOT_DIR / "errors_by_mech.parquet", index=False)
|
| 145 |
+
|
| 146 |
|
| 147 |
if __name__ == "__main__":
|
| 148 |
compute_tools_based_datasets()
|