|
|
import pandas as pd |
|
|
import pickle |
|
|
from dotenv import load_dotenv |
|
|
from web3_utils import ROOT_DIR, TMP_DIR |
|
|
from staking import check_list_addresses |
|
|
from tools_metrics import prepare_tools |
|
|
import pandas as pd |
|
|
|
|
|
|
|
|
# Load environment variables from a local .env file at import time
# (presumably consumed by the web3/staking helpers — TODO confirm).
load_dotenv()
|
|
|
|
|
|
|
|
def get_trader_type(address: str, service_map: dict) -> str:
    """Classify a trader address as ``"Olas"`` or ``"non_Olas"``.

    An address is an Olas trader when it matches, case-insensitively, the
    ``safe_address`` of any service in *service_map*.

    :param address: trader wallet address (hex string, any casing)
    :param service_map: mapping of service id -> service info dict; every
        value must contain a ``"safe_address"`` key
    :return: ``"Olas"`` when the address belongs to a known service safe,
        otherwise ``"non_Olas"`` (including for an empty *service_map*)
    """
    # Hoist the lowercasing out of the loop; compare case-insensitively
    # because on-chain addresses appear with mixed checksum casing.
    target = address.lower()
    for service in service_map.values():
        if service["safe_address"].lower() == target:
            return "Olas"
    return "non_Olas"
|
|
|
|
|
|
|
|
# Output schema shared by every source frame; the three projections below
# must agree so pd.concat stacks rows without introducing NaN columns.
_ACTIVE_TRADER_COLUMNS = [
    "month_year_week",
    "market_creator",
    "trader_type",
    "trader_address",
    "creation_timestamp",
]


def _add_week_column(df: pd.DataFrame) -> pd.DataFrame:
    """Normalize ``creation_timestamp`` to UTC, sort chronologically and add
    a ``month_year_week`` label (start of the week, e.g. ``"Jan-06-2025"``).

    Returns the transformed frame (input is mutated in place for the
    timestamp/week columns, sorted copy is returned).
    """
    # to_datetime is a no-op on already tz-aware datetimes; tz_convert
    # requires tz-aware input, matching the source parquet files.
    df["creation_timestamp"] = pd.to_datetime(
        df["creation_timestamp"]
    ).dt.tz_convert("UTC")
    df = df.sort_values(by="creation_timestamp", ascending=True)
    df["month_year_week"] = (
        df["creation_timestamp"]
        .dt.to_period("W")
        .dt.start_time.dt.strftime("%b-%d-%Y")
    )
    return df


def compute_active_traders_dataset():
    """Function to prepare the active traders dataset.

    Reads the tools and trades parquet files, tags every trader row with a
    ``trader_type`` ("Olas", "non_Olas" or "unknown"), normalizes timestamps
    to weekly buckets and writes the combined, de-duplicated result to
    ``ROOT_DIR / "active_traders.parquet"``.

    NOTE(review): the original version also loaded ``service_map.pkl`` here
    but never used it; that dead pickle read has been removed.
    """
    # --- tool requests -------------------------------------------------
    tools_df = pd.read_parquet(TMP_DIR / "tools.parquet")
    tools_df = prepare_tools(tools_df)
    tools_df.rename(
        columns={
            "request_month_year_week": "month_year_week",
            "request_time": "creation_timestamp",
        },
        inplace=True,
    )
    tools_df = _add_week_column(tools_df)
    # Classify each unique tool trader once, then map back onto the rows;
    # addresses missing from the mapping are tagged "unknown".
    mapping = check_list_addresses(tools_df.trader_address.unique())
    tools_df["trader_type"] = tools_df.trader_address.apply(
        lambda x: mapping.get(x, "unknown")
    )
    tools_df = tools_df[_ACTIVE_TRADER_COLUMNS]
    tools_df.drop_duplicates(inplace=True)

    # --- traders with no known classification --------------------------
    unknown_traders = pd.read_parquet(ROOT_DIR / "unknown_traders.parquet")
    unknown_traders = _add_week_column(unknown_traders)
    unknown_traders["trader_type"] = "unknown"
    unknown_traders = unknown_traders[_ACTIVE_TRADER_COLUMNS]
    unknown_traders.drop_duplicates(inplace=True)

    # --- profitability trades ------------------------------------------
    all_trades = pd.read_parquet(ROOT_DIR / "all_trades_profitability.parquet")
    all_trades = _add_week_column(all_trades)
    # Any staking status other than the literal "non_Olas" counts as Olas.
    all_trades["trader_type"] = all_trades["staking"].apply(
        lambda x: "non_Olas" if x == "non_Olas" else "Olas"
    )
    all_trades = all_trades[_ACTIVE_TRADER_COLUMNS]
    all_trades.drop_duplicates(inplace=True)

    # Stack all sources and drop cross-source duplicates before persisting.
    filtered_traders_data = pd.concat([all_trades, tools_df], axis=0)
    filtered_traders_data.drop_duplicates(inplace=True)
    if len(unknown_traders) > 0:
        filtered_traders_data = pd.concat(
            [filtered_traders_data, unknown_traders], axis=0
        )
    filtered_traders_data.to_parquet(ROOT_DIR / "active_traders.parquet")
|
|
|
|
|
|
|
|
# Script entry point: build and persist the active traders dataset.
if __name__ == "__main__":

    compute_active_traders_dataset()
|
|
|