import time
from typing import Optional, Tuple

import numpy as np
import pandas as pd
from tqdm import tqdm

from get_mech_info import read_all_trades_profitability
from predict_kpis import compute_markets_agent_roi
from tools_metrics import prepare_tools
from utils import convert_hex_to_int, ROOT_DIR, TMP_DIR
| |
|
| |
|
def determine_market_status(row):
    """Determine the market status of a trade row.

    Status precedence: pending -> open -> arbitrating -> finalizing -> closed.

    :param row: mapping/Series with "currentAnswer", "openingTimestamp",
        "fpmm.isPendingArbitration" and "fpmm.answerFinalizedTimestamp".
    :return: one of "pending", "open", "arbitrating", "finalizing", "closed".
    """
    current_answer = row["currentAnswer"]
    # pd.isna covers None, np.nan AND plain float('nan'); the original
    # `is np.nan` identity check missed non-singleton NaN values.
    no_answer = pd.isna(current_answer)
    if no_answer and time.time() >= int(row["openingTimestamp"]):
        # Opening time has passed but no answer has been submitted yet.
        return "pending"
    if no_answer:
        return "open"
    if row["fpmm.isPendingArbitration"]:
        return "arbitrating"
    if row["fpmm.answerFinalizedTimestamp"] and time.time() < int(
        row["fpmm.answerFinalizedTimestamp"]
    ):
        return "finalizing"
    return "closed"
| |
|
| |
|
def compute_market_metrics(all_trades: pd.DataFrame) -> None:
    """Compute per-market metrics for closed markets and save them to parquet.

    Normalizes the fpmm.* column names IN PLACE (mutates the caller's frame),
    tags each trade with a market status, then aggregates the number of trades
    and the number of unique traders per closed market. The result is written
    to ROOT_DIR / "closed_market_metrics.parquet".

    :param all_trades: raw fpmmTrades DataFrame with fpmm.* columns.
    """
    print("Preparing dataset")
    # In-place rename is intentional: downstream code (determine_market_status)
    # relies on the normalized column names.
    all_trades.rename(
        columns={
            "fpmm.currentAnswer": "currentAnswer",
            "fpmm.openingTimestamp": "openingTimestamp",
            "fpmm.id": "market_id",
        },
        inplace=True,
    )
    # Pass the callables directly — wrapping them in lambdas added nothing.
    all_trades["currentAnswer"] = all_trades["currentAnswer"].apply(
        convert_hex_to_int
    )
    all_trades["market_status"] = all_trades.apply(determine_market_status, axis=1)
    closed_trades = all_trades.loc[all_trades["market_status"] == "closed"]
    print("Computing metrics")
    nr_trades = (
        closed_trades.groupby("market_id")["id"].count().reset_index(name="nr_trades")
    )
    total_traders = (
        closed_trades.groupby("market_id")["trader_address"]
        .nunique()
        .reset_index(name="total_traders")
    )
    final_dataset = nr_trades.merge(total_traders, on="market_id")
    # Chain drop_duplicates instead of mutating a column-slice in place, which
    # raised SettingWithCopy warnings (and breaks under pandas copy-on-write).
    markets = closed_trades[
        ["market_id", "title", "market_creator", "openingTimestamp"]
    ].drop_duplicates("market_id")
    market_metrics = markets.merge(final_dataset, on="market_id")
    print("Saving dataset")
    market_metrics.to_parquet(ROOT_DIR / "closed_market_metrics.parquet", index=False)
    print(market_metrics.head())
| |
|
| |
|
def prepare_traders_data() -> Optional[Tuple[pd.DataFrame, pd.DataFrame]]:
    """Prepare traders data for weekly metrics computation.

    :return: a (trades, traders_activity) pair, both sorted ascending by their
        timestamp column with a derived date column added, or ``None`` when the
        tools parquet file cannot be read/prepared. The original annotation
        claimed a bare Tuple, hiding the None path from callers.
    """
    trades = read_all_trades_profitability()
    trades["creation_timestamp"] = pd.to_datetime(trades["creation_timestamp"])
    trades["creation_timestamp"] = trades["creation_timestamp"].dt.tz_convert("UTC")
    trades["creation_date"] = trades["creation_timestamp"].dt.date
    trades = trades.sort_values(by="creation_timestamp", ascending=True)
    unique_addresses = trades.trader_address.unique()
    closed_markets = trades.title.unique()

    try:
        tools_df = pd.read_parquet(TMP_DIR / "tools.parquet")
        tools_df = prepare_tools(tools_df, total_included=False)
    except Exception as e:  # broad by design: any failure means "no tools data"
        print(f"Error reading tools parquet file {e}")
        return None

    # Restrict mech activity to traders and market titles present in trades.
    traders_activity = tools_df[
        tools_df["trader_address"].isin(unique_addresses)
    ].copy()
    traders_activity = traders_activity[traders_activity["title"].isin(closed_markets)]
    traders_activity["request_time"] = pd.to_datetime(
        traders_activity["request_time"], utc=True
    )
    traders_activity = traders_activity.sort_values(by="request_time", ascending=True)
    traders_activity["request_date"] = traders_activity["request_time"].dt.date
    return trades, traders_activity
| |
|
| |
|
def compute_weekly_trader_metrics() -> pd.DataFrame:
    """Compute per-trader ROI metrics for each week of trading activity.

    :return: one row per (week, trader) with the metrics dict produced by
        compute_markets_agent_roi; an empty DataFrame when nothing was computed.
    :raises RuntimeError: when the traders data could not be prepared.
    """
    prepared = prepare_traders_data()
    if prepared is None:
        # prepare_traders_data returns None when the tools parquet is missing;
        # unpacking None directly raised a confusing TypeError here.
        raise RuntimeError("Could not prepare traders data (tools parquet unavailable)")
    trades_data, mechs_data = prepared
    trades_data["week_start"] = (
        trades_data["creation_timestamp"].dt.to_period("W").dt.start_time
    )

    grouped_trades = trades_data.groupby("week_start")
    contents = []
    traders = trades_data.trader_address.unique()

    for week, week_data in grouped_trades:
        print(f"Week: {week}")
        # Markets that saw trades during this week.
        closed_markets = week_data.title.unique()
        for trader in tqdm(
            traders, total=len(traders), desc="Computing metrics for traders"
        ):
            # NOTE(review): this filters trades_data (ALL weeks), not week_data,
            # so a trader's trades in this week's markets from other weeks are
            # included — confirm this is intentional.
            trader_trades = trades_data[
                (trades_data["trader_address"] == trader)
                & (trades_data["title"].isin(closed_markets))
            ]
            if len(trader_trades) == 0:
                continue

            trader_mech_calls = mechs_data.loc[
                (mechs_data["trader_address"] == trader)
                & (mechs_data["title"].isin(closed_markets))
            ]
            try:
                roi_dict = compute_markets_agent_roi(
                    trader_trades, trader_mech_calls, trader, "week", week
                )
                contents.append(pd.DataFrame([roi_dict]))
            except ValueError as e:
                print(f"Skipping ROI calculation: {e}")
                continue
    if not contents:
        # pd.concat raises ValueError on an empty list; return an empty
        # DataFrame so the caller's to_parquet still works.
        return pd.DataFrame()
    return pd.concat(contents, ignore_index=True)
| |
|
| |
|
| | if __name__ == "__main__": |
| | all_trades = pd.read_parquet(TMP_DIR / "fpmmTrades.parquet") |
| | compute_market_metrics(all_trades) |
| | weekly_metrics_df = compute_weekly_trader_metrics() |
| | weekly_metrics_df.to_parquet( |
| | ROOT_DIR / "traders_weekly_metrics.parquet", index=False |
| | ) |
| |
|