|
|
import pandas as pd |
|
|
from datetime import datetime, timedelta, UTC |
|
|
from web3_utils import ROOT_DIR |
|
|
from utils import measure_execution_time |
|
|
from get_mech_info import ( |
|
|
fetch_block_number, |
|
|
get_last_block_number, |
|
|
read_all_trades_profitability, |
|
|
) |
|
|
from dotenv import load_dotenv |
|
|
from tqdm import tqdm |
|
|
import requests |
|
|
import os |
|
|
import pickle |
|
|
from utils import TMP_DIR, ROOT_DIR |
|
|
from concurrent.futures import ThreadPoolExecutor |
|
|
from gnosis_timestamps import get_all_txs_between_blocks_from_trader_address |
|
|
from dune_client.types import QueryParameter |
|
|
from dune_client.client import DuneClient |
|
|
from dune_client.query import QueryBase |
|
|
from staking import add_predict_agent_category |
|
|
from typing import Tuple |
|
|
from tools_metrics import prepare_tools |
|
|
from profitability import DEFAULT_MECH_FEE |
|
|
|
|
|
DATETIME_60_DAYS_AGO = datetime.now(UTC) - timedelta(days=60) |
|
|
|
|
|
load_dotenv() |
|
|
|
|
|
|
|
|
def get_block_number_from_datetime(datetime_sample) -> int:
    """Return the block number mined closest to *datetime_sample*.

    Queries a 5-second window starting at the sample's timestamp and reads
    the "number" field out of the response.

    :param datetime_sample: timezone-aware datetime to resolve to a block.
    :return: the block number as an int.
    :raises ValueError: when no valid block number is found in the window.
    """
    print(f"Datetime sample = {datetime_sample}")
    five_seconds = timedelta(seconds=5)
    block_number = fetch_block_number(
        int(datetime_sample.timestamp()),
        int((datetime_sample + five_seconds).timestamp()),
    )
    # fetch_block_number returns a mapping; the block number arrives as a string.
    block_number = block_number.get("number", "")
    if block_number.isdigit():
        return int(block_number)
    # The original fell through and implicitly returned None, which surfaced
    # later as an opaque TypeError in block arithmetic; fail loudly here.
    raise ValueError(f"No block number found for datetime {datetime_sample}")
|
|
|
|
|
|
|
|
def safe_get_txs(session, trader_address, market_creator: str, start_block, end_block):
    """Fetch all txs for a trader between two blocks without ever raising.

    Thin best-effort wrapper around
    get_all_txs_between_blocks_from_trader_address: any exception (or a
    None result) is converted into an empty DataFrame so a single failing
    trader does not abort a bulk fetch.
    """
    try:
        txs = get_all_txs_between_blocks_from_trader_address(
            session, trader_address, market_creator, start_block, end_block
        )
    except Exception as e:
        # Broad catch is deliberate: callers map this over many traders.
        print(f"Error getting transactions for {trader_address}: {e}")
        return pd.DataFrame()
    return txs if txs is not None else pd.DataFrame()
|
|
|
|
|
|
|
|
@measure_execution_time
def prepare_gnosis_txs_dataset():
    """Function to extract the gnosis txs from the Olas traders

    Reads all_trades_profitability.parquet, keeps only Olas traders,
    resolves the block window (last 60 days up to the latest block) and
    fetches each trader's transactions concurrently.

    Returns a single DataFrame with all fetched transactions.
    """

    all_trades = pd.read_parquet(ROOT_DIR / "all_trades_profitability.parquet")

    # Keep only Olas traders (anything whose staking label is not "non_Olas").
    olas_traders = all_trades[all_trades["staking"] != "non_Olas"]

    # One row per (trader_address, market_creator) pair.
    olas_traders = olas_traders[["trader_address", "market_creator"]].drop_duplicates()
    olas_traders = olas_traders.reset_index(drop=True)
    print(f"Number of unique traders = {len(olas_traders)}")
    print(olas_traders.market_creator.value_counts())

    # Block window: 60 days ago up to the chain head.
    starting_block = get_block_number_from_datetime(DATETIME_60_DAYS_AGO)
    print(f"Starting block = {starting_block}")

    ending_block = get_last_block_number()
    print(f"Ending block = {ending_block}")
    print(f"Number of blocks to fetch = {ending_block - starting_block}")

    results = []
    # Shared session for connection reuse across worker threads.
    # NOTE(review): requests.Session thread-safety is not guaranteed by the
    # library — confirm this is acceptable for the underlying fetcher.
    session = requests.Session()

    with ThreadPoolExecutor(max_workers=10) as executor:
        results = list(
            tqdm(
                executor.map(
                    lambda x: safe_get_txs(
                        session,
                        x.trader_address,
                        x.market_creator,
                        starting_block,
                        ending_block,
                    ),
                    olas_traders.itertuples(index=False),
                ),
                total=len(olas_traders),
            )
        )
    # safe_get_txs already returns empty DataFrames on error; this filter
    # drops anything else defensively before concatenation.
    results = [r for r in results if isinstance(r, pd.DataFrame)]
    return pd.concat(results, ignore_index=True)
|
|
|
|
|
|
|
|
def get_service_id_from_trader_address(trader_address: str, service_map: dict):
    """Return the service id whose safe address matches *trader_address*.

    The comparison is case-insensitive. Returns None when no entry of
    *service_map* has a matching "safe_address".
    """
    target = trader_address.lower()
    return next(
        (
            service_id
            for service_id, info in service_map.items()
            if info["safe_address"].lower() == target
        ),
        None,
    )
|
|
|
|
|
|
|
|
def prepare_predict_services_dataset():
    """Build predict_services.csv from the pickled service map.

    Keeps only services whose agent_id is 25 or 14 and records their safe
    address, service id and market-creator category.
    """

    with open(ROOT_DIR / "service_map.pkl", "rb") as f:
        service_map = pickle.load(f)
    content = []

    for key, value in service_map.items():
        print(f"value = {value}")
        # Some entries lack agent_id; skip them rather than fail.
        if "agent_id" not in value:
            print(f"agent_id not found in value {value}")
            continue
        # NOTE(review): 25 and 14 are presumably the predict-agent ids —
        # confirm against the service registry and consider naming them.
        if value["agent_id"] == 25 or value["agent_id"] == 14:
            agent_dict = {}

            owner_address = value["owner_address"]
            agent_dict["safe_address"] = value["safe_address"]
            agent_dict["service_id"] = key
            agent_dict["market_creator"] = add_predict_agent_category(
                owner_address=owner_address
            )
            content.append(agent_dict)

    predict_services = pd.DataFrame(content)
    print(f"Number of unique predict agents = {len(predict_services)}")

    predict_services.to_csv(ROOT_DIR / "predict_services.csv", index=False)
|
|
|
|
|
|
|
|
def setup_dune_python_client():
    """Build a DuneClient authenticated via the DUNE_API_KEY env variable.

    :return: a configured DuneClient instance.
    :raises ValueError: when DUNE_API_KEY is not set in the environment.
    """
    api_key = os.getenv("DUNE_API_KEY")
    if api_key is None:
        raise ValueError("Dune API key is not set in the environment variable")
    return DuneClient(api_key=api_key)
|
|
|
|
|
|
|
|
def load_predict_services_file(dune_client: DuneClient):
    """Function to load the olas dataset in dune

    Reads the CSV produced by prepare_predict_services_dataset() and
    uploads it to Dune as the public "predict_services" table. Errors are
    reported but not raised (best-effort upload).

    :param dune_client: an authenticated DuneClient instance.
    """
    csv_path = ROOT_DIR / "predict_services.csv"
    try:
        with open(csv_path, "r") as open_file:
            data = open_file.read()

        print("loading the dataset in dune")

        dune_client.upload_csv(
            table_name="predict_services",
            data=data,
            description="Olas predict services found at the service registry",
            is_private=False,
        )
        print("CSV file uploaded successfully!")

    except FileNotFoundError:
        # Fix: the original message printed "{ROOT_DIR} / predict_services.csv"
        # (literal text after the dir); report the actual missing path.
        print(f"Error: CSV file not found at {csv_path}")
    except Exception as e:
        # Best-effort: an upload failure should not abort the caller.
        print(f"An error occurred during upload: {e}")
|
|
|
|
|
|
|
|
def get_latest_result_from_DAA_QS(dune_client: DuneClient):
    """Fetch the latest DAA QS result from Dune (query id 5193717).

    First tries the cached latest result; if decoding it raises a
    UnicodeEncodeError, re-runs the query as a fallback.

    :param dune_client: an authenticated DuneClient instance.
    :return: the query result as a DataFrame.
    """
    # QueryBase is only needed for the run_query_dataframe fallback.
    query = QueryBase(
        query_id=5193717,
    )
    try:
        query_result = dune_client.get_latest_result_dataframe(5193717)
        print(f"DAA QS query result = {query_result.head()}")
        return query_result
    except UnicodeEncodeError:
        print("Handling Unicode encoding error, using alternative method...")
        query_result = dune_client.run_query_dataframe(query=query)
        print(f"DAA QS query result = {query_result.head()}")
        return query_result
|
|
|
|
|
|
|
|
def get_latest_result_from_DAA_Pearl(dune_client: DuneClient):
    """Fetch the latest DAA Pearl result from Dune (query id 5193539).

    :param dune_client: an authenticated DuneClient instance.
    :return: the query result as a DataFrame.
    """
    # Fix: removed an unused QueryBase(query_id=5193539) construction;
    # the cached latest result is fetched directly by id.
    query_result = dune_client.get_latest_result_dataframe(5193539)
    print(f"DAA Pearl query result = {query_result.head()}")
    return query_result
|
|
|
|
|
|
|
|
def get_the_list_of_pearl_agents(dune_client: DuneClient):
    """Function to get the list of pearl agents from Dune

    Fetches the latest result of query 5194206 and returns the
    deduplicated (safe_address, serviceId, service_owner) rows.

    :param dune_client: an authenticated DuneClient instance.
    :return: DataFrame with columns safe_address, serviceId, service_owner.
    """
    # Fix: removed an unused QueryBase(query_id=5194206) construction;
    # the cached latest result is fetched directly by id.
    query_result = dune_client.get_latest_result_dataframe(5194206)
    print(f"Pearl agents query result = {query_result.head()}")

    query_result = query_result[["safe_address", "serviceId", "service_owner"]]

    query_result = query_result.drop_duplicates(
        subset=["safe_address", "service_owner", "serviceId"]
    )
    return query_result
|
|
|
|
|
|
|
|
def prepare_daa_data():
    """Download the DAA datasets from Dune and persist them as parquet.

    Writes latest_result_DAA_QS.parquet, latest_result_DAA_Pearl.parquet
    and pearl_agents.parquet under ROOT_DIR.
    """

    dune = setup_dune_python_client()

    # DAA for QS agents.
    df = get_latest_result_from_DAA_QS(dune)

    df.to_parquet(
        ROOT_DIR / "latest_result_DAA_QS.parquet", index=False, compression="gzip"
    )

    # DAA for Pearl agents.
    df = get_latest_result_from_DAA_Pearl(dune)

    df.to_parquet(
        ROOT_DIR / "latest_result_DAA_Pearl.parquet", index=False, compression="gzip"
    )

    # Registry of pearl agents (safe address / service id / owner).
    df = get_the_list_of_pearl_agents(dune)

    df.to_parquet(ROOT_DIR / "pearl_agents.parquet", index=False, compression="gzip")
|
|
|
|
|
|
|
|
def get_mech_requests_on_closed_markets_by_pearl_agents(
    trades_closed_markets: pd.DataFrame,
) -> pd.DataFrame:
    """Return mech requests made by pearl agents on the given closed markets.

    :param trades_closed_markets: trades whose "title" column defines the
        set of closed markets to filter on.
    :return: filtered tools activity, or None when the tools file cannot
        be read.
    :raises ValueError: when no matching activity is found.

    NOTE(review): error contract is inconsistent — a tools read failure
    returns None while an empty match raises; confirm callers handle both.
    """

    pearl_agents = pd.read_parquet(ROOT_DIR / "pearl_agents.parquet")
    unique_addresses = pearl_agents["safe_address"].unique()

    # Market titles present in the supplied closed-market trades.
    closed_markets = trades_closed_markets.title.unique()

    try:
        tools_df = pd.read_parquet(TMP_DIR / "tools.parquet")
        tools_df = prepare_tools(tools_df, total_included=False)
    except Exception as e:
        print(f"Error reading tools parquet file {e}")
        return None

    # Keep only requests from pearl agents on the closed markets.
    agents_activity = tools_df[tools_df["trader_address"].isin(unique_addresses)].copy()
    agents_activity = agents_activity[agents_activity["title"].isin(closed_markets)]
    if len(agents_activity) > 0:
        return agents_activity
    raise ValueError("No agents activity found on closed markets")
|
|
|
|
|
|
|
|
def get_trades_on_closed_markets_by_pearl_agents() -> pd.DataFrame:
    """Return all closed-market trades executed by pearl agents.

    Looks up pearl agent safe addresses from pearl_agents.parquet and
    filters the full trades-profitability dataset down to those traders.

    :return: copy of the matching trade rows.
    :raises ValueError: when no trades match any pearl agent.
    """
    pearl_agents = pd.read_parquet(ROOT_DIR / "pearl_agents.parquet")
    pearl_addresses = pearl_agents["safe_address"].unique()

    all_trades_on_closed_markets = read_all_trades_profitability()

    is_pearl = all_trades_on_closed_markets["trader_address"].isin(pearl_addresses)
    agent_trades_df = all_trades_on_closed_markets[is_pearl].copy()
    if len(agent_trades_df) == 0:
        raise ValueError("No trades found for the pearl agents")
    return agent_trades_df
|
|
|
|
|
|
|
|
def compute_markets_agent_roi(
    agent_trades: pd.DataFrame,
    mech_calls: pd.DataFrame,
    agent: str,
    period: str,
    period_value: datetime,
) -> dict:
    """Compute ROI metrics for one agent over a set of market trades.

    ROI = (total earnings - total costs) / total costs, where
    costs = bet amounts + market fees + mech fees
    (DEFAULT_MECH_FEE per mech call).

    :param agent_trades: trades of this agent on the relevant markets.
    :param mech_calls: mech requests of this agent on those markets.
    :param agent: the agent's trader address.
    :param period: "week" or "day"; selects the period column name in the
        returned dict ("week_start" or "creation_date" respectively).
    :param period_value: value stored under the period column.
    :return: dict of ROI metrics for this agent and period.
    :raises ValueError: when *period* is invalid or total costs are zero.
    """
    # Validate the period first so a bad caller fails fast.
    if period not in ("week", "day"):
        raise ValueError(
            f"Invalid period {period} for agent {agent}. Expected 'week' or 'day'."
        )
    total_earnings = agent_trades.earnings.sum()
    total_market_fees = agent_trades.trade_fee_amount.sum()
    total_mech_fees = len(mech_calls) * DEFAULT_MECH_FEE
    total_bet_amount = agent_trades.collateral_amount.sum()
    total_costs = total_bet_amount + total_market_fees + total_mech_fees
    net_earnings = total_earnings - total_costs
    if total_costs == 0:
        raise ValueError(f"Total costs for agent {agent} are zero")
    roi = net_earnings / total_costs
    # The original duplicated the whole payload for both periods; only the
    # name of the period column differs.
    period_key = "week_start" if period == "week" else "creation_date"
    return {
        "trader_address": agent,
        period_key: period_value,
        "roi": roi,
        "net_earnings": net_earnings,
        "earnings": total_earnings,
        "total_bet_amount": total_bet_amount,
        "total_mech_calls": len(mech_calls),
        "nr_trades": len(agent_trades),
    }
|
|
|
|
|
|
|
|
def prepare_agents_data() -> Tuple[pd.DataFrame, pd.DataFrame]:
    """Function to prepare the agents data for the predict ROI KPIs computation

    Loads pearl-agent trades and mech requests on closed markets,
    normalizes their timestamps to UTC, adds date columns and sorts both
    chronologically.

    :return: (agent_trades, agent_mech_requests) DataFrames.
    """

    agent_trades = get_trades_on_closed_markets_by_pearl_agents()
    print(
        f"Number of trades done by pearl agents on closed markets: {len(agent_trades)}"
    )
    # Normalize trade timestamps to timezone-aware UTC.
    # NOTE(review): tz_convert requires the column to already be tz-aware —
    # confirm the source data always carries a timezone.
    agent_trades["creation_timestamp"] = pd.to_datetime(
        agent_trades["creation_timestamp"]
    )
    agent_trades["creation_timestamp"] = agent_trades[
        "creation_timestamp"
    ].dt.tz_convert("UTC")
    agent_trades["creation_date"] = agent_trades["creation_timestamp"].dt.date
    agent_trades = agent_trades.sort_values(by="creation_timestamp", ascending=True)

    # Mech requests restricted to the same closed markets as the trades.
    agent_mech_requests = get_mech_requests_on_closed_markets_by_pearl_agents(
        agent_trades
    )
    agent_mech_requests["request_time"] = pd.to_datetime(
        agent_mech_requests["request_time"], utc=True
    )
    agent_mech_requests = agent_mech_requests.sort_values(
        by="request_time", ascending=True
    )
    agent_mech_requests["request_date"] = agent_mech_requests["request_time"].dt.date
    print(
        f"Number of mech requests done by pearl agents on closed markets: {len(agent_mech_requests)}"
    )

    return agent_trades, agent_mech_requests
|
|
|
|
|
|
|
|
def compute_weekly_avg_roi_pearl_agents(
    agent_trades, agent_mech_requests
) -> pd.DataFrame:
    """Compute the average ROI across pearl agents per calendar week.

    For each week, the markets closed that week define the market set; each
    agent's ROI on that set is computed and then averaged across agents.

    :param agent_trades: trades with a tz-aware "creation_timestamp".
    :param agent_mech_requests: mech requests with a "title" column.
    :return: DataFrame with columns week_start and avg_weekly_roi.
    """
    # Start of the ISO week each trade falls into (mutates the input frame).
    agent_trades["week_start"] = (
        agent_trades["creation_timestamp"].dt.to_period("W").dt.start_time
    )

    grouped_trades = agent_trades.groupby("week_start")
    contents = []
    agents = agent_trades.trader_address.unique()

    for week, week_data in grouped_trades:
        print(f"Week: {week}")

        # Market titles seen in this week's trades.
        closed_markets = week_data.title.unique()
        for agent in agents:

            # NOTE(review): this filters ALL trades (not just this week's)
            # by title, so trades on the same market from other weeks are
            # included — presumably intentional ("all trades on markets
            # closed this week"), but confirm.
            agent_markets_data = agent_trades.loc[
                (agent_trades["trader_address"] == agent)
                & (agent_trades["title"].isin(closed_markets))
            ]
            if len(agent_markets_data) == 0:
                # Agent did not trade on this week's markets.
                continue

            agent_mech_calls = agent_mech_requests.loc[
                (agent_mech_requests["trader_address"] == agent)
                & (agent_mech_requests["title"].isin(closed_markets))
            ]

            try:
                roi_dict = compute_markets_agent_roi(
                    agent_markets_data, agent_mech_calls, agent, "week", week
                )
                contents.append(pd.DataFrame([roi_dict]))
            except ValueError as e:
                # Zero-cost agents are skipped rather than aborting the run.
                print(f"Skipping ROI calculation: {e}")
                continue

    weekly_agents_data = pd.concat(contents, ignore_index=True)

    # Average the per-agent ROI within each week.
    weekly_avg_roi_data = (
        weekly_agents_data.groupby("week_start")["roi"]
        .mean()
        .reset_index(name="avg_weekly_roi")
    )
    return weekly_avg_roi_data
|
|
|
|
|
|
|
|
def compute_total_roi_pearl_agents(agent_trades, agent_mech_requests) -> pd.DataFrame:
    """Compute the all-time ROI per pearl agent and persist it.

    Each agent's ROI is computed over every closed market present in
    *agent_trades*; the result is written to ROOT_DIR / total_roi.parquet.

    :param agent_trades: trades of pearl agents on closed markets.
    :param agent_mech_requests: mech requests of pearl agents.
    :return: DataFrame with one ROI row per agent.
    """
    closed_markets = agent_trades.title.unique()
    contents = []
    agents = agent_trades.trader_address.unique()
    for agent in agents:

        agent_markets_data = agent_trades.loc[
            (agent_trades["trader_address"] == agent)
            & (agent_trades["title"].isin(closed_markets))
        ]
        if len(agent_markets_data) == 0:
            # Agent has no trades on these markets.
            continue

        agent_mech_calls = agent_mech_requests.loc[
            (agent_mech_requests["trader_address"] == agent)
            & (agent_mech_requests["title"].isin(closed_markets))
        ]

        try:
            # period_value is None: the "creation_date" column is not
            # meaningful for the all-time aggregate.
            roi_dict = compute_markets_agent_roi(
                agent_markets_data, agent_mech_calls, agent, "day", None
            )
            contents.append(pd.DataFrame([roi_dict]))
        except ValueError as e:
            # Zero-cost agents are skipped rather than aborting the run.
            print(f"Skipping ROI calculation: {e}")
            continue
    total_roi_data = pd.concat(contents, ignore_index=True)
    total_roi_data.to_parquet(ROOT_DIR / "total_roi.parquet")
    # Bug fix: the annotation promises a DataFrame but the original
    # implicitly returned None.
    return total_roi_data
|
|
|
|
|
|
|
|
def compute_two_weeks_rolling_avg_roi_pearl_agents(
    agents_trades: pd.DataFrame, agents_mech_requests: pd.DataFrame
) -> pd.DataFrame:
    """Compute a 14-day rolling average ROI across pearl agents per day.

    For each trading day, the markets traded in the preceding 14 days
    define the market set; each agent's ROI on that set is computed and
    averaged across agents.

    :param agents_trades: trades with "creation_date" and "title" columns.
    :param agents_mech_requests: mech requests with a "title" column.
    :return: DataFrame with columns creation_date and two_weeks_avg_roi.
    """
    grouped_trades = agents_trades.groupby("creation_date")
    contents = []
    agents = agents_trades.trader_address.unique()

    for day, day_data in grouped_trades:

        print(f"Day: {day}")
        # Inclusive 14-day lookback window ending on this day.
        two_weeks_ago = day - timedelta(days=14)
        two_weeks_data = agents_trades.loc[
            (agents_trades["creation_date"] >= two_weeks_ago)
            & (agents_trades["creation_date"] <= day)
        ]
        if len(two_weeks_data) == 0:
            # No trades in the window for this day.
            continue

        # Market titles traded within the window.
        closed_markets = two_weeks_data.title.unique()
        for agent in agents:

            # NOTE(review): filters ALL trades by title (not only those in
            # the window), so older trades on the same markets are included
            # — presumably intentional, but confirm.
            agent_markets_data = agents_trades.loc[
                (agents_trades["trader_address"] == agent)
                & (agents_trades["title"].isin(closed_markets))
            ]
            if len(agent_markets_data) == 0:
                # Agent did not trade on these markets.
                continue

            agent_mech_calls = agents_mech_requests.loc[
                (agents_mech_requests["trader_address"] == agent)
                & (agents_mech_requests["title"].isin(closed_markets))
            ]

            try:
                roi_dict = compute_markets_agent_roi(
                    agent_markets_data, agent_mech_calls, agent, "day", day
                )
                contents.append(pd.DataFrame([roi_dict]))
            except ValueError as e:
                # Zero-cost agents are skipped rather than aborting the run.
                print(f"Skipping ROI calculation: {e}")
                continue
    two_weeks_avg_data = pd.concat(contents, ignore_index=True)

    # Average the per-agent ROI for each day's rolling window.
    two_weeks_rolling_avg_roi = (
        two_weeks_avg_data.groupby("creation_date")["roi"]
        .mean()
        .reset_index(name="two_weeks_avg_roi")
    )
    return two_weeks_rolling_avg_roi
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Refresh the Dune-derived datasets and the predict-services CSV.
    prepare_daa_data()
    prepare_predict_services_dataset()
    # NOTE(review): this client is never used below (e.g. by
    # load_predict_services_file) — confirm whether the upload step is
    # intentionally disabled.
    dune = setup_dune_python_client()

    # Load and normalize trades + mech requests for the pearl agents.
    agents_trades, agents_mech_requests = prepare_agents_data()

    # Weekly average ROI across agents.
    weekly_avg = compute_weekly_avg_roi_pearl_agents(
        agents_trades, agents_mech_requests
    )
    print(weekly_avg.head())

    weekly_avg.to_parquet(ROOT_DIR / "weekly_avg_roi_pearl_agents.parquet")

    # 14-day rolling average ROI across agents.
    two_weeks_avg = compute_two_weeks_rolling_avg_roi_pearl_agents(
        agents_trades, agents_mech_requests
    )
    print(two_weeks_avg.head())

    two_weeks_avg.to_parquet(ROOT_DIR / "two_weeks_avg_roi_pearl_agents.parquet")
|
|
|