# Olas-predict-dataset / scripts/predict_kpis.py
# Author: Skanislav
# Last commit: 36c053a — chore: add dotenv integration
import pandas as pd
from datetime import datetime, timedelta, UTC
from web3_utils import ROOT_DIR
from utils import measure_execution_time
from get_mech_info import (
fetch_block_number,
get_last_block_number,
read_all_trades_profitability,
)
from dotenv import load_dotenv
from tqdm import tqdm
import requests
import os
import pickle
from utils import TMP_DIR, ROOT_DIR
from concurrent.futures import ThreadPoolExecutor
from gnosis_timestamps import get_all_txs_between_blocks_from_trader_address
from dune_client.types import QueryParameter
from dune_client.client import DuneClient
from dune_client.query import QueryBase
from staking import add_predict_agent_category
from typing import Tuple
from tools_metrics import prepare_tools
from profitability import DEFAULT_MECH_FEE
# Lower bound of the extraction window for gnosis txs (UTC-aware)
DATETIME_60_DAYS_AGO = datetime.now(UTC) - timedelta(days=60)
# Load environment variables (e.g. DUNE_API_KEY) from a local .env file
load_dotenv()
def get_block_number_from_datetime(datetime_sample) -> int:
    """Resolve the block number closest to ``datetime_sample``.

    A 5-second window is used because ``fetch_block_number`` expects a
    (start, end) timestamp range rather than a single instant.

    :param datetime_sample: timezone-aware datetime to resolve.
    :raises ValueError: when no valid block number can be resolved.
    :return: the block number as an int.
    """
    print(f"Datetime sample = {datetime_sample}")
    five_seconds = timedelta(seconds=5)
    block_number = fetch_block_number(
        int(datetime_sample.timestamp()),
        int((datetime_sample + five_seconds).timestamp()),
    )
    block_number = block_number.get("number", "")
    if block_number.isdigit():
        return int(block_number)
    # Previously this fell through and implicitly returned None, which later
    # crashed "ending_block - starting_block" with an opaque TypeError.
    raise ValueError(f"Could not resolve a block number for {datetime_sample}")
def safe_get_txs(session, trader_address, market_creator: str, start_block, end_block):
    """Fetch a trader's transactions, falling back to an empty DataFrame.

    Any ``None`` result or raised exception is normalized to an empty
    ``pd.DataFrame`` so downstream ``pd.concat`` never breaks.
    """
    try:
        txs_df = get_all_txs_between_blocks_from_trader_address(
            session, trader_address, market_creator, start_block, end_block
        )
    except Exception as e:
        print(f"Error getting transactions for {trader_address}: {e}")
        return pd.DataFrame()  # normalize errors to an empty DataFrame
    return txs_df if txs_df is not None else pd.DataFrame()
@measure_execution_time
def prepare_gnosis_txs_dataset():
    """Extract the gnosis txs from the Olas traders.

    Reads the current all-trades-profitability dataset, keeps the unique
    (trader_address, market_creator) pairs of Olas traders, and fetches all
    their transactions over the last-60-days block range in parallel.

    :return: a single DataFrame with all fetched transactions.
    """
    # To read current all trades profitability
    all_trades = pd.read_parquet(ROOT_DIR / "all_trades_profitability.parquet")
    # to filter only traders of the trader_type Olas
    olas_traders = all_trades[all_trades["staking"] != "non_Olas"]
    # Unique trader addresses and their corresponding market creator
    olas_traders = olas_traders[["trader_address", "market_creator"]].drop_duplicates()
    olas_traders = olas_traders.reset_index(drop=True)
    print(f"Number of unique traders = {len(olas_traders)}")
    print(olas_traders.market_creator.value_counts())
    # Block range: from 60 days ago up to the chain head
    starting_block = get_block_number_from_datetime(DATETIME_60_DAYS_AGO)
    print(f"Starting block = {starting_block}")
    ending_block = get_last_block_number()
    print(f"Ending block = {ending_block}")
    print(f"Number of blocks to fetch = {ending_block - starting_block}")
    # Parallelize getting the transactions from the trader addresses.
    # Fix: the session is now closed deterministically via the context manager
    # (it was previously leaked).
    with requests.Session() as session:
        with ThreadPoolExecutor(max_workers=10) as executor:
            results = list(
                tqdm(
                    executor.map(
                        lambda x: safe_get_txs(
                            session,
                            x.trader_address,
                            x.market_creator,
                            starting_block,
                            ending_block,
                        ),
                        olas_traders.itertuples(index=False),
                    ),
                    total=len(olas_traders),
                )
            )
    results = [r for r in results if isinstance(r, pd.DataFrame)]
    if not results:
        # pd.concat raises on an empty list; return an empty frame instead
        return pd.DataFrame()
    return pd.concat(results, ignore_index=True)
def get_service_id_from_trader_address(trader_address: str, service_map: dict):
    """Return the service id whose safe address matches ``trader_address``.

    The match is case-insensitive; ``None`` is returned when no entry of
    ``service_map`` matches.
    """
    target = trader_address.lower()
    return next(
        (
            service_id
            for service_id, info in service_map.items()
            if info["safe_address"].lower() == target
        ),
        None,
    )
def prepare_predict_services_dataset():
    """Build predict_services.csv from the service map pickle.

    Keeps only services whose agent_id is 25 or 14 (the predict trader
    agents) and labels each with its market category (pearl / quickstart).
    Writes the result to ROOT_DIR / predict_services.csv.
    """
    # Read the service map pickle file
    with open(ROOT_DIR / "service_map.pkl", "rb") as f:
        service_map = pickle.load(f)
    content = []
    # Find all the safe addresses in the service map whose agent_id is 25 or 14.
    # Fix: dropped the per-entry debug print of every service value, which
    # spammed the logs once per registry entry.
    for service_id, service_info in service_map.items():
        if "agent_id" not in service_info:
            print(f"agent_id not found in value {service_info}")
            continue
        if service_info["agent_id"] in (25, 14):
            # label the predict agents into two categories: pearl and quickstart
            content.append(
                {
                    "safe_address": service_info["safe_address"],
                    "service_id": service_id,
                    "market_creator": add_predict_agent_category(
                        owner_address=service_info["owner_address"]
                    ),
                }
            )
    # build the dataframe from the list of dictionaries
    predict_services = pd.DataFrame(content)
    print(f"Number of unique predict agents = {len(predict_services)}")
    # save the dataset as a csv file
    predict_services.to_csv(ROOT_DIR / "predict_services.csv", index=False)
def setup_dune_python_client():
    """Function to setup the dune python client"""
    # Credentials must be exposed via the DUNE_API_KEY environment variable
    dune_api_key = os.getenv("DUNE_API_KEY")
    if dune_api_key is None:
        # Fail fast: nothing below can work without the API key
        raise ValueError("Dune API key is not set in the environment variable")
    return DuneClient(api_key=dune_api_key)
def load_predict_services_file(dune_client: DuneClient):
    """Upload the predict_services.csv dataset to Dune.

    :param dune_client: an initialized DuneClient.

    Errors are reported but not raised: a missing CSV or a failed upload
    only prints a message (best-effort upload).
    """
    csv_path = ROOT_DIR / "predict_services.csv"
    try:
        with open(csv_path, "r") as open_file:
            data = open_file.read()
        # Upload the CSV data
        print("loading the dataset in dune")
        dune_client.upload_csv(
            table_name="predict_services",
            data=data,
            description="Olas predict services found at the service registry",
            is_private=False,
        )
        print("CSV file uploaded successfully!")
    except FileNotFoundError:
        # Fix: the old message rendered as "<root> / predict_services.csv";
        # report the actual path that was opened.
        print(f"Error: CSV file not found at {csv_path}")
    except Exception as e:
        print(f"An error occurred during upload: {e}")
def get_latest_result_from_DAA_QS(dune_client: DuneClient):
    """Fetch the latest DAA Quickstart query result from Dune as a DataFrame.

    Reads the cached latest result of query 5193717; when that fails with a
    Unicode encoding error, re-runs the query as a fallback.
    """
    query = QueryBase(
        query_id=5193717,
    )
    try:
        query_result = dune_client.get_latest_result_dataframe(5193717)
        print(f"DAA QS query result = {query_result.head()}")
        return query_result
    except UnicodeEncodeError:
        # Alternative approach using run_query instead of get_latest_result
        print("Handling Unicode encoding error, using alternative method...")
        query_result = dune_client.run_query_dataframe(query=query)
        print(f"DAA QS query result = {query_result.head()}")
        return query_result
    # Fix: removed the unreachable trailing "return query_result" (both
    # branches above already return).
def get_latest_result_from_DAA_Pearl(dune_client: DuneClient):
    """Fetch the latest DAA Pearl query result from Dune as a DataFrame."""
    # Fix: removed the unused QueryBase local (only needed by the removed
    # run_query_dataframe path). Query id: 5193539.
    query_result = dune_client.get_latest_result_dataframe(5193539)
    print(f"DAA Pearl query result = {query_result.head()}")
    return query_result
def get_the_list_of_pearl_agents(dune_client: DuneClient):
    """Function to get the list of pearl agents from Dune"""
    # Fix: removed the unused QueryBase local (only needed by the removed
    # run_query_dataframe path). Query id: 5194206.
    query_result = dune_client.get_latest_result_dataframe(5194206)
    print(f"Pearl agents query result = {query_result.head()}")
    # keep only the columns: safe_address, serviceId, service_owner
    query_result = query_result[["safe_address", "serviceId", "service_owner"]]
    # Remove duplicates
    query_result = query_result.drop_duplicates(
        subset=["safe_address", "service_owner", "serviceId"]
    )
    return query_result
def prepare_daa_data():
    """Download the DAA datasets from Dune and persist them as parquet files."""
    dune = setup_dune_python_client()
    # Latest result from the DAA QS query
    daa_qs_df = get_latest_result_from_DAA_QS(dune)
    daa_qs_df.to_parquet(
        ROOT_DIR / "latest_result_DAA_QS.parquet", index=False, compression="gzip"
    )
    # Latest result from the DAA Pearl query
    daa_pearl_df = get_latest_result_from_DAA_Pearl(dune)
    daa_pearl_df.to_parquet(
        ROOT_DIR / "latest_result_DAA_Pearl.parquet", index=False, compression="gzip"
    )
    # Current list of pearl agents
    pearl_agents_df = get_the_list_of_pearl_agents(dune)
    pearl_agents_df.to_parquet(
        ROOT_DIR / "pearl_agents.parquet", index=False, compression="gzip"
    )
def get_mech_requests_on_closed_markets_by_pearl_agents(
    trades_closed_markets: pd.DataFrame,
) -> pd.DataFrame | None:
    """Filter the mech requests done by pearl agents on closed markets.

    :param trades_closed_markets: trades dataframe; its ``title`` column
        provides the set of closed-market titles to filter on.
    :return: the filtered mech-requests dataframe, or ``None`` when the
        tools parquet file cannot be read/prepared.
    :raises ValueError: when no matching activity is found.
    """
    # read the list of pearl agents
    pearl_agents = pd.read_parquet(ROOT_DIR / "pearl_agents.parquet")
    unique_addresses = pearl_agents["safe_address"].unique()
    # prepare a list of closed markets from trades_closed_markets
    closed_markets = trades_closed_markets.title.unique()
    # filter the mech requests done by agents on closed markets
    try:
        tools_df = pd.read_parquet(TMP_DIR / "tools.parquet")
        tools_df = prepare_tools(tools_df, total_included=False)
    except Exception as e:
        print(f"Error reading tools parquet file {e}")
        # NOTE(review): callers (e.g. prepare_agents_data) do not check for
        # None and would fail with a TypeError downstream — consider raising
        # here instead; confirm intended contract.
        return None
    agents_activity = tools_df[tools_df["trader_address"].isin(unique_addresses)].copy()
    agents_activity = agents_activity[agents_activity["title"].isin(closed_markets)]
    if len(agents_activity) > 0:
        return agents_activity
    raise ValueError("No agents activity found on closed markets")
def get_trades_on_closed_markets_by_pearl_agents() -> pd.DataFrame:
    """Return the closed-market trades made by the known pearl agents.

    :raises ValueError: when no trade belongs to a pearl agent.
    """
    # Safe addresses of the registered pearl agents
    pearl_safe_addresses = pd.read_parquet(ROOT_DIR / "pearl_agents.parquet")[
        "safe_address"
    ].unique()
    # All trades on closed markets, restricted to those addresses
    all_closed_market_trades = read_all_trades_profitability()
    pearl_trades = all_closed_market_trades[
        all_closed_market_trades["trader_address"].isin(pearl_safe_addresses)
    ].copy()
    if len(pearl_trades) == 0:
        raise ValueError("No trades found for the pearl agents")
    return pearl_trades
def compute_markets_agent_roi(
    agent_trades: pd.DataFrame,
    mech_calls: pd.DataFrame,
    agent: str,
    period: str,
    period_value: datetime,
) -> dict:
    """Compute the ROI KPIs for one agent over a set of markets.

    ROI formula: net_earnings / total_costs, where
    total_costs = bet amounts + market fees + mech fees (one DEFAULT_MECH_FEE
    per mech call).

    :param agent_trades: the agent's trades on the selected markets.
    :param mech_calls: the agent's mech requests on those markets.
    :param agent: the trader address.
    :param period: either "week" or "day"; selects the period column name.
    :param period_value: the period identifier stored in the result.
    :raises ValueError: on zero total costs or an invalid period.
    :return: a dict with the ROI metrics for the period.
    """
    # Validate the period up-front so we fail before doing any arithmetic
    # (previously validated last), and collapse the two near-identical result
    # dicts that differed only in the period column name.
    period_column = {"week": "week_start", "day": "creation_date"}.get(period)
    if period_column is None:
        raise ValueError(
            f"Invalid period {period} for agent {agent}. Expected 'week' or 'day'."
        )
    total_earnings = agent_trades.earnings.sum()
    total_market_fees = agent_trades.trade_fee_amount.sum()
    total_mech_fees = len(mech_calls) * DEFAULT_MECH_FEE
    total_bet_amount = agent_trades.collateral_amount.sum()
    total_costs = total_bet_amount + total_market_fees + total_mech_fees
    if total_costs == 0:
        raise ValueError(f"Total costs for agent {agent} are zero")
    net_earnings = total_earnings - total_costs
    roi = net_earnings / total_costs
    return {
        "trader_address": agent,
        period_column: period_value,
        "roi": roi,
        "net_earnings": net_earnings,
        "earnings": total_earnings,
        "total_bet_amount": total_bet_amount,
        "total_mech_calls": len(mech_calls),
        "nr_trades": len(agent_trades),
    }
def prepare_agents_data() -> Tuple[pd.DataFrame, pd.DataFrame]:
    """Function to prepare the agents data for the predict ROI KPIs computation"""
    # Get the trades done by pearl agents on closed markets
    agent_trades = get_trades_on_closed_markets_by_pearl_agents()
    print(
        f"Number of trades done by pearl agents on closed markets: {len(agent_trades)}"
    )
    # Normalize trade timestamps to UTC and derive a calendar date column.
    # NOTE(review): tz_convert assumes the parsed timestamps are already
    # timezone-aware; naive values would raise here — confirm upstream data.
    agent_trades["creation_timestamp"] = pd.to_datetime(
        agent_trades["creation_timestamp"]
    )
    agent_trades["creation_timestamp"] = agent_trades[
        "creation_timestamp"
    ].dt.tz_convert("UTC")
    agent_trades["creation_date"] = agent_trades["creation_timestamp"].dt.date
    agent_trades = agent_trades.sort_values(by="creation_timestamp", ascending=True)
    # Get the mech requests done by pearl agents on closed markets
    agent_mech_requests = get_mech_requests_on_closed_markets_by_pearl_agents(
        agent_trades
    )
    # Same normalization for the mech request timestamps (utc=True localizes
    # or converts as needed), plus a request_date column for daily grouping
    agent_mech_requests["request_time"] = pd.to_datetime(
        agent_mech_requests["request_time"], utc=True
    )
    agent_mech_requests = agent_mech_requests.sort_values(
        by="request_time", ascending=True
    )
    agent_mech_requests["request_date"] = agent_mech_requests["request_time"].dt.date
    print(
        f"Number of mech requests done by pearl agents on closed markets: {len(agent_mech_requests)}"
    )
    return agent_trades, agent_mech_requests
def compute_weekly_avg_roi_pearl_agents(
    agent_trades, agent_mech_requests
) -> pd.DataFrame:
    """Compute the average weekly ROI across pearl agents.

    For each calendar week, takes the markets that had trades in that week
    and computes, per agent, the ROI over ALL of that agent's trades on those
    markets (regardless of which week the trades happened in); then averages
    the per-agent ROI values within each week.

    NOTE(review): this mutates the caller's ``agent_trades`` in place by
    adding a ``week_start`` column.
    """
    agent_trades["week_start"] = (
        agent_trades["creation_timestamp"].dt.to_period("W").dt.start_time
    )
    grouped_trades = agent_trades.groupby("week_start")
    contents = []
    agents = agent_trades.trader_address.unique()
    # Iterate through the groups (each group represents a week)
    for week, week_data in grouped_trades:
        print(f"Week: {week}")  # Print the week identifier
        # for all closed markets with activity in this week
        closed_markets = week_data.title.unique()
        for agent in agents:
            # all trades done by the agent on those markets, no matter from which week
            agent_markets_data = agent_trades.loc[
                (agent_trades["trader_address"] == agent)
                & (agent_trades["title"].isin(closed_markets))
            ]
            if len(agent_markets_data) == 0:
                # no betting activity for this agent on these markets
                continue
            # filter mech requests done by the agent on those markets
            agent_mech_calls = agent_mech_requests.loc[
                (agent_mech_requests["trader_address"] == agent)
                & (agent_mech_requests["title"].isin(closed_markets))
            ]
            # compute the ROI for those markets, that trader and that week
            try:
                # Convert the dictionary to DataFrame before appending
                roi_dict = compute_markets_agent_roi(
                    agent_markets_data, agent_mech_calls, agent, "week", week
                )
                contents.append(pd.DataFrame([roi_dict]))
            except ValueError as e:
                # e.g. zero total costs; skip this agent/week sample
                print(f"Skipping ROI calculation: {e}")
                continue
    weekly_agents_data = pd.concat(contents, ignore_index=True)
    # average the ROI for all samples (at the trader/market level) in that week
    weekly_avg_roi_data = (
        weekly_agents_data.groupby("week_start")["roi"]
        .mean()
        .reset_index(name="avg_weekly_roi")
    )
    return weekly_avg_roi_data
def compute_total_roi_pearl_agents(agent_trades, agent_mech_requests) -> pd.DataFrame:
    """Compute the all-time ROI per pearl agent and save it to parquet.

    :param agent_trades: trades done by pearl agents on closed markets.
    :param agent_mech_requests: mech requests done by those agents.
    :return: the per-agent ROI dataframe (also written to total_roi.parquet).
    """
    closed_markets = agent_trades.title.unique()
    contents = []
    agents = agent_trades.trader_address.unique()
    for agent in agents:
        # all trades done by the agent on the closed markets
        agent_markets_data = agent_trades.loc[
            (agent_trades["trader_address"] == agent)
            & (agent_trades["title"].isin(closed_markets))
        ]
        if len(agent_markets_data) == 0:
            # no betting activity for this agent
            continue
        # mech requests done by the agent on those markets
        agent_mech_calls = agent_mech_requests.loc[
            (agent_mech_requests["trader_address"] == agent)
            & (agent_mech_requests["title"].isin(closed_markets))
        ]
        # compute the all-time ROI for that trader
        try:
            # Convert the dictionary to DataFrame before appending
            roi_dict = compute_markets_agent_roi(
                agent_markets_data, agent_mech_calls, agent, "day", None
            )
            contents.append(pd.DataFrame([roi_dict]))
        except ValueError as e:
            print(f"Skipping ROI calculation: {e}")
            continue
    # Guard: pd.concat raises on an empty list
    total_roi_data = (
        pd.concat(contents, ignore_index=True) if contents else pd.DataFrame()
    )
    total_roi_data.to_parquet(ROOT_DIR / "total_roi.parquet")
    # Fix: the signature promises a DataFrame but the original returned None
    return total_roi_data
def compute_two_weeks_rolling_avg_roi_pearl_agents(
    agents_trades: pd.DataFrame, agents_mech_requests: pd.DataFrame
) -> pd.DataFrame:
    """Compute a two-week rolling average ROI across pearl agents.

    For each trading day, takes the markets traded in the preceding 14 days
    and computes, per agent, the ROI over all of that agent's trades on those
    markets; then averages the per-agent ROI values for each day.
    """
    grouped_trades = agents_trades.groupby("creation_date")
    contents = []
    agents = agents_trades.trader_address.unique()
    # Iterate through the groups (each group represents a day)
    for day, day_data in grouped_trades:
        # take all closed markets in two weeks before that day
        print(f"Day: {day}")  # Print the day identifier
        two_weeks_ago = day - timedelta(days=14)
        two_weeks_data = agents_trades.loc[
            (agents_trades["creation_date"] >= two_weeks_ago)
            & (agents_trades["creation_date"] <= day)
        ]
        if len(two_weeks_data) == 0:
            # no betting activity in the window
            continue
        # all closed markets active in the two-week window
        closed_markets = two_weeks_data.title.unique()
        for agent in agents:
            # trades done by the agent on those markets (note: filtered by
            # market title, not by date — all of the agent's trades on the
            # window's markets are included)
            agent_markets_data = agents_trades.loc[
                (agents_trades["trader_address"] == agent)
                & (agents_trades["title"].isin(closed_markets))
            ]
            if len(agent_markets_data) == 0:
                # no betting activity for this agent
                continue
            # filter mech requests done by the agent on those markets
            agent_mech_calls = agents_mech_requests.loc[
                (agents_mech_requests["trader_address"] == agent)
                & (agents_mech_requests["title"].isin(closed_markets))
            ]
            # compute the ROI for these markets, that trader and this period
            try:
                # Convert the dictionary to DataFrame before appending
                roi_dict = compute_markets_agent_roi(
                    agent_markets_data, agent_mech_calls, agent, "day", day
                )
                contents.append(pd.DataFrame([roi_dict]))
            except ValueError as e:
                # e.g. zero total costs; skip this agent/day sample
                print(f"Skipping ROI calculation: {e}")
                continue
    two_weeks_avg_data = pd.concat(contents, ignore_index=True)
    # average per day over all agent samples
    two_weeks_rolling_avg_roi = (
        two_weeks_avg_data.groupby("creation_date")["roi"]
        .mean()
        .reset_index(name="two_weeks_avg_roi")
    )
    return two_weeks_rolling_avg_roi
if __name__ == "__main__":
    # Refresh the Dune-derived datasets (DAA QS / DAA Pearl + pearl agents)
    prepare_daa_data()
    # Rebuild predict_services.csv from the service map
    prepare_predict_services_dataset()
    # Client created for the (currently disabled) Dune upload below; it also
    # fails fast when DUNE_API_KEY is missing from the environment
    dune = setup_dune_python_client()
    # load_predict_services_file(dune_client=dune)
    # Load and normalize the trades and mech requests of the pearl agents
    agents_trades, agents_mech_requests = prepare_agents_data()
    # compute_total_roi_pearl_agents(
    #     agent_trades=agents_trades, agent_mech_requests=agents_mech_requests
    # )
    # Weekly average ROI KPI
    weekly_avg = compute_weekly_avg_roi_pearl_agents(
        agents_trades, agents_mech_requests
    )
    print(weekly_avg.head())
    # save in a file
    weekly_avg.to_parquet(ROOT_DIR / "weekly_avg_roi_pearl_agents.parquet")
    # Two-week rolling average ROI KPI
    two_weeks_avg = compute_two_weeks_rolling_avg_roi_pearl_agents(
        agents_trades, agents_mech_requests
    )
    print(two_weeks_avg.head())
    # save in a file
    two_weeks_avg.to_parquet(ROOT_DIR / "two_weeks_avg_roi_pearl_agents.parquet")