cyberosa commited on
Commit
1ce17c8
·
1 Parent(s): 8b56de6

Updating the weekly ROI computation for Pearl agents

Browse files
scripts/{daa.py → predict_kpis.py} RENAMED
@@ -2,18 +2,25 @@ import pandas as pd
2
  from datetime import datetime, timedelta, UTC
3
  from web3_utils import ROOT_DIR
4
  from utils import measure_execution_time
5
- from get_mech_info import fetch_block_number, get_last_block_number
 
 
 
 
6
  from tqdm import tqdm
7
  import requests
8
  import os
9
  import pickle
 
10
  from concurrent.futures import ThreadPoolExecutor
11
  from gnosis_timestamps import get_all_txs_between_blocks_from_trader_address
12
  from dune_client.types import QueryParameter
13
  from dune_client.client import DuneClient
14
  from dune_client.query import QueryBase
15
  from staking import add_predict_agent_category
16
- from functools import wraps
 
 
17
 
18
  DATETIME_60_DAYS_AGO = datetime.now(UTC) - timedelta(days=60)
19
 
@@ -254,8 +261,148 @@ def prepare_daa_data():
254
  df.to_parquet(ROOT_DIR / "pearl_agents.parquet", index=False, compression="gzip")
255
 
256
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
257
  if __name__ == "__main__":
258
  prepare_daa_data()
259
- # prepare_predict_services_dataset()
260
  # dune = setup_dune_python_client()
261
  # load_predict_services_file(dune_client=dune)
 
 
 
 
 
2
  from datetime import datetime, timedelta, UTC
3
  from web3_utils import ROOT_DIR
4
  from utils import measure_execution_time
5
+ from get_mech_info import (
6
+ fetch_block_number,
7
+ get_last_block_number,
8
+ read_all_trades_profitability,
9
+ )
10
  from tqdm import tqdm
11
  import requests
12
  import os
13
  import pickle
14
+ from utils import TMP_DIR, INC_TOOLS, ROOT_DIR
15
  from concurrent.futures import ThreadPoolExecutor
16
  from gnosis_timestamps import get_all_txs_between_blocks_from_trader_address
17
  from dune_client.types import QueryParameter
18
  from dune_client.client import DuneClient
19
  from dune_client.query import QueryBase
20
  from staking import add_predict_agent_category
21
+ from typing import Tuple
22
+ from tools_metrics import prepare_tools
23
+ from profitability import DEFAULT_MECH_FEE
24
 
25
  DATETIME_60_DAYS_AGO = datetime.now(UTC) - timedelta(days=60)
26
 
 
261
  df.to_parquet(ROOT_DIR / "pearl_agents.parquet", index=False, compression="gzip")
262
 
263
 
264
def get_mech_requests_on_closed_markets_by_pearl_agents(
    trades_closed_markets: pd.DataFrame,
) -> pd.DataFrame | None:
    """Return the mech-tool requests made by Pearl agents on closed markets.

    Args:
        trades_closed_markets: trades on closed markets; only its ``title``
            column is used, as the set of closed-market titles to filter on.

    Returns:
        The rows of the tools dataset made by Pearl agents on the given
        closed markets, or ``None`` when the tools parquet file cannot be
        read (best-effort behavior kept from the original; the annotation
        now reflects it).

    Raises:
        ValueError: if no matching mech activity is found.
    """
    # Pearl agents are identified by their safe addresses.
    pearl_agents = pd.read_parquet(ROOT_DIR / "pearl_agents.parquet")
    unique_addresses = pearl_agents["safe_address"].unique()

    # prepare a list of closed markets from trades_closed_markets
    closed_markets = trades_closed_markets.title.unique()

    # Load and normalize the mech tools dataset; tolerate read errors.
    try:
        tools_df = pd.read_parquet(TMP_DIR / "tools.parquet")
        tools_df = prepare_tools(tools_df)
    except Exception as e:
        print(f"Error reading tools parquet file {e}")
        return None

    # filter the mech requests done by agents on closed markets
    agents_activity = tools_df[tools_df["trader_address"].isin(unique_addresses)].copy()
    agents_activity = agents_activity[agents_activity["title"].isin(closed_markets)]
    if len(agents_activity) > 0:
        return agents_activity
    raise ValueError("No agents activity found on closed markets")
286
+
287
+
288
def get_trades_on_closed_markets_by_pearl_agents() -> pd.DataFrame:
    """Collect all closed-market trades executed by Pearl agents.

    Filters the full trades-profitability dataset down to the safe
    addresses listed in the Pearl agents parquet file.

    Raises:
        ValueError: when no Pearl-agent trades exist in the dataset.
    """
    # Pearl agents are keyed by their safe addresses.
    pearl_addresses = pd.read_parquet(ROOT_DIR / "pearl_agents.parquet")[
        "safe_address"
    ].unique()

    # Full trades datasource on closed markets.
    closed_market_trades = read_all_trades_profitability()

    # Keep only the rows traded by a Pearl agent.
    is_pearl = closed_market_trades["trader_address"].isin(pearl_addresses)
    pearl_trades = closed_market_trades[is_pearl].copy()
    if pearl_trades.empty:
        raise ValueError("No trades found for the pearl agents")
    return pearl_trades
303
+
304
+
305
def compute_market_agent_roi(
    agent_trades: pd.DataFrame,
    mech_calls: pd.DataFrame,
    agent: str,
    week: datetime,
) -> dict:
    """Compute one agent's ROI over its trades for a given week.

    ROI = net_earnings / total_costs, where the costs are the bet
    amounts plus market fees plus the flat fee per mech request.

    Raises:
        ValueError: when total costs are zero (ROI is undefined).
    """
    gross_earnings = agent_trades["earnings"].sum()
    market_fees = agent_trades["trade_fee_amount"].sum()
    bet_amount = agent_trades["collateral_amount"].sum()
    # Every mech request is charged the same flat fee.
    num_mech_calls = len(mech_calls)
    mech_fees = num_mech_calls * DEFAULT_MECH_FEE

    costs = bet_amount + market_fees + mech_fees
    profit = gross_earnings - costs
    if costs == 0:
        raise ValueError(f"Total costs for agent {agent} in week {week} are zero")
    return {
        "trader_address": agent,
        "week_start": week,
        "roi": profit / costs,
        "net_earnings": profit,
        "total_bet_amount": bet_amount,
        "total_mech_calls": num_mech_calls,
    }
329
+
330
+
331
def compute_weekly_avg_roi_pearl_agents() -> pd.DataFrame:
    """Compute the average weekly ROI of Pearl agents on closed markets.

    For each week (derived from the trade creation timestamp) and each
    agent active that week, an ROI sample is computed over ALL of the
    agent's trades on the markets that closed in that week — no matter
    from which week the trades were made. The samples are then averaged
    per week.

    Returns:
        A DataFrame with columns ``week_start`` and ``avg_weekly_roi``.

    Raises:
        ValueError: if no trades are found, or no ROI sample could be
            computed for any week.
    """
    agent_trades = get_trades_on_closed_markets_by_pearl_agents()
    agent_mech_requests = get_mech_requests_on_closed_markets_by_pearl_agents(
        agent_trades
    )

    # Normalize timestamps to UTC and derive the week of each trade.
    # NOTE(review): dt.tz_convert assumes the timestamps are already
    # tz-aware; confirm the source data carries timezone info.
    agent_trades["creation_timestamp"] = pd.to_datetime(
        agent_trades["creation_timestamp"]
    )
    agent_trades["creation_timestamp"] = agent_trades[
        "creation_timestamp"
    ].dt.tz_convert("UTC")
    agent_trades["creation_date"] = agent_trades["creation_timestamp"].dt.date
    agent_trades = agent_trades.sort_values(by="creation_timestamp", ascending=True)

    agent_trades["week_start"] = (
        agent_trades["creation_timestamp"].dt.to_period("W").dt.start_time
    )

    grouped_trades = agent_trades.groupby("week_start")
    contents = []  # one ROI sample dict per (agent, week)

    # Iterate through the groups (each group represents a week)
    for week, week_data in grouped_trades:
        print(f"Week: {week}")  # Print the week identifier

        # Markets closed this week and the agents active on them.
        closed_markets = week_data.title.unique()
        agents = week_data.trader_address.unique()
        for agent in agents:
            # all trades done by the agent on those markets, no matter from which week
            agent_markets_data = agent_trades.loc[
                (agent_trades["trader_address"] == agent)
                & (agent_trades["title"].isin(closed_markets))
            ]
            if len(agent_markets_data) == 0:
                # no betting activity
                continue
            # filter mech requests done by the agent on those markets
            agent_mech_calls = agent_mech_requests.loc[
                (agent_mech_requests["trader_address"] == agent)
                & (agent_mech_requests["title"].isin(closed_markets))
            ]
            # compute the ROI for that market, that trader and that week
            try:
                roi_dict = compute_market_agent_roi(
                    agent_markets_data, agent_mech_calls, agent, week
                )
                contents.append(roi_dict)
            except ValueError as e:
                print(f"Skipping ROI calculation: {e}")
                continue

    # Guard: pd.concat on an empty list would raise an opaque
    # "No objects to concatenate" error when every sample was skipped.
    if not contents:
        raise ValueError("No ROI samples could be computed for any week")
    # Build the frame once from the collected dicts instead of
    # concatenating many one-row DataFrames.
    weekly_agents_data = pd.DataFrame(contents)
    # average the ROI for all samples (at the trader/market level) in that week
    weekly_avg_roi_data = (
        weekly_agents_data.groupby("week_start")["roi"]
        .mean()
        .reset_index(name="avg_weekly_roi")
    )
    return weekly_avg_roi_data
393
+
394
+
395
def compute_daily_avg_roi_pearl_agents() -> pd.DataFrame:
    """Compute the average daily ROI of Pearl agents (not implemented).

    NOTE(review): currently a stub that only prints "WIP" and implicitly
    returns None, despite the DataFrame return annotation — align the
    annotation once implemented.
    """
    # TODO Implementation
    print("WIP")
398
+
399
+
400
if __name__ == "__main__":
    # Build the Pearl agents dataset (appears to write
    # pearl_agents.parquet — see prepare_daa_data above).
    prepare_daa_data()
    # NOTE(review): prepare_predict_services_dataset is not defined in the
    # visible part of this file — confirm it exists before running.
    prepare_predict_services_dataset()
    # dune = setup_dune_python_client()
    # load_predict_services_file(dune_client=dune)
    # final_dataset = compute_weekly_avg_roi_pearl_agents()
    # print(final_dataset.head())
    # # save in a file
    # final_dataset.to_parquet(ROOT_DIR / "weekly_avg_roi_pearl_agents.parquet")
scripts/pull_data.py CHANGED
@@ -136,7 +136,7 @@ def only_new_weekly_analysis():
136
 
137
  save_historical_data()
138
  try:
139
- clean_old_data_from_parquet_files("2025-04-08")
140
  clean_old_data_from_json_files()
141
  except Exception as e:
142
  print("Error cleaning the oldest information from parquet files")
 
136
 
137
  save_historical_data()
138
  try:
139
+ clean_old_data_from_parquet_files("2025-04-11")
140
  clean_old_data_from_json_files()
141
  except Exception as e:
142
  print("Error cleaning the oldest information from parquet files")
scripts/tools_metrics.py CHANGED
@@ -1,5 +1,4 @@
1
  import pandas as pd
2
- from typing import List
3
  from utils import TMP_DIR, INC_TOOLS, ROOT_DIR
4
 
5
 
 
1
  import pandas as pd
 
2
  from utils import TMP_DIR, INC_TOOLS, ROOT_DIR
3
 
4