import os
import pickle
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime, timedelta, UTC
from typing import Tuple

import pandas as pd
import requests
from dotenv import load_dotenv
from dune_client.client import DuneClient
from dune_client.query import QueryBase
from tqdm import tqdm

from get_mech_info import (
    fetch_block_number,
    get_last_block_number,
    read_all_trades_profitability,
)
from gnosis_timestamps import get_all_txs_between_blocks_from_trader_address
from profitability import DEFAULT_MECH_FEE
from staking import add_predict_agent_category
from tools_metrics import prepare_tools
from utils import ROOT_DIR, TMP_DIR, measure_execution_time

DATETIME_60_DAYS_AGO = datetime.now(UTC) - timedelta(days=60)

load_dotenv()


def get_block_number_from_datetime(datetime_sample) -> int:
    """Function to get the block number from a datetime sample"""
    print(f"Datetime sample = {datetime_sample}")
    five_seconds = timedelta(seconds=5)
    block_number = fetch_block_number(
        int(datetime_sample.timestamp()),
        int((datetime_sample + five_seconds).timestamp()),
    )
    block_number = block_number.get("number", "")
    if not block_number.isdigit():
        raise ValueError(f"Could not fetch a block number for {datetime_sample}")
    return int(block_number)


def safe_get_txs(session, trader_address, market_creator: str, start_block, end_block):
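    """Function to safely get the txs of a trader, returning an empty DataFrame on error"""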
    try:
        df = get_all_txs_between_blocks_from_trader_address(
            session, trader_address, market_creator, start_block, end_block
        )
        if df is None:
            return pd.DataFrame()  # Return empty DataFrame if None
        return df
    except Exception as e:
        print(f"Error getting transactions for {trader_address}: {e}")
        return pd.DataFrame()  # Return empty DataFrame on error


@measure_execution_time
def prepare_gnosis_txs_dataset():
    """Function to extract the gnosis txs from the Olas traders"""

    # Read the current all-trades profitability dataset
    all_trades = pd.read_parquet(ROOT_DIR / "all_trades_profitability.parquet")

    # Keep only Olas traders (any staking label other than "non_Olas")
    olas_traders = all_trades[all_trades["staking"] != "non_Olas"]

    # Extract the unique trader addresses and their corresponding market creator
    olas_traders = olas_traders[["trader_address", "market_creator"]].drop_duplicates()
    olas_traders = olas_traders.reset_index(drop=True)
    print(f"Number of unique traders = {len(olas_traders)}")
    print(olas_traders.market_creator.value_counts())

    # Get the starting block number
    starting_block = get_block_number_from_datetime(DATETIME_60_DAYS_AGO)
    print(f"Starting block = {starting_block}")
    # Get the ending block number
    ending_block = get_last_block_number()
    print(f"Ending block = {ending_block}")
    print(f"Number of blocks to fetch = {ending_block - starting_block}")
    # Parallelize getting the transactions from the trader addresses
    results = []
    session = requests.Session()
    # Use this session for all requests
    with ThreadPoolExecutor(max_workers=10) as executor:
        results = list(
            tqdm(
                executor.map(
                    lambda x: safe_get_txs(
                        session,
                        x.trader_address,
                        x.market_creator,
                        starting_block,
                        ending_block,
                    ),
                    olas_traders.itertuples(index=False),
                ),
                total=len(olas_traders),
            )
        )
    results = [r for r in results if isinstance(r, pd.DataFrame)]
    return pd.concat(results, ignore_index=True)


def get_service_id_from_trader_address(trader_address: str, service_map: dict):
    """Function to get the service id from the trader address"""

    # get the service id from the trader address
    for key, value in service_map.items():
        if value["safe_address"].lower() == trader_address.lower():
            return key
    return None


def prepare_predict_services_dataset():
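    """Function to build the predict services dataset from the service map (agent_id 25 or 14)"""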
    # Read the service map pickle file
    with open(ROOT_DIR / "service_map.pkl", "rb") as f:
        service_map = pickle.load(f)
    content = []
    # Find all the safe addresses in the service map whose agent_id is equal to 25 or 14
    for key, value in service_map.items():
        if "agent_id" not in value:
            print(f"agent_id not found in value {value}")
            continue
        if value["agent_id"] == 25 or value["agent_id"] == 14:
            agent_dict = {}
            # label the predict agents into two categories: pearl and quickstart
            owner_address = value["owner_address"]
            agent_dict["safe_address"] = value["safe_address"]
            agent_dict["service_id"] = key
            agent_dict["market_creator"] = add_predict_agent_category(
                owner_address=owner_address
            )
            content.append(agent_dict)
    # build the dataframe from the list of dictionaries
    predict_services = pd.DataFrame(content)
    print(f"Number of unique predict agents = {len(predict_services)}")
    # save the dataset as a csv file
    predict_services.to_csv(ROOT_DIR / "predict_services.csv", index=False)


def setup_dune_python_client():
    """Function to setup the dune python client"""

    # Read api key from the environment variable
    dune_api_key = os.getenv("DUNE_API_KEY")
    # If the api key is not set, raise an error
    if dune_api_key is None:
        raise ValueError("Dune API key is not set in the environment variable")

    # Initialize the DuneClient
    dune_client = DuneClient(
        api_key=dune_api_key,
    )
    return dune_client


def load_predict_services_file(dune_client: DuneClient):
    """Function to load the olas dataset in dune"""

    # Prepare the olas dataset
    # prepare_predict_services_dataset()
    try:
        with open(ROOT_DIR / "predict_services.csv", "r") as open_file:
            data = open_file.read()

        # Upload the CSV data
        print("loading the dataset in dune")
        # dune_client.create_table(
        #     table_name="olas_trader_agents",
        #     description="Olas trader agents found in Pearl and Quickstart markets",
        #     schema=[
        #         {"name": "date", "type": "timestamp"},
        #         {"name": "dgs10", "type": "double", "nullable": True},
        #     ],
        #     is_private=False,
        #     namespace="cyberosa",
        # )
        # use the dune client to upload the dataset predict_services.csv
        dune_client.upload_csv(
            table_name="predict_services",
            data=data,
            description="Olas predict services found at the service registry",
            # schema=[
            #     {"name": "safe_address", "type": "text"},
            #     {"name": "service_id", "type": "text"},
            #     {"name": "market_creator", "type": "text"},
            # ],
            is_private=False,
        )
        print(f"CSV file uploaded successfully!")

    except FileNotFoundError:
        print(f"Error: CSV file not found at {ROOT_DIR} / predict_services.csv")
    except Exception as e:
        print(f"An error occurred during upload: {e}")


def get_latest_result_from_DAA_QS(dune_client: DuneClient):
    """Function to get the latest result of the DAA Quickstart query (5193717) from Dune"""
    query = QueryBase(
        query_id=5193717,
    )
    try:
        # query_result = dune_client.run_query_dataframe(query=query)
        query_result = dune_client.get_latest_result_dataframe(5193717)
        print(f"DAA QS query result = {query_result.head()}")
        return query_result
    except UnicodeEncodeError:
        # Alternative approach using run_query instead of get_latest_result
        print("Handling Unicode encoding error, using alternative method...")
        query_result = dune_client.run_query_dataframe(query=query)
        print(f"DAA QS query result = {query_result.head()}")
        return query_result


def get_latest_result_from_DAA_Pearl(dune_client: DuneClient):
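    """Function to get the latest result of the DAA Pearl query (5193539) from Dune"""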
    query = QueryBase(
        query_id=5193539,
    )
    # query_result = dune_client.run_query_dataframe(query=query)
    query_result = dune_client.get_latest_result_dataframe(5193539)
    print(f"DAA Pearl query result = {query_result.head()}")
    return query_result


def get_the_list_of_pearl_agents(dune_client: DuneClient):
    """Function to get the list of pearl agents from Dune"""
    query = QueryBase(
        query_id=5194206,
    )
    # query_result = dune_client.run_query_dataframe(query=query)
    query_result = dune_client.get_latest_result_dataframe(5194206)
    print(f"Pearl agents query result = {query_result.head()}")
    # keep only the columns: safe_address, service_id, service_owner
    query_result = query_result[["safe_address", "serviceId", "service_owner"]]
    # Remove duplicates
    query_result = query_result.drop_duplicates(
        subset=["safe_address", "service_owner", "serviceId"]
    )
    return query_result


def prepare_daa_data():
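    """Function to download the DAA query results and the list of pearl agents from Dune into parquet files"""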
    # patch_http_connection_pool()
    dune = setup_dune_python_client()
    # load_olas_agents_dataset_in_dune(dune)
    # Get the latest result from the DAA QS query
    df = get_latest_result_from_DAA_QS(dune)
    # Save the result to a parquet file
    df.to_parquet(
        ROOT_DIR / "latest_result_DAA_QS.parquet", index=False, compression="gzip"
    )
    # Get the latest result from the DAA Pearl query
    df = get_latest_result_from_DAA_Pearl(dune)
    # Save the result to a parquet file
    df.to_parquet(
        ROOT_DIR / "latest_result_DAA_Pearl.parquet", index=False, compression="gzip"
    )
    # Get the list of pearl agents
    df = get_the_list_of_pearl_agents(dune)
    # Save the result to a parquet file
    df.to_parquet(ROOT_DIR / "pearl_agents.parquet", index=False, compression="gzip")


def get_mech_requests_on_closed_markets_by_pearl_agents(
    trades_closed_markets: pd.DataFrame,
) -> pd.DataFrame:
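    """Function to filter the mech requests made by pearl agents on the given closed markets"""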
    # read the list of pearl agents
    pearl_agents = pd.read_parquet(ROOT_DIR / "pearl_agents.parquet")
    unique_addresses = pearl_agents["safe_address"].unique()

    # prepare a list of closed markets from trades_closed_markets
    closed_markets = trades_closed_markets.title.unique()

    # filter the mech requests done by agents on closed markets
    try:
        tools_df = pd.read_parquet(TMP_DIR / "tools.parquet")
        tools_df = prepare_tools(tools_df, total_included=False)
    except Exception as e:
        print(f"Error reading tools parquet file {e}")
        return None

    agents_activity = tools_df[tools_df["trader_address"].isin(unique_addresses)].copy()
    agents_activity = agents_activity[agents_activity["title"].isin(closed_markets)]
    if len(agents_activity) > 0:
        return agents_activity
    raise ValueError("No agents activity found on closed markets")


def get_trades_on_closed_markets_by_pearl_agents() -> pd.DataFrame:
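    """Function to filter the trades made by pearl agents on closed markets"""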
    # read the list of pearl agents
    pearl_agents = pd.read_parquet(ROOT_DIR / "pearl_agents.parquet")
    unique_addresses = pearl_agents["safe_address"].unique()

    # read the trades datasource on closed markets
    all_trades_on_closed_markets = read_all_trades_profitability()

    # filter the trades done by pearl agents
    agent_trades_df = all_trades_on_closed_markets[
        all_trades_on_closed_markets["trader_address"].isin(unique_addresses)
    ].copy()
    if len(agent_trades_df) > 0:
        return agent_trades_df
    raise ValueError("No trades found for the pearl agents")


def compute_markets_agent_roi(
    agent_trades: pd.DataFrame,
    mech_calls: pd.DataFrame,
    agent: str,
    period: str,
    period_value: datetime,
) -> dict:
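    """Function to compute the agent's ROI (net_earnings / total_costs) from its trades and mech calls"""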
    # ROI formula net_earnings/total_costs
    total_earnings = agent_trades.earnings.sum()
    total_market_fees = agent_trades.trade_fee_amount.sum()
    total_mech_fees = len(mech_calls) * DEFAULT_MECH_FEE
    total_bet_amount = agent_trades.collateral_amount.sum()
    total_costs = total_bet_amount + total_market_fees + total_mech_fees
    net_earnings = total_earnings - total_costs
    if total_costs == 0:
        raise ValueError(f"Total costs for agent {agent} are zero")
    roi = net_earnings / total_costs
    if period == "week":
        return {
            "trader_address": agent,
            "week_start": period_value,
            "roi": roi,
            "net_earnings": net_earnings,
            "earnings": total_earnings,
            "total_bet_amount": total_bet_amount,
            "total_mech_calls": len(mech_calls),
            "nr_trades": len(agent_trades),
        }
    if period == "day":
        return {
            "trader_address": agent,
            "creation_date": period_value,
            "roi": roi,
            "net_earnings": net_earnings,
            "earnings": total_earnings,
            "total_bet_amount": total_bet_amount,
            "total_mech_calls": len(mech_calls),
            "nr_trades": len(agent_trades),
        }
    raise ValueError(
        f"Invalid period {period} for agent {agent}. Expected 'week' or 'day'."
    )


def prepare_agents_data() -> Tuple[pd.DataFrame, pd.DataFrame]:
    """Function to prepare the agents data for the predict ROI KPIs computation"""
    # Get the trades done by pearl agents on closed markets
    agent_trades = get_trades_on_closed_markets_by_pearl_agents()
    print(
        f"Number of trades done by pearl agents on closed markets: {len(agent_trades)}"
    )
    agent_trades["creation_timestamp"] = pd.to_datetime(
        agent_trades["creation_timestamp"]
    )
    agent_trades["creation_timestamp"] = agent_trades[
        "creation_timestamp"
    ].dt.tz_convert("UTC")
    agent_trades["creation_date"] = agent_trades["creation_timestamp"].dt.date
    agent_trades = agent_trades.sort_values(by="creation_timestamp", ascending=True)
    # Get the mech requests done by pearl agents on closed markets
    agent_mech_requests = get_mech_requests_on_closed_markets_by_pearl_agents(
        agent_trades
    )
    agent_mech_requests["request_time"] = pd.to_datetime(
        agent_mech_requests["request_time"], utc=True
    )
    agent_mech_requests = agent_mech_requests.sort_values(
        by="request_time", ascending=True
    )
    agent_mech_requests["request_date"] = agent_mech_requests["request_time"].dt.date
    print(
        f"Number of mech requests done by pearl agents on closed markets: {len(agent_mech_requests)}"
    )

    return agent_trades, agent_mech_requests


def compute_weekly_avg_roi_pearl_agents(
    agent_trades, agent_mech_requests
) -> pd.DataFrame:
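    """Function to compute the average weekly ROI across pearl agents on closed markets"""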
    agent_trades["week_start"] = (
        agent_trades["creation_timestamp"].dt.to_period("W").dt.start_time
    )

    grouped_trades = agent_trades.groupby("week_start")
    contents = []
    agents = agent_trades.trader_address.unique()
    # Iterate through the groups (each group represents a week)
    for week, week_data in grouped_trades:
        print(f"Week: {week}")  # Print the week identifier

        # for all closed markets
        closed_markets = week_data.title.unique()
        for agent in agents:
            # compute all trades done by the agent on those markets, no matter from which week
            agent_markets_data = agent_trades.loc[
                (agent_trades["trader_address"] == agent)
                & (agent_trades["title"].isin(closed_markets))
            ]
            if len(agent_markets_data) == 0:
                # no betting activity
                continue
            # filter mech requests done by the agent on those markets
            agent_mech_calls = agent_mech_requests.loc[
                (agent_mech_requests["trader_address"] == agent)
                & (agent_mech_requests["title"].isin(closed_markets))
            ]

            # compute the ROI for those markets, that trader and that week
            try:
                # Convert the dictionary to DataFrame before appending
                roi_dict = compute_markets_agent_roi(
                    agent_markets_data, agent_mech_calls, agent, "week", week
                )
                contents.append(pd.DataFrame([roi_dict]))
            except ValueError as e:
                print(f"Skipping ROI calculation: {e}")
                continue

    weekly_agents_data = pd.concat(contents, ignore_index=True)
    # average the ROI for all samples (at the trader/market level) in that week
    weekly_avg_roi_data = (
        weekly_agents_data.groupby("week_start")["roi"]
        .mean()
        .reset_index(name="avg_weekly_roi")
    )
    return weekly_avg_roi_data


def compute_total_roi_pearl_agents(agent_trades, agent_mech_requests) -> pd.DataFrame:
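    """Function to compute the total ROI per pearl agent and save it to total_roi.parquet"""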
    closed_markets = agent_trades.title.unique()
    contents = []
    agents = agent_trades.trader_address.unique()
    for agent in agents:
        # compute all trades done by the agent on those markets, no matter from which week
        agent_markets_data = agent_trades.loc[
            (agent_trades["trader_address"] == agent)
            & (agent_trades["title"].isin(closed_markets))
        ]
        if len(agent_markets_data) == 0:
            # no betting activity
            continue
        # filter mech requests done by the agent on those markets
        agent_mech_calls = agent_mech_requests.loc[
            (agent_mech_requests["trader_address"] == agent)
            & (agent_mech_requests["title"].isin(closed_markets))
        ]

        # compute the total ROI for those markets and that trader
        try:
            # Convert the dictionary to DataFrame before appending
            roi_dict = compute_markets_agent_roi(
                agent_markets_data, agent_mech_calls, agent, "day", None
            )
            contents.append(pd.DataFrame([roi_dict]))
        except ValueError as e:
            print(f"Skipping ROI calculation: {e}")
            continue
    total_roi_data = pd.concat(contents, ignore_index=True)
    total_roi_data.to_parquet(ROOT_DIR / "total_roi.parquet")
    return total_roi_data


def compute_two_weeks_rolling_avg_roi_pearl_agents(
    agents_trades: pd.DataFrame, agents_mech_requests: pd.DataFrame
) -> pd.DataFrame:
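    """Function to compute a two-weeks rolling average ROI across pearl agents"""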
    grouped_trades = agents_trades.groupby("creation_date")
    contents = []
    agents = agents_trades.trader_address.unique()
    # Iterate through the groups (each group represents a day)
    for day, day_data in grouped_trades:
        # take all closed markets in two weeks before that day
        print(f"Day: {day}")  # Print the day identifier
        two_weeks_ago = day - timedelta(days=14)
        two_weeks_data = agents_trades.loc[
            (agents_trades["creation_date"] >= two_weeks_ago)
            & (agents_trades["creation_date"] <= day)
        ]
        if len(two_weeks_data) == 0:
            # no betting activity
            continue
        # all (closed) markets traded within the two-week window
        closed_markets = two_weeks_data.title.unique()
        for agent in agents:
            # take all the agent's trades on those markets, no matter from which day
            agent_markets_data = agents_trades.loc[
                (agents_trades["trader_address"] == agent)
                & (agents_trades["title"].isin(closed_markets))
            ]
            if len(agent_markets_data) == 0:
                # no betting activity
                continue

            # filter mech requests done by the agent on those markets
            agent_mech_calls = agents_mech_requests.loc[
                (agents_mech_requests["trader_address"] == agent)
                & (agents_mech_requests["title"].isin(closed_markets))
            ]
            # compute the ROI for these markets, that trader and for this period
            try:
                # Convert the dictionary to DataFrame before appending
                roi_dict = compute_markets_agent_roi(
                    agent_markets_data, agent_mech_calls, agent, "day", day
                )
                contents.append(pd.DataFrame([roi_dict]))
            except ValueError as e:
                print(f"Skipping ROI calculation: {e}")
                continue
    two_weeks_avg_data = pd.concat(contents, ignore_index=True)

    two_weeks_rolling_avg_roi = (
        two_weeks_avg_data.groupby("creation_date")["roi"]
        .mean()
        .reset_index(name="two_weeks_avg_roi")
    )
    return two_weeks_rolling_avg_roi


if __name__ == "__main__":
    prepare_daa_data()
    prepare_predict_services_dataset()
    dune = setup_dune_python_client()
    # load_predict_services_file(dune_client=dune)
    agents_trades, agents_mech_requests = prepare_agents_data()
    # compute_total_roi_pearl_agents(
    #     agent_trades=agents_trades, agent_mech_requests=agents_mech_requests
    # )
    weekly_avg = compute_weekly_avg_roi_pearl_agents(
        agents_trades, agents_mech_requests
    )
    print(weekly_avg.head())
    # save the result to a parquet file
    weekly_avg.to_parquet(ROOT_DIR / "weekly_avg_roi_pearl_agents.parquet")

    two_weeks_avg = compute_two_weeks_rolling_avg_roi_pearl_agents(
        agents_trades, agents_mech_requests
    )
    print(two_weeks_avg.head())
    # save the result to a parquet file
    two_weeks_avg.to_parquet(ROOT_DIR / "two_weeks_avg_roi_pearl_agents.parquet")