# Olas-predict-dataset / scripts / get_mech_info.py
# Author: cyberosa — "updating daily data" (commit 9bb36bd)
from string import Template
import shutil
import gzip
from typing import Any
from datetime import datetime, timedelta, UTC
from utils import (
SUBGRAPH_API_KEY,
measure_execution_time,
ROOT_DIR,
TMP_DIR,
NETWORK_SUBGRAPH_URL,
transform_to_datetime,
)
import requests
import pandas as pd
import numpy as np
from mech_request_utils import (
collect_all_mech_delivers,
collect_all_mech_requests,
clean_mech_delivers,
fix_duplicate_requestIds,
merge_requests_delivers,
get_ipfs_data,
merge_json_files,
)
from tools import remove_old_entries
import logging
SUBGRAPH_HEADERS = {
"Accept": "application/json, multipart/mixed",
"Content-Type": "application/json",
}
QUERY_BATCH_SIZE = 1000
DATETIME_60_DAYS_AGO = datetime.now(UTC) - timedelta(days=60)
DATETIME_10_DAYS_AGO = datetime.now(UTC) - timedelta(days=10)
DATETIME_10_HOURS_AGO = datetime.now(UTC) - timedelta(hours=10)
BLOCK_NUMBER = Template(
"""
{
blocks(
first: 1,
orderBy: timestamp,
orderDirection: asc,
where: {
timestamp_gte: "${timestamp_from}",
timestamp_lte: "${timestamp_to}"
}
){
id,
number,
}
}
"""
)
LATEST_BLOCK_QUERY = """
{
blocks(
first: 1,
orderBy: timestamp,
orderDirection: desc,
){
id,
number,
}
}
"""
def read_all_trades_profitability() -> pd.DataFrame | None:
    """Load the accumulated all_trades_profitability parquet.

    First tries the parquet under ROOT_DIR; if that fails, decompresses the
    gzipped copy and retries.

    Returns:
        The trades DataFrame, or None when both attempts fail.
    """
    try:
        return pd.read_parquet(ROOT_DIR / "all_trades_profitability.parquet")
    except Exception:
        pass  # fall through to the gzip fallback below
    try:
        # NOTE(review): the .gz is opened relative to the CWD while the
        # parquet is read from ROOT_DIR — presumably both resolve to the
        # same directory; confirm against how the script is launched.
        with gzip.open("all_trades_profitability.parquet.gz", "rb") as f_in, open(
            "all_trades_profitability.parquet", "wb"
        ) as f_out:
            shutil.copyfileobj(f_in, f_out)
        return pd.read_parquet(ROOT_DIR / "all_trades_profitability.parquet")
    except Exception as e:
        print(f"Error reading old trades parquet file {e}")
        return None
def fetch_last_block_number() -> dict:
    """Return the most recent block ({'id': ..., 'number': ...}) from the
    network subgraph.

    Raises:
        ValueError: when the subgraph response contains no blocks.
    """
    network_subgraph_url = NETWORK_SUBGRAPH_URL.substitute(
        subgraph_api_key=SUBGRAPH_API_KEY
    )
    query = LATEST_BLOCK_QUERY
    response = requests.post(
        network_subgraph_url,
        headers=SUBGRAPH_HEADERS,
        json={"query": query},
        timeout=300,
    )
    result_json = response.json()
    print(f"Response of the query={result_json}")
    # "data" is null (not missing) on GraphQL errors, so guard with `or {}`;
    # the default is an empty list, not "" — both have len 0 but only a list
    # is the correct type for the blocks payload.
    blocks = (result_json.get("data") or {}).get("blocks", [])
    if len(blocks) == 0:
        raise ValueError(f"The query {query} did not return any results")
    return blocks[0]
def fetch_block_number(timestamp_from: int, timestamp_to: int) -> dict:
    """Get a block number by its timestamp margins.

    Args:
        timestamp_from: inclusive lower bound (unix seconds).
        timestamp_to: inclusive upper bound (unix seconds).

    Returns:
        The oldest block dict ({'id': ..., 'number': ...}) inside the window.

    Raises:
        ValueError: when no block falls inside the window.
    """
    query = BLOCK_NUMBER.substitute(
        timestamp_from=timestamp_from, timestamp_to=timestamp_to
    )
    network_subgraph_url = NETWORK_SUBGRAPH_URL.substitute(
        subgraph_api_key=SUBGRAPH_API_KEY
    )
    response = requests.post(
        network_subgraph_url,
        headers=SUBGRAPH_HEADERS,
        json={"query": query},
        timeout=300,
    )
    result_json = response.json()
    print(f"Response of the query={result_json}")
    # "data" is null (not missing) on GraphQL errors, so guard with `or {}`;
    # use a list default instead of "" (both are len-0 but only a list is
    # the right type for the blocks payload).
    blocks = (result_json.get("data") or {}).get("blocks", [])
    if len(blocks) == 0:
        raise ValueError(f"The query {query} did not return any results")
    return blocks[0]
def update_json_files():
    """Fold each freshly downloaded JSON file into its accumulated counterpart."""
    pairs = (
        ("mech_requests.json", "new_mech_requests.json"),
        ("mech_delivers.json", "new_mech_delivers.json"),
        ("merged_requests.json", "new_merged_requests.json"),
        ("tools_info.json", "new_tools_info.json"),
    )
    for accumulated, fresh in pairs:
        merge_json_files(accumulated, fresh)
def clean_old_data_from_json_files():
    """Prune JSON caches of entries older than the oldest row in tools.parquet.

    The cutoff is the request_block of the earliest request_time in
    tools.parquet; every cached JSON file is rewritten in place. Errors are
    printed and swallowed (best-effort maintenance step).
    """
    try:
        tools = pd.read_parquet(TMP_DIR / "tools.parquet")
        tools["request_time"] = pd.to_datetime(tools["request_time"], utc=True)
        tools = tools.sort_values(by="request_time", ascending=True)
        tools["request_block"] = pd.to_numeric(tools["request_block"], errors="coerce")
        # oldest request still retained -> minimum block number to keep
        min_block_number = tools.iloc[0].request_block
        # same pruning applied to every cache file (was four copy-pasted calls)
        for filename in (
            "mech_requests.json",
            "mech_delivers.json",
            "merged_requests.json",
            "tools_info.json",
        ):
            remove_old_entries(
                input_file=filename,
                output_file=filename,
                min_block_number=min_block_number,
            )
    except Exception as e:
        print(f"Error cleaning json files {e}")
def update_all_trades_parquet(new_trades_df: pd.DataFrame) -> pd.DataFrame:
    """Merge new trades into the stored profitability data.

    Args:
        new_trades_df: freshly computed trades.

    Returns:
        The concatenation of old and new trades, deduplicated on trade_id
        (first occurrence — i.e. the stored row — wins).
    """
    old_trades_df = read_all_trades_profitability()
    if old_trades_df is None:
        # BUG fix: pd.concat([None, df]) raises TypeError; when the stored
        # data cannot be read, fall back to the new trades alone.
        print("No stored trades available; keeping only the new trades")
        old_trades_df = pd.DataFrame()
    merge_df = pd.concat([old_trades_df, new_trades_df], ignore_index=True)
    print(f"Initial length before removing duplicates in all_trades= {len(merge_df)}")
    merge_df.drop_duplicates("trade_id", inplace=True)
    print(f"Final length after removing duplicates in all_trades = {len(merge_df)}")
    return merge_df
def update_tools_parquet(new_tools_filename: str):
    """Merge newly downloaded tool records into TMP_DIR/tools.parquet.

    Deduplicates on (request_id, request_time) keeping the newest record,
    drops rows with unparseable block numbers, coerces request_block to
    int64 and rewrites the parquet in place.

    Args:
        new_tools_filename: parquet file (under ROOT_DIR) with new records.

    Returns:
        None; read errors are printed and abort the merge.
    """
    try:
        old_tools_df = pd.read_parquet(TMP_DIR / "tools.parquet")
    except Exception as e:
        print(f"Error reading old tools parquet file {e}")
        return None
    try:
        new_tools_df = pd.read_parquet(ROOT_DIR / new_tools_filename)
    except Exception as e:
        # BUG fix: message previously said "trades" for the tools file
        print(f"Error reading new tools parquet file {e}")
        return None
    # Schema drift: older snapshots predate the mech_address column
    if (
        "mech_address" in new_tools_df.columns
        and "mech_address" not in old_tools_df.columns
    ):
        old_tools_df["mech_address"] = None
    merge_df = pd.concat([old_tools_df, new_tools_df], ignore_index=True)
    print(f"Initial length before removing duplicates in tools= {len(merge_df)}")
    # keep="last" so the freshly downloaded record wins on conflicts
    merge_df.drop_duplicates(
        subset=["request_id", "request_time"], keep="last", inplace=True
    )
    print(f"Final length after removing duplicates in tools= {len(merge_df)}")
    # Normalize block numbers: drop unparseable rows, then store as int64
    merge_df["request_block"] = pd.to_numeric(
        merge_df["request_block"], errors="coerce"
    )
    merge_df = merge_df.dropna(subset=["request_block"])
    merge_df["request_block"] = merge_df["request_block"].astype("int64")
    merge_df.to_parquet(TMP_DIR / "tools.parquet", index=False)
def get_mech_info_2024() -> dict[str, Any]:
    """Query the subgraph to get the 2024 information from mech."""
    # NOTE(review): strptime yields a naive datetime, so .timestamp() uses the
    # machine's local timezone, while the rest of this module works in UTC —
    # confirm whether "Jan 1st, local time" is the intended anchor.
    jan_first = datetime.strptime("2024-01-01", "%Y-%m-%d")
    window_start = int(jan_first.timestamp())
    window_end = int((jan_first + timedelta(seconds=5)).timestamp())
    # expecting only one block inside the 5-second window
    block_info = fetch_block_number(window_start, window_end)
    raw_number = block_info.get("number", "")
    if raw_number == "":
        raise ValueError(
            "Could not find a valid block number for the first of January 2024"
        )
    jan_block_number = int(raw_number) if raw_number.isdigit() else raw_number
    return {
        # this block number is when the creator had its first tx ever, and after this mech's creation
        "0xff82123dfb52ab75c417195c5fdb87630145ae81": (
            "old_mech_abi.json",
            jan_block_number,
        ),
        # this block number is when this mech was created
        "0x77af31de935740567cf4ff1986d04b2c964a786a": (
            "new_mech_abi.json",
            jan_block_number,
        ),
    }
def get_last_block_number() -> int:
    """Return the number of the most recent block indexed by the subgraph."""
    # expecting only one block from the latest-block query
    raw_number = fetch_last_block_number().get("number", "")
    if raw_number == "":
        raise ValueError("Could not find a valid block number for last month data")
    return int(raw_number) if raw_number.isdigit() else raw_number
def get_last_60_days_block_number() -> int:
    """Return the block number closest to 60 days before import time."""
    window_start = int(DATETIME_60_DAYS_AGO.timestamp())
    # 5-second margin so the timestamp window is wide enough to hold a block
    window_end = int((DATETIME_60_DAYS_AGO + timedelta(seconds=5)).timestamp())
    # expecting only one block inside the window
    raw_number = fetch_block_number(window_start, window_end).get("number", "")
    if raw_number == "":
        raise ValueError("Could not find a valid block number for last month data")
    return int(raw_number) if raw_number.isdigit() else raw_number
def get_mech_info_last_60_days() -> dict[str, Any]:
    """Query the subgraph to get the last 60 days of information from mech."""
    start_block = get_last_60_days_block_number()
    print(f"last 60 days block number {start_block}")
    return {
        # this block number is when the creator had its first tx ever, and after this mech's creation
        "0xff82123dfb52ab75c417195c5fdb87630145ae81": (
            "old_mech_abi.json",
            start_block,
        ),
        # this block number is when this mech was created
        "0x77af31de935740567cf4ff1986d04b2c964a786a": (
            "new_mech_abi.json",
            start_block,
        ),
    }
@measure_execution_time
def get_mech_events_since_last_run(logger, mech_sandbox: bool = False):
    """Download only the new mech events since the last execution.

    The starting block is derived from the newest creation_timestamp stored
    in all_trades_profitability.parquet; events are collected from there up
    to the chain head, cleaned, merged and enriched with IPFS contents.

    Args:
        logger: logger passed through to the IPFS download step.
        mech_sandbox: forwarded to the collection helpers.

    Returns:
        The previous latest timestamp, or None when the stored data or the
        delivers collection is unavailable.

    Raises:
        ValueError: when no valid starting block number can be resolved.
    """
    # Read the latest date from stored data
    try:
        all_trades = read_all_trades_profitability()
        latest_timestamp = max(all_trades.creation_timestamp)
        print(f"Updating data since {latest_timestamp}")
    except Exception as e:
        # include the cause instead of swallowing it silently
        print(f"Error while reading the profitability parquet file: {e}")
        return None

    # Get the block number of the latest date (5-second window)
    five_seconds = np.timedelta64(5, "s")
    last_run_block_number = fetch_block_number(
        int(latest_timestamp.timestamp()),
        int((latest_timestamp + five_seconds).timestamp()),
    )
    # expecting only one block
    last_run_block_number = last_run_block_number.get("number", "")
    if last_run_block_number.isdigit():
        last_run_block_number = int(last_run_block_number)
    if last_run_block_number == "":
        raise ValueError("Could not find a valid block number for last collected data")
    last_block_number = get_last_block_number()

    # mech requests (dict and duplicate ids are persisted by the helper;
    # only the error count is needed here)
    req_output_file = "new_mech_requests.json"
    _requests_dict, _duplicated_req_ids, nr_errors = collect_all_mech_requests(
        from_block=last_run_block_number,
        to_block=last_block_number,
        filename=req_output_file,
        mech_sandbox=mech_sandbox,
    )
    print(f"NUMBER OF MECH REQUEST ERRORS={nr_errors}")

    # mech delivers
    deli_output_file = "new_mech_delivers.json"
    delivers_dict, _duplicated_ids, nr_errors = collect_all_mech_delivers(
        from_block=last_run_block_number,
        to_block=last_block_number,
        filename=deli_output_file,
        mech_sandbox=mech_sandbox,
    )
    print(f"NUMBER OF MECH DELIVER ERRORS={nr_errors}")
    if delivers_dict is None:
        return None

    # clean delivers
    clean_mech_delivers(req_output_file, deli_output_file)

    # solve duplicated requestIds (block map and not-found results were
    # previously bound to unused locals)
    fix_duplicate_requestIds(req_output_file, deli_output_file)

    # merge the two files into one source
    merged_output_file = "new_merged_requests.json"
    tools_output_file = "new_tools_info.json"
    merge_requests_delivers(req_output_file, deli_output_file, merged_output_file)

    # Add ipfs contents
    get_ipfs_data(merged_output_file, tools_output_file, logger)
    return latest_timestamp
if __name__ == "__main__":
    # Ad-hoc entry point: pull events newer than the stored data, against
    # the sandbox mech.
    script_logger = logging.getLogger(__name__)
    get_mech_events_since_last_run(script_logger, mech_sandbox=True)
    # result = get_mech_info_last_60_days()
    # print(result)