import json
import sys
from typing import Any, List, Optional
from utils import RPC, ROOT_DIR, TMP_DIR, JSON_DATA_DIR
import requests
from tqdm import tqdm
from web3 import Web3
import pandas as pd
import pickle
import os
import gzip
import shutil
from concurrent.futures import ThreadPoolExecutor, as_completed
NUM_WORKERS = 30
DEPRECATED_STAKING_PROGRAMS = {
"quickstart_alpha_everest": "0x5add592ce0a1B5DceCebB5Dcac086Cd9F9e3eA5C",
"quickstart_alpha_alpine": "0x2Ef503950Be67a98746F484DA0bBAdA339DF3326",
"quickstart_alpha_coastal": "0x43fB32f25dce34EB76c78C7A42C8F40F84BCD237",
}
# these staking programs are only on the Gnosis chain
STAKING_PROGRAMS_QS = {
"quickstart_beta_hobbyist": "0x389B46c259631Acd6a69Bde8B6cEe218230bAE8C",
"quickstart_beta_hobbyist_2": "0x238EB6993b90a978ec6AAD7530d6429c949C08DA",
"quickstart_beta_expert": "0x5344B7DD311e5d3DdDd46A4f71481bD7b05AAA3e",
"quickstart_beta_expert_2": "0xb964e44c126410df341ae04B13aB10A985fE3513",
"quickstart_beta_expert_3": "0x80faD33Cadb5F53f9D29F02Db97D682E8b101618",
"quickstart_beta_expert_4": "0xaD9d891134443B443D7F30013c7e14Fe27F2E029",
"quickstart_beta_expert_5": "0xE56dF1E563De1B10715cB313D514af350D207212",
"quickstart_beta_expert_6": "0x2546214aEE7eEa4bEE7689C81231017CA231Dc93",
"quickstart_beta_expert_7": "0xD7A3C8b975f71030135f1a66e9e23164d54fF455",
"quickstart_beta_expert_8": "0x356C108D49C5eebd21c84c04E9162de41933030c",
"quickstart_beta_expert_9": "0x17dBAe44BC5618Cc254055b386A29576b4F87015",
"quickstart_beta_expert_10": "0xB0ef657b8302bd2c74B6E6D9B2b4b39145b19c6f",
"quickstart_beta_expert_11": "0x3112c1613eAC3dBAE3D4E38CeF023eb9E2C91CF7",
"quickstart_beta_expert_12": "0xF4a75F476801B3fBB2e7093aCDcc3576593Cc1fc",
"quickstart_beta_expert_15_mech_marketplace": "0x88eB38FF79fBa8C19943C0e5Acfa67D5876AdCC1",
"quickstart_beta_expert_16_mech_marketplace": "0x6c65430515c70a3f5E62107CC301685B7D46f991",
"quickstart_beta_expert_17": "0x1430107A785C3A36a0C1FC0ee09B9631e2E72aFf",
"quickstart_beta_expert_18": "0x041e679d04Fc0D4f75Eb937Dea729Df09a58e454",
"quickstart_Beta_Mech_MarketPlace_expert_1": "0xdB9E2713c3dA3C403F2eA6E570eB978b00304e9E",
"quickstart_Beta_Mech_MarketPlace_expert_2": "0x1E90522b45c771DCF5f79645B9e96551d2ECaF62",
"quickstart_Beta_Mech_MarketPlace_expert_3": "0x75eeca6207be98cac3fde8a20ecd7b01e50b3472",
"quickstart_Beta_Mech_MarketPlace_expert_4": "0x9c7f6103e3a72e4d1805b9c683ea5b370ec1a99f",
"quickstart_Beta_Mech_MarketPlace_expert_5": "0xcdc603e0ee55aae92519f9770f214b2be4967f7d",
"quickstart_Beta_Mech_MarketPlace_expert_6": "0x22d6cd3d587d8391c3aae83a783f26c67ab54a85",
"quickstart_Beta_Mech_MarketPlace_expert_7": "0xaaecdf4d0cbd6ca0622892ac6044472f3912a5f3",
"quickstart_Beta_Mech_MarketPlace_expert_8": "0x168aed532a0cd8868c22fc77937af78b363652b1",
}
STAKING_PROGRAMS_PEARL = {
"pearl_alpha": "0xEE9F19b5DF06c7E8Bfc7B28745dcf944C504198A",
"pearl_beta": "0xeF44Fb0842DDeF59D37f85D61A1eF492bbA6135d",
"pearl_beta_2": "0x1c2F82413666d2a3fD8bC337b0268e62dDF67434",
"pearl_beta_3": "0xBd59Ff0522aA773cB6074ce83cD1e4a05A457bc1",
"pearl_beta_4": "0x3052451e1eAee78e62E169AfdF6288F8791F2918",
"pearl_beta_5": "0x4Abe376Fda28c2F43b84884E5f822eA775DeA9F4",
"pearl_beta_6": "0x6C6D01e8eA8f806eF0c22F0ef7ed81D868C1aB39",
"pearl_beta_mech_marketplace": "0xDaF34eC46298b53a3d24CBCb431E84eBd23927dA",
"pearl_beta-mech_marketplace": "0xAb10188207Ea030555f53C8A84339A92f473aa5e",
"pearl_beta_mech_marketplace_2": "0x8d7bE092d154b01d404f1aCCFA22Cef98C613B5D",
"pearl_beta_mech_marketplace_3": "0x9d00a0551f20979080d3762005c9b74d7aa77b85",
"pearl_beta_mech_marketplace_4": "0xE2f80659dB1069f3B6a08af1A62064190c119543",
}
SERVICE_REGISTRY_ADDRESS = "0x9338b5153AE39BB89f50468E608eD9d764B755fD"
def _get_contract(address: str) -> Any:
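    """Return a Web3 contract instance for the given address, using the configured RPC and the ABI fetched from Blockscout."""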
w3 = Web3(Web3.HTTPProvider(RPC))
abi = _get_abi(address)
contract = w3.eth.contract(address=Web3.to_checksum_address(address), abi=abi)
return contract
def _get_abi(address: str) -> List:
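    """Fetch the contract ABI from the Gnosis Blockscout API; return an empty list if none is found."""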
contract_abi_url = (
"https://gnosis.blockscout.com/api/v2/smart-contracts/{contract_address}"
)
response = requests.get(contract_abi_url.format(contract_address=address)).json()
if "result" in response:
result = response["result"]
try:
abi = json.loads(result)
except json.JSONDecodeError:
print("Error: Failed to parse 'result' field as JSON")
sys.exit(1)
else:
abi = response.get("abi")
return abi if abi else []
def get_service_safe(service_id: int) -> str:
"""Gets the service Safe"""
service_registry = _get_contract(SERVICE_REGISTRY_ADDRESS)
service_safe_address = service_registry.functions.getService(service_id).call()[1]
return service_safe_address
def list_contract_functions(contract):
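    """Return the names of all functions exposed in the contract ABI."""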
function_names = []
for item in contract.abi:
if item.get("type") == "function":
function_names.append(item.get("name"))
return function_names
def get_service_data(service_registry: Any, service_id: int) -> Optional[dict]:
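    """Query the service registry for one service id and return a {service_id: {...}} dict
    with its safe address, state, owner and agent id, or None if the service is not deployed
    or the call fails."""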
tmp_map = {}
# Get the list of addresses
print(f"getting addresses from service id ={service_id}")
# available_functions = list_contract_functions(service_registry)
# print("Available Contract Functions:")
# for func in available_functions:
# print(f"- {func}")
try:
data = service_registry.functions.getService(service_id).call()
try:
owner_data = service_registry.functions.ownerOf(service_id).call()
except Exception as e:
tqdm.write(f"Error: no owner data infor from {service_id}")
return None
# print(f"owner data = {owner_data}")
address = data[1]
state = data[-2]
agent_id = data[7][0]
# print(f"address = {address}")
# print(f"state={state}")
print(f"agent_id={agent_id}")
# PEARL trade
if state != 4: # the service was not deployed yet
# non-staking?
print(
f"The service with id {service_id} is not deployed yet. state={state}"
)
return None
if address != "0x0000000000000000000000000000000000000000":
tmp_map[service_id] = {
"safe_address": address,
"state": state,
"owner_address": owner_data,
"agent_id": agent_id,
}
except Exception as e:
tqdm.write(f"Error fetching service id {service_id}: {e}")
return None
return tmp_map
def update_service_map(start: int = 1, end: int = 2800):
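    """Fetch service data for ids in [start, end) in parallel and merge the results into service_map.pkl."""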
if os.path.exists(ROOT_DIR / "service_map.pkl"):
with open(ROOT_DIR / "service_map.pkl", "rb") as f:
service_map = pickle.load(f)
else:
service_map = {}
print(f"updating service map from service id={start}")
# we do not know which is the last service id right now
service_registry = _get_contract(SERVICE_REGISTRY_ADDRESS)
with ThreadPoolExecutor(max_workers=NUM_WORKERS) as executor:
futures = []
for service_id in range(start, end):
futures.append(
executor.submit(
get_service_data,
service_registry,
service_id,
)
)
for future in tqdm(
as_completed(futures),
total=len(futures),
desc=f"Fetching all service data from contracts",
):
partial_dict = future.result()
if partial_dict:
service_map.update(partial_dict)
print(f"length of service map {len(service_map)}")
with open(ROOT_DIR / "service_map.pkl", "wb") as f:
pickle.dump(service_map, f)
def check_owner_staking_contract(owner_address: str) -> str:
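    """Classify an owner address as quickstart, pearl or non_staking by matching it against the known staking contracts."""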
staking = "non_staking"
owner_address = owner_address.lower()
# check quickstart staking contracts
qs_list = [x.lower() for x in STAKING_PROGRAMS_QS.values()]
if owner_address in qs_list:
return "quickstart"
# check pearl staking contracts
pearl_list = [x.lower() for x in STAKING_PROGRAMS_PEARL.values()]
if owner_address in pearl_list:
return "pearl"
# check legacy staking contracts
deprec_list = [x.lower() for x in DEPRECATED_STAKING_PROGRAMS.values()]
if owner_address in deprec_list:
return "quickstart"
return staking
def add_predict_agent_category(owner_address: str) -> str:
"""Function to add the predict agent category to the service map"""
owner_address = owner_address.lower()
# check if it is a predict agent
pearl_list = [x.lower() for x in STAKING_PROGRAMS_PEARL.values()]
if owner_address in pearl_list:
return "pearl"
return "quickstart"
def get_trader_address_staking(trader_address: str, service_map: dict) -> str:
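    """Find the service whose safe address matches the trader address and return its staking label, or non_Olas if no service is found."""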
# check if there is any service id linked with that trader address
found_key = -1
    # agent_id = 25 <- requirement for a service id to be from Pearl or Quickstart
for key, value in service_map.items():
if value["safe_address"].lower() == trader_address.lower():
# found a service
found_key = key
break
if found_key == -1:
return "non_Olas"
owner = service_map[found_key]["owner_address"]
return check_owner_staking_contract(owner_address=owner)
def label_trades_by_staking(trades_df: pd.DataFrame, start: Optional[int] = None) -> pd.DataFrame:
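    """Refresh the service map and add a "staking" column to trades_df, labeling each trader
    as pearl, quickstart, non_staking or non_Olas; return the labeled DataFrame."""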
with open(ROOT_DIR / "service_map.pkl", "rb") as f:
service_map = pickle.load(f)
# get the last service id
keys = service_map.keys()
if start is None:
last_key = max(keys)
else:
last_key = start
print(f"last service key = {last_key}")
    update_service_map(start=last_key)
    # reload the updated map so newly fetched services are included in the labeling
    with open(ROOT_DIR / "service_map.pkl", "rb") as f:
        service_map = pickle.load(f)
all_traders = trades_df.trader_address.unique()
trades_df["staking"] = ""
for trader in tqdm(all_traders, desc="Labeling traders by staking", unit="trader"):
# tqdm.write(f"checking trader {trader}")
staking_label = get_trader_address_staking(trader, service_map)
if staking_label:
trades_df.loc[trades_df["trader_address"] == trader, "staking"] = (
staking_label
)
# tqdm.write(f"statking label {staking_label}")
return trades_df
def generate_retention_activity_file():
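    """Build retention_activity.parquet (and a gzipped copy) from tools.parquet,
    labeled by staking program and bucketed by week."""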
tools = pd.read_parquet(TMP_DIR / "tools.parquet")
tools["request_time"] = pd.to_datetime(tools["request_time"])
tools["request_date"] = tools["request_time"].dt.date
tools = tools.sort_values(by="request_time", ascending=True)
    reduced_tools_df = tools[
        ["trader_address", "request_time", "market_creator", "request_date"]
    ].copy()
print(f"length of reduced tools before labeling = {len(reduced_tools_df)}")
reduced_tools_df = label_trades_by_staking(trades_df=reduced_tools_df, start=2)
print(f"labeling of tools activity. {reduced_tools_df.staking.value_counts()}")
print(f"length of reduced tools after labeling = {len(reduced_tools_df)}")
reduced_tools_df = reduced_tools_df.sort_values(by="request_time", ascending=True)
reduced_tools_df["month_year_week"] = (
pd.to_datetime(tools["request_time"])
.dt.to_period("W")
.dt.start_time.dt.strftime("%b-%d-%Y")
)
reduced_tools_df.to_parquet(ROOT_DIR / "retention_activity.parquet")
with open(ROOT_DIR / "retention_activity.parquet", "rb") as f_in:
with gzip.open(ROOT_DIR / "retention_activity.parquet.gz", "wb") as f_out:
shutil.copyfileobj(f_in, f_out)
return True
def check_list_addresses(address_list: list):
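    """Label each address in address_list as Olas or non_Olas depending on whether it is the safe address of a known service."""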
with open(ROOT_DIR / "service_map.pkl", "rb") as f:
service_map = pickle.load(f)
# check if it is part of any service id on the map
mapping = {}
print(f"length of service map={len(service_map)}")
keys = service_map.keys()
last_key = max(keys)
print(f"last service key = {last_key}")
    update_service_map(start=last_key)
    # reload the updated map so newly added services are included
    with open(ROOT_DIR / "service_map.pkl", "rb") as f:
        service_map = pickle.load(f)
    trader_types = []
    for trader_address in address_list:
        # reset the flag for every address so a previous match does not leak into the next one
        found_key = -1
        for key, value in service_map.items():
            if value["safe_address"].lower() == trader_address.lower():
                # found a service
                found_key = key
                mapping[trader_address] = "Olas"
                trader_types.append("Olas")
                break
        if found_key == -1:
            mapping[trader_address] = "non_Olas"
            trader_types.append("non_Olas")
return mapping
def check_service_map():
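    """Report service ids missing from service_map.pkl between 1 and the last known id."""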
with open(ROOT_DIR / "service_map.pkl", "rb") as f:
service_map = pickle.load(f)
# check if it is part of any service id on the map
mapping = {}
print(f"length of service map={len(service_map)}")
keys = service_map.keys()
last_key = max(keys)
print(f"last key ={last_key}")
missing_keys = 0
for i in range(1, last_key):
if i not in keys:
missing_keys += 1
print(f"missing key = {i}")
print(f"total missing keys = {missing_keys}")
if __name__ == "__main__":
# create_service_map()
# trades_df = pd.read_parquet(JSON_DATA_DIR / "all_trades_df.parquet")
# trades_df = trades_df.loc[trades_df["is_invalid"] == False]
# trades_df = label_trades_by_staking(trades_df=trades_df)
# print(trades_df.staking.value_counts())
# trades_df.to_parquet(TMP_DIR / "result_staking.parquet", index=False)
# generate_retention_activity_file()
# a_list = [
# "0x027592700fafc4db3221bb662d7bdc7f546a2bb5",
# "0x0845f4ad01a2f41da618848c7a9e56b64377965e",
# ]
# check_list_addresses(address_list=a_list)
update_service_map(start=1)
# check_service_map()