Skanislav committed on
Commit ·
d7ec263
1
Parent(s): 4409d1e
fix: merge files cold start
Browse files- scripts/mech_request_utils.py +14 -11
scripts/mech_request_utils.py
CHANGED
|
@@ -192,10 +192,11 @@ def collect_all_mech_requests(
|
|
| 192 |
)
|
| 193 |
request_query = REQUESTS_QUERY_FILTER
|
| 194 |
transport = RequestsHTTPTransport(url=mech_subgraph_url)
|
| 195 |
-
client = Client(transport=transport, fetch_schema_from_transport=True)
|
| 196 |
|
| 197 |
id_gt = "0x00"
|
| 198 |
nr_errors = 0
|
|
|
|
| 199 |
while True:
|
| 200 |
variables = {
|
| 201 |
"sender_not_in": [FPMM_QS_CREATOR, FPMM_PEARL_CREATOR],
|
|
@@ -239,6 +240,7 @@ def fetch_with_retry(client, query, variables, max_retries=5):
|
|
| 239 |
try:
|
| 240 |
return client.execute(gql(query), variable_values=variables)
|
| 241 |
except Exception as e:
|
|
|
|
| 242 |
if attempt == max_retries - 1:
|
| 243 |
raise e
|
| 244 |
wait_time = (2**attempt) + uniform(0, 1) # exponential backoff with jitter
|
|
@@ -263,7 +265,7 @@ def collect_all_mech_delivers(
|
|
| 263 |
deliver_query = DELIVERS_QUERY_NO_FILTER
|
| 264 |
|
| 265 |
transport = RequestsHTTPTransport(url=mech_subgraph_url)
|
| 266 |
-
client = Client(transport=transport, fetch_schema_from_transport=True)
|
| 267 |
to_block = (
|
| 268 |
to_block + MECH_FROM_BLOCK_RANGE
|
| 269 |
) # there is a delay between deliver and request
|
|
@@ -324,7 +326,7 @@ def collect_missing_delivers(
|
|
| 324 |
)
|
| 325 |
missing_query = MISSING_DELIVERS_QUERY
|
| 326 |
transport = RequestsHTTPTransport(url=mech_subgraph_url)
|
| 327 |
-
client = Client(transport=transport, fetch_schema_from_transport=True)
|
| 328 |
|
| 329 |
variables = {
|
| 330 |
"requestId": request_id,
|
|
@@ -452,20 +454,21 @@ def write_mech_events_to_file(
|
|
| 452 |
|
| 453 |
|
| 454 |
def merge_json_files(old_file: str, new_file: str):
|
| 455 |
-
# read old file
|
| 456 |
-
with open(JSON_DATA_DIR / old_file, "r") as f:
|
| 457 |
-
old_data = json.load(f)
|
| 458 |
-
|
| 459 |
-
# read the new file
|
| 460 |
with open(JSON_DATA_DIR / new_file, "r") as f:
|
| 461 |
new_data = json.load(f)
|
| 462 |
|
| 463 |
-
|
| 464 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 465 |
|
| 466 |
# Save the merged JSON file
|
| 467 |
print(f"{old_file} updated")
|
| 468 |
-
save_json_file(old_data, old_file)
|
| 469 |
|
| 470 |
|
| 471 |
def clean_mech_delivers(requests_filename: str, delivers_filename: str) -> None:
|
|
|
|
| 192 |
)
|
| 193 |
request_query = REQUESTS_QUERY_FILTER
|
| 194 |
transport = RequestsHTTPTransport(url=mech_subgraph_url)
|
| 195 |
+
client = Client(transport=transport, fetch_schema_from_transport=False)
|
| 196 |
|
| 197 |
id_gt = "0x00"
|
| 198 |
nr_errors = 0
|
| 199 |
+
last_id_gt = 0
|
| 200 |
while True:
|
| 201 |
variables = {
|
| 202 |
"sender_not_in": [FPMM_QS_CREATOR, FPMM_PEARL_CREATOR],
|
|
|
|
| 240 |
try:
|
| 241 |
return client.execute(gql(query), variable_values=variables)
|
| 242 |
except Exception as e:
|
| 243 |
+
print(f"Error while getting the response: {e}")
|
| 244 |
if attempt == max_retries - 1:
|
| 245 |
raise e
|
| 246 |
wait_time = (2**attempt) + uniform(0, 1) # exponential backoff with jitter
|
|
|
|
| 265 |
deliver_query = DELIVERS_QUERY_NO_FILTER
|
| 266 |
|
| 267 |
transport = RequestsHTTPTransport(url=mech_subgraph_url)
|
| 268 |
+
client = Client(transport=transport, fetch_schema_from_transport=False)
|
| 269 |
to_block = (
|
| 270 |
to_block + MECH_FROM_BLOCK_RANGE
|
| 271 |
) # there is a delay between deliver and request
|
|
|
|
| 326 |
)
|
| 327 |
missing_query = MISSING_DELIVERS_QUERY
|
| 328 |
transport = RequestsHTTPTransport(url=mech_subgraph_url)
|
| 329 |
+
client = Client(transport=transport, fetch_schema_from_transport=False)
|
| 330 |
|
| 331 |
variables = {
|
| 332 |
"requestId": request_id,
|
|
|
|
| 454 |
|
| 455 |
|
| 456 |
def merge_json_files(old_file: str, new_file: str):
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 457 |
with open(JSON_DATA_DIR / new_file, "r") as f:
|
| 458 |
new_data = json.load(f)
|
| 459 |
|
| 460 |
+
try:
|
| 461 |
+
with open(JSON_DATA_DIR / old_file, "r") as f:
|
| 462 |
+
old_data = json.load(f)
|
| 463 |
+
old_data.update(new_data)
|
| 464 |
+
data_to_save = old_data
|
| 465 |
+
except FileNotFoundError:
|
| 466 |
+
# if no old file exists, just use new data
|
| 467 |
+
data_to_save = new_data
|
| 468 |
|
| 469 |
# Save the merged JSON file
|
| 470 |
print(f"{old_file} updated")
|
| 471 |
+
save_json_file(data_to_save, old_file)
|
| 472 |
|
| 473 |
|
| 474 |
def clean_mech_delivers(requests_filename: str, delivers_filename: str) -> None:
|