Spaces:
Sleeping
Sleeping
Gilmullin Almaz
committed on
Commit
·
4d0485b
1
Parent(s):
511ff8c
cache: non-hashing tree exclusion
Browse files
app.py
CHANGED
|
@@ -417,17 +417,12 @@ if submit_planning:
|
|
| 417 |
# route_score = round(tree.route_score(node_id), 3)
|
| 418 |
# st.image(get_route_svg(tree, node_id), caption=f"Route {node_id}; {num_steps} steps; Route score: {route_score}")
|
| 419 |
|
| 420 |
-
@st.cache_data
|
| 421 |
def prepare_clustering_data(tree):
|
| 422 |
-
"""Pre-compute and cache the clustering data in chunks"""
|
| 423 |
try:
|
| 424 |
-
# Free memory before starting
|
| 425 |
gc.collect()
|
| 426 |
-
|
| 427 |
-
# Process in chunks
|
| 428 |
chunk_size = 10
|
| 429 |
super_cgrs_dict = {}
|
| 430 |
-
|
| 431 |
for i in range(0, len(tree.winning_nodes), chunk_size):
|
| 432 |
chunk = list(tree.winning_nodes)[i:i+chunk_size]
|
| 433 |
temp_dict = {node: tree.synthesis_route(node) for node in chunk}
|
|
@@ -436,7 +431,6 @@ if submit_planning:
|
|
| 436 |
del temp_dict
|
| 437 |
gc.collect()
|
| 438 |
|
| 439 |
-
# Process reduced CGRs in chunks
|
| 440 |
reduced_super_cgrs_dict = {}
|
| 441 |
for i in range(0, len(super_cgrs_dict), chunk_size):
|
| 442 |
keys = list(super_cgrs_dict.keys())[i:i+chunk_size]
|
|
@@ -448,7 +442,6 @@ if submit_planning:
|
|
| 448 |
|
| 449 |
del super_cgrs_dict
|
| 450 |
gc.collect()
|
| 451 |
-
|
| 452 |
return reduced_super_cgrs_dict
|
| 453 |
except Exception as e:
|
| 454 |
st.error(f"Error in prepare_clustering_data: {str(e)}")
|
|
|
|
| 417 |
# route_score = round(tree.route_score(node_id), 3)
|
| 418 |
# st.image(get_route_svg(tree, node_id), caption=f"Route {node_id}; {num_steps} steps; Route score: {route_score}")
|
| 419 |
|
| 420 |
+
@st.cache_data(hash_funcs={Tree: lambda _: None})
|
| 421 |
def prepare_clustering_data(tree):
|
|
|
|
| 422 |
try:
|
|
|
|
| 423 |
gc.collect()
|
|
|
|
|
|
|
| 424 |
chunk_size = 10
|
| 425 |
super_cgrs_dict = {}
|
|
|
|
| 426 |
for i in range(0, len(tree.winning_nodes), chunk_size):
|
| 427 |
chunk = list(tree.winning_nodes)[i:i+chunk_size]
|
| 428 |
temp_dict = {node: tree.synthesis_route(node) for node in chunk}
|
|
|
|
| 431 |
del temp_dict
|
| 432 |
gc.collect()
|
| 433 |
|
|
|
|
| 434 |
reduced_super_cgrs_dict = {}
|
| 435 |
for i in range(0, len(super_cgrs_dict), chunk_size):
|
| 436 |
keys = list(super_cgrs_dict.keys())[i:i+chunk_size]
|
|
|
|
| 442 |
|
| 443 |
del super_cgrs_dict
|
| 444 |
gc.collect()
|
|
|
|
| 445 |
return reduced_super_cgrs_dict
|
| 446 |
except Exception as e:
|
| 447 |
st.error(f"Error in prepare_clustering_data: {str(e)}")
|