Gilmullin Almaz committed on
Commit
c45df67
·
1 Parent(s): 4d0485b

cash: hashing error try

Browse files
Files changed (1) hide show
  1. app.py +34 -6
app.py CHANGED
@@ -420,33 +420,61 @@ if submit_planning:
420
@st.cache_data(hash_funcs={Tree: lambda _: None})
def prepare_clustering_data(tree):
    """Build the reduced super-CGR dictionary for clustering from a search Tree.

    Routes for the tree's winning nodes are materialized in small chunks
    (chunk_size nodes at a time) so that intermediate dictionaries can be
    freed between chunks, keeping peak memory low.

    Parameters:
        tree: search tree exposing `winning_nodes` and `synthesis_route(node)`.
              Excluded from Streamlit cache hashing via `hash_funcs` above
              because Tree is not hashable by st.cache_data.

    Returns:
        dict mapping node -> reduced super-CGR, or None if any step failed
        (the error is surfaced to the UI via st.error).
    """
    try:
        gc.collect()
        chunk_size = 10
        super_cgrs_dict = {}

        # Materialize the node list once; rebuilding it per chunk made the
        # loop accidentally O(n^2) in the number of winning nodes.
        winning_nodes = list(tree.winning_nodes)
        for i in range(0, len(winning_nodes), chunk_size):
            chunk = winning_nodes[i:i + chunk_size]
            temp_dict = {node: tree.synthesis_route(node) for node in chunk}
            chunk_super_cgrs = reassign_nums_chunk(temp_dict)
            super_cgrs_dict.update(chunk_super_cgrs)
            # Drop the per-chunk routes immediately to cap peak memory.
            del temp_dict
            gc.collect()

        reduced_super_cgrs_dict = {}
        # Same hoisting as above: snapshot the keys once, slice per chunk.
        all_keys = list(super_cgrs_dict.keys())
        for i in range(0, len(all_keys), chunk_size):
            keys = all_keys[i:i + chunk_size]
            chunk_dict = {k: super_cgrs_dict[k] for k in keys}
            reduced_chunk = process_all_rs_cgrs(chunk_dict)
            reduced_super_cgrs_dict.update(reduced_chunk)
            del chunk_dict
            gc.collect()

        # The intermediate dict is no longer needed once reduction is done.
        del super_cgrs_dict
        gc.collect()
        return reduced_super_cgrs_dict
    except Exception as e:
        # Broad catch is deliberate here: this runs inside a Streamlit app
        # and any failure should be reported to the UI, not crash the page.
        st.error(f"Error in prepare_clustering_data: {str(e)}")
        return None
449
 
 
450
  def memory_status():
451
  """Get current memory status"""
452
  process = psutil.Process()
 
420
@st.cache_data(hash_funcs={Tree: lambda _: None})
def prepare_clustering_data(tree):
    """Build the reduced super-CGR dictionary for clustering from a search Tree.

    Winning-node routes are processed in fixed-size chunks (chunk_size) so the
    per-chunk intermediates can be deleted and garbage-collected, keeping peak
    memory low. Progress is traced with print statements (server-side log).

    Parameters:
        tree: search tree exposing `winning_nodes` and `synthesis_route(node)`.
              Excluded from Streamlit cache hashing via `hash_funcs` above
              because Tree is not hashable by st.cache_data.

    Returns:
        dict mapping node -> reduced super-CGR, or None if preparation failed
        (the error is also surfaced to the UI via st.error).
    """
    try:
        # Log the start and basic info from the Tree
        print("Starting clustering data preparation.")
        total_nodes = len(tree.winning_nodes)
        print(f"Total winning nodes: {total_nodes}")
        print(f"Tree id: {id(tree)}")

        chunk_size = 10
        super_cgrs_dict = {}

        # Materialize the node list once; rebuilding list(tree.winning_nodes)
        # inside the loop made chunking accidentally O(n^2).
        winning_nodes = list(tree.winning_nodes)

        # Process winning nodes in chunks
        for i in range(0, total_nodes, chunk_size):
            current_chunk = winning_nodes[i:i + chunk_size]
            print(f"Processing chunk {i // chunk_size + 1}: Nodes {current_chunk}")

            temp_dict = {}
            for node in current_chunk:
                try:
                    # Log before processing each node
                    print(f"Processing node {node}")
                    route = tree.synthesis_route(node)
                    temp_dict[node] = route
                    print(f"Node {node} processed successfully (route length: {len(route)}).")
                except Exception as e:
                    # Per-node failures are logged and skipped so one bad node
                    # does not abort the whole chunk.
                    print(f"Error processing node {node}: {e}")

            # Log before calling reassign_nums_chunk
            print(f"Calling reassign_nums_chunk for nodes: {list(temp_dict.keys())}")
            chunk_super_cgrs = reassign_nums_chunk(temp_dict)
            super_cgrs_dict.update(chunk_super_cgrs)
            print(f"Chunk {i // chunk_size + 1} processed. Keys: {list(chunk_super_cgrs.keys())}")

            # Drop per-chunk routes immediately to cap peak memory.
            del temp_dict
            gc.collect()

        # Process reduced CGRs in chunks
        reduced_super_cgrs_dict = {}
        # Same hoisting as above: snapshot keys once, slice per chunk.
        all_keys = list(super_cgrs_dict.keys())
        for i in range(0, len(all_keys), chunk_size):
            keys = all_keys[i:i + chunk_size]
            chunk_dict = {k: super_cgrs_dict[k] for k in keys}
            print(f"Reducing chunk for keys: {keys}")
            reduced_chunk = process_all_rs_cgrs(chunk_dict)
            reduced_super_cgrs_dict.update(reduced_chunk)
            print(f"Reduced chunk processed for keys: {list(reduced_chunk.keys())}")

            del chunk_dict
            gc.collect()

        print("Clustering data preparation complete.")
        return reduced_super_cgrs_dict
    except Exception as e:
        # Broad catch is deliberate: log server-side and surface to the
        # Streamlit UI instead of crashing the page.
        print(f"Error in prepare_clustering_data: {str(e)}")
        st.error(f"Error in prepare_clustering_data: {str(e)}")
        return None
476
 
477
+
478
  def memory_status():
479
  """Get current memory status"""
480
  process = psutil.Process()