samwaugh commited on
Commit
78240c5
·
1 Parent(s): fd6d67a

Debugging for inference

Browse files
Files changed (1) hide show
  1. backend/runner/inference.py +11 -3
backend/runner/inference.py CHANGED
@@ -721,15 +721,15 @@ def run_inference_streaming(
721
  print(f"πŸ” Batch {batch_count} completed in {batch_time:.2f}s")
722
  print(f"πŸ” Progress: {total_processed:,}/{total_items:,} ({progress_pct:.1f}%)")
723
  print(f"πŸ” Elapsed time: {elapsed_time:.1f}s")
724
- print(f"πŸ” Current top score: {results[0]['score']:.4f} if results else 'N/A'")
725
  print(f"πŸ” Estimated time remaining: {((elapsed_time / total_processed) * (total_items - total_processed)):.1f}s")
726
  else:
727
  print(f"πŸ” Batch {batch_count} completed in {batch_time:.2f}s")
728
  print(f"πŸ” Total processed: {total_processed:,}")
729
  print(f"πŸ” Elapsed time: {elapsed_time:.1f}s")
730
- print(f"πŸ” Current top score: {results[0]['score']:.4f} if results else 'N/A'")
731
 
732
- print(f"πŸ” Current top result: {results[0]['english_original'][:100]}..." if results else "No results yet")
733
  print("─" * 80)
734
 
735
  # Process remaining items
@@ -765,6 +765,10 @@ def process_embedding_batch_streaming(
765
  ) -> List[Dict[str, Any]]:
766
  """Process a batch of streaming embeddings"""
767
  results = []
 
 
 
 
768
 
769
  for item in batch:
770
  try:
@@ -776,6 +780,7 @@ def process_embedding_batch_streaming(
776
  elif model_type == "PaintingCLIP" and 'paintingclip_embedding' in item:
777
  embedding = torch.tensor(item['paintingclip_embedding'])
778
  else:
 
779
  continue
780
 
781
  # Calculate similarity
@@ -798,9 +803,12 @@ def process_embedding_batch_streaming(
798
  "work": work_id,
799
  "rank": len(results) + 1,
800
  })
 
801
 
802
  except Exception as e:
 
803
  print(f"⚠️ Error processing item in streaming batch: {e}")
804
  continue
805
 
 
806
  return results
 
721
  print(f"πŸ” Batch {batch_count} completed in {batch_time:.2f}s")
722
  print(f"πŸ” Progress: {total_processed:,}/{total_items:,} ({progress_pct:.1f}%)")
723
  print(f"πŸ” Elapsed time: {elapsed_time:.1f}s")
724
+ print(f"πŸ” Current top score: {results[0]['score']:.4f}" if results else "πŸ” Current top score: N/A")
725
  print(f"πŸ” Estimated time remaining: {((elapsed_time / total_processed) * (total_items - total_processed)):.1f}s")
726
  else:
727
  print(f"πŸ” Batch {batch_count} completed in {batch_time:.2f}s")
728
  print(f"πŸ” Total processed: {total_processed:,}")
729
  print(f"πŸ” Elapsed time: {elapsed_time:.1f}s")
730
+ print(f"πŸ” Current top score: {results[0]['score']:.4f}" if results else "πŸ” Current top score: N/A")
731
 
732
+ print(f"πŸ” Current top result: {results[0]['english_original'][:100]}..." if results else "πŸ” No results yet")
733
  print("─" * 80)
734
 
735
  # Process remaining items
 
765
  ) -> List[Dict[str, Any]]:
766
  """Process a batch of streaming embeddings"""
767
  results = []
768
+ processed_count = 0
769
+ error_count = 0
770
+
771
+ print(f"πŸ” Processing batch of {len(batch)} items...")
772
 
773
  for item in batch:
774
  try:
 
780
  elif model_type == "PaintingCLIP" and 'paintingclip_embedding' in item:
781
  embedding = torch.tensor(item['paintingclip_embedding'])
782
  else:
783
+ print(f"⚠️ No embedding found for {model_type} in item: {list(item.keys())}")
784
  continue
785
 
786
  # Calculate similarity
 
803
  "work": work_id,
804
  "rank": len(results) + 1,
805
  })
806
+ processed_count += 1
807
 
808
  except Exception as e:
809
+ error_count += 1
810
  print(f"⚠️ Error processing item in streaming batch: {e}")
811
  continue
812
 
813
+ print(f"πŸ” Batch processing complete: {processed_count} successful, {error_count} errors")
814
  return results