Commit: "Add debugging" — backend/runner/inference.py changed (+14 lines, −2 lines)
|
@@ -770,6 +770,16 @@ def process_embedding_batch_streaming(
|
|
| 770 |
|
| 771 |
print(f"🔍 Processing batch of {len(batch)} items...")
|
| 772 |
|  [empty diff-gutter rows elided — extraction artifact]
| 773 |
for item in batch:
|
| 774 |
try:
|
| 775 |
sentence_id = item.get('sentence_id', '')
|
|
@@ -780,7 +790,8 @@ def process_embedding_batch_streaming(
|
|
| 780 |
elif model_type == "PaintingCLIP" and 'paintingclip_embedding' in item:
|
| 781 |
embedding = torch.tensor(item['paintingclip_embedding'])
|
| 782 |
else:
|
| 783 |
-
|
|
|
|
| 784 |
continue
|
| 785 |
|
| 786 |
# Calculate similarity
|
|
@@ -807,7 +818,8 @@ def process_embedding_batch_streaming(
|
|
| 807 |
|
| 808 |
except Exception as e:
|
| 809 |
error_count += 1
|
| 810 |
-
|
|
|
|
| 811 |
continue
|
| 812 |
|
| 813 |
print(f"🔍 Batch processing complete: {processed_count} successful, {error_count} errors")
|
|
|
|
| 770 |
|
| 771 |
print(f"🔍 Processing batch of {len(batch)} items...")
|
| 772 |
|
| 773 |
+
# Debug: show first few items to understand the data structure
|
| 774 |
+
for i, item in enumerate(batch[:3]):
|
| 775 |
+
print(f" Item {i}: keys = {list(item.keys())}")
|
| 776 |
+
if 'clip_embedding' in item:
|
| 777 |
+
print(f"🔍 Item {i}: clip_embedding shape = {len(item['clip_embedding'])}")
|
| 778 |
+
if 'paintingclip_embedding' in item:
|
| 779 |
+
print(f" Item {i}: paintingclip_embedding shape = {len(item['paintingclip_embedding'])}")
|
| 780 |
+
if 'sentence_id' in item:
|
| 781 |
+
print(f" Item {i}: sentence_id = {item['sentence_id']}")
|
| 782 |
+
|
| 783 |
for item in batch:
|
| 784 |
try:
|
| 785 |
sentence_id = item.get('sentence_id', '')
|
|
|
|
| 790 |
elif model_type == "PaintingCLIP" and 'paintingclip_embedding' in item:
|
| 791 |
embedding = torch.tensor(item['paintingclip_embedding'])
|
| 792 |
else:
|
| 793 |
+
if processed_count < 3: # Only show first few errors
|
| 794 |
+
print(f"⚠️ No embedding found for {model_type} in item: {list(item.keys())}")
|
| 795 |
continue
|
| 796 |
|
| 797 |
# Calculate similarity
|
|
|
|
| 818 |
|
| 819 |
except Exception as e:
|
| 820 |
error_count += 1
|
| 821 |
+
if error_count < 3: # Only show first few errors
|
| 822 |
+
print(f"⚠️ Error processing item in streaming batch: {e}")
|
| 823 |
continue
|
| 824 |
|
| 825 |
print(f"🔍 Batch processing complete: {processed_count} successful, {error_count} errors")
|