# Extraction artifacts preserved as comments (file-viewer metadata, not code):
# File size: 3,462 Bytes; commit 31fd8be; line-number gutter (1-93) removed.
from typing import Dict, Any, List
import torch
import numpy as np
from PIL import Image
import base64
import io
import cv2
# Test importing the problematic packages
from transformers import Sam3Model, Sam3Processor
class EndpointHandler:
    """
    Minimal test handler to isolate dependency loading issues.

    Imports all the same dependencies as the real SAM3 handler but performs no
    inference, so a startup failure can be attributed to the import/environment
    layer rather than to model loading.

    NOTE(review): the status-marker emoji in the log/response strings below were
    mojibake in the extracted source ("β"/"π" with a line break inside the
    f-string — a SyntaxError as extracted); restored to ✅/🔍/❌ single-line
    literals, which the byte pattern indicates.
    """

    def __init__(self, path: str = ""):
        """
        Initialize the handler - test dependency loading without heavy model loading.

        Args:
            path: Path to model weights (unused in this test).
        """
        self.device = "cuda" if torch.cuda.is_available() else "cpu"
        print(f"✅ Test handler initialized successfully on device: {self.device}")

        # Confirm the SAM3 classes imported — referencing the class objects only,
        # no weights are downloaded or loaded here.
        print(f"✅ Successfully imported Sam3Model: {Sam3Model}")
        print(f"✅ Successfully imported Sam3Processor: {Sam3Processor}")

        # Report versions of the other heavyweight dependencies.
        print(f"✅ PyTorch version: {torch.__version__}")
        print(f"✅ NumPy version: {np.__version__}")
        print(f"✅ PIL (Pillow) available: {Image}")
        print(f"✅ OpenCV available: {cv2.__version__}")

        # Deliberately skip model loading to avoid memory/download issues.
        self.model = None
        self.processor = None
        print("✅ Minimal test handler ready - all dependencies loaded successfully!")

    def __call__(self, data: Dict[str, Any]) -> Dict[str, Any]:
        """
        Minimal test endpoint that returns success without actual inference.

        Args:
            data: Input payload (contents are ignored; only its presence and
                keys are logged).

        Returns:
            A "success" dict with version and smoke-test info, or an "error"
            dict if any dependency check raises.
        """
        try:
            print("🔍 Test handler called with data keys:", list(data.keys()) if data else "No data")

            # Smoke-test basic operations with the imported libraries.
            test_array = np.array([1, 2, 3])
            test_tensor = torch.tensor([1.0, 2.0, 3.0])
            print(f"✅ NumPy test array: {test_array}")
            print(f"✅ PyTorch test tensor: {test_tensor}")
            print(f"✅ Device available: {self.device}")

            return {
                "status": "success",
                "message": "✅ All dependencies loaded and working correctly!",
                "test_results": {
                    "numpy_test": test_array.tolist(),
                    "torch_test": test_tensor.tolist(),
                    "device": self.device,
                    "torch_version": torch.__version__,
                    "numpy_version": np.__version__,
                    "opencv_version": cv2.__version__,
                    "transformers_classes_available": {
                        "Sam3Model": str(Sam3Model),
                        "Sam3Processor": str(Sam3Processor)
                    }
                },
                "input_data_received": data is not None,
                "handler_type": "minimal_test_handler"
            }
        except Exception as e:
            # Broad catch is intentional at this endpoint boundary: any failure
            # is reported back to the caller rather than crashing the worker.
            print(f"❌ Error in test handler: {str(e)}")
            return {
                "status": "error",
                "message": f"Test handler failed: {str(e)}",
                "error_type": type(e).__name__,
                "handler_type": "minimal_test_handler"
            }