Factor Studios committed on
Commit
028bfa9
·
verified ·
1 Parent(s): ee10eab

Update test_ai_integration_http.py

Browse files
Files changed (1) hide show
  1. test_ai_integration_http.py +9 -7
test_ai_integration_http.py CHANGED
@@ -120,23 +120,25 @@ def test_ai_integration_http():
120
  )
121
  status['processor_loaded'] = True
122
 
123
- # Get config and model class from the processor
124
- config = processor.config
125
-
126
- # Load the model as a general vision model
127
  model = AutoModel.from_pretrained(
128
  model_name,
129
  trust_remote_code=True,
130
  torch_dtype=torch.float32, # Use float32 for better compatibility
131
- device_map=None # Don't auto-map devices
 
132
  )
133
  status['model_loaded'] = True
134
 
135
- # Log model architecture
 
 
 
136
  model_size = get_model_size(model)
137
  logger.info(f"Model loaded: {model_size/1e9:.2f} GB in parameters")
138
  logger.info(f"Model architecture: {model.__class__.__name__}")
139
- logger.info(f"Model config type: {type(config).__name__}")
 
140
  except Exception as e:
141
  logger.error(f"Model loading failed: {str(e)}")
142
  raise
 
120
  )
121
  status['processor_loaded'] = True
122
 
123
+ # Load model directly - let it handle config internally
 
 
 
124
  model = AutoModel.from_pretrained(
125
  model_name,
126
  trust_remote_code=True,
127
  torch_dtype=torch.float32, # Use float32 for better compatibility
128
+ device_map=None, # Don't auto-map devices
129
+ ignore_mismatched_sizes=True # Handle any size mismatches
130
  )
131
  status['model_loaded'] = True
132
 
133
+ logger.info(f"Processor type: {type(processor).__name__}")
134
+ logger.info(f"Model type: {type(model).__name__}")
135
+
136
+ # Log model architecture and details
137
  model_size = get_model_size(model)
138
  logger.info(f"Model loaded: {model_size/1e9:.2f} GB in parameters")
139
  logger.info(f"Model architecture: {model.__class__.__name__}")
140
+ if hasattr(model, 'config'):
141
+ logger.info(f"Model config type: {type(model.config).__name__}")
142
  except Exception as e:
143
  logger.error(f"Model loading failed: {str(e)}")
144
  raise