cgoodmaker committed on
Commit
b08f876
·
1 Parent(s): bb7e939

Pass HF_TOKEN explicitly to pipeline() for gated model auth

Browse files
Files changed (1) hide show
  1. models/medgemma_agent.py +5 -2
models/medgemma_agent.py CHANGED
@@ -207,12 +207,14 @@ class MedGemmaAgent:
207
  import torch
208
  from transformers import pipeline
209
 
210
- # Authenticate with HF Hub if a token is provided (required for gated models)
211
  hf_token = os.environ.get("HF_TOKEN")
212
  if hf_token:
213
  from huggingface_hub import login
214
  login(token=hf_token, add_to_git_credential=False)
215
  self._print("Authenticated with HF Hub")
 
 
216
 
217
  self._print(f"Loading model: {self.model_id}")
218
 
@@ -235,7 +237,8 @@ class MedGemmaAgent:
235
  self.pipe = pipeline(
236
  "image-text-to-text",
237
  model=self.model_id,
238
- model_kwargs=model_kwargs
 
239
  )
240
 
241
  self._print(f"Model loaded in {time.time() - start:.1f}s")
 
207
  import torch
208
  from transformers import pipeline
209
 
210
+ # Authenticate with HF Hub (required for gated models like MedGemma)
211
  hf_token = os.environ.get("HF_TOKEN")
212
  if hf_token:
213
  from huggingface_hub import login
214
  login(token=hf_token, add_to_git_credential=False)
215
  self._print("Authenticated with HF Hub")
216
+ else:
217
+ self._print("Warning: HF_TOKEN not set — gated models will fail")
218
 
219
  self._print(f"Loading model: {self.model_id}")
220
 
 
237
  self.pipe = pipeline(
238
  "image-text-to-text",
239
  model=self.model_id,
240
+ model_kwargs=model_kwargs,
241
+ token=hf_token, # pass explicitly in addition to login()
242
  )
243
 
244
  self._print(f"Model loaded in {time.time() - start:.1f}s")