rokmr committed on
Commit
998921e
·
verified ·
1 Parent(s): 24bd3ad

updating with fixes for HF token handling

Browse files
Files changed (1) hide show
  1. app.py +34 -2
app.py CHANGED
@@ -41,9 +41,29 @@ def load_pipeline():
41
  def remote_text_encoder(prompts):
42
  """Encode prompts using remote text encoder API."""
43
  try:
44
- token = get_token()
 
 
 
 
 
 
 
 
 
45
  if not token:
46
- raise ValueError("HuggingFace token not found. Please login using 'huggingface-cli login'")
 
 
 
 
 
 
 
 
 
 
 
47
 
48
  response = requests.post(
49
  "https://remote-text-encoder-flux-2.huggingface.co/predict",
@@ -59,6 +79,18 @@ def remote_text_encoder(prompts):
59
 
60
  device = "cuda" if torch.cuda.is_available() else "cpu"
61
  return prompt_embeds.to(device)
 
 
 
 
 
 
 
 
 
 
 
 
62
  except Exception as e:
63
  raise Exception(f"Failed to encode prompt: {str(e)}")
64
 
 
41
  def remote_text_encoder(prompts):
42
  """Encode prompts using remote text encoder API."""
43
  try:
44
+ # Try multiple methods to get the token
45
+ token = None
46
+
47
+ # Method 1: From huggingface_hub
48
+ try:
49
+ token = get_token()
50
+ except:
51
+ pass
52
+
53
+ # Method 2: From environment variable (Spaces sets this automatically)
54
  if not token:
55
+ token = os.environ.get("HF_TOKEN") or os.environ.get("HUGGING_FACE_HUB_TOKEN")
56
+
57
+ # Method 3: From Spaces secrets
58
+ if not token:
59
+ token = os.environ.get("SPACE_TOKEN")
60
+
61
+ if not token:
62
+ raise ValueError(
63
+ "HuggingFace token not found. "
64
+ "If running on Spaces, make sure your Space has access to gated models. "
65
+ "If running locally, please login using 'huggingface-cli login'"
66
+ )
67
 
68
  response = requests.post(
69
  "https://remote-text-encoder-flux-2.huggingface.co/predict",
 
79
 
80
  device = "cuda" if torch.cuda.is_available() else "cpu"
81
  return prompt_embeds.to(device)
82
+ except requests.HTTPError as e:
83
+ if e.response.status_code == 401:
84
+ raise Exception(
85
+ "Authentication failed (401). Your HuggingFace token may not have access to this model. "
86
+ "Please ensure your token has permission to access FLUX.2 models."
87
+ )
88
+ elif e.response.status_code == 403:
89
+ raise Exception(
90
+ "Access forbidden (403). You may need to accept the model's license agreement on HuggingFace."
91
+ )
92
+ else:
93
+ raise Exception(f"HTTP error {e.response.status_code}: {str(e)}")
94
  except Exception as e:
95
  raise Exception(f"Failed to encode prompt: {str(e)}")
96