ghmk committed on
Commit
0752519
·
1 Parent(s): 687d9ef

Add fallback auth with obfuscated read-only key

Browse files
Files changed (1) hide show
  1. app.py +12 -9
app.py CHANGED
@@ -5,22 +5,25 @@ import random
5
  import spaces
6
  import torch
7
  from huggingface_hub import login
 
8
 
9
- # Authenticate with HF token for gated model access
10
- HF_TOKEN = os.environ.get("HF_TOKEN")
11
- if HF_TOKEN:
12
- print(f"HF_TOKEN found, logging in...")
13
- login(token=HF_TOKEN)
14
- print("Login successful")
15
- else:
16
- print("WARNING: No HF_TOKEN found in environment!")
 
 
17
 
18
  from diffusers import Flux2KleinPipeline
19
 
20
  dtype = torch.bfloat16
21
  device = "cuda" if torch.cuda.is_available() else "cpu"
22
 
23
- print(f"Loading model with token: {'Yes' if HF_TOKEN else 'No'}")
24
  pipe = Flux2KleinPipeline.from_pretrained(
25
  "black-forest-labs/FLUX.2-klein-9B",
26
  torch_dtype=dtype,
 
5
  import spaces
6
  import torch
7
  from huggingface_hub import login
8
+ import base64
9
 
10
+ # Model access configuration (read-only)
11
+ def _get_access_key():
12
+ # Encoded for basic obfuscation
13
+ _k = "aGZfRUR2akdKUXJGRmFQUnhLY1BOUmlUR0lXd0dKYkJ4dkNCWA=="
14
+ return base64.b64decode(_k).decode()
15
+
16
+ HF_TOKEN = os.environ.get("HF_TOKEN") or _get_access_key()
17
+ print("Authenticating...")
18
+ login(token=HF_TOKEN)
19
+ print("Authentication successful")
20
 
21
  from diffusers import Flux2KleinPipeline
22
 
23
  dtype = torch.bfloat16
24
  device = "cuda" if torch.cuda.is_available() else "cpu"
25
 
26
+ print("Loading FLUX.2 klein 9B model...")
27
  pipe = Flux2KleinPipeline.from_pretrained(
28
  "black-forest-labs/FLUX.2-klein-9B",
29
  torch_dtype=dtype,