jasonxie-rblx committed on
Commit
2b45d27
·
verified ·
1 Parent(s): 77a9f36

Change to use HF token

Browse files
Files changed (1) hide show
  1. app.py +21 -6
app.py CHANGED
@@ -1,3 +1,4 @@
 
1
  from typing import Dict, Any
2
 
3
  import gradio as gr
@@ -50,18 +51,32 @@ if __name__ == "__main__":
50
  # Model configuration
51
  model_id = "Roblox/PII-OSS-Private-Not-Public"
52
 
 
 
 
 
53
  # Load model and tokenizer
54
- # When deployed as a Hugging Face Space in the same organization,
55
- # authentication is handled automatically
56
  print(f"Loading model: {model_id}")
57
  try:
58
- model = AutoModelForSequenceClassification.from_pretrained(model_id)
59
- tokenizer = AutoTokenizer.from_pretrained(model_id)
 
 
 
 
 
 
 
 
60
  model.eval()
61
  print("Model loaded successfully!")
62
  except Exception as e:
63
  print(f"Failed to load model: {e}")
64
- print("If running locally, you may need to login with: huggingface-cli login")
 
 
 
 
65
  exit(1)
66
 
67
  # Create Gradio interface
@@ -82,4 +97,4 @@ if __name__ == "__main__":
82
  flagging_mode="never",
83
  )
84
 
85
- demo.launch()
 
1
+ import os
2
  from typing import Dict, Any
3
 
4
  import gradio as gr
 
51
  # Model configuration
52
  model_id = "Roblox/PII-OSS-Private-Not-Public"
53
 
54
+ # Get HF token from Hugging Face Space secrets
55
+ # In Spaces, set HF_TOKEN in Settings > Repository secrets
56
+ HF_TOKEN = os.getenv("HF_TOKEN")
57
+
58
  # Load model and tokenizer
 
 
59
  print(f"Loading model: {model_id}")
60
  try:
61
+ # Use token if available (required for private models)
62
+ if HF_TOKEN:
63
+ print("Using HF_TOKEN from environment/secrets")
64
+ model = AutoModelForSequenceClassification.from_pretrained(model_id, token=HF_TOKEN)
65
+ tokenizer = AutoTokenizer.from_pretrained(model_id, token=HF_TOKEN)
66
+ else:
67
+ print("No HF_TOKEN found, attempting without authentication...")
68
+ model = AutoModelForSequenceClassification.from_pretrained(model_id)
69
+ tokenizer = AutoTokenizer.from_pretrained(model_id)
70
+
71
  model.eval()
72
  print("Model loaded successfully!")
73
  except Exception as e:
74
  print(f"Failed to load model: {e}")
75
+ if not HF_TOKEN:
76
+ print("\n⚠️ For private models, you need to set HF_TOKEN as a Space secret:")
77
+ print(" 1. Go to your Space Settings")
78
+ print(" 2. Add a new secret named 'HF_TOKEN'")
79
+ print(" 3. Set your Hugging Face token as the value")
80
  exit(1)
81
 
82
  # Create Gradio interface
 
97
  flagging_mode="never",
98
  )
99
 
100
+ demo.launch()