PunchNFIT committed on
Commit
b5f9bf1
·
1 Parent(s): ff41043

Fix tokenizer mapping for CustomSNPConfig

Browse files
Files changed (1) hide show
  1. api_inference.py +10 -1
api_inference.py CHANGED
@@ -60,10 +60,19 @@ AutoModel.register(CustomSNPConfig, CustomSNPModel)
# Load the custom SNP config, tokenizer, and model from MODEL_DIR.
# MODEL_DIR and the Auto* classes are bound earlier in the file (not shown here);
# CustomSNPConfig/CustomSNPModel were registered with AutoModel just above.
try:
    print("Loading model from:", MODEL_DIR)
    # trust_remote_code lets the checkpoint's custom config class be loaded.
    config = AutoConfig.from_pretrained(MODEL_DIR, trust_remote_code=True)
    # NOTE(review): this assumes the custom config is mapped to a tokenizer
    # class in AutoTokenizer's registry — if not, this line raises.
    tokenizer = AutoTokenizer.from_pretrained(MODEL_DIR)
    model = AutoModel.from_pretrained(MODEL_DIR, config=config, trust_remote_code=True)
    # Switch to evaluation mode for inference (e.g. disables dropout).
    model.eval()
    print("✅ Custom SNP model loaded successfully.")
except Exception as e:
    # Log and re-raise so a startup failure is visible and still fatal.
    print("❌ Error loading custom model:", e)
    raise e
 
# Load the custom SNP config, tokenizer, and model from MODEL_DIR.
# MODEL_DIR and the Auto* classes are bound earlier in the file (not shown here);
# CustomSNPConfig/CustomSNPModel were registered with AutoModel just above.
try:
    print("Loading model from:", MODEL_DIR)
    # trust_remote_code lets the checkpoint's custom config class be loaded.
    config = AutoConfig.from_pretrained(MODEL_DIR, trust_remote_code=True)

    # Try loading the tokenizer via the Auto registry; the custom config may
    # not be mapped to a tokenizer class, in which case we fall back.
    from transformers import RobertaTokenizer
    try:
        tokenizer = AutoTokenizer.from_pretrained(MODEL_DIR)
    except Exception:
        # Best-effort fallback — presumably the model is RoBERTa-based so the
        # stock roberta-base vocab matches; TODO confirm against the checkpoint.
        print("⚠️ Falling back to default RoBERTa tokenizer.")
        tokenizer = RobertaTokenizer.from_pretrained("roberta-base")

    model = AutoModel.from_pretrained(MODEL_DIR, config=config, trust_remote_code=True)
    # Switch to evaluation mode for inference (e.g. disables dropout).
    model.eval()
    print("✅ Custom SNP model loaded successfully.")

except Exception as e:
    # Log and re-raise so a startup failure is visible and still fatal.
    print("❌ Error loading custom model:", e)
    # Bare `raise` re-raises the active exception with its original traceback
    # intact (no redundant frame), unlike the previous `raise e`.
    raise