SFM2001 committed on
Commit
626f7ac
·
1 Parent(s): d40f128

try to find bug

Browse files
Files changed (1) hide show
  1. create_app.py +2 -0
create_app.py CHANGED
@@ -20,6 +20,7 @@ def load_models():
20
  global MODELS_LOADED, LONGFORMER_TOKENIZER, LONGFORMER_MODEL, QWEN_TOKENIZER, QWEN_MODEL
21
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
22
  print("DEIVCE=", device)
 
23
  if not MODELS_LOADED:
24
  LONGFORMER_TOKENIZER = LongformerTokenizer.from_pretrained('allenai/longformer-base-4096', device='auto')
25
  config = LongformerConfig.from_json_file("Longformer_checkpoint/config.json")
@@ -32,6 +33,7 @@ def load_models():
32
  QWEN_TOKENIZER.pad_token_id = QWEN_TOKENIZER.eos_token_id
33
  QWEN_MODEL = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16).half()
34
  MODELS_LOADED = True
 
35
 
36
  def create_app():
37
  set_seed(42)
 
20
  global MODELS_LOADED, LONGFORMER_TOKENIZER, LONGFORMER_MODEL, QWEN_TOKENIZER, QWEN_MODEL
21
  device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
22
  print("DEIVCE=", device)
23
+ print("WHY DONT PRINT")
24
  if not MODELS_LOADED:
25
  LONGFORMER_TOKENIZER = LongformerTokenizer.from_pretrained('allenai/longformer-base-4096', device='auto')
26
  config = LongformerConfig.from_json_file("Longformer_checkpoint/config.json")
 
33
  QWEN_TOKENIZER.pad_token_id = QWEN_TOKENIZER.eos_token_id
34
  QWEN_MODEL = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype=torch.float16).half()
35
  MODELS_LOADED = True
36
+ print("LOAD ENDED")
37
 
38
  def create_app():
39
  set_seed(42)