miyuki2026 committed on
Commit
415ae04
·
1 Parent(s): 6c8daa9
examples/tutorials/by_deepspeed/step_2_train_model.py CHANGED
@@ -5,7 +5,7 @@ import os
5
  from pathlib import Path
6
  import platform
7
 
8
- # os.environ["HF_ENDPOINT"] = "https://hf-mirror.com"
9
 
10
  if platform.system() in ("Windows", "Darwin"):
11
  from project_settings import project_path
@@ -66,7 +66,6 @@ def main():
66
  os.environ["MODELSCOPE_CACHE"] = args.model_cache_dir
67
 
68
  model = AutoModelForCausalLM.from_pretrained(
69
- # pretrained_model_name_or_path="/root/autodl-tmp/OpenMiniMind/hub_models/models/Qwen/Qwen3-8B",
70
  pretrained_model_name_or_path=args.model_name,
71
  quantization_config=None,
72
  # device_map="auto",
@@ -75,7 +74,6 @@ def main():
75
  )
76
  print(model)
77
  tokenizer = AutoTokenizer.from_pretrained(
78
- # pretrained_model_name_or_path="/root/autodl-tmp/OpenMiniMind/hub_models/models/Qwen/Qwen3-8B",
79
  pretrained_model_name_or_path=args.model_name,
80
  trust_remote_code=True,
81
  # cache_dir=args.model_cache_dir,
 
5
  from pathlib import Path
6
  import platform
7
 
8
+ os.environ["HF_ENDPOINT"] = "https://hf-mirror.com"
9
 
10
  if platform.system() in ("Windows", "Darwin"):
11
  from project_settings import project_path
 
66
  os.environ["MODELSCOPE_CACHE"] = args.model_cache_dir
67
 
68
  model = AutoModelForCausalLM.from_pretrained(
 
69
  pretrained_model_name_or_path=args.model_name,
70
  quantization_config=None,
71
  # device_map="auto",
 
74
  )
75
  print(model)
76
  tokenizer = AutoTokenizer.from_pretrained(
 
77
  pretrained_model_name_or_path=args.model_name,
78
  trust_remote_code=True,
79
  # cache_dir=args.model_cache_dir,