miyuki2026 committed on
Commit
daed0d6
·
1 Parent(s): 94d6fef
examples/tutorials/by_deepspeed/step_2_train_model.py CHANGED
@@ -66,17 +66,19 @@ def main():
66
  os.environ["MODELSCOPE_CACHE"] = args.model_cache_dir
67
 
68
  model = AutoModelForCausalLM.from_pretrained(
69
- pretrained_model_name_or_path=args.model_name,
 
70
  quantization_config=None,
71
  # device_map="auto",
72
  trust_remote_code=True,
73
- cache_dir=args.model_cache_dir,
74
  )
75
  print(model)
76
  tokenizer = AutoTokenizer.from_pretrained(
77
- pretrained_model_name_or_path=args.model_name,
 
78
  trust_remote_code=True,
79
- cache_dir=args.model_cache_dir,
80
  )
81
  print(tokenizer)
82
 
 
66
  os.environ["MODELSCOPE_CACHE"] = args.model_cache_dir
67
 
68
  model = AutoModelForCausalLM.from_pretrained(
69
+ pretrained_model_name_or_path="/root/autodl-tmp/OpenMiniMind/hub_models/models/Qwen/Qwen3-8B",
70
+ # pretrained_model_name_or_path=args.model_name,
71
  quantization_config=None,
72
  # device_map="auto",
73
  trust_remote_code=True,
74
+ # cache_dir=args.model_cache_dir,
75
  )
76
  print(model)
77
  tokenizer = AutoTokenizer.from_pretrained(
78
+ pretrained_model_name_or_path="/root/autodl-tmp/OpenMiniMind/hub_models/models/Qwen/Qwen3-8B",
79
+ # pretrained_model_name_or_path=args.model_name,
80
  trust_remote_code=True,
81
+ # cache_dir=args.model_cache_dir,
82
  )
83
  print(tokenizer)
84