miyuki2026 committed on
Commit
04a1180
·
1 Parent(s): daed0d6
examples/tutorials/by_deepspeed/step_2_train_model.py CHANGED
@@ -14,8 +14,8 @@ else:
14
  project_path = Path(project_path)
15
 
16
  from peft import LoraConfig
17
- # from transformers import AutoConfig, AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
18
- from modelscope import AutoConfig, AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
19
  from trl import SFTTrainer, SFTConfig
20
  from datasets import load_dataset
21
  import torch
@@ -63,7 +63,7 @@ def get_args():
63
  def main():
64
  args = get_args()
65
 
66
- os.environ["MODELSCOPE_CACHE"] = args.model_cache_dir
67
 
68
  model = AutoModelForCausalLM.from_pretrained(
69
  pretrained_model_name_or_path="/root/autodl-tmp/OpenMiniMind/hub_models/models/Qwen/Qwen3-8B",
 
14
  project_path = Path(project_path)
15
 
16
  from peft import LoraConfig
17
+ from transformers import AutoConfig, AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
18
+ # from modelscope import AutoConfig, AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
19
  from trl import SFTTrainer, SFTConfig
20
  from datasets import load_dataset
21
  import torch
 
63
  def main():
64
  args = get_args()
65
 
66
+ # os.environ["MODELSCOPE_CACHE"] = args.model_cache_dir
67
 
68
  model = AutoModelForCausalLM.from_pretrained(
69
  pretrained_model_name_or_path="/root/autodl-tmp/OpenMiniMind/hub_models/models/Qwen/Qwen3-8B",