ingeol committed on
Commit
6fa9d56
·
verified ·
1 Parent(s): d951357

Training in progress, epoch 1

Browse files
Files changed (2) hide show
  1. training_args.bin +1 -1
  2. upload.py +3 -3
training_args.bin CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:bbad7a51936e9d60cc0f7618e20fe0a56e387481ec70f21cb5b60f23b6a7a931
3
  size 6651
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:27e2ac7cb643fd30f84baae2da820e5bb4bb007195fa5e93edce5a2ea4aac2c7
3
  size 6651
upload.py CHANGED
@@ -1,7 +1,7 @@
1
  from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
2
  from transformers import AutoTokenizer, AutoModelForCausalLM
3
 
4
- ckpt = 'checkpoint-70'
5
  origin_model_id = 'beomi/Llama-3-Open-Ko-8B'
6
  model = AutoModelForCausalLM.from_pretrained(ckpt)
7
  tokenizer = AutoTokenizer.from_pretrained(origin_model_id)
@@ -12,5 +12,5 @@ model.resize_token_embeddings(len(tokenizer))
12
  # state_dict = get_fp32_state_dict_from_zero_checkpoint(ckpt)
13
  # model.load_state_dict(state_dict)
14
 
15
- tokenizer.push_to_hub('ingeol/kosaul_ft_v0.1')
16
- model.push_to_hub('ingeol/kosaul_ft_v0.1')
 
1
  from deepspeed.utils.zero_to_fp32 import get_fp32_state_dict_from_zero_checkpoint
2
  from transformers import AutoTokenizer, AutoModelForCausalLM
3
 
4
+ ckpt = 'checkpoint-36'
5
  origin_model_id = 'beomi/Llama-3-Open-Ko-8B'
6
  model = AutoModelForCausalLM.from_pretrained(ckpt)
7
  tokenizer = AutoTokenizer.from_pretrained(origin_model_id)
 
12
  # state_dict = get_fp32_state_dict_from_zero_checkpoint(ckpt)
13
  # model.load_state_dict(state_dict)
14
 
15
+ tokenizer.push_to_hub('ingeol/kosaul_ft_v0.3')
16
+ model.push_to_hub('ingeol/kosaul_ft_v0.3')