Pushing fine-tuned model to Hugging Face Hub
088c78f
[2025-06-30 14:45:15,192][__main__][INFO] - cache_dir: /tmp/
dataset:
  name: kamel-usp/aes_enem_dataset
  split: JBCS2025
training_params:
  seed: 42
  num_train_epochs: 20
  logging_steps: 100
  metric_for_best_model: QWK
  bf16: true
bootstrap:
  enabled: true
  n_bootstrap: 10000
  bootstrap_seed: 42
  metrics:
  - QWK
  - Macro_F1
  - Weighted_F1
post_training_results:
  model_path: /workspace/jbcs2025/outputs/2025-03-24/20-42-59
experiments:
  model:
    name: microsoft/Phi-3.5-mini-instruct
    type: phi35_classification_lora
    num_labels: 6
    output_dir: ./results/
    logging_dir: ./logs/
    best_model_dir: ./results/best_model
    lora_r: 8
    lora_dropout: 0.05
    lora_alpha: 16
    lora_target_modules: all-linear
    checkpoint_path: ''
  tokenizer:
    name: microsoft/Phi-3.5-mini-instruct
  dataset:
    grade_index: 1
    use_full_context: true
  training_params:
    weight_decay: 0.01
    warmup_ratio: 0.1
    learning_rate: 5.0e-05
    train_batch_size: 1
    eval_batch_size: 16
    gradient_accumulation_steps: 16
    gradient_checkpointing: false
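As a minimal sketch of how this config plausibly maps onto data loading (the loading call and the grade-to-class mapping below are assumptions, not shown in this log; ENEM competency grades are multiples of 40 in 0-200, which matches num_labels: 6):

```python
from datasets import load_dataset

# Hypothetical loading call: "JBCS2025" is assumed to be the dataset
# configuration name for kamel-usp/aes_enem_dataset.
dataset = load_dataset("kamel-usp/aes_enem_dataset", "JBCS2025", cache_dir="/tmp/")

GRADE_INDEX = 1  # grade_index: 1 -> the second ENEM competency

def add_label(example):
    # Map a grade in {0, 40, 80, 120, 160, 200} to a class id in 0..5.
    example["label"] = example["grades"][GRADE_INDEX] // 40
    return example

dataset = dataset.map(add_label)
```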
[2025-06-30 14:45:19,186][__main__][INFO] - GPU 0: NVIDIA H200 | TDP 700 W
[2025-06-30 14:45:19,186][__main__][INFO] - Starting the Fine Tuning training process.
[2025-06-30 14:45:24,727][transformers.tokenization_utils_base][INFO] - loading file tokenizer.model from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/tokenizer.model
[2025-06-30 14:45:24,727][transformers.tokenization_utils_base][INFO] - loading file tokenizer.json from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/tokenizer.json
[2025-06-30 14:45:24,727][transformers.tokenization_utils_base][INFO] - loading file added_tokens.json from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/added_tokens.json
[2025-06-30 14:45:24,727][transformers.tokenization_utils_base][INFO] - loading file special_tokens_map.json from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/special_tokens_map.json
[2025-06-30 14:45:24,727][transformers.tokenization_utils_base][INFO] - loading file tokenizer_config.json from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/tokenizer_config.json
[2025-06-30 14:45:24,727][transformers.tokenization_utils_base][INFO] - loading file chat_template.jinja from cache at None
[2025-06-30 14:45:24,784][transformers.tokenization_utils_base][INFO] - Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
[2025-06-30 14:45:24,790][__main__][INFO] - Tokenizer function parameters - Padding: longest; Truncation: False; Use Full Context: True
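A sketch of the tokenization step these parameters describe, assuming the essay text is the input field (the exact prompt/context concatenation under use_full_context is not shown in this log):

```python
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3.5-mini-instruct")

def tokenize_fn(batch):
    # padding="longest" pads each batch to its longest sequence;
    # truncation=False keeps the full essay and supporting context.
    return tokenizer(batch["essay_text"], padding="longest", truncation=False)
```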
[2025-06-30 14:45:26,642][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 14:45:26,643][transformers.configuration_utils][INFO] - Model config Phi3Config {
"architectures": [
"Phi3ForCausalLM"
],
"attention_bias": false,
"attention_dropout": 0.0,
"auto_map": {
"AutoConfig": "configuration_phi3.Phi3Config",
"AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
},
"bos_token_id": 1,
"embd_pdrop": 0.0,
"eos_token_id": 32000,
"hidden_act": "silu",
"hidden_size": 3072,
"id2label": {
"0": "LABEL_0",
"1": "LABEL_1",
"2": "LABEL_2",
"3": "LABEL_3",
"4": "LABEL_4",
"5": "LABEL_5"
},
"initializer_range": 0.02,
"intermediate_size": 8192,
"label2id": {
"LABEL_0": 0,
"LABEL_1": 1,
"LABEL_2": 2,
"LABEL_3": 3,
"LABEL_4": 4,
"LABEL_5": 5
},
"max_position_embeddings": 131072,
"model_type": "phi3",
"num_attention_heads": 32,
"num_hidden_layers": 32,
"num_key_value_heads": 32,
"original_max_position_embeddings": 4096,
"pad_token_id": 32000,
"partial_rotary_factor": 1.0,
"resid_pdrop": 0.0,
"rms_norm_eps": 1e-05,
"rope_scaling": {
"long_factor": [
1.0800000429153442,
1.1100000143051147,
1.1399999856948853,
1.340000033378601,
1.5899999141693115,
1.600000023841858,
1.6200000047683716,
2.620000123977661,
3.2300000190734863,
3.2300000190734863,
4.789999961853027,
7.400000095367432,
7.700000286102295,
9.09000015258789,
12.199999809265137,
17.670000076293945,
24.46000099182129,
28.57000160217285,
30.420001983642578,
30.840002059936523,
32.590003967285156,
32.93000411987305,
42.320003509521484,
44.96000289916992,
50.340003967285156,
50.45000457763672,
57.55000305175781,
57.93000411987305,
58.21000289916992,
60.1400032043457,
62.61000442504883,
62.62000274658203,
62.71000289916992,
63.1400032043457,
63.1400032043457,
63.77000427246094,
63.93000411987305,
63.96000289916992,
63.970001220703125,
64.02999877929688,
64.06999969482422,
64.08000183105469,
64.12000274658203,
64.41000366210938,
64.4800033569336,
64.51000213623047,
64.52999877929688,
64.83999633789062
],
"short_factor": [
1.0,
1.0199999809265137,
1.0299999713897705,
1.0299999713897705,
1.0499999523162842,
1.0499999523162842,
1.0499999523162842,
1.0499999523162842,
1.0499999523162842,
1.0699999332427979,
1.0999999046325684,
1.1099998950958252,
1.1599998474121094,
1.1599998474121094,
1.1699998378753662,
1.2899998426437378,
1.339999794960022,
1.679999828338623,
1.7899998426437378,
1.8199998140335083,
1.8499997854232788,
1.8799997568130493,
1.9099997282028198,
1.9399996995925903,
1.9899996519088745,
2.0199997425079346,
2.0199997425079346,
2.0199997425079346,
2.0199997425079346,
2.0199997425079346,
2.0199997425079346,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0799996852874756,
2.0899996757507324,
2.189999580383301,
2.2199995517730713,
2.5899994373321533,
2.729999542236328,
2.749999523162842,
2.8399994373321533
],
"type": "longrope"
},
"rope_theta": 10000.0,
"sliding_window": 262144,
"tie_word_embeddings": false,
"torch_dtype": "bfloat16",
"transformers_version": "4.53.0",
"use_cache": true,
"vocab_size": 32064
}
[2025-06-30 14:45:26,643][transformers.modeling_utils][INFO] - loading weights file model.safetensors from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/model.safetensors.index.json
[2025-06-30 14:45:26,643][transformers.modeling_utils][INFO] - Will use torch_dtype=torch.bfloat16 as defined in model's config object
[2025-06-30 14:45:26,643][transformers.modeling_utils][INFO] - Instantiating Phi3ForSequenceClassification model under default dtype torch.bfloat16.
[2025-06-30 14:45:30,676][transformers.modeling_utils][INFO] - Some weights of the model checkpoint at microsoft/Phi-3.5-mini-instruct were not used when initializing Phi3ForSequenceClassification: ['lm_head.weight']
- This IS expected if you are initializing Phi3ForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).
- This IS NOT expected if you are initializing Phi3ForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).
[2025-06-30 14:45:30,676][transformers.modeling_utils][WARNING] - Some weights of Phi3ForSequenceClassification were not initialized from the model checkpoint at microsoft/Phi-3.5-mini-instruct and are newly initialized: ['score.weight']
You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.
[2025-06-30 14:45:31,386][__main__][INFO] - Initialized new PEFT model for ce loss
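The PEFT initialization above corresponds to wrapping the freshly headed classification model in a LoRA adapter with the hyperparameters from the config; a minimal sketch, assuming the standard peft API rather than the script's actual helper code:

```python
import torch
from peft import LoraConfig, TaskType, get_peft_model
from transformers import AutoModelForSequenceClassification

base = AutoModelForSequenceClassification.from_pretrained(
    "microsoft/Phi-3.5-mini-instruct",
    num_labels=6,                # one class per ENEM grade level
    torch_dtype=torch.bfloat16,  # matches bf16: true
)

lora_cfg = LoraConfig(
    task_type=TaskType.SEQ_CLS,  # keeps the new `score` head trainable
    r=8,
    lora_alpha=16,
    lora_dropout=0.05,
    target_modules="all-linear",
)
model = get_peft_model(base, lora_cfg)  # -> PeftModelForSequenceClassification
model.print_trainable_parameters()
```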
[2025-06-30 14:45:31,388][__main__][INFO] - None
[2025-06-30 14:45:31,389][transformers.training_args][INFO] - PyTorch: setting up devices
[2025-06-30 14:45:31,443][__main__][INFO] - Total steps: 620. Number of warmup steps: 62
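The two step counts in this log are consistent up to rounding: with 500 training examples, a per-device batch of 1, and 16 gradient-accumulation steps, one epoch is 500 / 16 = 31.25 optimizer steps. The script's estimate apparently floors this (31 x 20 epochs = 620 steps, so warmup_ratio 0.1 gives 62 warmup steps), while the Trainer below rounds up per epoch (32 x 20 = 640 total optimization steps).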
[2025-06-30 14:45:31,449][transformers.trainer][INFO] - You have loaded a model on multiple GPUs. `is_model_parallel` attribute will be force-set to `True` to avoid any unexpected behavior such as device placement mismatching.
[2025-06-30 14:45:31,473][transformers.trainer][INFO] - Using auto half precision backend
[2025-06-30 14:45:31,474][transformers.trainer][WARNING] - No label_names provided for model class `PeftModelForSequenceClassification`. Since `PeftModel` hides base models input arguments, if label_names is not given, label_names can't be set automatically within `Trainer`. Note that empty label_names list will be used instead.
[2025-06-30 14:45:31,475][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
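Both messages are expected with a PEFT-wrapped classifier: the extra dataset columns are simply dropped before forward(), and training proceeds here despite the empty label_names list. A hedged way to make the label column explicit and silence the first warning (parameter values are illustrative, not taken from the script):

```python
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="./results/",
    label_names=["labels"],  # explicit, since PeftModel hides the base model's signature
)
```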
[2025-06-30 14:45:31,486][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 14:45:31,486][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 14:45:31,486][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 14:45:52,902][transformers.trainer][INFO] - The following columns in the Training set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 14:45:52,935][transformers.trainer][INFO] - ***** Running training *****
[2025-06-30 14:45:52,935][transformers.trainer][INFO] - Num examples = 500
[2025-06-30 14:45:52,935][transformers.trainer][INFO] - Num Epochs = 20
[2025-06-30 14:45:52,935][transformers.trainer][INFO] - Instantaneous batch size per device = 1
[2025-06-30 14:45:52,935][transformers.trainer][INFO] - Total train batch size (w. parallel, distributed & accumulation) = 16
[2025-06-30 14:45:52,935][transformers.trainer][INFO] - Gradient Accumulation steps = 16
[2025-06-30 14:45:52,935][transformers.trainer][INFO] - Total optimization steps = 640
[2025-06-30 14:45:52,937][transformers.trainer][INFO] - Number of trainable parameters = 12,601,344
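The trainable-parameter count checks out against r=8 LoRA on every linear projection plus the new classification head, assuming the standard Phi-3 fused module shapes (an assumption based on the architecture, not printed in this log):

```python
r, hidden, inter, layers, num_labels = 8, 3072, 8192, 32, 6

# LoRA adds r * (d_in + d_out) parameters per adapted linear layer.
per_layer = r * (
    (hidden + 3 * hidden)    # qkv_proj:     3072 -> 9216
    + (hidden + hidden)      # o_proj:       3072 -> 3072
    + (hidden + 2 * inter)   # gate_up_proj: 3072 -> 16384
    + (inter + hidden)       # down_proj:    8192 -> 3072
)
total = layers * per_layer + num_labels * hidden  # plus the 6 x 3072 score head
print(total)  # 12601344 -- matches the logged count exactly
```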
[2025-06-30 14:49:35,664][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 14:49:35,667][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 14:49:35,667][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 14:49:35,667][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 14:49:56,674][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-32
[2025-06-30 14:49:57,428][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 14:53:40,100][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 14:53:40,103][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 14:53:40,103][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 14:53:40,103][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 14:54:01,118][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-64
[2025-06-30 14:54:01,581][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 14:54:01,812][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-32] due to args.save_total_limit
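From here on, a checkpoint is saved every 32 steps (once per epoch) and older ones are pruned. A sketch of the TrainingArguments this behavior implies (the exact values are inferred from the log, not shown in the config dump above):

```python
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="./results/",
    eval_strategy="epoch",
    save_strategy="epoch",
    save_total_limit=1,           # prune older checkpoints, as logged
    load_best_model_at_end=True,  # the best-QWK checkpoint is kept alongside
    metric_for_best_model="QWK",
    greater_is_better=True,
)
```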
[2025-06-30 14:57:44,293][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 14:57:44,296][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 14:57:44,296][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 14:57:44,296][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 14:58:05,303][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-96
[2025-06-30 14:58:05,798][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 14:58:06,002][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-64] due to args.save_total_limit
[2025-06-30 15:01:48,951][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 15:01:48,954][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 15:01:48,954][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 15:01:48,954][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 15:02:09,956][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-128
[2025-06-30 15:02:10,432][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 15:05:53,531][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 15:05:53,534][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 15:05:53,534][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 15:05:53,534][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 15:06:14,554][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-160
[2025-06-30 15:06:15,142][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 15:06:15,381][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-96] due to args.save_total_limit
[2025-06-30 15:06:15,391][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-128] due to args.save_total_limit
[2025-06-30 15:09:58,484][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 15:09:58,487][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 15:09:58,487][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 15:09:58,487][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 15:10:19,510][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-192
[2025-06-30 15:10:19,967][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 15:10:20,204][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-160] due to args.save_total_limit
[2025-06-30 15:14:02,702][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 15:14:02,705][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 15:14:02,705][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 15:14:02,705][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 15:14:23,725][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-224
[2025-06-30 15:14:24,234][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 15:18:07,038][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 15:18:07,041][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 15:18:07,041][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 15:18:07,041][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 15:18:28,051][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-256
[2025-06-30 15:18:28,563][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 15:18:28,799][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-224] due to args.save_total_limit
[2025-06-30 15:22:11,279][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 15:22:11,282][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 15:22:11,283][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 15:22:11,283][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 15:22:32,311][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-288
[2025-06-30 15:22:32,778][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 15:22:33,002][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-256] due to args.save_total_limit
[2025-06-30 15:26:15,673][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 15:26:15,676][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 15:26:15,676][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 15:26:15,676][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 15:26:36,804][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-320
[2025-06-30 15:26:37,264][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 15:26:37,510][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-192] due to args.save_total_limit
[2025-06-30 15:26:37,520][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-288] due to args.save_total_limit
[2025-06-30 15:30:20,667][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 15:30:20,670][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 15:30:20,671][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 15:30:20,671][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 15:30:41,885][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-352
[2025-06-30 15:30:42,351][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 15:34:26,334][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 15:34:26,337][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 15:34:26,337][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 15:34:26,337][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 15:34:47,359][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-384
[2025-06-30 15:34:47,818][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 15:34:47,819][transformers.configuration_utils][INFO] - Model config Phi3Config {...} (identical to the config dump above; repeated dump elided)
[2025-06-30 15:34:48,010][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-352] due to args.save_total_limit
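The save-then-delete pattern above is the trainer's checkpoint rotation. A hedged sketch of the arguments that would reproduce it; save_total_limit=1 is inferred from the deletions, and the eval/save cadence is an assumption:

from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="./results/",
    save_total_limit=1,           # each new checkpoint evicts the previous one
    load_best_model_at_end=True,  # the best checkpoint is tracked separately
    metric_for_best_model="QWK",
    greater_is_better=True,
    eval_strategy="epoch",        # assumed cadence
    save_strategy="epoch",
    per_device_eval_batch_size=16,
)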
[2025-06-30 15:38:30,492][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 15:38:30,495][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 15:38:30,495][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 15:38:30,495][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 15:38:51,500][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-416
[2025-06-30 15:38:51,952][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 15:38:51,952][transformers.configuration_utils][INFO] - Model config Phi3Config {...} (identical to the config dump above; repeated dump elided)
[2025-06-30 15:38:52,199][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-384] due to args.save_total_limit
[2025-06-30 15:42:34,501][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 15:42:34,504][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 15:42:34,504][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 15:42:34,504][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 15:42:55,519][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-448
[2025-06-30 15:42:56,007][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 15:42:56,008][transformers.configuration_utils][INFO] - Model config Phi3Config {...} (identical to the config dump above; repeated dump elided)
[2025-06-30 15:42:56,202][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-416] due to args.save_total_limit
[2025-06-30 15:46:39,270][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 15:46:39,273][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 15:46:39,273][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 15:46:39,273][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 15:47:00,283][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-480
[2025-06-30 15:47:00,781][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 15:47:00,782][transformers.configuration_utils][INFO] - Model config Phi3Config {...} (identical to the config dump above; repeated dump elided)
[2025-06-30 15:47:00,992][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-448] due to args.save_total_limit
[2025-06-30 15:47:01,003][transformers.trainer][INFO] -
Training completed. Do not forget to share your model on huggingface.co/models =)
[2025-06-30 15:47:01,004][transformers.trainer][INFO] - Loading best model from /workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-320 (score: 0.5557692307692308).
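The score 0.5558 is the validation QWK that drove best-model selection. A hedged sketch of quadratic weighted kappa with scikit-learn, on toy labels rather than the run's data:

from sklearn.metrics import cohen_kappa_score

y_true = [0, 1, 3, 3, 4, 5, 2, 3]   # toy gold grade indices (0-5)
y_pred = [0, 1, 3, 4, 4, 5, 3, 3]   # toy predictions
qwk = cohen_kappa_score(y_true, y_pred, weights="quadratic")
print(f"QWK = {qwk:.4f}")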
[2025-06-30 15:47:01,083][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/14-45-15/results/checkpoint-480] due to args.save_total_limit
[2025-06-30 15:47:01,094][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 15:47:01,097][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 15:47:01,097][transformers.trainer][INFO] - Num examples = 132
[2025-06-30 15:47:01,097][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 15:47:22,101][__main__][INFO] - Training completed successfully.
[2025-06-30 15:47:22,101][__main__][INFO] - Running on Test
[2025-06-30 15:47:22,101][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades. If essay_text, reference, essay_year, id, prompt, id_prompt, supporting_text, grades are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-06-30 15:47:22,104][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-06-30 15:47:22,104][transformers.trainer][INFO] - Num examples = 138
[2025-06-30 15:47:22,104][transformers.trainer][INFO] - Batch size = 16
[2025-06-30 15:47:44,203][__main__][INFO] - Test metrics: {'eval_loss': 2.1007742881774902, 'eval_model_preparation_time': 0.0078, 'eval_accuracy': 0.427536231884058, 'eval_RMSE': 62.96997213993625, 'eval_QWK': 0.28865979381443296, 'eval_HDIV': 0.10144927536231885, 'eval_Macro_F1': 0.2501665672495648, 'eval_Micro_F1': 0.427536231884058, 'eval_Weighted_F1': 0.40163022813760296, 'eval_TP_0': 0, 'eval_TN_0': 137, 'eval_FP_0': 0, 'eval_FN_0': 1, 'eval_TP_1': 16, 'eval_TN_1': 81, 'eval_FP_1': 22, 'eval_FN_1': 19, 'eval_TP_2': 0, 'eval_TN_2': 133, 'eval_FP_2': 0, 'eval_FN_2': 5, 'eval_TP_3': 33, 'eval_TN_3': 52, 'eval_FP_3': 35, 'eval_FN_3': 18, 'eval_TP_4': 6, 'eval_TN_4': 100, 'eval_FP_4': 12, 'eval_FN_4': 20, 'eval_TP_5': 4, 'eval_TN_5': 108, 'eval_FP_5': 10, 'eval_FN_5': 16, 'eval_runtime': 22.0913, 'eval_samples_per_second': 6.247, 'eval_steps_per_second': 0.407, 'epoch': 15.0}
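A hedged sketch of a compute_metrics function that would emit the core fields above. The 0-200 point mapping for RMSE (label x 40) is an assumption chosen to match the magnitude of the reported value; HDIV and the per-class TP/TN/FP/FN counts are omitted:

import numpy as np
from sklearn.metrics import cohen_kappa_score, f1_score, mean_squared_error

def compute_metrics(eval_pred):
    logits, labels = eval_pred
    preds = np.argmax(logits, axis=-1)
    return {
        "accuracy": float((preds == labels).mean()),
        "QWK": cohen_kappa_score(labels, preds, weights="quadratic"),
        "Macro_F1": f1_score(labels, preds, average="macro"),
        "Weighted_F1": f1_score(labels, preds, average="weighted"),
        # RMSE on the 0-200 grade scale (assumed label*40 mapping).
        "RMSE": float(np.sqrt(mean_squared_error(labels * 40, preds * 40))),
    }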
[2025-06-30 15:47:44,204][transformers.trainer][INFO] - Saving model checkpoint to ./results/best_model
[2025-06-30 15:47:44,636][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-06-30 15:47:44,637][transformers.configuration_utils][INFO] - Model config Phi3Config {...} (identical to the config dump above; repeated dump elided)
[2025-06-30 15:47:44,691][transformers.tokenization_utils_base][INFO] - chat template saved in ./results/best_model/chat_template.jinja
[2025-06-30 15:47:44,692][transformers.tokenization_utils_base][INFO] - tokenizer config file saved in ./results/best_model/tokenizer_config.json
[2025-06-30 15:47:44,692][transformers.tokenization_utils_base][INFO] - Special tokens file saved in ./results/best_model/special_tokens_map.json
[2025-06-30 15:47:44,717][__main__][INFO] - Model and tokenizer saved to ./results/best_model
[2025-06-30 15:47:44,722][__main__][INFO] - Fine Tuning Finished.
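With the adapter and tokenizer saved, a hedged sketch of reloading ./results/best_model for inference; AutoPeftModelForSequenceClassification and the bfloat16 dtype follow the run's setup, and the example text is arbitrary:

import torch
from peft import AutoPeftModelForSequenceClassification
from transformers import AutoTokenizer

model = AutoPeftModelForSequenceClassification.from_pretrained(
    "./results/best_model", num_labels=6, torch_dtype=torch.bfloat16
)
tokenizer = AutoTokenizer.from_pretrained("./results/best_model")

inputs = tokenizer("Texto de exemplo de redação.", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
print(logits.argmax(dim=-1).item())  # predicted grade index (0-5)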
[2025-06-30 15:47:45,230][__main__][INFO] - Total emissions: 0.3567 kg CO2eq
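The emissions line matches the reporting style of codecarbon; attributing it to that library is an assumption, since the log does not name the tool. A minimal sketch:

from codecarbon import EmissionsTracker

def fine_tune():
    pass  # stand-in for the training run logged above

tracker = EmissionsTracker()
tracker.start()
fine_tune()
emissions_kg = tracker.stop()  # returns kg CO2eq
print(f"Total emissions: {emissions_kg:.4f} kg CO2eq")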