jbcs2025_phi35-balanced-C3 / run_experiment.log
[2025-03-24 20:42:59,108][__main__][INFO] - cache_dir: /media/data/tmp
dataset:
  name: kamel-usp/aes_enem_dataset
  split: JBCS2025
training_params:
  seed: 42
  num_train_epochs: 20
  logging_steps: 100
  metric_for_best_model: QWK
  bf16: true
post_training_results:
  model_path: /workspace/jbcs2025/outputs/2025-03-24/19-26-35
experiments:
  model:
    name: microsoft/Phi-3.5-mini-instruct
    type: phi35_classification_lora
    num_labels: 6
    output_dir: ./results/phi35-balanced/C3
    logging_dir: ./logs/phi35-balanced/C3
    best_model_dir: ./results/phi35-balanced/C3/best_model
    lora_r: 8
    lora_dropout: 0.05
    lora_alpha: 16
    lora_target_modules: all-linear
  dataset:
    grade_index: 2
    training_id: phi35-balanced-C3
  training_params:
    weight_decay: 0.01
    warmup_ratio: 0.1
    learning_rate: 5.0e-05
    train_batch_size: 2
    eval_batch_size: 16
    gradient_accumulation_steps: 8
    gradient_checkpointing: false
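
The experiments.model block above configures LoRA adapters over every linear projection of the base model. A minimal sketch of how those values would map onto a peft LoraConfig, assuming the standard peft API and a sequence-classification task type (the repo's actual wiring is not shown in this log):

from peft import LoraConfig, TaskType

# Values copied from the experiments.model config above; SEQ_CLS is an
# assumption based on the Phi3ForSequenceClassification model that the
# log instantiates further down.
lora_config = LoraConfig(
    task_type=TaskType.SEQ_CLS,
    r=8,                          # lora_r
    lora_alpha=16,                # lora_alpha
    lora_dropout=0.05,            # lora_dropout
    target_modules="all-linear",  # adapts every linear layer except the head
)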
[2025-03-24 20:42:59,110][__main__][INFO] - Starting the Fine Tuning training process.
[2025-03-24 20:43:04,743][transformers.tokenization_utils_base][INFO] - loading file tokenizer.model from cache at /media/data/tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/tokenizer.model
[2025-03-24 20:43:04,744][transformers.tokenization_utils_base][INFO] - loading file tokenizer.json from cache at /media/data/tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/tokenizer.json
[2025-03-24 20:43:04,744][transformers.tokenization_utils_base][INFO] - loading file added_tokens.json from cache at /media/data/tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/added_tokens.json
[2025-03-24 20:43:04,744][transformers.tokenization_utils_base][INFO] - loading file special_tokens_map.json from cache at /media/data/tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/special_tokens_map.json
[2025-03-24 20:43:04,744][transformers.tokenization_utils_base][INFO] - loading file tokenizer_config.json from cache at /media/data/tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/tokenizer_config.json
[2025-03-24 20:43:04,744][transformers.tokenization_utils_base][INFO] - loading file chat_template.jinja from cache at None
[2025-03-24 20:43:04,821][transformers.tokenization_utils_base][INFO] - Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
[2025-03-24 20:43:04,874][__main__][INFO] - Tokenizer function parameters - Padding: longest; Truncation: False
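
A sketch of a preprocessing function consistent with those parameters, assuming the essays come from the essay_text column named in the column-ignore messages further down; the project's actual tokenization code is not part of this log:

def tokenize_batch(batch, tokenizer):
    # padding="longest" pads each batch only up to its longest member;
    # truncation=False keeps essays whole, which is safe here because
    # Phi-3.5-mini accepts up to 131072 positions.
    return tokenizer(batch["essay_text"], padding="longest", truncation=False)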
[2025-03-24 20:43:06,629][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /media/data/tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 20:43:06,630][transformers.configuration_utils][INFO] - Model config Phi3Config {
"architectures": [
"Phi3ForCausalLM"
],
"attention_bias": false,
"attention_dropout": 0.0,
"auto_map": {
"AutoConfig": "microsoft/Phi-3.5-mini-instruct--configuration_phi3.Phi3Config",
"AutoModelForCausalLM": "microsoft/Phi-3.5-mini-instruct--modeling_phi3.Phi3ForCausalLM"
},
"bos_token_id": 1,
"embd_pdrop": 0.0,
"eos_token_id": 32000,
"hidden_act": "silu",
"hidden_size": 3072,
"id2label": {
"0": 0,
"1": 40,
"2": 80,
"3": 120,
"4": 160,
"5": 200
},
"initializer_range": 0.02,
"intermediate_size": 8192,
"label2id": {
"0": 0,
"40": 1,
"80": 2,
"120": 3,
"160": 4,
"200": 5
},
"max_position_embeddings": 131072,
"model_type": "phi3",
"num_attention_heads": 32,
"num_hidden_layers": 32,
"num_key_value_heads": 32,
"original_max_position_embeddings": 4096,
"pad_token_id": 32000,
"partial_rotary_factor": 1.0,
"resid_pdrop": 0.0,
"rms_norm_eps": 1e-05,
"rope_scaling": {
"long_factor": [
1.0800000429153442,
1.1100000143051147,
1.1399999856948853,
1.340000033378601,
1.5899999141693115,
1.600000023841858,
1.6200000047683716,
2.620000123977661,
3.2300000190734863,
3.2300000190734863,
4.789999961853027,
7.400000095367432,
7.700000286102295,
9.09000015258789,
12.199999809265137,
17.670000076293945,
24.46000099182129,
28.57000160217285,
30.420001983642578,
30.840002059936523,
32.590003967285156,
32.93000411987305,
42.320003509521484,
44.96000289916992,
50.340003967285156,
50.45000457763672,
57.55000305175781,
57.93000411987305,
58.21000289916992,
60.1400032043457,
62.61000442504883,
62.62000274658203,
62.71000289916992,
63.1400032043457,
63.1400032043457,
63.77000427246094,
63.93000411987305,
63.96000289916992,
63.970001220703125,
64.02999877929688,
64.06999969482422,
64.08000183105469,
64.12000274658203,
64.41000366210938,
64.4800033569336,
64.51000213623047,
64.52999877929688,
64.83999633789062
],
"short_factor": [
1.0,
1.0199999809265137,
1.0299999713897705,
1.0299999713897705,
1.0499999523162842,
1.0499999523162842,
1.0499999523162842,
1.0499999523162842,
1.0499999523162842,
1.0699999332427979,
1.0999999046325684,
1.1099998950958252,
1.1599998474121094,
1.1599998474121094,
1.1699998378753662,
1.2899998426437378,
1.339999794960022,
1.679999828338623,
1.7899998426437378,
1.8199998140335083,
1.8499997854232788,
1.8799997568130493,
1.9099997282028198,
1.9399996995925903,
1.9899996519088745,
2.0199997425079346,
2.0199997425079346,
2.0199997425079346,
2.0199997425079346,
2.0199997425079346,
2.0199997425079346,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0299997329711914,
2.0799996852874756,
2.0899996757507324,
2.189999580383301,
2.2199995517730713,
2.5899994373321533,
2.729999542236328,
2.749999523162842,
2.8399994373321533
],
"type": "longrope"
},
"rope_theta": 10000.0,
"sliding_window": 262144,
"tie_word_embeddings": false,
"torch_dtype": "bfloat16",
"transformers_version": "4.50.0",
"use_cache": true,
"vocab_size": 32064
}
[2025-03-24 20:43:06,631][transformers.modeling_utils][INFO] - loading weights file model.safetensors from cache at /media/data/tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/model.safetensors.index.json
[2025-03-24 20:43:06,631][transformers.modeling_utils][INFO] - Will use torch_dtype=torch.bfloat16 as defined in model's config object
[2025-03-24 20:43:06,631][transformers.modeling_utils][INFO] - Instantiating Phi3ForSequenceClassification model under default dtype torch.bfloat16.
[2025-03-24 20:43:33,790][transformers.modeling_utils][INFO] - Some weights of the model checkpoint at microsoft/Phi-3.5-mini-instruct were not used when initializing Phi3ForSequenceClassification: ['lm_head.weight']
- This IS expected if you are initializing Phi3ForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).
- This IS NOT expected if you are initializing Phi3ForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).
[2025-03-24 20:43:33,790][transformers.modeling_utils][WARNING] - Some weights of Phi3ForSequenceClassification were not initialized from the model checkpoint at microsoft/Phi-3.5-mini-instruct and are newly initialized: ['score.weight']
You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.
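
The two messages above are the expected result of putting a fresh classification head on a causal-LM checkpoint: lm_head.weight is discarded and score.weight is newly initialized. A hedged sketch of a load call that would produce exactly this, using the label maps from the config dump above (the repo's exact call is an assumption):

import torch
from transformers import AutoModelForSequenceClassification

model = AutoModelForSequenceClassification.from_pretrained(
    "microsoft/Phi-3.5-mini-instruct",
    num_labels=6,
    # The six classes are ENEM competency grades in steps of 40 points,
    # matching the id2label/label2id maps in the config dump above.
    id2label={i: 40 * i for i in range(6)},  # {0: 0, 1: 40, ..., 5: 200}
    label2id={40 * i: i for i in range(6)},  # {0: 0, 40: 1, ..., 200: 5}
    torch_dtype=torch.bfloat16,
)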
[2025-03-24 20:44:03,711][__main__][INFO] - None
[2025-03-24 20:44:03,713][transformers.training_args][INFO] - PyTorch: setting up devices
[2025-03-24 20:44:03,736][__main__][INFO] - Total steps: 620. Number of warmup steps: 62
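
Those step counts follow from the configuration at the top of the log and the 500 training examples reported below; a quick check of the arithmetic:

import math

num_examples, per_device_bs, grad_accum = 500, 2, 8
epochs, warmup_ratio = 20, 0.1

batches_per_epoch = math.ceil(num_examples / per_device_bs)  # 250
updates_per_epoch = batches_per_epoch // grad_accum          # 31
total_steps = updates_per_epoch * epochs                     # 620
warmup_steps = math.ceil(total_steps * warmup_ratio)         # 62

The checkpoints below land at multiples of 32 rather than 31, most likely because the trailing partial accumulation batch of each epoch also triggers an optimizer step, adding one update per epoch over this floor estimate.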
[2025-03-24 20:44:03,744][transformers.trainer][INFO] - You have loaded a model on multiple GPUs. `is_model_parallel` attribute will be force-set to `True` to avoid any unexpected behavior such as device placement mismatching.
[2025-03-24 20:44:03,768][transformers.trainer][INFO] - Using auto half precision backend
[2025-03-24 20:44:03,769][transformers.trainer][WARNING] - No label_names provided for model class `PeftModelForSequenceClassification`. Since `PeftModel` hides base models input arguments, if label_names is not given, label_names can't be set automatically within `Trainer`. Note that empty label_names list will be used instead.
[2025-03-24 20:44:03,777][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 20:44:03,789][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 20:44:03,789][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 20:44:03,789][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 20:44:18,684][transformers][INFO] - {'accuracy': 0.1590909090909091, 'RMSE': 90.45340337332908, 'QWK': -0.11659732317409399, 'HDIV': 0.3712121212121212, 'Macro_F1': 0.06876138433515483, 'Micro_F1': 0.1590909090909091, 'Weighted_F1': 0.09521443947673455, 'Macro_F1_(ignoring_nan)': np.float64(0.13752276867030966)}
[2025-03-24 20:44:18,688][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
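
QWK (quadratic weighted kappa) is the model-selection metric named in training_params; it credits predictions by how close they land to the true grade on the ordinal scale. A minimal sketch of how it can be computed (the project's own metric implementation is not shown in this log):

from sklearn.metrics import cohen_kappa_score

def qwk(y_true, y_pred):
    # Quadratic weights penalize a prediction in proportion to the squared
    # distance between predicted and true class, which suits the ordered
    # grade classes 0..5.
    return cohen_kappa_score(y_true, y_pred, weights="quadratic")

The tensorboardX message above is benign: parentheses fall outside its allowed tag characters, so Macro_F1_(ignoring_nan) is recorded as Macro_F1__ignoring_nan_.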
[2025-03-24 20:44:18,951][transformers.trainer][INFO] - The following columns in the training set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 20:44:18,978][transformers.trainer][INFO] - ***** Running training *****
[2025-03-24 20:44:18,979][transformers.trainer][INFO] - Num examples = 500
[2025-03-24 20:44:18,979][transformers.trainer][INFO] - Num Epochs = 20
[2025-03-24 20:44:18,979][transformers.trainer][INFO] - Instantaneous batch size per device = 2
[2025-03-24 20:44:18,979][transformers.trainer][INFO] - Total train batch size (w. parallel, distributed & accumulation) = 16
[2025-03-24 20:44:18,979][transformers.trainer][INFO] - Gradient Accumulation steps = 8
[2025-03-24 20:44:18,979][transformers.trainer][INFO] - Total optimization steps = 620
[2025-03-24 20:44:18,981][transformers.trainer][INFO] - Number of trainable parameters = 12,601,344
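
The 12,601,344 trainable parameters are the LoRA adapters plus the new classification head. A back-of-the-envelope check, assuming target_modules="all-linear" matches the four (fused) linear projections in each of the 32 decoder layers, with dimensions taken from the config dump above; each adapted layer gains r * (fan_in + fan_out) parameters:

r, layers, hidden, inter, labels = 8, 32, 3072, 8192, 6

per_layer = (
    r * (hidden + 3 * hidden)   # qkv_proj: 3072 -> 9216 (fused q, k, v)
    + r * (hidden + hidden)     # o_proj: 3072 -> 3072
    + r * (hidden + 2 * inter)  # gate_up_proj: 3072 -> 16384 (fused gate, up)
    + r * (inter + hidden)      # down_proj: 8192 -> 3072
)
score_head = labels * hidden    # new classification head, trained in full

print(per_layer * layers + score_head)  # 12601344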
[2025-03-24 20:48:55,260][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 20:48:55,262][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 20:48:55,262][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 20:48:55,262][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 20:49:10,025][transformers][INFO] - {'accuracy': 0.44696969696969696, 'RMSE': 49.60449637488582, 'QWK': 0.27998710232158197, 'HDIV': 0.06818181818181823, 'Macro_F1': 0.2289231932089075, 'Micro_F1': 0.44696969696969696, 'Weighted_F1': 0.40948998091855243, 'Macro_F1_(ignoring_nan)': np.float64(0.34338478981336124)}
[2025-03-24 20:49:10,026][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 20:49:10,028][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-32
[2025-03-24 20:49:10,566][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 20:49:10,567][transformers.configuration_utils][INFO] - Model config Phi3Config (dump omitted: identical to the config above, minus the id2label/label2id maps)
[2025-03-24 20:53:54,817][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 20:53:54,819][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 20:53:54,819][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 20:53:54,819][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 20:54:09,579][transformers][INFO] - {'accuracy': 0.42424242424242425, 'RMSE': 50.090826596203314, 'QWK': 0.39994729444834864, 'HDIV': 0.05303030303030298, 'Macro_F1': 0.3048111463876343, 'Micro_F1': 0.42424242424242425, 'Weighted_F1': 0.4200706253158965, 'Macro_F1_(ignoring_nan)': np.float64(0.36577337566516116)}
[2025-03-24 20:54:09,580][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 20:54:09,583][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-64
[2025-03-24 20:54:10,112][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 20:54:10,113][transformers.configuration_utils][INFO] - Model config Phi3Config (repeated dump omitted)
[2025-03-24 20:54:18,295][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-32] due to args.save_total_limit
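
Across the log, each save keeps only the newest checkpoint plus the one with the best QWK so far, which is consistent with save_total_limit=1 combined with load_best_model_at_end=True (the Trainer protects the best checkpoint from rotation). A sketch of TrainingArguments that would reproduce this behavior; values not present in the config header are assumptions:

from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="./results/phi35-balanced/C3",
    num_train_epochs=20,
    per_device_train_batch_size=2,
    per_device_eval_batch_size=16,
    gradient_accumulation_steps=8,
    learning_rate=5e-05,
    weight_decay=0.01,
    warmup_ratio=0.1,
    bf16=True,
    eval_strategy="epoch",        # assumed from the once-per-epoch evaluations
    save_strategy="epoch",
    metric_for_best_model="QWK",
    greater_is_better=True,
    load_best_model_at_end=True,  # shields the best-QWK checkpoint
    save_total_limit=1,           # assumed from the deletion pattern
)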
[2025-03-24 20:58:54,327][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 20:58:54,329][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 20:58:54,329][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 20:58:54,329][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 20:59:08,976][transformers][INFO] - {'accuracy': 0.48484848484848486, 'RMSE': 47.86344211304794, 'QWK': 0.2894736842105263, 'HDIV': 0.06060606060606055, 'Macro_F1': 0.2556449232585596, 'Micro_F1': 0.48484848484848486, 'Weighted_F1': 0.42275768308826156, 'Macro_F1_(ignoring_nan)': np.float64(0.3834673848878394)}
[2025-03-24 20:59:08,977][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 20:59:08,979][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-96
[2025-03-24 20:59:09,896][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 20:59:09,898][transformers.configuration_utils][INFO] - Model config Phi3Config (repeated dump omitted)
[2025-03-24 21:03:54,571][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 21:03:54,573][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 21:03:54,573][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 21:03:54,573][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 21:04:09,095][transformers][INFO] - {'accuracy': 0.4090909090909091, 'RMSE': 50.572480239511265, 'QWK': 0.41413546487168695, 'HDIV': 0.05303030303030298, 'Macro_F1': 0.28829114355430147, 'Micro_F1': 0.4090909090909091, 'Weighted_F1': 0.3784693952158067, 'Macro_F1_(ignoring_nan)': np.float64(0.4324367153314522)}
[2025-03-24 21:04:09,095][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 21:04:09,098][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-128
[2025-03-24 21:04:10,039][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 21:04:10,041][transformers.configuration_utils][INFO] - Model config Phi3Config (repeated dump omitted)
[2025-03-24 21:04:18,198][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-64] due to args.save_total_limit
[2025-03-24 21:04:18,216][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-96] due to args.save_total_limit
[2025-03-24 21:08:54,403][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 21:08:54,406][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 21:08:54,406][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 21:08:54,406][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 21:09:08,977][transformers][INFO] - {'accuracy': 0.4393939393939394, 'RMSE': 52.91502622129181, 'QWK': 0.3044073364358062, 'HDIV': 0.09848484848484851, 'Macro_F1': 0.32087411293848583, 'Micro_F1': 0.4393939393939394, 'Weighted_F1': 0.38613799176305474, 'Macro_F1_(ignoring_nan)': np.float64(0.385048935526183)}
[2025-03-24 21:09:08,978][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 21:09:08,981][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-160
[2025-03-24 21:09:09,918][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 21:09:09,919][transformers.configuration_utils][INFO] - Model config Phi3Config (repeated dump omitted)
[2025-03-24 21:13:54,429][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 21:13:54,432][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 21:13:54,432][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 21:13:54,432][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 21:14:09,300][transformers][INFO] - {'accuracy': 0.4772727272727273, 'RMSE': 48.11602139777888, 'QWK': 0.297872340425532, 'HDIV': 0.0757575757575758, 'Macro_F1': 0.24605757469858466, 'Micro_F1': 0.4772727272727273, 'Weighted_F1': 0.39749065440354975, 'Macro_F1_(ignoring_nan)': np.float64(0.4921151493971693)}
[2025-03-24 21:14:09,300][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 21:14:09,303][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-192
[2025-03-24 21:14:10,029][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 21:14:10,030][transformers.configuration_utils][INFO] - Model config Phi3Config (repeated dump omitted)
[2025-03-24 21:14:18,203][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-160] due to args.save_total_limit
[2025-03-24 21:18:54,633][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 21:18:54,636][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 21:18:54,636][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 21:18:54,636][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 21:19:09,381][transformers][INFO] - {'accuracy': 0.4318181818181818, 'RMSE': 53.37119867948301, 'QWK': 0.3880691233330702, 'HDIV': 0.08333333333333337, 'Macro_F1': 0.3240266713102027, 'Micro_F1': 0.4318181818181818, 'Weighted_F1': 0.42270168051152773, 'Macro_F1_(ignoring_nan)': np.float64(0.38883200557224323)}
[2025-03-24 21:19:09,381][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 21:19:09,384][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-224
[2025-03-24 21:19:09,883][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 21:19:09,884][transformers.configuration_utils][INFO] - Model config Phi3Config (repeated dump omitted)
[2025-03-24 21:19:18,224][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-192] due to args.save_total_limit
[2025-03-24 21:23:54,422][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 21:23:54,425][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 21:23:54,425][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 21:23:54,425][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 21:24:09,094][transformers][INFO] - {'accuracy': 0.4696969696969697, 'RMSE': 50.572480239511265, 'QWK': 0.42535281010151027, 'HDIV': 0.09090909090909094, 'Macro_F1': 0.28053948161543096, 'Micro_F1': 0.4696969696969697, 'Weighted_F1': 0.4352282316839279, 'Macro_F1_(ignoring_nan)': np.float64(0.42080922242314645)}
[2025-03-24 21:24:09,094][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 21:24:09,097][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-256
[2025-03-24 21:24:09,594][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 21:24:09,595][transformers.configuration_utils][INFO] - Model config Phi3Config (repeated dump omitted)
[2025-03-24 21:24:17,819][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-128] due to args.save_total_limit
[2025-03-24 21:24:17,837][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-224] due to args.save_total_limit
[2025-03-24 21:28:54,045][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 21:28:54,048][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 21:28:54,048][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 21:28:54,048][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 21:29:08,798][transformers][INFO] - {'accuracy': 0.4696969696969697, 'RMSE': 49.848254581765296, 'QWK': 0.3706976744186047, 'HDIV': 0.09090909090909094, 'Macro_F1': 0.34991564807741277, 'Micro_F1': 0.4696969696969697, 'Weighted_F1': 0.4365355233002292, 'Macro_F1_(ignoring_nan)': np.float64(0.4198987776928953)}
[2025-03-24 21:29:08,799][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 21:29:08,801][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-288
[2025-03-24 21:29:09,303][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 21:29:09,304][transformers.configuration_utils][INFO] - Model config Phi3Config (repeated dump omitted)
[2025-03-24 21:33:53,782][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 21:33:53,784][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 21:33:53,784][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 21:33:53,785][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 21:34:08,502][transformers][INFO] - {'accuracy': 0.49242424242424243, 'RMSE': 48.865926655275736, 'QWK': 0.4835557673975215, 'HDIV': 0.08333333333333337, 'Macro_F1': 0.38035734552588485, 'Micro_F1': 0.49242424242424243, 'Weighted_F1': 0.4966709263951347, 'Macro_F1_(ignoring_nan)': np.float64(0.45642881463106183)}
[2025-03-24 21:34:08,503][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 21:34:08,506][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-320
[2025-03-24 21:34:09,050][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 21:34:09,051][transformers.configuration_utils][INFO] - Model config Phi3Config (repeated dump omitted)
[2025-03-24 21:34:17,728][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-256] due to args.save_total_limit
[2025-03-24 21:34:17,745][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-288] due to args.save_total_limit
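Note: the rotation pattern in this log (each new checkpoint evicts the previous one, while the best-scoring checkpoint survives) is consistent with a configuration like the sketch below. The values are an assumption read off the deletion pattern and the best-model selection later in the log, not taken from the training script:

```python
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="./results/phi35-balanced/C3",
    save_total_limit=1,            # keep only the newest checkpoint ...
    load_best_model_at_end=True,   # ... plus the best one, protected from rotation
    metric_for_best_model="QWK",
    greater_is_better=True,
)
```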
[2025-03-24 21:38:53,917][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 21:38:53,919][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 21:38:53,919][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 21:38:53,919][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 21:39:08,333][transformers][INFO] - {'accuracy': 0.49242424242424243, 'RMSE': 51.99067515459193, 'QWK': 0.44178108169612385, 'HDIV': 0.08333333333333337, 'Macro_F1': 0.3436145788491893, 'Micro_F1': 0.49242424242424243, 'Weighted_F1': 0.49456423903259894, 'Macro_F1_(ignoring_nan)': np.float64(0.41233749461902713)}
[2025-03-24 21:39:08,334][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 21:39:08,337][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-352
[2025-03-24 21:39:08,852][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 21:43:53,337][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 21:43:53,339][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 21:43:53,340][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 21:43:53,340][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 21:44:08,075][transformers][INFO] - {'accuracy': 0.5075757575757576, 'RMSE': 48.36728170358491, 'QWK': 0.4449673202614379, 'HDIV': 0.06818181818181823, 'Macro_F1': 0.3666403678419461, 'Micro_F1': 0.5075757575757576, 'Weighted_F1': 0.5011660844336598, 'Macro_F1_(ignoring_nan)': np.float64(0.4399684414103353)}
[2025-03-24 21:44:08,075][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 21:44:08,078][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-384
[2025-03-24 21:44:08,928][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 21:44:17,206][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-352] due to args.save_total_limit
[2025-03-24 21:48:53,616][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 21:48:53,619][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 21:48:53,619][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 21:48:53,619][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 21:49:08,293][transformers][INFO] - {'accuracy': 0.5075757575757576, 'RMSE': 47.60952285695233, 'QWK': 0.4913032726073696, 'HDIV': 0.0757575757575758, 'Macro_F1': 0.38534530771372877, 'Micro_F1': 0.5075757575757576, 'Weighted_F1': 0.5082073871547556, 'Macro_F1_(ignoring_nan)': np.float64(0.4624143692564745)}
[2025-03-24 21:49:08,294][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 21:49:08,297][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-416
[2025-03-24 21:49:08,795][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 21:49:17,214][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-320] due to args.save_total_limit
[2025-03-24 21:49:17,232][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-384] due to args.save_total_limit
[2025-03-24 21:53:53,501][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 21:53:53,504][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 21:53:53,504][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 21:53:53,504][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 21:54:08,103][transformers][INFO] - {'accuracy': 0.4621212121212121, 'RMSE': 51.286479995248555, 'QWK': 0.33373650911797526, 'HDIV': 0.09848484848484851, 'Macro_F1': 0.2843933843933844, 'Micro_F1': 0.4621212121212121, 'Weighted_F1': 0.43266329629965994, 'Macro_F1_(ignoring_nan)': np.float64(0.3412720612720613)}
[2025-03-24 21:54:08,103][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 21:54:08,106][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-448
[2025-03-24 21:54:08,612][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 21:58:53,309][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 21:58:53,311][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 21:58:53,311][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 21:58:53,311][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 21:59:07,872][transformers][INFO] - {'accuracy': 0.5303030303030303, 'RMSE': 48.617243480439775, 'QWK': 0.4149999999999999, 'HDIV': 0.06818181818181823, 'Macro_F1': 0.29559447503836783, 'Micro_F1': 0.5303030303030303, 'Weighted_F1': 0.49783307481240124, 'Macro_F1_(ignoring_nan)': np.float64(0.4433917125575517)}
[2025-03-24 21:59:07,873][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 21:59:07,876][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-480
[2025-03-24 21:59:08,399][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 21:59:16,525][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-448] due to args.save_total_limit
[2025-03-24 22:03:52,815][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 22:03:52,817][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 22:03:52,817][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 22:03:52,817][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 22:04:07,290][transformers][INFO] - {'accuracy': 0.49242424242424243, 'RMSE': 50.33222956847167, 'QWK': 0.34668940039784035, 'HDIV': 0.09090909090909094, 'Macro_F1': 0.34605040021706684, 'Micro_F1': 0.49242424242424243, 'Weighted_F1': 0.4617462784129451, 'Macro_F1_(ignoring_nan)': np.float64(0.41526048026048024)}
[2025-03-24 22:04:07,290][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 22:04:07,293][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-512
[2025-03-24 22:04:07,897][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 22:04:16,111][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-480] due to args.save_total_limit
[2025-03-24 22:08:52,265][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 22:08:52,267][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 22:08:52,267][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 22:08:52,267][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 22:09:06,893][transformers][INFO] - {'accuracy': 0.44696969696969696, 'RMSE': 50.33222956847167, 'QWK': 0.435135135135135, 'HDIV': 0.0757575757575758, 'Macro_F1': 0.33657152409447394, 'Micro_F1': 0.44696969696969696, 'Weighted_F1': 0.4453170011003548, 'Macro_F1_(ignoring_nan)': np.float64(0.4038858289133687)}
[2025-03-24 22:09:06,894][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 22:09:06,896][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-544
[2025-03-24 22:09:07,386][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 22:09:15,703][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-512] due to args.save_total_limit
[2025-03-24 22:13:52,069][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 22:13:52,072][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 22:13:52,072][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 22:13:52,072][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 22:14:06,774][transformers][INFO] - {'accuracy': 0.49242424242424243, 'RMSE': 50.33222956847167, 'QWK': 0.3815459110473458, 'HDIV': 0.09090909090909094, 'Macro_F1': 0.36160714285714285, 'Micro_F1': 0.49242424242424243, 'Weighted_F1': 0.47104978354978355, 'Macro_F1_(ignoring_nan)': np.float64(0.43392857142857144)}
[2025-03-24 22:14:06,775][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 22:14:06,778][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-576
[2025-03-24 22:14:07,646][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 22:14:15,989][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-544] due to args.save_total_limit
[2025-03-24 22:14:16,005][transformers.trainer][INFO] -
Training completed. Do not forget to share your model on huggingface.co/models =)
[2025-03-24 22:14:16,005][transformers.trainer][INFO] - Loading best model from /workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-416 (score: 0.4913032726073696).
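Note: the best-model bookkeeping is persisted in trainer_state.json inside each checkpoint, so the selection above can be verified offline (assuming the checkpoint directory from the log still exists):

```python
import json

ckpt = ("/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/"
        "phi35-balanced/C3/checkpoint-416")
with open(f"{ckpt}/trainer_state.json") as f:
    state = json.load(f)

print(state["best_metric"])            # 0.4913032726073696
print(state["best_model_checkpoint"])  # .../checkpoint-416
```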
[2025-03-24 22:14:37,691][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-24/20-42-59/results/phi35-balanced/C3/checkpoint-576] due to args.save_total_limit
[2025-03-24 22:14:37,709][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 22:14:37,712][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 22:14:37,712][transformers.trainer][INFO] - Num examples = 132
[2025-03-24 22:14:37,712][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 22:14:52,669][transformers][INFO] - {'accuracy': 0.5075757575757576, 'RMSE': 47.60952285695233, 'QWK': 0.4913032726073696, 'HDIV': 0.0757575757575758, 'Macro_F1': 0.38534530771372877, 'Micro_F1': 0.5075757575757576, 'Weighted_F1': 0.5082073871547556, 'Macro_F1_(ignoring_nan)': np.float64(0.4624143692564745)}
[2025-03-24 22:14:52,672][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 22:14:52,673][__main__][INFO] - Training completed successfully.
[2025-03-24 22:14:52,673][__main__][INFO] - Running on Test
[2025-03-24 22:14:52,674][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year. If id_prompt, reference, grades, supporting_text, prompt, id, essay_text, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-24 22:14:52,676][transformers.trainer][INFO] -
***** Running Evaluation *****
[2025-03-24 22:14:52,676][transformers.trainer][INFO] - Num examples = 138
[2025-03-24 22:14:52,676][transformers.trainer][INFO] - Batch size = 16
[2025-03-24 22:15:08,409][transformers][INFO] - {'accuracy': 0.3333333333333333, 'RMSE': 60.43321862224019, 'QWK': 0.23535620052770445, 'HDIV': 0.1159420289855072, 'Macro_F1': 0.26255872656551776, 'Micro_F1': 0.3333333333333333, 'Weighted_F1': 0.3336611749101599, 'Macro_F1_(ignoring_nan)': np.float64(0.3150704718786213)}
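Note the gap to the validation split: on the 138 test essays QWK drops from 0.4913 to 0.2354 and RMSE rises from about 47.6 to 60.4, a sizeable generalization gap for this C3 grader. From the logged numbers:

```python
val_qwk, test_qwk = 0.4913032726073696, 0.23535620052770445  # from this log
print(f"validation-to-test QWK drop: {val_qwk - test_qwk:.4f}")  # 0.2559
```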
[2025-03-24 22:15:08,410][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-24 22:15:08,412][transformers.trainer][INFO] - Saving model checkpoint to ./results/phi35-balanced/C3/best_model
[2025-03-24 22:15:09,301][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-24 22:15:17,688][__main__][INFO] - Fine Tuning Finished.
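Note: a minimal inference sketch for reusing the saved artifact, assuming ./results/phi35-balanced/C3/best_model holds the LoRA adapter (with the classification head stored via PEFT's modules_to_save) on top of microsoft/Phi-3.5-mini-instruct with num_labels=6:

```python
import torch
from peft import PeftModel
from transformers import AutoModelForSequenceClassification, AutoTokenizer

BASE = "microsoft/Phi-3.5-mini-instruct"
ADAPTER = "./results/phi35-balanced/C3/best_model"

tokenizer = AutoTokenizer.from_pretrained(BASE)
base = AutoModelForSequenceClassification.from_pretrained(
    BASE, num_labels=6, torch_dtype=torch.bfloat16)
model = PeftModel.from_pretrained(base, ADAPTER).eval()

inputs = tokenizer("essay text here", return_tensors="pt")
with torch.no_grad():
    class_id = model(**inputs).logits.argmax(-1).item()
print(class_id * 40)  # class index -> ENEM grade (0, 40, ..., 200)
```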