[2025-03-25 12:24:03,262][__main__][INFO] -
cache_dir: /media/data/tmp
dataset:
  name: kamel-usp/aes_enem_dataset
  split: JBCS2025
training_params:
  seed: 42
  num_train_epochs: 20
  logging_steps: 100
  metric_for_best_model: QWK
  bf16: true
post_training_results:
  model_path: /workspace/jbcs2025/outputs/2025-03-25/11-12-15
experiments:
  model:
    name: microsoft/Phi-3.5-mini-instruct
    type: phi35_classification_lora
    num_labels: 6
    output_dir: ./results/phi35-balanced/C5
    logging_dir: ./logs/phi35-balanced/C5
    best_model_dir: ./results/phi35-balanced/C5/best_model
    lora_r: 8
    lora_dropout: 0.05
    lora_alpha: 16
    lora_target_modules: all-linear
  dataset:
    grade_index: 4
  training_id: phi35-balanced-C5
  training_params:
    weight_decay: 0.01
    warmup_ratio: 0.1
    learning_rate: 5.0e-05
    train_batch_size: 2
    eval_batch_size: 16
    gradient_accumulation_steps: 8
    gradient_checkpointing: false
[2025-03-25 12:24:03,264][__main__][INFO] - Starting the Fine Tuning training process.
[2025-03-25 12:24:08,620][transformers.tokenization_utils_base][INFO] - loading file tokenizer.model from cache at /media/data/tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/tokenizer.model
[2025-03-25 12:24:08,620][transformers.tokenization_utils_base][INFO] - loading file tokenizer.json from cache at /media/data/tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/tokenizer.json
[2025-03-25 12:24:08,620][transformers.tokenization_utils_base][INFO] - loading file added_tokens.json from cache at /media/data/tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/added_tokens.json
[2025-03-25 12:24:08,620][transformers.tokenization_utils_base][INFO] - loading file special_tokens_map.json from cache at /media/data/tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/special_tokens_map.json
[2025-03-25 12:24:08,620][transformers.tokenization_utils_base][INFO] - loading file tokenizer_config.json from cache at /media/data/tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/tokenizer_config.json
[2025-03-25 12:24:08,620][transformers.tokenization_utils_base][INFO] - loading file chat_template.jinja from cache at None
[2025-03-25 12:24:08,693][transformers.tokenization_utils_base][INFO] - Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained.
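The `phi35_classification_lora` experiment above pairs a 6-way classification head with rank-8 LoRA adapters on every linear layer. A minimal sketch of that setup, assuming the standard `peft`/`transformers` APIs (the actual training script is not part of this log):

```python
# Hypothetical reconstruction of the setup implied by the config above;
# the real training script is not shown in this log.
import torch
from peft import LoraConfig, TaskType, get_peft_model
from transformers import AutoModelForSequenceClassification

base = AutoModelForSequenceClassification.from_pretrained(
    "microsoft/Phi-3.5-mini-instruct",
    num_labels=6,                    # experiments.model.num_labels
    torch_dtype=torch.bfloat16,      # training_params.bf16: true
)
lora = LoraConfig(
    task_type=TaskType.SEQ_CLS,      # keeps the new score head trainable
    r=8,                             # lora_r
    lora_alpha=16,                   # lora_alpha
    lora_dropout=0.05,               # lora_dropout
    target_modules="all-linear",     # lora_target_modules
)
model = get_peft_model(base, lora)
model.print_trainable_parameters()   # ~12.6M trainable, as the Trainer reports
```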
[2025-03-25 12:24:08,699][__main__][INFO] - Tokenizer function parameters- Padding:longest; Truncation: False
[2025-03-25 12:24:09,480][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /media/data/tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 12:24:09,481][transformers.configuration_utils][INFO] - Model config Phi3Config {
  "architectures": ["Phi3ForCausalLM"],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "auto_map": {
    "AutoConfig": "microsoft/Phi-3.5-mini-instruct--configuration_phi3.Phi3Config",
    "AutoModelForCausalLM": "microsoft/Phi-3.5-mini-instruct--modeling_phi3.Phi3ForCausalLM"
  },
  "bos_token_id": 1,
  "embd_pdrop": 0.0,
  "eos_token_id": 32000,
  "hidden_act": "silu",
  "hidden_size": 3072,
  "id2label": { "0": 0, "1": 40, "2": 80, "3": 120, "4": 160, "5": 200 },
  "initializer_range": 0.02,
  "intermediate_size": 8192,
  "label2id": { "0": 0, "40": 1, "80": 2, "120": 3, "160": 4, "200": 5 },
  "max_position_embeddings": 131072,
  "model_type": "phi3",
  "num_attention_heads": 32,
  "num_hidden_layers": 32,
  "num_key_value_heads": 32,
  "original_max_position_embeddings": 4096,
  "pad_token_id": 32000,
  "partial_rotary_factor": 1.0,
  "resid_pdrop": 0.0,
  "rms_norm_eps": 1e-05,
  "rope_scaling": {
    "long_factor": [
      1.0800000429153442, 1.1100000143051147, 1.1399999856948853, 1.340000033378601, 1.5899999141693115, 1.600000023841858,
      1.6200000047683716, 2.620000123977661, 3.2300000190734863, 3.2300000190734863, 4.789999961853027, 7.400000095367432,
      7.700000286102295, 9.09000015258789, 12.199999809265137, 17.670000076293945, 24.46000099182129, 28.57000160217285,
      30.420001983642578, 30.840002059936523, 32.590003967285156, 32.93000411987305, 42.320003509521484, 44.96000289916992,
      50.340003967285156, 50.45000457763672, 57.55000305175781, 57.93000411987305, 58.21000289916992, 60.1400032043457,
      62.61000442504883, 62.62000274658203, 62.71000289916992, 63.1400032043457, 63.1400032043457, 63.77000427246094,
      63.93000411987305, 63.96000289916992, 63.970001220703125, 64.02999877929688, 64.06999969482422, 64.08000183105469,
      64.12000274658203, 64.41000366210938, 64.4800033569336, 64.51000213623047, 64.52999877929688, 64.83999633789062
    ],
    "short_factor": [
      1.0, 1.0199999809265137, 1.0299999713897705, 1.0299999713897705, 1.0499999523162842, 1.0499999523162842,
      1.0499999523162842, 1.0499999523162842, 1.0499999523162842, 1.0699999332427979, 1.0999999046325684, 1.1099998950958252,
      1.1599998474121094, 1.1599998474121094, 1.1699998378753662, 1.2899998426437378, 1.339999794960022, 1.679999828338623,
      1.7899998426437378, 1.8199998140335083, 1.8499997854232788, 1.8799997568130493, 1.9099997282028198, 1.9399996995925903,
      1.9899996519088745, 2.0199997425079346, 2.0199997425079346, 2.0199997425079346, 2.0199997425079346, 2.0199997425079346,
      2.0199997425079346, 2.0299997329711914, 2.0299997329711914, 2.0299997329711914, 2.0299997329711914, 2.0299997329711914,
      2.0299997329711914, 2.0299997329711914, 2.0299997329711914, 2.0299997329711914, 2.0799996852874756, 2.0899996757507324,
      2.189999580383301, 2.2199995517730713, 2.5899994373321533, 2.729999542236328, 2.749999523162842, 2.8399994373321533
    ],
    "type": "longrope"
  },
  "rope_theta": 10000.0,
  "sliding_window": 262144,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.50.0",
  "use_cache": true,
  "vocab_size": 32064
}
[2025-03-25 12:24:09,481][transformers.modeling_utils][INFO] - loading weights file model.safetensors from cache at /media/data/tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/model.safetensors.index.json
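The `id2label`/`label2id` entries in the config dump show the six class ids mapped directly onto the ENEM grade scale (0, 40, ..., 200), so an argmax over the logits converts straight back to a grade. A small illustrative sketch:

```python
import torch

# Grade bands from the id2label mapping in the dump above (illustrative).
grades = [0, 40, 80, 120, 160, 200]
id2label = {i: g for i, g in enumerate(grades)}
label2id = {g: i for i, g in enumerate(grades)}

# After a forward pass, the argmax class id converts back to a 0-200 grade.
logits = torch.randn(1, len(grades))          # placeholder logits
pred_grade = id2label[int(logits.argmax(dim=-1))]
```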
[2025-03-25 12:24:09,482][transformers.modeling_utils][INFO] - Will use torch_dtype=torch.bfloat16 as defined in model's config object
[2025-03-25 12:24:09,482][transformers.modeling_utils][INFO] - Instantiating Phi3ForSequenceClassification model under default dtype torch.bfloat16.
[2025-03-25 12:24:31,520][transformers.modeling_utils][INFO] - Some weights of the model checkpoint at microsoft/Phi-3.5-mini-instruct were not used when initializing Phi3ForSequenceClassification: ['lm_head.weight']
- This IS expected if you are initializing Phi3ForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).
- This IS NOT expected if you are initializing Phi3ForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).
[2025-03-25 12:24:31,520][transformers.modeling_utils][WARNING] - Some weights of Phi3ForSequenceClassification were not initialized from the model checkpoint at microsoft/Phi-3.5-mini-instruct and are newly initialized: ['score.weight']
You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.
[2025-03-25 12:24:55,874][__main__][INFO] - None
[2025-03-25 12:24:55,876][transformers.training_args][INFO] - PyTorch: setting up devices
[2025-03-25 12:24:55,899][__main__][INFO] - Total steps: 620. Number of warmup steps: 62
[2025-03-25 12:24:55,906][transformers.trainer][INFO] - You have loaded a model on multiple GPUs. `is_model_parallel` attribute will be force-set to `True` to avoid any unexpected behavior such as device placement mismatching.
[2025-03-25 12:24:55,929][transformers.trainer][INFO] - Using auto half precision backend
[2025-03-25 12:24:55,930][transformers.trainer][WARNING] - No label_names provided for model class `PeftModelForSequenceClassification`. Since `PeftModel` hides base models input arguments, if label_names is not given, label_names can't be set automatically within `Trainer`. Note that empty label_names list will be used instead.
[2025-03-25 12:24:55,938][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: reference, supporting_text, essay_text, prompt, essay_year, id, grades, id_prompt. If reference, supporting_text, essay_text, prompt, essay_year, id, grades, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-25 12:24:55,949][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 12:24:55,949][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 12:24:55,949][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 12:25:10,916][transformers][INFO] - {'accuracy': 0.2196969696969697, 'RMSE': 79.77240352174657, 'QWK': -0.17865160895299015, 'HDIV': 0.2727272727272727, 'Macro_F1': 0.0818840579710145, 'Micro_F1': 0.2196969696969697, 'Weighted_F1': 0.12490118577075099, 'Macro_F1_(ignoring_nan)': np.float64(0.163768115942029)}
[2025-03-25 12:25:10,920][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
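The evaluation dict mixes classification and regression views of the same predictions: QWK is a quadratic-weighted kappa over the ordinal class ids, RMSE is taken on the 0-200 grade scale, and the two macro-F1 variants differ only in how undefined per-class scores are averaged (the logged pairs are consistent with a nan-as-zero mean versus a nan-skipping mean). A hedged sketch of these three, assuming scikit-learn >= 1.3; the project's own metric code and the exact HDIV definition are not in the log, so HDIV is omitted:

```python
import numpy as np
from sklearn.metrics import cohen_kappa_score, f1_score

GRADES = np.array([0, 40, 80, 120, 160, 200])   # class id -> grade

def grade_metrics(y_true, y_pred, num_labels=6):
    """Sketch of QWK/RMSE/macro-F1 for class ids in [0, num_labels)."""
    y_true, y_pred = np.asarray(y_true), np.asarray(y_pred)
    # Quadratic-weighted kappa over the ordinal class ids.
    qwk = cohen_kappa_score(y_true, y_pred, weights="quadratic")
    # RMSE on the grade scale rather than on the raw ids.
    diff = (GRADES[y_true] - GRADES[y_pred]).astype(float)
    rmse = float(np.sqrt(np.mean(diff ** 2)))
    # Per-class F1 with undefined classes marked nan (scikit-learn >= 1.3).
    per_class = f1_score(y_true, y_pred, labels=list(range(num_labels)),
                         average=None, zero_division=np.nan)
    return {
        "QWK": qwk,
        "RMSE": rmse,
        "Macro_F1": float(np.nansum(per_class) / num_labels),   # nan as 0
        "Macro_F1_(ignoring_nan)": float(np.nanmean(per_class)),
    }
```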
[2025-03-25 12:25:11,161][transformers.trainer][INFO] - The following columns in the training set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: reference, supporting_text, essay_text, prompt, essay_year, id, grades, id_prompt. If reference, supporting_text, essay_text, prompt, essay_year, id, grades, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-25 12:25:11,188][transformers.trainer][INFO] - ***** Running training *****
[2025-03-25 12:25:11,188][transformers.trainer][INFO] - Num examples = 500
[2025-03-25 12:25:11,188][transformers.trainer][INFO] - Num Epochs = 20
[2025-03-25 12:25:11,188][transformers.trainer][INFO] - Instantaneous batch size per device = 2
[2025-03-25 12:25:11,188][transformers.trainer][INFO] - Total train batch size (w. parallel, distributed & accumulation) = 16
[2025-03-25 12:25:11,188][transformers.trainer][INFO] - Gradient Accumulation steps = 8
[2025-03-25 12:25:11,188][transformers.trainer][INFO] - Total optimization steps = 620
[2025-03-25 12:25:11,190][transformers.trainer][INFO] - Number of trainable parameters = 12,601,344
[2025-03-25 12:30:02,782][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 12:30:02,782][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 12:30:02,782][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 12:30:17,339][transformers][INFO] - {'accuracy': 0.17424242424242425, 'RMSE': 80.075721739621, 'QWK': 0.03621708165406057, 'HDIV': 0.25757575757575757, 'Macro_F1': 0.10146198830409357, 'Micro_F1': 0.17424242424242425, 'Weighted_F1': 0.09156477051213893, 'Macro_F1_(ignoring_nan)': np.float64(0.20292397660818715)}
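The trainer's numbers can be checked by hand: 500 examples at per-device batch 2 with 8 accumulation steps give 31 full optimizer steps per epoch (620 over 20 epochs, matching the logged total, with 62 warmup steps at ratio 0.1), while the trailing partial accumulation chunk still triggers a step, which is why the checkpoints below land at multiples of 32. The 12,601,344 trainable parameters likewise match rank-8 LoRA on the fused Phi-3 projections plus the new score head. A worked sketch (layer shapes from the config dump above; the fused qkv_proj/gate_up_proj layout follows the Phi-3 reference implementation):

```python
# Step bookkeeping, reproducing the numbers logged above.
examples, per_device_bs, grad_accum, epochs = 500, 2, 8, 20
batches_per_epoch = examples // per_device_bs                 # 250
updates_per_epoch = batches_per_epoch // grad_accum           # 31 (floor)
total_steps = updates_per_epoch * epochs                      # 620, as logged
warmup_steps = int(0.1 * total_steps)                         # 62, as logged
# The trailing partial chunk (250 % 8 == 2 batches) still steps the
# optimizer, so epoch boundaries fall every 32 steps: checkpoint-32, -64, ...
steps_per_epoch_with_remainder = -(-batches_per_epoch // grad_accum)  # 32

# Trainable parameters: rank-8 LoRA (A: r x in, B: out x r) on each fused
# linear of one decoder block, times 32 layers, plus the 6-way score head.
r, hidden, inter = 8, 3072, 8192
qkv     = r * hidden + (3 * hidden) * r    # qkv_proj: 3072 -> 9216
o_proj  = r * hidden + hidden * r          # o_proj: 3072 -> 3072
gate_up = r * hidden + (2 * inter) * r     # gate_up_proj: 3072 -> 16384
down    = r * inter + hidden * r           # down_proj: 8192 -> 3072
per_layer = qkv + o_proj + gate_up + down  # 393,216
trainable = 32 * per_layer + hidden * 6    # 12,601,344
assert (total_steps, warmup_steps, trainable) == (620, 62, 12_601_344)
```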
[2025-03-25 12:30:17,341][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-32
[2025-03-25 12:30:17,829][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 12:35:15,789][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 12:35:15,789][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 12:35:15,789][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 12:35:30,015][transformers][INFO] - {'accuracy': 0.24242424242424243, 'RMSE': 70.06490497453707, 'QWK': 0.24075441686076227, 'HDIV': 0.16666666666666663, 'Macro_F1': 0.16837419518229546, 'Micro_F1': 0.24242424242424243, 'Weighted_F1': 0.2152421028023728, 'Macro_F1_(ignoring_nan)': np.float64(0.20204903421875456)}
[2025-03-25 12:35:30,019][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-64
[2025-03-25 12:35:30,603][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 12:35:37,037][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-32] due to args.save_total_limit
[2025-03-25 12:40:28,044][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 12:40:28,044][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 12:40:28,044][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 12:40:42,596][transformers][INFO] - {'accuracy': 0.29545454545454547, 'RMSE': 54.49492609130661, 'QWK': 0.4311145510835913, 'HDIV': 0.05303030303030298, 'Macro_F1': 0.23847544610384777, 'Micro_F1': 0.29545454545454547, 'Weighted_F1': 0.24895876275317505, 'Macro_F1_(ignoring_nan)': np.float64(0.28617053532461734)}
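The save-then-delete rhythm above (checkpoint-64 saved, checkpoint-32 deleted) is the Trainer's checkpoint rotation, with the best checkpoint by the configured metric protected from deletion. A sketch of TrainingArguments consistent with this run; the actual `save_total_limit` value is not printed in the log, so 1 is an assumption:

```python
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="./results/phi35-balanced/C5",
    num_train_epochs=20,
    per_device_train_batch_size=2,
    per_device_eval_batch_size=16,
    gradient_accumulation_steps=8,
    learning_rate=5e-5,
    weight_decay=0.01,
    warmup_ratio=0.1,
    bf16=True,
    logging_steps=100,
    eval_strategy="epoch",          # one evaluation per epoch, as in the log
    save_strategy="epoch",
    save_total_limit=1,             # assumption; the best checkpoint is kept anyway
    load_best_model_at_end=True,
    metric_for_best_model="QWK",
    greater_is_better=True,
)
```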
[2025-03-25 12:40:42,599][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-96
[2025-03-25 12:40:43,128][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 12:40:48,828][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-64] due to args.save_total_limit
[2025-03-25 12:45:40,252][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 12:45:40,252][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 12:45:40,252][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 12:45:54,619][transformers][INFO] - {'accuracy': 0.26515151515151514, 'RMSE': 68.66696087021411, 'QWK': 0.4372205173169662, 'HDIV': 0.15909090909090906, 'Macro_F1': 0.17046749116066365, 'Micro_F1': 0.26515151515151514, 'Weighted_F1': 0.17500122287761904, 'Macro_F1_(ignoring_nan)': np.float64(0.2557012367409955)}
[2025-03-25 12:45:54,622][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-128
[2025-03-25 12:45:55,188][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 12:46:01,623][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-96] due to args.save_total_limit
[2025-03-25 12:50:52,726][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 12:50:52,726][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 12:50:52,726][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 12:51:07,212][transformers][INFO] - {'accuracy': 0.2878787878787879, 'RMSE': 65.96601512532914, 'QWK': 0.4616712864088698, 'HDIV': 0.14393939393939392, 'Macro_F1': 0.2180973594888689, 'Micro_F1': 0.2878787878787879, 'Weighted_F1': 0.24242798606006155, 'Macro_F1_(ignoring_nan)': np.float64(0.2180973594888689)}
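The recurring tensorboardX notice is its tag sanitizer at work: characters outside letters, digits, `_`, `-`, `/`, and `.` are replaced with underscores before the summary is written. A sketch of the substitution; the exact pattern is an assumption that reproduces the renaming reported in the log:

```python
import re

# Assumed sanitizer: anything outside [-/\w.] becomes "_", which
# reproduces the renaming reported by tensorboardX above.
_INVALID_TAG_CHARACTERS = re.compile(r"[^-/\w\.]")

def clean_tag(name: str) -> str:
    return _INVALID_TAG_CHARACTERS.sub("_", name)

assert clean_tag("eval/Macro_F1_(ignoring_nan)") == "eval/Macro_F1__ignoring_nan_"
```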
[2025-03-25 12:51:07,215][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-160
[2025-03-25 12:51:08,745][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 12:51:15,423][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-128] due to args.save_total_limit
[2025-03-25 12:56:06,558][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 12:56:06,558][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 12:56:06,558][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 12:56:20,914][transformers][INFO] - {'accuracy': 0.2878787878787879, 'RMSE': 65.04077974857798, 'QWK': 0.44284262977117705, 'HDIV': 0.14393939393939392, 'Macro_F1': 0.23398413876518773, 'Micro_F1': 0.2878787878787879, 'Weighted_F1': 0.23765550107707453, 'Macro_F1_(ignoring_nan)': np.float64(0.23398413876518773)}
[2025-03-25 12:56:20,917][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-192
[2025-03-25 12:56:21,420][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 13:01:19,188][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 13:01:19,189][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 13:01:19,189][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 13:01:33,419][transformers][INFO] - {'accuracy': 0.30303030303030304, 'RMSE': 62.957417066111944, 'QWK': 0.4823218997361477, 'HDIV': 0.10606060606060608, 'Macro_F1': 0.24116650168839468, 'Micro_F1': 0.30303030303030304, 'Weighted_F1': 0.24695724541810343, 'Macro_F1_(ignoring_nan)': np.float64(0.24116650168839468)}
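The two Trainer notices that repeat before every evaluation are benign: the column message just reports that raw dataset fields (essay_text, grades, ...) are dropped before collation, and the `label_names` warning stems from PEFT hiding the base model's forward signature. A hedged sketch of the usual way to make both explicit (not necessarily what this run did):

```python
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="./results/phi35-balanced/C5",
    # PeftModel hides the base model's forward signature, so name the
    # label key explicitly instead of relying on signature inspection:
    label_names=["labels"],
    # Default behavior made explicit: raw dataset fields (essay_text,
    # supporting_text, ...) are pruned before collation.
    remove_unused_columns=True,
)
```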
[2025-03-25 13:01:33,422][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-224
[2025-03-25 13:01:34,096][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 13:01:40,528][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-160] due to args.save_total_limit
[2025-03-25 13:01:40,534][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-192] due to args.save_total_limit
[2025-03-25 13:06:31,625][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 13:06:31,626][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 13:06:31,626][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 13:06:45,998][transformers][INFO] - {'accuracy': 0.3333333333333333, 'RMSE': 57.52469825293227, 'QWK': 0.4667021843367075, 'HDIV': 0.06818181818181823, 'Macro_F1': 0.2829570150982398, 'Micro_F1': 0.3333333333333333, 'Weighted_F1': 0.321143903582232, 'Macro_F1_(ignoring_nan)': np.float64(0.2829570150982398)}
[2025-03-25 13:06:46,001][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-256
[2025-03-25 13:06:46,500][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 13:11:43,811][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 13:11:43,812][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 13:11:43,812][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 13:11:58,120][transformers][INFO] - {'accuracy': 0.3333333333333333, 'RMSE': 54.9379815626841, 'QWK': 0.5332575972735019, 'HDIV': 0.06060606060606055, 'Macro_F1': 0.27814158963507446, 'Micro_F1': 0.3333333333333333, 'Weighted_F1': 0.28024515114391496, 'Macro_F1_(ignoring_nan)': np.float64(0.27814158963507446)}
[2025-03-25 13:11:58,124][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-288
[2025-03-25 13:11:58,617][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 13:12:05,028][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-224] due to args.save_total_limit
[2025-03-25 13:12:05,035][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-256] due to args.save_total_limit
[2025-03-25 13:16:56,321][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 13:16:56,322][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 13:16:56,322][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 13:17:10,819][transformers][INFO] - {'accuracy': 0.3484848484848485, 'RMSE': 58.981250230796896, 'QWK': 0.5139088482857729, 'HDIV': 0.10606060606060608, 'Macro_F1': 0.26306055334557943, 'Micro_F1': 0.3484848484848485, 'Weighted_F1': 0.30461075851025127, 'Macro_F1_(ignoring_nan)': np.float64(0.3156726640146953)}
[2025-03-25 13:17:10,822][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-320
[2025-03-25 13:17:11,314][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 13:22:08,861][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 13:22:08,861][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 13:22:08,861][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 13:22:23,115][transformers][INFO] - {'accuracy': 0.3181818181818182, 'RMSE': 59.18640302493726, 'QWK': 0.4907896844465802, 'HDIV': 0.09848484848484851, 'Macro_F1': 0.25637357391295873, 'Micro_F1': 0.3181818181818182, 'Weighted_F1': 0.27171617579719604, 'Macro_F1_(ignoring_nan)': np.float64(0.25637357391295873)}
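For post-hoc analysis, the per-evaluation metric dicts can be pulled straight out of this log. A small hypothetical helper; the `np.float64(...)` wrappers come from printing NumPy scalars and must be stripped before `ast.literal_eval`:

```python
import ast
import re

METRIC_LINE = re.compile(r"\[transformers\]\[INFO\] - (\{.*\})")

def parse_eval_dicts(log_text: str):
    """Yield each eval-metrics dict found in the log text (hypothetical helper)."""
    for match in METRIC_LINE.finditer(log_text):
        raw = match.group(1)
        # np.float64(0.123) -> 0.123 so the dict is literal_eval-safe.
        raw = re.sub(r"np\.float64\(([^)]*)\)", r"\1", raw)
        yield ast.literal_eval(raw)

sample = ("[2025-03-25 13:22:23,115][transformers][INFO] - "
          "{'QWK': 0.4907896844465802, "
          "'Macro_F1_(ignoring_nan)': np.float64(0.25637357391295873)}")
print(list(parse_eval_dicts(sample)))
```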
[2025-03-25 13:22:23,117][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-352
[2025-03-25 13:22:23,600][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 13:22:23,600][transformers.configuration_utils][INFO] - Model config Phi3Config { ... duplicate of the dump printed at model-load time; omitted ... }
[2025-03-25 13:22:29,922][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-320] due to args.save_total_limit
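The save-then-delete pattern (each new checkpoint evicts the previous one, while checkpoint-288 survives every rotation) is what save_total_limit combined with best-checkpoint protection produces. A sketch of TrainingArguments consistent with this log; only metric_for_best_model="QWK" is confirmed by the run config, the strategy and limit values are inferred from the deletion pattern:

```python
from transformers import TrainingArguments

# With load_best_model_at_end=True the Trainer never deletes the checkpoint
# holding the best metric_for_best_model score (checkpoint-288 here), and
# rotates the remaining saves down to save_total_limit.
args = TrainingArguments(
    output_dir="./results/phi35-balanced/C5",
    eval_strategy="epoch",          # inferred: one evaluation per save
    save_strategy="epoch",
    save_total_limit=1,             # inferred from the deletion pattern
    metric_for_best_model="QWK",    # from the run config
    greater_is_better=True,
    load_best_model_at_end=True,
)
```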
[2025-03-25 13:27:21,135][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: reference, supporting_text, essay_text, prompt, essay_year, id, grades, id_prompt. If reference, supporting_text, essay_text, prompt, essay_year, id, grades, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-25 13:27:21,137][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 13:27:21,137][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 13:27:21,137][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 13:27:35,320][transformers][INFO] - {'accuracy': 0.3333333333333333, 'RMSE': 57.735026918962575, 'QWK': 0.5033656214086358, 'HDIV': 0.06818181818181823, 'Macro_F1': 0.27161705646113865, 'Micro_F1': 0.3333333333333333, 'Weighted_F1': 0.28464037970062067, 'Macro_F1_(ignoring_nan)': np.float64(0.27161705646113865)}
[2025-03-25 13:27:35,321][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-25 13:27:35,323][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-384
[2025-03-25 13:27:35,807][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 13:27:35,808][transformers.configuration_utils][INFO] - Model config Phi3Config { ... duplicate of the dump printed at model-load time; omitted ... }
[2025-03-25 13:27:42,121][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-352] due to args.save_total_limit
[2025-03-25 13:32:33,398][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: reference, supporting_text, essay_text, prompt, essay_year, id, grades, id_prompt. If reference, supporting_text, essay_text, prompt, essay_year, id, grades, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-25 13:32:33,400][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 13:32:33,400][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 13:32:33,401][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 13:32:47,620][transformers][INFO] - {'accuracy': 0.30303030303030304, 'RMSE': 61.791438065332464, 'QWK': 0.43218441033484456, 'HDIV': 0.09090909090909094, 'Macro_F1': 0.25084362139917693, 'Micro_F1': 0.30303030303030304, 'Weighted_F1': 0.27994388327721664, 'Macro_F1_(ignoring_nan)': np.float64(0.25084362139917693)}
[2025-03-25 13:32:47,620][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
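The tensorboardX warning fires because summary tags may only contain alphanumerics plus "_", "-", "/" and ".", so the parentheses in the metric key are replaced. A rough reimplementation of that clean-up (the actual tensorboardX pattern may differ slightly) reproduces the renamed tag from the log:

```python
import re

def clean_tag(name: str) -> str:
    # Replace every character outside [-/\w.] with an underscore,
    # mirroring how tensorboardX sanitizes illegal summary names.
    return re.sub(r"[^-/\w.]", "_", name)

assert clean_tag("eval/Macro_F1_(ignoring_nan)") == "eval/Macro_F1__ignoring_nan_"
```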
[2025-03-25 13:32:47,624][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-416
[2025-03-25 13:32:48,161][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 13:32:48,162][transformers.configuration_utils][INFO] - Model config Phi3Config { ... duplicate of the dump printed at model-load time; omitted ... }
[2025-03-25 13:32:52,654][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-384] due to args.save_total_limit
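The checkpoint numbers (288, 320, 352, ..., 448) advance by 32 optimizer steps per save. A back-of-the-envelope check under the assumption of one save per epoch (the training-set size is not shown in this excerpt, so this is purely indicative):

```python
# Effective batch size from the run config, times the 32-step spacing
# between checkpoints, estimates the training-set size if one save per
# epoch is assumed.
train_batch_size = 2
gradient_accumulation_steps = 8
effective_batch = train_batch_size * gradient_accumulation_steps  # 16
steps_per_epoch = 32  # spacing between consecutive checkpoints in the log
print(steps_per_epoch * effective_batch)  # ~512 training essays, if the assumption holds
```

Under the same assumption, checkpoint-448 would be epoch 14 of the configured 20, suggesting the run stopped early (e.g. via an early-stopping callback; not shown in this excerpt).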
[2025-03-25 13:37:43,765][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: reference, supporting_text, essay_text, prompt, essay_year, id, grades, id_prompt. If reference, supporting_text, essay_text, prompt, essay_year, id, grades, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-25 13:37:43,767][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 13:37:43,767][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 13:37:43,767][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 13:37:58,215][transformers][INFO] - {'accuracy': 0.25, 'RMSE': 64.8541487189571, 'QWK': 0.46677532013969725, 'HDIV': 0.09090909090909094, 'Macro_F1': 0.22235122119023046, 'Micro_F1': 0.25, 'Weighted_F1': 0.23538011695906436, 'Macro_F1_(ignoring_nan)': np.float64(0.22235122119023046)}
[2025-03-25 13:37:58,216][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-25 13:37:58,218][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-448
[2025-03-25 13:37:58,700][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 13:37:58,701][transformers.configuration_utils][INFO] - Model config Phi3Config { ... duplicate of the dump printed at model-load time; omitted ... }
[2025-03-25 13:38:04,920][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-416] due to args.save_total_limit
[2025-03-25 13:38:04,927][transformers.trainer][INFO] - Training completed. Do not forget to share your model on huggingface.co/models =)
[2025-03-25 13:38:04,927][transformers.trainer][INFO] - Loading best model from /workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-288 (score: 0.5332575972735019).
[2025-03-25 13:38:21,551][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-03-25/12-24-03/results/phi35-balanced/C5/checkpoint-448] due to args.save_total_limit
[2025-03-25 13:38:21,558][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: reference, supporting_text, essay_text, prompt, essay_year, id, grades, id_prompt. If reference, supporting_text, essay_text, prompt, essay_year, id, grades, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-25 13:38:21,561][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 13:38:21,561][transformers.trainer][INFO] - Num examples = 132
[2025-03-25 13:38:21,561][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 13:38:35,717][transformers][INFO] - {'accuracy': 0.3333333333333333, 'RMSE': 54.9379815626841, 'QWK': 0.5332575972735019, 'HDIV': 0.06060606060606055, 'Macro_F1': 0.27814158963507446, 'Micro_F1': 0.3333333333333333, 'Weighted_F1': 0.28024515114391496, 'Macro_F1_(ignoring_nan)': np.float64(0.27814158963507446)}
[2025-03-25 13:38:35,720][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-25 13:38:35,721][__main__][INFO] - Training completed successfully.
[2025-03-25 13:38:35,722][__main__][INFO] - Running on Test
[2025-03-25 13:38:35,722][transformers.trainer][INFO] - The following columns in the evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: reference, supporting_text, essay_text, prompt, essay_year, id, grades, id_prompt. If reference, supporting_text, essay_text, prompt, essay_year, id, grades, id_prompt are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
[2025-03-25 13:38:35,724][transformers.trainer][INFO] - ***** Running Evaluation *****
[2025-03-25 13:38:35,724][transformers.trainer][INFO] - Num examples = 138
[2025-03-25 13:38:35,724][transformers.trainer][INFO] - Batch size = 16
[2025-03-25 13:38:50,961][transformers][INFO] - {'accuracy': 0.35507246376811596, 'RMSE': 58.18511189623005, 'QWK': 0.5186813186813186, 'HDIV': 0.08695652173913049, 'Macro_F1': 0.2833097701518754, 'Micro_F1': 0.35507246376811596, 'Weighted_F1': 0.32384938563428267, 'Macro_F1_(ignoring_nan)': np.float64(0.2833097701518754)}
[2025-03-25 13:38:50,962][tensorboardX.summary][INFO] - Summary name eval/Macro_F1_(ignoring_nan) is illegal; using eval/Macro_F1__ignoring_nan_ instead.
[2025-03-25 13:38:50,964][transformers.trainer][INFO] - Saving model checkpoint to ./results/phi35-balanced/C5/best_model
[2025-03-25 13:38:51,433][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /root/.cache/huggingface/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
[2025-03-25 13:38:51,433][transformers.configuration_utils][INFO] - Model config Phi3Config { ... duplicate of the dump printed at model-load time; omitted ... }
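The tail of the log traces a fixed sequence: training ends, checkpoint-288 (best QWK = 0.5333) is restored, the 132 validation essays are re-scored with the best weights, the 138 test essays are scored, and the best model is written to best_model_dir. A sketch of that flow; the function and dataset names are hypothetical, not the project's identifiers:

```python
from transformers import Trainer

def run_post_training(trainer: Trainer, validation_ds, test_ds) -> dict:
    # With load_best_model_at_end=True, train() restores the best
    # checkpoint (checkpoint-288 here) before returning.
    trainer.train()
    val_metrics = trainer.evaluate(eval_dataset=validation_ds)   # Num examples = 132
    test_metrics = trainer.evaluate(eval_dataset=test_ds)        # Num examples = 138
    trainer.save_model("./results/phi35-balanced/C5/best_model")
    return {"validation": val_metrics, "test": test_metrics}
```

Consistent with this, the best-model validation QWK re-logged at 13:38:35 (0.5333) matches the score reported when checkpoint-288 was loaded, and the test QWK lands nearby at 0.5187.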
[2025-03-25 13:38:57,844][__main__][INFO] - Fine Tuning Finished.
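Since the run saves a LoRA adapter via PEFT rather than full weights, reloading the exported best model for later inference would look roughly like the following; this is an illustrative follow-up, not part of the logged run:

```python
from peft import AutoPeftModelForSequenceClassification
from transformers import AutoTokenizer

# Loads the base Phi-3.5 checkpoint and applies the saved LoRA adapter;
# num_labels=6 mirrors the classification head used during training.
model = AutoPeftModelForSequenceClassification.from_pretrained(
    "./results/phi35-balanced/C5/best_model",
    num_labels=6,
)
tokenizer = AutoTokenizer.from_pretrained("microsoft/Phi-3.5-mini-instruct")
```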