diff --git "a/run_experiment.log" "b/run_experiment.log" new file mode 100644--- /dev/null +++ "b/run_experiment.log" @@ -0,0 +1,2906 @@ +[2025-06-30 16:50:49,845][__main__][INFO] - cache_dir: /tmp/ +dataset: + name: kamel-usp/aes_enem_dataset + split: JBCS2025 +training_params: + seed: 42 + num_train_epochs: 20 + logging_steps: 100 + metric_for_best_model: QWK + bf16: true +bootstrap: + enabled: true + n_bootstrap: 10000 + bootstrap_seed: 42 + metrics: + - QWK + - Macro_F1 + - Weighted_F1 +post_training_results: + model_path: /workspace/jbcs2025/outputs/2025-03-24/20-42-59 +experiments: + model: + name: microsoft/Phi-3.5-mini-instruct + type: phi35_classification_lora + num_labels: 6 + output_dir: ./results/ + logging_dir: ./logs/ + best_model_dir: ./results/best_model + lora_r: 8 + lora_dropout: 0.05 + lora_alpha: 16 + lora_target_modules: all-linear + checkpoint_path: '' + tokenizer: + name: microsoft/Phi-3.5-mini-instruct + dataset: + grade_index: 3 + use_full_context: true + training_params: + weight_decay: 0.01 + warmup_ratio: 0.1 + learning_rate: 5.0e-05 + train_batch_size: 1 + eval_batch_size: 16 + gradient_accumulation_steps: 16 + gradient_checkpointing: false + +[2025-06-30 16:50:53,829][__main__][INFO] - GPU 0: NVIDIA H200 | TDP ≈ 700 W +[2025-06-30 16:50:53,830][__main__][INFO] - Starting the Fine Tuning training process. +[2025-06-30 16:50:59,984][transformers.tokenization_utils_base][INFO] - loading file tokenizer.model from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/tokenizer.model +[2025-06-30 16:50:59,985][transformers.tokenization_utils_base][INFO] - loading file tokenizer.json from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/tokenizer.json +[2025-06-30 16:50:59,985][transformers.tokenization_utils_base][INFO] - loading file added_tokens.json from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/added_tokens.json +[2025-06-30 16:50:59,985][transformers.tokenization_utils_base][INFO] - loading file special_tokens_map.json from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/special_tokens_map.json +[2025-06-30 16:50:59,985][transformers.tokenization_utils_base][INFO] - loading file tokenizer_config.json from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/tokenizer_config.json +[2025-06-30 16:50:59,985][transformers.tokenization_utils_base][INFO] - loading file chat_template.jinja from cache at None +[2025-06-30 16:51:00,042][transformers.tokenization_utils_base][INFO] - Special tokens have been added in the vocabulary, make sure the associated word embeddings are fine-tuned or trained. 
+[2025-06-30 16:51:00,048][__main__][INFO] - Tokenizer function parameters - Padding: longest; Truncation: False; Use Full Context: True
+[2025-06-30 16:51:01,718][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 16:51:01,719][transformers.configuration_utils][INFO] - Model config Phi3Config {
+  "architectures": ["Phi3ForCausalLM"],
+  "attention_bias": false,
+  "attention_dropout": 0.0,
+  "auto_map": {
+    "AutoConfig": "configuration_phi3.Phi3Config",
+    "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
+  },
+  "bos_token_id": 1,
+  "embd_pdrop": 0.0,
+  "eos_token_id": 32000,
+  "hidden_act": "silu",
+  "hidden_size": 3072,
+  "id2label": {"0": "LABEL_0", "1": "LABEL_1", "2": "LABEL_2", "3": "LABEL_3", "4": "LABEL_4", "5": "LABEL_5"},
+  "initializer_range": 0.02,
+  "intermediate_size": 8192,
+  "label2id": {"LABEL_0": 0, "LABEL_1": 1, "LABEL_2": 2, "LABEL_3": 3, "LABEL_4": 4, "LABEL_5": 5},
+  "max_position_embeddings": 131072,
+  "model_type": "phi3",
+  "num_attention_heads": 32,
+  "num_hidden_layers": 32,
+  "num_key_value_heads": 32,
+  "original_max_position_embeddings": 4096,
+  "pad_token_id": 32000,
+  "partial_rotary_factor": 1.0,
+  "resid_pdrop": 0.0,
+  "rms_norm_eps": 1e-05,
+  "rope_scaling": {
+    "long_factor": [
+      1.0800000429153442, 1.1100000143051147, 1.1399999856948853, 1.340000033378601,
+      1.5899999141693115, 1.600000023841858, 1.6200000047683716, 2.620000123977661,
+      3.2300000190734863, 3.2300000190734863, 4.789999961853027, 7.400000095367432,
+      7.700000286102295, 9.09000015258789, 12.199999809265137, 17.670000076293945,
+      24.46000099182129, 28.57000160217285, 30.420001983642578, 30.840002059936523,
+      32.590003967285156, 32.93000411987305, 42.320003509521484, 44.96000289916992,
+      50.340003967285156, 50.45000457763672, 57.55000305175781, 57.93000411987305,
+      58.21000289916992, 60.1400032043457, 62.61000442504883, 62.62000274658203,
+      62.71000289916992, 63.1400032043457, 63.1400032043457, 63.77000427246094,
+      63.93000411987305, 63.96000289916992, 63.970001220703125, 64.02999877929688,
+      64.06999969482422, 64.08000183105469, 64.12000274658203, 64.41000366210938,
+      64.4800033569336, 64.51000213623047, 64.52999877929688, 64.83999633789062
+    ],
+    "short_factor": [
+      1.0, 1.0199999809265137, 1.0299999713897705, 1.0299999713897705,
+      1.0499999523162842, 1.0499999523162842, 1.0499999523162842, 1.0499999523162842,
+      1.0499999523162842, 1.0699999332427979, 1.0999999046325684, 1.1099998950958252,
+      1.1599998474121094, 1.1599998474121094, 1.1699998378753662, 1.2899998426437378,
+      1.339999794960022, 1.679999828338623, 1.7899998426437378, 1.8199998140335083,
+      1.8499997854232788, 1.8799997568130493, 1.9099997282028198, 1.9399996995925903,
+      1.9899996519088745, 2.0199997425079346, 2.0199997425079346, 2.0199997425079346,
+      2.0199997425079346, 2.0199997425079346, 2.0199997425079346, 2.0299997329711914,
+      2.0299997329711914, 2.0299997329711914, 2.0299997329711914, 2.0299997329711914,
+      2.0299997329711914, 2.0299997329711914, 2.0299997329711914, 2.0299997329711914,
+      2.0799996852874756, 2.0899996757507324, 2.189999580383301, 2.2199995517730713,
+      2.5899994373321533, 2.729999542236328, 2.749999523162842, 2.8399994373321533
+    ],
+    "type": "longrope"
+  },
+  "rope_theta": 10000.0,
+  "sliding_window": 262144,
+  "tie_word_embeddings": false,
+  "torch_dtype": "bfloat16",
+  "transformers_version": "4.53.0",
+  "use_cache": true,
+  "vocab_size": 32064
+}
+
+[2025-06-30 16:51:01,719][transformers.modeling_utils][INFO] - loading weights file model.safetensors from cache at /tmp/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/model.safetensors.index.json
+[2025-06-30 16:51:01,720][transformers.modeling_utils][INFO] - Will use torch_dtype=torch.bfloat16 as defined in model's config object
+[2025-06-30 16:51:01,720][transformers.modeling_utils][INFO] - Instantiating Phi3ForSequenceClassification model under default dtype torch.bfloat16.
+[2025-06-30 16:51:05,324][transformers.modeling_utils][INFO] - Some weights of the model checkpoint at microsoft/Phi-3.5-mini-instruct were not used when initializing Phi3ForSequenceClassification: ['lm_head.weight']
+- This IS expected if you are initializing Phi3ForSequenceClassification from the checkpoint of a model trained on another task or with another architecture (e.g. initializing a BertForSequenceClassification model from a BertForPreTraining model).
+- This IS NOT expected if you are initializing Phi3ForSequenceClassification from the checkpoint of a model that you expect to be exactly identical (initializing a BertForSequenceClassification model from a BertForSequenceClassification model).
+[2025-06-30 16:51:05,324][transformers.modeling_utils][WARNING] - Some weights of Phi3ForSequenceClassification were not initialized from the model checkpoint at microsoft/Phi-3.5-mini-instruct and are newly initialized: ['score.weight']
+You should probably TRAIN this model on a down-stream task to be able to use it for predictions and inference.
+[2025-06-30 16:51:05,983][__main__][INFO] - Initialized new PEFT model for ce loss
+[2025-06-30 16:51:05,985][__main__][INFO] - None
+[2025-06-30 16:51:05,986][transformers.training_args][INFO] - PyTorch: setting up devices
+[2025-06-30 16:51:06,016][__main__][INFO] - Total steps: 620. Number of warmup steps: 62
+[2025-06-30 16:51:06,022][transformers.trainer][INFO] - You have loaded a model on multiple GPUs. `is_model_parallel` attribute will be force-set to `True` to avoid any unexpected behavior such as device placement mismatching.
+[2025-06-30 16:51:06,045][transformers.trainer][INFO] - Using auto half precision backend
+[2025-06-30 16:51:06,046][transformers.trainer][WARNING] - No label_names provided for model class `PeftModelForSequenceClassification`. Since `PeftModel` hides base models input arguments, if label_names is not given, label_names can't be set automatically within `Trainer`. Note that empty label_names list will be used instead.
+[2025-06-30 16:51:06,047][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year. If grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
+[2025-06-30 16:51:06,058][transformers.trainer][INFO] -
+***** Running Evaluation *****
+[2025-06-30 16:51:06,058][transformers.trainer][INFO] -   Num examples = 132
+[2025-06-30 16:51:06,058][transformers.trainer][INFO] -   Batch size = 16
+[2025-06-30 16:51:23,878][transformers.trainer][INFO] - The following columns in the Training set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year. If grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
+[2025-06-30 16:51:23,911][transformers.trainer][INFO] - ***** Running training *****
+[2025-06-30 16:51:23,911][transformers.trainer][INFO] -   Num examples = 500
+[2025-06-30 16:51:23,911][transformers.trainer][INFO] -   Num Epochs = 20
+[2025-06-30 16:51:23,911][transformers.trainer][INFO] -   Instantaneous batch size per device = 1
+[2025-06-30 16:51:23,911][transformers.trainer][INFO] -   Total train batch size (w. parallel, distributed & accumulation) = 16
+[2025-06-30 16:51:23,911][transformers.trainer][INFO] -   Gradient Accumulation steps = 16
+[2025-06-30 16:51:23,911][transformers.trainer][INFO] -   Total optimization steps = 640
+[2025-06-30 16:51:23,913][transformers.trainer][INFO] -   Number of trainable parameters = 12,601,344
+[2025-06-30 16:54:30,648][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year. If grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
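The step counts reported above can be sanity-checked by hand. A short sketch, assuming the standard Trainer bookkeeping (500 training examples, per-device batch 1, gradient accumulation 16, 20 epochs); the rounding difference is our inference, not stated in the log:

# Back-of-the-envelope check of the logged step counts (assumption: standard
# HuggingFace Trainer bookkeeping; the rounding explanation is an inference).
import math

num_examples, epochs = 500, 20
effective_batch = 1 * 16  # per-device batch x gradient accumulation steps
# Trainer rounds steps-per-epoch up: ceil(500 / 16) = 32, so 32 * 20 = 640,
# matching "Total optimization steps = 640" above.
trainer_total = math.ceil(num_examples / effective_batch) * epochs
# The script's own "Total steps: 620" apparently rounds down instead:
# (500 // 16) * 20 = 620, giving warmup = int(0.1 * 620) = 62 steps.
script_total = (num_examples // effective_batch) * epochs
warmup_steps = int(0.1 * script_total)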
+[2025-06-30 16:54:30,651][transformers.trainer][INFO] -
+***** Running Evaluation *****
+[2025-06-30 16:54:30,651][transformers.trainer][INFO] -   Num examples = 132
+[2025-06-30 16:54:30,651][transformers.trainer][INFO] -   Batch size = 16
+[2025-06-30 16:54:48,106][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-32
+[2025-06-30 16:54:48,678][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 16:57:55,201][transformers.trainer][INFO] -
+***** Running Evaluation *****
+[2025-06-30 16:57:55,201][transformers.trainer][INFO] -   Num examples = 132
+[2025-06-30 16:57:55,201][transformers.trainer][INFO] -   Batch size = 16
+[2025-06-30 16:58:12,663][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-64
+[2025-06-30 16:58:13,102][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 16:58:13,299][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-32] due to args.save_total_limit
+[2025-06-30 17:01:19,522][transformers.trainer][INFO] -
+***** Running Evaluation *****
+[2025-06-30 17:01:19,522][transformers.trainer][INFO] -   Num examples = 132
+[2025-06-30 17:01:19,522][transformers.trainer][INFO] -   Batch size = 16
+[2025-06-30 17:01:36,953][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-96
+[2025-06-30 17:01:37,387][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:01:37,590][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-64] due to args.save_total_limit
+[2025-06-30 17:04:44,183][transformers.trainer][INFO] -
+***** Running Evaluation *****
+[2025-06-30 17:04:44,183][transformers.trainer][INFO] -   Num examples = 132
+[2025-06-30 17:04:44,184][transformers.trainer][INFO] -   Batch size = 16
+[2025-06-30 17:05:01,601][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-128
+[2025-06-30 17:05:02,059][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:08:08,521][transformers.trainer][INFO] -
+***** Running Evaluation *****
+[2025-06-30 17:08:08,521][transformers.trainer][INFO] -   Num examples = 132
+[2025-06-30 17:08:08,521][transformers.trainer][INFO] -   Batch size = 16
+[2025-06-30 17:08:25,954][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-160
+[2025-06-30 17:08:26,414][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:08:26,597][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-96] due to args.save_total_limit
+[2025-06-30 17:08:26,605][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-128] due to args.save_total_limit
+[2025-06-30 17:11:33,480][transformers.trainer][INFO] -
+***** Running Evaluation *****
+[2025-06-30 17:11:33,480][transformers.trainer][INFO] -   Num examples = 132
+[2025-06-30 17:11:33,480][transformers.trainer][INFO] -   Batch size = 16
+[2025-06-30 17:11:50,896][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-192
+[2025-06-30 17:11:51,334][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:11:51,500][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-160] due to args.save_total_limit
+[2025-06-30 17:14:57,998][transformers.trainer][INFO] -
+***** Running Evaluation *****
+[2025-06-30 17:14:57,998][transformers.trainer][INFO] -   Num examples = 132
+[2025-06-30 17:14:57,998][transformers.trainer][INFO] -   Batch size = 16
+[2025-06-30 17:15:15,420][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-224
+[2025-06-30 17:15:15,873][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:15:16,103][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-192] due to args.save_total_limit
+[2025-06-30 17:18:22,673][transformers.trainer][INFO] -
+***** Running Evaluation *****
+[2025-06-30 17:18:22,673][transformers.trainer][INFO] -   Num examples = 132
+[2025-06-30 17:18:22,673][transformers.trainer][INFO] -   Batch size = 16
+[2025-06-30 17:18:40,098][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-256
+[2025-06-30 17:18:40,536][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:21:47,307][transformers.trainer][INFO] -
+***** Running Evaluation *****
+[2025-06-30 17:21:47,307][transformers.trainer][INFO] -   Num examples = 132
+[2025-06-30 17:21:47,307][transformers.trainer][INFO] -   Batch size = 16
+[2025-06-30 17:22:04,756][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-288
+[2025-06-30 17:22:05,217][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:22:05,405][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-256] due to args.save_total_limit
+[2025-06-30 17:25:12,453][transformers.trainer][INFO] - 
+***** Running Evaluation *****
+[2025-06-30 17:25:12,453][transformers.trainer][INFO] - Num examples = 132
+[2025-06-30 17:25:12,453][transformers.trainer][INFO] - Batch size = 16
+[2025-06-30 17:25:29,886][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-320
+[2025-06-30 17:25:30,334][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:25:30,335][transformers.configuration_utils][INFO] - Model config Phi3Config (same Phi3Config JSON as logged at start-up)
+[2025-06-30 17:25:30,501][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-288] due to args.save_total_limit
+[2025-06-30 17:28:37,509][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year. If grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
+[2025-06-30 17:28:37,512][transformers.trainer][INFO] - 
+***** Running Evaluation *****
+[2025-06-30 17:28:37,512][transformers.trainer][INFO] - Num examples = 132
+[2025-06-30 17:28:37,512][transformers.trainer][INFO] - Batch size = 16
+[2025-06-30 17:28:54,944][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-352
+[2025-06-30 17:28:55,404][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:28:55,405][transformers.configuration_utils][INFO] - Model config Phi3Config (same Phi3Config JSON as logged at start-up)
+[2025-06-30 17:28:55,605][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-320] due to args.save_total_limit
+[2025-06-30 17:32:02,688][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year. If grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
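The "Deleting older checkpoint ... due to args.save_total_limit" entries above come from Trainer's checkpoint rotation: only the newest checkpoint is kept, plus the best-so-far one (checkpoint-384 below is never rotated out because it holds the best QWK). A minimal sketch of TrainingArguments consistent with the run configuration; the per-epoch eval/save strategy and save_total_limit=1 are inferred from the log's cadence, not read from the project's code:

    from transformers import TrainingArguments

    training_args = TrainingArguments(
        output_dir="./results/",
        num_train_epochs=20,
        per_device_train_batch_size=1,
        per_device_eval_batch_size=16,
        gradient_accumulation_steps=16,
        learning_rate=5e-5,
        weight_decay=0.01,
        warmup_ratio=0.1,
        bf16=True,
        eval_strategy="epoch",        # assumption: evaluate once per epoch
        save_strategy="epoch",        # one checkpoint per epoch, rotated as logged
        save_total_limit=1,           # inferred: older checkpoints deleted immediately
        load_best_model_at_end=True,  # best checkpoint is exempt from rotation
        metric_for_best_model="QWK",
        greater_is_better=True,
    )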
+[2025-06-30 17:32:02,691][transformers.trainer][INFO] - 
+***** Running Evaluation *****
+[2025-06-30 17:32:02,691][transformers.trainer][INFO] - Num examples = 132
+[2025-06-30 17:32:02,691][transformers.trainer][INFO] - Batch size = 16
+[2025-06-30 17:32:20,135][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-384
+[2025-06-30 17:32:20,588][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:32:20,589][transformers.configuration_utils][INFO] - Model config Phi3Config (same Phi3Config JSON as logged at start-up)
+[2025-06-30 17:32:20,794][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-224] due to args.save_total_limit
+[2025-06-30 17:32:20,802][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-352] due to args.save_total_limit
+[2025-06-30 17:35:27,260][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year. If grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
+[2025-06-30 17:35:27,263][transformers.trainer][INFO] - 
+***** Running Evaluation *****
+[2025-06-30 17:35:27,263][transformers.trainer][INFO] - Num examples = 132
+[2025-06-30 17:35:27,263][transformers.trainer][INFO] - Batch size = 16
+[2025-06-30 17:35:44,690][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-416
+[2025-06-30 17:35:45,121][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:35:45,122][transformers.configuration_utils][INFO] - Model config Phi3Config (same Phi3Config JSON as logged at start-up)
+[2025-06-30 17:38:51,499][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year. If grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
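This recurring message reflects Trainer's default remove_unused_columns behaviour: dataset columns that don't match a parameter of PeftModelForSequenceClassification.forward are dropped before batching. It is purely informational here; if those columns were needed (for example, by a custom data collator), the default could be disabled. A sketch, not the run's actual code:

    from transformers import TrainingArguments

    args = TrainingArguments(
        output_dir="./results/",
        remove_unused_columns=False,  # keep grades, supporting_text, id, ... in the batches
    )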
+[2025-06-30 17:38:51,502][transformers.trainer][INFO] - 
+***** Running Evaluation *****
+[2025-06-30 17:38:51,502][transformers.trainer][INFO] - Num examples = 132
+[2025-06-30 17:38:51,502][transformers.trainer][INFO] - Batch size = 16
+[2025-06-30 17:39:08,952][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-448
+[2025-06-30 17:39:09,422][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:39:09,423][transformers.configuration_utils][INFO] - Model config Phi3Config (same Phi3Config JSON as logged at start-up)
+[2025-06-30 17:39:09,597][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-416] due to args.save_total_limit
+[2025-06-30 17:42:15,863][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year. If grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
+[2025-06-30 17:42:15,866][transformers.trainer][INFO] - 
+***** Running Evaluation *****
+[2025-06-30 17:42:15,866][transformers.trainer][INFO] - Num examples = 132
+[2025-06-30 17:42:15,866][transformers.trainer][INFO] - Batch size = 16
+[2025-06-30 17:42:33,279][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-480
+[2025-06-30 17:42:33,739][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:42:33,739][transformers.configuration_utils][INFO] - Model config Phi3Config (same Phi3Config JSON as logged at start-up)
+[2025-06-30 17:42:33,911][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-448] due to args.save_total_limit
+[2025-06-30 17:45:40,244][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year. If grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
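The class name in these messages, PeftModelForSequenceClassification, is what PEFT returns when a sequence-classification model is wrapped with a LoRA adapter. A sketch consistent with the run configuration (r=8, alpha=16, dropout=0.05, all-linear target modules, 6 labels); an illustration under those settings, not the project's actual code:

    from peft import LoraConfig, TaskType, get_peft_model
    from transformers import AutoModelForSequenceClassification

    base_model = AutoModelForSequenceClassification.from_pretrained(
        "microsoft/Phi-3.5-mini-instruct", num_labels=6
    )
    lora_config = LoraConfig(
        task_type=TaskType.SEQ_CLS,   # classification task, hence the wrapper class name
        r=8,
        lora_alpha=16,
        lora_dropout=0.05,
        target_modules="all-linear",
    )
    model = get_peft_model(base_model, lora_config)
    # type(model).__name__ == "PeftModelForSequenceClassification"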
+[2025-06-30 17:45:40,247][transformers.trainer][INFO] - 
+***** Running Evaluation *****
+[2025-06-30 17:45:40,247][transformers.trainer][INFO] - Num examples = 132
+[2025-06-30 17:45:40,247][transformers.trainer][INFO] - Batch size = 16
+[2025-06-30 17:45:57,652][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-512
+[2025-06-30 17:45:58,098][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:45:58,099][transformers.configuration_utils][INFO] - Model config Phi3Config (same Phi3Config JSON as logged at start-up)
+[2025-06-30 17:45:58,293][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-480] due to args.save_total_limit
+[2025-06-30 17:49:04,868][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year. If grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
+[2025-06-30 17:49:04,871][transformers.trainer][INFO] - 
+***** Running Evaluation *****
+[2025-06-30 17:49:04,871][transformers.trainer][INFO] - Num examples = 132
+[2025-06-30 17:49:04,871][transformers.trainer][INFO] - Batch size = 16
+[2025-06-30 17:49:22,317][transformers.trainer][INFO] - Saving model checkpoint to /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-544
+[2025-06-30 17:49:22,859][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:49:22,860][transformers.configuration_utils][INFO] - Model config Phi3Config (same Phi3Config JSON as logged at start-up)
+[2025-06-30 17:49:23,098][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-512] due to args.save_total_limit
+[2025-06-30 17:49:23,110][transformers.trainer][INFO] - 
+
+Training completed. Do not forget to share your model on huggingface.co/models =)
+
+
+[2025-06-30 17:49:23,110][transformers.trainer][INFO] - Loading best model from /workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-384 (score: 0.37900874635568516).
+[2025-06-30 17:49:23,187][transformers.trainer][INFO] - Deleting older checkpoint [/workspace/jbcs2025/outputs/2025-06-30/16-50-49/results/checkpoint-544] due to args.save_total_limit
+[2025-06-30 17:49:23,199][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year. If grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
+[2025-06-30 17:49:23,202][transformers.trainer][INFO] - 
+***** Running Evaluation *****
+[2025-06-30 17:49:23,202][transformers.trainer][INFO] - Num examples = 132
+[2025-06-30 17:49:23,202][transformers.trainer][INFO] - Batch size = 16
+[2025-06-30 17:49:40,644][__main__][INFO] - Training completed successfully.
+[2025-06-30 17:49:40,645][__main__][INFO] - Running on Test
+[2025-06-30 17:49:40,645][transformers.trainer][INFO] - The following columns in the Evaluation set don't have a corresponding argument in `PeftModelForSequenceClassification.forward` and have been ignored: grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year. If grades, supporting_text, id, essay_text, id_prompt, prompt, reference, essay_year are not expected by `PeftModelForSequenceClassification.forward`, you can safely ignore this message.
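The best checkpoint (checkpoint-384, eval QWK of about 0.379) was selected via metric_for_best_model: QWK. A sketch of a compute_metrics function that would report this metric, assuming (plausibly, though the log cannot confirm it) that QWK is sklearn's quadratically weighted Cohen's kappa:

    import numpy as np
    from sklearn.metrics import cohen_kappa_score, f1_score

    def compute_metrics(eval_pred):
        logits, labels = eval_pred
        preds = np.argmax(logits, axis=-1)
        return {
            "QWK": cohen_kappa_score(labels, preds, weights="quadratic"),
            "Macro_F1": f1_score(labels, preds, average="macro"),
            "Weighted_F1": f1_score(labels, preds, average="weighted"),
        }

Trainer prefixes these keys with eval_, which is why the best-model score above corresponds to eval_QWK.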
+[2025-06-30 17:49:40,647][transformers.trainer][INFO] - 
+***** Running Evaluation *****
+[2025-06-30 17:49:40,648][transformers.trainer][INFO] - Num examples = 138
+[2025-06-30 17:49:40,648][transformers.trainer][INFO] - Batch size = 16
+[2025-06-30 17:49:58,967][__main__][INFO] - Test metrics: {'eval_loss': 1.6321039199829102, 'eval_model_preparation_time': 0.008, 'eval_accuracy': 0.6594202898550725, 'eval_RMSE': 25.931906372573962, 'eval_QWK': 0.4689490445859872, 'eval_HDIV': 0.007246376811594235, 'eval_Macro_F1': 0.25625029763322066, 'eval_Micro_F1': 0.6594202898550725, 'eval_Weighted_F1': 0.6261409273457467, 'eval_TP_0': 0, 'eval_TN_0': 137, 'eval_FP_0': 0, 'eval_FN_0': 1, 'eval_TP_1': 0, 'eval_TN_1': 137, 'eval_FP_1': 0, 'eval_FN_1': 1, 'eval_TP_2': 1, 'eval_TN_2': 128, 'eval_FP_2': 1, 'eval_FN_2': 8, 'eval_TP_3': 62, 'eval_TN_3': 34, 'eval_FP_3': 28, 'eval_FN_3': 14, 'eval_TP_4': 28, 'eval_TN_4': 74, 'eval_FP_4': 18, 'eval_FN_4': 18, 'eval_TP_5': 0, 'eval_TN_5': 133, 'eval_FP_5': 0, 'eval_FN_5': 5, 'eval_runtime': 18.3107, 'eval_samples_per_second': 7.537, 'eval_steps_per_second': 0.492, 'epoch': 17.0}
+[2025-06-30 17:49:58,967][transformers.trainer][INFO] - Saving model checkpoint to ./results/best_model
+[2025-06-30 17:49:59,405][transformers.configuration_utils][INFO] - loading configuration file config.json from cache at /workspace/.hf_home/hub/models--microsoft--Phi-3.5-mini-instruct/snapshots/3145e03a9fd4cdd7cd953c34d9bbf7ad606122ca/config.json
+[2025-06-30 17:49:59,406][transformers.configuration_utils][INFO] - Model config Phi3Config (same Phi3Config JSON as logged at start-up)
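The per-label eval_TP_k/eval_TN_k/eval_FP_k/eval_FN_k entries in the test metrics are one-vs-rest confusion counts over the 138 test essays (each label's four counts sum to 138, e.g. 62 + 34 + 28 + 14 for label 3). A sketch of how such counts can be derived, assuming sklearn and the label encoding 0-5:

    import numpy as np
    from sklearn.metrics import multilabel_confusion_matrix

    def per_class_counts(labels, preds, num_labels=6):
        # One 2x2 matrix [[TN, FP], [FN, TP]] per label, one-vs-rest.
        mcm = multilabel_confusion_matrix(labels, preds, labels=list(range(num_labels)))
        counts = {}
        for k, ((tn, fp), (fn, tp)) in enumerate(mcm):
            counts.update({f"TP_{k}": int(tp), f"TN_{k}": int(tn),
                           f"FP_{k}": int(fp), f"FN_{k}": int(fn)})
        return counts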
+[2025-06-30 17:49:59,489][transformers.tokenization_utils_base][INFO] - chat template saved in ./results/best_model/chat_template.jinja
+[2025-06-30 17:49:59,489][transformers.tokenization_utils_base][INFO] - tokenizer config file saved in ./results/best_model/tokenizer_config.json
+[2025-06-30 17:49:59,489][transformers.tokenization_utils_base][INFO] - Special tokens file saved in ./results/best_model/special_tokens_map.json
+[2025-06-30 17:49:59,515][__main__][INFO] - Model and tokenizer saved to ./results/best_model
+[2025-06-30 17:49:59,521][__main__][INFO] - Fine Tuning Finished.
+[2025-06-30 17:50:00,031][__main__][INFO] - Total emissions: 0.3317 kg CO2eq
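The closing emissions figure is characteristic of a codecarbon-style tracker wrapped around the run. A minimal sketch under that assumption; the run's actual tracking code is not shown in the log:

    from codecarbon import EmissionsTracker

    tracker = EmissionsTracker()
    tracker.start()
    # ... run fine-tuning here ...
    emissions_kg = tracker.stop()  # returns emissions in kg CO2eq
    print(f"Total emissions: {emissions_kg:.4f} kg CO2eq")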