{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.2738853503184713,
  "eval_steps": 10000000,
  "global_step": 1000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.012738853503184714,
      "grad_norm": 8.56943733236697,
      "learning_rate": 6.369426751592357e-09,
      "loss": 2.0819,
      "step": 10
    },
    {
      "epoch": 0.025477707006369428,
      "grad_norm": 8.50801711736602,
      "learning_rate": 1.2738853503184714e-08,
      "loss": 2.1089,
      "step": 20
    },
    {
      "epoch": 0.03821656050955414,
      "grad_norm": 8.306574604791226,
      "learning_rate": 1.910828025477707e-08,
      "loss": 2.1193,
      "step": 30
    },
    {
      "epoch": 0.050955414012738856,
      "grad_norm": 8.685229549619896,
      "learning_rate": 2.5477707006369427e-08,
      "loss": 2.096,
      "step": 40
    },
    {
      "epoch": 0.06369426751592357,
      "grad_norm": 8.546769161931346,
      "learning_rate": 3.184713375796178e-08,
      "loss": 2.0835,
      "step": 50
    },
    {
      "epoch": 0.07643312101910828,
      "grad_norm": 8.719632286997141,
      "learning_rate": 3.821656050955414e-08,
      "loss": 2.1084,
      "step": 60
    },
    {
      "epoch": 0.08917197452229299,
      "grad_norm": 8.465502680877355,
      "learning_rate": 4.458598726114649e-08,
      "loss": 2.106,
      "step": 70
    },
    {
      "epoch": 0.10191082802547771,
      "grad_norm": 8.160116558852692,
      "learning_rate": 5.0955414012738854e-08,
      "loss": 2.1084,
      "step": 80
    },
    {
      "epoch": 0.11464968152866242,
      "grad_norm": 7.879463909039044,
      "learning_rate": 5.732484076433121e-08,
      "loss": 2.0807,
      "step": 90
    },
    {
      "epoch": 0.12738853503184713,
      "grad_norm": 8.024989565954654,
      "learning_rate": 6.369426751592356e-08,
      "loss": 2.0799,
      "step": 100
    },
    {
      "epoch": 0.14012738853503184,
      "grad_norm": 8.024652217381297,
      "learning_rate": 7.006369426751591e-08,
      "loss": 2.0851,
      "step": 110
    },
    {
      "epoch": 0.15286624203821655,
      "grad_norm": 7.1712069050488125,
      "learning_rate": 7.643312101910828e-08,
      "loss": 2.0872,
      "step": 120
    },
    {
      "epoch": 0.16560509554140126,
      "grad_norm": 7.280737795114974,
      "learning_rate": 8.280254777070063e-08,
      "loss": 2.0832,
      "step": 130
    },
    {
      "epoch": 0.17834394904458598,
      "grad_norm": 7.0433682583690205,
      "learning_rate": 8.917197452229298e-08,
      "loss": 2.069,
      "step": 140
    },
    {
      "epoch": 0.1910828025477707,
      "grad_norm": 6.719930027351258,
      "learning_rate": 9.554140127388536e-08,
      "loss": 2.0392,
      "step": 150
    },
    {
      "epoch": 0.20382165605095542,
      "grad_norm": 5.705591268858002,
      "learning_rate": 1.0191082802547771e-07,
      "loss": 2.0493,
      "step": 160
    },
    {
      "epoch": 0.21656050955414013,
      "grad_norm": 4.575997886363505,
      "learning_rate": 1.0828025477707006e-07,
      "loss": 1.983,
      "step": 170
    },
    {
      "epoch": 0.22929936305732485,
      "grad_norm": 4.418462096774936,
      "learning_rate": 1.1464968152866242e-07,
      "loss": 2.0093,
      "step": 180
    },
    {
      "epoch": 0.24203821656050956,
      "grad_norm": 4.4097948630855255,
      "learning_rate": 1.2101910828025477e-07,
      "loss": 2.0138,
      "step": 190
    },
    {
      "epoch": 0.25477707006369427,
      "grad_norm": 3.5259545363334284,
      "learning_rate": 1.2738853503184713e-07,
      "loss": 1.972,
      "step": 200
    },
    {
      "epoch": 0.267515923566879,
      "grad_norm": 3.0391815131588698,
      "learning_rate": 1.3375796178343948e-07,
      "loss": 1.9749,
      "step": 210
    },
    {
      "epoch": 0.2802547770700637,
      "grad_norm": 2.6254404775057973,
      "learning_rate": 1.4012738853503183e-07,
      "loss": 1.9917,
      "step": 220
    },
    {
      "epoch": 0.2929936305732484,
      "grad_norm": 2.4024744566371505,
      "learning_rate": 1.464968152866242e-07,
      "loss": 1.9432,
      "step": 230
    },
    {
      "epoch": 0.3057324840764331,
      "grad_norm": 2.2547074325053256,
      "learning_rate": 1.5286624203821656e-07,
      "loss": 1.9573,
      "step": 240
    },
    {
      "epoch": 0.3184713375796178,
      "grad_norm": 2.177178569468059,
      "learning_rate": 1.592356687898089e-07,
      "loss": 1.9387,
      "step": 250
    },
    {
      "epoch": 0.33121019108280253,
      "grad_norm": 2.1464165722160495,
      "learning_rate": 1.6560509554140126e-07,
      "loss": 1.8581,
      "step": 260
    },
    {
      "epoch": 0.34394904458598724,
      "grad_norm": 2.087209962861719,
      "learning_rate": 1.719745222929936e-07,
      "loss": 1.8983,
      "step": 270
    },
    {
      "epoch": 0.35668789808917195,
      "grad_norm": 1.9318089948993342,
      "learning_rate": 1.7834394904458596e-07,
      "loss": 1.8978,
      "step": 280
    },
    {
      "epoch": 0.36942675159235666,
      "grad_norm": 1.9797292859696498,
      "learning_rate": 1.847133757961783e-07,
      "loss": 1.8881,
      "step": 290
    },
    {
      "epoch": 0.3821656050955414,
      "grad_norm": 1.8708460589289673,
      "learning_rate": 1.9108280254777072e-07,
      "loss": 1.8309,
      "step": 300
    },
    {
      "epoch": 0.39490445859872614,
      "grad_norm": 1.8593376000097654,
      "learning_rate": 1.9745222929936307e-07,
      "loss": 1.8694,
      "step": 310
    },
    {
      "epoch": 0.40764331210191085,
      "grad_norm": 1.8013443944148582,
      "learning_rate": 2.0382165605095542e-07,
      "loss": 1.8743,
      "step": 320
    },
    {
      "epoch": 0.42038216560509556,
      "grad_norm": 1.7803529845296482,
      "learning_rate": 2.1019108280254777e-07,
      "loss": 1.9007,
      "step": 330
    },
    {
      "epoch": 0.43312101910828027,
      "grad_norm": 1.7991102975353765,
      "learning_rate": 2.1656050955414012e-07,
      "loss": 1.8497,
      "step": 340
    },
    {
      "epoch": 0.445859872611465,
      "grad_norm": 1.8069009405537868,
      "learning_rate": 2.2292993630573247e-07,
      "loss": 1.8562,
      "step": 350
    },
    {
      "epoch": 0.4585987261146497,
      "grad_norm": 1.6273753515480374,
      "learning_rate": 2.2929936305732485e-07,
      "loss": 1.8603,
      "step": 360
    },
    {
      "epoch": 0.4713375796178344,
      "grad_norm": 1.779189816867112,
      "learning_rate": 2.356687898089172e-07,
      "loss": 1.8829,
      "step": 370
    },
    {
      "epoch": 0.4840764331210191,
      "grad_norm": 1.696286378605514,
      "learning_rate": 2.4203821656050955e-07,
      "loss": 1.8154,
      "step": 380
    },
    {
      "epoch": 0.4968152866242038,
      "grad_norm": 1.6332996140543181,
      "learning_rate": 2.484076433121019e-07,
      "loss": 1.839,
      "step": 390
    },
    {
      "epoch": 0.5095541401273885,
      "grad_norm": 1.6812932949046533,
      "learning_rate": 2.5477707006369425e-07,
      "loss": 1.8085,
      "step": 400
    },
    {
      "epoch": 0.5222929936305732,
      "grad_norm": 1.6802631887025108,
      "learning_rate": 2.611464968152866e-07,
      "loss": 1.8687,
      "step": 410
    },
    {
      "epoch": 0.535031847133758,
      "grad_norm": 2.096297259072495,
      "learning_rate": 2.6751592356687895e-07,
      "loss": 1.8425,
      "step": 420
    },
    {
      "epoch": 0.5477707006369427,
      "grad_norm": 1.6250115457169498,
      "learning_rate": 2.738853503184713e-07,
      "loss": 1.8526,
      "step": 430
    },
    {
      "epoch": 0.5605095541401274,
      "grad_norm": 1.9381854040760949,
      "learning_rate": 2.8025477707006366e-07,
      "loss": 1.831,
      "step": 440
    },
    {
      "epoch": 0.5732484076433121,
      "grad_norm": 1.6807327820364515,
      "learning_rate": 2.86624203821656e-07,
      "loss": 1.8398,
      "step": 450
    },
    {
      "epoch": 0.5859872611464968,
      "grad_norm": 1.6107440193241551,
      "learning_rate": 2.929936305732484e-07,
      "loss": 1.8267,
      "step": 460
    },
    {
      "epoch": 0.5987261146496815,
      "grad_norm": 1.5977967583411985,
      "learning_rate": 2.9936305732484076e-07,
      "loss": 1.8355,
      "step": 470
    },
    {
      "epoch": 0.6114649681528662,
      "grad_norm": 1.9497460290846542,
      "learning_rate": 3.057324840764331e-07,
      "loss": 1.8377,
      "step": 480
    },
    {
      "epoch": 0.6242038216560509,
      "grad_norm": 1.6895490491683507,
      "learning_rate": 3.1210191082802546e-07,
      "loss": 1.8417,
      "step": 490
    },
    {
      "epoch": 0.6369426751592356,
      "grad_norm": 1.6275787734591152,
      "learning_rate": 3.184713375796178e-07,
      "loss": 1.7875,
      "step": 500
    },
    {
      "epoch": 0.6496815286624203,
      "grad_norm": 1.6968078173121193,
      "learning_rate": 3.2484076433121017e-07,
      "loss": 1.8465,
      "step": 510
    },
    {
      "epoch": 0.6624203821656051,
      "grad_norm": 1.9081221408565316,
      "learning_rate": 3.312101910828025e-07,
      "loss": 1.8044,
      "step": 520
    },
    {
      "epoch": 0.6751592356687898,
      "grad_norm": 1.7268827395679611,
      "learning_rate": 3.3757961783439487e-07,
      "loss": 1.8424,
      "step": 530
    },
    {
      "epoch": 0.6878980891719745,
      "grad_norm": 1.588010653604496,
      "learning_rate": 3.439490445859872e-07,
      "loss": 1.7895,
      "step": 540
    },
    {
      "epoch": 0.7006369426751592,
      "grad_norm": 1.5048397892480878,
      "learning_rate": 3.5031847133757957e-07,
      "loss": 1.8253,
      "step": 550
    },
    {
      "epoch": 0.7133757961783439,
      "grad_norm": 1.707374118535617,
      "learning_rate": 3.566878980891719e-07,
      "loss": 1.8082,
      "step": 560
    },
    {
      "epoch": 0.7261146496815286,
      "grad_norm": 2.2197114565201517,
      "learning_rate": 3.6305732484076427e-07,
      "loss": 1.779,
      "step": 570
    },
    {
      "epoch": 0.7388535031847133,
      "grad_norm": 1.7191872664512349,
      "learning_rate": 3.694267515923566e-07,
      "loss": 1.8024,
      "step": 580
    },
    {
      "epoch": 0.7515923566878981,
      "grad_norm": 1.408384846451388,
      "learning_rate": 3.757961783439491e-07,
      "loss": 1.7709,
      "step": 590
    },
    {
      "epoch": 0.7643312101910829,
      "grad_norm": 1.4961385659351782,
      "learning_rate": 3.8216560509554143e-07,
      "loss": 1.8185,
      "step": 600
    },
    {
      "epoch": 0.7770700636942676,
      "grad_norm": 1.4966732383387238,
      "learning_rate": 3.885350318471338e-07,
      "loss": 1.8015,
      "step": 610
    },
    {
      "epoch": 0.7898089171974523,
      "grad_norm": 1.5806969964681516,
      "learning_rate": 3.9490445859872613e-07,
      "loss": 1.8667,
      "step": 620
    },
    {
      "epoch": 0.802547770700637,
      "grad_norm": 2.1393134484625773,
      "learning_rate": 4.012738853503185e-07,
      "loss": 1.7756,
      "step": 630
    },
    {
      "epoch": 0.8152866242038217,
      "grad_norm": 1.5449303617675734,
      "learning_rate": 4.0764331210191083e-07,
      "loss": 1.8137,
      "step": 640
    },
    {
      "epoch": 0.8280254777070064,
      "grad_norm": 1.6616798824953516,
      "learning_rate": 4.140127388535032e-07,
      "loss": 1.8229,
      "step": 650
    },
    {
      "epoch": 0.8407643312101911,
      "grad_norm": 1.4727929666770696,
      "learning_rate": 4.2038216560509554e-07,
      "loss": 1.7946,
      "step": 660
    },
    {
      "epoch": 0.8535031847133758,
      "grad_norm": 1.5063558977122367,
      "learning_rate": 4.267515923566879e-07,
      "loss": 1.8227,
      "step": 670
    },
    {
      "epoch": 0.8662420382165605,
      "grad_norm": 1.9073170511583624,
      "learning_rate": 4.3312101910828024e-07,
      "loss": 1.8304,
      "step": 680
    },
    {
      "epoch": 0.8789808917197452,
      "grad_norm": 1.600454635611546,
      "learning_rate": 4.394904458598726e-07,
      "loss": 1.8316,
      "step": 690
    },
    {
      "epoch": 0.89171974522293,
      "grad_norm": 1.612214525129711,
      "learning_rate": 4.4585987261146494e-07,
      "loss": 1.7905,
      "step": 700
    },
    {
      "epoch": 0.9044585987261147,
      "grad_norm": 1.5805218517687452,
      "learning_rate": 4.522292993630573e-07,
      "loss": 1.8001,
      "step": 710
    },
    {
      "epoch": 0.9171974522292994,
      "grad_norm": 1.5226575805751894,
      "learning_rate": 4.585987261146497e-07,
      "loss": 1.802,
      "step": 720
    },
    {
      "epoch": 0.9299363057324841,
      "grad_norm": 2.1066153446994282,
      "learning_rate": 4.6496815286624205e-07,
      "loss": 1.824,
      "step": 730
    },
    {
      "epoch": 0.9426751592356688,
      "grad_norm": 1.5098647042463382,
      "learning_rate": 4.713375796178344e-07,
      "loss": 1.7902,
      "step": 740
    },
    {
      "epoch": 0.9554140127388535,
      "grad_norm": 1.7579270222970877,
      "learning_rate": 4.777070063694267e-07,
      "loss": 1.7669,
      "step": 750
    },
    {
      "epoch": 0.9681528662420382,
      "grad_norm": 1.510473250719683,
      "learning_rate": 4.840764331210191e-07,
      "loss": 1.8189,
      "step": 760
    },
    {
      "epoch": 0.9808917197452229,
      "grad_norm": 2.0426167655298406,
      "learning_rate": 4.904458598726115e-07,
      "loss": 1.7999,
      "step": 770
    },
    {
      "epoch": 0.9936305732484076,
      "grad_norm": 2.1864882903503235,
      "learning_rate": 4.968152866242038e-07,
      "loss": 1.8172,
      "step": 780
    },
    {
      "epoch": 1.0063694267515924,
      "grad_norm": 1.7106458473804984,
      "learning_rate": 5.031847133757962e-07,
      "loss": 1.8049,
      "step": 790
    },
    {
      "epoch": 1.019108280254777,
      "grad_norm": 2.4536538299974238,
      "learning_rate": 5.095541401273885e-07,
      "loss": 1.7972,
      "step": 800
    },
    {
      "epoch": 1.0318471337579618,
      "grad_norm": 2.2703003766318073,
      "learning_rate": 5.159235668789809e-07,
      "loss": 1.8039,
      "step": 810
    },
    {
      "epoch": 1.0445859872611465,
      "grad_norm": 2.04435931962724,
      "learning_rate": 5.222929936305732e-07,
      "loss": 1.783,
      "step": 820
    },
    {
      "epoch": 1.0573248407643312,
      "grad_norm": 1.5182977898972327,
      "learning_rate": 5.286624203821656e-07,
      "loss": 1.7601,
      "step": 830
    },
    {
      "epoch": 1.070063694267516,
      "grad_norm": 2.3955602915546574,
      "learning_rate": 5.350318471337579e-07,
      "loss": 1.778,
      "step": 840
    },
    {
      "epoch": 1.0828025477707006,
      "grad_norm": 2.1173201633271943,
      "learning_rate": 5.414012738853503e-07,
      "loss": 1.8397,
      "step": 850
    },
    {
      "epoch": 1.0955414012738853,
      "grad_norm": 2.438031001903818,
      "learning_rate": 5.477707006369426e-07,
      "loss": 1.7873,
      "step": 860
    },
    {
      "epoch": 1.10828025477707,
      "grad_norm": 2.761676188309256,
      "learning_rate": 5.54140127388535e-07,
      "loss": 1.8517,
      "step": 870
    },
    {
      "epoch": 1.1210191082802548,
      "grad_norm": 1.8173434166079478,
      "learning_rate": 5.605095541401273e-07,
      "loss": 1.8146,
      "step": 880
    },
    {
      "epoch": 1.1337579617834395,
      "grad_norm": 2.592129456495398,
      "learning_rate": 5.668789808917197e-07,
      "loss": 1.79,
      "step": 890
    },
    {
      "epoch": 1.1464968152866242,
      "grad_norm": 2.196256448135963,
      "learning_rate": 5.73248407643312e-07,
      "loss": 1.8153,
      "step": 900
    },
    {
      "epoch": 1.1592356687898089,
      "grad_norm": 2.25064776189445,
      "learning_rate": 5.796178343949044e-07,
      "loss": 1.8099,
      "step": 910
    },
    {
      "epoch": 1.1719745222929936,
      "grad_norm": 1.4088922808400457,
      "learning_rate": 5.859872611464968e-07,
      "loss": 1.8257,
      "step": 920
    },
    {
      "epoch": 1.1847133757961783,
      "grad_norm": 2.559249339951868,
      "learning_rate": 5.923566878980892e-07,
      "loss": 1.7803,
      "step": 930
    },
    {
      "epoch": 1.197452229299363,
      "grad_norm": 1.6812495216892827,
      "learning_rate": 5.987261146496815e-07,
      "loss": 1.8071,
      "step": 940
    },
    {
      "epoch": 1.2101910828025477,
      "grad_norm": 2.9865102020317735,
      "learning_rate": 6.050955414012739e-07,
      "loss": 1.8165,
      "step": 950
    },
    {
      "epoch": 1.2229299363057324,
      "grad_norm": 7.283684341077877,
      "learning_rate": 6.114649681528662e-07,
      "loss": 1.7945,
      "step": 960
    },
    {
      "epoch": 1.2356687898089171,
      "grad_norm": 1.5609113071701854,
      "learning_rate": 6.178343949044586e-07,
      "loss": 1.7963,
      "step": 970
    },
    {
      "epoch": 1.2484076433121019,
      "grad_norm": 2.931531535579773,
      "learning_rate": 6.242038216560509e-07,
      "loss": 1.7795,
      "step": 980
    },
    {
      "epoch": 1.2611464968152866,
      "grad_norm": 3.432859884879658,
      "learning_rate": 6.305732484076433e-07,
      "loss": 1.8424,
      "step": 990
    },
    {
      "epoch": 1.2738853503184713,
      "grad_norm": 3.4669063068366928,
      "learning_rate": 6.369426751592356e-07,
      "loss": 1.7614,
      "step": 1000
    }
  ],
  "logging_steps": 10,
  "max_steps": 15700,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 20,
  "save_steps": 1000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 43703909744640.0,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}