{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.5005500550055005,
  "eval_steps": 500,
  "global_step": 455,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0011001100110011,
      "grad_norm": 8.375,
      "learning_rate": 0.0,
      "loss": 2.04,
      "step": 1
    },
    {
      "epoch": 0.0022002200220022,
      "grad_norm": 7.71875,
      "learning_rate": 4.000000000000001e-06,
      "loss": 2.0872,
      "step": 2
    },
    {
      "epoch": 0.0033003300330033004,
      "grad_norm": 5.03125,
      "learning_rate": 8.000000000000001e-06,
      "loss": 1.5212,
      "step": 3
    },
    {
      "epoch": 0.0044004400440044,
      "grad_norm": 7.15625,
      "learning_rate": 1.2e-05,
      "loss": 1.6269,
      "step": 4
    },
    {
      "epoch": 0.005500550055005501,
      "grad_norm": 5.5,
      "learning_rate": 1.6000000000000003e-05,
      "loss": 1.7743,
      "step": 5
    },
    {
      "epoch": 0.006600660066006601,
      "grad_norm": 4.21875,
      "learning_rate": 2e-05,
      "loss": 1.7265,
      "step": 6
    },
    {
      "epoch": 0.007700770077007701,
      "grad_norm": 3.609375,
      "learning_rate": 1.9992652461425424e-05,
      "loss": 1.8162,
      "step": 7
    },
    {
      "epoch": 0.0088008800880088,
      "grad_norm": 3.625,
      "learning_rate": 1.9985304922850847e-05,
      "loss": 1.5617,
      "step": 8
    },
    {
      "epoch": 0.009900990099009901,
      "grad_norm": 3.859375,
      "learning_rate": 1.997795738427627e-05,
      "loss": 1.5426,
      "step": 9
    },
    {
      "epoch": 0.011001100110011002,
      "grad_norm": 3.515625,
      "learning_rate": 1.9970609845701692e-05,
      "loss": 1.6676,
      "step": 10
    },
    {
      "epoch": 0.0121012101210121,
      "grad_norm": 3.109375,
      "learning_rate": 1.9963262307127115e-05,
      "loss": 1.6194,
      "step": 11
    },
    {
      "epoch": 0.013201320132013201,
      "grad_norm": 3.765625,
      "learning_rate": 1.9955914768552534e-05,
      "loss": 1.7107,
      "step": 12
    },
    {
      "epoch": 0.014301430143014302,
      "grad_norm": 3.921875,
      "learning_rate": 1.994856722997796e-05,
      "loss": 1.8304,
      "step": 13
    },
    {
      "epoch": 0.015401540154015401,
      "grad_norm": 3.84375,
      "learning_rate": 1.9941219691403383e-05,
      "loss": 1.4131,
      "step": 14
    },
    {
      "epoch": 0.0165016501650165,
      "grad_norm": 3.28125,
      "learning_rate": 1.9933872152828805e-05,
      "loss": 1.5398,
      "step": 15
    },
    {
      "epoch": 0.0176017601760176,
      "grad_norm": 4.0,
      "learning_rate": 1.9926524614254225e-05,
      "loss": 1.4047,
      "step": 16
    },
    {
      "epoch": 0.0187018701870187,
      "grad_norm": 3.453125,
      "learning_rate": 1.991917707567965e-05,
      "loss": 1.4497,
      "step": 17
    },
    {
      "epoch": 0.019801980198019802,
      "grad_norm": 3.34375,
      "learning_rate": 1.9911829537105073e-05,
      "loss": 1.686,
      "step": 18
    },
    {
      "epoch": 0.020902090209020903,
      "grad_norm": 3.578125,
      "learning_rate": 1.9904481998530492e-05,
      "loss": 1.6911,
      "step": 19
    },
    {
      "epoch": 0.022002200220022004,
      "grad_norm": 3.953125,
      "learning_rate": 1.9897134459955915e-05,
      "loss": 1.4358,
      "step": 20
    },
    {
      "epoch": 0.0231023102310231,
      "grad_norm": 3.296875,
      "learning_rate": 1.988978692138134e-05,
      "loss": 1.6062,
      "step": 21
    },
    {
      "epoch": 0.0242024202420242,
      "grad_norm": 3.4375,
      "learning_rate": 1.988243938280676e-05,
      "loss": 1.3552,
      "step": 22
    },
    {
      "epoch": 0.025302530253025302,
      "grad_norm": 4.5,
      "learning_rate": 1.9875091844232183e-05,
      "loss": 1.6753,
      "step": 23
    },
    {
      "epoch": 0.026402640264026403,
      "grad_norm": 4.96875,
      "learning_rate": 1.9867744305657606e-05,
      "loss": 1.4951,
      "step": 24
    },
    {
      "epoch": 0.027502750275027504,
      "grad_norm": 5.0625,
      "learning_rate": 1.9860396767083028e-05,
      "loss": 1.4147,
      "step": 25
    },
    {
      "epoch": 0.028602860286028604,
      "grad_norm": 4.53125,
      "learning_rate": 1.985304922850845e-05,
      "loss": 1.6033,
      "step": 26
    },
    {
      "epoch": 0.0297029702970297,
      "grad_norm": 4.15625,
      "learning_rate": 1.9845701689933873e-05,
      "loss": 1.4862,
      "step": 27
    },
    {
      "epoch": 0.030803080308030802,
      "grad_norm": 4.8125,
      "learning_rate": 1.9838354151359296e-05,
      "loss": 1.3463,
      "step": 28
    },
    {
      "epoch": 0.0319031903190319,
      "grad_norm": 3.734375,
      "learning_rate": 1.983100661278472e-05,
      "loss": 1.3828,
      "step": 29
    },
    {
      "epoch": 0.033003300330033,
      "grad_norm": 4.5625,
      "learning_rate": 1.982365907421014e-05,
      "loss": 1.5348,
      "step": 30
    },
    {
      "epoch": 0.034103410341034104,
      "grad_norm": 3.6875,
      "learning_rate": 1.9816311535635564e-05,
      "loss": 1.4324,
      "step": 31
    },
    {
      "epoch": 0.0352035203520352,
      "grad_norm": 4.78125,
      "learning_rate": 1.9808963997060987e-05,
      "loss": 1.4608,
      "step": 32
    },
    {
      "epoch": 0.036303630363036306,
      "grad_norm": 3.40625,
      "learning_rate": 1.980161645848641e-05,
      "loss": 1.3659,
      "step": 33
    },
    {
      "epoch": 0.0374037403740374,
      "grad_norm": 4.0625,
      "learning_rate": 1.9794268919911832e-05,
      "loss": 1.4568,
      "step": 34
    },
    {
      "epoch": 0.03850385038503851,
      "grad_norm": 4.40625,
      "learning_rate": 1.9786921381337254e-05,
      "loss": 1.6008,
      "step": 35
    },
    {
      "epoch": 0.039603960396039604,
      "grad_norm": 4.625,
      "learning_rate": 1.9779573842762677e-05,
      "loss": 1.5064,
      "step": 36
    },
    {
      "epoch": 0.0407040704070407,
      "grad_norm": 3.34375,
      "learning_rate": 1.97722263041881e-05,
      "loss": 1.4865,
      "step": 37
    },
    {
      "epoch": 0.041804180418041806,
      "grad_norm": 4.65625,
      "learning_rate": 1.9764878765613522e-05,
      "loss": 1.5909,
      "step": 38
    },
    {
      "epoch": 0.0429042904290429,
      "grad_norm": 4.0,
      "learning_rate": 1.9757531227038945e-05,
      "loss": 1.4585,
      "step": 39
    },
    {
      "epoch": 0.04400440044004401,
      "grad_norm": 3.609375,
      "learning_rate": 1.9750183688464368e-05,
      "loss": 1.4128,
      "step": 40
    },
    {
      "epoch": 0.045104510451045104,
      "grad_norm": 3.78125,
      "learning_rate": 1.9742836149889787e-05,
      "loss": 1.4452,
      "step": 41
    },
    {
      "epoch": 0.0462046204620462,
      "grad_norm": 3.25,
      "learning_rate": 1.973548861131521e-05,
      "loss": 1.4649,
      "step": 42
    },
    {
      "epoch": 0.047304730473047306,
      "grad_norm": 3.59375,
      "learning_rate": 1.9728141072740635e-05,
      "loss": 1.3526,
      "step": 43
    },
    {
      "epoch": 0.0484048404840484,
      "grad_norm": 4.03125,
      "learning_rate": 1.9720793534166055e-05,
      "loss": 1.276,
      "step": 44
    },
    {
      "epoch": 0.04950495049504951,
      "grad_norm": 3.609375,
      "learning_rate": 1.9713445995591477e-05,
      "loss": 1.4548,
      "step": 45
    },
    {
      "epoch": 0.050605060506050605,
      "grad_norm": 3.265625,
      "learning_rate": 1.97060984570169e-05,
      "loss": 1.3293,
      "step": 46
    },
    {
      "epoch": 0.0517051705170517,
      "grad_norm": 4.09375,
      "learning_rate": 1.9698750918442326e-05,
      "loss": 1.363,
      "step": 47
    },
    {
      "epoch": 0.052805280528052806,
      "grad_norm": 3.25,
      "learning_rate": 1.9691403379867745e-05,
      "loss": 1.4058,
      "step": 48
    },
    {
      "epoch": 0.0539053905390539,
      "grad_norm": 3.375,
      "learning_rate": 1.9684055841293168e-05,
      "loss": 1.4283,
      "step": 49
    },
    {
      "epoch": 0.05500550055005501,
      "grad_norm": 2.953125,
      "learning_rate": 1.967670830271859e-05,
      "loss": 1.4329,
      "step": 50
    },
    {
      "epoch": 0.056105610561056105,
      "grad_norm": 3.53125,
      "learning_rate": 1.9669360764144013e-05,
      "loss": 1.1935,
      "step": 51
    },
    {
      "epoch": 0.05720572057205721,
      "grad_norm": 2.921875,
      "learning_rate": 1.9662013225569436e-05,
      "loss": 1.3294,
      "step": 52
    },
    {
      "epoch": 0.058305830583058306,
      "grad_norm": 3.03125,
      "learning_rate": 1.9654665686994858e-05,
      "loss": 1.4402,
      "step": 53
    },
    {
      "epoch": 0.0594059405940594,
      "grad_norm": 3.328125,
      "learning_rate": 1.964731814842028e-05,
      "loss": 1.5124,
      "step": 54
    },
    {
      "epoch": 0.06050605060506051,
      "grad_norm": 3.328125,
      "learning_rate": 1.9639970609845704e-05,
      "loss": 1.4588,
      "step": 55
    },
    {
      "epoch": 0.061606160616061605,
      "grad_norm": 3.140625,
      "learning_rate": 1.9632623071271126e-05,
      "loss": 1.3324,
      "step": 56
    },
    {
      "epoch": 0.0627062706270627,
      "grad_norm": 3.21875,
      "learning_rate": 1.962527553269655e-05,
      "loss": 1.3721,
      "step": 57
    },
    {
      "epoch": 0.0638063806380638,
      "grad_norm": 2.859375,
      "learning_rate": 1.961792799412197e-05,
      "loss": 1.3951,
      "step": 58
    },
    {
      "epoch": 0.06490649064906491,
      "grad_norm": 3.328125,
      "learning_rate": 1.961058045554739e-05,
      "loss": 1.5673,
      "step": 59
    },
    {
      "epoch": 0.066006600660066,
      "grad_norm": 4.03125,
      "learning_rate": 1.9603232916972817e-05,
      "loss": 1.5362,
      "step": 60
    },
    {
      "epoch": 0.0671067106710671,
      "grad_norm": 3.390625,
      "learning_rate": 1.959588537839824e-05,
      "loss": 1.3759,
      "step": 61
    },
    {
      "epoch": 0.06820682068206821,
      "grad_norm": 2.8125,
      "learning_rate": 1.9588537839823662e-05,
      "loss": 1.2939,
      "step": 62
    },
    {
      "epoch": 0.06930693069306931,
      "grad_norm": 3.375,
      "learning_rate": 1.958119030124908e-05,
      "loss": 1.5522,
      "step": 63
    },
    {
      "epoch": 0.0704070407040704,
      "grad_norm": 3.578125,
      "learning_rate": 1.9573842762674507e-05,
      "loss": 1.4673,
      "step": 64
    },
    {
      "epoch": 0.07150715071507151,
      "grad_norm": 3.03125,
      "learning_rate": 1.956649522409993e-05,
      "loss": 1.4753,
      "step": 65
    },
    {
      "epoch": 0.07260726072607261,
      "grad_norm": 2.640625,
      "learning_rate": 1.955914768552535e-05,
      "loss": 1.3168,
      "step": 66
    },
    {
      "epoch": 0.0737073707370737,
      "grad_norm": 3.171875,
      "learning_rate": 1.955180014695077e-05,
      "loss": 1.5745,
      "step": 67
    },
    {
      "epoch": 0.0748074807480748,
      "grad_norm": 3.765625,
      "learning_rate": 1.9544452608376198e-05,
      "loss": 1.2915,
      "step": 68
    },
    {
      "epoch": 0.07590759075907591,
      "grad_norm": 2.671875,
      "learning_rate": 1.9537105069801617e-05,
      "loss": 1.253,
      "step": 69
    },
    {
      "epoch": 0.07700770077007701,
      "grad_norm": 4.21875,
      "learning_rate": 1.952975753122704e-05,
      "loss": 1.4901,
      "step": 70
    },
    {
      "epoch": 0.0781078107810781,
      "grad_norm": 3.53125,
      "learning_rate": 1.9522409992652462e-05,
      "loss": 1.5415,
      "step": 71
    },
    {
      "epoch": 0.07920792079207921,
      "grad_norm": 3.828125,
      "learning_rate": 1.9515062454077888e-05,
      "loss": 1.4292,
      "step": 72
    },
    {
      "epoch": 0.08030803080308031,
      "grad_norm": 3.578125,
      "learning_rate": 1.9507714915503307e-05,
      "loss": 1.352,
      "step": 73
    },
    {
      "epoch": 0.0814081408140814,
      "grad_norm": 3.171875,
      "learning_rate": 1.950036737692873e-05,
      "loss": 1.6259,
      "step": 74
    },
    {
      "epoch": 0.08250825082508251,
      "grad_norm": 3.359375,
      "learning_rate": 1.9493019838354153e-05,
      "loss": 1.512,
      "step": 75
    },
    {
      "epoch": 0.08360836083608361,
      "grad_norm": 2.6875,
      "learning_rate": 1.9485672299779575e-05,
      "loss": 1.3977,
      "step": 76
    },
    {
      "epoch": 0.0847084708470847,
      "grad_norm": 3.28125,
      "learning_rate": 1.9478324761204998e-05,
      "loss": 1.3677,
      "step": 77
    },
    {
      "epoch": 0.0858085808580858,
      "grad_norm": 3.390625,
      "learning_rate": 1.947097722263042e-05,
      "loss": 1.2124,
      "step": 78
    },
    {
      "epoch": 0.08690869086908691,
      "grad_norm": 3.359375,
      "learning_rate": 1.9463629684055843e-05,
      "loss": 1.4011,
      "step": 79
    },
    {
      "epoch": 0.08800880088008801,
      "grad_norm": 3.84375,
      "learning_rate": 1.9456282145481266e-05,
      "loss": 1.6235,
      "step": 80
    },
    {
      "epoch": 0.0891089108910891,
      "grad_norm": 3.75,
      "learning_rate": 1.944893460690669e-05,
      "loss": 1.4514,
      "step": 81
    },
    {
      "epoch": 0.09020902090209021,
      "grad_norm": 3.078125,
      "learning_rate": 1.944158706833211e-05,
      "loss": 1.3719,
      "step": 82
    },
    {
      "epoch": 0.09130913091309131,
      "grad_norm": 3.0,
      "learning_rate": 1.9434239529757534e-05,
      "loss": 1.4901,
      "step": 83
    },
    {
      "epoch": 0.0924092409240924,
      "grad_norm": 2.984375,
      "learning_rate": 1.9426891991182953e-05,
      "loss": 1.2011,
      "step": 84
    },
    {
      "epoch": 0.09350935093509351,
      "grad_norm": 3.609375,
      "learning_rate": 1.941954445260838e-05,
      "loss": 1.4448,
      "step": 85
    },
    {
      "epoch": 0.09460946094609461,
      "grad_norm": 3.203125,
      "learning_rate": 1.94121969140338e-05,
      "loss": 1.6021,
      "step": 86
    },
    {
      "epoch": 0.09570957095709572,
      "grad_norm": 3.34375,
      "learning_rate": 1.9404849375459224e-05,
      "loss": 1.1851,
      "step": 87
    },
    {
      "epoch": 0.0968096809680968,
      "grad_norm": 2.921875,
      "learning_rate": 1.9397501836884643e-05,
      "loss": 1.5232,
      "step": 88
    },
    {
      "epoch": 0.09790979097909791,
      "grad_norm": 3.1875,
      "learning_rate": 1.939015429831007e-05,
      "loss": 1.4202,
      "step": 89
    },
    {
      "epoch": 0.09900990099009901,
      "grad_norm": 3.265625,
      "learning_rate": 1.9382806759735492e-05,
      "loss": 1.478,
      "step": 90
    },
    {
      "epoch": 0.1001100110011001,
      "grad_norm": 3.734375,
      "learning_rate": 1.937545922116091e-05,
      "loss": 1.5205,
      "step": 91
    },
    {
      "epoch": 0.10121012101210121,
      "grad_norm": 3.078125,
      "learning_rate": 1.9368111682586334e-05,
      "loss": 1.4933,
      "step": 92
    },
    {
      "epoch": 0.10231023102310231,
      "grad_norm": 3.0,
      "learning_rate": 1.9360764144011757e-05,
      "loss": 1.4371,
      "step": 93
    },
    {
      "epoch": 0.1034103410341034,
      "grad_norm": 3.125,
      "learning_rate": 1.935341660543718e-05,
      "loss": 1.2289,
      "step": 94
    },
    {
      "epoch": 0.10451045104510451,
      "grad_norm": 3.140625,
      "learning_rate": 1.9346069066862602e-05,
      "loss": 1.405,
      "step": 95
    },
    {
      "epoch": 0.10561056105610561,
      "grad_norm": 3.609375,
      "learning_rate": 1.9338721528288024e-05,
      "loss": 1.38,
      "step": 96
    },
    {
      "epoch": 0.10671067106710672,
      "grad_norm": 3.046875,
      "learning_rate": 1.9331373989713447e-05,
      "loss": 1.1726,
      "step": 97
    },
    {
      "epoch": 0.1078107810781078,
      "grad_norm": 3.109375,
      "learning_rate": 1.932402645113887e-05,
      "loss": 1.3886,
      "step": 98
    },
    {
      "epoch": 0.10891089108910891,
      "grad_norm": 3.046875,
      "learning_rate": 1.9316678912564292e-05,
      "loss": 1.2973,
      "step": 99
    },
    {
      "epoch": 0.11001100110011001,
      "grad_norm": 3.375,
      "learning_rate": 1.9309331373989715e-05,
      "loss": 1.2057,
      "step": 100
    },
    {
      "epoch": 0.1111111111111111,
      "grad_norm": 3.15625,
      "learning_rate": 1.9301983835415138e-05,
      "loss": 1.6023,
      "step": 101
    },
    {
      "epoch": 0.11221122112211221,
      "grad_norm": 3.109375,
      "learning_rate": 1.929463629684056e-05,
      "loss": 1.1963,
      "step": 102
    },
    {
      "epoch": 0.11331133113311331,
      "grad_norm": 3.046875,
      "learning_rate": 1.9287288758265983e-05,
      "loss": 1.3473,
      "step": 103
    },
    {
      "epoch": 0.11441144114411442,
      "grad_norm": 3.5625,
      "learning_rate": 1.9279941219691405e-05,
      "loss": 1.2951,
      "step": 104
    },
    {
      "epoch": 0.11551155115511551,
      "grad_norm": 2.71875,
      "learning_rate": 1.9272593681116828e-05,
      "loss": 1.1778,
      "step": 105
    },
    {
      "epoch": 0.11661166116611661,
      "grad_norm": 3.421875,
      "learning_rate": 1.926524614254225e-05,
      "loss": 1.5452,
      "step": 106
    },
    {
      "epoch": 0.11771177117711772,
      "grad_norm": 3.234375,
      "learning_rate": 1.9257898603967673e-05,
      "loss": 1.2901,
      "step": 107
    },
    {
      "epoch": 0.1188118811881188,
      "grad_norm": 3.0625,
      "learning_rate": 1.9250551065393096e-05,
      "loss": 1.214,
      "step": 108
    },
    {
      "epoch": 0.11991199119911991,
      "grad_norm": 2.984375,
      "learning_rate": 1.924320352681852e-05,
      "loss": 1.3448,
      "step": 109
    },
    {
      "epoch": 0.12101210121012101,
      "grad_norm": 2.953125,
      "learning_rate": 1.9235855988243938e-05,
      "loss": 1.3194,
      "step": 110
    },
    {
      "epoch": 0.12211221122112212,
      "grad_norm": 3.0,
      "learning_rate": 1.9228508449669364e-05,
      "loss": 1.1298,
      "step": 111
    },
    {
      "epoch": 0.12321232123212321,
      "grad_norm": 2.828125,
      "learning_rate": 1.9221160911094786e-05,
      "loss": 1.4465,
      "step": 112
    },
    {
      "epoch": 0.12431243124312431,
      "grad_norm": 3.328125,
      "learning_rate": 1.9213813372520206e-05,
      "loss": 1.2164,
      "step": 113
    },
    {
      "epoch": 0.1254125412541254,
      "grad_norm": 3.171875,
      "learning_rate": 1.9206465833945628e-05,
      "loss": 1.2346,
      "step": 114
    },
    {
      "epoch": 0.1265126512651265,
      "grad_norm": 3.203125,
      "learning_rate": 1.9199118295371054e-05,
      "loss": 1.2607,
      "step": 115
    },
    {
      "epoch": 0.1276127612761276,
      "grad_norm": 3.28125,
      "learning_rate": 1.9191770756796474e-05,
      "loss": 1.3497,
      "step": 116
    },
    {
      "epoch": 0.12871287128712872,
      "grad_norm": 3.328125,
      "learning_rate": 1.9184423218221896e-05,
      "loss": 1.3111,
      "step": 117
    },
    {
      "epoch": 0.12981298129812982,
      "grad_norm": 2.875,
      "learning_rate": 1.917707567964732e-05,
      "loss": 1.5062,
      "step": 118
    },
    {
      "epoch": 0.13091309130913092,
      "grad_norm": 3.234375,
      "learning_rate": 1.9169728141072745e-05,
      "loss": 1.2587,
      "step": 119
    },
    {
      "epoch": 0.132013201320132,
      "grad_norm": 3.40625,
      "learning_rate": 1.9162380602498164e-05,
      "loss": 1.5219,
      "step": 120
    },
    {
      "epoch": 0.1331133113311331,
      "grad_norm": 2.890625,
      "learning_rate": 1.9155033063923587e-05,
      "loss": 1.2036,
      "step": 121
    },
    {
      "epoch": 0.1342134213421342,
      "grad_norm": 3.625,
      "learning_rate": 1.914768552534901e-05,
      "loss": 1.2724,
      "step": 122
    },
    {
      "epoch": 0.1353135313531353,
      "grad_norm": 2.859375,
      "learning_rate": 1.9140337986774432e-05,
      "loss": 1.4222,
      "step": 123
    },
    {
      "epoch": 0.13641364136413642,
      "grad_norm": 2.578125,
      "learning_rate": 1.9132990448199855e-05,
      "loss": 1.3872,
      "step": 124
    },
    {
      "epoch": 0.13751375137513752,
      "grad_norm": 3.4375,
      "learning_rate": 1.9125642909625277e-05,
      "loss": 1.4919,
      "step": 125
    },
    {
      "epoch": 0.13861386138613863,
      "grad_norm": 3.0,
      "learning_rate": 1.91182953710507e-05,
      "loss": 1.4967,
      "step": 126
    },
    {
      "epoch": 0.1397139713971397,
      "grad_norm": 2.640625,
      "learning_rate": 1.9110947832476122e-05,
      "loss": 1.2575,
      "step": 127
    },
    {
      "epoch": 0.1408140814081408,
      "grad_norm": 3.296875,
      "learning_rate": 1.9103600293901545e-05,
      "loss": 1.4994,
      "step": 128
    },
    {
      "epoch": 0.1419141914191419,
      "grad_norm": 3.84375,
      "learning_rate": 1.9096252755326968e-05,
      "loss": 1.4374,
      "step": 129
    },
    {
      "epoch": 0.14301430143014301,
      "grad_norm": 2.828125,
      "learning_rate": 1.908890521675239e-05,
      "loss": 1.4395,
      "step": 130
    },
    {
      "epoch": 0.14411441144114412,
      "grad_norm": 3.296875,
      "learning_rate": 1.908155767817781e-05,
      "loss": 1.3933,
      "step": 131
    },
    {
      "epoch": 0.14521452145214522,
      "grad_norm": 2.96875,
      "learning_rate": 1.9074210139603236e-05,
      "loss": 1.0287,
      "step": 132
    },
    {
      "epoch": 0.14631463146314633,
      "grad_norm": 2.828125,
      "learning_rate": 1.9066862601028658e-05,
      "loss": 1.396,
      "step": 133
    },
    {
      "epoch": 0.1474147414741474,
      "grad_norm": 2.890625,
      "learning_rate": 1.905951506245408e-05,
      "loss": 1.3219,
      "step": 134
    },
    {
      "epoch": 0.1485148514851485,
      "grad_norm": 3.125,
      "learning_rate": 1.90521675238795e-05,
      "loss": 1.5112,
      "step": 135
    },
    {
      "epoch": 0.1496149614961496,
      "grad_norm": 3.421875,
      "learning_rate": 1.9044819985304926e-05,
      "loss": 1.3594,
      "step": 136
    },
    {
      "epoch": 0.15071507150715072,
      "grad_norm": 3.5625,
      "learning_rate": 1.903747244673035e-05,
      "loss": 1.6613,
      "step": 137
    },
    {
      "epoch": 0.15181518151815182,
      "grad_norm": 3.5,
      "learning_rate": 1.9030124908155768e-05,
      "loss": 1.4788,
      "step": 138
    },
    {
      "epoch": 0.15291529152915292,
      "grad_norm": 2.828125,
      "learning_rate": 1.902277736958119e-05,
      "loss": 1.3315,
      "step": 139
    },
    {
      "epoch": 0.15401540154015403,
      "grad_norm": 2.546875,
      "learning_rate": 1.9015429831006617e-05,
      "loss": 1.5885,
      "step": 140
    },
    {
      "epoch": 0.1551155115511551,
      "grad_norm": 3.453125,
      "learning_rate": 1.9008082292432036e-05,
      "loss": 1.2906,
      "step": 141
    },
    {
      "epoch": 0.1562156215621562,
      "grad_norm": 2.984375,
      "learning_rate": 1.900073475385746e-05,
      "loss": 1.317,
      "step": 142
    },
    {
      "epoch": 0.1573157315731573,
      "grad_norm": 3.546875,
      "learning_rate": 1.899338721528288e-05,
      "loss": 1.6912,
      "step": 143
    },
    {
      "epoch": 0.15841584158415842,
      "grad_norm": 3.265625,
      "learning_rate": 1.8986039676708304e-05,
      "loss": 1.5334,
      "step": 144
    },
    {
      "epoch": 0.15951595159515952,
      "grad_norm": 3.4375,
      "learning_rate": 1.8978692138133726e-05,
      "loss": 1.3498,
      "step": 145
    },
    {
      "epoch": 0.16061606160616063,
      "grad_norm": 3.96875,
      "learning_rate": 1.897134459955915e-05,
      "loss": 1.4063,
      "step": 146
    },
    {
      "epoch": 0.1617161716171617,
      "grad_norm": 2.96875,
      "learning_rate": 1.896399706098457e-05,
      "loss": 1.3789,
      "step": 147
    },
    {
      "epoch": 0.1628162816281628,
      "grad_norm": 3.125,
      "learning_rate": 1.8956649522409994e-05,
      "loss": 1.4125,
      "step": 148
    },
    {
      "epoch": 0.1639163916391639,
      "grad_norm": 3.4375,
      "learning_rate": 1.8949301983835417e-05,
      "loss": 1.3967,
      "step": 149
    },
    {
      "epoch": 0.16501650165016502,
      "grad_norm": 3.265625,
      "learning_rate": 1.894195444526084e-05,
      "loss": 1.443,
      "step": 150
    },
    {
      "epoch": 0.16611661166116612,
      "grad_norm": 3.0625,
      "learning_rate": 1.8934606906686262e-05,
      "loss": 1.2943,
      "step": 151
    },
    {
      "epoch": 0.16721672167216722,
      "grad_norm": 3.046875,
      "learning_rate": 1.8927259368111685e-05,
      "loss": 1.2614,
      "step": 152
    },
    {
      "epoch": 0.16831683168316833,
      "grad_norm": 3.390625,
      "learning_rate": 1.8919911829537107e-05,
      "loss": 1.5009,
      "step": 153
    },
    {
      "epoch": 0.1694169416941694,
      "grad_norm": 3.390625,
      "learning_rate": 1.891256429096253e-05,
      "loss": 1.317,
      "step": 154
    },
    {
      "epoch": 0.1705170517051705,
      "grad_norm": 2.984375,
      "learning_rate": 1.8905216752387953e-05,
      "loss": 1.4587,
      "step": 155
    },
    {
      "epoch": 0.1716171617161716,
      "grad_norm": 3.0,
      "learning_rate": 1.8897869213813372e-05,
      "loss": 1.2592,
      "step": 156
    },
    {
      "epoch": 0.17271727172717272,
      "grad_norm": 2.6875,
      "learning_rate": 1.8890521675238798e-05,
      "loss": 1.2526,
      "step": 157
    },
    {
      "epoch": 0.17381738173817382,
      "grad_norm": 2.796875,
      "learning_rate": 1.888317413666422e-05,
      "loss": 1.264,
      "step": 158
    },
    {
      "epoch": 0.17491749174917492,
      "grad_norm": 3.46875,
      "learning_rate": 1.8875826598089643e-05,
      "loss": 1.3703,
      "step": 159
    },
    {
      "epoch": 0.17601760176017603,
      "grad_norm": 3.359375,
      "learning_rate": 1.8868479059515062e-05,
      "loss": 1.4458,
      "step": 160
    },
    {
      "epoch": 0.1771177117711771,
      "grad_norm": 2.71875,
      "learning_rate": 1.8861131520940485e-05,
      "loss": 0.9933,
      "step": 161
    },
    {
      "epoch": 0.1782178217821782,
      "grad_norm": 3.0,
      "learning_rate": 1.885378398236591e-05,
      "loss": 1.2284,
      "step": 162
    },
    {
      "epoch": 0.1793179317931793,
      "grad_norm": 3.40625,
      "learning_rate": 1.884643644379133e-05,
      "loss": 1.4245,
      "step": 163
    },
    {
      "epoch": 0.18041804180418042,
      "grad_norm": 3.171875,
      "learning_rate": 1.8839088905216753e-05,
      "loss": 1.337,
      "step": 164
    },
    {
      "epoch": 0.18151815181518152,
      "grad_norm": 2.859375,
      "learning_rate": 1.8831741366642175e-05,
      "loss": 1.6092,
      "step": 165
    },
    {
      "epoch": 0.18261826182618263,
      "grad_norm": 2.875,
      "learning_rate": 1.8824393828067598e-05,
      "loss": 1.1576,
      "step": 166
    },
    {
      "epoch": 0.18371837183718373,
      "grad_norm": 3.203125,
      "learning_rate": 1.881704628949302e-05,
      "loss": 1.2219,
      "step": 167
    },
    {
      "epoch": 0.1848184818481848,
      "grad_norm": 3.6875,
      "learning_rate": 1.8809698750918443e-05,
      "loss": 1.2481,
      "step": 168
    },
    {
      "epoch": 0.1859185918591859,
      "grad_norm": 2.96875,
      "learning_rate": 1.8802351212343866e-05,
      "loss": 1.3549,
      "step": 169
    },
    {
      "epoch": 0.18701870187018702,
      "grad_norm": 3.15625,
      "learning_rate": 1.879500367376929e-05,
      "loss": 1.2661,
      "step": 170
    },
    {
      "epoch": 0.18811881188118812,
      "grad_norm": 3.1875,
      "learning_rate": 1.878765613519471e-05,
      "loss": 1.1914,
      "step": 171
    },
    {
      "epoch": 0.18921892189218922,
      "grad_norm": 2.84375,
      "learning_rate": 1.8780308596620134e-05,
      "loss": 1.2789,
      "step": 172
    },
    {
      "epoch": 0.19031903190319033,
      "grad_norm": 2.859375,
      "learning_rate": 1.8772961058045556e-05,
      "loss": 1.2974,
      "step": 173
    },
    {
      "epoch": 0.19141914191419143,
      "grad_norm": 2.890625,
      "learning_rate": 1.876561351947098e-05,
      "loss": 1.2902,
      "step": 174
    },
    {
      "epoch": 0.1925192519251925,
      "grad_norm": 2.796875,
      "learning_rate": 1.8758265980896402e-05,
      "loss": 1.3255,
      "step": 175
    },
    {
      "epoch": 0.1936193619361936,
      "grad_norm": 2.96875,
      "learning_rate": 1.8750918442321824e-05,
      "loss": 1.6748,
      "step": 176
    },
    {
      "epoch": 0.19471947194719472,
      "grad_norm": 2.921875,
      "learning_rate": 1.8743570903747247e-05,
      "loss": 1.3986,
      "step": 177
    },
    {
      "epoch": 0.19581958195819582,
      "grad_norm": 3.09375,
      "learning_rate": 1.8736223365172666e-05,
      "loss": 1.5297,
      "step": 178
    },
    {
      "epoch": 0.19691969196919692,
      "grad_norm": 3.28125,
      "learning_rate": 1.8728875826598092e-05,
      "loss": 1.5916,
      "step": 179
    },
    {
      "epoch": 0.19801980198019803,
      "grad_norm": 2.984375,
      "learning_rate": 1.8721528288023515e-05,
      "loss": 1.2214,
      "step": 180
    },
    {
      "epoch": 0.19911991199119913,
      "grad_norm": 3.15625,
      "learning_rate": 1.8714180749448937e-05,
      "loss": 1.2814,
      "step": 181
    },
    {
      "epoch": 0.2002200220022002,
      "grad_norm": 3.078125,
      "learning_rate": 1.8706833210874357e-05,
      "loss": 1.6419,
      "step": 182
    },
    {
      "epoch": 0.20132013201320131,
      "grad_norm": 3.265625,
      "learning_rate": 1.8699485672299783e-05,
      "loss": 1.4599,
      "step": 183
    },
    {
      "epoch": 0.20242024202420242,
      "grad_norm": 3.34375,
      "learning_rate": 1.8692138133725205e-05,
      "loss": 1.4953,
      "step": 184
    },
    {
      "epoch": 0.20352035203520352,
      "grad_norm": 2.84375,
      "learning_rate": 1.8684790595150625e-05,
      "loss": 1.3116,
      "step": 185
    },
    {
      "epoch": 0.20462046204620463,
      "grad_norm": 2.984375,
      "learning_rate": 1.8677443056576047e-05,
      "loss": 1.3005,
      "step": 186
    },
    {
      "epoch": 0.20572057205720573,
      "grad_norm": 3.171875,
      "learning_rate": 1.8670095518001473e-05,
      "loss": 1.3192,
      "step": 187
    },
    {
      "epoch": 0.2068206820682068,
      "grad_norm": 3.390625,
      "learning_rate": 1.8662747979426892e-05,
      "loss": 1.1869,
      "step": 188
    },
    {
      "epoch": 0.2079207920792079,
      "grad_norm": 3.0625,
      "learning_rate": 1.8655400440852315e-05,
      "loss": 1.3924,
      "step": 189
    },
    {
      "epoch": 0.20902090209020902,
      "grad_norm": 2.453125,
      "learning_rate": 1.8648052902277738e-05,
      "loss": 1.3866,
      "step": 190
    },
    {
      "epoch": 0.21012101210121012,
      "grad_norm": 2.984375,
      "learning_rate": 1.864070536370316e-05,
      "loss": 1.3815,
      "step": 191
    },
    {
      "epoch": 0.21122112211221122,
      "grad_norm": 2.78125,
      "learning_rate": 1.8633357825128583e-05,
      "loss": 1.4873,
      "step": 192
    },
    {
      "epoch": 0.21232123212321233,
      "grad_norm": 3.5625,
      "learning_rate": 1.8626010286554006e-05,
      "loss": 1.2423,
      "step": 193
    },
    {
      "epoch": 0.21342134213421343,
      "grad_norm": 3.140625,
      "learning_rate": 1.8618662747979428e-05,
      "loss": 1.6128,
      "step": 194
    },
    {
      "epoch": 0.2145214521452145,
      "grad_norm": 2.984375,
      "learning_rate": 1.861131520940485e-05,
      "loss": 1.4249,
      "step": 195
    },
    {
      "epoch": 0.2156215621562156,
      "grad_norm": 2.859375,
      "learning_rate": 1.8603967670830273e-05,
      "loss": 1.3049,
      "step": 196
    },
    {
      "epoch": 0.21672167216721672,
      "grad_norm": 2.671875,
      "learning_rate": 1.8596620132255696e-05,
      "loss": 1.2554,
      "step": 197
    },
    {
      "epoch": 0.21782178217821782,
      "grad_norm": 2.53125,
      "learning_rate": 1.858927259368112e-05,
      "loss": 1.4826,
      "step": 198
    },
    {
      "epoch": 0.21892189218921893,
      "grad_norm": 3.15625,
      "learning_rate": 1.858192505510654e-05,
      "loss": 1.5366,
      "step": 199
    },
    {
      "epoch": 0.22002200220022003,
      "grad_norm": 2.640625,
      "learning_rate": 1.8574577516531964e-05,
      "loss": 1.2674,
      "step": 200
    },
    {
      "epoch": 0.22112211221122113,
      "grad_norm": 2.875,
      "learning_rate": 1.8567229977957387e-05,
      "loss": 1.4328,
      "step": 201
    },
    {
      "epoch": 0.2222222222222222,
      "grad_norm": 2.90625,
      "learning_rate": 1.855988243938281e-05,
      "loss": 1.5244,
      "step": 202
    },
    {
      "epoch": 0.22332233223322331,
      "grad_norm": 2.890625,
      "learning_rate": 1.855253490080823e-05,
      "loss": 1.3276,
      "step": 203
    },
    {
      "epoch": 0.22442244224422442,
      "grad_norm": 3.265625,
      "learning_rate": 1.8545187362233654e-05,
      "loss": 1.2828,
      "step": 204
    },
    {
      "epoch": 0.22552255225522552,
      "grad_norm": 3.046875,
      "learning_rate": 1.8537839823659077e-05,
      "loss": 1.2095,
      "step": 205
    },
    {
      "epoch": 0.22662266226622663,
      "grad_norm": 3.359375,
      "learning_rate": 1.85304922850845e-05,
      "loss": 1.2974,
      "step": 206
    },
    {
      "epoch": 0.22772277227722773,
      "grad_norm": 3.671875,
      "learning_rate": 1.852314474650992e-05,
      "loss": 1.3114,
      "step": 207
    },
    {
      "epoch": 0.22882288228822883,
      "grad_norm": 2.71875,
      "learning_rate": 1.851579720793534e-05,
      "loss": 1.3623,
      "step": 208
    },
    {
      "epoch": 0.2299229922992299,
      "grad_norm": 3.171875,
      "learning_rate": 1.8508449669360768e-05,
      "loss": 1.4058,
      "step": 209
    },
    {
      "epoch": 0.23102310231023102,
      "grad_norm": 2.828125,
      "learning_rate": 1.8501102130786187e-05,
      "loss": 1.6109,
      "step": 210
    },
    {
      "epoch": 0.23212321232123212,
      "grad_norm": 3.0,
      "learning_rate": 1.849375459221161e-05,
      "loss": 1.4529,
      "step": 211
    },
    {
      "epoch": 0.23322332233223322,
      "grad_norm": 3.640625,
      "learning_rate": 1.8486407053637032e-05,
      "loss": 1.2215,
      "step": 212
    },
    {
      "epoch": 0.23432343234323433,
      "grad_norm": 2.6875,
      "learning_rate": 1.8479059515062455e-05,
      "loss": 1.462,
      "step": 213
    },
    {
      "epoch": 0.23542354235423543,
      "grad_norm": 3.3125,
      "learning_rate": 1.8471711976487877e-05,
      "loss": 1.2214,
      "step": 214
    },
    {
      "epoch": 0.23652365236523654,
      "grad_norm": 2.703125,
      "learning_rate": 1.84643644379133e-05,
      "loss": 1.4048,
      "step": 215
    },
    {
      "epoch": 0.2376237623762376,
      "grad_norm": 3.1875,
      "learning_rate": 1.8457016899338723e-05,
      "loss": 1.568,
      "step": 216
    },
    {
      "epoch": 0.23872387238723872,
      "grad_norm": 3.0625,
      "learning_rate": 1.8449669360764145e-05,
      "loss": 1.4943,
      "step": 217
    },
    {
      "epoch": 0.23982398239823982,
      "grad_norm": 3.09375,
      "learning_rate": 1.8442321822189568e-05,
      "loss": 1.3486,
      "step": 218
    },
    {
      "epoch": 0.24092409240924093,
      "grad_norm": 2.84375,
      "learning_rate": 1.843497428361499e-05,
      "loss": 1.3433,
      "step": 219
    },
    {
      "epoch": 0.24202420242024203,
      "grad_norm": 2.8125,
      "learning_rate": 1.8427626745040413e-05,
      "loss": 1.334,
      "step": 220
    },
    {
      "epoch": 0.24312431243124313,
      "grad_norm": 3.125,
      "learning_rate": 1.8420279206465836e-05,
      "loss": 1.6231,
      "step": 221
    },
    {
      "epoch": 0.24422442244224424,
      "grad_norm": 2.875,
      "learning_rate": 1.841293166789126e-05,
      "loss": 1.5251,
      "step": 222
    },
    {
      "epoch": 0.24532453245324531,
      "grad_norm": 2.703125,
      "learning_rate": 1.840558412931668e-05,
      "loss": 1.0413,
      "step": 223
    },
    {
      "epoch": 0.24642464246424642,
      "grad_norm": 2.953125,
      "learning_rate": 1.8398236590742104e-05,
      "loss": 1.4903,
      "step": 224
    },
    {
      "epoch": 0.24752475247524752,
      "grad_norm": 2.46875,
      "learning_rate": 1.8390889052167523e-05,
      "loss": 1.3652,
      "step": 225
    },
    {
      "epoch": 0.24862486248624863,
      "grad_norm": 3.21875,
      "learning_rate": 1.838354151359295e-05,
      "loss": 1.5322,
      "step": 226
    },
    {
      "epoch": 0.24972497249724973,
      "grad_norm": 2.90625,
      "learning_rate": 1.837619397501837e-05,
      "loss": 1.4831,
      "step": 227
    },
    {
      "epoch": 0.2508250825082508,
      "grad_norm": 3.609375,
      "learning_rate": 1.836884643644379e-05,
      "loss": 1.3946,
      "step": 228
    },
    {
      "epoch": 0.25192519251925194,
      "grad_norm": 2.78125,
      "learning_rate": 1.8361498897869213e-05,
      "loss": 1.2758,
      "step": 229
    },
    {
      "epoch": 0.253025302530253,
      "grad_norm": 3.125,
      "learning_rate": 1.835415135929464e-05,
      "loss": 1.3564,
      "step": 230
    },
    {
      "epoch": 0.25412541254125415,
      "grad_norm": 3.140625,
      "learning_rate": 1.8346803820720062e-05,
      "loss": 1.3414,
      "step": 231
    },
    {
      "epoch": 0.2552255225522552,
      "grad_norm": 3.125,
      "learning_rate": 1.833945628214548e-05,
      "loss": 1.4999,
      "step": 232
    },
    {
      "epoch": 0.2563256325632563,
      "grad_norm": 2.921875,
      "learning_rate": 1.8332108743570904e-05,
      "loss": 1.3395,
      "step": 233
    },
    {
      "epoch": 0.25742574257425743,
      "grad_norm": 2.59375,
      "learning_rate": 1.832476120499633e-05,
      "loss": 1.371,
      "step": 234
    },
    {
      "epoch": 0.2585258525852585,
      "grad_norm": 3.34375,
      "learning_rate": 1.831741366642175e-05,
      "loss": 1.2639,
      "step": 235
    },
    {
      "epoch": 0.25962596259625964,
      "grad_norm": 2.890625,
      "learning_rate": 1.8310066127847172e-05,
      "loss": 1.5325,
      "step": 236
    },
    {
      "epoch": 0.2607260726072607,
      "grad_norm": 3.046875,
      "learning_rate": 1.8302718589272594e-05,
      "loss": 1.4831,
      "step": 237
    },
    {
      "epoch": 0.26182618261826185,
      "grad_norm": 2.71875,
      "learning_rate": 1.8295371050698017e-05,
      "loss": 1.172,
      "step": 238
    },
    {
      "epoch": 0.2629262926292629,
      "grad_norm": 3.3125,
      "learning_rate": 1.828802351212344e-05,
      "loss": 1.3264,
      "step": 239
    },
    {
      "epoch": 0.264026402640264,
      "grad_norm": 3.4375,
      "learning_rate": 1.8280675973548862e-05,
      "loss": 1.3878,
      "step": 240
    },
    {
      "epoch": 0.26512651265126513,
      "grad_norm": 3.1875,
      "learning_rate": 1.8273328434974285e-05,
      "loss": 1.2775,
      "step": 241
    },
    {
      "epoch": 0.2662266226622662,
      "grad_norm": 2.78125,
      "learning_rate": 1.8265980896399707e-05,
      "loss": 1.4451,
      "step": 242
    },
    {
      "epoch": 0.26732673267326734,
      "grad_norm": 2.921875,
      "learning_rate": 1.825863335782513e-05,
      "loss": 1.2589,
      "step": 243
    },
    {
      "epoch": 0.2684268426842684,
      "grad_norm": 3.203125,
      "learning_rate": 1.8251285819250553e-05,
      "loss": 1.266,
      "step": 244
    },
    {
      "epoch": 0.26952695269526955,
      "grad_norm": 3.0,
      "learning_rate": 1.8243938280675975e-05,
      "loss": 1.3255,
      "step": 245
    },
    {
      "epoch": 0.2706270627062706,
      "grad_norm": 2.65625,
      "learning_rate": 1.8236590742101398e-05,
      "loss": 1.3934,
      "step": 246
    },
    {
      "epoch": 0.2717271727172717,
      "grad_norm": 2.984375,
      "learning_rate": 1.822924320352682e-05,
      "loss": 1.4549,
      "step": 247
    },
    {
      "epoch": 0.27282728272827284,
      "grad_norm": 2.890625,
      "learning_rate": 1.8221895664952243e-05,
      "loss": 1.3393,
      "step": 248
    },
    {
      "epoch": 0.2739273927392739,
      "grad_norm": 3.734375,
      "learning_rate": 1.8214548126377666e-05,
      "loss": 1.3819,
      "step": 249
    },
    {
      "epoch": 0.27502750275027504,
      "grad_norm": 3.09375,
      "learning_rate": 1.8207200587803085e-05,
      "loss": 1.3343,
      "step": 250
    },
    {
      "epoch": 0.2761276127612761,
      "grad_norm": 3.0,
      "learning_rate": 1.819985304922851e-05,
      "loss": 1.616,
      "step": 251
    },
    {
      "epoch": 0.27722772277227725,
      "grad_norm": 3.828125,
      "learning_rate": 1.8192505510653934e-05,
      "loss": 1.4409,
      "step": 252
    },
    {
      "epoch": 0.27832783278327833,
      "grad_norm": 3.421875,
      "learning_rate": 1.8185157972079356e-05,
      "loss": 1.3437,
      "step": 253
    },
    {
      "epoch": 0.2794279427942794,
      "grad_norm": 2.796875,
      "learning_rate": 1.8177810433504776e-05,
      "loss": 1.2983,
      "step": 254
    },
    {
      "epoch": 0.28052805280528054,
      "grad_norm": 4.4375,
      "learning_rate": 1.81704628949302e-05,
      "loss": 1.3778,
      "step": 255
    },
    {
      "epoch": 0.2816281628162816,
      "grad_norm": 3.359375,
      "learning_rate": 1.8163115356355624e-05,
      "loss": 1.279,
      "step": 256
    },
    {
      "epoch": 0.28272827282728275,
      "grad_norm": 2.765625,
      "learning_rate": 1.8155767817781043e-05,
      "loss": 1.2867,
      "step": 257
    },
    {
      "epoch": 0.2838283828382838,
      "grad_norm": 3.0,
      "learning_rate": 1.8148420279206466e-05,
      "loss": 1.5769,
      "step": 258
    },
    {
      "epoch": 0.28492849284928495,
      "grad_norm": 3.453125,
      "learning_rate": 1.814107274063189e-05,
      "loss": 1.4972,
      "step": 259
    },
    {
      "epoch": 0.28602860286028603,
      "grad_norm": 3.0625,
      "learning_rate": 1.813372520205731e-05,
      "loss": 1.4616,
      "step": 260
    },
    {
      "epoch": 0.2871287128712871,
      "grad_norm": 3.140625,
      "learning_rate": 1.8126377663482734e-05,
      "loss": 1.339,
      "step": 261
    },
    {
      "epoch": 0.28822882288228824,
      "grad_norm": 3.671875,
      "learning_rate": 1.8119030124908157e-05,
      "loss": 1.4753,
      "step": 262
    },
    {
      "epoch": 0.2893289328932893,
      "grad_norm": 3.109375,
      "learning_rate": 1.811168258633358e-05,
      "loss": 1.3111,
      "step": 263
    },
    {
      "epoch": 0.29042904290429045,
      "grad_norm": 3.28125,
      "learning_rate": 1.8104335047759002e-05,
      "loss": 1.3179,
      "step": 264
    },
    {
      "epoch": 0.2915291529152915,
      "grad_norm": 2.96875,
      "learning_rate": 1.8096987509184425e-05,
      "loss": 1.2999,
      "step": 265
    },
    {
      "epoch": 0.29262926292629265,
      "grad_norm": 3.53125,
      "learning_rate": 1.8089639970609847e-05,
      "loss": 1.4513,
      "step": 266
    },
    {
      "epoch": 0.29372937293729373,
      "grad_norm": 3.59375,
      "learning_rate": 1.808229243203527e-05,
      "loss": 1.4698,
      "step": 267
    },
    {
      "epoch": 0.2948294829482948,
      "grad_norm": 3.09375,
      "learning_rate": 1.8074944893460692e-05,
      "loss": 1.2589,
      "step": 268
    },
    {
      "epoch": 0.29592959295929594,
      "grad_norm": 2.859375,
      "learning_rate": 1.8067597354886115e-05,
      "loss": 1.4395,
      "step": 269
    },
    {
      "epoch": 0.297029702970297,
      "grad_norm": 3.171875,
      "learning_rate": 1.8060249816311538e-05,
      "loss": 1.2779,
      "step": 270
    },
    {
      "epoch": 0.29812981298129815,
      "grad_norm": 2.484375,
      "learning_rate": 1.805290227773696e-05,
      "loss": 1.3757,
      "step": 271
    },
    {
      "epoch": 0.2992299229922992,
      "grad_norm": 2.96875,
      "learning_rate": 1.8045554739162383e-05,
      "loss": 1.2997,
      "step": 272
    },
    {
      "epoch": 0.30033003300330036,
      "grad_norm": 3.21875,
      "learning_rate": 1.8038207200587806e-05,
      "loss": 1.6125,
      "step": 273
    },
    {
      "epoch": 0.30143014301430143,
      "grad_norm": 3.140625,
      "learning_rate": 1.8030859662013228e-05,
      "loss": 1.4024,
      "step": 274
    },
    {
      "epoch": 0.3025302530253025,
      "grad_norm": 3.265625,
      "learning_rate": 1.8023512123438647e-05,
      "loss": 1.2908,
      "step": 275
    },
    {
      "epoch": 0.30363036303630364,
      "grad_norm": 3.203125,
      "learning_rate": 1.801616458486407e-05,
      "loss": 1.3183,
      "step": 276
    },
    {
      "epoch": 0.3047304730473047,
      "grad_norm": 2.8125,
      "learning_rate": 1.8008817046289496e-05,
      "loss": 1.3881,
      "step": 277
    },
    {
      "epoch": 0.30583058305830585,
      "grad_norm": 6.84375,
      "learning_rate": 1.800146950771492e-05,
      "loss": 1.4605,
      "step": 278
    },
    {
      "epoch": 0.3069306930693069,
      "grad_norm": 3.0,
      "learning_rate": 1.7994121969140338e-05,
      "loss": 1.3702,
      "step": 279
    },
    {
      "epoch": 0.30803080308030806,
      "grad_norm": 3.234375,
      "learning_rate": 1.798677443056576e-05,
      "loss": 1.29,
      "step": 280
    },
    {
      "epoch": 0.30913091309130913,
      "grad_norm": 2.71875,
      "learning_rate": 1.7979426891991187e-05,
      "loss": 1.4194,
      "step": 281
    },
    {
      "epoch": 0.3102310231023102,
      "grad_norm": 2.953125,
      "learning_rate": 1.7972079353416606e-05,
      "loss": 1.2229,
      "step": 282
    },
    {
      "epoch": 0.31133113311331134,
      "grad_norm": 3.21875,
      "learning_rate": 1.796473181484203e-05,
      "loss": 1.3527,
      "step": 283
    },
    {
      "epoch": 0.3124312431243124,
      "grad_norm": 3.625,
      "learning_rate": 1.795738427626745e-05,
      "loss": 1.5485,
      "step": 284
    },
    {
      "epoch": 0.31353135313531355,
      "grad_norm": 3.296875,
      "learning_rate": 1.7950036737692874e-05,
      "loss": 1.3383,
      "step": 285
    },
    {
      "epoch": 0.3146314631463146,
      "grad_norm": 3.109375,
      "learning_rate": 1.7942689199118296e-05,
      "loss": 1.2041,
      "step": 286
    },
    {
      "epoch": 0.31573157315731576,
      "grad_norm": 3.296875,
      "learning_rate": 1.793534166054372e-05,
      "loss": 1.5651,
      "step": 287
    },
    {
      "epoch": 0.31683168316831684,
      "grad_norm": 3.046875,
      "learning_rate": 1.792799412196914e-05,
      "loss": 1.3185,
      "step": 288
    },
    {
      "epoch": 0.3179317931793179,
      "grad_norm": 3.125,
      "learning_rate": 1.7920646583394564e-05,
      "loss": 1.4088,
      "step": 289
    },
    {
      "epoch": 0.31903190319031904,
      "grad_norm": 3.109375,
      "learning_rate": 1.7913299044819987e-05,
      "loss": 1.2913,
      "step": 290
    },
    {
      "epoch": 0.3201320132013201,
      "grad_norm": 2.84375,
      "learning_rate": 1.790595150624541e-05,
      "loss": 1.357,
      "step": 291
    },
    {
      "epoch": 0.32123212321232125,
      "grad_norm": 3.296875,
      "learning_rate": 1.7898603967670832e-05,
      "loss": 1.3788,
      "step": 292
    },
    {
      "epoch": 0.32233223322332233,
      "grad_norm": 2.921875,
      "learning_rate": 1.7891256429096255e-05,
      "loss": 1.2544,
      "step": 293
    },
    {
      "epoch": 0.3234323432343234,
      "grad_norm": 3.46875,
      "learning_rate": 1.7883908890521677e-05,
      "loss": 1.4215,
      "step": 294
    },
    {
      "epoch": 0.32453245324532454,
      "grad_norm": 3.109375,
      "learning_rate": 1.78765613519471e-05,
      "loss": 1.2093,
      "step": 295
    },
    {
      "epoch": 0.3256325632563256,
      "grad_norm": 3.0625,
      "learning_rate": 1.7869213813372523e-05,
      "loss": 1.3539,
      "step": 296
    },
    {
      "epoch": 0.32673267326732675,
      "grad_norm": 3.25,
      "learning_rate": 1.7861866274797942e-05,
      "loss": 1.4945,
      "step": 297
    },
    {
      "epoch": 0.3278327832783278,
      "grad_norm": 2.953125,
      "learning_rate": 1.7854518736223368e-05,
      "loss": 1.2134,
      "step": 298
    },
    {
      "epoch": 0.32893289328932895,
      "grad_norm": 2.546875,
      "learning_rate": 1.784717119764879e-05,
      "loss": 1.3133,
      "step": 299
    },
    {
      "epoch": 0.33003300330033003,
      "grad_norm": 3.625,
      "learning_rate": 1.783982365907421e-05,
      "loss": 1.4495,
      "step": 300
    },
    {
      "epoch": 0.3311331133113311,
      "grad_norm": 3.046875,
      "learning_rate": 1.7832476120499632e-05,
      "loss": 1.6174,
      "step": 301
    },
    {
      "epoch": 0.33223322332233224,
      "grad_norm": 2.765625,
      "learning_rate": 1.7825128581925058e-05,
      "loss": 1.3532,
      "step": 302
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 2.96875,
      "learning_rate": 1.781778104335048e-05,
      "loss": 1.4852,
      "step": 303
    },
    {
      "epoch": 0.33443344334433445,
      "grad_norm": 3.609375,
      "learning_rate": 1.78104335047759e-05,
      "loss": 1.3254,
      "step": 304
    },
    {
      "epoch": 0.3355335533553355,
      "grad_norm": 3.78125,
      "learning_rate": 1.7803085966201323e-05,
      "loss": 1.3653,
      "step": 305
    },
    {
      "epoch": 0.33663366336633666,
      "grad_norm": 3.671875,
      "learning_rate": 1.779573842762675e-05,
      "loss": 1.4128,
      "step": 306
    },
    {
      "epoch": 0.33773377337733773,
      "grad_norm": 3.234375,
      "learning_rate": 1.7788390889052168e-05,
      "loss": 1.5845,
      "step": 307
    },
    {
      "epoch": 0.3388338833883388,
      "grad_norm": 3.296875,
      "learning_rate": 1.778104335047759e-05,
      "loss": 1.384,
      "step": 308
    },
    {
      "epoch": 0.33993399339933994,
      "grad_norm": 3.109375,
      "learning_rate": 1.7773695811903013e-05,
      "loss": 1.1027,
      "step": 309
    },
    {
      "epoch": 0.341034103410341,
      "grad_norm": 3.25,
      "learning_rate": 1.7766348273328436e-05,
      "loss": 1.3536,
      "step": 310
    },
    {
      "epoch": 0.34213421342134215,
      "grad_norm": 3.28125,
      "learning_rate": 1.775900073475386e-05,
      "loss": 1.4542,
      "step": 311
    },
    {
      "epoch": 0.3432343234323432,
      "grad_norm": 3.203125,
      "learning_rate": 1.775165319617928e-05,
      "loss": 1.4599,
      "step": 312
    },
    {
      "epoch": 0.34433443344334436,
      "grad_norm": 3.359375,
      "learning_rate": 1.7744305657604704e-05,
      "loss": 1.3303,
      "step": 313
    },
    {
      "epoch": 0.34543454345434543,
      "grad_norm": 2.4375,
      "learning_rate": 1.7736958119030126e-05,
      "loss": 1.2277,
      "step": 314
    },
    {
      "epoch": 0.3465346534653465,
      "grad_norm": 3.171875,
      "learning_rate": 1.772961058045555e-05,
      "loss": 1.435,
      "step": 315
    },
    {
      "epoch": 0.34763476347634764,
      "grad_norm": 3.109375,
      "learning_rate": 1.772226304188097e-05,
      "loss": 1.7359,
      "step": 316
    },
    {
      "epoch": 0.3487348734873487,
      "grad_norm": 12.4375,
      "learning_rate": 1.7714915503306394e-05,
      "loss": 1.3275,
      "step": 317
    },
    {
      "epoch": 0.34983498349834985,
      "grad_norm": 3.015625,
      "learning_rate": 1.7707567964731817e-05,
      "loss": 1.4112,
      "step": 318
    },
    {
      "epoch": 0.3509350935093509,
      "grad_norm": 2.75,
      "learning_rate": 1.770022042615724e-05,
      "loss": 1.1483,
      "step": 319
    },
    {
      "epoch": 0.35203520352035206,
      "grad_norm": 3.28125,
      "learning_rate": 1.7692872887582662e-05,
      "loss": 1.1974,
      "step": 320
    },
    {
      "epoch": 0.35313531353135313,
      "grad_norm": 3.1875,
      "learning_rate": 1.7685525349008085e-05,
      "loss": 1.3615,
      "step": 321
    },
    {
      "epoch": 0.3542354235423542,
      "grad_norm": 3.359375,
      "learning_rate": 1.7678177810433504e-05,
      "loss": 1.6391,
      "step": 322
    },
    {
      "epoch": 0.35533553355335534,
      "grad_norm": 3.28125,
      "learning_rate": 1.767083027185893e-05,
      "loss": 1.5306,
      "step": 323
    },
    {
      "epoch": 0.3564356435643564,
      "grad_norm": 3.484375,
      "learning_rate": 1.7663482733284353e-05,
      "loss": 1.4618,
      "step": 324
    },
    {
      "epoch": 0.35753575357535755,
      "grad_norm": 2.734375,
      "learning_rate": 1.7656135194709775e-05,
      "loss": 1.295,
      "step": 325
    },
    {
      "epoch": 0.3586358635863586,
      "grad_norm": 2.765625,
      "learning_rate": 1.7648787656135195e-05,
      "loss": 1.5152,
      "step": 326
    },
    {
      "epoch": 0.35973597359735976,
      "grad_norm": 2.921875,
      "learning_rate": 1.7641440117560617e-05,
      "loss": 1.2689,
      "step": 327
    },
    {
      "epoch": 0.36083608360836084,
      "grad_norm": 2.65625,
      "learning_rate": 1.7634092578986043e-05,
      "loss": 1.378,
      "step": 328
    },
    {
      "epoch": 0.3619361936193619,
      "grad_norm": 3.09375,
      "learning_rate": 1.7626745040411462e-05,
      "loss": 1.4205,
      "step": 329
    },
    {
      "epoch": 0.36303630363036304,
      "grad_norm": 3.078125,
      "learning_rate": 1.7619397501836885e-05,
      "loss": 1.3336,
      "step": 330
    },
    {
      "epoch": 0.3641364136413641,
      "grad_norm": 3.328125,
      "learning_rate": 1.7612049963262308e-05,
      "loss": 1.2472,
      "step": 331
    },
    {
      "epoch": 0.36523652365236525,
      "grad_norm": 2.921875,
      "learning_rate": 1.760470242468773e-05,
      "loss": 1.0776,
      "step": 332
    },
    {
      "epoch": 0.36633663366336633,
      "grad_norm": 3.40625,
      "learning_rate": 1.7597354886113153e-05,
      "loss": 1.6699,
      "step": 333
    },
    {
      "epoch": 0.36743674367436746,
      "grad_norm": 2.859375,
      "learning_rate": 1.7590007347538576e-05,
      "loss": 1.468,
      "step": 334
    },
    {
      "epoch": 0.36853685368536854,
      "grad_norm": 2.984375,
      "learning_rate": 1.7582659808963998e-05,
      "loss": 1.4175,
      "step": 335
    },
    {
      "epoch": 0.3696369636963696,
      "grad_norm": 3.140625,
      "learning_rate": 1.757531227038942e-05,
      "loss": 1.4425,
      "step": 336
    },
    {
      "epoch": 0.37073707370737075,
      "grad_norm": 2.875,
      "learning_rate": 1.7567964731814843e-05,
      "loss": 1.3451,
      "step": 337
    },
    {
      "epoch": 0.3718371837183718,
      "grad_norm": 3.203125,
      "learning_rate": 1.7560617193240266e-05,
      "loss": 1.3692,
      "step": 338
    },
    {
      "epoch": 0.37293729372937295,
      "grad_norm": 3.03125,
      "learning_rate": 1.755326965466569e-05,
      "loss": 1.235,
      "step": 339
    },
    {
      "epoch": 0.37403740374037403,
      "grad_norm": 2.875,
      "learning_rate": 1.754592211609111e-05,
      "loss": 1.3405,
      "step": 340
    },
    {
      "epoch": 0.37513751375137516,
      "grad_norm": 3.765625,
      "learning_rate": 1.7538574577516534e-05,
      "loss": 1.158,
      "step": 341
    },
    {
      "epoch": 0.37623762376237624,
      "grad_norm": 3.1875,
      "learning_rate": 1.7531227038941957e-05,
      "loss": 1.303,
      "step": 342
    },
    {
      "epoch": 0.3773377337733773,
      "grad_norm": 3.03125,
      "learning_rate": 1.752387950036738e-05,
      "loss": 1.5699,
      "step": 343
    },
    {
      "epoch": 0.37843784378437845,
      "grad_norm": 2.84375,
      "learning_rate": 1.75165319617928e-05,
      "loss": 1.2761,
      "step": 344
    },
    {
      "epoch": 0.3795379537953795,
      "grad_norm": 2.921875,
      "learning_rate": 1.7509184423218224e-05,
      "loss": 1.302,
      "step": 345
    },
    {
      "epoch": 0.38063806380638066,
      "grad_norm": 3.140625,
      "learning_rate": 1.7501836884643647e-05,
      "loss": 1.4551,
      "step": 346
    },
    {
      "epoch": 0.38173817381738173,
      "grad_norm": 2.84375,
      "learning_rate": 1.7494489346069066e-05,
      "loss": 1.3252,
      "step": 347
    },
    {
      "epoch": 0.38283828382838286,
      "grad_norm": 2.953125,
      "learning_rate": 1.748714180749449e-05,
      "loss": 1.2601,
      "step": 348
    },
    {
      "epoch": 0.38393839383938394,
      "grad_norm": 2.6875,
      "learning_rate": 1.7479794268919915e-05,
      "loss": 1.2193,
      "step": 349
    },
    {
      "epoch": 0.385038503850385,
      "grad_norm": 3.296875,
      "learning_rate": 1.7472446730345338e-05,
      "loss": 1.4578,
      "step": 350
    },
    {
      "epoch": 0.38613861386138615,
      "grad_norm": 3.109375,
      "learning_rate": 1.7465099191770757e-05,
      "loss": 1.2563,
      "step": 351
    },
    {
      "epoch": 0.3872387238723872,
      "grad_norm": 3.0,
      "learning_rate": 1.745775165319618e-05,
      "loss": 1.4019,
      "step": 352
    },
    {
      "epoch": 0.38833883388338836,
      "grad_norm": 2.984375,
      "learning_rate": 1.7450404114621605e-05,
      "loss": 1.3822,
      "step": 353
    },
    {
      "epoch": 0.38943894389438943,
      "grad_norm": 2.84375,
      "learning_rate": 1.7443056576047025e-05,
      "loss": 1.3904,
      "step": 354
    },
    {
      "epoch": 0.39053905390539057,
      "grad_norm": 3.234375,
      "learning_rate": 1.7435709037472447e-05,
      "loss": 1.2883,
      "step": 355
    },
    {
      "epoch": 0.39163916391639164,
      "grad_norm": 2.3125,
      "learning_rate": 1.742836149889787e-05,
      "loss": 1.3661,
      "step": 356
    },
    {
      "epoch": 0.3927392739273927,
      "grad_norm": 3.734375,
      "learning_rate": 1.7421013960323293e-05,
      "loss": 1.2189,
      "step": 357
    },
    {
      "epoch": 0.39383938393839385,
      "grad_norm": 3.0,
      "learning_rate": 1.7413666421748715e-05,
      "loss": 1.5372,
      "step": 358
    },
    {
      "epoch": 0.3949394939493949,
      "grad_norm": 3.3125,
      "learning_rate": 1.7406318883174138e-05,
      "loss": 1.2599,
      "step": 359
    },
    {
      "epoch": 0.39603960396039606,
      "grad_norm": 3.078125,
      "learning_rate": 1.739897134459956e-05,
      "loss": 1.475,
      "step": 360
    },
    {
      "epoch": 0.39713971397139713,
      "grad_norm": 3.125,
      "learning_rate": 1.7391623806024983e-05,
| "loss": 1.3623, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.39823982398239827, | |
| "grad_norm": 3.09375, | |
| "learning_rate": 1.7384276267450406e-05, | |
| "loss": 1.273, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.39933993399339934, | |
| "grad_norm": 3.078125, | |
| "learning_rate": 1.7376928728875828e-05, | |
| "loss": 1.5387, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.4004400440044004, | |
| "grad_norm": 3.046875, | |
| "learning_rate": 1.736958119030125e-05, | |
| "loss": 1.4525, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.40154015401540155, | |
| "grad_norm": 2.984375, | |
| "learning_rate": 1.7362233651726674e-05, | |
| "loss": 1.6482, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.40264026402640263, | |
| "grad_norm": 3.421875, | |
| "learning_rate": 1.7354886113152096e-05, | |
| "loss": 1.3836, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.40374037403740376, | |
| "grad_norm": 3.28125, | |
| "learning_rate": 1.734753857457752e-05, | |
| "loss": 1.2272, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.40484048404840484, | |
| "grad_norm": 3.203125, | |
| "learning_rate": 1.734019103600294e-05, | |
| "loss": 1.4677, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.40594059405940597, | |
| "grad_norm": 2.671875, | |
| "learning_rate": 1.733284349742836e-05, | |
| "loss": 1.128, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.40704070407040704, | |
| "grad_norm": 2.890625, | |
| "learning_rate": 1.7325495958853787e-05, | |
| "loss": 1.4081, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.4081408140814081, | |
| "grad_norm": 2.546875, | |
| "learning_rate": 1.731814842027921e-05, | |
| "loss": 1.5078, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.40924092409240925, | |
| "grad_norm": 3.0, | |
| "learning_rate": 1.7310800881704632e-05, | |
| "loss": 1.2856, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.41034103410341033, | |
| "grad_norm": 3.21875, | |
| "learning_rate": 1.730345334313005e-05, | |
| "loss": 1.5191, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.41144114411441146, | |
| "grad_norm": 2.796875, | |
| "learning_rate": 1.7296105804555477e-05, | |
| "loss": 1.191, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.41254125412541254, | |
| "grad_norm": 3.234375, | |
| "learning_rate": 1.72887582659809e-05, | |
| "loss": 1.405, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.4136413641364136, | |
| "grad_norm": 3.171875, | |
| "learning_rate": 1.728141072740632e-05, | |
| "loss": 1.4146, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.41474147414741475, | |
| "grad_norm": 3.28125, | |
| "learning_rate": 1.727406318883174e-05, | |
| "loss": 1.268, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.4158415841584158, | |
| "grad_norm": 3.3125, | |
| "learning_rate": 1.7266715650257164e-05, | |
| "loss": 1.4629, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.41694169416941695, | |
| "grad_norm": 2.9375, | |
| "learning_rate": 1.7259368111682587e-05, | |
| "loss": 1.1727, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.41804180418041803, | |
| "grad_norm": 2.8125, | |
| "learning_rate": 1.725202057310801e-05, | |
| "loss": 1.5609, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.41914191419141916, | |
| "grad_norm": 3.25, | |
| "learning_rate": 1.7244673034533432e-05, | |
| "loss": 1.3317, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.42024202420242024, | |
| "grad_norm": 3.171875, | |
| "learning_rate": 1.7237325495958855e-05, | |
| "loss": 1.5309, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.4213421342134213, | |
| "grad_norm": 3.390625, | |
| "learning_rate": 1.7229977957384277e-05, | |
| "loss": 1.5585, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.42244224422442245, | |
| "grad_norm": 3.640625, | |
| "learning_rate": 1.72226304188097e-05, | |
| "loss": 1.6065, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.4235423542354235, | |
| "grad_norm": 3.25, | |
| "learning_rate": 1.7215282880235123e-05, | |
| "loss": 1.3861, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.42464246424642466, | |
| "grad_norm": 2.4375, | |
| "learning_rate": 1.7207935341660545e-05, | |
| "loss": 1.5127, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.42574257425742573, | |
| "grad_norm": 2.84375, | |
| "learning_rate": 1.7200587803085968e-05, | |
| "loss": 1.4546, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.42684268426842686, | |
| "grad_norm": 3.1875, | |
| "learning_rate": 1.719324026451139e-05, | |
| "loss": 1.289, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.42794279427942794, | |
| "grad_norm": 2.953125, | |
| "learning_rate": 1.7185892725936813e-05, | |
| "loss": 1.3582, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.429042904290429, | |
| "grad_norm": 2.6875, | |
| "learning_rate": 1.7178545187362236e-05, | |
| "loss": 1.515, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.43014301430143015, | |
| "grad_norm": 3.140625, | |
| "learning_rate": 1.717119764878766e-05, | |
| "loss": 1.2536, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.4312431243124312, | |
| "grad_norm": 3.5, | |
| "learning_rate": 1.716385011021308e-05, | |
| "loss": 1.4406, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.43234323432343236, | |
| "grad_norm": 2.671875, | |
| "learning_rate": 1.7156502571638504e-05, | |
| "loss": 1.4176, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.43344334433443343, | |
| "grad_norm": 3.3125, | |
| "learning_rate": 1.7149155033063923e-05, | |
| "loss": 1.4653, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.43454345434543457, | |
| "grad_norm": 2.984375, | |
| "learning_rate": 1.7141807494489346e-05, | |
| "loss": 1.3209, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.43564356435643564, | |
| "grad_norm": 3.046875, | |
| "learning_rate": 1.713445995591477e-05, | |
| "loss": 1.4635, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.4367436743674367, | |
| "grad_norm": 3.125, | |
| "learning_rate": 1.7127112417340194e-05, | |
| "loss": 1.284, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.43784378437843785, | |
| "grad_norm": 2.984375, | |
| "learning_rate": 1.7119764878765613e-05, | |
| "loss": 1.4131, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.4389438943894389, | |
| "grad_norm": 3.015625, | |
| "learning_rate": 1.7112417340191036e-05, | |
| "loss": 1.4695, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.44004400440044006, | |
| "grad_norm": 3.234375, | |
| "learning_rate": 1.7105069801616462e-05, | |
| "loss": 1.5544, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.44114411441144114, | |
| "grad_norm": 3.078125, | |
| "learning_rate": 1.709772226304188e-05, | |
| "loss": 1.4181, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.44224422442244227, | |
| "grad_norm": 3.125, | |
| "learning_rate": 1.7090374724467304e-05, | |
| "loss": 1.4601, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.44334433443344334, | |
| "grad_norm": 2.578125, | |
| "learning_rate": 1.7083027185892727e-05, | |
| "loss": 1.3168, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.4444444444444444, | |
| "grad_norm": 2.59375, | |
| "learning_rate": 1.707567964731815e-05, | |
| "loss": 1.243, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.44554455445544555, | |
| "grad_norm": 2.9375, | |
| "learning_rate": 1.7068332108743572e-05, | |
| "loss": 1.3828, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.44664466446644663, | |
| "grad_norm": 2.671875, | |
| "learning_rate": 1.7060984570168994e-05, | |
| "loss": 1.4213, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.44774477447744776, | |
| "grad_norm": 2.9375, | |
| "learning_rate": 1.7053637031594417e-05, | |
| "loss": 1.319, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.44884488448844884, | |
| "grad_norm": 3.03125, | |
| "learning_rate": 1.704628949301984e-05, | |
| "loss": 1.4987, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.44994499449944997, | |
| "grad_norm": 3.34375, | |
| "learning_rate": 1.7038941954445262e-05, | |
| "loss": 1.2332, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.45104510451045104, | |
| "grad_norm": 2.609375, | |
| "learning_rate": 1.7031594415870685e-05, | |
| "loss": 1.3593, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.4521452145214521, | |
| "grad_norm": 2.84375, | |
| "learning_rate": 1.7024246877296108e-05, | |
| "loss": 1.122, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.45324532453245325, | |
| "grad_norm": 4.15625, | |
| "learning_rate": 1.701689933872153e-05, | |
| "loss": 1.3008, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.45434543454345433, | |
| "grad_norm": 3.578125, | |
| "learning_rate": 1.7009551800146953e-05, | |
| "loss": 1.2515, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.45544554455445546, | |
| "grad_norm": 3.421875, | |
| "learning_rate": 1.7002204261572375e-05, | |
| "loss": 1.3769, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.45654565456545654, | |
| "grad_norm": 3.171875, | |
| "learning_rate": 1.6994856722997798e-05, | |
| "loss": 1.4572, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.45764576457645767, | |
| "grad_norm": 2.8125, | |
| "learning_rate": 1.6987509184423217e-05, | |
| "loss": 1.3117, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.45874587458745875, | |
| "grad_norm": 3.109375, | |
| "learning_rate": 1.6980161645848643e-05, | |
| "loss": 1.4277, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.4598459845984598, | |
| "grad_norm": 3.234375, | |
| "learning_rate": 1.6972814107274066e-05, | |
| "loss": 1.392, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.46094609460946095, | |
| "grad_norm": 3.296875, | |
| "learning_rate": 1.6965466568699485e-05, | |
| "loss": 1.2267, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.46204620462046203, | |
| "grad_norm": 3.0, | |
| "learning_rate": 1.6958119030124908e-05, | |
| "loss": 1.2943, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.46314631463146316, | |
| "grad_norm": 2.765625, | |
| "learning_rate": 1.6950771491550334e-05, | |
| "loss": 1.2395, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.46424642464246424, | |
| "grad_norm": 2.84375, | |
| "learning_rate": 1.6943423952975756e-05, | |
| "loss": 1.3261, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.46534653465346537, | |
| "grad_norm": 2.765625, | |
| "learning_rate": 1.6936076414401176e-05, | |
| "loss": 1.1243, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.46644664466446645, | |
| "grad_norm": 3.15625, | |
| "learning_rate": 1.69287288758266e-05, | |
| "loss": 1.4143, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.4675467546754675, | |
| "grad_norm": 2.953125, | |
| "learning_rate": 1.6921381337252024e-05, | |
| "loss": 1.4624, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.46864686468646866, | |
| "grad_norm": 3.21875, | |
| "learning_rate": 1.6914033798677444e-05, | |
| "loss": 1.4152, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.46974697469746973, | |
| "grad_norm": 3.0625, | |
| "learning_rate": 1.6906686260102866e-05, | |
| "loss": 1.2366, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.47084708470847086, | |
| "grad_norm": 3.15625, | |
| "learning_rate": 1.689933872152829e-05, | |
| "loss": 1.2582, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.47194719471947194, | |
| "grad_norm": 2.953125, | |
| "learning_rate": 1.689199118295371e-05, | |
| "loss": 1.4998, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.4730473047304731, | |
| "grad_norm": 3.234375, | |
| "learning_rate": 1.6884643644379134e-05, | |
| "loss": 1.3833, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.47414741474147415, | |
| "grad_norm": 2.53125, | |
| "learning_rate": 1.6877296105804557e-05, | |
| "loss": 1.2878, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.4752475247524752, | |
| "grad_norm": 2.734375, | |
| "learning_rate": 1.686994856722998e-05, | |
| "loss": 1.5415, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.47634763476347636, | |
| "grad_norm": 2.828125, | |
| "learning_rate": 1.6862601028655402e-05, | |
| "loss": 1.2775, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.47744774477447743, | |
| "grad_norm": 3.34375, | |
| "learning_rate": 1.6855253490080825e-05, | |
| "loss": 1.2987, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.47854785478547857, | |
| "grad_norm": 3.203125, | |
| "learning_rate": 1.6847905951506247e-05, | |
| "loss": 1.4325, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.47964796479647964, | |
| "grad_norm": 2.921875, | |
| "learning_rate": 1.684055841293167e-05, | |
| "loss": 1.2542, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.4807480748074808, | |
| "grad_norm": 3.109375, | |
| "learning_rate": 1.6833210874357092e-05, | |
| "loss": 1.4031, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.48184818481848185, | |
| "grad_norm": 3.21875, | |
| "learning_rate": 1.6825863335782515e-05, | |
| "loss": 1.5221, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.4829482948294829, | |
| "grad_norm": 3.3125, | |
| "learning_rate": 1.6818515797207938e-05, | |
| "loss": 1.263, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.48404840484048406, | |
| "grad_norm": 2.984375, | |
| "learning_rate": 1.681116825863336e-05, | |
| "loss": 1.49, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.48514851485148514, | |
| "grad_norm": 3.28125, | |
| "learning_rate": 1.680382072005878e-05, | |
| "loss": 1.2209, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.48624862486248627, | |
| "grad_norm": 2.8125, | |
| "learning_rate": 1.6796473181484206e-05, | |
| "loss": 1.4303, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.48734873487348734, | |
| "grad_norm": 2.703125, | |
| "learning_rate": 1.6789125642909628e-05, | |
| "loss": 1.4277, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.4884488448844885, | |
| "grad_norm": 3.34375, | |
| "learning_rate": 1.678177810433505e-05, | |
| "loss": 1.5043, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.48954895489548955, | |
| "grad_norm": 3.359375, | |
| "learning_rate": 1.677443056576047e-05, | |
| "loss": 1.3045, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.49064906490649063, | |
| "grad_norm": 4.21875, | |
| "learning_rate": 1.6767083027185893e-05, | |
| "loss": 1.2976, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.49174917491749176, | |
| "grad_norm": 2.953125, | |
| "learning_rate": 1.675973548861132e-05, | |
| "loss": 1.3205, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.49284928492849284, | |
| "grad_norm": 2.953125, | |
| "learning_rate": 1.6752387950036738e-05, | |
| "loss": 1.5202, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.49394939493949397, | |
| "grad_norm": 3.328125, | |
| "learning_rate": 1.674504041146216e-05, | |
| "loss": 1.2326, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.49504950495049505, | |
| "grad_norm": 3.625, | |
| "learning_rate": 1.6737692872887583e-05, | |
| "loss": 1.7037, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.4961496149614962, | |
| "grad_norm": 3.34375, | |
| "learning_rate": 1.6730345334313006e-05, | |
| "loss": 1.2061, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.49724972497249725, | |
| "grad_norm": 3.375, | |
| "learning_rate": 1.672299779573843e-05, | |
| "loss": 1.3182, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.49834983498349833, | |
| "grad_norm": 2.703125, | |
| "learning_rate": 1.671565025716385e-05, | |
| "loss": 1.1659, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.49944994499449946, | |
| "grad_norm": 2.984375, | |
| "learning_rate": 1.6708302718589274e-05, | |
| "loss": 1.4035, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.5005500550055005, | |
| "grad_norm": 3.296875, | |
| "learning_rate": 1.6700955180014696e-05, | |
| "loss": 1.3379, | |
| "step": 455 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 2727, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 455, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.453968500526981e+17, | |
| "train_batch_size": 16, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
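
The object above appears to be a Hugging Face `transformers` Trainer state snapshot (`trainer_state.json`), saved at step 455 of 2727 with per-step logging (`logging_steps: 1`). Below is a minimal sketch of how one might load it and plot the loss and learning-rate curves it records; the checkpoint path `checkpoint-455/trainer_state.json` is an assumption and should be adjusted to wherever the file actually lives.

```python
# Minimal sketch: inspect a saved trainer_state.json like the one above.
# Assumption: the file sits at "checkpoint-455/trainer_state.json".
import json

import matplotlib.pyplot as plt

with open("checkpoint-455/trainer_state.json") as f:
    state = json.load(f)

# Each log_history entry carries step, epoch, loss, learning_rate, grad_norm.
history = state["log_history"]
steps = [entry["step"] for entry in history]
losses = [entry["loss"] for entry in history]
lrs = [entry["learning_rate"] for entry in history]

fig, (ax_loss, ax_lr) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
ax_loss.plot(steps, losses)
ax_loss.set_ylabel("training loss")
ax_lr.plot(steps, lrs)
ax_lr.set_ylabel("learning rate")
ax_lr.set_xlabel("step")
fig.suptitle(f"step {state['global_step']} / {state['max_steps']}")
plt.tight_layout()
plt.show()
```

Run against this snapshot, the plot would show the loss drifting down from roughly 2.0 at step 1 to the 1.2-1.5 range around step 455, and the learning rate ramping to 2e-05 over the first few steps before decaying linearly to about 1.67e-05.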