{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.5125815470643057,
  "eval_steps": 500,
  "global_step": 550,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004659832246039142,
      "grad_norm": 55.709484937361424,
      "learning_rate": 7.763975155279503e-07,
      "loss": 11.0694,
      "step": 5
    },
    {
      "epoch": 0.009319664492078284,
      "grad_norm": 57.1398137451701,
      "learning_rate": 1.5527950310559006e-06,
      "loss": 10.8846,
      "step": 10
    },
    {
      "epoch": 0.013979496738117428,
      "grad_norm": 97.98047478379912,
      "learning_rate": 2.329192546583851e-06,
      "loss": 9.4413,
      "step": 15
    },
    {
      "epoch": 0.01863932898415657,
      "grad_norm": 32.76887701215898,
      "learning_rate": 3.1055900621118013e-06,
      "loss": 3.214,
      "step": 20
    },
    {
      "epoch": 0.023299161230195712,
      "grad_norm": 3.315394944010228,
      "learning_rate": 3.881987577639752e-06,
      "loss": 1.346,
      "step": 25
    },
    {
      "epoch": 0.027958993476234855,
      "grad_norm": 1.29231664681427,
      "learning_rate": 4.658385093167702e-06,
      "loss": 1.006,
      "step": 30
    },
    {
      "epoch": 0.032618825722273995,
      "grad_norm": 0.8629586527731651,
      "learning_rate": 5.4347826086956525e-06,
      "loss": 0.8411,
      "step": 35
    },
    {
      "epoch": 0.03727865796831314,
      "grad_norm": 0.6212648141101664,
      "learning_rate": 6.2111801242236025e-06,
      "loss": 0.7754,
      "step": 40
    },
    {
      "epoch": 0.04193849021435228,
      "grad_norm": 0.46040470834395936,
      "learning_rate": 6.9875776397515525e-06,
      "loss": 0.7136,
      "step": 45
    },
    {
      "epoch": 0.046598322460391424,
      "grad_norm": 0.42693871288027446,
      "learning_rate": 7.763975155279503e-06,
      "loss": 0.6831,
      "step": 50
    },
    {
      "epoch": 0.05125815470643057,
      "grad_norm": 0.44192720551532577,
      "learning_rate": 8.540372670807453e-06,
      "loss": 0.646,
      "step": 55
    },
    {
      "epoch": 0.05591798695246971,
      "grad_norm": 0.31222059058105195,
      "learning_rate": 9.316770186335403e-06,
      "loss": 0.6088,
      "step": 60
    },
    {
      "epoch": 0.06057781919850885,
      "grad_norm": 0.30027349299916717,
      "learning_rate": 1.0093167701863353e-05,
      "loss": 0.5932,
      "step": 65
    },
    {
      "epoch": 0.06523765144454799,
      "grad_norm": 0.3320480978786744,
      "learning_rate": 1.0869565217391305e-05,
      "loss": 0.5892,
      "step": 70
    },
    {
      "epoch": 0.06989748369058714,
      "grad_norm": 0.3001101299512147,
      "learning_rate": 1.1645962732919255e-05,
      "loss": 0.5713,
      "step": 75
    },
    {
      "epoch": 0.07455731593662628,
      "grad_norm": 0.30892715626333545,
      "learning_rate": 1.2422360248447205e-05,
      "loss": 0.5619,
      "step": 80
    },
    {
      "epoch": 0.07921714818266543,
      "grad_norm": 0.3319072685834915,
      "learning_rate": 1.3198757763975155e-05,
      "loss": 0.5525,
      "step": 85
    },
    {
      "epoch": 0.08387698042870456,
      "grad_norm": 0.29047402255141175,
      "learning_rate": 1.3975155279503105e-05,
      "loss": 0.5628,
      "step": 90
    },
    {
      "epoch": 0.08853681267474371,
      "grad_norm": 0.25922715271689156,
      "learning_rate": 1.4751552795031057e-05,
      "loss": 0.5443,
      "step": 95
    },
    {
      "epoch": 0.09319664492078285,
      "grad_norm": 0.31531185304643106,
      "learning_rate": 1.5527950310559007e-05,
      "loss": 0.5296,
      "step": 100
    },
    {
      "epoch": 0.097856477166822,
      "grad_norm": 0.3013647908850556,
      "learning_rate": 1.630434782608696e-05,
      "loss": 0.5243,
      "step": 105
    },
    {
      "epoch": 0.10251630941286113,
      "grad_norm": 0.3047350591065195,
      "learning_rate": 1.7080745341614907e-05,
      "loss": 0.5261,
      "step": 110
    },
    {
      "epoch": 0.10717614165890028,
      "grad_norm": 0.30780196150291267,
      "learning_rate": 1.785714285714286e-05,
      "loss": 0.5231,
      "step": 115
    },
    {
      "epoch": 0.11183597390493942,
      "grad_norm": 0.3804666896095633,
      "learning_rate": 1.8633540372670807e-05,
      "loss": 0.5098,
      "step": 120
    },
    {
      "epoch": 0.11649580615097857,
      "grad_norm": 0.2868238329098672,
      "learning_rate": 1.940993788819876e-05,
      "loss": 0.5057,
      "step": 125
    },
    {
      "epoch": 0.1211556383970177,
      "grad_norm": 0.32341698454060924,
      "learning_rate": 2.0186335403726707e-05,
      "loss": 0.5114,
      "step": 130
    },
    {
      "epoch": 0.12581547064305684,
      "grad_norm": 0.4221000244611152,
      "learning_rate": 2.096273291925466e-05,
      "loss": 0.5106,
      "step": 135
    },
    {
      "epoch": 0.13047530288909598,
      "grad_norm": 0.47197386491626697,
      "learning_rate": 2.173913043478261e-05,
      "loss": 0.4951,
      "step": 140
    },
    {
      "epoch": 0.13513513513513514,
      "grad_norm": 0.3721540533945164,
      "learning_rate": 2.2515527950310562e-05,
      "loss": 0.522,
      "step": 145
    },
    {
      "epoch": 0.13979496738117428,
      "grad_norm": 0.4388944569421825,
      "learning_rate": 2.329192546583851e-05,
      "loss": 0.4959,
      "step": 150
    },
    {
      "epoch": 0.14445479962721341,
      "grad_norm": 0.4622898334441083,
      "learning_rate": 2.4068322981366462e-05,
      "loss": 0.5041,
      "step": 155
    },
    {
      "epoch": 0.14911463187325255,
      "grad_norm": 0.49630920093444675,
      "learning_rate": 2.484472049689441e-05,
      "loss": 0.4849,
      "step": 160
    },
    {
      "epoch": 0.15377446411929171,
      "grad_norm": 0.46849210325540636,
      "learning_rate": 2.5621118012422362e-05,
      "loss": 0.4961,
      "step": 165
    },
    {
      "epoch": 0.15843429636533085,
      "grad_norm": 0.4082563546888238,
      "learning_rate": 2.639751552795031e-05,
      "loss": 0.4719,
      "step": 170
    },
    {
      "epoch": 0.16309412861137,
      "grad_norm": 0.4549138940471945,
      "learning_rate": 2.7173913043478262e-05,
      "loss": 0.4852,
      "step": 175
    },
    {
      "epoch": 0.16775396085740912,
      "grad_norm": 0.41032309538662626,
      "learning_rate": 2.795031055900621e-05,
      "loss": 0.478,
      "step": 180
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 0.36866874054662224,
      "learning_rate": 2.8726708074534165e-05,
      "loss": 0.481,
      "step": 185
    },
    {
      "epoch": 0.17707362534948742,
      "grad_norm": 0.36280659725390446,
      "learning_rate": 2.9503105590062114e-05,
      "loss": 0.4908,
      "step": 190
    },
    {
      "epoch": 0.18173345759552656,
      "grad_norm": 0.4446282244198985,
      "learning_rate": 3.0279503105590062e-05,
      "loss": 0.4802,
      "step": 195
    },
    {
      "epoch": 0.1863932898415657,
      "grad_norm": 0.4818264381472692,
      "learning_rate": 3.1055900621118014e-05,
      "loss": 0.4818,
      "step": 200
    },
    {
      "epoch": 0.19105312208760486,
      "grad_norm": 0.5191372230429591,
      "learning_rate": 3.183229813664597e-05,
      "loss": 0.4708,
      "step": 205
    },
    {
      "epoch": 0.195712954333644,
      "grad_norm": 0.5690145277815086,
      "learning_rate": 3.260869565217392e-05,
      "loss": 0.4848,
      "step": 210
    },
    {
      "epoch": 0.20037278657968313,
      "grad_norm": 0.41006685540485915,
      "learning_rate": 3.3385093167701865e-05,
      "loss": 0.467,
      "step": 215
    },
    {
      "epoch": 0.20503261882572227,
      "grad_norm": 0.47272355547942274,
      "learning_rate": 3.4161490683229814e-05,
      "loss": 0.4716,
      "step": 220
    },
    {
      "epoch": 0.2096924510717614,
      "grad_norm": 0.5135260711958035,
      "learning_rate": 3.493788819875777e-05,
      "loss": 0.4536,
      "step": 225
    },
    {
      "epoch": 0.21435228331780057,
      "grad_norm": 0.5124408748792798,
      "learning_rate": 3.571428571428572e-05,
      "loss": 0.467,
      "step": 230
    },
    {
      "epoch": 0.2190121155638397,
      "grad_norm": 0.4205466417850958,
      "learning_rate": 3.6490683229813665e-05,
      "loss": 0.4647,
      "step": 235
    },
    {
      "epoch": 0.22367194780987884,
      "grad_norm": 0.6251611138936444,
      "learning_rate": 3.7267080745341614e-05,
      "loss": 0.4624,
      "step": 240
    },
    {
      "epoch": 0.22833178005591798,
      "grad_norm": 0.5610747747609784,
      "learning_rate": 3.804347826086957e-05,
      "loss": 0.4676,
      "step": 245
    },
    {
      "epoch": 0.23299161230195714,
      "grad_norm": 0.4692752738735764,
      "learning_rate": 3.881987577639752e-05,
      "loss": 0.4592,
      "step": 250
    },
    {
      "epoch": 0.23765144454799628,
      "grad_norm": 0.5435654364057863,
      "learning_rate": 3.9596273291925465e-05,
      "loss": 0.4679,
      "step": 255
    },
    {
      "epoch": 0.2423112767940354,
      "grad_norm": 0.5264727485653637,
      "learning_rate": 4.0372670807453414e-05,
      "loss": 0.463,
      "step": 260
    },
    {
      "epoch": 0.24697110904007455,
      "grad_norm": 0.5096976732033014,
      "learning_rate": 4.114906832298137e-05,
      "loss": 0.4613,
      "step": 265
    },
    {
      "epoch": 0.2516309412861137,
      "grad_norm": 0.4649125930738299,
      "learning_rate": 4.192546583850932e-05,
      "loss": 0.4594,
      "step": 270
    },
    {
      "epoch": 0.25629077353215285,
      "grad_norm": 0.5173502447936299,
      "learning_rate": 4.270186335403727e-05,
      "loss": 0.4652,
      "step": 275
    },
    {
      "epoch": 0.26095060577819196,
      "grad_norm": 0.3868002746815184,
      "learning_rate": 4.347826086956522e-05,
      "loss": 0.4572,
      "step": 280
    },
    {
      "epoch": 0.2656104380242311,
      "grad_norm": 0.5001940885551612,
      "learning_rate": 4.425465838509317e-05,
      "loss": 0.4613,
      "step": 285
    },
    {
      "epoch": 0.2702702702702703,
      "grad_norm": 0.6213239402228385,
      "learning_rate": 4.5031055900621124e-05,
      "loss": 0.4633,
      "step": 290
    },
    {
      "epoch": 0.2749301025163094,
      "grad_norm": 0.6125652570862165,
      "learning_rate": 4.580745341614907e-05,
      "loss": 0.4673,
      "step": 295
    },
    {
      "epoch": 0.27958993476234856,
      "grad_norm": 0.7479197761570839,
      "learning_rate": 4.658385093167702e-05,
      "loss": 0.4693,
      "step": 300
    },
    {
      "epoch": 0.2842497670083877,
      "grad_norm": 0.5483826450591869,
      "learning_rate": 4.736024844720497e-05,
      "loss": 0.4579,
      "step": 305
    },
    {
      "epoch": 0.28890959925442683,
      "grad_norm": 0.6644586513259841,
      "learning_rate": 4.8136645962732924e-05,
      "loss": 0.45,
      "step": 310
    },
    {
      "epoch": 0.293569431500466,
      "grad_norm": 0.8188490252541196,
      "learning_rate": 4.891304347826087e-05,
      "loss": 0.459,
      "step": 315
    },
    {
      "epoch": 0.2982292637465051,
      "grad_norm": 1.034709523419619,
      "learning_rate": 4.968944099378882e-05,
      "loss": 0.4441,
      "step": 320
    },
    {
      "epoch": 0.30288909599254427,
      "grad_norm": 0.759919110757548,
      "learning_rate": 4.994822229892993e-05,
      "loss": 0.4415,
      "step": 325
    },
    {
      "epoch": 0.30754892823858343,
      "grad_norm": 0.6470711913841568,
      "learning_rate": 4.986192613047981e-05,
      "loss": 0.4519,
      "step": 330
    },
    {
      "epoch": 0.31220876048462254,
      "grad_norm": 0.8153464642106912,
      "learning_rate": 4.977562996202969e-05,
      "loss": 0.4527,
      "step": 335
    },
    {
      "epoch": 0.3168685927306617,
      "grad_norm": 0.4698984383111023,
      "learning_rate": 4.968933379357957e-05,
      "loss": 0.4532,
      "step": 340
    },
    {
      "epoch": 0.32152842497670087,
      "grad_norm": 0.4756319068347418,
      "learning_rate": 4.9603037625129445e-05,
      "loss": 0.4479,
      "step": 345
    },
    {
      "epoch": 0.32618825722274,
      "grad_norm": 0.35969300915800667,
      "learning_rate": 4.951674145667933e-05,
      "loss": 0.4613,
      "step": 350
    },
    {
      "epoch": 0.33084808946877914,
      "grad_norm": 0.5175115868112082,
      "learning_rate": 4.94304452882292e-05,
      "loss": 0.4473,
      "step": 355
    },
    {
      "epoch": 0.33550792171481825,
      "grad_norm": 0.46094013566363407,
      "learning_rate": 4.934414911977908e-05,
      "loss": 0.445,
      "step": 360
    },
    {
      "epoch": 0.3401677539608574,
      "grad_norm": 0.4104764392290741,
      "learning_rate": 4.9257852951328965e-05,
      "loss": 0.4415,
      "step": 365
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 0.40096213441032663,
      "learning_rate": 4.917155678287884e-05,
      "loss": 0.4546,
      "step": 370
    },
    {
      "epoch": 0.3494874184529357,
      "grad_norm": 0.4645867033422015,
      "learning_rate": 4.908526061442872e-05,
      "loss": 0.4408,
      "step": 375
    },
    {
      "epoch": 0.35414725069897485,
      "grad_norm": 0.4400737008116359,
      "learning_rate": 4.89989644459786e-05,
      "loss": 0.4557,
      "step": 380
    },
    {
      "epoch": 0.35880708294501396,
      "grad_norm": 0.4260635316927096,
      "learning_rate": 4.891266827752848e-05,
      "loss": 0.4575,
      "step": 385
    },
    {
      "epoch": 0.3634669151910531,
      "grad_norm": 0.5323727061907297,
      "learning_rate": 4.882637210907836e-05,
      "loss": 0.4417,
      "step": 390
    },
    {
      "epoch": 0.3681267474370923,
      "grad_norm": 0.5836242210222355,
      "learning_rate": 4.874007594062824e-05,
      "loss": 0.4481,
      "step": 395
    },
    {
      "epoch": 0.3727865796831314,
      "grad_norm": 0.4273563357499149,
      "learning_rate": 4.865377977217811e-05,
      "loss": 0.4327,
      "step": 400
    },
    {
      "epoch": 0.37744641192917056,
      "grad_norm": 0.3953667888190435,
      "learning_rate": 4.8567483603728e-05,
      "loss": 0.4512,
      "step": 405
    },
    {
      "epoch": 0.3821062441752097,
      "grad_norm": 0.41871105603942904,
      "learning_rate": 4.8481187435277875e-05,
      "loss": 0.4471,
      "step": 410
    },
    {
      "epoch": 0.38676607642124883,
      "grad_norm": 0.5437922609735028,
      "learning_rate": 4.839489126682776e-05,
      "loss": 0.4338,
      "step": 415
    },
    {
      "epoch": 0.391425908667288,
      "grad_norm": 0.5564237695130677,
      "learning_rate": 4.830859509837763e-05,
      "loss": 0.4521,
      "step": 420
    },
    {
      "epoch": 0.3960857409133271,
      "grad_norm": 0.34206255911684263,
      "learning_rate": 4.822229892992751e-05,
      "loss": 0.4495,
      "step": 425
    },
    {
      "epoch": 0.40074557315936626,
      "grad_norm": 0.4019378807561449,
      "learning_rate": 4.8136002761477395e-05,
      "loss": 0.4365,
      "step": 430
    },
    {
      "epoch": 0.40540540540540543,
      "grad_norm": 0.5440015382583567,
      "learning_rate": 4.804970659302727e-05,
      "loss": 0.4499,
      "step": 435
    },
    {
      "epoch": 0.41006523765144454,
      "grad_norm": 0.5537615079643711,
      "learning_rate": 4.796341042457715e-05,
      "loss": 0.4438,
      "step": 440
    },
    {
      "epoch": 0.4147250698974837,
      "grad_norm": 0.586960808161654,
      "learning_rate": 4.787711425612703e-05,
      "loss": 0.4386,
      "step": 445
    },
    {
      "epoch": 0.4193849021435228,
      "grad_norm": 0.5072343488418793,
      "learning_rate": 4.779081808767691e-05,
      "loss": 0.4292,
      "step": 450
    },
    {
      "epoch": 0.424044734389562,
      "grad_norm": 0.48278977249280486,
      "learning_rate": 4.770452191922679e-05,
      "loss": 0.4362,
      "step": 455
    },
    {
      "epoch": 0.42870456663560114,
      "grad_norm": 0.39818179591075503,
      "learning_rate": 4.761822575077667e-05,
      "loss": 0.4329,
      "step": 460
    },
    {
      "epoch": 0.43336439888164024,
      "grad_norm": 0.48993775783018667,
      "learning_rate": 4.753192958232654e-05,
      "loss": 0.4299,
      "step": 465
    },
    {
      "epoch": 0.4380242311276794,
      "grad_norm": 0.41171920657337485,
      "learning_rate": 4.744563341387643e-05,
      "loss": 0.4428,
      "step": 470
    },
    {
      "epoch": 0.4426840633737186,
      "grad_norm": 0.4400559670696649,
      "learning_rate": 4.7359337245426306e-05,
      "loss": 0.432,
      "step": 475
    },
    {
      "epoch": 0.4473438956197577,
      "grad_norm": 0.4686484619433322,
      "learning_rate": 4.7273041076976184e-05,
      "loss": 0.4441,
      "step": 480
    },
    {
      "epoch": 0.45200372786579684,
      "grad_norm": 0.4455986109270811,
      "learning_rate": 4.718674490852606e-05,
      "loss": 0.4368,
      "step": 485
    },
    {
      "epoch": 0.45666356011183595,
      "grad_norm": 0.4366809315888367,
      "learning_rate": 4.710044874007594e-05,
      "loss": 0.4312,
      "step": 490
    },
    {
      "epoch": 0.4613233923578751,
      "grad_norm": 0.4587131719176713,
      "learning_rate": 4.7014152571625826e-05,
      "loss": 0.4369,
      "step": 495
    },
    {
      "epoch": 0.4659832246039143,
      "grad_norm": 0.5269402211397143,
      "learning_rate": 4.6927856403175704e-05,
      "loss": 0.44,
      "step": 500
    },
    {
      "epoch": 0.4706430568499534,
      "grad_norm": 0.387320399512531,
      "learning_rate": 4.684156023472558e-05,
      "loss": 0.4266,
      "step": 505
    },
    {
      "epoch": 0.47530288909599255,
      "grad_norm": 0.4446811703899567,
      "learning_rate": 4.675526406627546e-05,
      "loss": 0.435,
      "step": 510
    },
    {
      "epoch": 0.47996272134203166,
      "grad_norm": 0.6228000644001809,
      "learning_rate": 4.666896789782534e-05,
      "loss": 0.433,
      "step": 515
    },
    {
      "epoch": 0.4846225535880708,
      "grad_norm": 0.5372584652001571,
      "learning_rate": 4.658267172937522e-05,
      "loss": 0.4368,
      "step": 520
    },
    {
      "epoch": 0.48928238583411,
      "grad_norm": 0.4279339582606534,
      "learning_rate": 4.64963755609251e-05,
      "loss": 0.4358,
      "step": 525
    },
    {
      "epoch": 0.4939422180801491,
      "grad_norm": 0.5212302132415493,
      "learning_rate": 4.641007939247497e-05,
      "loss": 0.4327,
      "step": 530
    },
    {
      "epoch": 0.49860205032618826,
      "grad_norm": 0.38993326302525777,
      "learning_rate": 4.632378322402486e-05,
      "loss": 0.4329,
      "step": 535
    },
    {
      "epoch": 0.5032618825722274,
      "grad_norm": 0.3927986683558084,
      "learning_rate": 4.6237487055574736e-05,
      "loss": 0.4228,
      "step": 540
    },
    {
      "epoch": 0.5079217148182665,
      "grad_norm": 0.48245624377171165,
      "learning_rate": 4.6151190887124615e-05,
      "loss": 0.4292,
      "step": 545
    },
    {
      "epoch": 0.5125815470643057,
      "grad_norm": 0.4417482867433448,
      "learning_rate": 4.606489471867449e-05,
      "loss": 0.4293,
      "step": 550
    }
  ],
  "logging_steps": 5,
  "max_steps": 3219,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 550,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 4.7079688949989376e+17,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}