{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 500,
  "global_step": 3216,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0046641791044776115,
      "grad_norm": 22.738352492679688,
      "learning_rate": 7.763975155279503e-07,
      "loss": 0.72,
      "step": 5
    },
    {
      "epoch": 0.009328358208955223,
      "grad_norm": 2.4490676748237585,
      "learning_rate": 1.5527950310559006e-06,
      "loss": 0.7066,
      "step": 10
    },
    {
      "epoch": 0.013992537313432836,
      "grad_norm": 1.4469646402845542,
      "learning_rate": 2.329192546583851e-06,
      "loss": 0.6888,
      "step": 15
    },
    {
      "epoch": 0.018656716417910446,
      "grad_norm": 0.9845035992234836,
      "learning_rate": 3.1055900621118013e-06,
      "loss": 0.6505,
      "step": 20
    },
    {
      "epoch": 0.02332089552238806,
      "grad_norm": 1.0688651774494708,
      "learning_rate": 3.881987577639752e-06,
      "loss": 0.6187,
      "step": 25
    },
    {
      "epoch": 0.027985074626865673,
      "grad_norm": 0.8822496021528499,
      "learning_rate": 4.658385093167702e-06,
      "loss": 0.5751,
      "step": 30
    },
    {
      "epoch": 0.03264925373134328,
      "grad_norm": 0.6794397979439452,
      "learning_rate": 5.4347826086956525e-06,
      "loss": 0.5652,
      "step": 35
    },
    {
      "epoch": 0.03731343283582089,
      "grad_norm": 0.6208547823627876,
      "learning_rate": 6.2111801242236025e-06,
      "loss": 0.539,
      "step": 40
    },
    {
      "epoch": 0.04197761194029851,
      "grad_norm": 0.6099100526267329,
      "learning_rate": 6.9875776397515525e-06,
      "loss": 0.5253,
      "step": 45
    },
    {
      "epoch": 0.04664179104477612,
      "grad_norm": 0.4822046752780638,
      "learning_rate": 7.763975155279503e-06,
      "loss": 0.5231,
      "step": 50
    },
    {
      "epoch": 0.051305970149253734,
      "grad_norm": 0.5241141664226747,
      "learning_rate": 8.540372670807453e-06,
      "loss": 0.5038,
      "step": 55
    },
    {
      "epoch": 0.055970149253731345,
      "grad_norm": 0.5355899963948073,
      "learning_rate": 9.316770186335403e-06,
      "loss": 0.5133,
      "step": 60
    },
    {
      "epoch": 0.06063432835820896,
      "grad_norm": 0.45783155598157194,
      "learning_rate": 1.0093167701863353e-05,
      "loss": 0.4971,
      "step": 65
    },
    {
      "epoch": 0.06529850746268656,
      "grad_norm": 0.47752401280380147,
      "learning_rate": 1.0869565217391305e-05,
      "loss": 0.4909,
      "step": 70
    },
    {
      "epoch": 0.06996268656716417,
      "grad_norm": 96.36653266959638,
      "learning_rate": 1.1645962732919255e-05,
      "loss": 0.4909,
      "step": 75
    },
    {
      "epoch": 0.07462686567164178,
      "grad_norm": 0.7133531495033483,
      "learning_rate": 1.2422360248447205e-05,
      "loss": 0.4845,
      "step": 80
    },
    {
      "epoch": 0.07929104477611941,
      "grad_norm": 0.7425853191312465,
      "learning_rate": 1.3198757763975155e-05,
      "loss": 0.4837,
      "step": 85
    },
    {
      "epoch": 0.08395522388059702,
      "grad_norm": 0.5960980488366953,
      "learning_rate": 1.3975155279503105e-05,
      "loss": 0.4804,
      "step": 90
    },
    {
      "epoch": 0.08861940298507463,
      "grad_norm": 0.5447402853557884,
      "learning_rate": 1.4751552795031057e-05,
      "loss": 0.4782,
      "step": 95
    },
    {
      "epoch": 0.09328358208955224,
      "grad_norm": 0.45990879787168926,
      "learning_rate": 1.5527950310559007e-05,
      "loss": 0.4743,
      "step": 100
    },
    {
      "epoch": 0.09794776119402986,
      "grad_norm": 0.6338310274877534,
      "learning_rate": 1.630434782608696e-05,
      "loss": 0.4779,
      "step": 105
    },
    {
      "epoch": 0.10261194029850747,
      "grad_norm": 0.6117280329152313,
      "learning_rate": 1.7080745341614907e-05,
      "loss": 0.4818,
      "step": 110
    },
    {
      "epoch": 0.10727611940298508,
      "grad_norm": 0.5756936186506824,
      "learning_rate": 1.785714285714286e-05,
      "loss": 0.4667,
      "step": 115
    },
    {
      "epoch": 0.11194029850746269,
      "grad_norm": 0.7217520805100247,
      "learning_rate": 1.8633540372670807e-05,
      "loss": 0.4657,
      "step": 120
    },
    {
      "epoch": 0.1166044776119403,
      "grad_norm": 0.6784605417054568,
      "learning_rate": 1.940993788819876e-05,
      "loss": 0.4797,
      "step": 125
    },
    {
      "epoch": 0.12126865671641791,
      "grad_norm": 0.7268764826046913,
      "learning_rate": 2.0186335403726707e-05,
      "loss": 0.4626,
      "step": 130
    },
    {
      "epoch": 0.1259328358208955,
      "grad_norm": 0.5183033448501775,
      "learning_rate": 2.096273291925466e-05,
      "loss": 0.459,
      "step": 135
    },
    {
      "epoch": 0.13059701492537312,
      "grad_norm": 0.5673656480219814,
      "learning_rate": 2.173913043478261e-05,
      "loss": 0.4636,
      "step": 140
    },
    {
      "epoch": 0.13526119402985073,
      "grad_norm": 0.6186921007326888,
      "learning_rate": 2.2515527950310562e-05,
      "loss": 0.4606,
      "step": 145
    },
    {
      "epoch": 0.13992537313432835,
      "grad_norm": 0.5241075651728282,
      "learning_rate": 2.329192546583851e-05,
      "loss": 0.47,
      "step": 150
    },
    {
      "epoch": 0.14458955223880596,
      "grad_norm": 0.7361437374863329,
      "learning_rate": 2.4068322981366462e-05,
      "loss": 0.4714,
      "step": 155
    },
    {
      "epoch": 0.14925373134328357,
      "grad_norm": 0.581348172865908,
      "learning_rate": 2.484472049689441e-05,
      "loss": 0.4735,
      "step": 160
    },
    {
      "epoch": 0.15391791044776118,
      "grad_norm": 0.632566759184536,
      "learning_rate": 2.5621118012422362e-05,
      "loss": 0.4622,
      "step": 165
    },
    {
      "epoch": 0.15858208955223882,
      "grad_norm": 0.4846953103835023,
      "learning_rate": 2.639751552795031e-05,
      "loss": 0.4664,
      "step": 170
    },
    {
      "epoch": 0.16324626865671643,
      "grad_norm": 0.5995307211510615,
      "learning_rate": 2.7173913043478262e-05,
      "loss": 0.4658,
      "step": 175
    },
    {
      "epoch": 0.16791044776119404,
      "grad_norm": 0.6381243666779519,
      "learning_rate": 2.795031055900621e-05,
      "loss": 0.4704,
      "step": 180
    },
    {
      "epoch": 0.17257462686567165,
      "grad_norm": 0.7396312448933672,
      "learning_rate": 2.8726708074534165e-05,
      "loss": 0.4595,
      "step": 185
    },
    {
      "epoch": 0.17723880597014927,
      "grad_norm": 0.646685589511252,
      "learning_rate": 2.9503105590062114e-05,
      "loss": 0.4584,
      "step": 190
    },
    {
      "epoch": 0.18190298507462688,
      "grad_norm": 0.5522136252347458,
      "learning_rate": 3.0279503105590062e-05,
      "loss": 0.4572,
      "step": 195
    },
    {
      "epoch": 0.1865671641791045,
      "grad_norm": 0.592465600869023,
      "learning_rate": 3.1055900621118014e-05,
      "loss": 0.4569,
      "step": 200
    },
    {
      "epoch": 0.1912313432835821,
      "grad_norm": 0.5816557821115675,
      "learning_rate": 3.183229813664597e-05,
      "loss": 0.4562,
      "step": 205
    },
    {
      "epoch": 0.1958955223880597,
      "grad_norm": 0.454302816876782,
      "learning_rate": 3.260869565217392e-05,
      "loss": 0.4581,
      "step": 210
    },
    {
      "epoch": 0.20055970149253732,
      "grad_norm": 0.7248560879258924,
      "learning_rate": 3.3385093167701865e-05,
      "loss": 0.463,
      "step": 215
    },
    {
      "epoch": 0.20522388059701493,
      "grad_norm": 0.47216907134628167,
      "learning_rate": 3.4161490683229814e-05,
      "loss": 0.4557,
      "step": 220
    },
    {
      "epoch": 0.20988805970149255,
      "grad_norm": 0.5120620150755747,
      "learning_rate": 3.493788819875777e-05,
      "loss": 0.4605,
      "step": 225
    },
    {
      "epoch": 0.21455223880597016,
      "grad_norm": 0.5019928041010577,
      "learning_rate": 3.571428571428572e-05,
      "loss": 0.4704,
      "step": 230
    },
    {
      "epoch": 0.21921641791044777,
      "grad_norm": 0.7005277033846086,
      "learning_rate": 3.6490683229813665e-05,
      "loss": 0.4537,
      "step": 235
    },
    {
      "epoch": 0.22388059701492538,
      "grad_norm": 0.6004748284147888,
      "learning_rate": 3.7267080745341614e-05,
      "loss": 0.4462,
      "step": 240
    },
    {
      "epoch": 0.228544776119403,
      "grad_norm": 0.5859496174366943,
      "learning_rate": 3.804347826086957e-05,
      "loss": 0.4464,
      "step": 245
    },
    {
      "epoch": 0.2332089552238806,
      "grad_norm": 0.6271369089477161,
      "learning_rate": 3.881987577639752e-05,
      "loss": 0.4645,
      "step": 250
    },
    {
      "epoch": 0.23787313432835822,
      "grad_norm": 0.7212050029976267,
      "learning_rate": 3.9596273291925465e-05,
      "loss": 0.454,
      "step": 255
    },
    {
      "epoch": 0.24253731343283583,
      "grad_norm": 0.8038873758232625,
      "learning_rate": 4.0372670807453414e-05,
      "loss": 0.4512,
      "step": 260
    },
    {
      "epoch": 0.24720149253731344,
      "grad_norm": 0.6067839995070565,
      "learning_rate": 4.114906832298137e-05,
      "loss": 0.4526,
      "step": 265
    },
    {
      "epoch": 0.251865671641791,
      "grad_norm": 0.8830412074378836,
      "learning_rate": 4.192546583850932e-05,
      "loss": 0.457,
      "step": 270
    },
    {
      "epoch": 0.25652985074626866,
      "grad_norm": 0.5901692717297442,
      "learning_rate": 4.270186335403727e-05,
      "loss": 0.4595,
      "step": 275
    },
    {
      "epoch": 0.26119402985074625,
      "grad_norm": 0.4945775641198145,
      "learning_rate": 4.347826086956522e-05,
      "loss": 0.4588,
      "step": 280
    },
    {
      "epoch": 0.2658582089552239,
      "grad_norm": 0.4880940893571753,
      "learning_rate": 4.425465838509317e-05,
      "loss": 0.4596,
      "step": 285
    },
    {
      "epoch": 0.27052238805970147,
      "grad_norm": 0.42270582247479116,
      "learning_rate": 4.5031055900621124e-05,
      "loss": 0.4524,
      "step": 290
    },
    {
      "epoch": 0.2751865671641791,
      "grad_norm": 0.530692682877917,
      "learning_rate": 4.580745341614907e-05,
      "loss": 0.4548,
      "step": 295
    },
    {
      "epoch": 0.2798507462686567,
      "grad_norm": 0.44616652442353133,
      "learning_rate": 4.658385093167702e-05,
      "loss": 0.4493,
      "step": 300
    },
    {
      "epoch": 0.28451492537313433,
      "grad_norm": 0.5991737332340853,
      "learning_rate": 4.736024844720497e-05,
      "loss": 0.4608,
      "step": 305
    },
    {
      "epoch": 0.2891791044776119,
      "grad_norm": 0.6052807893055346,
      "learning_rate": 4.8136645962732924e-05,
      "loss": 0.4732,
      "step": 310
    },
    {
      "epoch": 0.29384328358208955,
      "grad_norm": 0.5343456372550424,
      "learning_rate": 4.891304347826087e-05,
      "loss": 0.4598,
      "step": 315
    },
    {
      "epoch": 0.29850746268656714,
      "grad_norm": 0.49567258993855884,
      "learning_rate": 4.968944099378882e-05,
      "loss": 0.4628,
      "step": 320
    },
    {
      "epoch": 0.3031716417910448,
      "grad_norm": 0.4971614255612932,
      "learning_rate": 4.994816862474084e-05,
      "loss": 0.4603,
      "step": 325
    },
    {
      "epoch": 0.30783582089552236,
      "grad_norm": 0.6039335231824655,
      "learning_rate": 4.9861782999308914e-05,
      "loss": 0.4482,
      "step": 330
    },
    {
      "epoch": 0.3125,
      "grad_norm": 0.6778615211932396,
      "learning_rate": 4.977539737387699e-05,
      "loss": 0.4638,
      "step": 335
    },
    {
      "epoch": 0.31716417910447764,
      "grad_norm": 0.7498957256886681,
      "learning_rate": 4.968901174844506e-05,
      "loss": 0.4625,
      "step": 340
    },
    {
      "epoch": 0.3218283582089552,
      "grad_norm": 0.5615202268133589,
      "learning_rate": 4.9602626123013134e-05,
      "loss": 0.4488,
      "step": 345
    },
    {
      "epoch": 0.32649253731343286,
      "grad_norm": 0.4523233930114676,
      "learning_rate": 4.9516240497581205e-05,
      "loss": 0.462,
      "step": 350
    },
    {
      "epoch": 0.33115671641791045,
      "grad_norm": 0.5033467091434207,
      "learning_rate": 4.942985487214928e-05,
      "loss": 0.4668,
      "step": 355
    },
    {
      "epoch": 0.3358208955223881,
      "grad_norm": 0.5553787776412434,
      "learning_rate": 4.934346924671735e-05,
      "loss": 0.45,
      "step": 360
    },
    {
      "epoch": 0.34048507462686567,
      "grad_norm": 0.4708580590293377,
      "learning_rate": 4.925708362128542e-05,
      "loss": 0.4426,
      "step": 365
    },
    {
      "epoch": 0.3451492537313433,
      "grad_norm": 0.38806331893861945,
      "learning_rate": 4.917069799585349e-05,
      "loss": 0.4392,
      "step": 370
    },
    {
      "epoch": 0.3498134328358209,
      "grad_norm": 0.40827614257116257,
      "learning_rate": 4.908431237042156e-05,
      "loss": 0.451,
      "step": 375
    },
    {
      "epoch": 0.35447761194029853,
      "grad_norm": 0.45747856282523847,
      "learning_rate": 4.899792674498963e-05,
      "loss": 0.4476,
      "step": 380
    },
    {
      "epoch": 0.3591417910447761,
      "grad_norm": 0.5052139973437662,
      "learning_rate": 4.891154111955771e-05,
      "loss": 0.4432,
      "step": 385
    },
    {
      "epoch": 0.36380597014925375,
      "grad_norm": 0.4170880345897222,
      "learning_rate": 4.882515549412578e-05,
      "loss": 0.4511,
      "step": 390
    },
    {
      "epoch": 0.36847014925373134,
      "grad_norm": 0.44440917008972314,
      "learning_rate": 4.873876986869385e-05,
      "loss": 0.434,
      "step": 395
    },
    {
      "epoch": 0.373134328358209,
      "grad_norm": 0.5535197338182013,
      "learning_rate": 4.8652384243261925e-05,
      "loss": 0.4417,
      "step": 400
    },
    {
      "epoch": 0.37779850746268656,
      "grad_norm": 0.4543003823042689,
      "learning_rate": 4.8565998617829996e-05,
      "loss": 0.4427,
      "step": 405
    },
    {
      "epoch": 0.3824626865671642,
      "grad_norm": 0.4061460281209753,
      "learning_rate": 4.847961299239807e-05,
      "loss": 0.4508,
      "step": 410
    },
    {
      "epoch": 0.3871268656716418,
      "grad_norm": 0.40101055697914884,
      "learning_rate": 4.839322736696614e-05,
      "loss": 0.4361,
      "step": 415
    },
    {
      "epoch": 0.3917910447761194,
      "grad_norm": 0.45957316229962214,
      "learning_rate": 4.830684174153421e-05,
      "loss": 0.4368,
      "step": 420
    },
    {
      "epoch": 0.396455223880597,
      "grad_norm": 0.5103635688354818,
      "learning_rate": 4.822045611610228e-05,
      "loss": 0.4526,
      "step": 425
    },
    {
      "epoch": 0.40111940298507465,
      "grad_norm": 0.44638400654327204,
      "learning_rate": 4.813407049067035e-05,
      "loss": 0.4442,
      "step": 430
    },
    {
      "epoch": 0.40578358208955223,
      "grad_norm": 0.42134048096774834,
      "learning_rate": 4.804768486523843e-05,
      "loss": 0.4446,
      "step": 435
    },
    {
      "epoch": 0.41044776119402987,
      "grad_norm": 0.3714177623843857,
      "learning_rate": 4.79612992398065e-05,
      "loss": 0.4434,
      "step": 440
    },
    {
      "epoch": 0.41511194029850745,
      "grad_norm": 0.4080807783226958,
      "learning_rate": 4.787491361437457e-05,
      "loss": 0.4365,
      "step": 445
    },
    {
      "epoch": 0.4197761194029851,
      "grad_norm": 0.4296755547496576,
      "learning_rate": 4.7788527988942644e-05,
      "loss": 0.4439,
      "step": 450
    },
    {
      "epoch": 0.4244402985074627,
      "grad_norm": 0.5229869594711577,
      "learning_rate": 4.7702142363510715e-05,
      "loss": 0.4511,
      "step": 455
    },
    {
      "epoch": 0.4291044776119403,
      "grad_norm": 0.4541583197754355,
      "learning_rate": 4.7615756738078786e-05,
      "loss": 0.443,
      "step": 460
    },
    {
      "epoch": 0.4337686567164179,
      "grad_norm": 0.39753221770465835,
      "learning_rate": 4.752937111264686e-05,
      "loss": 0.4424,
      "step": 465
    },
    {
      "epoch": 0.43843283582089554,
      "grad_norm": 0.452450873927536,
      "learning_rate": 4.744298548721493e-05,
      "loss": 0.4457,
      "step": 470
    },
    {
      "epoch": 0.4430970149253731,
      "grad_norm": 0.34559208848036266,
      "learning_rate": 4.7356599861783e-05,
      "loss": 0.4383,
      "step": 475
    },
    {
      "epoch": 0.44776119402985076,
      "grad_norm": 0.47402006860137447,
      "learning_rate": 4.727021423635107e-05,
      "loss": 0.4458,
      "step": 480
    },
    {
      "epoch": 0.45242537313432835,
      "grad_norm": 0.4884874645329745,
      "learning_rate": 4.718382861091914e-05,
      "loss": 0.4481,
      "step": 485
    },
    {
      "epoch": 0.457089552238806,
      "grad_norm": 0.451658711505894,
      "learning_rate": 4.7097442985487214e-05,
      "loss": 0.443,
      "step": 490
    },
    {
      "epoch": 0.46175373134328357,
      "grad_norm": 0.3843680671503601,
      "learning_rate": 4.701105736005529e-05,
      "loss": 0.4445,
      "step": 495
    },
    {
      "epoch": 0.4664179104477612,
      "grad_norm": 0.3920028718627014,
      "learning_rate": 4.692467173462336e-05,
      "loss": 0.4443,
      "step": 500
    },
    {
      "epoch": 0.4710820895522388,
      "grad_norm": 0.4995471633561056,
      "learning_rate": 4.6838286109191434e-05,
      "loss": 0.4415,
      "step": 505
    },
    {
      "epoch": 0.47574626865671643,
      "grad_norm": 0.44771115752949703,
      "learning_rate": 4.6751900483759505e-05,
      "loss": 0.4506,
      "step": 510
    },
    {
      "epoch": 0.480410447761194,
      "grad_norm": 0.3985541451173659,
      "learning_rate": 4.6665514858327577e-05,
      "loss": 0.4372,
      "step": 515
    },
    {
      "epoch": 0.48507462686567165,
      "grad_norm": 0.357625229306546,
      "learning_rate": 4.657912923289565e-05,
      "loss": 0.4378,
      "step": 520
    },
    {
      "epoch": 0.48973880597014924,
      "grad_norm": 0.4738854377700535,
      "learning_rate": 4.649274360746372e-05,
      "loss": 0.443,
      "step": 525
    },
    {
      "epoch": 0.4944029850746269,
      "grad_norm": 0.39605876768330955,
      "learning_rate": 4.640635798203179e-05,
      "loss": 0.4418,
      "step": 530
    },
    {
      "epoch": 0.49906716417910446,
      "grad_norm": 0.4413650110655974,
      "learning_rate": 4.631997235659987e-05,
      "loss": 0.4402,
      "step": 535
    },
    {
      "epoch": 0.503731343283582,
      "grad_norm": 0.4056918494159327,
      "learning_rate": 4.623358673116794e-05,
      "loss": 0.444,
      "step": 540
    },
    {
      "epoch": 0.5083955223880597,
      "grad_norm": 0.3856067914304802,
      "learning_rate": 4.614720110573601e-05,
      "loss": 0.4326,
      "step": 545
    },
    {
      "epoch": 0.5130597014925373,
      "grad_norm": 0.4232443035636539,
      "learning_rate": 4.606081548030408e-05,
      "loss": 0.4404,
      "step": 550
    },
    {
      "epoch": 0.5177238805970149,
      "grad_norm": 0.3589248472646083,
      "learning_rate": 4.597442985487215e-05,
      "loss": 0.4319,
      "step": 555
    },
    {
      "epoch": 0.5223880597014925,
      "grad_norm": 0.37321752712330447,
      "learning_rate": 4.5888044229440225e-05,
      "loss": 0.4483,
      "step": 560
    },
    {
      "epoch": 0.5270522388059702,
      "grad_norm": 0.34891184329028024,
      "learning_rate": 4.5801658604008296e-05,
      "loss": 0.4289,
      "step": 565
    },
    {
      "epoch": 0.5317164179104478,
      "grad_norm": 0.45076872631286924,
      "learning_rate": 4.571527297857637e-05,
      "loss": 0.4542,
      "step": 570
    },
    {
      "epoch": 0.5363805970149254,
      "grad_norm": 0.3873796320347349,
      "learning_rate": 4.562888735314444e-05,
      "loss": 0.4209,
      "step": 575
    },
    {
      "epoch": 0.5410447761194029,
      "grad_norm": 0.34669576923189943,
      "learning_rate": 4.554250172771251e-05,
      "loss": 0.4287,
      "step": 580
    },
    {
      "epoch": 0.5457089552238806,
      "grad_norm": 0.3638419363193581,
      "learning_rate": 4.545611610228058e-05,
      "loss": 0.436,
      "step": 585
    },
    {
      "epoch": 0.5503731343283582,
      "grad_norm": 0.39053662968410374,
      "learning_rate": 4.536973047684865e-05,
      "loss": 0.4404,
      "step": 590
    },
    {
      "epoch": 0.5550373134328358,
      "grad_norm": 0.37585133117143305,
      "learning_rate": 4.528334485141672e-05,
      "loss": 0.4233,
      "step": 595
    },
    {
      "epoch": 0.5597014925373134,
      "grad_norm": 0.3731978079425957,
      "learning_rate": 4.5196959225984794e-05,
      "loss": 0.4343,
      "step": 600
    },
    {
      "epoch": 0.5643656716417911,
      "grad_norm": 0.3117162045325204,
      "learning_rate": 4.5110573600552866e-05,
      "loss": 0.4337,
      "step": 605
    },
    {
      "epoch": 0.5690298507462687,
      "grad_norm": 0.365948185811666,
      "learning_rate": 4.5024187975120944e-05,
      "loss": 0.4238,
      "step": 610
    },
    {
      "epoch": 0.5736940298507462,
      "grad_norm": 0.45289486340894153,
      "learning_rate": 4.4937802349689015e-05,
      "loss": 0.4364,
      "step": 615
    },
    {
      "epoch": 0.5783582089552238,
      "grad_norm": 0.3968118190520084,
      "learning_rate": 4.4851416724257086e-05,
      "loss": 0.4399,
      "step": 620
    },
    {
      "epoch": 0.5830223880597015,
      "grad_norm": 0.3725245026138153,
      "learning_rate": 4.476503109882516e-05,
      "loss": 0.4322,
      "step": 625
    },
    {
      "epoch": 0.5876865671641791,
      "grad_norm": 0.3467913237906032,
      "learning_rate": 4.467864547339323e-05,
      "loss": 0.4256,
      "step": 630
    },
    {
      "epoch": 0.5923507462686567,
      "grad_norm": 0.3197433679572902,
      "learning_rate": 4.459225984796131e-05,
      "loss": 0.4185,
      "step": 635
    },
    {
      "epoch": 0.5970149253731343,
      "grad_norm": 0.3073797063902973,
      "learning_rate": 4.450587422252938e-05,
      "loss": 0.4182,
      "step": 640
    },
    {
      "epoch": 0.601679104477612,
      "grad_norm": 0.3327114253437715,
      "learning_rate": 4.441948859709745e-05,
      "loss": 0.4361,
      "step": 645
    },
    {
      "epoch": 0.6063432835820896,
      "grad_norm": 0.3638896400741939,
      "learning_rate": 4.433310297166552e-05,
      "loss": 0.4325,
      "step": 650
    },
    {
      "epoch": 0.6110074626865671,
      "grad_norm": 0.36150055403867176,
      "learning_rate": 4.424671734623359e-05,
      "loss": 0.4229,
      "step": 655
    },
    {
      "epoch": 0.6156716417910447,
      "grad_norm": 0.36773037100563893,
      "learning_rate": 4.416033172080166e-05,
      "loss": 0.4234,
      "step": 660
    },
    {
      "epoch": 0.6203358208955224,
      "grad_norm": 0.3433086649257135,
      "learning_rate": 4.4073946095369734e-05,
      "loss": 0.4157,
      "step": 665
    },
    {
      "epoch": 0.625,
      "grad_norm": 0.35990799973671744,
      "learning_rate": 4.3987560469937805e-05,
      "loss": 0.4356,
      "step": 670
    },
    {
      "epoch": 0.6296641791044776,
      "grad_norm": 0.36355717173952407,
      "learning_rate": 4.3901174844505877e-05,
      "loss": 0.433,
      "step": 675
    },
    {
      "epoch": 0.6343283582089553,
      "grad_norm": 0.3638690271633339,
      "learning_rate": 4.381478921907395e-05,
      "loss": 0.429,
      "step": 680
    },
    {
      "epoch": 0.6389925373134329,
      "grad_norm": 0.34841776534326835,
      "learning_rate": 4.372840359364202e-05,
      "loss": 0.4168,
      "step": 685
    },
    {
      "epoch": 0.6436567164179104,
      "grad_norm": 0.3639322088966222,
      "learning_rate": 4.364201796821009e-05,
      "loss": 0.4292,
      "step": 690
    },
    {
      "epoch": 0.648320895522388,
      "grad_norm": 0.35025635629771007,
      "learning_rate": 4.355563234277816e-05,
      "loss": 0.4254,
      "step": 695
    },
    {
      "epoch": 0.6529850746268657,
      "grad_norm": 0.32140112365871065,
      "learning_rate": 4.346924671734623e-05,
      "loss": 0.4267,
      "step": 700
    },
    {
      "epoch": 0.6576492537313433,
      "grad_norm": 0.34172203594661366,
      "learning_rate": 4.3382861091914304e-05,
      "loss": 0.4221,
      "step": 705
    },
    {
      "epoch": 0.6623134328358209,
      "grad_norm": 0.3289285022359723,
      "learning_rate": 4.3296475466482375e-05,
      "loss": 0.4331,
      "step": 710
    },
    {
      "epoch": 0.6669776119402985,
      "grad_norm": 0.34058632106260917,
      "learning_rate": 4.3210089841050446e-05,
      "loss": 0.4458,
      "step": 715
    },
    {
      "epoch": 0.6716417910447762,
      "grad_norm": 0.3486643210893037,
      "learning_rate": 4.3123704215618525e-05,
      "loss": 0.4337,
      "step": 720
    },
    {
      "epoch": 0.6763059701492538,
      "grad_norm": 0.34029563361342546,
      "learning_rate": 4.3037318590186596e-05,
      "loss": 0.4346,
      "step": 725
    },
    {
      "epoch": 0.6809701492537313,
      "grad_norm": 0.3203267957973422,
      "learning_rate": 4.295093296475467e-05,
      "loss": 0.4315,
      "step": 730
    },
    {
      "epoch": 0.6856343283582089,
      "grad_norm": 0.33247438829317333,
      "learning_rate": 4.286454733932274e-05,
      "loss": 0.4289,
      "step": 735
    },
    {
      "epoch": 0.6902985074626866,
      "grad_norm": 0.3639271853650925,
      "learning_rate": 4.2778161713890816e-05,
      "loss": 0.4313,
      "step": 740
    },
    {
      "epoch": 0.6949626865671642,
      "grad_norm": 0.3647011321225644,
      "learning_rate": 4.269177608845889e-05,
      "loss": 0.421,
      "step": 745
    },
    {
      "epoch": 0.6996268656716418,
      "grad_norm": 0.4544655797193905,
      "learning_rate": 4.260539046302696e-05,
      "loss": 0.43,
      "step": 750
    },
    {
      "epoch": 0.7042910447761194,
      "grad_norm": 0.3560911463639429,
      "learning_rate": 4.251900483759503e-05,
      "loss": 0.416,
      "step": 755
    },
    {
      "epoch": 0.7089552238805971,
      "grad_norm": 0.3721360929204025,
      "learning_rate": 4.24326192121631e-05,
      "loss": 0.4333,
      "step": 760
    },
    {
      "epoch": 0.7136194029850746,
      "grad_norm": 0.3637105793107694,
      "learning_rate": 4.234623358673117e-05,
      "loss": 0.4258,
      "step": 765
    },
    {
      "epoch": 0.7182835820895522,
      "grad_norm": 0.34776510236390523,
      "learning_rate": 4.2259847961299244e-05,
      "loss": 0.4363,
      "step": 770
    },
    {
      "epoch": 0.7229477611940298,
      "grad_norm": 0.3258555701876529,
      "learning_rate": 4.2173462335867315e-05,
      "loss": 0.4188,
      "step": 775
    },
    {
      "epoch": 0.7276119402985075,
      "grad_norm": 0.32865402875328575,
      "learning_rate": 4.2087076710435386e-05,
      "loss": 0.4268,
      "step": 780
    },
    {
      "epoch": 0.7322761194029851,
      "grad_norm": 0.36196646399760873,
      "learning_rate": 4.200069108500346e-05,
      "loss": 0.4258,
      "step": 785
    },
    {
      "epoch": 0.7369402985074627,
      "grad_norm": 0.32676922443067036,
      "learning_rate": 4.191430545957153e-05,
      "loss": 0.412,
      "step": 790
    },
    {
      "epoch": 0.7416044776119403,
      "grad_norm": 0.39681571587950204,
      "learning_rate": 4.18279198341396e-05,
      "loss": 0.6017,
      "step": 795
    },
    {
      "epoch": 0.746268656716418,
      "grad_norm": 10.033180529879662,
      "learning_rate": 4.174153420870767e-05,
      "loss": 0.7944,
      "step": 800
    },
    {
      "epoch": 0.7509328358208955,
      "grad_norm": 27.24004574164007,
      "learning_rate": 4.165514858327574e-05,
      "loss": 3.1338,
      "step": 805
    },
    {
      "epoch": 0.7555970149253731,
      "grad_norm": 20.883032361930415,
      "learning_rate": 4.1568762957843814e-05,
      "loss": 1.6484,
      "step": 810
    },
    {
      "epoch": 0.7602611940298507,
      "grad_norm": 141.9043761638311,
      "learning_rate": 4.1482377332411885e-05,
      "loss": 4.9296,
      "step": 815
    },
    {
      "epoch": 0.7649253731343284,
      "grad_norm": 100.61464359675652,
      "learning_rate": 4.1395991706979956e-05,
      "loss": 5.7512,
      "step": 820
    },
    {
      "epoch": 0.769589552238806,
      "grad_norm": 3.990188508436384,
      "learning_rate": 4.130960608154803e-05,
      "loss": 2.0464,
      "step": 825
    },
    {
      "epoch": 0.7742537313432836,
      "grad_norm": 45.318855779474845,
      "learning_rate": 4.12232204561161e-05,
      "loss": 1.3146,
      "step": 830
    },
    {
      "epoch": 0.7789179104477612,
      "grad_norm": 4.832438159991322,
      "learning_rate": 4.1136834830684177e-05,
      "loss": 0.8527,
      "step": 835
    },
    {
      "epoch": 0.7835820895522388,
      "grad_norm": 0.6749808872081724,
      "learning_rate": 4.105044920525225e-05,
      "loss": 0.5167,
      "step": 840
    },
    {
      "epoch": 0.7882462686567164,
      "grad_norm": 0.5425395415571037,
      "learning_rate": 4.096406357982032e-05,
      "loss": 0.4614,
      "step": 845
    },
    {
      "epoch": 0.792910447761194,
      "grad_norm": 24.980581731800832,
      "learning_rate": 4.087767795438839e-05,
      "loss": 0.4761,
      "step": 850
    },
    {
      "epoch": 0.7975746268656716,
      "grad_norm": 1.4501348911481409,
      "learning_rate": 4.079129232895647e-05,
      "loss": 0.4674,
      "step": 855
    },
    {
      "epoch": 0.8022388059701493,
      "grad_norm": 0.641416013117446,
      "learning_rate": 4.070490670352454e-05,
      "loss": 0.5038,
      "step": 860
    },
    {
      "epoch": 0.8069029850746269,
      "grad_norm": 0.5627030438174753,
      "learning_rate": 4.061852107809261e-05,
      "loss": 0.4418,
      "step": 865
    },
    {
      "epoch": 0.8115671641791045,
      "grad_norm": 0.46915399699593396,
      "learning_rate": 4.053213545266068e-05,
      "loss": 0.4447,
      "step": 870
    },
    {
      "epoch": 0.816231343283582,
      "grad_norm": 0.3856436638838522,
      "learning_rate": 4.044574982722875e-05,
      "loss": 0.4276,
      "step": 875
    },
    {
      "epoch": 0.8208955223880597,
      "grad_norm": 0.48756997938144137,
      "learning_rate": 4.0359364201796824e-05,
      "loss": 0.434,
      "step": 880
    },
    {
      "epoch": 0.8255597014925373,
      "grad_norm": 0.51766421308121,
      "learning_rate": 4.0272978576364896e-05,
      "loss": 0.5357,
      "step": 885
    },
    {
      "epoch": 0.8302238805970149,
      "grad_norm": 0.3908157940426824,
      "learning_rate": 4.018659295093297e-05,
      "loss": 0.4284,
      "step": 890
    },
    {
      "epoch": 0.8348880597014925,
      "grad_norm": 0.33875913171292416,
      "learning_rate": 4.010020732550104e-05,
      "loss": 0.4246,
      "step": 895
    },
    {
      "epoch": 0.8395522388059702,
      "grad_norm": 0.3558826759533804,
      "learning_rate": 4.001382170006911e-05,
      "loss": 0.4303,
      "step": 900
    },
    {
      "epoch": 0.8442164179104478,
      "grad_norm": 0.3450728154450144,
      "learning_rate": 3.992743607463718e-05,
      "loss": 0.4349,
      "step": 905
    },
    {
      "epoch": 0.8488805970149254,
      "grad_norm": 0.42579004238364515,
      "learning_rate": 3.984105044920525e-05,
      "loss": 0.4366,
      "step": 910
    },
    {
      "epoch": 0.8535447761194029,
      "grad_norm": 0.32173962732848427,
      "learning_rate": 3.975466482377332e-05,
      "loss": 0.4288,
      "step": 915
    },
    {
      "epoch": 0.8582089552238806,
      "grad_norm": 0.33734225850694,
      "learning_rate": 3.9668279198341394e-05,
      "loss": 0.4217,
      "step": 920
    },
    {
      "epoch": 0.8628731343283582,
      "grad_norm": 0.29598470338565125,
      "learning_rate": 3.9581893572909466e-05,
      "loss": 0.4285,
      "step": 925
    },
    {
      "epoch": 0.8675373134328358,
      "grad_norm": 0.27627966729614656,
      "learning_rate": 3.949550794747754e-05,
      "loss": 0.4185,
      "step": 930
    },
    {
      "epoch": 0.8722014925373134,
      "grad_norm": 0.2959580635871162,
      "learning_rate": 3.9409122322045615e-05,
      "loss": 0.4189,
      "step": 935
    },
    {
      "epoch": 0.8768656716417911,
      "grad_norm": 0.27114553180881057,
      "learning_rate": 3.9322736696613686e-05,
      "loss": 0.4351,
      "step": 940
    },
    {
      "epoch": 0.8815298507462687,
      "grad_norm": 0.2776062047868956,
      "learning_rate": 3.923635107118176e-05,
      "loss": 0.4198,
      "step": 945
    },
    {
      "epoch": 0.8861940298507462,
      "grad_norm": 0.3413855890463926,
      "learning_rate": 3.914996544574983e-05,
      "loss": 0.4259,
      "step": 950
    },
    {
      "epoch": 0.8908582089552238,
      "grad_norm": 0.2865197722582667,
      "learning_rate": 3.90635798203179e-05,
      "loss": 0.4228,
      "step": 955
    },
    {
      "epoch": 0.8955223880597015,
      "grad_norm": 0.28256042480288196,
      "learning_rate": 3.897719419488597e-05,
      "loss": 0.425,
      "step": 960
    },
    {
      "epoch": 0.9001865671641791,
      "grad_norm": 0.29562003853312135,
      "learning_rate": 3.889080856945405e-05,
      "loss": 0.415,
      "step": 965
    },
    {
      "epoch": 0.9048507462686567,
      "grad_norm": 0.35132080002687516,
      "learning_rate": 3.880442294402212e-05,
      "loss": 0.4208,
      "step": 970
    },
    {
      "epoch": 0.9095149253731343,
      "grad_norm": 0.3284303035217882,
      "learning_rate": 3.871803731859019e-05,
      "loss": 0.4137,
      "step": 975
    },
    {
      "epoch": 0.914179104477612,
      "grad_norm": 0.32530707433107603,
      "learning_rate": 3.863165169315826e-05,
      "loss": 0.4269,
      "step": 980
    },
    {
      "epoch": 0.9188432835820896,
      "grad_norm": 0.34371360958272734,
      "learning_rate": 3.8545266067726334e-05,
      "loss": 0.4194,
      "step": 985
    },
    {
      "epoch": 0.9235074626865671,
      "grad_norm": 0.31146967143943166,
      "learning_rate": 3.8458880442294405e-05,
      "loss": 0.4134,
      "step": 990
    },
    {
      "epoch": 0.9281716417910447,
      "grad_norm": 0.2920921345847662,
      "learning_rate": 3.8372494816862477e-05,
      "loss": 0.421,
      "step": 995
    },
    {
      "epoch": 0.9328358208955224,
      "grad_norm": 0.31675384800507816,
      "learning_rate": 3.828610919143055e-05,
      "loss": 0.4041,
      "step": 1000
    },
    {
      "epoch": 0.9375,
      "grad_norm": 0.27613571571669987,
      "learning_rate": 3.819972356599862e-05,
      "loss": 0.4155,
      "step": 1005
    },
    {
      "epoch": 0.9421641791044776,
      "grad_norm": 0.2753364251435907,
      "learning_rate": 3.811333794056669e-05,
      "loss": 0.4211,
      "step": 1010
    },
    {
      "epoch": 0.9468283582089553,
      "grad_norm": 0.2698239989351873,
      "learning_rate": 3.802695231513476e-05,
      "loss": 0.4239,
      "step": 1015
    },
    {
      "epoch": 0.9514925373134329,
      "grad_norm": 0.27126755394004687,
      "learning_rate": 3.794056668970283e-05,
      "loss": 0.4253,
      "step": 1020
    },
    {
      "epoch": 0.9561567164179104,
      "grad_norm": 0.26224378178964536,
      "learning_rate": 3.7854181064270904e-05,
      "loss": 0.4185,
      "step": 1025
    },
    {
      "epoch": 0.960820895522388,
      "grad_norm": 0.28906256595472496,
      "learning_rate": 3.7767795438838975e-05,
      "loss": 0.4142,
      "step": 1030
    },
    {
      "epoch": 0.9654850746268657,
      "grad_norm": 0.27553156279452773,
      "learning_rate": 3.7681409813407046e-05,
      "loss": 0.4207,
      "step": 1035
    },
    {
      "epoch": 0.9701492537313433,
      "grad_norm": 0.2696811759327458,
      "learning_rate": 3.7595024187975124e-05,
      "loss": 0.4162,
      "step": 1040
    },
    {
      "epoch": 0.9748134328358209,
      "grad_norm": 0.2662441763669896,
      "learning_rate": 3.7508638562543196e-05,
      "loss": 0.4235,
      "step": 1045
    },
    {
      "epoch": 0.9794776119402985,
      "grad_norm": 0.28036180463737576,
      "learning_rate": 3.742225293711127e-05,
      "loss": 0.4177,
      "step": 1050
    },
    {
      "epoch": 0.9841417910447762,
      "grad_norm": 0.31473452358181914,
      "learning_rate": 3.733586731167934e-05,
      "loss": 0.4159,
      "step": 1055
    },
    {
      "epoch": 0.9888059701492538,
      "grad_norm": 0.2953342050150981,
      "learning_rate": 3.724948168624741e-05,
      "loss": 0.4174,
      "step": 1060
    },
    {
      "epoch": 0.9934701492537313,
      "grad_norm": 0.3131377965861431,
      "learning_rate": 3.716309606081548e-05,
      "loss": 0.4137,
      "step": 1065
    },
    {
      "epoch": 0.9981343283582089,
      "grad_norm": 0.3374438004794526,
      "learning_rate": 3.707671043538355e-05,
      "loss": 0.4173,
      "step": 1070
    },
    {
      "epoch": 1.0027985074626866,
      "grad_norm": 0.37344978388914235,
      "learning_rate": 3.699032480995162e-05,
      "loss": 0.3806,
      "step": 1075
    },
    {
      "epoch": 1.007462686567164,
      "grad_norm": 0.31801489919883985,
      "learning_rate": 3.69039391845197e-05,
      "loss": 0.3596,
      "step": 1080
    },
    {
      "epoch": 1.0121268656716418,
      "grad_norm": 0.3317256069439382,
      "learning_rate": 3.681755355908777e-05,
      "loss": 0.3695,
      "step": 1085
    },
    {
      "epoch": 1.0167910447761195,
      "grad_norm": 0.3306503536325789,
      "learning_rate": 3.6731167933655844e-05,
      "loss": 0.3654,
      "step": 1090
    },
    {
      "epoch": 1.021455223880597,
      "grad_norm": 0.32913429869876926,
      "learning_rate": 3.6644782308223915e-05,
      "loss": 0.3464,
      "step": 1095
    },
    {
      "epoch": 1.0261194029850746,
      "grad_norm": 0.33050928773733657,
      "learning_rate": 3.6558396682791986e-05,
      "loss": 0.3535,
      "step": 1100
    },
    {
      "epoch": 1.0307835820895523,
      "grad_norm": 0.31629181857188565,
      "learning_rate": 3.647201105736006e-05,
      "loss": 0.3493,
      "step": 1105
    },
    {
      "epoch": 1.0354477611940298,
      "grad_norm": 0.31384987421251836,
      "learning_rate": 3.638562543192813e-05,
      "loss": 0.3613,
      "step": 1110
    },
    {
      "epoch": 1.0401119402985075,
      "grad_norm": 0.28143199316278955,
      "learning_rate": 3.62992398064962e-05,
      "loss": 0.3561,
      "step": 1115
    },
    {
      "epoch": 1.044776119402985,
      "grad_norm": 0.28932574570028824,
      "learning_rate": 3.621285418106427e-05,
      "loss": 0.3505,
      "step": 1120
    },
    {
      "epoch": 1.0494402985074627,
      "grad_norm": 0.3130030748917116,
      "learning_rate": 3.612646855563234e-05,
      "loss": 0.3508,
      "step": 1125
    },
    {
      "epoch": 1.0541044776119404,
      "grad_norm": 0.30018396691904375,
      "learning_rate": 3.6040082930200414e-05,
      "loss": 0.3559,
      "step": 1130
    },
    {
      "epoch": 1.0587686567164178,
      "grad_norm": 0.2797661475731455,
      "learning_rate": 3.5953697304768485e-05,
      "loss": 0.3564,
      "step": 1135
    },
    {
      "epoch": 1.0634328358208955,
      "grad_norm": 0.2614821767599866,
      "learning_rate": 3.586731167933656e-05,
      "loss": 0.3555,
      "step": 1140
    },
    {
      "epoch": 1.0680970149253732,
      "grad_norm": 0.29189032628078715,
      "learning_rate": 3.5780926053904634e-05,
      "loss": 0.3611,
      "step": 1145
    },
    {
      "epoch": 1.0727611940298507,
      "grad_norm": 0.276817755710161,
      "learning_rate": 3.5694540428472705e-05,
      "loss": 0.3492,
      "step": 1150
    },
    {
      "epoch": 1.0774253731343284,
      "grad_norm": 0.2936426921451626,
      "learning_rate": 3.5608154803040776e-05,
      "loss": 0.3695,
      "step": 1155
    },
    {
      "epoch": 1.0820895522388059,
      "grad_norm": 0.27722338160031423,
      "learning_rate": 3.552176917760885e-05,
      "loss": 0.3615,
      "step": 1160
    },
    {
      "epoch": 1.0867537313432836,
      "grad_norm": 0.300519914595335,
      "learning_rate": 3.543538355217692e-05,
      "loss": 0.3623,
      "step": 1165
    },
    {
      "epoch": 1.0914179104477613,
      "grad_norm": 0.3084112389185803,
      "learning_rate": 3.534899792674499e-05,
      "loss": 0.3659,
      "step": 1170
    },
    {
      "epoch": 1.0960820895522387,
      "grad_norm": 0.33450358480683023,
      "learning_rate": 3.526261230131306e-05,
      "loss": 0.3678,
      "step": 1175
    },
    {
      "epoch": 1.1007462686567164,
      "grad_norm": 0.3356314974475923,
      "learning_rate": 3.517622667588113e-05,
      "loss": 0.358,
      "step": 1180
    },
    {
      "epoch": 1.1054104477611941,
      "grad_norm": 0.2702552333196796,
      "learning_rate": 3.5089841050449204e-05,
      "loss": 0.35,
      "step": 1185
    },
    {
      "epoch": 1.1100746268656716,
      "grad_norm": 0.32551848258956373,
      "learning_rate": 3.500345542501728e-05,
      "loss": 0.3619,
      "step": 1190
    },
    {
      "epoch": 1.1147388059701493,
      "grad_norm": 0.2777051336359984,
      "learning_rate": 3.491706979958535e-05,
      "loss": 0.3519,
      "step": 1195
    },
    {
      "epoch": 1.1194029850746268,
      "grad_norm": 0.3069482002112525,
      "learning_rate": 3.4830684174153424e-05,
      "loss": 0.3426,
      "step": 1200
    },
    {
      "epoch": 1.1240671641791045,
      "grad_norm": 0.30929936209908054,
      "learning_rate": 3.4744298548721496e-05,
      "loss": 0.3547,
      "step": 1205
    },
    {
      "epoch": 1.1287313432835822,
      "grad_norm": 0.45688393479074363,
      "learning_rate": 3.465791292328957e-05,
      "loss": 0.3627,
      "step": 1210
    },
    {
      "epoch": 1.1333955223880596,
      "grad_norm": 0.29264477117159665,
      "learning_rate": 3.457152729785764e-05,
      "loss": 0.359,
      "step": 1215
    },
    {
      "epoch": 1.1380597014925373,
      "grad_norm": 0.2491978795882593,
      "learning_rate": 3.448514167242571e-05,
      "loss": 0.3625,
      "step": 1220
    },
    {
      "epoch": 1.142723880597015,
      "grad_norm": 0.2755648656759251,
      "learning_rate": 3.439875604699378e-05,
      "loss": 0.3607,
      "step": 1225
    },
    {
      "epoch": 1.1473880597014925,
      "grad_norm": 0.2868010990674123,
      "learning_rate": 3.431237042156185e-05,
      "loss": 0.3499,
      "step": 1230
    },
    {
      "epoch": 1.1520522388059702,
      "grad_norm": 0.31447891110269854,
      "learning_rate": 3.422598479612992e-05,
      "loss": 0.3543,
      "step": 1235
    },
    {
      "epoch": 1.1567164179104479,
      "grad_norm": 0.30726658838845144,
      "learning_rate": 3.4139599170698e-05,
      "loss": 0.3446,
      "step": 1240
    },
    {
      "epoch": 1.1613805970149254,
      "grad_norm": 0.27966083222913996,
      "learning_rate": 3.405321354526607e-05,
      "loss": 0.3633,
      "step": 1245
    },
    {
      "epoch": 1.166044776119403,
      "grad_norm": 0.3009610165825193,
      "learning_rate": 3.3966827919834144e-05,
      "loss": 0.3662,
      "step": 1250
    },
    {
      "epoch": 1.1707089552238805,
      "grad_norm": 0.25774204723518807,
      "learning_rate": 3.3880442294402215e-05,
      "loss": 0.3508,
      "step": 1255
    },
    {
      "epoch": 1.1753731343283582,
      "grad_norm": 0.2701469969202779,
      "learning_rate": 3.3794056668970286e-05,
      "loss": 0.35,
      "step": 1260
    },
    {
      "epoch": 1.180037313432836,
      "grad_norm": 0.31197901530431127,
      "learning_rate": 3.370767104353836e-05,
      "loss": 0.3573,
      "step": 1265
    },
    {
      "epoch": 1.1847014925373134,
      "grad_norm": 0.27299025339878263,
      "learning_rate": 3.362128541810643e-05,
      "loss": 0.3552,
      "step": 1270
    },
    {
      "epoch": 1.189365671641791,
      "grad_norm": 0.2968602253177383,
      "learning_rate": 3.35348997926745e-05,
      "loss": 0.3589,
      "step": 1275
    },
    {
      "epoch": 1.1940298507462686,
      "grad_norm": 0.2763238287094402,
      "learning_rate": 3.344851416724257e-05,
      "loss": 0.351,
      "step": 1280
    },
    {
      "epoch": 1.1986940298507462,
      "grad_norm": 0.3202573581070322,
      "learning_rate": 3.336212854181064e-05,
      "loss": 0.3411,
      "step": 1285
    },
    {
      "epoch": 1.203358208955224,
      "grad_norm": 0.30764924957330353,
      "learning_rate": 3.3275742916378713e-05,
      "loss": 0.3575,
      "step": 1290
    },
    {
      "epoch": 1.2080223880597014,
      "grad_norm": 0.27603722828472366,
      "learning_rate": 3.3189357290946785e-05,
      "loss": 0.3602,
      "step": 1295
    },
    {
      "epoch": 1.212686567164179,
      "grad_norm": 0.2694909612103058,
      "learning_rate": 3.3102971665514856e-05,
      "loss": 0.3592,
      "step": 1300
    },
    {
      "epoch": 1.2173507462686568,
      "grad_norm": 0.2707890483863253,
      "learning_rate": 3.3016586040082934e-05,
      "loss": 0.3588,
      "step": 1305
    },
    {
      "epoch": 1.2220149253731343,
      "grad_norm": 0.2628158624428764,
      "learning_rate": 3.2930200414651005e-05,
      "loss": 0.355,
      "step": 1310
    },
    {
      "epoch": 1.226679104477612,
      "grad_norm": 0.2838867705687958,
      "learning_rate": 3.2843814789219076e-05,
      "loss": 0.3592,
      "step": 1315
    },
    {
      "epoch": 1.2313432835820897,
      "grad_norm": 0.2846843088911086,
      "learning_rate": 3.275742916378715e-05,
      "loss": 0.347,
      "step": 1320
    },
    {
      "epoch": 1.2360074626865671,
      "grad_norm": 0.29888113772876473,
      "learning_rate": 3.267104353835522e-05,
      "loss": 0.3587,
      "step": 1325
    },
    {
      "epoch": 1.2406716417910448,
      "grad_norm": 0.2638641795487795,
      "learning_rate": 3.258465791292329e-05,
      "loss": 0.3501,
      "step": 1330
    },
    {
      "epoch": 1.2453358208955223,
      "grad_norm": 0.26109118367411616,
      "learning_rate": 3.249827228749136e-05,
      "loss": 0.3488,
      "step": 1335
    },
    {
      "epoch": 1.25,
      "grad_norm": 0.28582457615761736,
      "learning_rate": 3.241188666205944e-05,
      "loss": 0.3546,
      "step": 1340
    },
    {
      "epoch": 1.2546641791044777,
      "grad_norm": 0.26317489646737013,
      "learning_rate": 3.232550103662751e-05,
      "loss": 0.3599,
      "step": 1345
    },
    {
      "epoch": 1.2593283582089552,
      "grad_norm": 0.2850497488023984,
      "learning_rate": 3.223911541119558e-05,
      "loss": 0.3545,
      "step": 1350
    },
    {
      "epoch": 1.2639925373134329,
      "grad_norm": 0.26322987421118205,
      "learning_rate": 3.215272978576365e-05,
      "loss": 0.3535,
      "step": 1355
    },
    {
      "epoch": 1.2686567164179103,
      "grad_norm": 0.25245802499063735,
      "learning_rate": 3.2066344160331724e-05,
      "loss": 0.3607,
      "step": 1360
    },
    {
      "epoch": 1.273320895522388,
      "grad_norm": 0.2500221917459853,
      "learning_rate": 3.1979958534899796e-05,
      "loss": 0.3458,
      "step": 1365
    },
    {
      "epoch": 1.2779850746268657,
      "grad_norm": 0.27255455009058094,
      "learning_rate": 3.189357290946787e-05,
      "loss": 0.3551,
      "step": 1370
    },
    {
      "epoch": 1.2826492537313432,
      "grad_norm": 0.26761217332861165,
      "learning_rate": 3.180718728403594e-05,
      "loss": 0.3573,
      "step": 1375
    },
    {
      "epoch": 1.287313432835821,
      "grad_norm": 0.2888414899260542,
      "learning_rate": 3.172080165860401e-05,
      "loss": 0.3562,
      "step": 1380
    },
    {
      "epoch": 1.2919776119402986,
      "grad_norm": 0.2892039309471524,
      "learning_rate": 3.163441603317208e-05,
      "loss": 0.3481,
      "step": 1385
    },
    {
      "epoch": 1.296641791044776,
      "grad_norm": 0.26582052221038566,
      "learning_rate": 3.154803040774015e-05,
      "loss": 0.3505,
      "step": 1390
    },
    {
      "epoch": 1.3013059701492538,
      "grad_norm": 0.32068551340121987,
      "learning_rate": 3.146164478230822e-05,
      "loss": 0.3578,
      "step": 1395
    },
    {
      "epoch": 1.3059701492537314,
      "grad_norm": 0.3207593982574588,
      "learning_rate": 3.1375259156876294e-05,
      "loss": 0.356,
      "step": 1400
    },
    {
      "epoch": 1.310634328358209,
      "grad_norm": 0.2605884985951683,
      "learning_rate": 3.1288873531444366e-05,
      "loss": 0.3576,
      "step": 1405
    },
    {
      "epoch": 1.3152985074626866,
      "grad_norm": 0.27127620421192306,
      "learning_rate": 3.120248790601244e-05,
      "loss": 0.3617,
      "step": 1410
    },
    {
      "epoch": 1.3199626865671643,
      "grad_norm": 0.27493702262711306,
      "learning_rate": 3.111610228058051e-05,
      "loss": 0.3536,
      "step": 1415
    },
    {
      "epoch": 1.3246268656716418,
      "grad_norm": 0.2843286909110491,
      "learning_rate": 3.1029716655148586e-05,
      "loss": 0.3528,
      "step": 1420
    },
    {
      "epoch": 1.3292910447761195,
      "grad_norm": 0.2622391936635339,
      "learning_rate": 3.094333102971666e-05,
      "loss": 0.3628,
      "step": 1425
    },
    {
      "epoch": 1.333955223880597,
      "grad_norm": 0.26845938818311155,
      "learning_rate": 3.085694540428473e-05,
      "loss": 0.3512,
      "step": 1430
    },
    {
      "epoch": 1.3386194029850746,
      "grad_norm": 0.24854846652155657,
      "learning_rate": 3.07705597788528e-05,
      "loss": 0.339,
      "step": 1435
    },
    {
      "epoch": 1.3432835820895521,
      "grad_norm": 0.25581871461970673,
      "learning_rate": 3.068417415342088e-05,
      "loss": 0.3496,
      "step": 1440
    },
    {
      "epoch": 1.3479477611940298,
      "grad_norm": 0.27147944463759316,
      "learning_rate": 3.059778852798895e-05,
      "loss": 0.3611,
      "step": 1445
    },
    {
      "epoch": 1.3526119402985075,
      "grad_norm": 0.2551083854492795,
      "learning_rate": 3.0511402902557017e-05,
      "loss": 0.3503,
      "step": 1450
    },
    {
      "epoch": 1.357276119402985,
      "grad_norm": 0.2815408353745405,
      "learning_rate": 3.0425017277125088e-05,
      "loss": 0.3593,
      "step": 1455
    },
    {
      "epoch": 1.3619402985074627,
      "grad_norm": 0.24695125511750265,
      "learning_rate": 3.0338631651693163e-05,
      "loss": 0.3604,
      "step": 1460
    },
    {
      "epoch": 1.3666044776119404,
      "grad_norm": 0.27023404455157657,
      "learning_rate": 3.0252246026261234e-05,
      "loss": 0.3631,
      "step": 1465
    },
    {
      "epoch": 1.3712686567164178,
      "grad_norm": 0.2667366939937409,
      "learning_rate": 3.0165860400829305e-05,
      "loss": 0.3657,
      "step": 1470
    },
    {
      "epoch": 1.3759328358208955,
      "grad_norm": 0.25070674429334766,
      "learning_rate": 3.0079474775397376e-05,
      "loss": 0.3482,
      "step": 1475
    },
    {
      "epoch": 1.3805970149253732,
      "grad_norm": 0.23895341038260484,
      "learning_rate": 2.9993089149965448e-05,
      "loss": 0.3491,
      "step": 1480
    },
    {
      "epoch": 1.3852611940298507,
      "grad_norm": 0.2929506520863598,
      "learning_rate": 2.990670352453352e-05,
      "loss": 0.3602,
      "step": 1485
    },
    {
      "epoch": 1.3899253731343284,
      "grad_norm": 0.2937866630652489,
      "learning_rate": 2.982031789910159e-05,
      "loss": 0.3557,
      "step": 1490
    },
    {
      "epoch": 1.394589552238806,
      "grad_norm": 0.2976396674050369,
      "learning_rate": 2.973393227366966e-05,
      "loss": 0.3532,
      "step": 1495
    },
    {
      "epoch": 1.3992537313432836,
      "grad_norm": 0.2870217248229499,
      "learning_rate": 2.9647546648237733e-05,
      "loss": 0.3538,
      "step": 1500
    },
    {
      "epoch": 1.4039179104477613,
      "grad_norm": 0.28599489651818,
      "learning_rate": 2.9561161022805807e-05,
      "loss": 0.3499,
      "step": 1505
    },
    {
      "epoch": 1.4085820895522387,
      "grad_norm": 0.2952620243285733,
      "learning_rate": 2.947477539737388e-05,
      "loss": 0.3506,
      "step": 1510
    },
    {
      "epoch": 1.4132462686567164,
      "grad_norm": 0.2871779692255422,
      "learning_rate": 2.938838977194195e-05,
      "loss": 0.3544,
      "step": 1515
    },
    {
      "epoch": 1.417910447761194,
      "grad_norm": 0.30382607213132756,
      "learning_rate": 2.930200414651002e-05,
      "loss": 0.3643,
      "step": 1520
    },
    {
      "epoch": 1.4225746268656716,
      "grad_norm": 0.2891786692354116,
      "learning_rate": 2.9215618521078092e-05,
      "loss": 0.3544,
      "step": 1525
    },
    {
      "epoch": 1.4272388059701493,
      "grad_norm": 0.32658648615980534,
      "learning_rate": 2.9129232895646163e-05,
      "loss": 0.3494,
      "step": 1530
    },
    {
      "epoch": 1.4319029850746268,
      "grad_norm": 0.27089902379485054,
      "learning_rate": 2.9042847270214235e-05,
      "loss": 0.3537,
      "step": 1535
    },
    {
      "epoch": 1.4365671641791045,
      "grad_norm": 0.2398733939827338,
      "learning_rate": 2.8956461644782313e-05,
      "loss": 0.3602,
      "step": 1540
    },
    {
      "epoch": 1.4412313432835822,
      "grad_norm": 0.27466664266850493,
      "learning_rate": 2.8870076019350384e-05,
      "loss": 0.3591,
      "step": 1545
    },
    {
      "epoch": 1.4458955223880596,
      "grad_norm": 0.26504415968893347,
      "learning_rate": 2.8783690393918455e-05,
      "loss": 0.3537,
      "step": 1550
    },
    {
      "epoch": 1.4505597014925373,
      "grad_norm": 0.27382492022256605,
      "learning_rate": 2.8697304768486526e-05,
      "loss": 0.3634,
      "step": 1555
    },
    {
      "epoch": 1.455223880597015,
      "grad_norm": 0.28596038604462387,
      "learning_rate": 2.8610919143054598e-05,
      "loss": 0.356,
      "step": 1560
    },
    {
      "epoch": 1.4598880597014925,
      "grad_norm": 0.26726224473515064,
      "learning_rate": 2.852453351762267e-05,
      "loss": 0.3561,
      "step": 1565
    },
    {
      "epoch": 1.4645522388059702,
      "grad_norm": 0.266577658523579,
      "learning_rate": 2.843814789219074e-05,
      "loss": 0.366,
      "step": 1570
    },
    {
      "epoch": 1.4692164179104479,
      "grad_norm": 0.2779897992685907,
      "learning_rate": 2.8351762266758815e-05,
      "loss": 0.3524,
      "step": 1575
    },
    {
      "epoch": 1.4738805970149254,
      "grad_norm": 0.2820315034671796,
      "learning_rate": 2.8265376641326886e-05,
      "loss": 0.3585,
      "step": 1580
    },
    {
      "epoch": 1.478544776119403,
      "grad_norm": 0.27211062325749025,
      "learning_rate": 2.8178991015894957e-05,
      "loss": 0.3566,
      "step": 1585
    },
    {
      "epoch": 1.4832089552238805,
      "grad_norm": 0.2800867266262322,
      "learning_rate": 2.809260539046303e-05,
      "loss": 0.3506,
      "step": 1590
    },
    {
      "epoch": 1.4878731343283582,
      "grad_norm": 0.2625597818795497,
      "learning_rate": 2.80062197650311e-05,
      "loss": 0.3598,
      "step": 1595
    },
    {
      "epoch": 1.4925373134328357,
      "grad_norm": 0.2639178137625416,
      "learning_rate": 2.791983413959917e-05,
      "loss": 0.3584,
      "step": 1600
    },
    {
      "epoch": 1.4972014925373134,
      "grad_norm": 0.28168075764440176,
      "learning_rate": 2.7833448514167242e-05,
      "loss": 0.3506,
      "step": 1605
    },
    {
      "epoch": 1.501865671641791,
      "grad_norm": 0.2456611629883674,
      "learning_rate": 2.7747062888735313e-05,
      "loss": 0.3498,
      "step": 1610
    },
    {
      "epoch": 1.5065298507462686,
      "grad_norm": 0.2793295001558765,
      "learning_rate": 2.7660677263303385e-05,
      "loss": 0.3569,
      "step": 1615
    },
    {
      "epoch": 1.5111940298507462,
      "grad_norm": 0.28320988864172636,
      "learning_rate": 2.757429163787146e-05,
      "loss": 0.3567,
      "step": 1620
    },
    {
      "epoch": 1.515858208955224,
      "grad_norm": 0.24600781780761763,
      "learning_rate": 2.748790601243953e-05,
      "loss": 0.3546,
      "step": 1625
    },
    {
      "epoch": 1.5205223880597014,
      "grad_norm": 0.29802437311497687,
      "learning_rate": 2.7401520387007602e-05,
      "loss": 0.359,
      "step": 1630
    },
    {
      "epoch": 1.525186567164179,
      "grad_norm": 0.2898548349483986,
      "learning_rate": 2.7315134761575673e-05,
      "loss": 0.357,
      "step": 1635
    },
    {
      "epoch": 1.5298507462686568,
      "grad_norm": 0.2652778573810245,
      "learning_rate": 2.722874913614375e-05,
      "loss": 0.354,
      "step": 1640
    },
    {
      "epoch": 1.5345149253731343,
      "grad_norm": 0.27998502867011055,
      "learning_rate": 2.7142363510711822e-05,
      "loss": 0.3483,
      "step": 1645
    },
    {
      "epoch": 1.539179104477612,
      "grad_norm": 0.24843738466219273,
      "learning_rate": 2.7055977885279894e-05,
      "loss": 0.357,
      "step": 1650
    },
    {
      "epoch": 1.5438432835820897,
      "grad_norm": 0.2792384527501717,
      "learning_rate": 2.6969592259847965e-05,
      "loss": 0.3472,
      "step": 1655
    },
    {
      "epoch": 1.5485074626865671,
      "grad_norm": 0.30365838441381043,
      "learning_rate": 2.6883206634416036e-05,
      "loss": 0.3519,
      "step": 1660
    },
    {
      "epoch": 1.5531716417910446,
      "grad_norm": 0.25442907208142573,
      "learning_rate": 2.6796821008984107e-05,
      "loss": 0.3558,
      "step": 1665
    },
    {
      "epoch": 1.5578358208955225,
      "grad_norm": 0.24290716232321657,
      "learning_rate": 2.671043538355218e-05,
      "loss": 0.3602,
      "step": 1670
    },
    {
      "epoch": 1.5625,
      "grad_norm": 0.25386647347414554,
      "learning_rate": 2.662404975812025e-05,
      "loss": 0.3521,
      "step": 1675
    },
    {
      "epoch": 1.5671641791044775,
      "grad_norm": 0.2864019555023363,
      "learning_rate": 2.653766413268832e-05,
      "loss": 0.3609,
      "step": 1680
    },
    {
      "epoch": 1.5718283582089554,
      "grad_norm": 0.25579257207665645,
      "learning_rate": 2.6451278507256396e-05,
      "loss": 0.3529,
      "step": 1685
    },
    {
      "epoch": 1.5764925373134329,
      "grad_norm": 0.25166320213104026,
      "learning_rate": 2.6364892881824467e-05,
      "loss": 0.3492,
      "step": 1690
    },
    {
      "epoch": 1.5811567164179103,
      "grad_norm": 0.30094990205601113,
| "learning_rate": 2.6278507256392538e-05, | |
| "loss": 0.3505, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 1.585820895522388, | |
| "grad_norm": 0.26105519447474484, | |
| "learning_rate": 2.619212163096061e-05, | |
| "loss": 0.3432, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 1.5904850746268657, | |
| "grad_norm": 0.27293116111619453, | |
| "learning_rate": 2.610573600552868e-05, | |
| "loss": 0.3408, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 1.5951492537313432, | |
| "grad_norm": 0.2615370848704197, | |
| "learning_rate": 2.6019350380096752e-05, | |
| "loss": 0.3631, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 1.599813432835821, | |
| "grad_norm": 0.2470060807723687, | |
| "learning_rate": 2.5932964754664823e-05, | |
| "loss": 0.3577, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 1.6044776119402986, | |
| "grad_norm": 0.29796340873977156, | |
| "learning_rate": 2.5846579129232894e-05, | |
| "loss": 0.3555, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 1.609141791044776, | |
| "grad_norm": 0.25003177847747987, | |
| "learning_rate": 2.5760193503800965e-05, | |
| "loss": 0.3523, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 1.6138059701492538, | |
| "grad_norm": 0.2550676926859736, | |
| "learning_rate": 2.5673807878369037e-05, | |
| "loss": 0.3422, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 1.6184701492537314, | |
| "grad_norm": 0.26181500216568176, | |
| "learning_rate": 2.558742225293711e-05, | |
| "loss": 0.3542, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 1.623134328358209, | |
| "grad_norm": 0.2828771287093038, | |
| "learning_rate": 2.5501036627505186e-05, | |
| "loss": 0.3576, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 1.6277985074626866, | |
| "grad_norm": 0.2742845163473262, | |
| "learning_rate": 2.5414651002073257e-05, | |
| "loss": 0.3643, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 1.6324626865671643, | |
| "grad_norm": 0.26805652418683656, | |
| "learning_rate": 2.5328265376641332e-05, | |
| "loss": 0.3532, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 1.6371268656716418, | |
| "grad_norm": 0.27359721321194713, | |
| "learning_rate": 2.5241879751209403e-05, | |
| "loss": 0.3566, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 1.6417910447761193, | |
| "grad_norm": 0.5232067363708305, | |
| "learning_rate": 2.5155494125777474e-05, | |
| "loss": 0.3453, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 1.6464552238805972, | |
| "grad_norm": 0.26771546515006417, | |
| "learning_rate": 2.5069108500345546e-05, | |
| "loss": 0.3439, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 1.6511194029850746, | |
| "grad_norm": 0.2622810928129032, | |
| "learning_rate": 2.4982722874913617e-05, | |
| "loss": 0.3471, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 1.6557835820895521, | |
| "grad_norm": 0.24009740199328528, | |
| "learning_rate": 2.4896337249481688e-05, | |
| "loss": 0.3482, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 1.6604477611940298, | |
| "grad_norm": 0.2926201618624942, | |
| "learning_rate": 2.480995162404976e-05, | |
| "loss": 0.3555, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 1.6651119402985075, | |
| "grad_norm": 0.24374656418311616, | |
| "learning_rate": 2.472356599861783e-05, | |
| "loss": 0.3449, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 1.669776119402985, | |
| "grad_norm": 0.2768964721803294, | |
| "learning_rate": 2.4637180373185902e-05, | |
| "loss": 0.3542, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 1.6744402985074627, | |
| "grad_norm": 0.25840400655196105, | |
| "learning_rate": 2.4550794747753973e-05, | |
| "loss": 0.3465, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 1.6791044776119404, | |
| "grad_norm": 0.2649760623817318, | |
| "learning_rate": 2.4464409122322048e-05, | |
| "loss": 0.3524, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 1.6837686567164178, | |
| "grad_norm": 0.23933292859492197, | |
| "learning_rate": 2.437802349689012e-05, | |
| "loss": 0.3576, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 1.6884328358208955, | |
| "grad_norm": 0.25053732733007344, | |
| "learning_rate": 2.429163787145819e-05, | |
| "loss": 0.3645, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 1.6930970149253732, | |
| "grad_norm": 0.2825893772142663, | |
| "learning_rate": 2.4205252246026265e-05, | |
| "loss": 0.3478, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 1.6977611940298507, | |
| "grad_norm": 0.28417859863332967, | |
| "learning_rate": 2.4118866620594336e-05, | |
| "loss": 0.348, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 1.7024253731343284, | |
| "grad_norm": 0.2879489604508746, | |
| "learning_rate": 2.4032480995162407e-05, | |
| "loss": 0.349, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 1.707089552238806, | |
| "grad_norm": 0.24265037778338103, | |
| "learning_rate": 2.394609536973048e-05, | |
| "loss": 0.3422, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 1.7117537313432836, | |
| "grad_norm": 0.2881369749112946, | |
| "learning_rate": 2.385970974429855e-05, | |
| "loss": 0.3538, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 1.716417910447761, | |
| "grad_norm": 0.232491104669316, | |
| "learning_rate": 2.377332411886662e-05, | |
| "loss": 0.3507, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 1.721082089552239, | |
| "grad_norm": 0.2895447015016962, | |
| "learning_rate": 2.3686938493434692e-05, | |
| "loss": 0.3492, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 1.7257462686567164, | |
| "grad_norm": 0.26044324435331456, | |
| "learning_rate": 2.3600552868002763e-05, | |
| "loss": 0.3632, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 1.730410447761194, | |
| "grad_norm": 0.2517957721149909, | |
| "learning_rate": 2.3514167242570838e-05, | |
| "loss": 0.3578, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 1.7350746268656716, | |
| "grad_norm": 0.25700908015060114, | |
| "learning_rate": 2.342778161713891e-05, | |
| "loss": 0.3494, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 1.7397388059701493, | |
| "grad_norm": 0.2313225543144057, | |
| "learning_rate": 2.3341395991706984e-05, | |
| "loss": 0.3426, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 1.7444029850746268, | |
| "grad_norm": 0.2600707753157153, | |
| "learning_rate": 2.3255010366275055e-05, | |
| "loss": 0.3557, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 1.7490671641791045, | |
| "grad_norm": 0.25589288460782644, | |
| "learning_rate": 2.3168624740843126e-05, | |
| "loss": 0.343, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 1.7537313432835822, | |
| "grad_norm": 0.24986531492565742, | |
| "learning_rate": 2.3082239115411198e-05, | |
| "loss": 0.3548, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 1.7583955223880596, | |
| "grad_norm": 0.2516820034206935, | |
| "learning_rate": 2.299585348997927e-05, | |
| "loss": 0.3472, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 1.7630597014925373, | |
| "grad_norm": 0.24770724816038975, | |
| "learning_rate": 2.290946786454734e-05, | |
| "loss": 0.3519, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 1.767723880597015, | |
| "grad_norm": 0.24869502131677107, | |
| "learning_rate": 2.282308223911541e-05, | |
| "loss": 0.3545, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 1.7723880597014925, | |
| "grad_norm": 0.255903729440434, | |
| "learning_rate": 2.2736696613683483e-05, | |
| "loss": 0.347, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 1.7770522388059702, | |
| "grad_norm": 0.2397326237151425, | |
| "learning_rate": 2.2650310988251554e-05, | |
| "loss": 0.3537, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 1.7817164179104479, | |
| "grad_norm": 0.2589396668760254, | |
| "learning_rate": 2.256392536281963e-05, | |
| "loss": 0.3505, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 1.7863805970149254, | |
| "grad_norm": 0.24670368572333448, | |
| "learning_rate": 2.24775397373877e-05, | |
| "loss": 0.3472, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 1.7910447761194028, | |
| "grad_norm": 0.2614986243573285, | |
| "learning_rate": 2.2391154111955774e-05, | |
| "loss": 0.3367, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 1.7957089552238807, | |
| "grad_norm": 0.26142392440881457, | |
| "learning_rate": 2.2304768486523846e-05, | |
| "loss": 0.3463, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 1.8003731343283582, | |
| "grad_norm": 0.2739383114427929, | |
| "learning_rate": 2.2218382861091917e-05, | |
| "loss": 0.3578, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 1.8050373134328357, | |
| "grad_norm": 0.2734039958767109, | |
| "learning_rate": 2.2131997235659988e-05, | |
| "loss": 0.3442, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 1.8097014925373134, | |
| "grad_norm": 0.25506385638157236, | |
| "learning_rate": 2.204561161022806e-05, | |
| "loss": 0.3558, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 1.814365671641791, | |
| "grad_norm": 0.25093530490708016, | |
| "learning_rate": 2.195922598479613e-05, | |
| "loss": 0.3481, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 1.8190298507462686, | |
| "grad_norm": 0.25930375140048917, | |
| "learning_rate": 2.1872840359364202e-05, | |
| "loss": 0.3374, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 1.8236940298507462, | |
| "grad_norm": 0.24404930352930873, | |
| "learning_rate": 2.1786454733932273e-05, | |
| "loss": 0.3471, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 1.828358208955224, | |
| "grad_norm": 0.23965079348393534, | |
| "learning_rate": 2.1700069108500344e-05, | |
| "loss": 0.3487, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 1.8330223880597014, | |
| "grad_norm": 0.24990708777991258, | |
| "learning_rate": 2.161368348306842e-05, | |
| "loss": 0.3457, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 1.837686567164179, | |
| "grad_norm": 0.24963255152262548, | |
| "learning_rate": 2.152729785763649e-05, | |
| "loss": 0.354, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 1.8423507462686568, | |
| "grad_norm": 0.2436420122081074, | |
| "learning_rate": 2.1440912232204565e-05, | |
| "loss": 0.3534, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 1.8470149253731343, | |
| "grad_norm": 0.25361981108278814, | |
| "learning_rate": 2.1354526606772636e-05, | |
| "loss": 0.3546, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 1.851679104477612, | |
| "grad_norm": 0.2542823223867117, | |
| "learning_rate": 2.1268140981340707e-05, | |
| "loss": 0.3471, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 1.8563432835820897, | |
| "grad_norm": 0.23722347416075998, | |
| "learning_rate": 2.118175535590878e-05, | |
| "loss": 0.3421, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 1.8610074626865671, | |
| "grad_norm": 0.25312929188512606, | |
| "learning_rate": 2.109536973047685e-05, | |
| "loss": 0.3441, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 1.8656716417910446, | |
| "grad_norm": 0.2341466410072212, | |
| "learning_rate": 2.100898410504492e-05, | |
| "loss": 0.3425, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 1.8703358208955225, | |
| "grad_norm": 0.2457623931567879, | |
| "learning_rate": 2.0922598479612992e-05, | |
| "loss": 0.347, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 1.875, | |
| "grad_norm": 0.24966420919273546, | |
| "learning_rate": 2.0836212854181063e-05, | |
| "loss": 0.3461, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 1.8796641791044775, | |
| "grad_norm": 0.2680320625734047, | |
| "learning_rate": 2.0749827228749135e-05, | |
| "loss": 0.3564, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 1.8843283582089554, | |
| "grad_norm": 0.24478590874399667, | |
| "learning_rate": 2.066344160331721e-05, | |
| "loss": 0.349, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 1.8889925373134329, | |
| "grad_norm": 0.2545220538690271, | |
| "learning_rate": 2.057705597788528e-05, | |
| "loss": 0.3567, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 1.8936567164179103, | |
| "grad_norm": 0.25461855757867324, | |
| "learning_rate": 2.0490670352453352e-05, | |
| "loss": 0.3511, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 1.898320895522388, | |
| "grad_norm": 0.2593526072638512, | |
| "learning_rate": 2.0404284727021426e-05, | |
| "loss": 0.3391, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 1.9029850746268657, | |
| "grad_norm": 0.25564276928826335, | |
| "learning_rate": 2.0317899101589498e-05, | |
| "loss": 0.3429, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 1.9076492537313432, | |
| "grad_norm": 0.23400294570265173, | |
| "learning_rate": 2.023151347615757e-05, | |
| "loss": 0.3457, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 1.912313432835821, | |
| "grad_norm": 0.2533206273447285, | |
| "learning_rate": 2.014512785072564e-05, | |
| "loss": 0.3569, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 1.9169776119402986, | |
| "grad_norm": 0.2606716293694525, | |
| "learning_rate": 2.005874222529371e-05, | |
| "loss": 0.3483, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 1.921641791044776, | |
| "grad_norm": 0.2607506562456741, | |
| "learning_rate": 1.9972356599861783e-05, | |
| "loss": 0.3431, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 1.9263059701492538, | |
| "grad_norm": 0.250090794633157, | |
| "learning_rate": 1.9885970974429854e-05, | |
| "loss": 0.3423, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 1.9309701492537314, | |
| "grad_norm": 0.27321428099761597, | |
| "learning_rate": 1.979958534899793e-05, | |
| "loss": 0.3513, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 1.935634328358209, | |
| "grad_norm": 0.23816182108366313, | |
| "learning_rate": 1.9713199723566e-05, | |
| "loss": 0.3479, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 1.9402985074626866, | |
| "grad_norm": 0.23363715952709058, | |
| "learning_rate": 1.962681409813407e-05, | |
| "loss": 0.3481, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 1.9449626865671643, | |
| "grad_norm": 0.25020392869055014, | |
| "learning_rate": 1.9540428472702142e-05, | |
| "loss": 0.346, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 1.9496268656716418, | |
| "grad_norm": 0.2364846986541251, | |
| "learning_rate": 1.9454042847270217e-05, | |
| "loss": 0.3417, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 1.9542910447761193, | |
| "grad_norm": 0.23696354740046346, | |
| "learning_rate": 1.9367657221838288e-05, | |
| "loss": 0.3488, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 1.9589552238805972, | |
| "grad_norm": 0.2376480979373365, | |
| "learning_rate": 1.928127159640636e-05, | |
| "loss": 0.3438, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 1.9636194029850746, | |
| "grad_norm": 0.22706636086111684, | |
| "learning_rate": 1.919488597097443e-05, | |
| "loss": 0.3493, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 1.9682835820895521, | |
| "grad_norm": 0.22925919268119685, | |
| "learning_rate": 1.9108500345542502e-05, | |
| "loss": 0.3474, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 1.9729477611940298, | |
| "grad_norm": 0.23285022865823277, | |
| "learning_rate": 1.9022114720110573e-05, | |
| "loss": 0.3458, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 1.9776119402985075, | |
| "grad_norm": 0.23533589864100854, | |
| "learning_rate": 1.8935729094678648e-05, | |
| "loss": 0.3469, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 1.982276119402985, | |
| "grad_norm": 0.24900209553097785, | |
| "learning_rate": 1.884934346924672e-05, | |
| "loss": 0.3457, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 1.9869402985074627, | |
| "grad_norm": 0.2596527300112273, | |
| "learning_rate": 1.876295784381479e-05, | |
| "loss": 0.3458, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 1.9916044776119404, | |
| "grad_norm": 0.25297637923104466, | |
| "learning_rate": 1.867657221838286e-05, | |
| "loss": 0.3538, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 1.9962686567164178, | |
| "grad_norm": 0.25115287512115325, | |
| "learning_rate": 1.8590186592950933e-05, | |
| "loss": 0.3494, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 2.0009328358208953, | |
| "grad_norm": 0.34462376078995915, | |
| "learning_rate": 1.8503800967519007e-05, | |
| "loss": 0.3311, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 2.0055970149253732, | |
| "grad_norm": 0.31745989539860364, | |
| "learning_rate": 1.841741534208708e-05, | |
| "loss": 0.2704, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 2.0102611940298507, | |
| "grad_norm": 0.3325856581226571, | |
| "learning_rate": 1.833102971665515e-05, | |
| "loss": 0.2658, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 2.014925373134328, | |
| "grad_norm": 0.28111688309902305, | |
| "learning_rate": 1.824464409122322e-05, | |
| "loss": 0.2776, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 2.019589552238806, | |
| "grad_norm": 0.8343734957734922, | |
| "learning_rate": 1.8158258465791292e-05, | |
| "loss": 0.2674, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 2.0242537313432836, | |
| "grad_norm": 0.3008918771929192, | |
| "learning_rate": 1.8071872840359367e-05, | |
| "loss": 0.2637, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 2.028917910447761, | |
| "grad_norm": 0.2674156399841297, | |
| "learning_rate": 1.7985487214927438e-05, | |
| "loss": 0.2653, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 2.033582089552239, | |
| "grad_norm": 0.27071839379349627, | |
| "learning_rate": 1.789910158949551e-05, | |
| "loss": 0.272, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 2.0382462686567164, | |
| "grad_norm": 0.25835150358502124, | |
| "learning_rate": 1.781271596406358e-05, | |
| "loss": 0.2723, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 2.042910447761194, | |
| "grad_norm": 0.26405617682044674, | |
| "learning_rate": 1.772633033863165e-05, | |
| "loss": 0.2683, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 2.047574626865672, | |
| "grad_norm": 0.27323016614087425, | |
| "learning_rate": 1.7639944713199723e-05, | |
| "loss": 0.2675, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 2.0522388059701493, | |
| "grad_norm": 0.24268603832962887, | |
| "learning_rate": 1.7553559087767794e-05, | |
| "loss": 0.2706, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 2.0569029850746268, | |
| "grad_norm": 0.28853015272296106, | |
| "learning_rate": 1.746717346233587e-05, | |
| "loss": 0.2592, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 2.0615671641791047, | |
| "grad_norm": 0.24816793104410012, | |
| "learning_rate": 1.738078783690394e-05, | |
| "loss": 0.2727, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 2.066231343283582, | |
| "grad_norm": 0.24920948703787066, | |
| "learning_rate": 1.729440221147201e-05, | |
| "loss": 0.2694, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 2.0708955223880596, | |
| "grad_norm": 0.2527052338758247, | |
| "learning_rate": 1.7208016586040086e-05, | |
| "loss": 0.2652, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 2.075559701492537, | |
| "grad_norm": 0.26490434665717205, | |
| "learning_rate": 1.7121630960608157e-05, | |
| "loss": 0.2644, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 2.080223880597015, | |
| "grad_norm": 0.2658463283273834, | |
| "learning_rate": 1.703524533517623e-05, | |
| "loss": 0.2639, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 2.0848880597014925, | |
| "grad_norm": 0.2591667706104725, | |
| "learning_rate": 1.69488597097443e-05, | |
| "loss": 0.2696, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 2.08955223880597, | |
| "grad_norm": 0.2758661720017509, | |
| "learning_rate": 1.686247408431237e-05, | |
| "loss": 0.2667, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 2.094216417910448, | |
| "grad_norm": 0.2572245381793782, | |
| "learning_rate": 1.6776088458880442e-05, | |
| "loss": 0.2719, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 2.0988805970149254, | |
| "grad_norm": 0.25761215099207907, | |
| "learning_rate": 1.6689702833448513e-05, | |
| "loss": 0.2709, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 2.103544776119403, | |
| "grad_norm": 0.24800705383243069, | |
| "learning_rate": 1.6603317208016585e-05, | |
| "loss": 0.2672, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 2.1082089552238807, | |
| "grad_norm": 0.2503537601758487, | |
| "learning_rate": 1.651693158258466e-05, | |
| "loss": 0.2633, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 2.112873134328358, | |
| "grad_norm": 0.26387924392152445, | |
| "learning_rate": 1.643054595715273e-05, | |
| "loss": 0.266, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 2.1175373134328357, | |
| "grad_norm": 0.27222115707184863, | |
| "learning_rate": 1.6344160331720805e-05, | |
| "loss": 0.2673, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 2.1222014925373136, | |
| "grad_norm": 0.2548814810402365, | |
| "learning_rate": 1.6257774706288876e-05, | |
| "loss": 0.2681, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 2.126865671641791, | |
| "grad_norm": 0.2529910054582486, | |
| "learning_rate": 1.6171389080856948e-05, | |
| "loss": 0.2696, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 2.1315298507462686, | |
| "grad_norm": 0.2712749904632538, | |
| "learning_rate": 1.608500345542502e-05, | |
| "loss": 0.2675, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 2.1361940298507465, | |
| "grad_norm": 0.26886315839009545, | |
| "learning_rate": 1.599861782999309e-05, | |
| "loss": 0.2721, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 2.140858208955224, | |
| "grad_norm": 0.2393049208128386, | |
| "learning_rate": 1.591223220456116e-05, | |
| "loss": 0.2747, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 2.1455223880597014, | |
| "grad_norm": 0.26561723189427416, | |
| "learning_rate": 1.5825846579129232e-05, | |
| "loss": 0.2672, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 2.1501865671641793, | |
| "grad_norm": 0.24774625188071692, | |
| "learning_rate": 1.5739460953697304e-05, | |
| "loss": 0.2605, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 2.154850746268657, | |
| "grad_norm": 0.2499864107130725, | |
| "learning_rate": 1.5653075328265375e-05, | |
| "loss": 0.274, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 2.1595149253731343, | |
| "grad_norm": 0.2586158825182529, | |
| "learning_rate": 1.556668970283345e-05, | |
| "loss": 0.2641, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 2.1641791044776117, | |
| "grad_norm": 0.2571013103716005, | |
| "learning_rate": 1.548030407740152e-05, | |
| "loss": 0.2743, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 2.1688432835820897, | |
| "grad_norm": 0.2739745801801461, | |
| "learning_rate": 1.5393918451969595e-05, | |
| "loss": 0.2721, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 2.173507462686567, | |
| "grad_norm": 0.3329837011115105, | |
| "learning_rate": 1.5307532826537667e-05, | |
| "loss": 0.2686, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 2.1781716417910446, | |
| "grad_norm": 0.2516711017515163, | |
| "learning_rate": 1.5221147201105738e-05, | |
| "loss": 0.2588, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 2.1828358208955225, | |
| "grad_norm": 0.25954675565880664, | |
| "learning_rate": 1.513476157567381e-05, | |
| "loss": 0.2641, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 2.1875, | |
| "grad_norm": 0.2510807553710561, | |
| "learning_rate": 1.504837595024188e-05, | |
| "loss": 0.2746, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 2.1921641791044775, | |
| "grad_norm": 0.24971265081225327, | |
| "learning_rate": 1.4961990324809952e-05, | |
| "loss": 0.2784, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 2.1968283582089554, | |
| "grad_norm": 0.24765682727687305, | |
| "learning_rate": 1.4875604699378023e-05, | |
| "loss": 0.273, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 2.201492537313433, | |
| "grad_norm": 0.23776621022399377, | |
| "learning_rate": 1.4789219073946096e-05, | |
| "loss": 0.2735, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 2.2061567164179103, | |
| "grad_norm": 0.26558690571436727, | |
| "learning_rate": 1.4702833448514167e-05, | |
| "loss": 0.2793, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 2.2108208955223883, | |
| "grad_norm": 0.2474183073689056, | |
| "learning_rate": 1.4616447823082242e-05, | |
| "loss": 0.2716, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 2.2154850746268657, | |
| "grad_norm": 0.2545095185911103, | |
| "learning_rate": 1.4530062197650313e-05, | |
| "loss": 0.2701, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 2.220149253731343, | |
| "grad_norm": 0.2536956835957964, | |
| "learning_rate": 1.4443676572218384e-05, | |
| "loss": 0.2662, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 2.2248134328358207, | |
| "grad_norm": 0.24639634009671005, | |
| "learning_rate": 1.4357290946786455e-05, | |
| "loss": 0.262, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 2.2294776119402986, | |
| "grad_norm": 0.24238939975130466, | |
| "learning_rate": 1.4270905321354527e-05, | |
| "loss": 0.2723, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 2.234141791044776, | |
| "grad_norm": 0.24211925647253976, | |
| "learning_rate": 1.41845196959226e-05, | |
| "loss": 0.2572, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 2.2388059701492535, | |
| "grad_norm": 0.2540915419635605, | |
| "learning_rate": 1.409813407049067e-05, | |
| "loss": 0.2698, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 2.2434701492537314, | |
| "grad_norm": 0.25208723649450565, | |
| "learning_rate": 1.4011748445058742e-05, | |
| "loss": 0.2739, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 2.248134328358209, | |
| "grad_norm": 0.24342760359756468, | |
| "learning_rate": 1.3925362819626813e-05, | |
| "loss": 0.2678, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 2.2527985074626864, | |
| "grad_norm": 0.24459442060443407, | |
| "learning_rate": 1.3838977194194885e-05, | |
| "loss": 0.2698, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 2.2574626865671643, | |
| "grad_norm": 0.2552099134775623, | |
| "learning_rate": 1.375259156876296e-05, | |
| "loss": 0.2657, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 2.262126865671642, | |
| "grad_norm": 0.25793465754628664, | |
| "learning_rate": 1.3666205943331032e-05, | |
| "loss": 0.2681, | |
| "step": 2425 | |
| }, | |
| { | |
| "epoch": 2.2667910447761193, | |
| "grad_norm": 0.25349870543799646, | |
| "learning_rate": 1.3579820317899103e-05, | |
| "loss": 0.2707, | |
| "step": 2430 | |
| }, | |
| { | |
| "epoch": 2.271455223880597, | |
| "grad_norm": 0.2580724536578584, | |
| "learning_rate": 1.3493434692467175e-05, | |
| "loss": 0.2751, | |
| "step": 2435 | |
| }, | |
| { | |
| "epoch": 2.2761194029850746, | |
| "grad_norm": 0.24703169309863668, | |
| "learning_rate": 1.3407049067035246e-05, | |
| "loss": 0.2746, | |
| "step": 2440 | |
| }, | |
| { | |
| "epoch": 2.280783582089552, | |
| "grad_norm": 0.2618326690814996, | |
| "learning_rate": 1.3320663441603317e-05, | |
| "loss": 0.268, | |
| "step": 2445 | |
| }, | |
| { | |
| "epoch": 2.28544776119403, | |
| "grad_norm": 0.23857314436169325, | |
| "learning_rate": 1.323427781617139e-05, | |
| "loss": 0.2751, | |
| "step": 2450 | |
| }, | |
| { | |
| "epoch": 2.2901119402985075, | |
| "grad_norm": 0.2623148211362948, | |
| "learning_rate": 1.3147892190739461e-05, | |
| "loss": 0.2747, | |
| "step": 2455 | |
| }, | |
| { | |
| "epoch": 2.294776119402985, | |
| "grad_norm": 0.2517592706151068, | |
| "learning_rate": 1.3061506565307532e-05, | |
| "loss": 0.2598, | |
| "step": 2460 | |
| }, | |
| { | |
| "epoch": 2.299440298507463, | |
| "grad_norm": 0.24437509035028881, | |
| "learning_rate": 1.2975120939875604e-05, | |
| "loss": 0.2722, | |
| "step": 2465 | |
| }, | |
| { | |
| "epoch": 2.3041044776119404, | |
| "grad_norm": 0.2340530191985727, | |
| "learning_rate": 1.2888735314443678e-05, | |
| "loss": 0.2651, | |
| "step": 2470 | |
| }, | |
| { | |
| "epoch": 2.308768656716418, | |
| "grad_norm": 0.25808129186069695, | |
| "learning_rate": 1.280234968901175e-05, | |
| "loss": 0.2782, | |
| "step": 2475 | |
| }, | |
| { | |
| "epoch": 2.3134328358208958, | |
| "grad_norm": 0.23746879979058075, | |
| "learning_rate": 1.271596406357982e-05, | |
| "loss": 0.2652, | |
| "step": 2480 | |
| }, | |
| { | |
| "epoch": 2.3180970149253732, | |
| "grad_norm": 0.23791567374207684, | |
| "learning_rate": 1.2629578438147894e-05, | |
| "loss": 0.2582, | |
| "step": 2485 | |
| }, | |
| { | |
| "epoch": 2.3227611940298507, | |
| "grad_norm": 0.2553728045896781, | |
| "learning_rate": 1.2543192812715965e-05, | |
| "loss": 0.2675, | |
| "step": 2490 | |
| }, | |
| { | |
| "epoch": 2.327425373134328, | |
| "grad_norm": 0.24968752391415577, | |
| "learning_rate": 1.2456807187284036e-05, | |
| "loss": 0.2667, | |
| "step": 2495 | |
| }, | |
| { | |
| "epoch": 2.332089552238806, | |
| "grad_norm": 0.23464332995564288, | |
| "learning_rate": 1.2370421561852107e-05, | |
| "loss": 0.2737, | |
| "step": 2500 | |
| }, | |
| { | |
| "epoch": 2.3367537313432836, | |
| "grad_norm": 0.2512072779658876, | |
| "learning_rate": 1.228403593642018e-05, | |
| "loss": 0.2654, | |
| "step": 2505 | |
| }, | |
| { | |
| "epoch": 2.341417910447761, | |
| "grad_norm": 0.2493626585034648, | |
| "learning_rate": 1.2197650310988253e-05, | |
| "loss": 0.271, | |
| "step": 2510 | |
| }, | |
| { | |
| "epoch": 2.346082089552239, | |
| "grad_norm": 0.23896708455496457, | |
| "learning_rate": 1.2111264685556325e-05, | |
| "loss": 0.2642, | |
| "step": 2515 | |
| }, | |
| { | |
| "epoch": 2.3507462686567164, | |
| "grad_norm": 0.25362677211109363, | |
| "learning_rate": 1.2024879060124396e-05, | |
| "loss": 0.2712, | |
| "step": 2520 | |
| }, | |
| { | |
| "epoch": 2.355410447761194, | |
| "grad_norm": 0.24181526461056832, | |
| "learning_rate": 1.1938493434692467e-05, | |
| "loss": 0.2715, | |
| "step": 2525 | |
| }, | |
| { | |
| "epoch": 2.360074626865672, | |
| "grad_norm": 0.23971465758847246, | |
| "learning_rate": 1.1852107809260538e-05, | |
| "loss": 0.2687, | |
| "step": 2530 | |
| }, | |
| { | |
| "epoch": 2.3647388059701493, | |
| "grad_norm": 0.24128763933759378, | |
| "learning_rate": 1.1765722183828611e-05, | |
| "loss": 0.2695, | |
| "step": 2535 | |
| }, | |
| { | |
| "epoch": 2.3694029850746268, | |
| "grad_norm": 0.23460755324463464, | |
| "learning_rate": 1.1679336558396684e-05, | |
| "loss": 0.2632, | |
| "step": 2540 | |
| }, | |
| { | |
| "epoch": 2.3740671641791042, | |
| "grad_norm": 0.22549225312112542, | |
| "learning_rate": 1.1592950932964755e-05, | |
| "loss": 0.2654, | |
| "step": 2545 | |
| }, | |
| { | |
| "epoch": 2.378731343283582, | |
| "grad_norm": 0.2526617631910458, | |
| "learning_rate": 1.1506565307532827e-05, | |
| "loss": 0.2738, | |
| "step": 2550 | |
| }, | |
| { | |
| "epoch": 2.3833955223880596, | |
| "grad_norm": 0.24021776725835262, | |
| "learning_rate": 1.1420179682100898e-05, | |
| "loss": 0.2619, | |
| "step": 2555 | |
| }, | |
| { | |
| "epoch": 2.388059701492537, | |
| "grad_norm": 0.23540902781004067, | |
| "learning_rate": 1.133379405666897e-05, | |
| "loss": 0.2741, | |
| "step": 2560 | |
| }, | |
| { | |
| "epoch": 2.392723880597015, | |
| "grad_norm": 0.23479698925891263, | |
| "learning_rate": 1.1247408431237042e-05, | |
| "loss": 0.2682, | |
| "step": 2565 | |
| }, | |
| { | |
| "epoch": 2.3973880597014925, | |
| "grad_norm": 0.24887016643289742, | |
| "learning_rate": 1.1161022805805115e-05, | |
| "loss": 0.268, | |
| "step": 2570 | |
| }, | |
| { | |
| "epoch": 2.40205223880597, | |
| "grad_norm": 0.24593086117067733, | |
| "learning_rate": 1.1074637180373186e-05, | |
| "loss": 0.2709, | |
| "step": 2575 | |
| }, | |
| { | |
| "epoch": 2.406716417910448, | |
| "grad_norm": 0.25885964922435833, | |
| "learning_rate": 1.0988251554941257e-05, | |
| "loss": 0.2728, | |
| "step": 2580 | |
| }, | |
| { | |
| "epoch": 2.4113805970149254, | |
| "grad_norm": 0.24435455232131156, | |
| "learning_rate": 1.090186592950933e-05, | |
| "loss": 0.2664, | |
| "step": 2585 | |
| }, | |
| { | |
| "epoch": 2.416044776119403, | |
| "grad_norm": 0.24469244162406, | |
| "learning_rate": 1.0815480304077402e-05, | |
| "loss": 0.2701, | |
| "step": 2590 | |
| }, | |
| { | |
| "epoch": 2.4207089552238807, | |
| "grad_norm": 0.2512349716696054, | |
| "learning_rate": 1.0729094678645475e-05, | |
| "loss": 0.2678, | |
| "step": 2595 | |
| }, | |
| { | |
| "epoch": 2.425373134328358, | |
| "grad_norm": 0.25803301191456496, | |
| "learning_rate": 1.0642709053213546e-05, | |
| "loss": 0.2668, | |
| "step": 2600 | |
| }, | |
| { | |
| "epoch": 2.4300373134328357, | |
| "grad_norm": 0.2458613070225411, | |
| "learning_rate": 1.0556323427781617e-05, | |
| "loss": 0.2622, | |
| "step": 2605 | |
| }, | |
| { | |
| "epoch": 2.4347014925373136, | |
| "grad_norm": 0.2710975501873587, | |
| "learning_rate": 1.046993780234969e-05, | |
| "loss": 0.2679, | |
| "step": 2610 | |
| }, | |
| { | |
| "epoch": 2.439365671641791, | |
| "grad_norm": 0.25008012288704096, | |
| "learning_rate": 1.0383552176917761e-05, | |
| "loss": 0.2692, | |
| "step": 2615 | |
| }, | |
| { | |
| "epoch": 2.4440298507462686, | |
| "grad_norm": 0.23392754951465664, | |
| "learning_rate": 1.0297166551485832e-05, | |
| "loss": 0.2702, | |
| "step": 2620 | |
| }, | |
| { | |
| "epoch": 2.4486940298507465, | |
| "grad_norm": 0.243302798889899, | |
| "learning_rate": 1.0210780926053905e-05, | |
| "loss": 0.2735, | |
| "step": 2625 | |
| }, | |
| { | |
| "epoch": 2.453358208955224, | |
| "grad_norm": 0.24131928720078527, | |
| "learning_rate": 1.0124395300621977e-05, | |
| "loss": 0.2688, | |
| "step": 2630 | |
| }, | |
| { | |
| "epoch": 2.4580223880597014, | |
| "grad_norm": 0.2344949285280544, | |
| "learning_rate": 1.003800967519005e-05, | |
| "loss": 0.267, | |
| "step": 2635 | |
| }, | |
| { | |
| "epoch": 2.4626865671641793, | |
| "grad_norm": 0.24796769794999612, | |
| "learning_rate": 9.95162404975812e-06, | |
| "loss": 0.2667, | |
| "step": 2640 | |
| }, | |
| { | |
| "epoch": 2.467350746268657, | |
| "grad_norm": 0.23828706282350953, | |
| "learning_rate": 9.865238424326192e-06, | |
| "loss": 0.2608, | |
| "step": 2645 | |
| }, | |
| { | |
| "epoch": 2.4720149253731343, | |
| "grad_norm": 0.22762286203128362, | |
| "learning_rate": 9.778852798894263e-06, | |
| "loss": 0.2642, | |
| "step": 2650 | |
| }, | |
| { | |
| "epoch": 2.4766791044776117, | |
| "grad_norm": 0.2321126906149145, | |
| "learning_rate": 9.692467173462336e-06, | |
| "loss": 0.2664, | |
| "step": 2655 | |
| }, | |
| { | |
| "epoch": 2.4813432835820897, | |
| "grad_norm": 0.23435128567563568, | |
| "learning_rate": 9.606081548030409e-06, | |
| "loss": 0.2711, | |
| "step": 2660 | |
| }, | |
| { | |
| "epoch": 2.486007462686567, | |
| "grad_norm": 0.2521748738007196, | |
| "learning_rate": 9.51969592259848e-06, | |
| "loss": 0.2683, | |
| "step": 2665 | |
| }, | |
| { | |
| "epoch": 2.4906716417910446, | |
| "grad_norm": 0.260424429247942, | |
| "learning_rate": 9.433310297166552e-06, | |
| "loss": 0.2693, | |
| "step": 2670 | |
| }, | |
| { | |
| "epoch": 2.4953358208955225, | |
| "grad_norm": 0.22458772586911388, | |
| "learning_rate": 9.346924671734623e-06, | |
| "loss": 0.2628, | |
| "step": 2675 | |
| }, | |
| { | |
| "epoch": 2.5, | |
| "grad_norm": 0.23713403054801951, | |
| "learning_rate": 9.260539046302696e-06, | |
| "loss": 0.2634, | |
| "step": 2680 | |
| }, | |
| { | |
| "epoch": 2.5046641791044775, | |
| "grad_norm": 0.22955478548333993, | |
| "learning_rate": 9.174153420870769e-06, | |
| "loss": 0.2631, | |
| "step": 2685 | |
| }, | |
| { | |
| "epoch": 2.5093283582089554, | |
| "grad_norm": 0.25496880460601673, | |
| "learning_rate": 9.08776779543884e-06, | |
| "loss": 0.2632, | |
| "step": 2690 | |
| }, | |
| { | |
| "epoch": 2.513992537313433, | |
| "grad_norm": 0.24983728425632132, | |
| "learning_rate": 9.001382170006911e-06, | |
| "loss": 0.2659, | |
| "step": 2695 | |
| }, | |
| { | |
| "epoch": 2.5186567164179103, | |
| "grad_norm": 0.23995566764939302, | |
| "learning_rate": 8.914996544574982e-06, | |
| "loss": 0.2568, | |
| "step": 2700 | |
| }, | |
| { | |
| "epoch": 2.523320895522388, | |
| "grad_norm": 0.24223925619413772, | |
| "learning_rate": 8.828610919143054e-06, | |
| "loss": 0.2686, | |
| "step": 2705 | |
| }, | |
| { | |
| "epoch": 2.5279850746268657, | |
| "grad_norm": 0.23646068120323002, | |
| "learning_rate": 8.742225293711127e-06, | |
| "loss": 0.2721, | |
| "step": 2710 | |
| }, | |
| { | |
| "epoch": 2.532649253731343, | |
| "grad_norm": 0.2460843766164646, | |
| "learning_rate": 8.6558396682792e-06, | |
| "loss": 0.2626, | |
| "step": 2715 | |
| }, | |
| { | |
| "epoch": 2.5373134328358207, | |
| "grad_norm": 0.22864093941848215, | |
| "learning_rate": 8.56945404284727e-06, | |
| "loss": 0.2688, | |
| "step": 2720 | |
| }, | |
| { | |
| "epoch": 2.5419776119402986, | |
| "grad_norm": 0.23742190282233713, | |
| "learning_rate": 8.483068417415342e-06, | |
| "loss": 0.2692, | |
| "step": 2725 | |
| }, | |
| { | |
| "epoch": 2.546641791044776, | |
| "grad_norm": 0.2478901568656718, | |
| "learning_rate": 8.396682791983413e-06, | |
| "loss": 0.2684, | |
| "step": 2730 | |
| }, | |
| { | |
| "epoch": 2.5513059701492535, | |
| "grad_norm": 0.23309010407386355, | |
| "learning_rate": 8.310297166551486e-06, | |
| "loss": 0.2646, | |
| "step": 2735 | |
| }, | |
| { | |
| "epoch": 2.5559701492537314, | |
| "grad_norm": 0.24254951275370504, | |
| "learning_rate": 8.223911541119559e-06, | |
| "loss": 0.2673, | |
| "step": 2740 | |
| }, | |
| { | |
| "epoch": 2.560634328358209, | |
| "grad_norm": 0.2339314140337939, | |
| "learning_rate": 8.13752591568763e-06, | |
| "loss": 0.2686, | |
| "step": 2745 | |
| }, | |
| { | |
| "epoch": 2.5652985074626864, | |
| "grad_norm": 0.22243043480980354, | |
| "learning_rate": 8.051140290255702e-06, | |
| "loss": 0.2634, | |
| "step": 2750 | |
| }, | |
| { | |
| "epoch": 2.5699626865671643, | |
| "grad_norm": 0.22860327053487292, | |
| "learning_rate": 7.964754664823773e-06, | |
| "loss": 0.2676, | |
| "step": 2755 | |
| }, | |
| { | |
| "epoch": 2.574626865671642, | |
| "grad_norm": 0.23587109847868956, | |
| "learning_rate": 7.878369039391846e-06, | |
| "loss": 0.2729, | |
| "step": 2760 | |
| }, | |
| { | |
| "epoch": 2.5792910447761193, | |
| "grad_norm": 0.24460193568093883, | |
| "learning_rate": 7.791983413959917e-06, | |
| "loss": 0.2722, | |
| "step": 2765 | |
| }, | |
| { | |
| "epoch": 2.583955223880597, | |
| "grad_norm": 0.22653703483273155, | |
| "learning_rate": 7.70559778852799e-06, | |
| "loss": 0.271, | |
| "step": 2770 | |
| }, | |
| { | |
| "epoch": 2.5886194029850746, | |
| "grad_norm": 0.24437000886780794, | |
| "learning_rate": 7.619212163096061e-06, | |
| "loss": 0.2722, | |
| "step": 2775 | |
| }, | |
| { | |
| "epoch": 2.593283582089552, | |
| "grad_norm": 0.23272167194006402, | |
| "learning_rate": 7.532826537664132e-06, | |
| "loss": 0.2583, | |
| "step": 2780 | |
| }, | |
| { | |
| "epoch": 2.59794776119403, | |
| "grad_norm": 0.23973782103532768, | |
| "learning_rate": 7.446440912232205e-06, | |
| "loss": 0.2657, | |
| "step": 2785 | |
| }, | |
| { | |
| "epoch": 2.6026119402985075, | |
| "grad_norm": 0.24964486940054742, | |
| "learning_rate": 7.3600552868002774e-06, | |
| "loss": 0.2729, | |
| "step": 2790 | |
| }, | |
| { | |
| "epoch": 2.607276119402985, | |
| "grad_norm": 0.2251632575914843, | |
| "learning_rate": 7.273669661368349e-06, | |
| "loss": 0.2612, | |
| "step": 2795 | |
| }, | |
| { | |
| "epoch": 2.611940298507463, | |
| "grad_norm": 0.23594503024645086, | |
| "learning_rate": 7.18728403593642e-06, | |
| "loss": 0.2701, | |
| "step": 2800 | |
| }, | |
| { | |
| "epoch": 2.6166044776119404, | |
| "grad_norm": 0.2370886994709572, | |
| "learning_rate": 7.100898410504492e-06, | |
| "loss": 0.2658, | |
| "step": 2805 | |
| }, | |
| { | |
| "epoch": 2.621268656716418, | |
| "grad_norm": 0.23665493281733124, | |
| "learning_rate": 7.014512785072565e-06, | |
| "loss": 0.2612, | |
| "step": 2810 | |
| }, | |
| { | |
| "epoch": 2.6259328358208958, | |
| "grad_norm": 0.2247939758977245, | |
| "learning_rate": 6.928127159640636e-06, | |
| "loss": 0.2682, | |
| "step": 2815 | |
| }, | |
| { | |
| "epoch": 2.6305970149253732, | |
| "grad_norm": 0.2372209574517549, | |
| "learning_rate": 6.841741534208708e-06, | |
| "loss": 0.2581, | |
| "step": 2820 | |
| }, | |
| { | |
| "epoch": 2.6352611940298507, | |
| "grad_norm": 0.22631563786357567, | |
| "learning_rate": 6.7553559087767795e-06, | |
| "loss": 0.2605, | |
| "step": 2825 | |
| }, | |
| { | |
| "epoch": 2.6399253731343286, | |
| "grad_norm": 0.24593389084232972, | |
| "learning_rate": 6.668970283344851e-06, | |
| "loss": 0.2674, | |
| "step": 2830 | |
| }, | |
| { | |
| "epoch": 2.644589552238806, | |
| "grad_norm": 0.23018686121213885, | |
| "learning_rate": 6.5825846579129245e-06, | |
| "loss": 0.269, | |
| "step": 2835 | |
| }, | |
| { | |
| "epoch": 2.6492537313432836, | |
| "grad_norm": 0.22849941700351192, | |
| "learning_rate": 6.496199032480996e-06, | |
| "loss": 0.262, | |
| "step": 2840 | |
| }, | |
| { | |
| "epoch": 2.653917910447761, | |
| "grad_norm": 0.2371527092890804, | |
| "learning_rate": 6.409813407049067e-06, | |
| "loss": 0.2714, | |
| "step": 2845 | |
| }, | |
| { | |
| "epoch": 2.658582089552239, | |
| "grad_norm": 0.23677974841599528, | |
| "learning_rate": 6.323427781617139e-06, | |
| "loss": 0.2657, | |
| "step": 2850 | |
| }, | |
| { | |
| "epoch": 2.6632462686567164, | |
| "grad_norm": 0.22612454836371737, | |
| "learning_rate": 6.237042156185211e-06, | |
| "loss": 0.2674, | |
| "step": 2855 | |
| }, | |
| { | |
| "epoch": 2.667910447761194, | |
| "grad_norm": 0.23927600835764143, | |
| "learning_rate": 6.150656530753282e-06, | |
| "loss": 0.2598, | |
| "step": 2860 | |
| }, | |
| { | |
| "epoch": 2.6725746268656714, | |
| "grad_norm": 0.23360708406229097, | |
| "learning_rate": 6.064270905321355e-06, | |
| "loss": 0.2642, | |
| "step": 2865 | |
| }, | |
| { | |
| "epoch": 2.6772388059701493, | |
| "grad_norm": 0.23561433170123894, | |
| "learning_rate": 5.9778852798894266e-06, | |
| "loss": 0.266, | |
| "step": 2870 | |
| }, | |
| { | |
| "epoch": 2.6819029850746268, | |
| "grad_norm": 0.23767100396638932, | |
| "learning_rate": 5.891499654457499e-06, | |
| "loss": 0.2648, | |
| "step": 2875 | |
| }, | |
| { | |
| "epoch": 2.6865671641791042, | |
| "grad_norm": 0.23229283704689183, | |
| "learning_rate": 5.805114029025571e-06, | |
| "loss": 0.2652, | |
| "step": 2880 | |
| }, | |
| { | |
| "epoch": 2.691231343283582, | |
| "grad_norm": 0.22264370232401456, | |
| "learning_rate": 5.718728403593642e-06, | |
| "loss": 0.2657, | |
| "step": 2885 | |
| }, | |
| { | |
| "epoch": 2.6958955223880596, | |
| "grad_norm": 0.24521767390443636, | |
| "learning_rate": 5.632342778161714e-06, | |
| "loss": 0.2662, | |
| "step": 2890 | |
| }, | |
| { | |
| "epoch": 2.700559701492537, | |
| "grad_norm": 0.2166915889888561, | |
| "learning_rate": 5.545957152729786e-06, | |
| "loss": 0.2625, | |
| "step": 2895 | |
| }, | |
| { | |
| "epoch": 2.705223880597015, | |
| "grad_norm": 0.2355151338288367, | |
| "learning_rate": 5.459571527297857e-06, | |
| "loss": 0.2686, | |
| "step": 2900 | |
| }, | |
| { | |
| "epoch": 2.7098880597014925, | |
| "grad_norm": 0.2367183025823066, | |
| "learning_rate": 5.3731859018659295e-06, | |
| "loss": 0.2612, | |
| "step": 2905 | |
| }, | |
| { | |
| "epoch": 2.71455223880597, | |
| "grad_norm": 0.23675936181578167, | |
| "learning_rate": 5.2868002764340016e-06, | |
| "loss": 0.2627, | |
| "step": 2910 | |
| }, | |
| { | |
| "epoch": 2.719216417910448, | |
| "grad_norm": 0.24200409873118536, | |
| "learning_rate": 5.200414651002074e-06, | |
| "loss": 0.2644, | |
| "step": 2915 | |
| }, | |
| { | |
| "epoch": 2.7238805970149254, | |
| "grad_norm": 0.23428774327228738, | |
| "learning_rate": 5.114029025570146e-06, | |
| "loss": 0.2722, | |
| "step": 2920 | |
| }, | |
| { | |
| "epoch": 2.728544776119403, | |
| "grad_norm": 0.22879617404363742, | |
| "learning_rate": 5.027643400138217e-06, | |
| "loss": 0.2607, | |
| "step": 2925 | |
| }, | |
| { | |
| "epoch": 2.7332089552238807, | |
| "grad_norm": 0.24708519577601004, | |
| "learning_rate": 4.941257774706289e-06, | |
| "loss": 0.2733, | |
| "step": 2930 | |
| }, | |
| { | |
| "epoch": 2.737873134328358, | |
| "grad_norm": 0.23245944685874, | |
| "learning_rate": 4.854872149274361e-06, | |
| "loss": 0.2653, | |
| "step": 2935 | |
| }, | |
| { | |
| "epoch": 2.7425373134328357, | |
| "grad_norm": 0.22140473852044298, | |
| "learning_rate": 4.768486523842433e-06, | |
| "loss": 0.2624, | |
| "step": 2940 | |
| }, | |
| { | |
| "epoch": 2.7472014925373136, | |
| "grad_norm": 0.22724853656050234, | |
| "learning_rate": 4.6821008984105045e-06, | |
| "loss": 0.2662, | |
| "step": 2945 | |
| }, | |
| { | |
| "epoch": 2.751865671641791, | |
| "grad_norm": 0.23340631353511435, | |
| "learning_rate": 4.5957152729785766e-06, | |
| "loss": 0.2692, | |
| "step": 2950 | |
| }, | |
| { | |
| "epoch": 2.7565298507462686, | |
| "grad_norm": 0.22231588245367515, | |
| "learning_rate": 4.509329647546649e-06, | |
| "loss": 0.2619, | |
| "step": 2955 | |
| }, | |
| { | |
| "epoch": 2.7611940298507465, | |
| "grad_norm": 0.236805529088383, | |
| "learning_rate": 4.42294402211472e-06, | |
| "loss": 0.2671, | |
| "step": 2960 | |
| }, | |
| { | |
| "epoch": 2.765858208955224, | |
| "grad_norm": 0.2263524205327475, | |
| "learning_rate": 4.336558396682793e-06, | |
| "loss": 0.2639, | |
| "step": 2965 | |
| }, | |
| { | |
| "epoch": 2.7705223880597014, | |
| "grad_norm": 0.2374336486785027, | |
| "learning_rate": 4.250172771250864e-06, | |
| "loss": 0.2517, | |
| "step": 2970 | |
| }, | |
| { | |
| "epoch": 2.7751865671641793, | |
| "grad_norm": 0.22723414967421426, | |
| "learning_rate": 4.163787145818935e-06, | |
| "loss": 0.2625, | |
| "step": 2975 | |
| }, | |
| { | |
| "epoch": 2.779850746268657, | |
| "grad_norm": 0.23760961802576186, | |
| "learning_rate": 4.077401520387008e-06, | |
| "loss": 0.2564, | |
| "step": 2980 | |
| }, | |
| { | |
| "epoch": 2.7845149253731343, | |
| "grad_norm": 0.23000542372097346, | |
| "learning_rate": 3.9910158949550795e-06, | |
| "loss": 0.2595, | |
| "step": 2985 | |
| }, | |
| { | |
| "epoch": 2.789179104477612, | |
| "grad_norm": 0.22896874249878957, | |
| "learning_rate": 3.9046302695231515e-06, | |
| "loss": 0.2619, | |
| "step": 2990 | |
| }, | |
| { | |
| "epoch": 2.7938432835820897, | |
| "grad_norm": 0.24648549788545385, | |
| "learning_rate": 3.818244644091224e-06, | |
| "loss": 0.2679, | |
| "step": 2995 | |
| }, | |
| { | |
| "epoch": 2.798507462686567, | |
| "grad_norm": 0.22576605474395228, | |
| "learning_rate": 3.731859018659295e-06, | |
| "loss": 0.2599, | |
| "step": 3000 | |
| }, | |
| { | |
| "epoch": 2.8031716417910446, | |
| "grad_norm": 0.22746614636418047, | |
| "learning_rate": 3.6454733932273674e-06, | |
| "loss": 0.2655, | |
| "step": 3005 | |
| }, | |
| { | |
| "epoch": 2.8078358208955225, | |
| "grad_norm": 0.22233559413114062, | |
| "learning_rate": 3.559087767795439e-06, | |
| "loss": 0.2565, | |
| "step": 3010 | |
| }, | |
| { | |
| "epoch": 2.8125, | |
| "grad_norm": 0.2349815405469564, | |
| "learning_rate": 3.472702142363511e-06, | |
| "loss": 0.264, | |
| "step": 3015 | |
| }, | |
| { | |
| "epoch": 2.8171641791044775, | |
| "grad_norm": 0.23053391248756278, | |
| "learning_rate": 3.386316516931583e-06, | |
| "loss": 0.2629, | |
| "step": 3020 | |
| }, | |
| { | |
| "epoch": 2.8218283582089554, | |
| "grad_norm": 0.21811515702756912, | |
| "learning_rate": 3.2999308914996545e-06, | |
| "loss": 0.2615, | |
| "step": 3025 | |
| }, | |
| { | |
| "epoch": 2.826492537313433, | |
| "grad_norm": 0.21665332770700163, | |
| "learning_rate": 3.2135452660677265e-06, | |
| "loss": 0.267, | |
| "step": 3030 | |
| }, | |
| { | |
| "epoch": 2.8311567164179103, | |
| "grad_norm": 0.22517528765133638, | |
| "learning_rate": 3.127159640635798e-06, | |
| "loss": 0.2611, | |
| "step": 3035 | |
| }, | |
| { | |
| "epoch": 2.835820895522388, | |
| "grad_norm": 0.2139885432553865, | |
| "learning_rate": 3.0407740152038703e-06, | |
| "loss": 0.2631, | |
| "step": 3040 | |
| }, | |
| { | |
| "epoch": 2.8404850746268657, | |
| "grad_norm": 0.23238616180772478, | |
| "learning_rate": 2.954388389771942e-06, | |
| "loss": 0.2627, | |
| "step": 3045 | |
| }, | |
| { | |
| "epoch": 2.845149253731343, | |
| "grad_norm": 0.21869810037080065, | |
| "learning_rate": 2.868002764340014e-06, | |
| "loss": 0.2607, | |
| "step": 3050 | |
| }, | |
| { | |
| "epoch": 2.8498134328358207, | |
| "grad_norm": 0.22529522775714875, | |
| "learning_rate": 2.781617138908086e-06, | |
| "loss": 0.2604, | |
| "step": 3055 | |
| }, | |
| { | |
| "epoch": 2.8544776119402986, | |
| "grad_norm": 0.22972955920381508, | |
| "learning_rate": 2.6952315134761578e-06, | |
| "loss": 0.2672, | |
| "step": 3060 | |
| }, | |
| { | |
| "epoch": 2.859141791044776, | |
| "grad_norm": 0.22620059875485618, | |
| "learning_rate": 2.6088458880442294e-06, | |
| "loss": 0.2608, | |
| "step": 3065 | |
| }, | |
| { | |
| "epoch": 2.8638059701492535, | |
| "grad_norm": 0.22552058174302478, | |
| "learning_rate": 2.5224602626123015e-06, | |
| "loss": 0.2622, | |
| "step": 3070 | |
| }, | |
| { | |
| "epoch": 2.8684701492537314, | |
| "grad_norm": 0.22266025670903605, | |
| "learning_rate": 2.436074637180373e-06, | |
| "loss": 0.2686, | |
| "step": 3075 | |
| }, | |
| { | |
| "epoch": 2.873134328358209, | |
| "grad_norm": 0.22514304735226173, | |
| "learning_rate": 2.3496890117484453e-06, | |
| "loss": 0.2607, | |
| "step": 3080 | |
| }, | |
| { | |
| "epoch": 2.8777985074626864, | |
| "grad_norm": 0.23340983657432543, | |
| "learning_rate": 2.263303386316517e-06, | |
| "loss": 0.2648, | |
| "step": 3085 | |
| }, | |
| { | |
| "epoch": 2.8824626865671643, | |
| "grad_norm": 0.23238784929746975, | |
| "learning_rate": 2.176917760884589e-06, | |
| "loss": 0.2619, | |
| "step": 3090 | |
| }, | |
| { | |
| "epoch": 2.887126865671642, | |
| "grad_norm": 0.27021352976013885, | |
| "learning_rate": 2.0905321354526607e-06, | |
| "loss": 0.2593, | |
| "step": 3095 | |
| }, | |
| { | |
| "epoch": 2.8917910447761193, | |
| "grad_norm": 0.2242124531556358, | |
| "learning_rate": 2.0041465100207328e-06, | |
| "loss": 0.2714, | |
| "step": 3100 | |
| }, | |
| { | |
| "epoch": 2.896455223880597, | |
| "grad_norm": 0.2260153767022701, | |
| "learning_rate": 1.917760884588805e-06, | |
| "loss": 0.2639, | |
| "step": 3105 | |
| }, | |
| { | |
| "epoch": 2.9011194029850746, | |
| "grad_norm": 0.2298641212059407, | |
| "learning_rate": 1.8313752591568763e-06, | |
| "loss": 0.2646, | |
| "step": 3110 | |
| }, | |
| { | |
| "epoch": 2.905783582089552, | |
| "grad_norm": 0.22934005244360692, | |
| "learning_rate": 1.7449896337249482e-06, | |
| "loss": 0.2691, | |
| "step": 3115 | |
| }, | |
| { | |
| "epoch": 2.91044776119403, | |
| "grad_norm": 0.2217417759392089, | |
| "learning_rate": 1.65860400829302e-06, | |
| "loss": 0.2561, | |
| "step": 3120 | |
| }, | |
| { | |
| "epoch": 2.9151119402985075, | |
| "grad_norm": 0.22169874259375805, | |
| "learning_rate": 1.5722183828610921e-06, | |
| "loss": 0.2649, | |
| "step": 3125 | |
| }, | |
| { | |
| "epoch": 2.919776119402985, | |
| "grad_norm": 0.2235745535270876, | |
| "learning_rate": 1.4858327574291638e-06, | |
| "loss": 0.2628, | |
| "step": 3130 | |
| }, | |
| { | |
| "epoch": 2.924440298507463, | |
| "grad_norm": 0.2239141489078527, | |
| "learning_rate": 1.3994471319972359e-06, | |
| "loss": 0.264, | |
| "step": 3135 | |
| }, | |
| { | |
| "epoch": 2.9291044776119404, | |
| "grad_norm": 0.2249688263655623, | |
| "learning_rate": 1.3130615065653076e-06, | |
| "loss": 0.2602, | |
| "step": 3140 | |
| }, | |
| { | |
| "epoch": 2.933768656716418, | |
| "grad_norm": 0.23769451372144912, | |
| "learning_rate": 1.2266758811333794e-06, | |
| "loss": 0.2541, | |
| "step": 3145 | |
| }, | |
| { | |
| "epoch": 2.9384328358208958, | |
| "grad_norm": 0.23728318174190557, | |
| "learning_rate": 1.1402902557014513e-06, | |
| "loss": 0.2646, | |
| "step": 3150 | |
| }, | |
| { | |
| "epoch": 2.9430970149253732, | |
| "grad_norm": 0.22839884292634693, | |
| "learning_rate": 1.0539046302695232e-06, | |
| "loss": 0.2615, | |
| "step": 3155 | |
| }, | |
| { | |
| "epoch": 2.9477611940298507, | |
| "grad_norm": 0.23139006855930142, | |
| "learning_rate": 9.67519004837595e-07, | |
| "loss": 0.2579, | |
| "step": 3160 | |
| }, | |
| { | |
| "epoch": 2.9524253731343286, | |
| "grad_norm": 0.22216419082170286, | |
| "learning_rate": 8.811333794056669e-07, | |
| "loss": 0.2662, | |
| "step": 3165 | |
| }, | |
| { | |
| "epoch": 2.957089552238806, | |
| "grad_norm": 0.22079723061063125, | |
| "learning_rate": 7.947477539737388e-07, | |
| "loss": 0.2654, | |
| "step": 3170 | |
| }, | |
| { | |
| "epoch": 2.9617537313432836, | |
| "grad_norm": 0.22847718221366062, | |
| "learning_rate": 7.083621285418107e-07, | |
| "loss": 0.2615, | |
| "step": 3175 | |
| }, | |
| { | |
| "epoch": 2.966417910447761, | |
| "grad_norm": 0.23440022893622037, | |
| "learning_rate": 6.219765031098825e-07, | |
| "loss": 0.2719, | |
| "step": 3180 | |
| }, | |
| { | |
| "epoch": 2.971082089552239, | |
| "grad_norm": 0.2305883800669961, | |
| "learning_rate": 5.355908776779544e-07, | |
| "loss": 0.2548, | |
| "step": 3185 | |
| }, | |
| { | |
| "epoch": 2.9757462686567164, | |
| "grad_norm": 0.22417016768930675, | |
| "learning_rate": 4.4920525224602624e-07, | |
| "loss": 0.2571, | |
| "step": 3190 | |
| }, | |
| { | |
| "epoch": 2.980410447761194, | |
| "grad_norm": 0.22648679110330472, | |
| "learning_rate": 3.6281962681409817e-07, | |
| "loss": 0.2633, | |
| "step": 3195 | |
| }, | |
| { | |
| "epoch": 2.9850746268656714, | |
| "grad_norm": 0.23713954900229853, | |
| "learning_rate": 2.7643400138217e-07, | |
| "loss": 0.2665, | |
| "step": 3200 | |
| }, | |
| { | |
| "epoch": 2.9897388059701493, | |
| "grad_norm": 0.22580398448113528, | |
| "learning_rate": 1.900483759502419e-07, | |
| "loss": 0.2612, | |
| "step": 3205 | |
| }, | |
| { | |
| "epoch": 2.9944029850746268, | |
| "grad_norm": 0.23856010297463578, | |
| "learning_rate": 1.0366275051831375e-07, | |
| "loss": 0.2648, | |
| "step": 3210 | |
| }, | |
| { | |
| "epoch": 2.9990671641791042, | |
| "grad_norm": 0.23955494919425113, | |
| "learning_rate": 1.7277125086385625e-08, | |
| "loss": 0.2637, | |
| "step": 3215 | |
| }, | |
| { | |
| "epoch": 3.0, | |
| "step": 3216, | |
| "total_flos": 2.7528656719005614e+18, | |
| "train_loss": 0.3836823864029118, | |
| "train_runtime": 42286.4104, | |
| "train_samples_per_second": 1.216, | |
| "train_steps_per_second": 0.076 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 3216, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.7528656719005614e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
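
For reference, below is a minimal sketch of how one might inspect a trainer state like the one above offline. It assumes the underlying JSON is saved to disk under the conventional name `trainer_state.json` (the file name, output path, and plotting choices here are illustrative assumptions, not part of the log itself). The script filters out the final summary entry, which carries run-level metrics (`train_loss`, `train_runtime`, `total_flos`) rather than per-step values.

```python
import json

# Load the Hugging Face Trainer state dumped alongside a checkpoint.
# "trainer_state.json" is the conventional file name; adjust the path as needed.
with open("trainer_state.json") as f:
    state = json.load(f)

# Every log_history entry except the final run summary carries per-step values.
logs = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in logs]
losses = [e["loss"] for e in logs]
lrs = [e["learning_rate"] for e in logs]

print(f"logged points: {len(logs)}")
print(f"first/last logged loss: {losses[0]:.4f} -> {losses[-1]:.4f}")

# The last element of log_history is the run summary, not a per-step log.
summary = state["log_history"][-1]
if "train_loss" in summary:
    print(f"mean training loss: {summary['train_loss']:.4f}")

# Optional: plot the curves if matplotlib is installed.
try:
    import matplotlib.pyplot as plt

    fig, (ax1, ax2) = plt.subplots(2, 1, sharex=True, figsize=(8, 6))
    ax1.plot(steps, losses)
    ax1.set_ylabel("loss")
    ax2.plot(steps, lrs)
    ax2.set_ylabel("learning rate")
    ax2.set_xlabel("step")
    fig.tight_layout()
    fig.savefig("training_curves.png")
except ImportError:
    pass  # plotting is optional; the printed summary above still works
```

In this log, for example, such a script would report the summary entry's mean `train_loss` of roughly 0.384 over 3216 steps, with the per-step loss settling around 0.26 by the end of epoch 3.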