{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 6.955414012738854,
  "eval_steps": 500,
  "global_step": 364,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01910828025477707,
      "grad_norm": 16.207577936529294,
      "learning_rate": 5.405405405405406e-07,
      "loss": 1.5883,
      "step": 1
    },
    {
      "epoch": 0.03821656050955414,
      "grad_norm": 16.230231151699186,
      "learning_rate": 1.0810810810810812e-06,
      "loss": 1.5507,
      "step": 2
    },
    {
      "epoch": 0.05732484076433121,
      "grad_norm": 16.762660353394043,
      "learning_rate": 1.6216216216216219e-06,
      "loss": 1.5822,
      "step": 3
    },
    {
      "epoch": 0.07643312101910828,
      "grad_norm": 15.762795594875254,
      "learning_rate": 2.1621621621621623e-06,
      "loss": 1.5375,
      "step": 4
    },
    {
      "epoch": 0.09554140127388536,
      "grad_norm": 14.06711681784781,
      "learning_rate": 2.702702702702703e-06,
      "loss": 1.4839,
      "step": 5
    },
    {
      "epoch": 0.11464968152866242,
      "grad_norm": 12.490981158011854,
      "learning_rate": 3.2432432432432437e-06,
      "loss": 1.4123,
      "step": 6
    },
    {
      "epoch": 0.1337579617834395,
      "grad_norm": 8.777964378285029,
      "learning_rate": 3.7837837837837844e-06,
      "loss": 1.2828,
      "step": 7
    },
    {
      "epoch": 0.15286624203821655,
      "grad_norm": 7.7256263016456534,
      "learning_rate": 4.324324324324325e-06,
      "loss": 1.2497,
      "step": 8
    },
    {
      "epoch": 0.17197452229299362,
      "grad_norm": 6.685288662005224,
      "learning_rate": 4.864864864864866e-06,
      "loss": 1.2233,
      "step": 9
    },
    {
      "epoch": 0.1910828025477707,
      "grad_norm": 7.331907189555277,
      "learning_rate": 5.405405405405406e-06,
      "loss": 1.1612,
      "step": 10
    },
    {
      "epoch": 0.21019108280254778,
      "grad_norm": 5.997175055735039,
      "learning_rate": 5.945945945945947e-06,
      "loss": 1.1161,
      "step": 11
    },
    {
      "epoch": 0.22929936305732485,
      "grad_norm": 4.571983748158492,
      "learning_rate": 6.486486486486487e-06,
      "loss": 1.0614,
      "step": 12
    },
    {
      "epoch": 0.2484076433121019,
      "grad_norm": 4.562958103868048,
      "learning_rate": 7.027027027027028e-06,
      "loss": 1.0143,
      "step": 13
    },
    {
      "epoch": 0.267515923566879,
      "grad_norm": 5.499482262122252,
      "learning_rate": 7.567567567567569e-06,
      "loss": 0.9787,
      "step": 14
    },
    {
      "epoch": 0.28662420382165604,
      "grad_norm": 5.124069500758053,
      "learning_rate": 8.108108108108109e-06,
      "loss": 0.9588,
      "step": 15
    },
    {
      "epoch": 0.3057324840764331,
      "grad_norm": 4.148025737238825,
      "learning_rate": 8.64864864864865e-06,
      "loss": 0.9381,
      "step": 16
    },
    {
      "epoch": 0.3248407643312102,
      "grad_norm": 2.7291692689575813,
      "learning_rate": 9.189189189189191e-06,
      "loss": 0.9492,
      "step": 17
    },
    {
      "epoch": 0.34394904458598724,
      "grad_norm": 2.776890761322164,
      "learning_rate": 9.729729729729732e-06,
      "loss": 0.9234,
      "step": 18
    },
    {
      "epoch": 0.3630573248407643,
      "grad_norm": 3.0168951061622344,
      "learning_rate": 1.027027027027027e-05,
      "loss": 0.921,
      "step": 19
    },
    {
      "epoch": 0.3821656050955414,
      "grad_norm": 3.306312456821951,
      "learning_rate": 1.0810810810810812e-05,
      "loss": 0.8969,
      "step": 20
    },
    {
      "epoch": 0.4012738853503185,
      "grad_norm": 2.5792665666577834,
      "learning_rate": 1.1351351351351352e-05,
      "loss": 0.8863,
      "step": 21
    },
    {
      "epoch": 0.42038216560509556,
      "grad_norm": 1.9005562538642717,
      "learning_rate": 1.1891891891891894e-05,
      "loss": 0.8669,
      "step": 22
    },
    {
      "epoch": 0.4394904458598726,
      "grad_norm": 2.251284284438843,
      "learning_rate": 1.2432432432432433e-05,
      "loss": 0.8793,
      "step": 23
    },
    {
      "epoch": 0.4585987261146497,
      "grad_norm": 2.2277623219616434,
      "learning_rate": 1.2972972972972975e-05,
      "loss": 0.8425,
      "step": 24
    },
    {
      "epoch": 0.47770700636942676,
      "grad_norm": 2.141589871208081,
      "learning_rate": 1.3513513513513515e-05,
      "loss": 0.8491,
      "step": 25
    },
    {
      "epoch": 0.4968152866242038,
      "grad_norm": 2.027931686119723,
      "learning_rate": 1.4054054054054055e-05,
      "loss": 0.8538,
      "step": 26
    },
    {
      "epoch": 0.5159235668789809,
      "grad_norm": 1.8301112334746343,
      "learning_rate": 1.4594594594594596e-05,
      "loss": 0.854,
      "step": 27
    },
    {
      "epoch": 0.535031847133758,
      "grad_norm": 1.7691120873443633,
      "learning_rate": 1.5135135135135138e-05,
      "loss": 0.8456,
      "step": 28
    },
    {
      "epoch": 0.554140127388535,
      "grad_norm": 1.6866307946704169,
      "learning_rate": 1.5675675675675676e-05,
      "loss": 0.8466,
      "step": 29
    },
    {
      "epoch": 0.5732484076433121,
      "grad_norm": 1.6972756420973574,
      "learning_rate": 1.6216216216216218e-05,
      "loss": 0.8436,
      "step": 30
    },
    {
      "epoch": 0.5923566878980892,
      "grad_norm": 1.6432569739538123,
      "learning_rate": 1.6756756756756757e-05,
      "loss": 0.7894,
      "step": 31
    },
    {
      "epoch": 0.6114649681528662,
      "grad_norm": 1.854670403733654,
      "learning_rate": 1.72972972972973e-05,
      "loss": 0.8254,
      "step": 32
    },
    {
      "epoch": 0.6305732484076433,
      "grad_norm": 1.64124439180178,
      "learning_rate": 1.783783783783784e-05,
      "loss": 0.8176,
      "step": 33
    },
    {
      "epoch": 0.6496815286624203,
      "grad_norm": 1.7261180813219317,
      "learning_rate": 1.8378378378378383e-05,
      "loss": 0.8253,
      "step": 34
    },
    {
      "epoch": 0.6687898089171974,
      "grad_norm": 1.619845522166602,
      "learning_rate": 1.891891891891892e-05,
      "loss": 0.815,
      "step": 35
    },
    {
      "epoch": 0.6878980891719745,
      "grad_norm": 1.599927606949769,
      "learning_rate": 1.9459459459459463e-05,
      "loss": 0.7872,
      "step": 36
    },
    {
      "epoch": 0.7070063694267515,
      "grad_norm": 1.7922176299227872,
      "learning_rate": 2e-05,
      "loss": 0.7935,
      "step": 37
    },
    {
      "epoch": 0.7261146496815286,
      "grad_norm": 1.536007138312651,
      "learning_rate": 1.9999538500851633e-05,
      "loss": 0.8142,
      "step": 38
    },
    {
      "epoch": 0.7452229299363057,
      "grad_norm": 1.6613731410993215,
      "learning_rate": 1.9998154046002822e-05,
      "loss": 0.822,
      "step": 39
    },
    {
      "epoch": 0.7643312101910829,
      "grad_norm": 2.2532855017558195,
      "learning_rate": 1.9995846763238514e-05,
      "loss": 0.8162,
      "step": 40
    },
    {
      "epoch": 0.7834394904458599,
      "grad_norm": 2.074212855804817,
      "learning_rate": 1.9992616865520515e-05,
      "loss": 0.7803,
      "step": 41
    },
    {
      "epoch": 0.802547770700637,
      "grad_norm": 1.5058548913559864,
      "learning_rate": 1.9988464650967834e-05,
      "loss": 0.7997,
      "step": 42
    },
    {
      "epoch": 0.821656050955414,
      "grad_norm": 1.8080159889460914,
      "learning_rate": 1.9983390502829168e-05,
      "loss": 0.8126,
      "step": 43
    },
    {
      "epoch": 0.8407643312101911,
      "grad_norm": 1.618198273307259,
      "learning_rate": 1.9977394889447526e-05,
      "loss": 0.7781,
      "step": 44
    },
    {
      "epoch": 0.8598726114649682,
      "grad_norm": 1.4264557820424202,
      "learning_rate": 1.9970478364217e-05,
      "loss": 0.8137,
      "step": 45
    },
    {
      "epoch": 0.8789808917197452,
      "grad_norm": 1.6470566590256357,
      "learning_rate": 1.9962641565531694e-05,
      "loss": 0.8133,
      "step": 46
    },
    {
      "epoch": 0.8980891719745223,
      "grad_norm": 1.447847475953182,
      "learning_rate": 1.9953885216726788e-05,
      "loss": 0.8048,
      "step": 47
    },
    {
      "epoch": 0.9171974522292994,
      "grad_norm": 1.5787974396014408,
      "learning_rate": 1.994421012601179e-05,
      "loss": 0.7991,
      "step": 48
    },
    {
      "epoch": 0.9363057324840764,
      "grad_norm": 1.5103267139592034,
      "learning_rate": 1.9933617186395917e-05,
      "loss": 0.7991,
      "step": 49
    },
    {
      "epoch": 0.9554140127388535,
      "grad_norm": 1.3923044521351269,
      "learning_rate": 1.99221073756057e-05,
      "loss": 0.8025,
      "step": 50
    },
    {
      "epoch": 0.9745222929936306,
      "grad_norm": 1.467958208632134,
      "learning_rate": 1.990968175599471e-05,
      "loss": 0.8108,
      "step": 51
    },
    {
      "epoch": 0.9936305732484076,
      "grad_norm": 1.4703780581780375,
      "learning_rate": 1.9896341474445526e-05,
      "loss": 0.7862,
      "step": 52
    },
    {
      "epoch": 1.0127388535031847,
      "grad_norm": 1.3469297799342106,
      "learning_rate": 1.9882087762263857e-05,
      "loss": 0.703,
      "step": 53
    },
    {
      "epoch": 1.0318471337579618,
      "grad_norm": 1.5042329509547805,
      "learning_rate": 1.9866921935064907e-05,
      "loss": 0.6626,
      "step": 54
    },
    {
      "epoch": 1.0509554140127388,
      "grad_norm": 1.393720796250388,
      "learning_rate": 1.985084539265195e-05,
      "loss": 0.6451,
      "step": 55
    },
    {
      "epoch": 1.070063694267516,
      "grad_norm": 1.4037559400641226,
      "learning_rate": 1.983385961888711e-05,
      "loss": 0.5885,
      "step": 56
    },
    {
      "epoch": 1.089171974522293,
      "grad_norm": 1.3525097928344776,
      "learning_rate": 1.9815966181554412e-05,
      "loss": 0.5928,
      "step": 57
    },
    {
      "epoch": 1.10828025477707,
      "grad_norm": 1.4663292110903436,
      "learning_rate": 1.9797166732215078e-05,
      "loss": 0.5763,
      "step": 58
    },
    {
      "epoch": 1.127388535031847,
      "grad_norm": 1.600751836606609,
      "learning_rate": 1.977746300605507e-05,
      "loss": 0.5754,
      "step": 59
    },
    {
      "epoch": 1.1464968152866242,
      "grad_norm": 1.5837199956944725,
      "learning_rate": 1.975685682172497e-05,
      "loss": 0.5921,
      "step": 60
    },
    {
      "epoch": 1.1656050955414012,
      "grad_norm": 1.5553081218046214,
      "learning_rate": 1.973535008117207e-05,
      "loss": 0.6176,
      "step": 61
    },
    {
      "epoch": 1.1847133757961783,
      "grad_norm": 1.370254316271248,
      "learning_rate": 1.9712944769464864e-05,
      "loss": 0.5795,
      "step": 62
    },
    {
      "epoch": 1.2038216560509554,
      "grad_norm": 1.5128484926324381,
      "learning_rate": 1.9689642954609808e-05,
      "loss": 0.6265,
      "step": 63
    },
    {
      "epoch": 1.2229299363057324,
      "grad_norm": 1.6301580261980453,
      "learning_rate": 1.9665446787360444e-05,
      "loss": 0.5898,
      "step": 64
    },
    {
      "epoch": 1.2420382165605095,
      "grad_norm": 1.367871159170793,
      "learning_rate": 1.9640358501018885e-05,
      "loss": 0.5458,
      "step": 65
    },
    {
      "epoch": 1.2611464968152866,
      "grad_norm": 1.6545804449139618,
      "learning_rate": 1.9614380411229693e-05,
      "loss": 0.5681,
      "step": 66
    },
    {
      "epoch": 1.2802547770700636,
      "grad_norm": 1.4134557013101863,
      "learning_rate": 1.9587514915766124e-05,
      "loss": 0.5665,
      "step": 67
    },
    {
      "epoch": 1.2993630573248407,
      "grad_norm": 1.762211740513235,
      "learning_rate": 1.9559764494308838e-05,
      "loss": 0.5596,
      "step": 68
    },
    {
      "epoch": 1.3184713375796178,
      "grad_norm": 1.69069319727097,
      "learning_rate": 1.9531131708217005e-05,
      "loss": 0.5844,
      "step": 69
    },
    {
      "epoch": 1.3375796178343948,
      "grad_norm": 1.3995395125931538,
      "learning_rate": 1.950161920029191e-05,
      "loss": 0.5774,
      "step": 70
    },
    {
      "epoch": 1.356687898089172,
      "grad_norm": 1.7282434635957937,
      "learning_rate": 1.9471229694533003e-05,
      "loss": 0.5738,
      "step": 71
    },
    {
      "epoch": 1.3757961783439492,
      "grad_norm": 1.2784959217059895,
      "learning_rate": 1.943996599588649e-05,
      "loss": 0.59,
      "step": 72
    },
    {
      "epoch": 1.394904458598726,
      "grad_norm": 1.2847919850285983,
      "learning_rate": 1.940783098998643e-05,
      "loss": 0.6056,
      "step": 73
    },
    {
      "epoch": 1.4140127388535033,
      "grad_norm": 1.347161734736366,
      "learning_rate": 1.93748276428884e-05,
      "loss": 0.5491,
      "step": 74
    },
    {
      "epoch": 1.4331210191082802,
      "grad_norm": 1.3959400214553517,
      "learning_rate": 1.9340959000795707e-05,
      "loss": 0.5982,
      "step": 75
    },
    {
      "epoch": 1.4522292993630574,
      "grad_norm": 1.30264128607857,
      "learning_rate": 1.9306228189778255e-05,
      "loss": 0.5659,
      "step": 76
    },
    {
      "epoch": 1.4713375796178343,
      "grad_norm": 1.3854931038782616,
      "learning_rate": 1.927063841548398e-05,
      "loss": 0.5501,
      "step": 77
    },
    {
      "epoch": 1.4904458598726116,
      "grad_norm": 1.342352789440788,
      "learning_rate": 1.9234192962842996e-05,
      "loss": 0.5704,
      "step": 78
    },
    {
      "epoch": 1.5095541401273884,
      "grad_norm": 1.4898768421021498,
      "learning_rate": 1.9196895195764363e-05,
      "loss": 0.6231,
      "step": 79
    },
    {
      "epoch": 1.5286624203821657,
      "grad_norm": 1.3100235123413457,
      "learning_rate": 1.9158748556825637e-05,
      "loss": 0.5737,
      "step": 80
    },
    {
      "epoch": 1.5477707006369426,
      "grad_norm": 1.3140747899036214,
      "learning_rate": 1.9119756566955092e-05,
      "loss": 0.5758,
      "step": 81
    },
    {
      "epoch": 1.5668789808917198,
      "grad_norm": 1.1457878792947769,
      "learning_rate": 1.907992282510675e-05,
      "loss": 0.6008,
      "step": 82
    },
    {
      "epoch": 1.5859872611464967,
      "grad_norm": 1.2806483267217852,
      "learning_rate": 1.90392510079282e-05,
      "loss": 0.5783,
      "step": 83
    },
    {
      "epoch": 1.605095541401274,
      "grad_norm": 1.339094154523494,
      "learning_rate": 1.8997744869421248e-05,
      "loss": 0.5787,
      "step": 84
    },
    {
      "epoch": 1.6242038216560508,
      "grad_norm": 1.4804989945217308,
      "learning_rate": 1.8955408240595396e-05,
      "loss": 0.6337,
      "step": 85
    },
    {
      "epoch": 1.643312101910828,
      "grad_norm": 1.3722576915531601,
      "learning_rate": 1.891224502911428e-05,
      "loss": 0.5819,
      "step": 86
    },
    {
      "epoch": 1.662420382165605,
      "grad_norm": 1.303374906577917,
      "learning_rate": 1.886825921893497e-05,
      "loss": 0.564,
      "step": 87
    },
    {
      "epoch": 1.6815286624203822,
      "grad_norm": 1.3557245031901457,
      "learning_rate": 1.8823454869940243e-05,
      "loss": 0.5912,
      "step": 88
    },
    {
      "epoch": 1.700636942675159,
      "grad_norm": 1.3659905279941662,
      "learning_rate": 1.8777836117563894e-05,
      "loss": 0.5729,
      "step": 89
    },
    {
      "epoch": 1.7197452229299364,
      "grad_norm": 1.2228884554693173,
      "learning_rate": 1.873140717240899e-05,
      "loss": 0.6001,
      "step": 90
    },
    {
      "epoch": 1.7388535031847132,
      "grad_norm": 1.2696589961480313,
      "learning_rate": 1.8684172319859258e-05,
      "loss": 0.6095,
      "step": 91
    },
    {
      "epoch": 1.7579617834394905,
      "grad_norm": 1.288210734538388,
      "learning_rate": 1.863613591968355e-05,
      "loss": 0.6119,
      "step": 92
    },
    {
      "epoch": 1.7770700636942676,
      "grad_norm": 1.4101820674772771,
      "learning_rate": 1.858730240563342e-05,
      "loss": 0.5596,
      "step": 93
    },
    {
      "epoch": 1.7961783439490446,
      "grad_norm": 1.4868704394795493,
      "learning_rate": 1.8537676285033886e-05,
      "loss": 0.6112,
      "step": 94
    },
    {
      "epoch": 1.8152866242038217,
      "grad_norm": 1.2615707287217766,
      "learning_rate": 1.848726213836744e-05,
      "loss": 0.5481,
      "step": 95
    },
    {
      "epoch": 1.8343949044585988,
      "grad_norm": 1.4120088218938998,
      "learning_rate": 1.8436064618851225e-05,
      "loss": 0.574,
      "step": 96
    },
    {
      "epoch": 1.8535031847133758,
      "grad_norm": 1.3872342324809461,
      "learning_rate": 1.838408845200758e-05,
      "loss": 0.5501,
      "step": 97
    },
    {
      "epoch": 1.872611464968153,
      "grad_norm": 1.265935149100833,
      "learning_rate": 1.8331338435227838e-05,
      "loss": 0.5783,
      "step": 98
    },
    {
      "epoch": 1.89171974522293,
      "grad_norm": 1.3615136971359445,
      "learning_rate": 1.8277819437329577e-05,
      "loss": 0.6018,
      "step": 99
    },
    {
      "epoch": 1.910828025477707,
      "grad_norm": 1.2831355214080522,
      "learning_rate": 1.8223536398107177e-05,
      "loss": 0.5573,
      "step": 100
    },
    {
      "epoch": 1.929936305732484,
      "grad_norm": 1.414983019094222,
      "learning_rate": 1.8168494327875918e-05,
      "loss": 0.5748,
      "step": 101
    },
    {
      "epoch": 1.9490445859872612,
      "grad_norm": 1.2644989588144375,
      "learning_rate": 1.8112698307009506e-05,
      "loss": 0.5961,
      "step": 102
    },
    {
      "epoch": 1.9681528662420382,
      "grad_norm": 1.3973089183381058,
      "learning_rate": 1.8056153485471167e-05,
      "loss": 0.6006,
      "step": 103
    },
    {
      "epoch": 1.9872611464968153,
      "grad_norm": 1.245233746740617,
      "learning_rate": 1.799886508233829e-05,
      "loss": 0.5827,
      "step": 104
    },
    {
      "epoch": 2.0063694267515926,
      "grad_norm": 1.3953137880450943,
      "learning_rate": 1.7940838385320732e-05,
      "loss": 0.4899,
      "step": 105
    },
    {
      "epoch": 2.0254777070063694,
      "grad_norm": 1.2422344782506196,
      "learning_rate": 1.788207875027274e-05,
      "loss": 0.3823,
      "step": 106
    },
    {
      "epoch": 2.0445859872611467,
      "grad_norm": 1.5194218407821385,
      "learning_rate": 1.7822591600698632e-05,
      "loss": 0.3707,
      "step": 107
    },
    {
      "epoch": 2.0636942675159236,
      "grad_norm": 1.3858369380237823,
      "learning_rate": 1.776238242725217e-05,
      "loss": 0.3515,
      "step": 108
    },
    {
      "epoch": 2.082802547770701,
      "grad_norm": 1.2040084839099645,
      "learning_rate": 1.7701456787229805e-05,
      "loss": 0.3351,
      "step": 109
    },
    {
      "epoch": 2.1019108280254777,
      "grad_norm": 1.0813271075846485,
      "learning_rate": 1.7639820304057745e-05,
      "loss": 0.3682,
      "step": 110
    },
    {
      "epoch": 2.121019108280255,
      "grad_norm": 1.1941413633816864,
      "learning_rate": 1.7577478666772886e-05,
      "loss": 0.347,
      "step": 111
    },
    {
      "epoch": 2.140127388535032,
      "grad_norm": 1.2351647393004164,
      "learning_rate": 1.751443762949772e-05,
      "loss": 0.306,
      "step": 112
    },
    {
      "epoch": 2.159235668789809,
      "grad_norm": 1.1577600566794273,
      "learning_rate": 1.7450703010909263e-05,
      "loss": 0.3544,
      "step": 113
    },
    {
      "epoch": 2.178343949044586,
      "grad_norm": 1.139653261043234,
      "learning_rate": 1.738628069370195e-05,
      "loss": 0.3186,
      "step": 114
    },
    {
      "epoch": 2.1974522292993632,
      "grad_norm": 1.1660921196049734,
      "learning_rate": 1.732117662404469e-05,
      "loss": 0.3264,
      "step": 115
    },
    {
      "epoch": 2.21656050955414,
      "grad_norm": 1.2411659528086756,
      "learning_rate": 1.7255396811032014e-05,
      "loss": 0.3757,
      "step": 116
    },
    {
      "epoch": 2.2356687898089174,
      "grad_norm": 1.0805464668608402,
      "learning_rate": 1.718894732612947e-05,
      "loss": 0.3388,
      "step": 117
    },
    {
      "epoch": 2.254777070063694,
      "grad_norm": 1.077882198033458,
      "learning_rate": 1.712183430261319e-05,
      "loss": 0.3461,
      "step": 118
    },
    {
      "epoch": 2.2738853503184715,
      "grad_norm": 1.0679311923670507,
      "learning_rate": 1.7054063935003813e-05,
      "loss": 0.343,
      "step": 119
    },
    {
      "epoch": 2.2929936305732483,
      "grad_norm": 1.1845581622661834,
      "learning_rate": 1.698564247849473e-05,
      "loss": 0.3578,
      "step": 120
    },
    {
      "epoch": 2.3121019108280256,
      "grad_norm": 1.1549978407787982,
      "learning_rate": 1.691657624837472e-05,
      "loss": 0.3338,
      "step": 121
    },
    {
      "epoch": 2.3312101910828025,
      "grad_norm": 1.1992184381026434,
      "learning_rate": 1.684687161944506e-05,
      "loss": 0.3513,
      "step": 122
    },
    {
      "epoch": 2.3503184713375798,
      "grad_norm": 1.1870994342333885,
      "learning_rate": 1.677653502543113e-05,
      "loss": 0.3774,
      "step": 123
    },
    {
      "epoch": 2.3694267515923566,
      "grad_norm": 1.1727869438510825,
      "learning_rate": 1.6705572958388576e-05,
      "loss": 0.366,
      "step": 124
    },
    {
      "epoch": 2.388535031847134,
      "grad_norm": 1.137731691236232,
      "learning_rate": 1.6633991968104095e-05,
      "loss": 0.3268,
      "step": 125
    },
    {
      "epoch": 2.4076433121019107,
      "grad_norm": 1.061973314666275,
      "learning_rate": 1.6561798661490904e-05,
      "loss": 0.3329,
      "step": 126
    },
    {
      "epoch": 2.426751592356688,
      "grad_norm": 1.084312908045151,
      "learning_rate": 1.6488999701978905e-05,
      "loss": 0.367,
      "step": 127
    },
    {
      "epoch": 2.445859872611465,
      "grad_norm": 1.14305018626842,
      "learning_rate": 1.6415601808899658e-05,
      "loss": 0.2974,
      "step": 128
    },
    {
      "epoch": 2.464968152866242,
      "grad_norm": 1.157749585944929,
      "learning_rate": 1.63416117568662e-05,
      "loss": 0.3347,
      "step": 129
    },
    {
      "epoch": 2.484076433121019,
      "grad_norm": 1.0864100134349601,
      "learning_rate": 1.6267036375147728e-05,
      "loss": 0.336,
      "step": 130
    },
    {
      "epoch": 2.5031847133757963,
      "grad_norm": 1.140281558684133,
      "learning_rate": 1.619188254703927e-05,
      "loss": 0.3083,
      "step": 131
    },
    {
      "epoch": 2.522292993630573,
      "grad_norm": 1.0816276956540412,
      "learning_rate": 1.6116157209226356e-05,
      "loss": 0.3366,
      "step": 132
    },
    {
      "epoch": 2.5414012738853504,
      "grad_norm": 1.0325317959119171,
      "learning_rate": 1.6039867351144778e-05,
      "loss": 0.3719,
      "step": 133
    },
    {
      "epoch": 2.5605095541401273,
      "grad_norm": 1.210569091837068,
      "learning_rate": 1.5963020014335437e-05,
      "loss": 0.3283,
      "step": 134
    },
    {
      "epoch": 2.5796178343949046,
      "grad_norm": 1.1318710036112303,
      "learning_rate": 1.588562229179443e-05,
      "loss": 0.3597,
      "step": 135
    },
    {
      "epoch": 2.5987261146496814,
      "grad_norm": 1.209379497308929,
      "learning_rate": 1.5807681327318372e-05,
      "loss": 0.361,
      "step": 136
    },
    {
      "epoch": 2.6178343949044587,
      "grad_norm": 1.1460305712676861,
      "learning_rate": 1.5729204314845002e-05,
      "loss": 0.3604,
      "step": 137
    },
    {
      "epoch": 2.6369426751592355,
      "grad_norm": 1.1732165904010456,
      "learning_rate": 1.56501984977892e-05,
      "loss": 0.3523,
      "step": 138
    },
    {
      "epoch": 2.656050955414013,
      "grad_norm": 1.1621107710443261,
      "learning_rate": 1.557067116837444e-05,
      "loss": 0.299,
      "step": 139
    },
    {
      "epoch": 2.6751592356687897,
      "grad_norm": 1.2164346262777883,
      "learning_rate": 1.5490629666959668e-05,
      "loss": 0.3457,
      "step": 140
    },
    {
      "epoch": 2.694267515923567,
      "grad_norm": 1.1823153475333987,
      "learning_rate": 1.541008138136183e-05,
      "loss": 0.3444,
      "step": 141
    },
    {
      "epoch": 2.713375796178344,
      "grad_norm": 1.1174073949760772,
      "learning_rate": 1.5329033746173975e-05,
      "loss": 0.3729,
      "step": 142
    },
    {
      "epoch": 2.732484076433121,
      "grad_norm": 1.1314239511081268,
      "learning_rate": 1.5247494242079024e-05,
      "loss": 0.326,
      "step": 143
    },
    {
      "epoch": 2.7515923566878984,
      "grad_norm": 1.1174177994358805,
      "learning_rate": 1.5165470395159314e-05,
      "loss": 0.3256,
      "step": 144
    },
    {
      "epoch": 2.770700636942675,
      "grad_norm": 1.219689897297813,
      "learning_rate": 1.5082969776201948e-05,
      "loss": 0.3161,
      "step": 145
    },
    {
      "epoch": 2.789808917197452,
      "grad_norm": 1.0765986737838706,
      "learning_rate": 1.5000000000000002e-05,
      "loss": 0.3379,
      "step": 146
    },
    {
      "epoch": 2.8089171974522293,
      "grad_norm": 1.1077246642654066,
      "learning_rate": 1.4916568724649688e-05,
      "loss": 0.3501,
      "step": 147
    },
    {
      "epoch": 2.8280254777070066,
      "grad_norm": 1.2638295093687193,
      "learning_rate": 1.483268365084351e-05,
      "loss": 0.3216,
      "step": 148
    },
    {
      "epoch": 2.8471337579617835,
      "grad_norm": 1.125839857498556,
      "learning_rate": 1.4748352521159492e-05,
      "loss": 0.3653,
      "step": 149
    },
    {
      "epoch": 2.8662420382165603,
      "grad_norm": 1.081095422354445,
      "learning_rate": 1.466358311934654e-05,
      "loss": 0.3613,
      "step": 150
    },
    {
      "epoch": 2.8853503184713376,
      "grad_norm": 1.2431757494407534,
      "learning_rate": 1.4578383269606004e-05,
      "loss": 0.3194,
      "step": 151
    },
    {
      "epoch": 2.904458598726115,
      "grad_norm": 1.0554692013448208,
      "learning_rate": 1.4492760835869504e-05,
      "loss": 0.3425,
      "step": 152
    },
    {
      "epoch": 2.9235668789808917,
      "grad_norm": 1.0106608052986603,
      "learning_rate": 1.4406723721073088e-05,
      "loss": 0.3483,
      "step": 153
    },
    {
      "epoch": 2.9426751592356686,
      "grad_norm": 1.0489479689551857,
      "learning_rate": 1.4320279866427798e-05,
      "loss": 0.3504,
      "step": 154
    },
    {
      "epoch": 2.961783439490446,
      "grad_norm": 1.0761618786744391,
      "learning_rate": 1.4233437250686695e-05,
      "loss": 0.3707,
      "step": 155
    },
    {
      "epoch": 2.980891719745223,
      "grad_norm": 1.157018379192101,
      "learning_rate": 1.4146203889408418e-05,
      "loss": 0.3321,
      "step": 156
    },
    {
      "epoch": 3.0,
      "grad_norm": 1.0533667581883366,
      "learning_rate": 1.4058587834217356e-05,
      "loss": 0.3053,
      "step": 157
    },
    {
      "epoch": 3.0191082802547773,
      "grad_norm": 1.0547313417924546,
      "learning_rate": 1.3970597172060482e-05,
      "loss": 0.2188,
      "step": 158
    },
    {
      "epoch": 3.038216560509554,
      "grad_norm": 0.930934263644112,
      "learning_rate": 1.3882240024460928e-05,
      "loss": 0.1843,
      "step": 159
    },
    {
      "epoch": 3.0573248407643314,
      "grad_norm": 0.983478027637516,
      "learning_rate": 1.3793524546768358e-05,
      "loss": 0.201,
      "step": 160
    },
    {
      "epoch": 3.0764331210191083,
      "grad_norm": 1.2407814520098055,
      "learning_rate": 1.3704458927406261e-05,
      "loss": 0.2039,
      "step": 161
    },
    {
      "epoch": 3.0955414012738856,
      "grad_norm": 1.2647200327067536,
      "learning_rate": 1.3615051387116131e-05,
      "loss": 0.2,
      "step": 162
    },
    {
      "epoch": 3.1146496815286624,
      "grad_norm": 0.9644590953094815,
      "learning_rate": 1.3525310178198707e-05,
      "loss": 0.2408,
      "step": 163
    },
    {
      "epoch": 3.1337579617834397,
      "grad_norm": 0.9190602503630407,
      "learning_rate": 1.3435243583752294e-05,
      "loss": 0.2229,
      "step": 164
    },
    {
      "epoch": 3.1528662420382165,
      "grad_norm": 0.9243907234468131,
      "learning_rate": 1.3344859916908206e-05,
      "loss": 0.2032,
      "step": 165
    },
    {
      "epoch": 3.171974522292994,
      "grad_norm": 0.9442060532851655,
      "learning_rate": 1.325416752006351e-05,
      "loss": 0.2025,
      "step": 166
    },
    {
      "epoch": 3.1910828025477707,
      "grad_norm": 0.963218763581881,
      "learning_rate": 1.3163174764110985e-05,
      "loss": 0.1829,
      "step": 167
    },
    {
      "epoch": 3.210191082802548,
      "grad_norm": 0.9071743366026043,
      "learning_rate": 1.3071890047666498e-05,
      "loss": 0.227,
      "step": 168
    },
    {
      "epoch": 3.229299363057325,
      "grad_norm": 1.0315900230652744,
      "learning_rate": 1.2980321796293838e-05,
      "loss": 0.2156,
      "step": 169
    },
    {
      "epoch": 3.248407643312102,
      "grad_norm": 1.0187692244394628,
      "learning_rate": 1.288847846172701e-05,
      "loss": 0.2216,
      "step": 170
    },
    {
      "epoch": 3.267515923566879,
      "grad_norm": 0.9225592237396303,
      "learning_rate": 1.2796368521090143e-05,
      "loss": 0.2107,
      "step": 171
    },
    {
      "epoch": 3.286624203821656,
      "grad_norm": 0.8748399197222058,
      "learning_rate": 1.2704000476115079e-05,
      "loss": 0.2395,
      "step": 172
    },
    {
      "epoch": 3.305732484076433,
      "grad_norm": 0.8746681159163586,
      "learning_rate": 1.2611382852356632e-05,
      "loss": 0.2083,
      "step": 173
    },
    {
      "epoch": 3.3248407643312103,
      "grad_norm": 0.8888551584950397,
      "learning_rate": 1.2518524198405699e-05,
      "loss": 0.2076,
      "step": 174
    },
    {
      "epoch": 3.343949044585987,
      "grad_norm": 0.9463638003097762,
      "learning_rate": 1.2425433085100224e-05,
      "loss": 0.1926,
      "step": 175
    },
    {
      "epoch": 3.3630573248407645,
      "grad_norm": 0.9111688438287611,
      "learning_rate": 1.233211810473411e-05,
      "loss": 0.2096,
      "step": 176
    },
    {
      "epoch": 3.3821656050955413,
      "grad_norm": 0.923419758792075,
      "learning_rate": 1.2238587870264152e-05,
      "loss": 0.1926,
      "step": 177
    },
    {
      "epoch": 3.4012738853503186,
      "grad_norm": 0.946486281258778,
      "learning_rate": 1.2144851014515055e-05,
      "loss": 0.194,
      "step": 178
    },
    {
      "epoch": 3.4203821656050954,
      "grad_norm": 0.892594471211745,
      "learning_rate": 1.2050916189382646e-05,
      "loss": 0.2237,
      "step": 179
    },
    {
      "epoch": 3.4394904458598727,
      "grad_norm": 0.9130234965474789,
      "learning_rate": 1.1956792065035281e-05,
      "loss": 0.1652,
      "step": 180
    },
    {
      "epoch": 3.4585987261146496,
      "grad_norm": 0.8769940165131497,
      "learning_rate": 1.1862487329113606e-05,
      "loss": 0.239,
      "step": 181
    },
    {
      "epoch": 3.477707006369427,
      "grad_norm": 0.9228491362173967,
      "learning_rate": 1.1768010685928686e-05,
      "loss": 0.1775,
      "step": 182
    },
    {
      "epoch": 3.4968152866242037,
      "grad_norm": 0.8281719893074526,
      "learning_rate": 1.1673370855658592e-05,
      "loss": 0.2222,
      "step": 183
    },
    {
      "epoch": 3.515923566878981,
      "grad_norm": 0.8788653643194168,
      "learning_rate": 1.1578576573543541e-05,
      "loss": 0.1812,
      "step": 184
    },
    {
      "epoch": 3.535031847133758,
      "grad_norm": 0.869849366163653,
      "learning_rate": 1.1483636589079627e-05,
      "loss": 0.1933,
      "step": 185
    },
    {
      "epoch": 3.554140127388535,
      "grad_norm": 0.8998889330605255,
      "learning_rate": 1.1388559665211241e-05,
      "loss": 0.2232,
      "step": 186
    },
    {
      "epoch": 3.573248407643312,
      "grad_norm": 0.8751652390981322,
      "learning_rate": 1.1293354577522264e-05,
      "loss": 0.2141,
      "step": 187
    },
    {
      "epoch": 3.5923566878980893,
      "grad_norm": 0.8626223839515736,
      "learning_rate": 1.1198030113426074e-05,
      "loss": 0.1982,
      "step": 188
    },
    {
      "epoch": 3.611464968152866,
      "grad_norm": 0.8499479463433588,
      "learning_rate": 1.1102595071354471e-05,
      "loss": 0.18,
      "step": 189
    },
    {
      "epoch": 3.6305732484076434,
      "grad_norm": 0.9117331474207767,
      "learning_rate": 1.1007058259945584e-05,
      "loss": 0.228,
      "step": 190
    },
    {
      "epoch": 3.6496815286624202,
      "grad_norm": 0.7897026778368444,
      "learning_rate": 1.0911428497230834e-05,
      "loss": 0.2184,
      "step": 191
    },
    {
      "epoch": 3.6687898089171975,
      "grad_norm": 0.8366867452316553,
      "learning_rate": 1.0815714609821027e-05,
      "loss": 0.2053,
      "step": 192
    },
    {
      "epoch": 3.6878980891719744,
      "grad_norm": 0.8587082513394615,
      "learning_rate": 1.0719925432091671e-05,
      "loss": 0.208,
      "step": 193
    },
    {
      "epoch": 3.7070063694267517,
      "grad_norm": 0.8675289819107732,
      "learning_rate": 1.0624069805367558e-05,
      "loss": 0.2075,
      "step": 194
    },
    {
      "epoch": 3.7261146496815285,
      "grad_norm": 0.8618107937937802,
      "learning_rate": 1.0528156577106703e-05,
      "loss": 0.2136,
      "step": 195
    },
    {
      "epoch": 3.745222929936306,
      "grad_norm": 0.8379198429484322,
      "learning_rate": 1.043219460008374e-05,
      "loss": 0.2306,
      "step": 196
    },
    {
      "epoch": 3.7643312101910826,
      "grad_norm": 0.8491037924292353,
      "learning_rate": 1.0336192731572805e-05,
      "loss": 0.1892,
      "step": 197
    },
    {
      "epoch": 3.78343949044586,
      "grad_norm": 0.8631451283341048,
      "learning_rate": 1.0240159832530007e-05,
      "loss": 0.2007,
      "step": 198
    },
    {
      "epoch": 3.802547770700637,
      "grad_norm": 0.892801565631552,
      "learning_rate": 1.0144104766775574e-05,
      "loss": 0.2022,
      "step": 199
    },
    {
      "epoch": 3.821656050955414,
      "grad_norm": 0.8581598389309619,
      "learning_rate": 1.004803640017571e-05,
      "loss": 0.1823,
      "step": 200
    },
    {
      "epoch": 3.840764331210191,
      "grad_norm": 0.8570289033219508,
      "learning_rate": 9.951963599824294e-06,
      "loss": 0.2201,
      "step": 201
    },
    {
      "epoch": 3.859872611464968,
      "grad_norm": 0.860535004343562,
      "learning_rate": 9.855895233224431e-06,
      "loss": 0.2381,
      "step": 202
    },
    {
      "epoch": 3.8789808917197455,
      "grad_norm": 0.880171224866928,
      "learning_rate": 9.759840167469995e-06,
      "loss": 0.159,
      "step": 203
    },
    {
      "epoch": 3.8980891719745223,
      "grad_norm": 0.8998977132258694,
      "learning_rate": 9.663807268427197e-06,
      "loss": 0.2114,
      "step": 204
    },
    {
      "epoch": 3.917197452229299,
      "grad_norm": 0.944910434342897,
      "learning_rate": 9.56780539991626e-06,
      "loss": 0.1834,
      "step": 205
    },
    {
      "epoch": 3.9363057324840764,
      "grad_norm": 0.888128708062558,
      "learning_rate": 9.471843422893299e-06,
      "loss": 0.2224,
      "step": 206
    },
    {
      "epoch": 3.9554140127388537,
      "grad_norm": 0.8574141414851673,
      "learning_rate": 9.375930194632447e-06,
      "loss": 0.1822,
      "step": 207
    },
    {
      "epoch": 3.9745222929936306,
      "grad_norm": 0.8258326063326947,
      "learning_rate": 9.28007456790833e-06,
      "loss": 0.2582,
      "step": 208
    },
    {
      "epoch": 3.9936305732484074,
      "grad_norm": 0.8702456676005106,
      "learning_rate": 9.184285390178978e-06,
      "loss": 0.1941,
      "step": 209
    },
    {
      "epoch": 4.012738853503185,
      "grad_norm": 0.6419823328435715,
      "learning_rate": 9.08857150276917e-06,
      "loss": 0.1618,
      "step": 210
    },
    {
      "epoch": 4.031847133757962,
      "grad_norm": 0.7013125447364849,
      "learning_rate": 8.992941740054418e-06,
      "loss": 0.1326,
      "step": 211
    },
    {
      "epoch": 4.050955414012739,
      "grad_norm": 0.6282089961157545,
      "learning_rate": 8.897404928645529e-06,
      "loss": 0.1239,
      "step": 212
    },
    {
      "epoch": 4.070063694267516,
      "grad_norm": 0.63559224911429,
      "learning_rate": 8.80196988657393e-06,
      "loss": 0.1174,
      "step": 213
    },
    {
      "epoch": 4.089171974522293,
      "grad_norm": 0.744688936893004,
      "learning_rate": 8.706645422477739e-06,
      "loss": 0.1459,
      "step": 214
    },
    {
      "epoch": 4.10828025477707,
      "grad_norm": 0.7641003977764952,
      "learning_rate": 8.611440334788762e-06,
      "loss": 0.1365,
      "step": 215
    },
    {
      "epoch": 4.127388535031847,
      "grad_norm": 0.8312252860589785,
      "learning_rate": 8.516363410920376e-06,
      "loss": 0.1055,
      "step": 216
    },
    {
      "epoch": 4.146496815286624,
      "grad_norm": 0.7705703082497701,
      "learning_rate": 8.42142342645646e-06,
      "loss": 0.1167,
      "step": 217
    },
    {
      "epoch": 4.165605095541402,
      "grad_norm": 0.6820769471933187,
      "learning_rate": 8.326629144341408e-06,
      "loss": 0.1091,
      "step": 218
    },
    {
      "epoch": 4.1847133757961785,
      "grad_norm": 0.6177823284633078,
      "learning_rate": 8.231989314071318e-06,
      "loss": 0.108,
      "step": 219
    },
    {
      "epoch": 4.203821656050955,
      "grad_norm": 0.6334985610396245,
      "learning_rate": 8.137512670886397e-06,
      "loss": 0.1358,
      "step": 220
    },
    {
      "epoch": 4.222929936305732,
      "grad_norm": 0.6487652927915302,
      "learning_rate": 8.043207934964722e-06,
      "loss": 0.1323,
      "step": 221
    },
    {
      "epoch": 4.24203821656051,
      "grad_norm": 0.6861755433216611,
      "learning_rate": 7.949083810617358e-06,
      "loss": 0.1203,
      "step": 222
    },
    {
      "epoch": 4.261146496815287,
      "grad_norm": 0.6070051869465705,
      "learning_rate": 7.855148985484946e-06,
      "loss": 0.1198,
      "step": 223
    },
    {
      "epoch": 4.280254777070064,
      "grad_norm": 0.672669819013237,
      "learning_rate": 7.761412129735853e-06,
      "loss": 0.1328,
      "step": 224
    },
    {
      "epoch": 4.2993630573248405,
      "grad_norm": 0.6363832766423693,
      "learning_rate": 7.667881895265895e-06,
      "loss": 0.099,
      "step": 225
    },
    {
      "epoch": 4.318471337579618,
      "grad_norm": 0.6521275103263113,
      "learning_rate": 7.574566914899779e-06,
      "loss": 0.1268,
      "step": 226
    },
    {
      "epoch": 4.337579617834395,
      "grad_norm": 0.6488602770991176,
      "learning_rate": 7.481475801594302e-06,
      "loss": 0.1164,
      "step": 227
    },
    {
      "epoch": 4.356687898089172,
      "grad_norm": 0.6579781306485809,
      "learning_rate": 7.388617147643371e-06,
      "loss": 0.1194,
      "step": 228
    },
    {
      "epoch": 4.375796178343949,
      "grad_norm": 0.6341477387146806,
      "learning_rate": 7.295999523884921e-06,
      "loss": 0.1316,
      "step": 229
    },
    {
      "epoch": 4.3949044585987265,
      "grad_norm": 0.6580995643113278,
      "learning_rate": 7.203631478909857e-06,
      "loss": 0.129,
      "step": 230
    },
    {
      "epoch": 4.414012738853503,
      "grad_norm": 0.6606683663693255,
      "learning_rate": 7.111521538272997e-06,
      "loss": 0.1526,
      "step": 231
    },
    {
      "epoch": 4.43312101910828,
      "grad_norm": 0.6353533372696129,
      "learning_rate": 7.019678203706164e-06,
      "loss": 0.1217,
      "step": 232
    },
    {
      "epoch": 4.452229299363057,
      "grad_norm": 0.6492987875417681,
      "learning_rate": 6.928109952333506e-06,
      "loss": 0.1308,
      "step": 233
    },
    {
      "epoch": 4.471337579617835,
      "grad_norm": 0.5683739215865904,
      "learning_rate": 6.83682523588902e-06,
      "loss": 0.141,
      "step": 234
    },
    {
      "epoch": 4.490445859872612,
      "grad_norm": 0.5969068164655831,
      "learning_rate": 6.745832479936492e-06,
      "loss": 0.1076,
      "step": 235
    },
    {
      "epoch": 4.509554140127388,
      "grad_norm": 0.5935296810482609,
      "learning_rate": 6.655140083091794e-06,
      "loss": 0.1093,
      "step": 236
    },
    {
      "epoch": 4.528662420382165,
      "grad_norm": 0.6141503156568103,
      "learning_rate": 6.564756416247712e-06,
      "loss": 0.1121,
      "step": 237
    },
    {
      "epoch": 4.547770700636943,
      "grad_norm": 0.606512092804129,
      "learning_rate": 6.474689821801295e-06,
      "loss": 0.1194,
      "step": 238
    },
    {
      "epoch": 4.56687898089172,
      "grad_norm": 0.5946851091687231,
      "learning_rate": 6.384948612883872e-06,
      "loss": 0.1048,
      "step": 239
    },
    {
      "epoch": 4.585987261146497,
      "grad_norm": 0.6401167422534727,
      "learning_rate": 6.2955410725937405e-06,
      "loss": 0.1336,
      "step": 240
    },
    {
      "epoch": 4.6050955414012735,
      "grad_norm": 0.6295133570230956,
      "learning_rate": 6.206475453231644e-06,
      "loss": 0.1158,
      "step": 241
    },
    {
      "epoch": 4.624203821656051,
      "grad_norm": 0.6158966292962275,
      "learning_rate": 6.117759975539075e-06,
      "loss": 0.1252,
      "step": 242
    },
    {
      "epoch": 4.643312101910828,
      "grad_norm": 0.6115229159056143,
      "learning_rate": 6.029402827939519e-06,
      "loss": 0.1481,
      "step": 243
    },
    {
      "epoch": 4.662420382165605,
      "grad_norm": 0.5877582785702361,
      "learning_rate": 5.941412165782645e-06,
      "loss": 0.1233,
      "step": 244
    },
    {
      "epoch": 4.681528662420382,
      "grad_norm": 0.575927739748183,
      "learning_rate": 5.853796110591583e-06,
      "loss": 0.1145,
      "step": 245
    },
    {
      "epoch": 4.7006369426751595,
      "grad_norm": 0.5798796204817728,
      "learning_rate": 5.766562749313309e-06,
      "loss": 0.1095,
      "step": 246
    },
    {
      "epoch": 4.719745222929936,
      "grad_norm": 0.5667423982047629,
      "learning_rate": 5.6797201335722064e-06,
      "loss": 0.1039,
      "step": 247
    },
    {
      "epoch": 4.738853503184713,
      "grad_norm": 0.5900922967570229,
      "learning_rate": 5.593276278926912e-06,
      "loss": 0.1166,
      "step": 248
    },
    {
      "epoch": 4.757961783439491,
      "grad_norm": 0.5987524140989823,
      "learning_rate": 5.507239164130501e-06,
      "loss": 0.1434,
      "step": 249
    },
    {
      "epoch": 4.777070063694268,
      "grad_norm": 0.5668228735032542,
      "learning_rate": 5.421616730394e-06,
      "loss": 0.1203,
      "step": 250
    },
    {
      "epoch": 4.796178343949045,
      "grad_norm": 0.6042014069032966,
      "learning_rate": 5.336416880653461e-06,
      "loss": 0.1192,
      "step": 251
    },
    {
      "epoch": 4.8152866242038215,
      "grad_norm": 0.6119673045706518,
      "learning_rate": 5.251647478840511e-06,
      "loss": 0.1049,
      "step": 252
    },
    {
      "epoch": 4.834394904458598,
      "grad_norm": 0.6047175720281526,
      "learning_rate": 5.167316349156495e-06,
      "loss": 0.1067,
      "step": 253
    },
    {
      "epoch": 4.853503184713376,
      "grad_norm": 0.5864037008649898,
      "learning_rate": 5.083431275350312e-06,
      "loss": 0.1324,
      "step": 254
    },
    {
      "epoch": 4.872611464968153,
      "grad_norm": 0.6234019198830574,
      "learning_rate": 5.000000000000003e-06,
      "loss": 0.1016,
      "step": 255
    },
    {
      "epoch": 4.89171974522293,
      "grad_norm": 0.5973842089783172,
      "learning_rate": 4.917030223798057e-06,
      "loss": 0.1238,
      "step": 256
    },
    {
      "epoch": 4.9108280254777075,
      "grad_norm": 0.5421130895514578,
      "learning_rate": 4.834529604840686e-06,
      "loss": 0.1068,
      "step": 257
    },
    {
      "epoch": 4.929936305732484,
      "grad_norm": 0.5748700997816383,
      "learning_rate": 4.7525057579209775e-06,
      "loss": 0.1093,
      "step": 258
    },
    {
      "epoch": 4.949044585987261,
      "grad_norm": 0.6353485915241656,
      "learning_rate": 4.670966253826027e-06,
      "loss": 0.1179,
      "step": 259
    },
    {
      "epoch": 4.968152866242038,
      "grad_norm": 0.59089915427817,
      "learning_rate": 4.589918618638173e-06,
      "loss": 0.1397,
      "step": 260
    },
    {
      "epoch": 4.987261146496815,
      "grad_norm": 0.5876648737817116,
      "learning_rate": 4.5093703330403385e-06,
      "loss": 0.121,
      "step": 261
    },
    {
      "epoch": 5.006369426751593,
      "grad_norm": 0.500425847147982,
      "learning_rate": 4.429328831625565e-06,
      "loss": 0.1088,
      "step": 262
    },
    {
      "epoch": 5.025477707006369,
      "grad_norm": 0.4173760948587404,
      "learning_rate": 4.349801502210801e-06,
      "loss": 0.0692,
      "step": 263
    },
    {
      "epoch": 5.044585987261146,
      "grad_norm": 0.45131347307487774,
      "learning_rate": 4.270795685155001e-06,
      "loss": 0.1,
      "step": 264
    },
    {
      "epoch": 5.063694267515924,
      "grad_norm": 0.40406972837102645,
      "learning_rate": 4.192318672681631e-06,
      "loss": 0.088,
      "step": 265
    },
    {
      "epoch": 5.082802547770701,
      "grad_norm": 0.38068965606156097,
      "learning_rate": 4.1143777082055715e-06,
      "loss": 0.0632,
      "step": 266
    },
    {
      "epoch": 5.101910828025478,
      "grad_norm": 0.4033571835311679,
      "learning_rate": 4.036979985664566e-06,
      "loss": 0.0785,
      "step": 267
    },
    {
      "epoch": 5.1210191082802545,
      "grad_norm": 0.4003438449286176,
      "learning_rate": 3.960132648855226e-06,
      "loss": 0.0807,
      "step": 268
    },
    {
      "epoch": 5.140127388535032,
      "grad_norm": 0.4610641419364593,
      "learning_rate": 3.883842790773647e-06,
      "loss": 0.0743,
      "step": 269
    },
    {
      "epoch": 5.159235668789809,
      "grad_norm": 0.46772011723957047,
      "learning_rate": 3.8081174529607346e-06,
      "loss": 0.083,
      "step": 270
    },
    {
      "epoch": 5.178343949044586,
      "grad_norm": 0.48469156213513115,
      "learning_rate": 3.732963624852275e-06,
      "loss": 0.0688,
      "step": 271
    },
    {
      "epoch": 5.197452229299363,
      "grad_norm": 0.4097707650542374,
      "learning_rate": 3.6583882431338047e-06,
      "loss": 0.0732,
      "step": 272
    },
    {
      "epoch": 5.2165605095541405,
      "grad_norm": 0.41662851225355924,
      "learning_rate": 3.584398191100341e-06,
      "loss": 0.0794,
      "step": 273
    },
    {
      "epoch": 5.235668789808917,
      "grad_norm": 0.5142480756247851,
      "learning_rate": 3.511000298021098e-06,
      "loss": 0.0987,
      "step": 274
    },
    {
      "epoch": 5.254777070063694,
      "grad_norm": 0.4435030725498896,
      "learning_rate": 3.4382013385090985e-06,
      "loss": 0.0519,
      "step": 275
    },
    {
      "epoch": 5.273885350318471,
      "grad_norm": 0.45114056016762916,
      "learning_rate": 3.3660080318959043e-06,
      "loss": 0.1122,
      "step": 276
    },
    {
      "epoch": 5.292993630573249,
      "grad_norm": 0.4602596362040264,
      "learning_rate": 3.2944270416114256e-06,
      "loss": 0.074,
      "step": 277
    },
    {
      "epoch": 5.312101910828026,
      "grad_norm": 0.4215918278487679,
      "learning_rate": 3.223464974568874e-06,
      "loss": 0.0714,
      "step": 278
    },
    {
      "epoch": 5.3312101910828025,
      "grad_norm": 0.37639160238215197,
      "learning_rate": 3.153128380554941e-06,
      "loss": 0.0709,
      "step": 279
    },
    {
      "epoch": 5.350318471337579,
      "grad_norm": 0.42709349372212563,
      "learning_rate": 3.0834237516252817e-06,
      "loss": 0.0888,
      "step": 280
    },
    {
      "epoch": 5.369426751592357,
      "grad_norm": 0.38376449225454184,
      "learning_rate": 3.0143575215052732e-06,
      "loss": 0.0738,
      "step": 281
    },
    {
      "epoch": 5.388535031847134,
      "grad_norm": 0.37353723261024324,
      "learning_rate": 2.94593606499619e-06,
      "loss": 0.1008,
      "step": 282
    },
    {
      "epoch": 5.407643312101911,
      "grad_norm": 0.4023560034782532,
      "learning_rate": 2.878165697386812e-06,
      "loss": 0.0812,
      "step": 283
    },
    {
      "epoch": 5.426751592356688,
      "grad_norm": 0.452345994244169,
      "learning_rate": 2.8110526738705345e-06,
      "loss": 0.0869,
      "step": 284
    },
    {
      "epoch": 5.445859872611465,
      "grad_norm": 0.41655876492240235,
      "learning_rate": 2.7446031889679893e-06,
      "loss": 0.0931,
      "step": 285
    },
    {
      "epoch": 5.464968152866242,
      "grad_norm": 0.40303769880729684,
      "learning_rate": 2.678823375955314e-06,
      "loss": 0.0656,
      "step": 286
    },
    {
      "epoch": 5.484076433121019,
      "grad_norm": 0.3736703542936502,
      "learning_rate": 2.6137193062980506e-06,
      "loss": 0.0954,
      "step": 287
    },
    {
      "epoch": 5.503184713375796,
      "grad_norm": 0.3610101674372163,
      "learning_rate": 2.5492969890907383e-06,
      "loss": 0.0701,
      "step": 288
    },
    {
      "epoch": 5.522292993630574,
      "grad_norm": 0.3974548348249396,
      "learning_rate": 2.485562370502279e-06,
      "loss": 0.1008,
      "step": 289
    },
    {
      "epoch": 5.54140127388535,
      "grad_norm": 0.4075609990175683,
      "learning_rate": 2.4225213332271203e-06,
      "loss": 0.0752,
      "step": 290
    },
    {
      "epoch": 5.560509554140127,
      "grad_norm": 0.41207353791669765,
      "learning_rate": 2.3601796959422585e-06,
      "loss": 0.09,
      "step": 291
    },
    {
      "epoch": 5.579617834394904,
      "grad_norm": 0.4197933372931118,
      "learning_rate": 2.2985432127701945e-06,
      "loss": 0.1096,
      "step": 292
    },
    {
      "epoch": 5.598726114649682,
      "grad_norm": 0.3970630962450117,
      "learning_rate": 2.2376175727478346e-06,
      "loss": 0.0831,
      "step": 293
    },
    {
      "epoch": 5.617834394904459,
      "grad_norm": 0.4233872661264893,
      "learning_rate": 2.1774083993013715e-06,
      "loss": 0.0899,
      "step": 294
    },
    {
      "epoch": 5.6369426751592355,
      "grad_norm": 0.45476349355512374,
      "learning_rate": 2.1179212497272582e-06,
      "loss": 0.0844,
      "step": 295
    },
    {
      "epoch": 5.656050955414012,
      "grad_norm": 0.3652938444091257,
      "learning_rate": 2.0591616146792705e-06,
      "loss": 0.0638,
      "step": 296
    },
    {
      "epoch": 5.67515923566879,
      "grad_norm": 0.4198343795853902,
      "learning_rate": 2.0011349176617133e-06,
      "loss": 0.0987,
      "step": 297
    },
    {
      "epoch": 5.694267515923567,
      "grad_norm": 0.4637461724517846,
      "learning_rate": 1.9438465145288377e-06,
      "loss": 0.0857,
      "step": 298
    },
    {
      "epoch": 5.713375796178344,
      "grad_norm": 0.41439129316088197,
      "learning_rate": 1.8873016929904942e-06,
      "loss": 0.0981,
      "step": 299
    },
    {
      "epoch": 5.732484076433121,
      "grad_norm": 0.399364720681734,
      "learning_rate": 1.8315056721240831e-06,
      "loss": 0.0661,
      "step": 300
    },
    {
      "epoch": 5.751592356687898,
      "grad_norm": 0.3592826811293026,
      "learning_rate": 1.7764636018928249e-06,
      "loss": 0.0743,
      "step": 301
    },
    {
      "epoch": 5.770700636942675,
      "grad_norm": 0.3747827275166178,
      "learning_rate": 1.722180562670428e-06,
      "loss": 0.061,
      "step": 302
    },
    {
      "epoch": 5.789808917197452,
      "grad_norm": 0.42899694389637955,
      "learning_rate": 1.6686615647721638e-06,
      "loss": 0.0812,
      "step": 303
    },
    {
      "epoch": 5.80891719745223,
      "grad_norm": 0.3890663817302002,
      "learning_rate": 1.6159115479924259e-06,
      "loss": 0.0918,
      "step": 304
    },
    {
      "epoch": 5.828025477707007,
      "grad_norm": 0.39746898582488294,
      "learning_rate": 1.5639353811487744e-06,
      "loss": 0.0825,
      "step": 305
    },
    {
      "epoch": 5.8471337579617835,
      "grad_norm": 0.3899707969607275,
      "learning_rate": 1.5127378616325606e-06,
      "loss": 0.1,
      "step": 306
    },
    {
      "epoch": 5.86624203821656,
      "grad_norm": 0.3878767177963925,
      "learning_rate": 1.462323714966114e-06,
      "loss": 0.0974,
      "step": 307
    },
    {
      "epoch": 5.885350318471337,
      "grad_norm": 0.36936598310841057,
      "learning_rate": 1.4126975943665844e-06,
      "loss": 0.0639,
      "step": 308
    },
    {
      "epoch": 5.904458598726115,
      "grad_norm": 0.42012980899969893,
      "learning_rate": 1.3638640803164516e-06,
      "loss": 0.1117,
      "step": 309
    },
    {
      "epoch": 5.923566878980892,
      "grad_norm": 0.3805627255371633,
      "learning_rate": 1.3158276801407432e-06,
      "loss": 0.0695,
      "step": 310
    },
    {
      "epoch": 5.942675159235669,
      "grad_norm": 0.39789591608124625,
      "learning_rate": 1.2685928275910142e-06,
      "loss": 0.0669,
      "step": 311
    },
    {
      "epoch": 5.961783439490446,
      "grad_norm": 0.46238384001020055,
      "learning_rate": 1.222163882436107e-06,
      "loss": 0.0626,
      "step": 312
    },
    {
      "epoch": 5.980891719745223,
      "grad_norm": 0.41303332492968153,
      "learning_rate": 1.1765451300597574e-06,
      "loss": 0.0892,
      "step": 313
    },
    {
      "epoch": 6.0,
      "grad_norm": 0.3770562669073382,
      "learning_rate": 1.1317407810650372e-06,
      "loss": 0.0607,
      "step": 314
    },
    {
      "epoch": 6.019108280254777,
      "grad_norm": 0.272426829181539,
      "learning_rate": 1.0877549708857228e-06,
      "loss": 0.0678,
      "step": 315
    },
    {
      "epoch": 6.038216560509555,
      "grad_norm": 0.269954454426176,
      "learning_rate": 1.0445917594046073e-06,
      "loss": 0.0627,
      "step": 316
    },
    {
      "epoch": 6.057324840764331,
      "grad_norm": 0.3003403674449553,
      "learning_rate": 1.0022551305787564e-06,
      "loss": 0.0494,
      "step": 317
    },
    {
      "epoch": 6.076433121019108,
      "grad_norm": 0.2966586294718324,
      "learning_rate": 9.607489920717983e-07,
      "loss": 0.0949,
      "step": 318
    },
    {
      "epoch": 6.095541401273885,
      "grad_norm": 0.2919665902100848,
      "learning_rate": 9.200771748932513e-07,
      "loss": 0.1041,
      "step": 319
    },
    {
      "epoch": 6.114649681528663,
      "grad_norm": 0.28482433883332764,
      "learning_rate": 8.802434330449128e-07,
      "loss": 0.0698,
      "step": 320
    },
    {
      "epoch": 6.13375796178344,
      "grad_norm": 0.29065813094699533,
      "learning_rate": 8.412514431743657e-07,
      "loss": 0.0698,
      "step": 321
    },
    {
      "epoch": 6.1528662420382165,
      "grad_norm": 0.3231834141577625,
      "learning_rate": 8.031048042356393e-07,
      "loss": 0.0846,
      "step": 322
    },
    {
      "epoch": 6.171974522292993,
      "grad_norm": 0.2949302797499998,
      "learning_rate": 7.65807037157007e-07,
      "loss": 0.0708,
      "step": 323
    },
    {
      "epoch": 6.191082802547771,
      "grad_norm": 0.2902725445633967,
      "learning_rate": 7.293615845160196e-07,
      "loss": 0.0672,
      "step": 324
    },
    {
      "epoch": 6.210191082802548,
      "grad_norm": 0.28620474425555903,
      "learning_rate": 6.937718102217461e-07,
      "loss": 0.0847,
      "step": 325
    },
    {
      "epoch": 6.229299363057325,
      "grad_norm": 0.2655524610396661,
      "learning_rate": 6.590409992042957e-07,
      "loss": 0.0586,
      "step": 326
    },
    {
      "epoch": 6.248407643312102,
      "grad_norm": 0.2825516847146469,
      "learning_rate": 6.251723571116031e-07,
      "loss": 0.0737,
      "step": 327
    },
    {
      "epoch": 6.267515923566879,
      "grad_norm": 0.2728625024671345,
      "learning_rate": 5.921690100135713e-07,
      "loss": 0.061,
      "step": 328
    },
    {
      "epoch": 6.286624203821656,
      "grad_norm": 0.26716342873087257,
      "learning_rate": 5.600340041135133e-07,
      "loss": 0.0654,
      "step": 329
    },
    {
      "epoch": 6.305732484076433,
      "grad_norm": 0.3370958532154618,
      "learning_rate": 5.287703054670012e-07,
      "loss": 0.115,
      "step": 330
    },
    {
      "epoch": 6.32484076433121,
      "grad_norm": 0.28827532695871755,
      "learning_rate": 4.983807997080925e-07,
      "loss": 0.0672,
      "step": 331
    },
    {
      "epoch": 6.343949044585988,
      "grad_norm": 0.31630942692758224,
      "learning_rate": 4.6886829178299676e-07,
      "loss": 0.0755,
      "step": 332
    },
    {
      "epoch": 6.3630573248407645,
      "grad_norm": 0.29665281820775447,
      "learning_rate": 4.402355056911656e-07,
      "loss": 0.0634,
      "step": 333
    },
    {
      "epoch": 6.382165605095541,
      "grad_norm": 0.2853111456175537,
      "learning_rate": 4.124850842338779e-07,
      "loss": 0.0652,
      "step": 334
    },
    {
      "epoch": 6.401273885350318,
      "grad_norm": 0.268449628624925,
      "learning_rate": 3.8561958877030957e-07,
      "loss": 0.0532,
      "step": 335
    },
    {
      "epoch": 6.420382165605096,
      "grad_norm": 0.27983344118352355,
      "learning_rate": 3.5964149898111587e-07,
      "loss": 0.0426,
      "step": 336
    },
    {
      "epoch": 6.439490445859873,
      "grad_norm": 0.3060643682948817,
      "learning_rate": 3.345532126395579e-07,
      "loss": 0.0932,
      "step": 337
    },
    {
      "epoch": 6.45859872611465,
      "grad_norm": 0.27201807551747703,
      "learning_rate": 3.1035704539019384e-07,
      "loss": 0.0595,
      "step": 338
    },
    {
      "epoch": 6.477707006369426,
      "grad_norm": 0.26756794456829175,
      "learning_rate": 2.870552305351382e-07,
      "loss": 0.0758,
      "step": 339
    },
    {
      "epoch": 6.496815286624204,
      "grad_norm": 0.28201972789789587,
      "learning_rate": 2.646499188279328e-07,
      "loss": 0.0668,
      "step": 340
    },
    {
      "epoch": 6.515923566878981,
      "grad_norm": 0.2980403873708406,
      "learning_rate": 2.4314317827503375e-07,
      "loss": 0.0743,
      "step": 341
    },
    {
      "epoch": 6.535031847133758,
      "grad_norm": 0.2651477141587895,
| "learning_rate": 2.2253699394493066e-07, | |
| "loss": 0.0449, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 6.554140127388535, | |
| "grad_norm": 0.30345561025935347, | |
| "learning_rate": 2.028332677849254e-07, | |
| "loss": 0.077, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 6.573248407643312, | |
| "grad_norm": 0.2906970002565656, | |
| "learning_rate": 1.840338184455881e-07, | |
| "loss": 0.0824, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 6.592356687898089, | |
| "grad_norm": 0.24346921023831067, | |
| "learning_rate": 1.6614038111289034e-07, | |
| "loss": 0.0461, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 6.611464968152866, | |
| "grad_norm": 0.2858491763288342, | |
| "learning_rate": 1.49154607348051e-07, | |
| "loss": 0.0749, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 6.630573248407643, | |
| "grad_norm": 0.33936195518059825, | |
| "learning_rate": 1.330780649350938e-07, | |
| "loss": 0.0743, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 6.649681528662421, | |
| "grad_norm": 0.31972369564639225, | |
| "learning_rate": 1.1791223773614635e-07, | |
| "loss": 0.0809, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 6.6687898089171975, | |
| "grad_norm": 0.2538217909225657, | |
| "learning_rate": 1.0365852555447642e-07, | |
| "loss": 0.0413, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 6.687898089171974, | |
| "grad_norm": 0.25490331468770727, | |
| "learning_rate": 9.031824400528854e-08, | |
| "loss": 0.0552, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 6.707006369426751, | |
| "grad_norm": 0.2760373792657459, | |
| "learning_rate": 7.789262439430012e-08, | |
| "loss": 0.0827, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 6.726114649681529, | |
| "grad_norm": 0.22765815548854168, | |
| "learning_rate": 6.638281360408339e-08, | |
| "loss": 0.0367, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 6.745222929936306, | |
| "grad_norm": 0.3282183977270304, | |
| "learning_rate": 5.578987398821345e-08, | |
| "loss": 0.0711, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 6.764331210191083, | |
| "grad_norm": 0.25052516066266456, | |
| "learning_rate": 4.6114783273213395e-08, | |
| "loss": 0.0581, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 6.7834394904458595, | |
| "grad_norm": 0.29962769715666265, | |
| "learning_rate": 3.735843446830867e-08, | |
| "loss": 0.0836, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 6.802547770700637, | |
| "grad_norm": 0.2869938989717563, | |
| "learning_rate": 2.9521635783001932e-08, | |
| "loss": 0.0706, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 6.821656050955414, | |
| "grad_norm": 0.27576347274162744, | |
| "learning_rate": 2.2605110552477162e-08, | |
| "loss": 0.073, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 6.840764331210191, | |
| "grad_norm": 0.26560170917028647, | |
| "learning_rate": 1.6609497170834154e-08, | |
| "loss": 0.0539, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 6.859872611464969, | |
| "grad_norm": 0.3020008036757105, | |
| "learning_rate": 1.1535349032167908e-08, | |
| "loss": 0.0621, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 6.8789808917197455, | |
| "grad_norm": 0.2527492475352435, | |
| "learning_rate": 7.3831344794872415e-09, | |
| "loss": 0.0518, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 6.898089171974522, | |
| "grad_norm": 0.24543537904631924, | |
| "learning_rate": 4.153236761488266e-09, | |
| "loss": 0.0576, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 6.917197452229299, | |
| "grad_norm": 0.3026674348438263, | |
| "learning_rate": 1.8459539971804608e-09, | |
| "loss": 0.0688, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 6.936305732484076, | |
| "grad_norm": 0.2910639101055271, | |
| "learning_rate": 4.614991483686826e-10, | |
| "loss": 0.0633, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 6.955414012738854, | |
| "grad_norm": 0.24760222696562117, | |
| "learning_rate": 0.0, | |
| "loss": 0.0559, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 6.955414012738854, | |
| "step": 364, | |
| "total_flos": 8.423791610395853e+16, | |
| "train_loss": 0.340403069547572, | |
| "train_runtime": 2690.5724, | |
| "train_samples_per_second": 13.008, | |
| "train_steps_per_second": 0.135 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 364, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 7, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 8.423791610395853e+16, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
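The file above is a Hugging Face `transformers` Trainer state log: per-step records in `log_history` (each with `epoch`, `grad_norm`, `learning_rate`, `loss`, and `step`), a final run-level summary record, and top-level run settings. Below is a minimal sketch of how such a file can be consumed, assuming it is saved locally as `trainer_state.json`; the filename and the printed checks are illustrative and not part of the original run.

```python
# Minimal sketch (assumption: the JSON above is saved as "trainer_state.json").
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry a "loss" key; the final record is the run summary.
steps = [entry for entry in state["log_history"] if "loss" in entry]
summary = state["log_history"][-1]

print(f"logged steps: {len(steps)}")  # 364 for this run
print(f"final per-step loss: {steps[-1]['loss']}")
print(f"mean training loss reported by the Trainer: {summary['train_loss']}")

# Sanity check: steps/sec * runtime should roughly reproduce max_steps.
approx_steps = summary["train_steps_per_second"] * summary["train_runtime"]
print(f"steps_per_second * runtime ~= {approx_steps:.0f} "
      f"(max_steps = {state['max_steps']})")
```

As a quick consistency check on the summary fields, `train_steps_per_second * train_runtime` (0.135 × 2690.5724 ≈ 363) lands close to `max_steps` (364), as expected for a completed run.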