{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.7980221756068326,
  "eval_steps": 500,
  "global_step": 30000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005993407252022775,
      "grad_norm": 1.0038542747497559,
      "learning_rate": 2.996703626011388e-07,
      "loss": 0.1541,
      "step": 100
    },
    {
      "epoch": 0.01198681450404555,
      "grad_norm": 0.5848270058631897,
      "learning_rate": 5.993407252022776e-07,
      "loss": 0.1208,
      "step": 200
    },
    {
      "epoch": 0.017980221756068324,
      "grad_norm": 0.9016316533088684,
      "learning_rate": 8.990110878034162e-07,
      "loss": 0.1106,
      "step": 300
    },
    {
      "epoch": 0.0239736290080911,
      "grad_norm": 0.21770495176315308,
      "learning_rate": 1.1986814504045551e-06,
      "loss": 0.1085,
      "step": 400
    },
    {
      "epoch": 0.029967036260113874,
      "grad_norm": 0.1711203008890152,
      "learning_rate": 1.4983518130056939e-06,
      "loss": 0.1064,
      "step": 500
    },
    {
      "epoch": 0.03596044351213665,
      "grad_norm": 0.22904595732688904,
      "learning_rate": 1.7980221756068325e-06,
      "loss": 0.1033,
      "step": 600
    },
    {
      "epoch": 0.041953850764159424,
      "grad_norm": 0.060541968792676926,
      "learning_rate": 2.0976925382079712e-06,
      "loss": 0.1036,
      "step": 700
    },
    {
      "epoch": 0.0479472580161822,
      "grad_norm": 1.8464969396591187,
      "learning_rate": 2.3973629008091102e-06,
      "loss": 0.1024,
      "step": 800
    },
    {
      "epoch": 0.05394066526820498,
      "grad_norm": 0.9996387362480164,
      "learning_rate": 2.6970332634102492e-06,
      "loss": 0.103,
      "step": 900
    },
    {
      "epoch": 0.05993407252022775,
      "grad_norm": 0.11365871131420135,
      "learning_rate": 2.9967036260113878e-06,
      "loss": 0.1025,
      "step": 1000
    },
    {
      "epoch": 0.06592747977225052,
      "grad_norm": 0.34113025665283203,
      "learning_rate": 3.2963739886125264e-06,
      "loss": 0.1028,
      "step": 1100
    },
    {
      "epoch": 0.0719208870242733,
      "grad_norm": 0.21321642398834229,
      "learning_rate": 3.596044351213665e-06,
      "loss": 0.1014,
      "step": 1200
    },
    {
      "epoch": 0.07791429427629608,
      "grad_norm": 0.6457433104515076,
      "learning_rate": 3.8957147138148035e-06,
      "loss": 0.1011,
      "step": 1300
    },
    {
      "epoch": 0.08390770152831885,
      "grad_norm": 0.7026138305664062,
      "learning_rate": 4.1953850764159425e-06,
      "loss": 0.1011,
      "step": 1400
    },
    {
      "epoch": 0.08990110878034162,
      "grad_norm": 0.22752709686756134,
      "learning_rate": 4.4950554390170815e-06,
      "loss": 0.101,
      "step": 1500
    },
    {
      "epoch": 0.0958945160323644,
      "grad_norm": 0.16539303958415985,
      "learning_rate": 4.7947258016182205e-06,
      "loss": 0.1008,
      "step": 1600
    },
    {
      "epoch": 0.10188792328438717,
      "grad_norm": 0.10093910992145538,
      "learning_rate": 5.0943961642193594e-06,
      "loss": 0.1005,
      "step": 1700
    },
    {
      "epoch": 0.10788133053640996,
      "grad_norm": 0.06153643876314163,
      "learning_rate": 5.3940665268204984e-06,
      "loss": 0.1011,
      "step": 1800
    },
    {
      "epoch": 0.11387473778843273,
      "grad_norm": 0.5297218561172485,
      "learning_rate": 5.6937368894216374e-06,
      "loss": 0.1001,
      "step": 1900
    },
    {
      "epoch": 0.1198681450404555,
      "grad_norm": 0.710209310054779,
      "learning_rate": 5.9934072520227756e-06,
      "loss": 0.1004,
      "step": 2000
    },
    {
      "epoch": 0.12586155229247828,
      "grad_norm": 0.043333955109119415,
      "learning_rate": 6.2930776146239146e-06,
      "loss": 0.1011,
      "step": 2100
    },
    {
      "epoch": 0.13185495954450105,
      "grad_norm": 0.2191428393125534,
      "learning_rate": 6.592747977225053e-06,
      "loss": 0.1002,
      "step": 2200
    },
    {
      "epoch": 0.13784836679652382,
      "grad_norm": 0.24634818732738495,
      "learning_rate": 6.892418339826191e-06,
      "loss": 0.1003,
      "step": 2300
    },
    {
      "epoch": 0.1438417740485466,
      "grad_norm": 0.19500990211963654,
      "learning_rate": 7.19208870242733e-06,
      "loss": 0.1017,
      "step": 2400
    },
    {
      "epoch": 0.1498351813005694,
      "grad_norm": 0.20727553963661194,
      "learning_rate": 7.491759065028469e-06,
      "loss": 0.1009,
      "step": 2500
    },
    {
      "epoch": 0.15582858855259216,
      "grad_norm": 0.19461679458618164,
      "learning_rate": 7.791429427629607e-06,
      "loss": 0.1006,
      "step": 2600
    },
    {
      "epoch": 0.16182199580461493,
      "grad_norm": 0.12335213273763657,
      "learning_rate": 8.091099790230747e-06,
      "loss": 0.1008,
      "step": 2700
    },
    {
      "epoch": 0.1678154030566377,
      "grad_norm": 0.1840120255947113,
      "learning_rate": 8.390770152831885e-06,
      "loss": 0.0996,
      "step": 2800
    },
    {
      "epoch": 0.17380881030866047,
      "grad_norm": 0.047822967171669006,
      "learning_rate": 8.690440515433025e-06,
      "loss": 0.1006,
      "step": 2900
    },
    {
      "epoch": 0.17980221756068324,
      "grad_norm": 0.05022828280925751,
      "learning_rate": 8.990110878034163e-06,
      "loss": 0.1004,
      "step": 3000
    },
    {
      "epoch": 0.18579562481270603,
      "grad_norm": 0.08173201233148575,
      "learning_rate": 9.289781240635301e-06,
      "loss": 0.1016,
      "step": 3100
    },
    {
      "epoch": 0.1917890320647288,
      "grad_norm": 0.25700005888938904,
      "learning_rate": 9.589451603236441e-06,
      "loss": 0.1012,
      "step": 3200
    },
    {
      "epoch": 0.19778243931675157,
      "grad_norm": 0.929848849773407,
      "learning_rate": 9.889121965837579e-06,
      "loss": 0.1003,
      "step": 3300
    },
    {
      "epoch": 0.20377584656877434,
      "grad_norm": 0.16289019584655762,
      "learning_rate": 9.97902307461792e-06,
      "loss": 0.1005,
      "step": 3400
    },
    {
      "epoch": 0.2097692538207971,
      "grad_norm": 0.8773489594459534,
      "learning_rate": 9.94572636766224e-06,
      "loss": 0.1007,
      "step": 3500
    },
    {
      "epoch": 0.2157626610728199,
      "grad_norm": 0.0891551822423935,
      "learning_rate": 9.912429660706557e-06,
      "loss": 0.1001,
      "step": 3600
    },
    {
      "epoch": 0.22175606832484268,
      "grad_norm": 0.06410754472017288,
      "learning_rate": 9.879132953750875e-06,
      "loss": 0.0993,
      "step": 3700
    },
    {
      "epoch": 0.22774947557686545,
      "grad_norm": 0.21573640406131744,
      "learning_rate": 9.845836246795192e-06,
      "loss": 0.1004,
      "step": 3800
    },
    {
      "epoch": 0.23374288282888822,
      "grad_norm": 0.07671983540058136,
      "learning_rate": 9.812539539839511e-06,
      "loss": 0.1006,
      "step": 3900
    },
    {
      "epoch": 0.239736290080911,
      "grad_norm": 0.6497980952262878,
      "learning_rate": 9.779242832883829e-06,
      "loss": 0.1,
      "step": 4000
    },
    {
      "epoch": 0.24572969733293376,
      "grad_norm": 0.7966341972351074,
      "learning_rate": 9.745946125928146e-06,
      "loss": 0.0982,
      "step": 4100
    },
    {
      "epoch": 0.25172310458495656,
      "grad_norm": 0.13860899209976196,
      "learning_rate": 9.712649418972464e-06,
      "loss": 0.1007,
      "step": 4200
    },
    {
      "epoch": 0.2577165118369793,
      "grad_norm": 0.21768826246261597,
      "learning_rate": 9.679352712016781e-06,
      "loss": 0.0996,
      "step": 4300
    },
    {
      "epoch": 0.2637099190890021,
      "grad_norm": 0.5174150466918945,
      "learning_rate": 9.6460560050611e-06,
      "loss": 0.1006,
      "step": 4400
    },
    {
      "epoch": 0.2697033263410249,
      "grad_norm": 0.5629359483718872,
      "learning_rate": 9.61275929810542e-06,
      "loss": 0.1014,
      "step": 4500
    },
    {
      "epoch": 0.27569673359304764,
      "grad_norm": 0.21343261003494263,
      "learning_rate": 9.579462591149737e-06,
      "loss": 0.1011,
      "step": 4600
    },
    {
      "epoch": 0.28169014084507044,
      "grad_norm": 0.36314478516578674,
      "learning_rate": 9.546165884194054e-06,
      "loss": 0.1006,
      "step": 4700
    },
    {
      "epoch": 0.2876835480970932,
      "grad_norm": 0.07583048939704895,
      "learning_rate": 9.512869177238371e-06,
      "loss": 0.098,
      "step": 4800
    },
    {
      "epoch": 0.293676955349116,
      "grad_norm": 0.014798377640545368,
      "learning_rate": 9.479572470282689e-06,
      "loss": 0.099,
      "step": 4900
    },
    {
      "epoch": 0.2996703626011388,
      "grad_norm": 0.21406540274620056,
      "learning_rate": 9.446275763327008e-06,
      "loss": 0.0993,
      "step": 5000
    },
    {
      "epoch": 0.3056637698531615,
      "grad_norm": 0.36721229553222656,
      "learning_rate": 9.412979056371325e-06,
      "loss": 0.1006,
      "step": 5100
    },
    {
      "epoch": 0.3116571771051843,
      "grad_norm": 0.18309548497200012,
      "learning_rate": 9.379682349415643e-06,
      "loss": 0.0995,
      "step": 5200
    },
    {
      "epoch": 0.31765058435720706,
      "grad_norm": 0.1970544010400772,
      "learning_rate": 9.346385642459962e-06,
      "loss": 0.0999,
      "step": 5300
    },
    {
      "epoch": 0.32364399160922985,
      "grad_norm": 0.052760008722543716,
      "learning_rate": 9.31308893550428e-06,
      "loss": 0.0992,
      "step": 5400
    },
    {
      "epoch": 0.3296373988612526,
      "grad_norm": 0.1352636069059372,
      "learning_rate": 9.279792228548597e-06,
      "loss": 0.099,
      "step": 5500
    },
    {
      "epoch": 0.3356308061132754,
      "grad_norm": 0.024126173928380013,
      "learning_rate": 9.246495521592916e-06,
      "loss": 0.1001,
      "step": 5600
    },
    {
      "epoch": 0.3416242133652982,
      "grad_norm": 0.12636955082416534,
      "learning_rate": 9.213198814637233e-06,
      "loss": 0.1003,
      "step": 5700
    },
    {
      "epoch": 0.34761762061732093,
      "grad_norm": 0.021808089688420296,
      "learning_rate": 9.17990210768155e-06,
      "loss": 0.1,
      "step": 5800
    },
    {
      "epoch": 0.35361102786934373,
      "grad_norm": 0.11340193450450897,
      "learning_rate": 9.146605400725868e-06,
      "loss": 0.1005,
      "step": 5900
    },
    {
      "epoch": 0.3596044351213665,
      "grad_norm": 0.20579053461551666,
      "learning_rate": 9.113308693770187e-06,
      "loss": 0.1007,
      "step": 6000
    },
    {
      "epoch": 0.36559784237338927,
      "grad_norm": 0.14165253937244415,
      "learning_rate": 9.080011986814505e-06,
      "loss": 0.0997,
      "step": 6100
    },
    {
      "epoch": 0.37159124962541207,
      "grad_norm": 0.20944170653820038,
      "learning_rate": 9.046715279858824e-06,
      "loss": 0.0997,
      "step": 6200
    },
    {
      "epoch": 0.3775846568774348,
      "grad_norm": 0.15122222900390625,
      "learning_rate": 9.013418572903141e-06,
      "loss": 0.0993,
      "step": 6300
    },
    {
      "epoch": 0.3835780641294576,
      "grad_norm": 0.1885826140642166,
      "learning_rate": 8.980121865947459e-06,
      "loss": 0.0989,
      "step": 6400
    },
    {
      "epoch": 0.38957147138148035,
      "grad_norm": 0.04748326912522316,
      "learning_rate": 8.946825158991776e-06,
      "loss": 0.0998,
      "step": 6500
    },
    {
      "epoch": 0.39556487863350315,
      "grad_norm": 0.018379371613264084,
      "learning_rate": 8.913528452036094e-06,
      "loss": 0.0997,
      "step": 6600
    },
    {
      "epoch": 0.40155828588552595,
      "grad_norm": 0.22810649871826172,
      "learning_rate": 8.880231745080413e-06,
      "loss": 0.0999,
      "step": 6700
    },
    {
      "epoch": 0.4075516931375487,
      "grad_norm": 0.028912872076034546,
      "learning_rate": 8.84693503812473e-06,
      "loss": 0.0977,
      "step": 6800
    },
    {
      "epoch": 0.4135451003895715,
      "grad_norm": 0.09700625389814377,
      "learning_rate": 8.813638331169047e-06,
      "loss": 0.0998,
      "step": 6900
    },
    {
      "epoch": 0.4195385076415942,
      "grad_norm": 0.04979974031448364,
      "learning_rate": 8.780341624213367e-06,
      "loss": 0.0997,
      "step": 7000
    },
    {
      "epoch": 0.425531914893617,
      "grad_norm": 0.10137055069208145,
      "learning_rate": 8.747044917257684e-06,
      "loss": 0.0991,
      "step": 7100
    },
    {
      "epoch": 0.4315253221456398,
      "grad_norm": 0.0385773666203022,
      "learning_rate": 8.713748210302001e-06,
      "loss": 0.0996,
      "step": 7200
    },
    {
      "epoch": 0.43751872939766256,
      "grad_norm": 0.05161421373486519,
      "learning_rate": 8.68045150334632e-06,
      "loss": 0.1003,
      "step": 7300
    },
    {
      "epoch": 0.44351213664968536,
      "grad_norm": 0.14135126769542694,
      "learning_rate": 8.647154796390638e-06,
      "loss": 0.0996,
      "step": 7400
    },
    {
      "epoch": 0.4495055439017081,
      "grad_norm": 0.40400320291519165,
      "learning_rate": 8.613858089434955e-06,
      "loss": 0.0995,
      "step": 7500
    },
    {
      "epoch": 0.4554989511537309,
      "grad_norm": 0.5337746143341064,
      "learning_rate": 8.580561382479273e-06,
      "loss": 0.1002,
      "step": 7600
    },
    {
      "epoch": 0.46149235840575364,
      "grad_norm": 0.25852474570274353,
      "learning_rate": 8.547264675523592e-06,
      "loss": 0.0988,
      "step": 7700
    },
    {
      "epoch": 0.46748576565777644,
      "grad_norm": 0.02634989097714424,
      "learning_rate": 8.51396796856791e-06,
      "loss": 0.0998,
      "step": 7800
    },
    {
      "epoch": 0.47347917290979924,
      "grad_norm": 0.045602887868881226,
      "learning_rate": 8.480671261612228e-06,
      "loss": 0.0993,
      "step": 7900
    },
    {
      "epoch": 0.479472580161822,
      "grad_norm": 0.4190271198749542,
      "learning_rate": 8.447374554656546e-06,
      "loss": 0.0992,
      "step": 8000
    },
    {
      "epoch": 0.4854659874138448,
      "grad_norm": 0.03802090883255005,
      "learning_rate": 8.414077847700863e-06,
      "loss": 0.1003,
      "step": 8100
    },
    {
      "epoch": 0.4914593946658675,
      "grad_norm": 0.187332421541214,
      "learning_rate": 8.38078114074518e-06,
      "loss": 0.099,
      "step": 8200
    },
    {
      "epoch": 0.4974528019178903,
      "grad_norm": 0.08949125558137894,
      "learning_rate": 8.347484433789498e-06,
      "loss": 0.0991,
      "step": 8300
    },
    {
      "epoch": 0.5034462091699131,
      "grad_norm": 0.32420963048934937,
      "learning_rate": 8.314187726833817e-06,
      "loss": 0.1001,
      "step": 8400
    },
    {
      "epoch": 0.5094396164219359,
      "grad_norm": 0.06165235489606857,
      "learning_rate": 8.280891019878135e-06,
      "loss": 0.0994,
      "step": 8500
    },
    {
      "epoch": 0.5154330236739586,
      "grad_norm": 0.14306558668613434,
      "learning_rate": 8.247594312922452e-06,
      "loss": 0.1,
      "step": 8600
    },
    {
      "epoch": 0.5214264309259814,
      "grad_norm": 0.1607675403356552,
      "learning_rate": 8.21429760596677e-06,
      "loss": 0.0999,
      "step": 8700
    },
    {
      "epoch": 0.5274198381780042,
      "grad_norm": 0.16511593759059906,
      "learning_rate": 8.181000899011089e-06,
      "loss": 0.1012,
      "step": 8800
    },
    {
      "epoch": 0.533413245430027,
      "grad_norm": 0.08098283410072327,
      "learning_rate": 8.147704192055406e-06,
      "loss": 0.0998,
      "step": 8900
    },
    {
      "epoch": 0.5394066526820498,
      "grad_norm": 0.18850570917129517,
      "learning_rate": 8.114407485099725e-06,
      "loss": 0.1004,
      "step": 9000
    },
    {
      "epoch": 0.5454000599340725,
      "grad_norm": 0.03290531411767006,
      "learning_rate": 8.081110778144043e-06,
      "loss": 0.0998,
      "step": 9100
    },
    {
      "epoch": 0.5513934671860953,
      "grad_norm": 0.799527645111084,
      "learning_rate": 8.04781407118836e-06,
      "loss": 0.0995,
      "step": 9200
    },
    {
      "epoch": 0.5573868744381181,
      "grad_norm": 0.45988729596138,
      "learning_rate": 8.014517364232677e-06,
      "loss": 0.1,
      "step": 9300
    },
    {
      "epoch": 0.5633802816901409,
      "grad_norm": 0.035269495099782944,
      "learning_rate": 7.981220657276996e-06,
      "loss": 0.0993,
      "step": 9400
    },
    {
      "epoch": 0.5693736889421637,
      "grad_norm": 0.29002872109413147,
      "learning_rate": 7.947923950321314e-06,
      "loss": 0.0994,
      "step": 9500
    },
    {
      "epoch": 0.5753670961941864,
      "grad_norm": 0.06160065904259682,
      "learning_rate": 7.914627243365631e-06,
      "loss": 0.0995,
      "step": 9600
    },
    {
      "epoch": 0.5813605034462092,
      "grad_norm": 0.14128552377223969,
      "learning_rate": 7.88133053640995e-06,
      "loss": 0.0998,
      "step": 9700
    },
    {
      "epoch": 0.587353910698232,
      "grad_norm": 0.14790260791778564,
      "learning_rate": 7.848033829454268e-06,
      "loss": 0.0996,
      "step": 9800
    },
    {
      "epoch": 0.5933473179502547,
      "grad_norm": 0.4272247552871704,
      "learning_rate": 7.814737122498585e-06,
      "loss": 0.0996,
      "step": 9900
    },
    {
      "epoch": 0.5993407252022775,
      "grad_norm": 0.39960777759552,
      "learning_rate": 7.781440415542903e-06,
      "loss": 0.1001,
      "step": 10000
    },
    {
      "epoch": 0.6053341324543002,
      "grad_norm": 0.27847522497177124,
      "learning_rate": 7.748143708587222e-06,
      "loss": 0.0986,
      "step": 10100
    },
    {
      "epoch": 0.611327539706323,
      "grad_norm": 0.06485463678836823,
      "learning_rate": 7.71484700163154e-06,
      "loss": 0.0985,
      "step": 10200
    },
    {
      "epoch": 0.6173209469583458,
      "grad_norm": 0.05133885145187378,
      "learning_rate": 7.681550294675857e-06,
      "loss": 0.0996,
      "step": 10300
    },
    {
      "epoch": 0.6233143542103686,
      "grad_norm": 0.0617876760661602,
      "learning_rate": 7.648253587720174e-06,
      "loss": 0.0989,
      "step": 10400
    },
    {
      "epoch": 0.6293077614623914,
      "grad_norm": 0.06740323454141617,
      "learning_rate": 7.614956880764492e-06,
      "loss": 0.1,
      "step": 10500
    },
    {
      "epoch": 0.6353011687144141,
      "grad_norm": 0.20278900861740112,
      "learning_rate": 7.5816601738088115e-06,
      "loss": 0.0999,
      "step": 10600
    },
    {
      "epoch": 0.6412945759664369,
      "grad_norm": 0.06642678380012512,
      "learning_rate": 7.548363466853129e-06,
      "loss": 0.0991,
      "step": 10700
    },
    {
      "epoch": 0.6472879832184597,
      "grad_norm": 0.058061473071575165,
      "learning_rate": 7.515066759897447e-06,
      "loss": 0.0997,
      "step": 10800
    },
    {
      "epoch": 0.6532813904704825,
      "grad_norm": 0.20366477966308594,
      "learning_rate": 7.4817700529417646e-06,
      "loss": 0.0983,
      "step": 10900
    },
    {
      "epoch": 0.6592747977225052,
      "grad_norm": 0.016720598563551903,
      "learning_rate": 7.448473345986083e-06,
      "loss": 0.0991,
      "step": 11000
    },
    {
      "epoch": 0.665268204974528,
      "grad_norm": 0.0325213186442852,
      "learning_rate": 7.4151766390304e-06,
      "loss": 0.0988,
      "step": 11100
    },
    {
      "epoch": 0.6712616122265508,
      "grad_norm": 0.06385581195354462,
      "learning_rate": 7.381879932074718e-06,
      "loss": 0.0996,
      "step": 11200
    },
    {
      "epoch": 0.6772550194785736,
      "grad_norm": 0.2656920850276947,
      "learning_rate": 7.348583225119036e-06,
      "loss": 0.1001,
      "step": 11300
    },
    {
      "epoch": 0.6832484267305964,
      "grad_norm": 0.04199494421482086,
      "learning_rate": 7.315286518163355e-06,
      "loss": 0.0994,
      "step": 11400
    },
    {
      "epoch": 0.6892418339826191,
      "grad_norm": 0.23018859326839447,
      "learning_rate": 7.2819898112076725e-06,
      "loss": 0.1008,
      "step": 11500
    },
    {
      "epoch": 0.6952352412346419,
      "grad_norm": 0.16758233308792114,
      "learning_rate": 7.248693104251991e-06,
      "loss": 0.0996,
      "step": 11600
    },
    {
      "epoch": 0.7012286484866647,
      "grad_norm": 0.07982943207025528,
      "learning_rate": 7.215396397296308e-06,
      "loss": 0.0987,
      "step": 11700
    },
    {
      "epoch": 0.7072220557386875,
      "grad_norm": 0.17444317042827606,
      "learning_rate": 7.1820996903406256e-06,
      "loss": 0.1003,
      "step": 11800
    },
    {
      "epoch": 0.7132154629907103,
      "grad_norm": 0.019602667540311813,
      "learning_rate": 7.148802983384944e-06,
      "loss": 0.0998,
      "step": 11900
    },
    {
      "epoch": 0.719208870242733,
      "grad_norm": 0.06751447916030884,
      "learning_rate": 7.115506276429261e-06,
      "loss": 0.0996,
      "step": 12000
    },
    {
      "epoch": 0.7252022774947557,
      "grad_norm": 0.6664422154426575,
      "learning_rate": 7.0822095694735795e-06,
      "loss": 0.0999,
      "step": 12100
    },
    {
      "epoch": 0.7311956847467785,
      "grad_norm": 0.11597349494695663,
      "learning_rate": 7.048912862517897e-06,
      "loss": 0.098,
      "step": 12200
    },
    {
      "epoch": 0.7371890919988013,
      "grad_norm": 0.021992051973938942,
      "learning_rate": 7.015616155562216e-06,
      "loss": 0.0997,
      "step": 12300
    },
    {
      "epoch": 0.7431824992508241,
      "grad_norm": 0.04850628226995468,
      "learning_rate": 6.9823194486065335e-06,
      "loss": 0.1002,
      "step": 12400
    },
    {
      "epoch": 0.7491759065028468,
      "grad_norm": 0.05067530274391174,
      "learning_rate": 6.949022741650852e-06,
      "loss": 0.0993,
      "step": 12500
    },
    {
      "epoch": 0.7551693137548696,
      "grad_norm": 0.0720600038766861,
      "learning_rate": 6.915726034695169e-06,
      "loss": 0.0993,
      "step": 12600
    },
    {
      "epoch": 0.7611627210068924,
      "grad_norm": 0.05677128955721855,
      "learning_rate": 6.8824293277394874e-06,
      "loss": 0.0999,
      "step": 12700
    },
    {
      "epoch": 0.7671561282589152,
      "grad_norm": 0.06587408483028412,
      "learning_rate": 6.849132620783805e-06,
      "loss": 0.0989,
      "step": 12800
    },
    {
      "epoch": 0.773149535510938,
      "grad_norm": 0.1323852837085724,
      "learning_rate": 6.815835913828123e-06,
      "loss": 0.0992,
      "step": 12900
    },
    {
      "epoch": 0.7791429427629607,
      "grad_norm": 0.016273049637675285,
      "learning_rate": 6.7825392068724405e-06,
      "loss": 0.0999,
      "step": 13000
    },
    {
      "epoch": 0.7851363500149835,
      "grad_norm": 0.11021006107330322,
      "learning_rate": 6.749242499916758e-06,
      "loss": 0.0993,
      "step": 13100
    },
    {
      "epoch": 0.7911297572670063,
      "grad_norm": 0.03431228548288345,
      "learning_rate": 6.715945792961077e-06,
      "loss": 0.0995,
      "step": 13200
    },
    {
      "epoch": 0.7971231645190291,
      "grad_norm": 0.19659392535686493,
      "learning_rate": 6.682649086005395e-06,
      "loss": 0.0991,
      "step": 13300
    },
    {
      "epoch": 0.8031165717710519,
      "grad_norm": 0.17087887227535248,
      "learning_rate": 6.649352379049713e-06,
      "loss": 0.0999,
      "step": 13400
    },
    {
      "epoch": 0.8091099790230746,
      "grad_norm": 0.04571348801255226,
      "learning_rate": 6.61605567209403e-06,
      "loss": 0.0987,
      "step": 13500
    },
    {
      "epoch": 0.8151033862750974,
      "grad_norm": 0.06407848745584488,
      "learning_rate": 6.5827589651383484e-06,
      "loss": 0.0995,
      "step": 13600
    },
    {
      "epoch": 0.8210967935271202,
      "grad_norm": 0.041546691209077835,
      "learning_rate": 6.549462258182666e-06,
      "loss": 0.1001,
      "step": 13700
    },
    {
      "epoch": 0.827090200779143,
      "grad_norm": 0.027348553761839867,
      "learning_rate": 6.516165551226984e-06,
      "loss": 0.099,
      "step": 13800
    },
    {
      "epoch": 0.8330836080311658,
      "grad_norm": 0.08637899160385132,
      "learning_rate": 6.4828688442713015e-06,
      "loss": 0.0987,
      "step": 13900
    },
    {
      "epoch": 0.8390770152831885,
      "grad_norm": 0.02239215560257435,
      "learning_rate": 6.44957213731562e-06,
      "loss": 0.0992,
      "step": 14000
    },
    {
      "epoch": 0.8450704225352113,
      "grad_norm": 0.03839031606912613,
      "learning_rate": 6.416275430359938e-06,
      "loss": 0.0987,
      "step": 14100
    },
    {
      "epoch": 0.851063829787234,
      "grad_norm": 0.08829605579376221,
      "learning_rate": 6.382978723404256e-06,
      "loss": 0.0988,
      "step": 14200
    },
    {
      "epoch": 0.8570572370392568,
      "grad_norm": 0.2102310210466385,
      "learning_rate": 6.349682016448574e-06,
      "loss": 0.0991,
      "step": 14300
    },
    {
      "epoch": 0.8630506442912796,
      "grad_norm": 0.033268921077251434,
      "learning_rate": 6.316385309492892e-06,
      "loss": 0.0983,
      "step": 14400
    },
    {
      "epoch": 0.8690440515433023,
      "grad_norm": 0.040406860411167145,
      "learning_rate": 6.2830886025372095e-06,
      "loss": 0.0973,
      "step": 14500
    },
    {
      "epoch": 0.8750374587953251,
      "grad_norm": 0.04502435401082039,
      "learning_rate": 6.249791895581528e-06,
      "loss": 0.0999,
      "step": 14600
    },
    {
      "epoch": 0.8810308660473479,
      "grad_norm": 0.026787765324115753,
      "learning_rate": 6.216495188625845e-06,
      "loss": 0.1002,
      "step": 14700
    },
    {
      "epoch": 0.8870242732993707,
      "grad_norm": 0.13938550651073456,
      "learning_rate": 6.1831984816701626e-06,
      "loss": 0.099,
      "step": 14800
    },
    {
      "epoch": 0.8930176805513935,
      "grad_norm": 0.135736882686615,
      "learning_rate": 6.149901774714481e-06,
      "loss": 0.0999,
      "step": 14900
    },
    {
      "epoch": 0.8990110878034162,
      "grad_norm": 0.15793441236019135,
      "learning_rate": 6.1166050677588e-06,
      "loss": 0.0989,
      "step": 15000
    },
    {
      "epoch": 0.905004495055439,
      "grad_norm": 0.08003054559230804,
      "learning_rate": 6.083308360803117e-06,
      "loss": 0.0991,
      "step": 15100
    },
    {
      "epoch": 0.9109979023074618,
      "grad_norm": 0.0406046137213707,
      "learning_rate": 6.050011653847435e-06,
      "loss": 0.0985,
      "step": 15200
    },
    {
      "epoch": 0.9169913095594846,
      "grad_norm": 0.04845142737030983,
      "learning_rate": 6.016714946891753e-06,
      "loss": 0.099,
      "step": 15300
    },
    {
      "epoch": 0.9229847168115073,
      "grad_norm": 0.023725613951683044,
      "learning_rate": 5.9834182399360705e-06,
      "loss": 0.0995,
      "step": 15400
    },
    {
      "epoch": 0.9289781240635301,
      "grad_norm": 0.04039942845702171,
      "learning_rate": 5.950121532980389e-06,
      "loss": 0.0996,
      "step": 15500
    },
    {
      "epoch": 0.9349715313155529,
      "grad_norm": 0.026223324239253998,
      "learning_rate": 5.916824826024706e-06,
      "loss": 0.0997,
      "step": 15600
    },
    {
      "epoch": 0.9409649385675757,
      "grad_norm": 0.27790361642837524,
      "learning_rate": 5.883528119069024e-06,
      "loss": 0.099,
      "step": 15700
    },
    {
      "epoch": 0.9469583458195985,
      "grad_norm": 0.058665353804826736,
      "learning_rate": 5.850231412113343e-06,
      "loss": 0.0998,
      "step": 15800
    },
    {
      "epoch": 0.9529517530716212,
      "grad_norm": 0.02454979345202446,
      "learning_rate": 5.816934705157661e-06,
      "loss": 0.1003,
      "step": 15900
    },
    {
      "epoch": 0.958945160323644,
      "grad_norm": 0.06335332244634628,
      "learning_rate": 5.783637998201978e-06,
      "loss": 0.0995,
      "step": 16000
    },
    {
      "epoch": 0.9649385675756668,
      "grad_norm": 0.10977023839950562,
      "learning_rate": 5.750341291246297e-06,
      "loss": 0.0974,
      "step": 16100
    },
    {
      "epoch": 0.9709319748276896,
      "grad_norm": 0.025012081488966942,
      "learning_rate": 5.717044584290614e-06,
      "loss": 0.0992,
      "step": 16200
    },
    {
      "epoch": 0.9769253820797124,
      "grad_norm": 0.022056572139263153,
      "learning_rate": 5.683747877334932e-06,
      "loss": 0.099,
      "step": 16300
    },
    {
      "epoch": 0.982918789331735,
      "grad_norm": 0.10653574019670486,
      "learning_rate": 5.65045117037925e-06,
      "loss": 0.1009,
      "step": 16400
    },
    {
      "epoch": 0.9889121965837578,
      "grad_norm": 0.04108584672212601,
      "learning_rate": 5.617154463423567e-06,
      "loss": 0.0995,
      "step": 16500
    },
    {
      "epoch": 0.9949056038357806,
      "grad_norm": 0.05849546194076538,
      "learning_rate": 5.583857756467885e-06,
      "loss": 0.0991,
      "step": 16600
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7163220105947296,
      "eval_f1": 0.6846578561420671,
      "eval_loss": 0.09925069659948349,
      "eval_precision": 0.6620272988688642,
      "eval_recall": 0.7163220105947296,
      "eval_runtime": 253.2812,
      "eval_samples_per_second": 58.555,
      "eval_steps_per_second": 7.32,
      "step": 16685
    },
    {
      "epoch": 1.0008990110878033,
      "grad_norm": 0.022984975948929787,
      "learning_rate": 5.5505610495122045e-06,
      "loss": 0.0986,
      "step": 16700
    },
    {
      "epoch": 1.0068924183398262,
      "grad_norm": 0.022939996793866158,
      "learning_rate": 5.517264342556522e-06,
      "loss": 0.0987,
      "step": 16800
    },
    {
      "epoch": 1.012885825591849,
      "grad_norm": 0.014104674570262432,
      "learning_rate": 5.483967635600839e-06,
      "loss": 0.098,
      "step": 16900
    },
    {
      "epoch": 1.0188792328438718,
      "grad_norm": 0.02679424174129963,
      "learning_rate": 5.450670928645158e-06,
      "loss": 0.0978,
      "step": 17000
    },
    {
      "epoch": 1.0248726400958945,
      "grad_norm": 0.007641422096639872,
      "learning_rate": 5.417374221689475e-06,
      "loss": 0.0982,
      "step": 17100
    },
    {
      "epoch": 1.0308660473479172,
      "grad_norm": 0.03306014835834503,
      "learning_rate": 5.384077514733793e-06,
      "loss": 0.0972,
      "step": 17200
    },
    {
      "epoch": 1.0368594545999401,
      "grad_norm": 0.053686805069446564,
      "learning_rate": 5.350780807778111e-06,
      "loss": 0.0976,
      "step": 17300
    },
    {
      "epoch": 1.0428528618519628,
      "grad_norm": 0.05771619454026222,
      "learning_rate": 5.317484100822429e-06,
      "loss": 0.0986,
      "step": 17400
    },
    {
      "epoch": 1.0488462691039857,
      "grad_norm": 0.08814644813537598,
      "learning_rate": 5.2841873938667464e-06,
      "loss": 0.0978,
      "step": 17500
    },
    {
      "epoch": 1.0548396763560084,
      "grad_norm": 0.18690770864486694,
      "learning_rate": 5.2508906869110655e-06,
      "loss": 0.0986,
      "step": 17600
    },
    {
      "epoch": 1.060833083608031,
      "grad_norm": 0.01733516715466976,
      "learning_rate": 5.217593979955383e-06,
      "loss": 0.0987,
      "step": 17700
    },
    {
      "epoch": 1.066826490860054,
      "grad_norm": 0.12736350297927856,
      "learning_rate": 5.184297272999701e-06,
      "loss": 0.0987,
      "step": 17800
    },
    {
      "epoch": 1.0728198981120767,
      "grad_norm": 0.1056164801120758,
      "learning_rate": 5.151000566044019e-06,
      "loss": 0.0984,
      "step": 17900
    },
    {
      "epoch": 1.0788133053640996,
      "grad_norm": 0.014793598093092442,
      "learning_rate": 5.117703859088337e-06,
      "loss": 0.0973,
      "step": 18000
    },
    {
      "epoch": 1.0848067126161223,
      "grad_norm": 0.015460701659321785,
      "learning_rate": 5.084407152132654e-06,
      "loss": 0.0975,
      "step": 18100
    },
    {
      "epoch": 1.090800119868145,
      "grad_norm": 0.01873720809817314,
      "learning_rate": 5.051110445176972e-06,
      "loss": 0.0987,
      "step": 18200
    },
    {
      "epoch": 1.0967935271201679,
      "grad_norm": 0.01362083200365305,
      "learning_rate": 5.01781373822129e-06,
      "loss": 0.0984,
      "step": 18300
    },
    {
      "epoch": 1.1027869343721906,
      "grad_norm": 0.019239643588662148,
      "learning_rate": 4.984517031265608e-06,
      "loss": 0.0986,
      "step": 18400
    },
    {
      "epoch": 1.1087803416242135,
      "grad_norm": 0.03043738380074501,
      "learning_rate": 4.951220324309926e-06,
      "loss": 0.0986,
      "step": 18500
    },
    {
      "epoch": 1.1147737488762361,
      "grad_norm": 0.12742625176906586,
      "learning_rate": 4.917923617354244e-06,
      "loss": 0.0983,
      "step": 18600
    },
    {
      "epoch": 1.1207671561282588,
      "grad_norm": 0.18706390261650085,
      "learning_rate": 4.884626910398562e-06,
      "loss": 0.0991,
      "step": 18700
    },
    {
      "epoch": 1.1267605633802817,
      "grad_norm": 0.21639247238636017,
      "learning_rate": 4.85133020344288e-06,
      "loss": 0.0984,
      "step": 18800
    },
    {
      "epoch": 1.1327539706323044,
      "grad_norm": 0.06566977500915527,
      "learning_rate": 4.818033496487198e-06,
      "loss": 0.0983,
      "step": 18900
    },
    {
      "epoch": 1.1387473778843273,
      "grad_norm": 0.06070713698863983,
      "learning_rate": 4.784736789531515e-06,
      "loss": 0.0984,
      "step": 19000
    },
    {
      "epoch": 1.14474078513635,
      "grad_norm": 0.01181207038462162,
      "learning_rate": 4.751440082575834e-06,
      "loss": 0.0967,
      "step": 19100
    },
    {
      "epoch": 1.1507341923883727,
      "grad_norm": 0.014737865887582302,
      "learning_rate": 4.718143375620152e-06,
      "loss": 0.0979,
      "step": 19200
    },
    {
      "epoch": 1.1567275996403956,
      "grad_norm": 0.09678817540407181,
      "learning_rate": 4.684846668664469e-06,
      "loss": 0.0987,
      "step": 19300
    },
    {
      "epoch": 1.1627210068924183,
      "grad_norm": 0.042408786714076996,
      "learning_rate": 4.6515499617087876e-06,
      "loss": 0.0989,
      "step": 19400
    },
    {
      "epoch": 1.168714414144441,
      "grad_norm": 0.026022640988230705,
      "learning_rate": 4.618253254753105e-06,
      "loss": 0.0976,
      "step": 19500
    },
    {
      "epoch": 1.174707821396464,
      "grad_norm": 0.029273219406604767,
      "learning_rate": 4.584956547797423e-06,
      "loss": 0.0971,
      "step": 19600
    },
    {
      "epoch": 1.1807012286484866,
      "grad_norm": 0.019847506657242775,
      "learning_rate": 4.5516598408417415e-06,
      "loss": 0.0979,
      "step": 19700
    },
    {
      "epoch": 1.1866946359005095,
      "grad_norm": 0.09878643602132797,
      "learning_rate": 4.518363133886059e-06,
      "loss": 0.0983,
      "step": 19800
    },
    {
      "epoch": 1.1926880431525322,
      "grad_norm": 0.031175991520285606,
      "learning_rate": 4.485066426930376e-06,
      "loss": 0.0986,
      "step": 19900
    },
    {
      "epoch": 1.198681450404555,
      "grad_norm": 0.057501692324876785,
      "learning_rate": 4.4517697199746955e-06,
      "loss": 0.098,
      "step": 20000
    },
    {
      "epoch": 1.2046748576565778,
      "grad_norm": 0.06590039283037186,
      "learning_rate": 4.418473013019013e-06,
      "loss": 0.0978,
      "step": 20100
    },
    {
      "epoch": 1.2106682649086005,
      "grad_norm": 0.021284347400069237,
      "learning_rate": 4.38517630606333e-06,
      "loss": 0.0982,
      "step": 20200
    },
    {
      "epoch": 1.2166616721606234,
      "grad_norm": 0.26528528332710266,
      "learning_rate": 4.3518795991076486e-06,
      "loss": 0.0978,
      "step": 20300
    },
    {
      "epoch": 1.222655079412646,
      "grad_norm": 0.06511413305997849,
      "learning_rate": 4.318582892151967e-06,
      "loss": 0.099,
      "step": 20400
    },
    {
      "epoch": 1.2286484866646687,
      "grad_norm": 0.048637229949235916,
      "learning_rate": 4.285286185196284e-06,
      "loss": 0.0978,
      "step": 20500
    },
    {
      "epoch": 1.2346418939166917,
      "grad_norm": 0.021369967609643936,
      "learning_rate": 4.2519894782406025e-06,
      "loss": 0.0984,
      "step": 20600
    },
    {
      "epoch": 1.2406353011687143,
      "grad_norm": 0.5301014184951782,
      "learning_rate": 4.21869277128492e-06,
      "loss": 0.0975,
      "step": 20700
    },
    {
      "epoch": 1.2466287084207373,
      "grad_norm": 0.01625882275402546,
      "learning_rate": 4.185396064329238e-06,
      "loss": 0.0983,
      "step": 20800
    },
    {
      "epoch": 1.25262211567276,
      "grad_norm": 0.16671617329120636,
      "learning_rate": 4.1520993573735565e-06,
      "loss": 0.0986,
      "step": 20900
    },
    {
      "epoch": 1.2586155229247828,
      "grad_norm": 0.15410806238651276,
      "learning_rate": 4.118802650417874e-06,
      "loss": 0.0992,
      "step": 21000
    },
    {
      "epoch": 1.2646089301768055,
      "grad_norm": 0.030888069421052933,
      "learning_rate": 4.085505943462192e-06,
      "loss": 0.0981,
      "step": 21100
    },
    {
      "epoch": 1.2706023374288282,
      "grad_norm": 0.014099245890974998,
      "learning_rate": 4.05220923650651e-06,
      "loss": 0.0985,
      "step": 21200
    },
    {
      "epoch": 1.2765957446808511,
      "grad_norm": 0.019736776128411293,
      "learning_rate": 4.018912529550828e-06,
      "loss": 0.0976,
      "step": 21300
    },
    {
      "epoch": 1.2825891519328738,
      "grad_norm": 0.011912483721971512,
      "learning_rate": 3.985615822595146e-06,
      "loss": 0.0987,
      "step": 21400
    },
    {
      "epoch": 1.2885825591848965,
      "grad_norm": 0.030794724822044373,
      "learning_rate": 3.9523191156394635e-06,
      "loss": 0.0986,
      "step": 21500
    },
    {
      "epoch": 1.2945759664369194,
      "grad_norm": 0.018548903986811638,
      "learning_rate": 3.919022408683781e-06,
      "loss": 0.0993,
      "step": 21600
    },
    {
      "epoch": 1.300569373688942,
      "grad_norm": 0.0847175121307373,
      "learning_rate": 3.885725701728099e-06,
      "loss": 0.0985,
      "step": 21700
    },
    {
      "epoch": 1.306562780940965,
      "grad_norm": 0.02340412326157093,
      "learning_rate": 3.8524289947724175e-06,
      "loss": 0.0988,
      "step": 21800
    },
    {
      "epoch": 1.3125561881929877,
      "grad_norm": 0.06334514170885086,
      "learning_rate": 3.819132287816735e-06,
      "loss": 0.0988,
      "step": 21900
    },
    {
      "epoch": 1.3185495954450106,
      "grad_norm": 0.03040698543190956,
      "learning_rate": 3.785835580861053e-06,
      "loss": 0.0984,
      "step": 22000
    },
    {
      "epoch": 1.3245430026970333,
      "grad_norm": 0.4113400876522064,
      "learning_rate": 3.752538873905371e-06,
      "loss": 0.0989,
      "step": 22100
    },
    {
      "epoch": 1.330536409949056,
      "grad_norm": 0.03668079525232315,
      "learning_rate": 3.7192421669496893e-06,
      "loss": 0.0983,
      "step": 22200
    },
    {
      "epoch": 1.3365298172010789,
      "grad_norm": 0.040583133697509766,
      "learning_rate": 3.685945459994007e-06,
      "loss": 0.0982,
      "step": 22300
    },
    {
      "epoch": 1.3425232244531016,
      "grad_norm": 0.2958316504955292,
      "learning_rate": 3.652648753038325e-06,
      "loss": 0.0976,
      "step": 22400
    },
    {
      "epoch": 1.3485166317051243,
      "grad_norm": 0.07291315495967865,
      "learning_rate": 3.6193520460826424e-06,
      "loss": 0.0993,
      "step": 22500
    },
    {
      "epoch": 1.3545100389571472,
      "grad_norm": 0.16284628212451935,
      "learning_rate": 3.586055339126961e-06,
      "loss": 0.0979,
      "step": 22600
    },
    {
      "epoch": 1.3605034462091699,
      "grad_norm": 0.018767952919006348,
      "learning_rate": 3.5527586321712785e-06,
      "loss": 0.0983,
      "step": 22700
    },
    {
      "epoch": 1.3664968534611928,
      "grad_norm": 0.32872429490089417,
      "learning_rate": 3.5194619252155963e-06,
      "loss": 0.0971,
      "step": 22800
    },
    {
      "epoch": 1.3724902607132154,
      "grad_norm": 0.10787306725978851,
      "learning_rate": 3.486165218259914e-06,
      "loss": 0.0988,
      "step": 22900
    },
    {
      "epoch": 1.3784836679652384,
      "grad_norm": 0.11872507631778717,
      "learning_rate": 3.452868511304232e-06,
      "loss": 0.0985,
      "step": 23000
    },
    {
      "epoch": 1.384477075217261,
      "grad_norm": 0.01392502523958683,
      "learning_rate": 3.4195718043485503e-06,
      "loss": 0.0987,
      "step": 23100
    },
    {
      "epoch": 1.3904704824692837,
      "grad_norm": 0.2750115394592285,
      "learning_rate": 3.386275097392868e-06,
      "loss": 0.0997,
      "step": 23200
    },
    {
      "epoch": 1.3964638897213066,
      "grad_norm": 0.01633540168404579,
      "learning_rate": 3.352978390437186e-06,
      "loss": 0.0994,
      "step": 23300
    },
    {
      "epoch": 1.4024572969733293,
      "grad_norm": 0.018824318423867226,
      "learning_rate": 3.319681683481504e-06,
      "loss": 0.0985,
      "step": 23400
    },
    {
      "epoch": 1.408450704225352,
      "grad_norm": 0.09030365198850632,
      "learning_rate": 3.286384976525822e-06,
      "loss": 0.0984,
      "step": 23500
    },
    {
      "epoch": 1.414444111477375,
      "grad_norm": 0.014728711917996407,
      "learning_rate": 3.25308826957014e-06,
      "loss": 0.0986,
      "step": 23600
    },
    {
      "epoch": 1.4204375187293976,
      "grad_norm": 0.1276281625032425,
      "learning_rate": 3.2197915626144578e-06,
      "loss": 0.0988,
      "step": 23700
    },
    {
      "epoch": 1.4264309259814205,
      "grad_norm": 0.0615570992231369,
      "learning_rate": 3.1864948556587756e-06,
      "loss": 0.0984,
      "step": 23800
    },
    {
      "epoch": 1.4324243332334432,
      "grad_norm": 0.04221341758966446,
      "learning_rate": 3.1531981487030935e-06,
      "loss": 0.0977,
      "step": 23900
    },
    {
      "epoch": 1.4384177404854661,
      "grad_norm": 0.022126736119389534,
      "learning_rate": 3.1199014417474117e-06,
      "loss": 0.0982,
      "step": 24000
    },
    {
      "epoch": 1.4444111477374888,
      "grad_norm": 0.02455032989382744,
      "learning_rate": 3.0866047347917296e-06,
      "loss": 0.0976,
      "step": 24100
    },
    {
      "epoch": 1.4504045549895115,
      "grad_norm": 0.5411427617073059,
      "learning_rate": 3.053308027836047e-06,
      "loss": 0.0971,
      "step": 24200
    },
    {
      "epoch": 1.4563979622415344,
      "grad_norm": 0.22378544509410858,
      "learning_rate": 3.020011320880365e-06,
      "loss": 0.0991,
      "step": 24300
    },
    {
      "epoch": 1.462391369493557,
      "grad_norm": 0.03697386384010315,
      "learning_rate": 2.9867146139246835e-06,
      "loss": 0.0987,
      "step": 24400
    },
    {
      "epoch": 1.4683847767455798,
      "grad_norm": 0.05567880719900131,
      "learning_rate": 2.953417906969001e-06,
      "loss": 0.0973,
      "step": 24500
    },
    {
      "epoch": 1.4743781839976027,
      "grad_norm": 0.008923672139644623,
      "learning_rate": 2.9201212000133188e-06,
      "loss": 0.0974,
      "step": 24600
    },
    {
      "epoch": 1.4803715912496254,
      "grad_norm": 0.7570419907569885,
      "learning_rate": 2.8868244930576366e-06,
      "loss": 0.099,
      "step": 24700
    },
    {
      "epoch": 1.486364998501648,
      "grad_norm": 0.03995317593216896,
      "learning_rate": 2.853527786101955e-06,
      "loss": 0.0976,
      "step": 24800
    },
    {
      "epoch": 1.492358405753671,
      "grad_norm": 0.015565958805382252,
      "learning_rate": 2.8202310791462727e-06,
      "loss": 0.0971,
      "step": 24900
    },
    {
      "epoch": 1.4983518130056939,
      "grad_norm": 0.06947562098503113,
      "learning_rate": 2.7869343721905906e-06,
      "loss": 0.0986,
      "step": 25000
    },
    {
      "epoch": 1.5043452202577166,
      "grad_norm": 0.09315433353185654,
      "learning_rate": 2.7536376652349084e-06,
      "loss": 0.099,
      "step": 25100
    },
    {
      "epoch": 1.5103386275097392,
      "grad_norm": 0.03829352185130119,
      "learning_rate": 2.7203409582792263e-06,
      "loss": 0.0986,
      "step": 25200
    },
    {
      "epoch": 1.5163320347617621,
      "grad_norm": 0.0827546939253807,
      "learning_rate": 2.6870442513235445e-06,
      "loss": 0.0981,
      "step": 25300
    },
    {
      "epoch": 1.5223254420137848,
      "grad_norm": 0.009773751720786095,
      "learning_rate": 2.6537475443678624e-06,
      "loss": 0.0981,
      "step": 25400
    },
    {
      "epoch": 1.5283188492658075,
      "grad_norm": 0.03792467713356018,
      "learning_rate": 2.6204508374121802e-06,
      "loss": 0.0971,
      "step": 25500
    },
    {
      "epoch": 1.5343122565178304,
      "grad_norm": 0.2956952452659607,
      "learning_rate": 2.587154130456498e-06,
      "loss": 0.0973,
      "step": 25600
    },
    {
      "epoch": 1.5403056637698531,
      "grad_norm": 0.007948148064315319,
      "learning_rate": 2.5538574235008163e-06,
      "loss": 0.0992,
      "step": 25700
    },
    {
      "epoch": 1.5462990710218758,
      "grad_norm": 0.030265573412179947,
      "learning_rate": 2.520560716545134e-06,
      "loss": 0.0978,
      "step": 25800
    },
    {
      "epoch": 1.5522924782738987,
      "grad_norm": 0.06282780319452286,
      "learning_rate": 2.487264009589452e-06,
      "loss": 0.0988,
      "step": 25900
    },
    {
      "epoch": 1.5582858855259216,
      "grad_norm": 0.19928309321403503,
      "learning_rate": 2.4539673026337694e-06,
      "loss": 0.0973,
      "step": 26000
    },
    {
      "epoch": 1.5642792927779443,
      "grad_norm": 0.3808630108833313,
      "learning_rate": 2.4206705956780877e-06,
      "loss": 0.0983,
      "step": 26100
    },
    {
      "epoch": 1.570272700029967,
      "grad_norm": 0.07692461460828781,
      "learning_rate": 2.3873738887224055e-06,
      "loss": 0.0971,
      "step": 26200
    },
    {
      "epoch": 1.57626610728199,
      "grad_norm": 0.0316767655313015,
      "learning_rate": 2.3540771817667234e-06,
      "loss": 0.0969,
      "step": 26300
    },
    {
      "epoch": 1.5822595145340126,
      "grad_norm": 0.054147470742464066,
      "learning_rate": 2.3207804748110412e-06,
      "loss": 0.0987,
      "step": 26400
    },
    {
      "epoch": 1.5882529217860353,
      "grad_norm": 0.021603476256132126,
      "learning_rate": 2.2874837678553595e-06,
      "loss": 0.0993,
      "step": 26500
    },
    {
      "epoch": 1.5942463290380582,
      "grad_norm": 0.008439415134489536,
      "learning_rate": 2.2541870608996773e-06,
      "loss": 0.0961,
      "step": 26600
    },
    {
      "epoch": 1.6002397362900809,
      "grad_norm": 0.019180960953235626,
      "learning_rate": 2.220890353943995e-06,
      "loss": 0.0973,
      "step": 26700
    },
    {
      "epoch": 1.6062331435421036,
      "grad_norm": 0.0572553388774395,
      "learning_rate": 2.187593646988313e-06,
      "loss": 0.0985,
      "step": 26800
    },
    {
      "epoch": 1.6122265507941265,
      "grad_norm": 0.06225167587399483,
      "learning_rate": 2.154296940032631e-06,
      "loss": 0.0995,
      "step": 26900
    },
    {
      "epoch": 1.6182199580461494,
      "grad_norm": 0.024926872923970222,
      "learning_rate": 2.1210002330769487e-06,
      "loss": 0.0981,
      "step": 27000
    },
    {
      "epoch": 1.624213365298172,
      "grad_norm": 0.04497619345784187,
      "learning_rate": 2.0877035261212665e-06,
      "loss": 0.0983,
      "step": 27100
    },
    {
      "epoch": 1.6302067725501947,
      "grad_norm": 0.008968952111899853,
      "learning_rate": 2.054406819165585e-06,
      "loss": 0.0989,
      "step": 27200
    },
    {
      "epoch": 1.6362001798022177,
      "grad_norm": 0.5678277611732483,
      "learning_rate": 2.0211101122099027e-06,
      "loss": 0.0977,
      "step": 27300
    },
    {
      "epoch": 1.6421935870542403,
      "grad_norm": 0.006168660242110491,
      "learning_rate": 1.9878134052542205e-06,
      "loss": 0.0987,
      "step": 27400
    },
    {
      "epoch": 1.648186994306263,
      "grad_norm": 0.012832088395953178,
      "learning_rate": 1.9545166982985383e-06,
      "loss": 0.0977,
      "step": 27500
    },
    {
      "epoch": 1.654180401558286,
      "grad_norm": 0.019368445500731468,
      "learning_rate": 1.9212199913428566e-06,
      "loss": 0.0983,
      "step": 27600
    },
    {
      "epoch": 1.6601738088103086,
      "grad_norm": 0.014382258988916874,
      "learning_rate": 1.8879232843871742e-06,
      "loss": 0.0983,
      "step": 27700
    },
    {
      "epoch": 1.6661672160623313,
      "grad_norm": 0.020989634096622467,
      "learning_rate": 1.8546265774314923e-06,
      "loss": 0.098,
      "step": 27800
    },
    {
      "epoch": 1.6721606233143542,
      "grad_norm": 0.06429729610681534,
      "learning_rate": 1.8213298704758101e-06,
      "loss": 0.0983,
      "step": 27900
    },
    {
      "epoch": 1.6781540305663771,
      "grad_norm": 0.06610631942749023,
      "learning_rate": 1.7880331635201282e-06,
      "loss": 0.0982,
      "step": 28000
    },
    {
      "epoch": 1.6841474378183996,
      "grad_norm": 0.018632225692272186,
      "learning_rate": 1.7547364565644458e-06,
      "loss": 0.0981,
      "step": 28100
    },
    {
      "epoch": 1.6901408450704225,
      "grad_norm": 0.0523594506084919,
      "learning_rate": 1.7214397496087637e-06,
      "loss": 0.0984,
      "step": 28200
    },
    {
      "epoch": 1.6961342523224454,
      "grad_norm": 0.2683553993701935,
      "learning_rate": 1.6881430426530817e-06,
      "loss": 0.0982,
      "step": 28300
    },
    {
      "epoch": 1.702127659574468,
      "grad_norm": 0.0381014347076416,
      "learning_rate": 1.6548463356973996e-06,
      "loss": 0.0982,
      "step": 28400
    },
    {
      "epoch": 1.7081210668264908,
      "grad_norm": 0.0280121061950922,
      "learning_rate": 1.6215496287417176e-06,
      "loss": 0.0993,
      "step": 28500
    },
    {
      "epoch": 1.7141144740785137,
      "grad_norm": 0.028142858296632767,
      "learning_rate": 1.5882529217860355e-06,
      "loss": 0.0979,
      "step": 28600
    },
    {
      "epoch": 1.7201078813305364,
      "grad_norm": 0.17541886866092682,
      "learning_rate": 1.5549562148303535e-06,
      "loss": 0.0974,
      "step": 28700
    },
    {
      "epoch": 1.726101288582559,
      "grad_norm": 0.02304765395820141,
      "learning_rate": 1.5216595078746714e-06,
      "loss": 0.0972,
      "step": 28800
    },
    {
      "epoch": 1.732094695834582,
      "grad_norm": 0.039642009884119034,
      "learning_rate": 1.4883628009189894e-06,
      "loss": 0.0982,
      "step": 28900
    },
    {
      "epoch": 1.7380881030866049,
      "grad_norm": 0.013521491549909115,
      "learning_rate": 1.455066093963307e-06,
      "loss": 0.0969,
      "step": 29000
    },
    {
      "epoch": 1.7440815103386273,
      "grad_norm": 0.01796823740005493,
      "learning_rate": 1.421769387007625e-06,
      "loss": 0.0983,
      "step": 29100
    },
    {
      "epoch": 1.7500749175906503,
      "grad_norm": 0.013001805171370506,
      "learning_rate": 1.388472680051943e-06,
      "loss": 0.0973,
      "step": 29200
    },
    {
      "epoch": 1.7560683248426732,
      "grad_norm": 0.16073040664196014,
      "learning_rate": 1.3551759730962608e-06,
      "loss": 0.0977,
      "step": 29300
    },
    {
      "epoch": 1.7620617320946959,
      "grad_norm": 0.0181583184748888,
      "learning_rate": 1.3218792661405788e-06,
      "loss": 0.098,
      "step": 29400
    },
    {
      "epoch": 1.7680551393467185,
      "grad_norm": 0.0457487516105175,
      "learning_rate": 1.2885825591848967e-06,
      "loss": 0.0978,
      "step": 29500
    },
    {
      "epoch": 1.7740485465987414,
      "grad_norm": 0.04452770575881004,
      "learning_rate": 1.2552858522292147e-06,
      "loss": 0.0982,
      "step": 29600
    },
    {
      "epoch": 1.7800419538507641,
      "grad_norm": 0.5107665657997131,
      "learning_rate": 1.2219891452735326e-06,
      "loss": 0.0972,
      "step": 29700
    },
    {
      "epoch": 1.7860353611027868,
      "grad_norm": 0.01403922587633133,
      "learning_rate": 1.1886924383178504e-06,
      "loss": 0.0986,
      "step": 29800
    },
    {
      "epoch": 1.7920287683548097,
      "grad_norm": 0.020552983507514,
      "learning_rate": 1.1553957313621683e-06,
      "loss": 0.0989,
      "step": 29900
    },
    {
      "epoch": 1.7980221756068326,
      "grad_norm": 0.011963661760091782,
      "learning_rate": 1.1220990244064863e-06,
      "loss": 0.098,
      "step": 30000
    }
  ],
  "logging_steps": 100,
  "max_steps": 33370,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 10000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 1.3389545272056584e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}