{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.09617148843410506,
  "eval_steps": 100000000,
  "global_step": 11800,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 8.15012613848348e-06,
      "grad_norm": 3.4710192680358887,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 11.2605,
      "step": 1
    },
    {
      "epoch": 0.0002445037841545044,
      "grad_norm": 5.464046955108643,
      "learning_rate": 3e-06,
      "loss": 10.9977,
      "step": 30
    },
    {
      "epoch": 0.0004890075683090088,
      "grad_norm": 1.1376756429672241,
      "learning_rate": 6e-06,
      "loss": 9.4082,
      "step": 60
    },
    {
      "epoch": 0.0007335113524635132,
      "grad_norm": 1.2878344058990479,
      "learning_rate": 9e-06,
      "loss": 8.549,
      "step": 90
    },
    {
      "epoch": 0.0009780151366180175,
      "grad_norm": 1.2800588607788086,
      "learning_rate": 1.2e-05,
      "loss": 8.0361,
      "step": 120
    },
    {
      "epoch": 0.001222518920772522,
      "grad_norm": 1.4315314292907715,
      "learning_rate": 1.5e-05,
      "loss": 7.653,
      "step": 150
    },
    {
      "epoch": 0.0014670227049270264,
      "grad_norm": 1.381317377090454,
      "learning_rate": 1.8e-05,
      "loss": 7.4179,
      "step": 180
    },
    {
      "epoch": 0.0017115264890815308,
      "grad_norm": 1.7989460229873657,
      "learning_rate": 2.1e-05,
      "loss": 7.2012,
      "step": 210
    },
    {
      "epoch": 0.001956030273236035,
      "grad_norm": 1.3431414365768433,
      "learning_rate": 2.4e-05,
      "loss": 7.0383,
      "step": 240
    },
    {
      "epoch": 0.0022005340573905395,
      "grad_norm": 1.028826117515564,
      "learning_rate": 2.7000000000000002e-05,
      "loss": 6.866,
      "step": 270
    },
    {
      "epoch": 0.002445037841545044,
      "grad_norm": 1.201025128364563,
      "learning_rate": 3e-05,
      "loss": 6.717,
      "step": 300
    },
    {
      "epoch": 0.0026895416256995483,
      "grad_norm": 1.1023098230361938,
      "learning_rate": 3.3e-05,
      "loss": 6.5535,
      "step": 330
    },
    {
      "epoch": 0.0029340454098540527,
      "grad_norm": 1.2839674949645996,
      "learning_rate": 3.6e-05,
      "loss": 6.4022,
      "step": 360
    },
    {
      "epoch": 0.003178549194008557,
      "grad_norm": 2.267265796661377,
      "learning_rate": 3.9000000000000006e-05,
      "loss": 6.2858,
      "step": 390
    },
    {
      "epoch": 0.0034230529781630616,
      "grad_norm": 1.0635628700256348,
      "learning_rate": 4.2e-05,
      "loss": 6.1681,
      "step": 420
    },
    {
      "epoch": 0.0036675567623175656,
      "grad_norm": 1.263838768005371,
      "learning_rate": 4.5e-05,
      "loss": 6.0728,
      "step": 450
    },
    {
      "epoch": 0.00391206054647207,
      "grad_norm": 1.4611454010009766,
      "learning_rate": 4.8e-05,
      "loss": 5.972,
      "step": 480
    },
    {
      "epoch": 0.0041565643306265745,
      "grad_norm": 1.0120561122894287,
      "learning_rate": 4.999999990869806e-05,
      "loss": 5.8619,
      "step": 510
    },
    {
      "epoch": 0.004401068114781079,
      "grad_norm": 1.1349974870681763,
      "learning_rate": 4.999999853916893e-05,
      "loss": 5.785,
      "step": 540
    },
    {
      "epoch": 0.004645571898935583,
      "grad_norm": 1.0840613842010498,
      "learning_rate": 4.9999995526204936e-05,
      "loss": 5.7071,
      "step": 570
    },
    {
      "epoch": 0.004890075683090088,
      "grad_norm": 1.258074402809143,
      "learning_rate": 4.999999086980628e-05,
      "loss": 5.6199,
      "step": 600
    },
    {
      "epoch": 0.005134579467244592,
      "grad_norm": 1.284726858139038,
      "learning_rate": 4.999998456997326e-05,
      "loss": 5.5465,
      "step": 630
    },
    {
      "epoch": 0.005379083251399097,
      "grad_norm": 1.2079874277114868,
      "learning_rate": 4.999997662670628e-05,
      "loss": 5.4816,
      "step": 660
    },
    {
      "epoch": 0.005623587035553601,
      "grad_norm": 1.3364052772521973,
      "learning_rate": 4.999996704000589e-05,
      "loss": 5.4079,
      "step": 690
    },
    {
      "epoch": 0.0058680908197081055,
      "grad_norm": 0.9860705137252808,
      "learning_rate": 4.99999558098727e-05,
      "loss": 5.3598,
      "step": 720
    },
    {
      "epoch": 0.00611259460386261,
      "grad_norm": 1.2071930170059204,
      "learning_rate": 4.9999942936307445e-05,
      "loss": 5.2884,
      "step": 750
    },
    {
      "epoch": 0.006357098388017114,
      "grad_norm": 0.8959563970565796,
      "learning_rate": 4.9999928419310994e-05,
      "loss": 5.2391,
      "step": 780
    },
    {
      "epoch": 0.006601602172171619,
      "grad_norm": 1.2356096506118774,
      "learning_rate": 4.999991225888427e-05,
      "loss": 5.1879,
      "step": 810
    },
    {
      "epoch": 0.006846105956326123,
      "grad_norm": 0.9705113172531128,
      "learning_rate": 4.999989445502837e-05,
      "loss": 5.1424,
      "step": 840
    },
    {
      "epoch": 0.007090609740480627,
      "grad_norm": 0.9504437446594238,
      "learning_rate": 4.9999875007744436e-05,
      "loss": 5.0966,
      "step": 870
    },
    {
      "epoch": 0.007335113524635131,
      "grad_norm": 0.9488673806190491,
      "learning_rate": 4.9999853917033756e-05,
      "loss": 5.0424,
      "step": 900
    },
    {
      "epoch": 0.007579617308789636,
      "grad_norm": 0.959373950958252,
      "learning_rate": 4.999983118289773e-05,
      "loss": 5.0387,
      "step": 930
    },
    {
      "epoch": 0.00782412109294414,
      "grad_norm": 0.8465414643287659,
      "learning_rate": 4.999980680533782e-05,
      "loss": 4.9769,
      "step": 960
    },
    {
      "epoch": 0.008068624877098645,
      "grad_norm": 0.8328993916511536,
      "learning_rate": 4.999978078435567e-05,
      "loss": 4.9335,
      "step": 990
    },
    {
      "epoch": 0.008313128661253149,
      "grad_norm": 0.8107655644416809,
      "learning_rate": 4.999975311995295e-05,
      "loss": 4.9214,
      "step": 1020
    },
    {
      "epoch": 0.008557632445407654,
      "grad_norm": 0.8149654865264893,
      "learning_rate": 4.99997238121315e-05,
      "loss": 4.8651,
      "step": 1050
    },
    {
      "epoch": 0.008802136229562158,
      "grad_norm": 0.8837414979934692,
      "learning_rate": 4.999969286089325e-05,
      "loss": 4.8327,
      "step": 1080
    },
    {
      "epoch": 0.009046640013716663,
      "grad_norm": 1.1360137462615967,
      "learning_rate": 4.9999660266240235e-05,
      "loss": 4.7906,
      "step": 1110
    },
    {
      "epoch": 0.009291143797871167,
      "grad_norm": 0.7992026209831238,
      "learning_rate": 4.9999626028174585e-05,
      "loss": 4.7612,
      "step": 1140
    },
    {
      "epoch": 0.00953564758202567,
      "grad_norm": 0.8481825590133667,
      "learning_rate": 4.999959014669856e-05,
      "loss": 4.7106,
      "step": 1170
    },
    {
      "epoch": 0.009780151366180176,
      "grad_norm": 0.8183879256248474,
      "learning_rate": 4.9999552621814513e-05,
      "loss": 4.6993,
      "step": 1200
    },
    {
      "epoch": 0.010024655150334679,
      "grad_norm": 0.8460689187049866,
      "learning_rate": 4.9999513453524917e-05,
      "loss": 4.6664,
      "step": 1230
    },
    {
      "epoch": 0.010269158934489184,
      "grad_norm": 0.8723706007003784,
      "learning_rate": 4.9999472641832336e-05,
      "loss": 4.6371,
      "step": 1260
    },
    {
      "epoch": 0.010513662718643688,
      "grad_norm": 0.7682787179946899,
      "learning_rate": 4.999943018673946e-05,
      "loss": 4.6184,
      "step": 1290
    },
    {
      "epoch": 0.010758166502798193,
      "grad_norm": 0.9049955606460571,
      "learning_rate": 4.999938608824909e-05,
      "loss": 4.5968,
      "step": 1320
    },
    {
      "epoch": 0.011002670286952697,
      "grad_norm": 0.7860899567604065,
      "learning_rate": 4.999934034636411e-05,
      "loss": 4.5266,
      "step": 1350
    },
    {
      "epoch": 0.011247174071107202,
      "grad_norm": 0.7918768525123596,
      "learning_rate": 4.999929296108753e-05,
      "loss": 4.5069,
      "step": 1380
    },
    {
      "epoch": 0.011491677855261706,
      "grad_norm": 0.6713089942932129,
      "learning_rate": 4.9999243932422466e-05,
      "loss": 4.4663,
      "step": 1410
    },
    {
      "epoch": 0.011736181639416211,
      "grad_norm": 0.756101131439209,
      "learning_rate": 4.999919326037215e-05,
      "loss": 4.4782,
      "step": 1440
    },
    {
      "epoch": 0.011980685423570715,
      "grad_norm": 0.7067996263504028,
      "learning_rate": 4.99991409449399e-05,
      "loss": 4.4388,
      "step": 1470
    },
    {
      "epoch": 0.01222518920772522,
      "grad_norm": 0.7132194638252258,
      "learning_rate": 4.999908698612916e-05,
      "loss": 4.4135,
      "step": 1500
    },
    {
      "epoch": 0.012469692991879723,
      "grad_norm": 0.7282501459121704,
      "learning_rate": 4.9999031383943486e-05,
      "loss": 4.4057,
      "step": 1530
    },
    {
      "epoch": 0.012714196776034229,
      "grad_norm": 0.6851722598075867,
      "learning_rate": 4.999897413838651e-05,
      "loss": 4.3612,
      "step": 1560
    },
    {
      "epoch": 0.012958700560188732,
      "grad_norm": 0.7953110337257385,
      "learning_rate": 4.999891524946202e-05,
      "loss": 4.3268,
      "step": 1590
    },
    {
      "epoch": 0.013203204344343238,
      "grad_norm": 0.726719319820404,
      "learning_rate": 4.999885471717387e-05,
      "loss": 4.314,
      "step": 1620
    },
    {
      "epoch": 0.013447708128497741,
      "grad_norm": 0.7354792356491089,
      "learning_rate": 4.999879254152605e-05,
      "loss": 4.2807,
      "step": 1650
    },
    {
      "epoch": 0.013692211912652246,
      "grad_norm": 0.7072017788887024,
      "learning_rate": 4.999872872252265e-05,
      "loss": 4.282,
      "step": 1680
    },
    {
      "epoch": 0.01393671569680675,
      "grad_norm": 0.690112292766571,
      "learning_rate": 4.999866326016785e-05,
      "loss": 4.2686,
      "step": 1710
    },
    {
      "epoch": 0.014181219480961254,
      "grad_norm": 0.697634756565094,
      "learning_rate": 4.999859615446596e-05,
      "loss": 4.2662,
      "step": 1740
    },
    {
      "epoch": 0.014425723265115759,
      "grad_norm": 0.6999862790107727,
      "learning_rate": 4.99985274054214e-05,
      "loss": 4.2207,
      "step": 1770
    },
    {
      "epoch": 0.014670227049270262,
      "grad_norm": 0.6920334100723267,
      "learning_rate": 4.999845701303868e-05,
      "loss": 4.2163,
      "step": 1800
    },
    {
      "epoch": 0.014914730833424768,
      "grad_norm": 0.6884493827819824,
      "learning_rate": 4.999838497732243e-05,
      "loss": 4.209,
      "step": 1830
    },
    {
      "epoch": 0.015159234617579271,
      "grad_norm": 0.776447057723999,
      "learning_rate": 4.999831129827739e-05,
      "loss": 4.1856,
      "step": 1860
    },
    {
      "epoch": 0.015403738401733777,
      "grad_norm": 0.7332949042320251,
      "learning_rate": 4.9998235975908394e-05,
      "loss": 4.156,
      "step": 1890
    },
    {
      "epoch": 0.01564824218588828,
      "grad_norm": 0.6691558361053467,
      "learning_rate": 4.99981590102204e-05,
      "loss": 4.1339,
      "step": 1920
    },
    {
      "epoch": 0.015892745970042785,
      "grad_norm": 0.6789201498031616,
      "learning_rate": 4.9998080401218464e-05,
      "loss": 4.1344,
      "step": 1950
    },
    {
      "epoch": 0.01613724975419729,
      "grad_norm": 0.6654175519943237,
      "learning_rate": 4.999800014890777e-05,
      "loss": 4.1295,
      "step": 1980
    },
    {
      "epoch": 0.016381753538351793,
      "grad_norm": 0.6632519364356995,
      "learning_rate": 4.9997918253293555e-05,
      "loss": 4.1036,
      "step": 2010
    },
    {
      "epoch": 0.016626257322506298,
      "grad_norm": 0.6618740558624268,
      "learning_rate": 4.999783471438124e-05,
      "loss": 4.0857,
      "step": 2040
    },
    {
      "epoch": 0.016870761106660803,
      "grad_norm": 0.6384952068328857,
      "learning_rate": 4.999774953217631e-05,
      "loss": 4.0755,
      "step": 2070
    },
    {
      "epoch": 0.01711526489081531,
      "grad_norm": 0.6808215975761414,
      "learning_rate": 4.9997662706684345e-05,
      "loss": 4.0568,
      "step": 2100
    },
    {
      "epoch": 0.01735976867496981,
      "grad_norm": 0.6514068841934204,
      "learning_rate": 4.999757423791107e-05,
      "loss": 4.0474,
      "step": 2130
    },
    {
      "epoch": 0.017604272459124316,
      "grad_norm": 0.6691142320632935,
      "learning_rate": 4.9997484125862306e-05,
      "loss": 4.0467,
      "step": 2160
    },
    {
      "epoch": 0.01784877624327882,
      "grad_norm": 0.6240984201431274,
      "learning_rate": 4.999739237054395e-05,
      "loss": 4.0259,
      "step": 2190
    },
    {
      "epoch": 0.018093280027433326,
      "grad_norm": 0.6286123991012573,
      "learning_rate": 4.9997298971962065e-05,
      "loss": 4.0098,
      "step": 2220
    },
    {
      "epoch": 0.018337783811587828,
      "grad_norm": 0.6232910752296448,
      "learning_rate": 4.999720393012277e-05,
      "loss": 4.0203,
      "step": 2250
    },
    {
      "epoch": 0.018582287595742333,
      "grad_norm": 0.6792376041412354,
      "learning_rate": 4.999710724503233e-05,
      "loss": 3.9909,
      "step": 2280
    },
    {
      "epoch": 0.01882679137989684,
      "grad_norm": 0.6824718117713928,
      "learning_rate": 4.9997008916697075e-05,
      "loss": 3.9922,
      "step": 2310
    },
    {
      "epoch": 0.01907129516405134,
      "grad_norm": 0.6520410776138306,
      "learning_rate": 4.999690894512349e-05,
      "loss": 3.9533,
      "step": 2340
    },
    {
      "epoch": 0.019315798948205846,
      "grad_norm": 0.6467030048370361,
      "learning_rate": 4.999680733031814e-05,
      "loss": 3.9464,
      "step": 2370
    },
    {
      "epoch": 0.01956030273236035,
      "grad_norm": 0.6356080770492554,
      "learning_rate": 4.9996704072287716e-05,
      "loss": 3.9288,
      "step": 2400
    },
    {
      "epoch": 0.019804806516514856,
      "grad_norm": 0.6498362421989441,
      "learning_rate": 4.9996599171038984e-05,
      "loss": 3.9345,
      "step": 2430
    },
    {
      "epoch": 0.020049310300669358,
      "grad_norm": 0.6107562184333801,
      "learning_rate": 4.999649262657886e-05,
      "loss": 3.9368,
      "step": 2460
    },
    {
      "epoch": 0.020293814084823864,
      "grad_norm": 0.6478250622749329,
      "learning_rate": 4.999638443891434e-05,
      "loss": 3.9273,
      "step": 2490
    },
    {
      "epoch": 0.02053831786897837,
      "grad_norm": 0.631263256072998,
      "learning_rate": 4.999627460805253e-05,
      "loss": 3.9136,
      "step": 2520
    },
    {
      "epoch": 0.020782821653132874,
      "grad_norm": 0.6120920777320862,
      "learning_rate": 4.999616313400066e-05,
      "loss": 3.8834,
      "step": 2550
    },
    {
      "epoch": 0.021027325437287376,
      "grad_norm": 0.6151197552680969,
      "learning_rate": 4.999605001676605e-05,
      "loss": 3.8994,
      "step": 2580
    },
    {
      "epoch": 0.02127182922144188,
      "grad_norm": 0.623715877532959,
      "learning_rate": 4.9995935256356144e-05,
      "loss": 3.8929,
      "step": 2610
    },
    {
      "epoch": 0.021516333005596387,
      "grad_norm": 0.6688769459724426,
      "learning_rate": 4.9995818852778476e-05,
      "loss": 3.8499,
      "step": 2640
    },
    {
      "epoch": 0.021760836789750892,
      "grad_norm": 0.6272155046463013,
      "learning_rate": 4.999570080604071e-05,
      "loss": 3.8861,
      "step": 2670
    },
    {
      "epoch": 0.022005340573905394,
      "grad_norm": 0.597653329372406,
      "learning_rate": 4.99955811161506e-05,
      "loss": 3.8674,
      "step": 2700
    },
    {
      "epoch": 0.0222498443580599,
      "grad_norm": 0.5608483552932739,
      "learning_rate": 4.9995459783116004e-05,
      "loss": 3.8493,
      "step": 2730
    },
    {
      "epoch": 0.022494348142214404,
      "grad_norm": 0.5991063117980957,
      "learning_rate": 4.999533680694493e-05,
      "loss": 3.8454,
      "step": 2760
    },
    {
      "epoch": 0.02273885192636891,
      "grad_norm": 0.5738102793693542,
      "learning_rate": 4.9995212187645416e-05,
      "loss": 3.8395,
      "step": 2790
    },
    {
      "epoch": 0.02298335571052341,
      "grad_norm": 0.6145568490028381,
      "learning_rate": 4.9995085925225693e-05,
      "loss": 3.8313,
      "step": 2820
    },
    {
      "epoch": 0.023227859494677917,
      "grad_norm": 0.6019515991210938,
      "learning_rate": 4.999495801969404e-05,
      "loss": 3.8277,
      "step": 2850
    },
    {
      "epoch": 0.023472363278832422,
      "grad_norm": 0.6177758574485779,
      "learning_rate": 4.9994828471058876e-05,
      "loss": 3.8083,
      "step": 2880
    },
    {
      "epoch": 0.023716867062986924,
      "grad_norm": 0.5616528987884521,
      "learning_rate": 4.9994697279328714e-05,
      "loss": 3.8114,
      "step": 2910
    },
    {
      "epoch": 0.02396137084714143,
      "grad_norm": 0.6194447875022888,
      "learning_rate": 4.9994564444512176e-05,
      "loss": 3.8183,
      "step": 2940
    },
    {
      "epoch": 0.024205874631295934,
      "grad_norm": 0.578895092010498,
      "learning_rate": 4.9994429966618e-05,
      "loss": 3.7871,
      "step": 2970
    },
    {
      "epoch": 0.02445037841545044,
      "grad_norm": 0.6014060378074646,
      "learning_rate": 4.999429384565502e-05,
      "loss": 3.7711,
      "step": 3000
    },
    {
      "epoch": 0.02469488219960494,
      "grad_norm": 0.5589067339897156,
      "learning_rate": 4.999415608163217e-05,
      "loss": 3.7533,
      "step": 3030
    },
    {
      "epoch": 0.024939385983759447,
      "grad_norm": 0.5722873210906982,
      "learning_rate": 4.999401667455854e-05,
      "loss": 3.7585,
      "step": 3060
    },
    {
      "epoch": 0.025183889767913952,
      "grad_norm": 0.5734965801239014,
      "learning_rate": 4.9993875624443274e-05,
      "loss": 3.77,
      "step": 3090
    },
    {
      "epoch": 0.025428393552068457,
      "grad_norm": 0.5524207353591919,
      "learning_rate": 4.9993732931295646e-05,
      "loss": 3.718,
      "step": 3120
    },
    {
      "epoch": 0.02567289733622296,
      "grad_norm": 0.5776082277297974,
      "learning_rate": 4.999358859512503e-05,
      "loss": 3.7573,
      "step": 3150
    },
    {
      "epoch": 0.025917401120377465,
      "grad_norm": 0.5799595713615417,
      "learning_rate": 4.9993442615940936e-05,
      "loss": 3.7552,
      "step": 3180
    },
    {
      "epoch": 0.02616190490453197,
      "grad_norm": 0.5820346474647522,
      "learning_rate": 4.999329499375292e-05,
      "loss": 3.7394,
      "step": 3210
    },
    {
      "epoch": 0.026406408688686475,
      "grad_norm": 0.5450282096862793,
      "learning_rate": 4.999314572857074e-05,
      "loss": 3.7393,
      "step": 3240
    },
    {
      "epoch": 0.026650912472840977,
      "grad_norm": 0.6213249564170837,
      "learning_rate": 4.9992994820404174e-05,
      "loss": 3.7191,
      "step": 3270
    },
    {
      "epoch": 0.026895416256995482,
      "grad_norm": 0.5940688848495483,
      "learning_rate": 4.999284226926314e-05,
      "loss": 3.719,
      "step": 3300
    },
    {
      "epoch": 0.027139920041149988,
      "grad_norm": 0.5779993534088135,
      "learning_rate": 4.999268807515768e-05,
      "loss": 3.7092,
      "step": 3330
    },
    {
      "epoch": 0.027384423825304493,
      "grad_norm": 0.5899255871772766,
      "learning_rate": 4.999253223809792e-05,
      "loss": 3.6939,
      "step": 3360
    },
    {
      "epoch": 0.027628927609458995,
      "grad_norm": 0.6330375671386719,
      "learning_rate": 4.999237475809411e-05,
      "loss": 3.7102,
      "step": 3390
    },
    {
      "epoch": 0.0278734313936135,
      "grad_norm": 0.5771914720535278,
      "learning_rate": 4.99922156351566e-05,
      "loss": 3.7196,
      "step": 3420
    },
    {
      "epoch": 0.028117935177768005,
      "grad_norm": 0.5769143104553223,
      "learning_rate": 4.999205486929586e-05,
      "loss": 3.7127,
      "step": 3450
    },
    {
      "epoch": 0.028362438961922507,
      "grad_norm": 0.5581954121589661,
      "learning_rate": 4.999189246052245e-05,
      "loss": 3.689,
      "step": 3480
    },
    {
      "epoch": 0.028606942746077012,
      "grad_norm": 0.6041043400764465,
      "learning_rate": 4.999172840884704e-05,
      "loss": 3.6831,
      "step": 3510
    },
    {
      "epoch": 0.028851446530231518,
      "grad_norm": 0.5458335280418396,
      "learning_rate": 4.999156271428043e-05,
      "loss": 3.6694,
      "step": 3540
    },
    {
      "epoch": 0.029095950314386023,
      "grad_norm": 0.5470607280731201,
      "learning_rate": 4.9991395376833496e-05,
      "loss": 3.6702,
      "step": 3570
    },
    {
      "epoch": 0.029340454098540525,
      "grad_norm": 0.5878787040710449,
      "learning_rate": 4.999122639651725e-05,
      "loss": 3.6492,
      "step": 3600
    },
    {
      "epoch": 0.02958495788269503,
      "grad_norm": 0.5691691637039185,
      "learning_rate": 4.9991055773342795e-05,
      "loss": 3.6812,
      "step": 3630
    },
    {
      "epoch": 0.029829461666849535,
      "grad_norm": 0.5548356771469116,
      "learning_rate": 4.9990883507321354e-05,
      "loss": 3.645,
      "step": 3660
    },
    {
      "epoch": 0.03007396545100404,
      "grad_norm": 0.5686156153678894,
      "learning_rate": 4.999070959846424e-05,
      "loss": 3.6505,
      "step": 3690
    },
    {
      "epoch": 0.030318469235158543,
      "grad_norm": 0.5596534013748169,
      "learning_rate": 4.999053404678289e-05,
      "loss": 3.6532,
      "step": 3720
    },
    {
      "epoch": 0.030562973019313048,
      "grad_norm": 0.54421067237854,
      "learning_rate": 4.999035685228884e-05,
      "loss": 3.6238,
      "step": 3750
    },
    {
      "epoch": 0.030807476803467553,
      "grad_norm": 0.56732177734375,
      "learning_rate": 4.999017801499375e-05,
      "loss": 3.6449,
      "step": 3780
    },
    {
      "epoch": 0.03105198058762206,
      "grad_norm": 0.5608410239219666,
      "learning_rate": 4.998999753490937e-05,
      "loss": 3.6344,
      "step": 3810
    },
    {
      "epoch": 0.03129648437177656,
      "grad_norm": 0.5919491648674011,
      "learning_rate": 4.998981541204757e-05,
      "loss": 3.6213,
      "step": 3840
    },
    {
      "epoch": 0.031540988155931066,
      "grad_norm": 0.5795233249664307,
      "learning_rate": 4.998963164642031e-05,
      "loss": 3.6239,
      "step": 3870
    },
    {
      "epoch": 0.03178549194008557,
      "grad_norm": 0.5669205784797668,
      "learning_rate": 4.9989446238039676e-05,
      "loss": 3.5831,
      "step": 3900
    },
    {
      "epoch": 0.032029995724240076,
      "grad_norm": 0.5817368626594543,
      "learning_rate": 4.998925918691786e-05,
      "loss": 3.6074,
      "step": 3930
    },
    {
      "epoch": 0.03227449950839458,
      "grad_norm": 0.5662333369255066,
      "learning_rate": 4.998907049306715e-05,
      "loss": 3.5874,
      "step": 3960
    },
    {
      "epoch": 0.03251900329254909,
      "grad_norm": 0.5641735792160034,
      "learning_rate": 4.998888015649996e-05,
      "loss": 3.6056,
      "step": 3990
    },
    {
      "epoch": 0.032763507076703585,
      "grad_norm": 0.524918794631958,
      "learning_rate": 4.99886881772288e-05,
      "loss": 3.5963,
      "step": 4020
    },
    {
      "epoch": 0.03300801086085809,
      "grad_norm": 0.5625722408294678,
      "learning_rate": 4.998849455526628e-05,
      "loss": 3.5917,
      "step": 4050
    },
    {
      "epoch": 0.033252514645012596,
      "grad_norm": 0.5612478852272034,
      "learning_rate": 4.998829929062515e-05,
      "loss": 3.5792,
      "step": 4080
    },
    {
      "epoch": 0.0334970184291671,
      "grad_norm": 0.5586293935775757,
      "learning_rate": 4.998810238331822e-05,
      "loss": 3.5708,
      "step": 4110
    },
    {
      "epoch": 0.033741522213321606,
      "grad_norm": 0.53324955701828,
      "learning_rate": 4.998790383335845e-05,
      "loss": 3.5686,
      "step": 4140
    },
    {
      "epoch": 0.03398602599747611,
      "grad_norm": 0.5210742950439453,
      "learning_rate": 4.9987703640758894e-05,
      "loss": 3.575,
      "step": 4170
    },
    {
      "epoch": 0.03423052978163062,
      "grad_norm": 0.5591189861297607,
      "learning_rate": 4.99875018055327e-05,
      "loss": 3.5717,
      "step": 4200
    },
    {
      "epoch": 0.034475033565785115,
      "grad_norm": 0.5435970425605774,
      "learning_rate": 4.998729832769315e-05,
      "loss": 3.5638,
      "step": 4230
    },
    {
      "epoch": 0.03471953734993962,
      "grad_norm": 0.5489551424980164,
      "learning_rate": 4.998709320725361e-05,
      "loss": 3.574,
      "step": 4260
    },
    {
      "epoch": 0.034964041134094126,
      "grad_norm": 0.5275290012359619,
      "learning_rate": 4.998688644422756e-05,
      "loss": 3.5695,
      "step": 4290
    },
    {
      "epoch": 0.03520854491824863,
      "grad_norm": 0.583881139755249,
      "learning_rate": 4.998667803862861e-05,
      "loss": 3.5703,
      "step": 4320
    },
    {
      "epoch": 0.03545304870240314,
      "grad_norm": 0.5317121744155884,
      "learning_rate": 4.9986467990470445e-05,
      "loss": 3.5668,
      "step": 4350
    },
    {
      "epoch": 0.03569755248655764,
      "grad_norm": 0.5981696248054504,
      "learning_rate": 4.998625629976688e-05,
      "loss": 3.5411,
      "step": 4380
    },
    {
      "epoch": 0.03594205627071215,
      "grad_norm": 0.55133455991745,
      "learning_rate": 4.998604296653182e-05,
      "loss": 3.521,
      "step": 4410
    },
    {
      "epoch": 0.03618656005486665,
      "grad_norm": 0.5481094717979431,
      "learning_rate": 4.99858279907793e-05,
      "loss": 3.5421,
      "step": 4440
    },
    {
      "epoch": 0.03643106383902115,
      "grad_norm": 0.5218031406402588,
      "learning_rate": 4.998561137252346e-05,
      "loss": 3.5305,
      "step": 4470
    },
    {
      "epoch": 0.036675567623175656,
      "grad_norm": 0.5458360910415649,
      "learning_rate": 4.9985393111778525e-05,
      "loss": 3.5332,
      "step": 4500
    },
    {
      "epoch": 0.03692007140733016,
      "grad_norm": 0.5501233339309692,
      "learning_rate": 4.998517320855884e-05,
      "loss": 3.5485,
      "step": 4530
    },
    {
      "epoch": 0.03716457519148467,
      "grad_norm": 0.5359978675842285,
      "learning_rate": 4.998495166287887e-05,
      "loss": 3.534,
      "step": 4560
    },
    {
      "epoch": 0.03740907897563917,
      "grad_norm": 0.5447133183479309,
      "learning_rate": 4.998472847475318e-05,
      "loss": 3.5176,
      "step": 4590
    },
    {
      "epoch": 0.03765358275979368,
      "grad_norm": 0.5201069712638855,
      "learning_rate": 4.998450364419643e-05,
      "loss": 3.518,
      "step": 4620
    },
    {
      "epoch": 0.03789808654394818,
      "grad_norm": 0.5251840353012085,
      "learning_rate": 4.998427717122342e-05,
      "loss": 3.5021,
      "step": 4650
    },
    {
      "epoch": 0.03814259032810268,
      "grad_norm": 0.5204648375511169,
      "learning_rate": 4.9984049055849024e-05,
      "loss": 3.505,
      "step": 4680
    },
    {
      "epoch": 0.038387094112257186,
      "grad_norm": 0.5137141942977905,
      "learning_rate": 4.9983819298088234e-05,
      "loss": 3.4997,
      "step": 4710
    },
    {
      "epoch": 0.03863159789641169,
      "grad_norm": 0.5232805609703064,
      "learning_rate": 4.9983587897956166e-05,
      "loss": 3.5049,
      "step": 4740
    },
    {
      "epoch": 0.0388761016805662,
      "grad_norm": 0.5415229201316833,
      "learning_rate": 4.998335485546802e-05,
      "loss": 3.5123,
      "step": 4770
    },
    {
      "epoch": 0.0391206054647207,
      "grad_norm": 0.5097187161445618,
      "learning_rate": 4.998312017063912e-05,
      "loss": 3.4839,
      "step": 4800
    },
    {
      "epoch": 0.03936510924887521,
      "grad_norm": 0.5637670755386353,
      "learning_rate": 4.9982883843484895e-05,
      "loss": 3.5084,
      "step": 4830
    },
    {
      "epoch": 0.03960961303302971,
      "grad_norm": 0.5436129570007324,
      "learning_rate": 4.998264587402088e-05,
      "loss": 3.5184,
      "step": 4860
    },
    {
      "epoch": 0.03985411681718422,
      "grad_norm": 0.5133230090141296,
      "learning_rate": 4.9982406262262726e-05,
      "loss": 3.48,
      "step": 4890
    },
    {
      "epoch": 0.040098620601338716,
      "grad_norm": 0.5678510069847107,
      "learning_rate": 4.9982165008226175e-05,
      "loss": 3.4899,
      "step": 4920
    },
    {
      "epoch": 0.04034312438549322,
      "grad_norm": 0.5121597647666931,
      "learning_rate": 4.998192211192708e-05,
      "loss": 3.4942,
      "step": 4950
    },
    {
      "epoch": 0.04058762816964773,
      "grad_norm": 0.537929117679596,
      "learning_rate": 4.9981677573381423e-05,
      "loss": 3.4721,
      "step": 4980
    },
    {
      "epoch": 0.04083213195380223,
      "grad_norm": 0.537289023399353,
      "learning_rate": 4.9981431392605274e-05,
      "loss": 3.4919,
      "step": 5010
    },
    {
      "epoch": 0.04107663573795674,
      "grad_norm": 0.5594364404678345,
      "learning_rate": 4.998118356961481e-05,
      "loss": 3.4665,
      "step": 5040
    },
    {
      "epoch": 0.04132113952211124,
      "grad_norm": 0.5009840726852417,
      "learning_rate": 4.998093410442632e-05,
      "loss": 3.4524,
      "step": 5070
    },
    {
      "epoch": 0.04156564330626575,
      "grad_norm": 0.53801429271698,
      "learning_rate": 4.998068299705623e-05,
      "loss": 3.4629,
      "step": 5100
    },
    {
      "epoch": 0.041810147090420254,
      "grad_norm": 0.5134426355361938,
      "learning_rate": 4.9980430247521014e-05,
      "loss": 3.4504,
      "step": 5130
    },
    {
      "epoch": 0.04205465087457475,
      "grad_norm": 0.5095092058181763,
      "learning_rate": 4.998017585583731e-05,
      "loss": 3.4684,
      "step": 5160
    },
    {
      "epoch": 0.04229915465872926,
      "grad_norm": 0.5233584642410278,
      "learning_rate": 4.9979919822021824e-05,
      "loss": 3.4726,
      "step": 5190
    },
    {
      "epoch": 0.04254365844288376,
      "grad_norm": 0.5375773906707764,
      "learning_rate": 4.99796621460914e-05,
      "loss": 3.4502,
      "step": 5220
    },
    {
      "epoch": 0.04278816222703827,
      "grad_norm": 0.5292348265647888,
      "learning_rate": 4.9979402828062963e-05,
      "loss": 3.4494,
      "step": 5250
    },
    {
      "epoch": 0.04303266601119277,
      "grad_norm": 0.5170340538024902,
      "learning_rate": 4.997914186795358e-05,
      "loss": 3.4671,
      "step": 5280
    },
    {
      "epoch": 0.04327716979534728,
      "grad_norm": 0.5027357339859009,
      "learning_rate": 4.9978879265780385e-05,
      "loss": 3.4424,
      "step": 5310
    },
    {
      "epoch": 0.043521673579501784,
      "grad_norm": 0.4898991584777832,
      "learning_rate": 4.997861502156066e-05,
      "loss": 3.4515,
      "step": 5340
    },
    {
      "epoch": 0.04376617736365628,
      "grad_norm": 0.5362656712532043,
      "learning_rate": 4.997834913531176e-05,
      "loss": 3.4209,
      "step": 5370
    },
    {
      "epoch": 0.04401068114781079,
      "grad_norm": 0.5150277614593506,
      "learning_rate": 4.9978081607051176e-05,
      "loss": 3.4575,
      "step": 5400
    },
    {
      "epoch": 0.04425518493196529,
      "grad_norm": 0.5086013674736023,
      "learning_rate": 4.997781243679648e-05,
      "loss": 3.4377,
      "step": 5430
    },
    {
      "epoch": 0.0444996887161198,
      "grad_norm": 0.538194477558136,
      "learning_rate": 4.9977541624565374e-05,
      "loss": 3.4282,
      "step": 5460
    },
    {
      "epoch": 0.0447441925002743,
      "grad_norm": 0.5045614838600159,
      "learning_rate": 4.9977269170375665e-05,
      "loss": 3.4109,
      "step": 5490
    },
    {
      "epoch": 0.04498869628442881,
      "grad_norm": 0.5368480086326599,
      "learning_rate": 4.997699507424526e-05,
      "loss": 3.4341,
      "step": 5520
    },
    {
      "epoch": 0.045233200068583314,
      "grad_norm": 0.6185830235481262,
      "learning_rate": 4.997671933619218e-05,
      "loss": 3.4203,
      "step": 5550
    },
    {
      "epoch": 0.04547770385273782,
      "grad_norm": 0.4984918534755707,
      "learning_rate": 4.9976441956234546e-05,
      "loss": 3.4309,
      "step": 5580
    },
    {
      "epoch": 0.04572220763689232,
      "grad_norm": 0.5066754221916199,
      "learning_rate": 4.99761629343906e-05,
      "loss": 3.3986,
      "step": 5610
    },
    {
      "epoch": 0.04596671142104682,
      "grad_norm": 0.5132448673248291,
      "learning_rate": 4.9975882270678676e-05,
      "loss": 3.4126,
      "step": 5640
    },
    {
      "epoch": 0.04621121520520133,
      "grad_norm": 0.5501627922058105,
      "learning_rate": 4.997559996511723e-05,
      "loss": 3.4057,
      "step": 5670
    },
    {
      "epoch": 0.04645571898935583,
      "grad_norm": 0.4679185152053833,
      "learning_rate": 4.997531601772481e-05,
      "loss": 3.428,
      "step": 5700
    },
    {
      "epoch": 0.04670022277351034,
      "grad_norm": 0.5020308494567871,
      "learning_rate": 4.9975030428520084e-05,
      "loss": 3.3977,
      "step": 5730
    },
    {
      "epoch": 0.046944726557664844,
      "grad_norm": 0.5141638517379761,
      "learning_rate": 4.997474319752184e-05,
      "loss": 3.4044,
      "step": 5760
    },
    {
      "epoch": 0.04718923034181935,
      "grad_norm": 0.5127893090248108,
      "learning_rate": 4.997445432474895e-05,
      "loss": 3.4071,
      "step": 5790
    },
    {
      "epoch": 0.04743373412597385,
      "grad_norm": 0.5528402924537659,
      "learning_rate": 4.9974163810220406e-05,
      "loss": 3.3891,
      "step": 5820
    },
    {
      "epoch": 0.04767823791012835,
      "grad_norm": 0.5092859864234924,
      "learning_rate": 4.99738716539553e-05,
      "loss": 3.4118,
      "step": 5850
    },
    {
      "epoch": 0.04792274169428286,
      "grad_norm": 0.49626457691192627,
      "learning_rate": 4.997357785597284e-05,
      "loss": 3.3834,
      "step": 5880
    },
    {
      "epoch": 0.048167245478437364,
      "grad_norm": 0.5132192969322205,
      "learning_rate": 4.997328241629234e-05,
      "loss": 3.3944,
      "step": 5910
    },
    {
      "epoch": 0.04841174926259187,
      "grad_norm": 0.5104256868362427,
      "learning_rate": 4.997298533493323e-05,
      "loss": 3.4001,
      "step": 5940
    },
    {
      "epoch": 0.048656253046746374,
      "grad_norm": 0.507205069065094,
      "learning_rate": 4.997268661191503e-05,
      "loss": 3.3867,
      "step": 5970
    },
    {
      "epoch": 0.04890075683090088,
      "grad_norm": 0.6091153025627136,
      "learning_rate": 4.9972386247257385e-05,
      "loss": 3.3984,
      "step": 6000
    },
    {
      "epoch": 0.049145260615055385,
      "grad_norm": 0.4803309142589569,
      "learning_rate": 4.9972084240980025e-05,
      "loss": 3.3703,
      "step": 6030
    },
    {
      "epoch": 0.04938976439920988,
      "grad_norm": 0.515164852142334,
      "learning_rate": 4.997178059310281e-05,
      "loss": 3.3733,
      "step": 6060
    },
    {
      "epoch": 0.04963426818336439,
      "grad_norm": 0.515275776386261,
      "learning_rate": 4.997147530364571e-05,
      "loss": 3.3802,
      "step": 6090
    },
    {
      "epoch": 0.049878771967518894,
      "grad_norm": 0.5258405208587646,
      "learning_rate": 4.9971168372628793e-05,
      "loss": 3.3705,
      "step": 6120
    },
    {
      "epoch": 0.0501232757516734,
      "grad_norm": 0.49037066102027893,
      "learning_rate": 4.997085980007222e-05,
      "loss": 3.37,
      "step": 6150
    },
    {
      "epoch": 0.050367779535827904,
      "grad_norm": 0.47182220220565796,
      "learning_rate": 4.99705495859963e-05,
      "loss": 3.3705,
      "step": 6180
    },
    {
      "epoch": 0.05061228331998241,
      "grad_norm": 0.4967211186885834,
      "learning_rate": 4.99702377304214e-05,
      "loss": 3.3743,
      "step": 6210
    },
    {
      "epoch": 0.050856787104136915,
      "grad_norm": 0.7469291090965271,
      "learning_rate": 4.9969924233368036e-05,
      "loss": 3.3732,
      "step": 6240
    },
    {
      "epoch": 0.05110129088829142,
      "grad_norm": 0.5237293839454651,
      "learning_rate": 4.996960909485681e-05,
      "loss": 3.3646,
      "step": 6270
    },
    {
      "epoch": 0.05134579467244592,
      "grad_norm": 0.48839274048805237,
      "learning_rate": 4.9969292314908446e-05,
      "loss": 3.3665,
      "step": 6300
    },
    {
      "epoch": 0.051590298456600424,
      "grad_norm": 0.48733407258987427,
      "learning_rate": 4.996897389354376e-05,
      "loss": 3.3646,
      "step": 6330
    },
    {
      "epoch": 0.05183480224075493,
      "grad_norm": 0.4938340187072754,
      "learning_rate": 4.9968653830783684e-05,
      "loss": 3.3542,
      "step": 6360
    },
    {
      "epoch": 0.052079306024909434,
      "grad_norm": 0.5001193284988403,
      "learning_rate": 4.996833212664927e-05,
      "loss": 3.3583,
      "step": 6390
    },
    {
      "epoch": 0.05232380980906394,
      "grad_norm": 0.4772029221057892,
      "learning_rate": 4.996800878116166e-05,
      "loss": 3.336,
      "step": 6420
    },
    {
      "epoch": 0.052568313593218445,
      "grad_norm": 0.49077659845352173,
      "learning_rate": 4.99676837943421e-05,
      "loss": 3.3681,
      "step": 6450
    },
    {
      "epoch": 0.05281281737737295,
      "grad_norm": 0.5102148056030273,
      "learning_rate": 4.996735716621196e-05,
      "loss": 3.3437,
      "step": 6480
    },
    {
      "epoch": 0.05305732116152745,
      "grad_norm": 0.5012289881706238,
      "learning_rate": 4.996702889679272e-05,
      "loss": 3.3536,
      "step": 6510
    },
    {
      "epoch": 0.053301824945681954,
      "grad_norm": 0.4870162308216095,
      "learning_rate": 4.996669898610595e-05,
      "loss": 3.3513,
      "step": 6540
    },
    {
      "epoch": 0.05354632872983646,
      "grad_norm": 0.48452699184417725,
      "learning_rate": 4.996636743417334e-05,
      "loss": 3.3415,
      "step": 6570
    },
    {
      "epoch": 0.053790832513990965,
      "grad_norm": 0.4973999559879303,
      "learning_rate": 4.996603424101669e-05,
      "loss": 3.3304,
      "step": 6600
    },
    {
      "epoch": 0.05403533629814547,
      "grad_norm": 0.4833717942237854,
      "learning_rate": 4.996569940665789e-05,
      "loss": 3.3424,
      "step": 6630
    },
    {
      "epoch": 0.054279840082299975,
      "grad_norm": 0.5074206590652466,
      "learning_rate": 4.996536293111896e-05,
      "loss": 3.3302,
      "step": 6660
    },
    {
      "epoch": 0.05452434386645448,
      "grad_norm": 0.4932290315628052,
      "learning_rate": 4.996502481442202e-05,
      "loss": 3.3388,
      "step": 6690
    },
    {
      "epoch": 0.054768847650608986,
      "grad_norm": 0.48740679025650024,
      "learning_rate": 4.9964685056589314e-05,
      "loss": 3.3182,
      "step": 6720
    },
    {
      "epoch": 0.055013351434763484,
      "grad_norm": 0.48752760887145996,
      "learning_rate": 4.996434365764314e-05,
      "loss": 3.3065,
      "step": 6750
    },
    {
      "epoch": 0.05525785521891799,
      "grad_norm": 0.50692218542099,
      "learning_rate": 4.996400061760597e-05,
      "loss": 3.3379,
      "step": 6780
    },
    {
      "epoch": 0.055502359003072495,
      "grad_norm": 0.479159414768219,
      "learning_rate": 4.996365593650033e-05,
      "loss": 3.3317,
      "step": 6810
    },
    {
      "epoch": 0.055746862787227,
      "grad_norm": 0.498662531375885,
      "learning_rate": 4.99633096143489e-05,
      "loss": 3.3306,
      "step": 6840
    },
    {
      "epoch": 0.055991366571381505,
      "grad_norm": 1.4371449947357178,
      "learning_rate": 4.9962961651174436e-05,
      "loss": 3.3334,
      "step": 6870
    },
    {
      "epoch": 0.05623587035553601,
      "grad_norm": 0.49862873554229736,
      "learning_rate": 4.9962612046999827e-05,
      "loss": 3.3142,
      "step": 6900
    },
    {
      "epoch": 0.056480374139690516,
      "grad_norm": 0.4759610593318939,
      "learning_rate": 4.996226080184803e-05,
      "loss": 3.3238,
      "step": 6930
    },
    {
      "epoch": 0.056724877923845014,
      "grad_norm": 0.4844242334365845,
      "learning_rate": 4.996190791574215e-05,
      "loss": 3.3197,
      "step": 6960
    },
    {
      "epoch": 0.05696938170799952,
      "grad_norm": 0.46844130754470825,
      "learning_rate": 4.996155338870538e-05,
      "loss": 3.2949,
      "step": 6990
    },
    {
      "epoch": 0.057213885492154025,
      "grad_norm": 0.4850478768348694,
      "learning_rate": 4.9961197220761035e-05,
      "loss": 3.3143,
      "step": 7020
    },
    {
      "epoch": 0.05745838927630853,
      "grad_norm": 0.4838846027851105,
      "learning_rate": 4.996083941193252e-05,
      "loss": 3.3015,
      "step": 7050
    },
    {
      "epoch": 0.057702893060463036,
      "grad_norm": 0.49992483854293823,
      "learning_rate": 4.9960479962243367e-05,
      "loss": 3.3099,
      "step": 7080
    },
    {
      "epoch": 0.05794739684461754,
      "grad_norm": 0.49964553117752075,
      "learning_rate": 4.996011887171719e-05,
      "loss": 3.3046,
      "step": 7110
    },
    {
      "epoch": 0.058191900628772046,
      "grad_norm": 0.4723115563392639,
      "learning_rate": 4.995975614037773e-05,
      "loss": 3.3009,
      "step": 7140
    },
    {
      "epoch": 0.05843640441292655,
      "grad_norm": 0.48575958609580994,
      "learning_rate": 4.995939176824883e-05,
      "loss": 3.3018,
      "step": 7170
    },
    {
      "epoch": 0.05868090819708105,
      "grad_norm": 0.5264491438865662,
      "learning_rate": 4.995902575535446e-05,
      "loss": 3.2877,
      "step": 7200
    },
    {
      "epoch": 0.058925411981235555,
      "grad_norm": 0.4813016355037689,
      "learning_rate": 4.995865810171866e-05,
      "loss": 3.2933,
      "step": 7230
    },
    {
      "epoch": 0.05916991576539006,
      "grad_norm": 0.47151580452919006,
      "learning_rate": 4.995828880736561e-05,
      "loss": 3.3143,
      "step": 7260
    },
    {
      "epoch": 0.059414419549544566,
      "grad_norm": 0.4812193512916565,
      "learning_rate": 4.995791787231958e-05,
      "loss": 3.2914,
      "step": 7290
    },
    {
      "epoch": 0.05965892333369907,
      "grad_norm": 0.4926256239414215,
      "learning_rate": 4.9957545296604965e-05,
      "loss": 3.2756,
      "step": 7320
    },
    {
      "epoch": 0.059903427117853576,
      "grad_norm": 0.47530651092529297,
      "learning_rate": 4.9957171080246245e-05,
      "loss": 3.3075,
      "step": 7350
    },
    {
      "epoch": 0.06014793090200808,
      "grad_norm": 0.4838476777076721,
      "learning_rate": 4.995679522326803e-05,
      "loss": 3.292,
      "step": 7380
    },
    {
      "epoch": 0.06039243468616259,
      "grad_norm": 0.48683232069015503,
      "learning_rate": 4.995641772569502e-05,
      "loss": 3.269,
      "step": 7410
    },
    {
      "epoch": 0.060636938470317085,
      "grad_norm": 0.4818269908428192,
      "learning_rate": 4.995603858755203e-05,
      "loss": 3.2798,
      "step": 7440
    },
    {
      "epoch": 0.06088144225447159,
      "grad_norm": 0.46415388584136963,
      "learning_rate": 4.9955657808863985e-05,
      "loss": 3.2768,
      "step": 7470
    },
    {
      "epoch": 0.061125946038626096,
      "grad_norm": 0.48808780312538147,
      "learning_rate": 4.995527538965593e-05,
      "loss": 3.2797,
      "step": 7500
    },
    {
      "epoch": 0.0613704498227806,
      "grad_norm": 0.4957239627838135,
      "learning_rate": 4.995489132995298e-05,
      "loss": 3.2912,
      "step": 7530
    },
    {
      "epoch": 0.061614953606935106,
      "grad_norm": 0.4858773350715637,
      "learning_rate": 4.99545056297804e-05,
      "loss": 3.269,
      "step": 7560
    },
    {
      "epoch": 0.06185945739108961,
      "grad_norm": 0.46054506301879883,
      "learning_rate": 4.995411828916354e-05,
      "loss": 3.2663,
      "step": 7590
    },
    {
      "epoch": 0.06210396117524412,
      "grad_norm": 0.4704018831253052,
      "learning_rate": 4.9953729308127874e-05,
      "loss": 3.263,
      "step": 7620
    },
    {
      "epoch": 0.062348464959398615,
      "grad_norm": 0.491974413394928,
      "learning_rate": 4.995333868669895e-05,
      "loss": 3.2709,
      "step": 7650
    },
    {
      "epoch": 0.06259296874355312,
      "grad_norm": 0.46538054943084717,
      "learning_rate": 4.995294642490246e-05,
      "loss": 3.2818,
      "step": 7680
    },
    {
      "epoch": 0.06283747252770763,
      "grad_norm": 0.49248039722442627,
      "learning_rate": 4.995255252276418e-05,
      "loss": 3.2581,
      "step": 7710
    },
    {
      "epoch": 0.06308197631186213,
      "grad_norm": 0.45257478952407837,
      "learning_rate": 4.9952156980310016e-05,
      "loss": 3.2691,
      "step": 7740
    },
    {
      "epoch": 0.06332648009601663,
      "grad_norm": 0.479942262172699,
      "learning_rate": 4.9951759797565965e-05,
      "loss": 3.276,
      "step": 7770
    },
    {
      "epoch": 0.06357098388017114,
      "grad_norm": 0.48799383640289307,
      "learning_rate": 4.995136097455815e-05,
      "loss": 3.2668,
      "step": 7800
    },
    {
      "epoch": 0.06381548766432564,
      "grad_norm": 0.46520474553108215,
      "learning_rate": 4.995096051131276e-05,
      "loss": 3.2509,
      "step": 7830
    },
    {
      "epoch": 0.06405999144848015,
      "grad_norm": 0.4901852309703827,
      "learning_rate": 4.995055840785614e-05,
      "loss": 3.2383,
      "step": 7860
    },
    {
      "epoch": 0.06430449523263465,
      "grad_norm": 0.46401068568229675,
      "learning_rate": 4.995015466421473e-05,
      "loss": 3.2626,
      "step": 7890
    },
    {
      "epoch": 0.06454899901678916,
      "grad_norm": 0.46814706921577454,
      "learning_rate": 4.9949749280415056e-05,
      "loss": 3.2488,
      "step": 7920
    },
    {
      "epoch": 0.06479350280094366,
      "grad_norm": 0.48936349153518677,
      "learning_rate": 4.9949342256483766e-05,
      "loss": 3.2368,
      "step": 7950
    },
    {
      "epoch": 0.06503800658509817,
      "grad_norm": 0.4871247708797455,
      "learning_rate": 4.9948933592447636e-05,
      "loss": 3.2603,
      "step": 7980
    },
    {
      "epoch": 0.06528251036925267,
      "grad_norm": 0.4689597487449646,
      "learning_rate": 4.9948523288333506e-05,
      "loss": 3.2437,
      "step": 8010
    },
    {
      "epoch": 0.06552701415340717,
      "grad_norm": 0.45705971121788025,
      "learning_rate": 4.994811134416836e-05,
      "loss": 3.2415,
      "step": 8040
    },
    {
      "epoch": 0.06577151793756168,
      "grad_norm": 0.46996039152145386,
      "learning_rate": 4.994769775997927e-05,
      "loss": 3.2545,
      "step": 8070
    },
    {
      "epoch": 0.06601602172171618,
      "grad_norm": 0.4835875928401947,
      "learning_rate": 4.994728253579345e-05,
      "loss": 3.2587,
      "step": 8100
    },
    {
      "epoch": 0.0662605255058707,
      "grad_norm": 0.4678030014038086,
      "learning_rate": 4.9946865671638166e-05,
      "loss": 3.2368,
      "step": 8130
    },
    {
      "epoch": 0.06650502929002519,
      "grad_norm": 0.5105451345443726,
      "learning_rate": 4.9946447167540835e-05,
      "loss": 3.259,
      "step": 8160
    },
    {
      "epoch": 0.0667495330741797,
      "grad_norm": 0.471935898065567,
      "learning_rate": 4.994602702352896e-05,
      "loss": 3.2631,
      "step": 8190
    },
    {
      "epoch": 0.0669940368583342,
      "grad_norm": 0.48006853461265564,
      "learning_rate": 4.994560523963018e-05,
      "loss": 3.2313,
      "step": 8220
    },
    {
      "epoch": 0.0672385406424887,
      "grad_norm": 0.46124544739723206,
      "learning_rate": 4.9945181815872196e-05,
      "loss": 3.215,
      "step": 8250
    },
    {
      "epoch": 0.06748304442664321,
      "grad_norm": 0.481571763753891,
      "learning_rate": 4.9944756752282855e-05,
      "loss": 3.2357,
      "step": 8280
    },
    {
      "epoch": 0.06772754821079771,
      "grad_norm": 0.47422316670417786,
      "learning_rate": 4.994433004889011e-05,
      "loss": 3.2279,
      "step": 8310
    },
    {
      "epoch": 0.06797205199495222,
      "grad_norm": 0.48801445960998535,
      "learning_rate": 4.994390170572199e-05,
      "loss": 3.2369,
      "step": 8340
    },
    {
      "epoch": 0.06821655577910672,
      "grad_norm": 0.46795913577079773,
      "learning_rate": 4.994347172280667e-05,
      "loss": 3.2187,
      "step": 8370
    },
    {
      "epoch": 0.06846105956326123,
      "grad_norm": 0.4645237326622009,
      "learning_rate": 4.994304010017241e-05,
      "loss": 3.2183,
      "step": 8400
    },
    {
      "epoch": 0.06870556334741573,
      "grad_norm": 0.45294952392578125,
      "learning_rate": 4.994260683784758e-05,
      "loss": 3.2449,
      "step": 8430
    },
    {
      "epoch": 0.06895006713157023,
      "grad_norm": 0.47424617409706116,
      "learning_rate": 4.9942171935860674e-05,
      "loss": 3.2249,
      "step": 8460
    },
    {
      "epoch": 0.06919457091572474,
      "grad_norm": 0.4657289683818817,
      "learning_rate": 4.994173539424026e-05,
      "loss": 3.2313,
      "step": 8490
    },
    {
      "epoch": 0.06943907469987924,
      "grad_norm": 0.4828115701675415,
      "learning_rate": 4.994129721301506e-05,
      "loss": 3.236,
      "step": 8520
    },
    {
      "epoch": 0.06968357848403375,
      "grad_norm": 0.4601866602897644,
      "learning_rate": 4.994085739221386e-05,
      "loss": 3.2459,
      "step": 8550
    },
    {
      "epoch": 0.06992808226818825,
      "grad_norm": 0.45241278409957886,
      "learning_rate": 4.994041593186558e-05,
      "loss": 3.2077,
      "step": 8580
    },
    {
      "epoch": 0.07017258605234276,
      "grad_norm": 0.47955596446990967,
      "learning_rate": 4.993997283199924e-05,
      "loss": 3.2378,
      "step": 8610
    },
    {
      "epoch": 0.07041708983649726,
      "grad_norm": 0.4656619727611542,
      "learning_rate": 4.993952809264397e-05,
      "loss": 3.2277,
      "step": 8640
    },
    {
      "epoch": 0.07066159362065176,
      "grad_norm": 0.45982396602630615,
      "learning_rate": 4.9939081713829006e-05,
      "loss": 3.2192,
      "step": 8670
    },
    {
      "epoch": 0.07090609740480627,
      "grad_norm": 0.4598824381828308,
      "learning_rate": 4.993863369558369e-05,
      "loss": 3.2286,
      "step": 8700
    },
    {
      "epoch": 0.07115060118896077,
      "grad_norm": 0.4744945168495178,
      "learning_rate": 4.9938184037937466e-05,
      "loss": 3.2201,
      "step": 8730
    },
    {
      "epoch": 0.07139510497311528,
      "grad_norm": 0.46979376673698425,
      "learning_rate": 4.993773274091991e-05,
      "loss": 3.1986,
      "step": 8760
    },
    {
      "epoch": 0.07163960875726978,
      "grad_norm": 0.46936362981796265,
      "learning_rate": 4.993727980456067e-05,
      "loss": 3.2047,
      "step": 8790
    },
    {
      "epoch": 0.0718841125414243,
      "grad_norm": 0.4529063105583191,
      "learning_rate": 4.993682522888954e-05,
      "loss": 3.221,
      "step": 8820
    },
    {
      "epoch": 0.07212861632557879,
      "grad_norm": 0.46270012855529785,
      "learning_rate": 4.993636901393639e-05,
      "loss": 3.2054,
      "step": 8850
    },
    {
      "epoch": 0.0723731201097333,
      "grad_norm": 0.6890011429786682,
      "learning_rate": 4.993591115973121e-05,
      "loss": 3.2124,
      "step": 8880
    },
    {
      "epoch": 0.0726176238938878,
      "grad_norm": 0.48755085468292236,
      "learning_rate": 4.9935451666304105e-05,
      "loss": 3.2177,
      "step": 8910
    },
    {
      "epoch": 0.0728621276780423,
      "grad_norm": 0.4768828749656677,
      "learning_rate": 4.993499053368528e-05,
      "loss": 3.2075,
      "step": 8940
    },
    {
      "epoch": 0.07310663146219681,
      "grad_norm": 0.5368286371231079,
      "learning_rate": 4.993452776190504e-05,
      "loss": 3.1981,
      "step": 8970
    },
    {
      "epoch": 0.07335113524635131,
      "grad_norm": 0.45793619751930237,
      "learning_rate": 4.993406335099382e-05,
      "loss": 3.2143,
      "step": 9000
    },
    {
      "epoch": 0.07359563903050582,
      "grad_norm": 0.459844172000885,
      "learning_rate": 4.993359730098214e-05,
      "loss": 3.2107,
      "step": 9030
    },
    {
      "epoch": 0.07384014281466032,
      "grad_norm": 0.4570547640323639,
      "learning_rate": 4.993312961190064e-05,
      "loss": 3.1962,
      "step": 9060
    },
    {
      "epoch": 0.07408464659881484,
      "grad_norm": 0.4817084074020386,
      "learning_rate": 4.993266028378006e-05,
      "loss": 3.1895,
      "step": 9090
    },
    {
      "epoch": 0.07432915038296933,
      "grad_norm": 0.46276503801345825,
      "learning_rate": 4.993218931665126e-05,
      "loss": 3.1916,
      "step": 9120
    },
    {
      "epoch": 0.07457365416712383,
      "grad_norm": 0.4731460511684418,
      "learning_rate": 4.993171671054519e-05,
      "loss": 3.2119,
      "step": 9150
    },
    {
      "epoch": 0.07481815795127834,
      "grad_norm": 0.4732898473739624,
      "learning_rate": 4.993124246549293e-05,
      "loss": 3.1993,
      "step": 9180
    },
    {
      "epoch": 0.07506266173543284,
      "grad_norm": 0.4521377682685852,
      "learning_rate": 4.9930766581525645e-05,
      "loss": 3.1948,
      "step": 9210
    },
    {
      "epoch": 0.07530716551958735,
      "grad_norm": 0.466330885887146,
      "learning_rate": 4.993028905867463e-05,
      "loss": 3.1929,
      "step": 9240
    },
    {
      "epoch": 0.07555166930374185,
      "grad_norm": 0.4678347110748291,
      "learning_rate": 4.992980989697126e-05,
      "loss": 3.1962,
      "step": 9270
    },
    {
      "epoch": 0.07579617308789637,
      "grad_norm": 0.4650745391845703,
      "learning_rate": 4.992932909644705e-05,
      "loss": 3.19,
      "step": 9300
    },
    {
      "epoch": 0.07604067687205086,
      "grad_norm": 0.45810264348983765,
      "learning_rate": 4.9928846657133596e-05,
      "loss": 3.1937,
      "step": 9330
    },
    {
      "epoch": 0.07628518065620536,
      "grad_norm": 0.4602174162864685,
      "learning_rate": 4.992836257906262e-05,
      "loss": 3.2078,
      "step": 9360
    },
    {
      "epoch": 0.07652968444035987,
      "grad_norm": 0.44673460721969604,
      "learning_rate": 4.9927876862265935e-05,
      "loss": 3.1726,
      "step": 9390
    },
    {
      "epoch": 0.07677418822451437,
      "grad_norm": 0.4698408246040344,
      "learning_rate": 4.992738950677548e-05,
      "loss": 3.1658,
      "step": 9420
    },
    {
      "epoch": 0.07701869200866888,
      "grad_norm": 0.47652050852775574,
      "learning_rate": 4.992690051262329e-05,
      "loss": 3.1642,
      "step": 9450
    },
    {
      "epoch": 0.07726319579282338,
      "grad_norm": 0.4623103737831116,
      "learning_rate": 4.99264098798415e-05,
      "loss": 3.1788,
      "step": 9480
    },
    {
      "epoch": 0.0775076995769779,
      "grad_norm": 0.4699987471103668,
      "learning_rate": 4.992591760846238e-05,
      "loss": 3.1679,
      "step": 9510
    },
    {
      "epoch": 0.0777522033611324,
      "grad_norm": 0.466349720954895,
      "learning_rate": 4.9925423698518274e-05,
      "loss": 3.1674,
      "step": 9540
    },
    {
      "epoch": 0.0779967071452869,
      "grad_norm": 0.4621599316596985,
      "learning_rate": 4.992492815004166e-05,
      "loss": 3.193,
      "step": 9570
    },
    {
      "epoch": 0.0782412109294414,
      "grad_norm": 0.4567157030105591,
      "learning_rate": 4.992443096306512e-05,
      "loss": 3.1681,
      "step": 9600
    },
    {
      "epoch": 0.0784857147135959,
      "grad_norm": 0.45757701992988586,
      "learning_rate": 4.992393213762132e-05,
      "loss": 3.2002,
      "step": 9630
    },
    {
      "epoch": 0.07873021849775041,
      "grad_norm": 0.45566242933273315,
      "learning_rate": 4.992343167374307e-05,
      "loss": 3.1651,
      "step": 9660
    },
    {
      "epoch": 0.07897472228190491,
      "grad_norm": 0.4485650360584259,
      "learning_rate": 4.992292957146326e-05,
      "loss": 3.1613,
      "step": 9690
    },
    {
      "epoch": 0.07921922606605943,
      "grad_norm": 0.4732878804206848,
      "learning_rate": 4.992242583081489e-05,
      "loss": 3.1602,
      "step": 9720
    },
    {
      "epoch": 0.07946372985021392,
      "grad_norm": 0.4469018280506134,
      "learning_rate": 4.992192045183109e-05,
      "loss": 3.1487,
      "step": 9750
    },
    {
      "epoch": 0.07970823363436844,
      "grad_norm": 0.45860594511032104,
      "learning_rate": 4.9921413434545075e-05,
      "loss": 3.1516,
      "step": 9780
    },
    {
      "epoch": 0.07995273741852293,
      "grad_norm": 0.45328783988952637,
      "learning_rate": 4.992090477899018e-05,
      "loss": 3.1481,
      "step": 9810
    },
    {
      "epoch": 0.08019724120267743,
      "grad_norm": 0.48180997371673584,
      "learning_rate": 4.992039448519982e-05,
      "loss": 3.1459,
      "step": 9840
    },
    {
      "epoch": 0.08044174498683195,
      "grad_norm": 0.45059671998023987,
      "learning_rate": 4.9919882553207566e-05,
      "loss": 3.1432,
      "step": 9870
    },
    {
      "epoch": 0.08068624877098644,
      "grad_norm": 0.4592690169811249,
      "learning_rate": 4.9919368983047066e-05,
      "loss": 3.1532,
      "step": 9900
    },
    {
      "epoch": 0.08093075255514096,
      "grad_norm": 0.4452771842479706,
      "learning_rate": 4.9918853774752074e-05,
      "loss": 3.171,
      "step": 9930
    },
    {
      "epoch": 0.08117525633929545,
      "grad_norm": 0.45305460691452026,
      "learning_rate": 4.991833692835646e-05,
      "loss": 3.1422,
      "step": 9960
    },
    {
      "epoch": 0.08141976012344997,
      "grad_norm": 0.47526153922080994,
      "learning_rate": 4.9917818443894203e-05,
      "loss": 3.1565,
      "step": 9990
    },
    {
      "epoch": 0.08166426390760446,
      "grad_norm": 0.4707624614238739,
      "learning_rate": 4.991729832139939e-05,
      "loss": 3.1481,
      "step": 10020
    },
    {
      "epoch": 0.08190876769175896,
      "grad_norm": 0.44442108273506165,
      "learning_rate": 4.991677656090621e-05,
      "loss": 3.1455,
      "step": 10050
    },
    {
      "epoch": 0.08215327147591348,
      "grad_norm": 0.4681081473827362,
      "learning_rate": 4.991625316244896e-05,
| "loss": 3.1703, | |
| "step": 10080 | |
| }, | |
| { | |
| "epoch": 0.08239777526006797, | |
| "grad_norm": 0.4762340486049652, | |
| "learning_rate": 4.991572812606205e-05, | |
| "loss": 3.1636, | |
| "step": 10110 | |
| }, | |
| { | |
| "epoch": 0.08264227904422249, | |
| "grad_norm": 0.4579741954803467, | |
| "learning_rate": 4.991520145177998e-05, | |
| "loss": 3.1546, | |
| "step": 10140 | |
| }, | |
| { | |
| "epoch": 0.08288678282837698, | |
| "grad_norm": 0.45810720324516296, | |
| "learning_rate": 4.991467313963739e-05, | |
| "loss": 3.1456, | |
| "step": 10170 | |
| }, | |
| { | |
| "epoch": 0.0831312866125315, | |
| "grad_norm": 0.4368288516998291, | |
| "learning_rate": 4.991414318966901e-05, | |
| "loss": 3.1393, | |
| "step": 10200 | |
| }, | |
| { | |
| "epoch": 0.083375790396686, | |
| "grad_norm": 0.4572373330593109, | |
| "learning_rate": 4.991361160190966e-05, | |
| "loss": 3.1258, | |
| "step": 10230 | |
| }, | |
| { | |
| "epoch": 0.08362029418084051, | |
| "grad_norm": 0.488158255815506, | |
| "learning_rate": 4.9913078376394304e-05, | |
| "loss": 3.1563, | |
| "step": 10260 | |
| }, | |
| { | |
| "epoch": 0.083864797964995, | |
| "grad_norm": 0.4564719498157501, | |
| "learning_rate": 4.991254351315799e-05, | |
| "loss": 3.1344, | |
| "step": 10290 | |
| }, | |
| { | |
| "epoch": 0.0841093017491495, | |
| "grad_norm": 0.4469270408153534, | |
| "learning_rate": 4.991200701223587e-05, | |
| "loss": 3.1493, | |
| "step": 10320 | |
| }, | |
| { | |
| "epoch": 0.08435380553330402, | |
| "grad_norm": 0.46140623092651367, | |
| "learning_rate": 4.991146887366323e-05, | |
| "loss": 3.1517, | |
| "step": 10350 | |
| }, | |
| { | |
| "epoch": 0.08459830931745851, | |
| "grad_norm": 0.4541148841381073, | |
| "learning_rate": 4.991092909747542e-05, | |
| "loss": 3.1442, | |
| "step": 10380 | |
| }, | |
| { | |
| "epoch": 0.08484281310161303, | |
| "grad_norm": 0.4556724727153778, | |
| "learning_rate": 4.9910387683707946e-05, | |
| "loss": 3.1429, | |
| "step": 10410 | |
| }, | |
| { | |
| "epoch": 0.08508731688576752, | |
| "grad_norm": 0.4350408613681793, | |
| "learning_rate": 4.9909844632396386e-05, | |
| "loss": 3.1356, | |
| "step": 10440 | |
| }, | |
| { | |
| "epoch": 0.08533182066992204, | |
| "grad_norm": 0.45419394969940186, | |
| "learning_rate": 4.9909299943576445e-05, | |
| "loss": 3.1321, | |
| "step": 10470 | |
| }, | |
| { | |
| "epoch": 0.08557632445407654, | |
| "grad_norm": 0.4699922204017639, | |
| "learning_rate": 4.990875361728393e-05, | |
| "loss": 3.1406, | |
| "step": 10500 | |
| }, | |
| { | |
| "epoch": 0.08582082823823103, | |
| "grad_norm": 0.45109736919403076, | |
| "learning_rate": 4.990820565355475e-05, | |
| "loss": 3.1369, | |
| "step": 10530 | |
| }, | |
| { | |
| "epoch": 0.08606533202238555, | |
| "grad_norm": 0.45368725061416626, | |
| "learning_rate": 4.990765605242493e-05, | |
| "loss": 3.1299, | |
| "step": 10560 | |
| }, | |
| { | |
| "epoch": 0.08630983580654004, | |
| "grad_norm": 0.4491060674190521, | |
| "learning_rate": 4.990710481393061e-05, | |
| "loss": 3.1139, | |
| "step": 10590 | |
| }, | |
| { | |
| "epoch": 0.08655433959069456, | |
| "grad_norm": 0.47071024775505066, | |
| "learning_rate": 4.9906551938108003e-05, | |
| "loss": 3.159, | |
| "step": 10620 | |
| }, | |
| { | |
| "epoch": 0.08679884337484906, | |
| "grad_norm": 0.4578341841697693, | |
| "learning_rate": 4.990599742499347e-05, | |
| "loss": 3.1449, | |
| "step": 10650 | |
| }, | |
| { | |
| "epoch": 0.08704334715900357, | |
| "grad_norm": 0.4422541856765747, | |
| "learning_rate": 4.990544127462346e-05, | |
| "loss": 3.125, | |
| "step": 10680 | |
| }, | |
| { | |
| "epoch": 0.08728785094315807, | |
| "grad_norm": 0.44820329546928406, | |
| "learning_rate": 4.9904883487034537e-05, | |
| "loss": 3.1328, | |
| "step": 10710 | |
| }, | |
| { | |
| "epoch": 0.08753235472731256, | |
| "grad_norm": 0.4683224856853485, | |
| "learning_rate": 4.990432406226336e-05, | |
| "loss": 3.1296, | |
| "step": 10740 | |
| }, | |
| { | |
| "epoch": 0.08777685851146708, | |
| "grad_norm": 0.4463740885257721, | |
| "learning_rate": 4.990376300034671e-05, | |
| "loss": 3.1177, | |
| "step": 10770 | |
| }, | |
| { | |
| "epoch": 0.08802136229562157, | |
| "grad_norm": 0.4537316560745239, | |
| "learning_rate": 4.990320030132147e-05, | |
| "loss": 3.1269, | |
| "step": 10800 | |
| }, | |
| { | |
| "epoch": 0.08826586607977609, | |
| "grad_norm": 0.472412109375, | |
| "learning_rate": 4.990263596522462e-05, | |
| "loss": 3.1378, | |
| "step": 10830 | |
| }, | |
| { | |
| "epoch": 0.08851036986393059, | |
| "grad_norm": 0.4640551805496216, | |
| "learning_rate": 4.9902069992093275e-05, | |
| "loss": 3.1381, | |
| "step": 10860 | |
| }, | |
| { | |
| "epoch": 0.0887548736480851, | |
| "grad_norm": 0.4387233853340149, | |
| "learning_rate": 4.990150238196463e-05, | |
| "loss": 3.1299, | |
| "step": 10890 | |
| }, | |
| { | |
| "epoch": 0.0889993774322396, | |
| "grad_norm": 0.4460201561450958, | |
| "learning_rate": 4.9900933134876e-05, | |
| "loss": 3.127, | |
| "step": 10920 | |
| }, | |
| { | |
| "epoch": 0.0892438812163941, | |
| "grad_norm": 0.4335034191608429, | |
| "learning_rate": 4.990036225086481e-05, | |
| "loss": 3.1032, | |
| "step": 10950 | |
| }, | |
| { | |
| "epoch": 0.0894883850005486, | |
| "grad_norm": 0.44630929827690125, | |
| "learning_rate": 4.9899789729968585e-05, | |
| "loss": 3.119, | |
| "step": 10980 | |
| }, | |
| { | |
| "epoch": 0.0897328887847031, | |
| "grad_norm": 0.4360102117061615, | |
| "learning_rate": 4.989921557222496e-05, | |
| "loss": 3.1123, | |
| "step": 11010 | |
| }, | |
| { | |
| "epoch": 0.08997739256885762, | |
| "grad_norm": 0.45332589745521545, | |
| "learning_rate": 4.989863977767167e-05, | |
| "loss": 3.123, | |
| "step": 11040 | |
| }, | |
| { | |
| "epoch": 0.09022189635301212, | |
| "grad_norm": 0.4539777338504791, | |
| "learning_rate": 4.989806234634659e-05, | |
| "loss": 3.1144, | |
| "step": 11070 | |
| }, | |
| { | |
| "epoch": 0.09046640013716663, | |
| "grad_norm": 0.5205842852592468, | |
| "learning_rate": 4.989748327828765e-05, | |
| "loss": 3.1199, | |
| "step": 11100 | |
| }, | |
| { | |
| "epoch": 0.09071090392132113, | |
| "grad_norm": 0.44270429015159607, | |
| "learning_rate": 4.989690257353294e-05, | |
| "loss": 3.0783, | |
| "step": 11130 | |
| }, | |
| { | |
| "epoch": 0.09095540770547564, | |
| "grad_norm": 0.4286257326602936, | |
| "learning_rate": 4.989632023212062e-05, | |
| "loss": 3.1162, | |
| "step": 11160 | |
| }, | |
| { | |
| "epoch": 0.09119991148963014, | |
| "grad_norm": 0.49412211775779724, | |
| "learning_rate": 4.9895736254088975e-05, | |
| "loss": 3.1142, | |
| "step": 11190 | |
| }, | |
| { | |
| "epoch": 0.09144441527378463, | |
| "grad_norm": 0.4755348563194275, | |
| "learning_rate": 4.989515063947641e-05, | |
| "loss": 3.1173, | |
| "step": 11220 | |
| }, | |
| { | |
| "epoch": 0.09168891905793915, | |
| "grad_norm": 0.48216554522514343, | |
| "learning_rate": 4.9894563388321395e-05, | |
| "loss": 3.1027, | |
| "step": 11250 | |
| }, | |
| { | |
| "epoch": 0.09193342284209365, | |
| "grad_norm": 0.4571470320224762, | |
| "learning_rate": 4.989397450066254e-05, | |
| "loss": 3.1191, | |
| "step": 11280 | |
| }, | |
| { | |
| "epoch": 0.09217792662624816, | |
| "grad_norm": 0.4681689441204071, | |
| "learning_rate": 4.989338397653858e-05, | |
| "loss": 3.097, | |
| "step": 11310 | |
| }, | |
| { | |
| "epoch": 0.09242243041040266, | |
| "grad_norm": 0.4475450813770294, | |
| "learning_rate": 4.98927918159883e-05, | |
| "loss": 3.118, | |
| "step": 11340 | |
| }, | |
| { | |
| "epoch": 0.09266693419455717, | |
| "grad_norm": 0.4555697739124298, | |
| "learning_rate": 4.989219801905066e-05, | |
| "loss": 3.1078, | |
| "step": 11370 | |
| }, | |
| { | |
| "epoch": 0.09291143797871167, | |
| "grad_norm": 0.45538437366485596, | |
| "learning_rate": 4.989160258576469e-05, | |
| "loss": 3.1106, | |
| "step": 11400 | |
| }, | |
| { | |
| "epoch": 0.09315594176286617, | |
| "grad_norm": 0.48153620958328247, | |
| "learning_rate": 4.98910055161695e-05, | |
| "loss": 3.0844, | |
| "step": 11430 | |
| }, | |
| { | |
| "epoch": 0.09340044554702068, | |
| "grad_norm": 0.42441487312316895, | |
| "learning_rate": 4.989040681030437e-05, | |
| "loss": 3.1159, | |
| "step": 11460 | |
| }, | |
| { | |
| "epoch": 0.09364494933117518, | |
| "grad_norm": 0.4604051411151886, | |
| "learning_rate": 4.988980646820865e-05, | |
| "loss": 3.1035, | |
| "step": 11490 | |
| }, | |
| { | |
| "epoch": 0.09388945311532969, | |
| "grad_norm": 0.43700629472732544, | |
| "learning_rate": 4.9889204489921804e-05, | |
| "loss": 3.0811, | |
| "step": 11520 | |
| }, | |
| { | |
| "epoch": 0.09413395689948419, | |
| "grad_norm": 0.4468238949775696, | |
| "learning_rate": 4.9888600875483404e-05, | |
| "loss": 3.1104, | |
| "step": 11550 | |
| }, | |
| { | |
| "epoch": 0.0943784606836387, | |
| "grad_norm": 0.44943588972091675, | |
| "learning_rate": 4.9887995624933137e-05, | |
| "loss": 3.0917, | |
| "step": 11580 | |
| }, | |
| { | |
| "epoch": 0.0946229644677932, | |
| "grad_norm": 0.4467644691467285, | |
| "learning_rate": 4.988738873831078e-05, | |
| "loss": 3.087, | |
| "step": 11610 | |
| }, | |
| { | |
| "epoch": 0.0948674682519477, | |
| "grad_norm": 0.44013822078704834, | |
| "learning_rate": 4.988678021565623e-05, | |
| "loss": 3.0868, | |
| "step": 11640 | |
| }, | |
| { | |
| "epoch": 0.09511197203610221, | |
| "grad_norm": 0.4420963525772095, | |
| "learning_rate": 4.988617005700949e-05, | |
| "loss": 3.102, | |
| "step": 11670 | |
| }, | |
| { | |
| "epoch": 0.0953564758202567, | |
| "grad_norm": 0.45339009165763855, | |
| "learning_rate": 4.988555826241068e-05, | |
| "loss": 3.1001, | |
| "step": 11700 | |
| }, | |
| { | |
| "epoch": 0.09560097960441122, | |
| "grad_norm": 0.4557483196258545, | |
| "learning_rate": 4.988494483190001e-05, | |
| "loss": 3.0894, | |
| "step": 11730 | |
| }, | |
| { | |
| "epoch": 0.09584548338856572, | |
| "grad_norm": 0.43180930614471436, | |
| "learning_rate": 4.98843297655178e-05, | |
| "loss": 3.1116, | |
| "step": 11760 | |
| }, | |
| { | |
| "epoch": 0.09608998717272023, | |
| "grad_norm": 0.43689119815826416, | |
| "learning_rate": 4.988371306330449e-05, | |
| "loss": 3.0829, | |
| "step": 11790 | |
| } | |
| ], | |
| "logging_steps": 30, | |
| "max_steps": 368091, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 100, | |
| "total_flos": 1.1287586370748416e+19, | |
| "train_batch_size": 2, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
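
A minimal sketch of how a dump like this can be consumed, assuming it is saved as a standard trainer_state.json (the state file the Hugging Face Trainer writes alongside each checkpoint). The file path and the printed summary are illustrative assumptions, not part of the original dump; only the Python standard library is used.

    # summarize_trainer_state.py
    # Assumes a standard Hugging Face trainer_state.json layout:
    # a "log_history" list of {step, epoch, loss, learning_rate, grad_norm}
    # entries plus top-level fields like global_step and max_steps.
    import json

    with open("trainer_state.json") as f:  # hypothetical path
        state = json.load(f)

    history = state["log_history"]
    first, last = history[0], history[-1]

    print(f"logged entries : {len(history)}")
    print(f"global step    : {state['global_step']} / {state['max_steps']}")
    print(f"first loss     : {first['loss']:.4f} at step {first['step']}")
    print(f"latest loss    : {last['loss']:.4f} at step {last['step']}")
    print(f"latest lr      : {last['learning_rate']:.3e}")

Against the run shown above, this would report a loss falling from roughly 11.26 at step 1 to about 3.08 by step 11790, with training still early (global_step 11800 of max_steps 368091, i.e. under 10% of the first of num_train_epochs = 3 epochs).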