{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 1.0,
  "eval_steps": 100000000,
  "global_step": 100000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01,
      "grad_norm": 1.43193519115448,
      "learning_rate": 9.9003e-06,
      "loss": 1.0381,
      "step": 1000
    },
    {
      "epoch": 0.02,
      "grad_norm": 1.9174104928970337,
      "learning_rate": 9.8003e-06,
      "loss": 1.0262,
      "step": 2000
    },
    {
      "epoch": 0.03,
      "grad_norm": 1.5657432079315186,
      "learning_rate": 9.700300000000001e-06,
      "loss": 1.024,
      "step": 3000
    },
    {
      "epoch": 0.04,
      "grad_norm": 1.5156095027923584,
      "learning_rate": 9.600300000000002e-06,
      "loss": 1.0186,
      "step": 4000
    },
    {
      "epoch": 0.05,
      "grad_norm": 1.6368006467819214,
      "learning_rate": 9.500300000000001e-06,
      "loss": 1.0183,
      "step": 5000
    },
    {
      "epoch": 0.06,
      "grad_norm": 1.562856674194336,
      "learning_rate": 9.4003e-06,
      "loss": 1.0194,
      "step": 6000
    },
    {
      "epoch": 0.07,
      "grad_norm": 1.552268147468567,
      "learning_rate": 9.300300000000001e-06,
      "loss": 1.0222,
      "step": 7000
    },
    {
      "epoch": 0.08,
      "grad_norm": 1.5083566904067993,
      "learning_rate": 9.2003e-06,
      "loss": 1.0152,
      "step": 8000
    },
    {
      "epoch": 0.09,
      "grad_norm": 1.4872832298278809,
      "learning_rate": 9.100300000000002e-06,
      "loss": 1.0183,
      "step": 9000
    },
    {
      "epoch": 0.1,
      "grad_norm": 1.5950316190719604,
      "learning_rate": 9.000300000000001e-06,
      "loss": 1.0233,
      "step": 10000
    },
    {
      "epoch": 0.11,
      "grad_norm": 1.5159080028533936,
      "learning_rate": 8.900400000000002e-06,
      "loss": 1.0248,
      "step": 11000
    },
    {
      "epoch": 0.12,
      "grad_norm": 1.7614279985427856,
      "learning_rate": 8.800400000000001e-06,
      "loss": 1.0197,
      "step": 12000
    },
    {
      "epoch": 0.13,
      "grad_norm": 1.6475117206573486,
      "learning_rate": 8.7005e-06,
      "loss": 1.0232,
      "step": 13000
    },
    {
      "epoch": 0.14,
      "grad_norm": 1.6129435300827026,
      "learning_rate": 8.600600000000002e-06,
      "loss": 1.027,
      "step": 14000
    },
    {
      "epoch": 0.15,
      "grad_norm": 1.5888806581497192,
      "learning_rate": 8.500600000000001e-06,
      "loss": 1.0169,
      "step": 15000
    },
    {
      "epoch": 0.16,
      "grad_norm": 1.8314592838287354,
      "learning_rate": 8.4007e-06,
      "loss": 1.0274,
      "step": 16000
    },
    {
      "epoch": 0.17,
      "grad_norm": 1.8506577014923096,
      "learning_rate": 8.3007e-06,
      "loss": 1.0241,
      "step": 17000
    },
    {
      "epoch": 0.18,
      "grad_norm": 1.5969544649124146,
      "learning_rate": 8.200800000000001e-06,
      "loss": 1.0268,
      "step": 18000
    },
    {
      "epoch": 0.19,
      "grad_norm": 1.716140627861023,
      "learning_rate": 8.1008e-06,
      "loss": 1.028,
      "step": 19000
    },
    {
      "epoch": 0.2,
      "grad_norm": 1.5300655364990234,
      "learning_rate": 8.000800000000001e-06,
      "loss": 1.0289,
      "step": 20000
    },
    {
      "epoch": 0.21,
      "grad_norm": 2.258260488510132,
      "learning_rate": 7.900900000000001e-06,
      "loss": 1.0238,
      "step": 21000
    },
    {
      "epoch": 0.22,
      "grad_norm": 1.6213842630386353,
      "learning_rate": 7.801e-06,
      "loss": 1.0288,
      "step": 22000
    },
    {
      "epoch": 0.23,
      "grad_norm": 1.9307907819747925,
      "learning_rate": 7.701000000000001e-06,
      "loss": 1.0301,
      "step": 23000
    },
    {
      "epoch": 0.24,
      "grad_norm": 1.6679686307907104,
      "learning_rate": 7.601000000000001e-06,
      "loss": 1.0373,
      "step": 24000
    },
    {
      "epoch": 0.25,
      "grad_norm": 1.8511743545532227,
      "learning_rate": 7.501000000000001e-06,
      "loss": 1.0294,
      "step": 25000
    },
    {
      "epoch": 0.26,
      "grad_norm": 2.0238113403320312,
      "learning_rate": 7.401100000000001e-06,
      "loss": 1.0318,
      "step": 26000
    },
    {
      "epoch": 0.27,
      "grad_norm": 1.9416135549545288,
      "learning_rate": 7.301100000000001e-06,
      "loss": 1.0285,
      "step": 27000
    },
    {
      "epoch": 0.28,
      "grad_norm": 1.8442825078964233,
      "learning_rate": 7.201100000000001e-06,
      "loss": 1.0356,
      "step": 28000
    },
    {
      "epoch": 0.29,
      "grad_norm": 1.7065600156784058,
      "learning_rate": 7.1011000000000005e-06,
      "loss": 1.0261,
      "step": 29000
    },
    {
      "epoch": 0.3,
      "grad_norm": 1.6564326286315918,
      "learning_rate": 7.0012e-06,
      "loss": 1.029,
      "step": 30000
    },
    {
      "epoch": 0.31,
      "grad_norm": 1.886237382888794,
      "learning_rate": 6.901300000000001e-06,
      "loss": 1.0321,
      "step": 31000
    },
    {
      "epoch": 0.32,
      "grad_norm": 1.6222507953643799,
      "learning_rate": 6.8013000000000004e-06,
      "loss": 1.0359,
      "step": 32000
    },
    {
      "epoch": 0.33,
      "grad_norm": 1.8137024641036987,
      "learning_rate": 6.7013000000000005e-06,
      "loss": 1.0423,
      "step": 33000
    },
    {
      "epoch": 0.34,
      "grad_norm": 1.7678865194320679,
      "learning_rate": 6.601300000000001e-06,
      "loss": 1.0342,
      "step": 34000
    },
    {
      "epoch": 0.35,
      "grad_norm": 1.7596269845962524,
      "learning_rate": 6.501400000000001e-06,
      "loss": 1.0359,
      "step": 35000
    },
    {
      "epoch": 0.36,
      "grad_norm": 1.9185104370117188,
      "learning_rate": 6.401400000000001e-06,
      "loss": 1.0355,
      "step": 36000
    },
    {
      "epoch": 0.37,
      "grad_norm": 1.8480957746505737,
      "learning_rate": 6.301400000000001e-06,
      "loss": 1.0405,
      "step": 37000
    },
    {
      "epoch": 0.38,
      "grad_norm": 2.2403833866119385,
      "learning_rate": 6.2014000000000005e-06,
      "loss": 1.0366,
      "step": 38000
    },
    {
      "epoch": 0.39,
      "grad_norm": 1.678298830986023,
      "learning_rate": 6.1015e-06,
      "loss": 1.0396,
      "step": 39000
    },
    {
      "epoch": 0.4,
      "grad_norm": 1.8097726106643677,
      "learning_rate": 6.0015e-06,
      "loss": 1.032,
      "step": 40000
    },
    {
      "epoch": 0.41,
      "grad_norm": 1.9577500820159912,
      "learning_rate": 5.9016e-06,
      "loss": 1.034,
      "step": 41000
    },
    {
      "epoch": 0.42,
      "grad_norm": 2.1903908252716064,
      "learning_rate": 5.8016000000000005e-06,
      "loss": 1.0471,
      "step": 42000
    },
    {
      "epoch": 0.43,
      "grad_norm": 1.9866857528686523,
      "learning_rate": 5.7017e-06,
      "loss": 1.0392,
      "step": 43000
    },
    {
      "epoch": 0.44,
      "grad_norm": 2.159879207611084,
      "learning_rate": 5.6017e-06,
      "loss": 1.0386,
      "step": 44000
    },
    {
      "epoch": 0.45,
      "grad_norm": 2.0432796478271484,
      "learning_rate": 5.5018e-06,
      "loss": 1.0388,
      "step": 45000
    },
    {
      "epoch": 0.46,
      "grad_norm": 2.1784613132476807,
      "learning_rate": 5.4019e-06,
      "loss": 1.0482,
      "step": 46000
    },
    {
      "epoch": 0.47,
      "grad_norm": 2.118239402770996,
      "learning_rate": 5.301900000000001e-06,
      "loss": 1.0478,
      "step": 47000
    },
    {
      "epoch": 0.48,
      "grad_norm": 2.4094135761260986,
      "learning_rate": 5.201900000000001e-06,
      "loss": 1.0492,
      "step": 48000
    },
    {
      "epoch": 0.49,
      "grad_norm": 2.2859714031219482,
      "learning_rate": 5.101900000000001e-06,
      "loss": 1.0456,
      "step": 49000
    },
    {
      "epoch": 0.5,
      "grad_norm": 2.0180344581604004,
      "learning_rate": 5.0019e-06,
      "loss": 1.0467,
      "step": 50000
    },
    {
      "epoch": 0.51,
      "grad_norm": 1.944109559059143,
      "learning_rate": 4.9019000000000005e-06,
      "loss": 1.0449,
      "step": 51000
    },
    {
      "epoch": 0.52,
      "grad_norm": 2.5860376358032227,
      "learning_rate": 4.802100000000001e-06,
      "loss": 1.0429,
      "step": 52000
    },
    {
      "epoch": 0.53,
      "grad_norm": 2.394335985183716,
      "learning_rate": 4.7021e-06,
      "loss": 1.0438,
      "step": 53000
    },
    {
      "epoch": 0.54,
      "grad_norm": 1.9749242067337036,
      "learning_rate": 4.6021e-06,
      "loss": 1.0297,
      "step": 54000
    },
    {
      "epoch": 0.55,
      "grad_norm": 2.190340518951416,
      "learning_rate": 4.5021000000000005e-06,
      "loss": 1.0371,
      "step": 55000
    },
    {
      "epoch": 0.56,
      "grad_norm": 2.4714150428771973,
      "learning_rate": 4.4022e-06,
      "loss": 1.0383,
      "step": 56000
    },
    {
      "epoch": 0.57,
      "grad_norm": 2.1997413635253906,
      "learning_rate": 4.3023e-06,
      "loss": 1.0388,
      "step": 57000
    },
    {
      "epoch": 0.58,
      "grad_norm": 2.3572850227355957,
      "learning_rate": 4.2023e-06,
      "loss": 1.0385,
      "step": 58000
    },
    {
      "epoch": 0.59,
      "grad_norm": 1.820908546447754,
      "learning_rate": 4.1023000000000005e-06,
      "loss": 1.0463,
      "step": 59000
    },
    {
      "epoch": 0.6,
      "grad_norm": 2.5761115550994873,
      "learning_rate": 4.0023e-06,
      "loss": 1.0319,
      "step": 60000
    },
    {
      "epoch": 0.61,
      "grad_norm": 2.0554027557373047,
      "learning_rate": 3.902300000000001e-06,
      "loss": 1.0402,
      "step": 61000
    },
    {
      "epoch": 0.62,
      "grad_norm": 2.0767500400543213,
      "learning_rate": 3.8023000000000004e-06,
      "loss": 1.045,
      "step": 62000
    },
    {
      "epoch": 0.63,
      "grad_norm": 1.9431368112564087,
      "learning_rate": 3.7024000000000003e-06,
      "loss": 1.0367,
      "step": 63000
    },
    {
      "epoch": 0.64,
      "grad_norm": 1.6991102695465088,
      "learning_rate": 3.6025000000000002e-06,
      "loss": 1.031,
      "step": 64000
    },
    {
      "epoch": 0.65,
      "grad_norm": 2.0003433227539062,
      "learning_rate": 3.5025000000000003e-06,
      "loss": 1.0336,
      "step": 65000
    },
    {
      "epoch": 0.66,
      "grad_norm": 2.5492982864379883,
      "learning_rate": 3.4025000000000005e-06,
      "loss": 1.0363,
      "step": 66000
    },
    {
      "epoch": 0.67,
      "grad_norm": 2.1553924083709717,
      "learning_rate": 3.3025e-06,
      "loss": 1.0301,
      "step": 67000
    },
    {
      "epoch": 0.68,
      "grad_norm": 1.9345446825027466,
      "learning_rate": 3.2025000000000003e-06,
      "loss": 1.0322,
      "step": 68000
    },
    {
      "epoch": 0.69,
      "grad_norm": 2.102257251739502,
      "learning_rate": 3.1026e-06,
      "loss": 1.0257,
      "step": 69000
    },
    {
      "epoch": 0.7,
      "grad_norm": 1.8478143215179443,
      "learning_rate": 3.0026000000000007e-06,
      "loss": 1.0308,
      "step": 70000
    },
    {
      "epoch": 0.71,
      "grad_norm": 1.9828852415084839,
      "learning_rate": 2.9026000000000004e-06,
      "loss": 1.0309,
      "step": 71000
    },
    {
      "epoch": 0.72,
      "grad_norm": 2.127558708190918,
      "learning_rate": 2.8027000000000003e-06,
      "loss": 1.0334,
      "step": 72000
    },
    {
      "epoch": 0.73,
      "grad_norm": 1.8984837532043457,
      "learning_rate": 2.7028000000000006e-06,
      "loss": 1.0338,
      "step": 73000
    },
    {
      "epoch": 0.74,
      "grad_norm": 2.105077028274536,
      "learning_rate": 2.6028000000000003e-06,
      "loss": 1.0217,
      "step": 74000
    },
    {
      "epoch": 0.75,
      "grad_norm": 2.1777162551879883,
      "learning_rate": 2.5028000000000004e-06,
      "loss": 1.0331,
      "step": 75000
    },
    {
      "epoch": 0.76,
      "grad_norm": 2.2614808082580566,
      "learning_rate": 2.4028e-06,
      "loss": 1.0288,
      "step": 76000
    },
    {
      "epoch": 0.77,
      "grad_norm": 2.123326063156128,
      "learning_rate": 2.3029e-06,
      "loss": 1.0311,
      "step": 77000
    },
    {
      "epoch": 0.78,
      "grad_norm": 2.9798057079315186,
      "learning_rate": 2.2029e-06,
      "loss": 1.0438,
      "step": 78000
    },
    {
      "epoch": 0.79,
      "grad_norm": 2.481782913208008,
      "learning_rate": 2.1029000000000002e-06,
      "loss": 1.0308,
      "step": 79000
    },
    {
      "epoch": 0.8,
      "grad_norm": 2.476494073867798,
      "learning_rate": 2.003e-06,
      "loss": 1.0315,
      "step": 80000
    },
    {
      "epoch": 0.81,
      "grad_norm": 2.2931506633758545,
      "learning_rate": 1.9031000000000003e-06,
      "loss": 1.0232,
      "step": 81000
    },
    {
      "epoch": 0.82,
      "grad_norm": 1.8126013278961182,
      "learning_rate": 1.8031000000000002e-06,
      "loss": 1.028,
      "step": 82000
    },
    {
      "epoch": 0.83,
      "grad_norm": 2.0132505893707275,
      "learning_rate": 1.7031e-06,
      "loss": 1.0293,
      "step": 83000
    },
    {
      "epoch": 0.84,
      "grad_norm": 2.030576467514038,
      "learning_rate": 1.6031000000000002e-06,
      "loss": 1.0266,
      "step": 84000
    },
    {
      "epoch": 0.85,
      "grad_norm": 2.3928701877593994,
      "learning_rate": 1.5031e-06,
      "loss": 1.0303,
      "step": 85000
    },
    {
      "epoch": 0.86,
      "grad_norm": 2.091346502304077,
      "learning_rate": 1.4033000000000002e-06,
      "loss": 1.0303,
      "step": 86000
    },
    {
      "epoch": 0.87,
      "grad_norm": 1.9686309099197388,
      "learning_rate": 1.3033e-06,
      "loss": 1.0339,
      "step": 87000
    },
    {
      "epoch": 0.88,
      "grad_norm": 1.8884007930755615,
      "learning_rate": 1.2034000000000002e-06,
      "loss": 1.0289,
      "step": 88000
    },
    {
      "epoch": 0.89,
      "grad_norm": 2.19309663772583,
      "learning_rate": 1.1034e-06,
      "loss": 1.0257,
      "step": 89000
    },
    {
      "epoch": 0.9,
      "grad_norm": 2.507934808731079,
      "learning_rate": 1.0034e-06,
      "loss": 1.0228,
      "step": 90000
    },
    {
      "epoch": 0.91,
      "grad_norm": 2.336470603942871,
      "learning_rate": 9.034000000000001e-07,
      "loss": 1.0232,
      "step": 91000
    },
    {
      "epoch": 0.92,
      "grad_norm": 1.7181164026260376,
      "learning_rate": 8.034e-07,
      "loss": 1.0283,
      "step": 92000
    },
    {
      "epoch": 0.93,
      "grad_norm": 1.9081435203552246,
      "learning_rate": 7.034e-07,
      "loss": 1.0235,
      "step": 93000
    },
    {
      "epoch": 0.94,
      "grad_norm": 1.7745755910873413,
      "learning_rate": 6.034e-07,
      "loss": 1.0254,
      "step": 94000
    },
    {
      "epoch": 0.95,
      "grad_norm": 2.0873942375183105,
      "learning_rate": 5.035e-07,
      "loss": 1.0323,
      "step": 95000
    },
    {
      "epoch": 0.96,
      "grad_norm": 2.1039388179779053,
      "learning_rate": 4.035e-07,
      "loss": 1.0247,
      "step": 96000
    },
    {
      "epoch": 0.97,
      "grad_norm": 1.9102365970611572,
      "learning_rate": 3.0360000000000005e-07,
      "loss": 1.0313,
      "step": 97000
    },
    {
      "epoch": 0.98,
      "grad_norm": 2.2324767112731934,
      "learning_rate": 2.037e-07,
      "loss": 1.0193,
      "step": 98000
    },
    {
      "epoch": 0.99,
      "grad_norm": 2.005739450454712,
      "learning_rate": 1.0370000000000002e-07,
      "loss": 1.0318,
      "step": 99000
    },
    {
      "epoch": 1.0,
      "grad_norm": 2.0899102687835693,
      "learning_rate": 3.8e-09,
      "loss": 1.0217,
      "step": 100000
    }
  ],
  "logging_steps": 1000,
  "max_steps": 100000,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 9223372036854775807,
  "save_steps": 20000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.0909051256832e+18,
  "train_batch_size": 2,
  "trial_name": null,
  "trial_params": null
}