| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 1.0, | |
| "eval_steps": 500, | |
| "global_step": 493, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 0.0, | |
| "loss": 2.0613, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0, | |
| "learning_rate": 0.0001, | |
| "loss": 2.0156, | |
| "step": 2 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 0.0002, | |
| "loss": 1.8851, | |
| "step": 3 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 0.0001995983935742972, | |
| "loss": 1.7813, | |
| "step": 4 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 0.0001991967871485944, | |
| "loss": 1.5375, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 0.00019879518072289158, | |
| "loss": 1.3064, | |
| "step": 6 | |
| }, | |
| { | |
| "epoch": 0.01, | |
| "learning_rate": 0.00019839357429718877, | |
| "loss": 1.143, | |
| "step": 7 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 0.00019799196787148596, | |
| "loss": 0.9751, | |
| "step": 8 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 0.00019759036144578314, | |
| "loss": 0.9015, | |
| "step": 9 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 0.00019718875502008033, | |
| "loss": 0.8413, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 0.00019678714859437752, | |
| "loss": 0.7695, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.02, | |
| "learning_rate": 0.0001963855421686747, | |
| "loss": 0.7559, | |
| "step": 12 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 0.0001959839357429719, | |
| "loss": 0.7035, | |
| "step": 13 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 0.00019558232931726906, | |
| "loss": 0.8613, | |
| "step": 14 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 0.00019518072289156628, | |
| "loss": 0.5807, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 0.00019477911646586347, | |
| "loss": 0.6502, | |
| "step": 16 | |
| }, | |
| { | |
| "epoch": 0.03, | |
| "learning_rate": 0.00019437751004016066, | |
| "loss": 0.6727, | |
| "step": 17 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 0.00019397590361445782, | |
| "loss": 0.6364, | |
| "step": 18 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 0.00019357429718875504, | |
| "loss": 0.5241, | |
| "step": 19 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 0.00019317269076305223, | |
| "loss": 0.6633, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 0.00019277108433734942, | |
| "loss": 0.4925, | |
| "step": 21 | |
| }, | |
| { | |
| "epoch": 0.04, | |
| "learning_rate": 0.00019236947791164658, | |
| "loss": 0.6516, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 0.00019196787148594377, | |
| "loss": 0.6102, | |
| "step": 23 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 0.00019156626506024098, | |
| "loss": 0.7041, | |
| "step": 24 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 0.00019116465863453817, | |
| "loss": 0.7139, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 0.00019076305220883533, | |
| "loss": 0.6343, | |
| "step": 26 | |
| }, | |
| { | |
| "epoch": 0.05, | |
| "learning_rate": 0.00019036144578313252, | |
| "loss": 0.623, | |
| "step": 27 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 0.00018995983935742974, | |
| "loss": 0.4868, | |
| "step": 28 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 0.00018955823293172693, | |
| "loss": 0.6889, | |
| "step": 29 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 0.0001891566265060241, | |
| "loss": 0.5797, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 0.00018875502008032128, | |
| "loss": 0.732, | |
| "step": 31 | |
| }, | |
| { | |
| "epoch": 0.06, | |
| "learning_rate": 0.0001883534136546185, | |
| "loss": 0.6025, | |
| "step": 32 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 0.00018795180722891569, | |
| "loss": 0.6949, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 0.00018755020080321285, | |
| "loss": 0.7054, | |
| "step": 34 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 0.00018714859437751004, | |
| "loss": 0.6639, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.07, | |
| "learning_rate": 0.00018674698795180723, | |
| "loss": 0.5592, | |
| "step": 36 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 0.00018634538152610444, | |
| "loss": 0.7139, | |
| "step": 37 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 0.0001859437751004016, | |
| "loss": 0.6933, | |
| "step": 38 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 0.0001855421686746988, | |
| "loss": 0.6588, | |
| "step": 39 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 0.00018514056224899598, | |
| "loss": 0.6297, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.08, | |
| "learning_rate": 0.0001847389558232932, | |
| "loss": 0.7057, | |
| "step": 41 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 0.00018433734939759036, | |
| "loss": 0.7226, | |
| "step": 42 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 0.00018393574297188755, | |
| "loss": 0.5385, | |
| "step": 43 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 0.00018353413654618474, | |
| "loss": 0.5898, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 0.00018313253012048193, | |
| "loss": 0.5986, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.09, | |
| "learning_rate": 0.00018273092369477912, | |
| "loss": 0.5737, | |
| "step": 46 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 0.0001823293172690763, | |
| "loss": 0.6359, | |
| "step": 47 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 0.0001819277108433735, | |
| "loss": 0.6719, | |
| "step": 48 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 0.0001815261044176707, | |
| "loss": 0.5734, | |
| "step": 49 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 0.0001811244979919679, | |
| "loss": 0.677, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.1, | |
| "learning_rate": 0.00018072289156626507, | |
| "loss": 0.6028, | |
| "step": 51 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 0.00018032128514056225, | |
| "loss": 0.5125, | |
| "step": 52 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 0.00017991967871485944, | |
| "loss": 0.5452, | |
| "step": 53 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 0.00017951807228915663, | |
| "loss": 0.5815, | |
| "step": 54 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 0.00017911646586345382, | |
| "loss": 0.6096, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.11, | |
| "learning_rate": 0.000178714859437751, | |
| "loss": 0.5586, | |
| "step": 56 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 0.0001783132530120482, | |
| "loss": 0.5975, | |
| "step": 57 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 0.0001779116465863454, | |
| "loss": 0.5385, | |
| "step": 58 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 0.00017751004016064258, | |
| "loss": 0.6037, | |
| "step": 59 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 0.00017710843373493977, | |
| "loss": 0.5263, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.12, | |
| "learning_rate": 0.00017670682730923696, | |
| "loss": 0.5214, | |
| "step": 61 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 0.00017630522088353415, | |
| "loss": 0.5572, | |
| "step": 62 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 0.00017590361445783134, | |
| "loss": 0.5407, | |
| "step": 63 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 0.00017550200803212853, | |
| "loss": 0.642, | |
| "step": 64 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 0.00017510040160642571, | |
| "loss": 0.564, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.13, | |
| "learning_rate": 0.00017510040160642571, | |
| "loss": 0.5841, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 0.0001746987951807229, | |
| "loss": 0.5943, | |
| "step": 67 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 0.0001742971887550201, | |
| "loss": 0.6104, | |
| "step": 68 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 0.00017389558232931728, | |
| "loss": 0.6396, | |
| "step": 69 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 0.00017389558232931728, | |
| "loss": 0.5897, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.14, | |
| "learning_rate": 0.00017349397590361447, | |
| "loss": 0.6791, | |
| "step": 71 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 0.00017349397590361447, | |
| "loss": 0.5242, | |
| "step": 72 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 0.00017309236947791166, | |
| "loss": 0.5663, | |
| "step": 73 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 0.00017269076305220885, | |
| "loss": 0.64, | |
| "step": 74 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 0.00017228915662650604, | |
| "loss": 0.6026, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.15, | |
| "learning_rate": 0.00017188755020080323, | |
| "loss": 0.6369, | |
| "step": 76 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 0.00017148594377510042, | |
| "loss": 0.5729, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 0.0001710843373493976, | |
| "loss": 0.6093, | |
| "step": 78 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 0.00017068273092369477, | |
| "loss": 0.4828, | |
| "step": 79 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 0.00017028112449799199, | |
| "loss": 0.6198, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.16, | |
| "learning_rate": 0.00016987951807228917, | |
| "loss": 0.5897, | |
| "step": 81 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 0.00016947791164658636, | |
| "loss": 0.5199, | |
| "step": 82 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 0.00016907630522088353, | |
| "loss": 0.6618, | |
| "step": 83 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 0.00016867469879518074, | |
| "loss": 0.6122, | |
| "step": 84 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 0.00016827309236947793, | |
| "loss": 0.5936, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.17, | |
| "learning_rate": 0.00016787148594377512, | |
| "loss": 0.5536, | |
| "step": 86 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 0.00016746987951807228, | |
| "loss": 0.5842, | |
| "step": 87 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 0.00016706827309236947, | |
| "loss": 0.5186, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 0.0001666666666666667, | |
| "loss": 0.6374, | |
| "step": 89 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 0.00016626506024096388, | |
| "loss": 0.5971, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.18, | |
| "learning_rate": 0.00016586345381526104, | |
| "loss": 0.5287, | |
| "step": 91 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 0.00016546184738955823, | |
| "loss": 0.5564, | |
| "step": 92 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 0.00016506024096385545, | |
| "loss": 0.5261, | |
| "step": 93 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 0.00016465863453815263, | |
| "loss": 0.6054, | |
| "step": 94 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 0.0001642570281124498, | |
| "loss": 0.5507, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.19, | |
| "learning_rate": 0.00016385542168674699, | |
| "loss": 0.5036, | |
| "step": 96 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 0.00016345381526104417, | |
| "loss": 0.6015, | |
| "step": 97 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 0.0001630522088353414, | |
| "loss": 0.4337, | |
| "step": 98 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 0.00016265060240963855, | |
| "loss": 0.4683, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 0.00016224899598393574, | |
| "loss": 0.5412, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.2, | |
| "learning_rate": 0.00016184738955823293, | |
| "loss": 0.4477, | |
| "step": 101 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 0.00016144578313253015, | |
| "loss": 0.604, | |
| "step": 102 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 0.0001610441767068273, | |
| "loss": 0.4492, | |
| "step": 103 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 0.0001606425702811245, | |
| "loss": 0.5725, | |
| "step": 104 | |
| }, | |
| { | |
| "epoch": 0.21, | |
| "learning_rate": 0.0001602409638554217, | |
| "loss": 0.5427, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 0.00015983935742971888, | |
| "loss": 0.5181, | |
| "step": 106 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 0.00015943775100401607, | |
| "loss": 0.5349, | |
| "step": 107 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 0.00015903614457831326, | |
| "loss": 0.5541, | |
| "step": 108 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 0.00015863453815261045, | |
| "loss": 0.645, | |
| "step": 109 | |
| }, | |
| { | |
| "epoch": 0.22, | |
| "learning_rate": 0.00015823293172690763, | |
| "loss": 0.4867, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 0.00015783132530120482, | |
| "loss": 0.5756, | |
| "step": 111 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 0.000157429718875502, | |
| "loss": 0.5538, | |
| "step": 112 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 0.0001570281124497992, | |
| "loss": 0.5045, | |
| "step": 113 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 0.0001566265060240964, | |
| "loss": 0.5994, | |
| "step": 114 | |
| }, | |
| { | |
| "epoch": 0.23, | |
| "learning_rate": 0.00015622489959839358, | |
| "loss": 0.501, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 0.00015582329317269077, | |
| "loss": 0.7085, | |
| "step": 116 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 0.00015542168674698796, | |
| "loss": 0.4625, | |
| "step": 117 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 0.00015502008032128515, | |
| "loss": 0.5, | |
| "step": 118 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 0.00015461847389558234, | |
| "loss": 0.5701, | |
| "step": 119 | |
| }, | |
| { | |
| "epoch": 0.24, | |
| "learning_rate": 0.00015421686746987953, | |
| "loss": 0.6223, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 0.00015381526104417672, | |
| "loss": 0.5365, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 0.0001534136546184739, | |
| "loss": 0.5218, | |
| "step": 122 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 0.0001530120481927711, | |
| "loss": 0.4085, | |
| "step": 123 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 0.00015261044176706828, | |
| "loss": 0.6416, | |
| "step": 124 | |
| }, | |
| { | |
| "epoch": 0.25, | |
| "learning_rate": 0.00015220883534136547, | |
| "loss": 0.5738, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 0.00015180722891566266, | |
| "loss": 0.5344, | |
| "step": 126 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 0.00015140562248995985, | |
| "loss": 0.4046, | |
| "step": 127 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 0.00015100401606425701, | |
| "loss": 0.533, | |
| "step": 128 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 0.00015060240963855423, | |
| "loss": 0.5562, | |
| "step": 129 | |
| }, | |
| { | |
| "epoch": 0.26, | |
| "learning_rate": 0.00015020080321285142, | |
| "loss": 0.5604, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.0001497991967871486, | |
| "loss": 0.4536, | |
| "step": 131 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00014939759036144577, | |
| "loss": 0.6567, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.000148995983935743, | |
| "loss": 0.5409, | |
| "step": 133 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00014859437751004018, | |
| "loss": 0.5631, | |
| "step": 134 | |
| }, | |
| { | |
| "epoch": 0.27, | |
| "learning_rate": 0.00014819277108433737, | |
| "loss": 0.6738, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00014779116465863453, | |
| "loss": 0.6269, | |
| "step": 136 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00014738955823293172, | |
| "loss": 0.4706, | |
| "step": 137 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00014698795180722893, | |
| "loss": 0.5329, | |
| "step": 138 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00014658634538152612, | |
| "loss": 0.5667, | |
| "step": 139 | |
| }, | |
| { | |
| "epoch": 0.28, | |
| "learning_rate": 0.00014618473895582328, | |
| "loss": 0.5383, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00014578313253012047, | |
| "loss": 0.5738, | |
| "step": 141 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.0001453815261044177, | |
| "loss": 0.5858, | |
| "step": 142 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00014497991967871488, | |
| "loss": 0.547, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00014457831325301204, | |
| "loss": 0.5007, | |
| "step": 144 | |
| }, | |
| { | |
| "epoch": 0.29, | |
| "learning_rate": 0.00014417670682730923, | |
| "loss": 0.4873, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00014377510040160642, | |
| "loss": 0.5448, | |
| "step": 146 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00014337349397590364, | |
| "loss": 0.5411, | |
| "step": 147 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.0001429718875502008, | |
| "loss": 0.4557, | |
| "step": 148 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.000142570281124498, | |
| "loss": 0.6834, | |
| "step": 149 | |
| }, | |
| { | |
| "epoch": 0.3, | |
| "learning_rate": 0.00014216867469879518, | |
| "loss": 0.4711, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.0001417670682730924, | |
| "loss": 0.5493, | |
| "step": 151 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00014136546184738956, | |
| "loss": 0.6398, | |
| "step": 152 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00014096385542168674, | |
| "loss": 0.431, | |
| "step": 153 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00014056224899598393, | |
| "loss": 0.5327, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.31, | |
| "learning_rate": 0.00014016064257028115, | |
| "loss": 0.4872, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00013975903614457834, | |
| "loss": 0.5585, | |
| "step": 156 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.0001393574297188755, | |
| "loss": 0.5265, | |
| "step": 157 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.0001389558232931727, | |
| "loss": 0.4452, | |
| "step": 158 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.00013855421686746988, | |
| "loss": 0.4484, | |
| "step": 159 | |
| }, | |
| { | |
| "epoch": 0.32, | |
| "learning_rate": 0.0001381526104417671, | |
| "loss": 0.5937, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00013775100401606426, | |
| "loss": 0.633, | |
| "step": 161 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00013734939759036145, | |
| "loss": 0.5567, | |
| "step": 162 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00013694779116465864, | |
| "loss": 0.4977, | |
| "step": 163 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00013654618473895585, | |
| "loss": 0.5725, | |
| "step": 164 | |
| }, | |
| { | |
| "epoch": 0.33, | |
| "learning_rate": 0.00013614457831325302, | |
| "loss": 0.4755, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.0001357429718875502, | |
| "loss": 0.5775, | |
| "step": 166 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.0001353413654618474, | |
| "loss": 0.5108, | |
| "step": 167 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00013493975903614458, | |
| "loss": 0.4864, | |
| "step": 168 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00013453815261044177, | |
| "loss": 0.453, | |
| "step": 169 | |
| }, | |
| { | |
| "epoch": 0.34, | |
| "learning_rate": 0.00013413654618473896, | |
| "loss": 0.5195, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00013373493975903615, | |
| "loss": 0.505, | |
| "step": 171 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00013333333333333334, | |
| "loss": 0.5824, | |
| "step": 172 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00013293172690763053, | |
| "loss": 0.5636, | |
| "step": 173 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.00013253012048192772, | |
| "loss": 0.5301, | |
| "step": 174 | |
| }, | |
| { | |
| "epoch": 0.35, | |
| "learning_rate": 0.0001321285140562249, | |
| "loss": 0.5407, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.0001317269076305221, | |
| "loss": 0.5523, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00013132530120481929, | |
| "loss": 0.4935, | |
| "step": 177 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00013092369477911648, | |
| "loss": 0.5117, | |
| "step": 178 | |
| }, | |
| { | |
| "epoch": 0.36, | |
| "learning_rate": 0.00013052208835341366, | |
| "loss": 0.5467, | |
| "step": 179 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00013012048192771085, | |
| "loss": 0.483, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00012971887550200804, | |
| "loss": 0.5672, | |
| "step": 181 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00012931726907630523, | |
| "loss": 0.5243, | |
| "step": 182 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.00012891566265060242, | |
| "loss": 0.5918, | |
| "step": 183 | |
| }, | |
| { | |
| "epoch": 0.37, | |
| "learning_rate": 0.0001285140562248996, | |
| "loss": 0.5734, | |
| "step": 184 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.0001281124497991968, | |
| "loss": 0.4241, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00012771084337349396, | |
| "loss": 0.4793, | |
| "step": 186 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00012730923694779118, | |
| "loss": 0.5891, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00012690763052208837, | |
| "loss": 0.3679, | |
| "step": 188 | |
| }, | |
| { | |
| "epoch": 0.38, | |
| "learning_rate": 0.00012650602409638556, | |
| "loss": 0.5683, | |
| "step": 189 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00012610441767068272, | |
| "loss": 0.6756, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00012570281124497994, | |
| "loss": 0.5447, | |
| "step": 191 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00012530120481927712, | |
| "loss": 0.5521, | |
| "step": 192 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.0001248995983935743, | |
| "loss": 0.4863, | |
| "step": 193 | |
| }, | |
| { | |
| "epoch": 0.39, | |
| "learning_rate": 0.00012449799196787148, | |
| "loss": 0.4513, | |
| "step": 194 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.0001240963855421687, | |
| "loss": 0.4295, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00012369477911646588, | |
| "loss": 0.6476, | |
| "step": 196 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00012329317269076307, | |
| "loss": 0.7451, | |
| "step": 197 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00012289156626506023, | |
| "loss": 0.4192, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.4, | |
| "learning_rate": 0.00012248995983935742, | |
| "loss": 0.5759, | |
| "step": 199 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00012208835341365464, | |
| "loss": 0.4549, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00012168674698795181, | |
| "loss": 0.4356, | |
| "step": 201 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.000121285140562249, | |
| "loss": 0.4919, | |
| "step": 202 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.00012088353413654618, | |
| "loss": 0.4903, | |
| "step": 203 | |
| }, | |
| { | |
| "epoch": 0.41, | |
| "learning_rate": 0.0001204819277108434, | |
| "loss": 0.5258, | |
| "step": 204 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00012008032128514057, | |
| "loss": 0.4879, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00011967871485943776, | |
| "loss": 0.6125, | |
| "step": 206 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00011927710843373494, | |
| "loss": 0.3603, | |
| "step": 207 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00011887550200803212, | |
| "loss": 0.5075, | |
| "step": 208 | |
| }, | |
| { | |
| "epoch": 0.42, | |
| "learning_rate": 0.00011847389558232933, | |
| "loss": 0.477, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00011807228915662652, | |
| "loss": 0.5993, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00011767068273092369, | |
| "loss": 0.6308, | |
| "step": 211 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00011726907630522088, | |
| "loss": 0.5536, | |
| "step": 212 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00011686746987951808, | |
| "loss": 0.543, | |
| "step": 213 | |
| }, | |
| { | |
| "epoch": 0.43, | |
| "learning_rate": 0.00011646586345381527, | |
| "loss": 0.4425, | |
| "step": 214 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00011606425702811245, | |
| "loss": 0.4654, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00011566265060240964, | |
| "loss": 0.6456, | |
| "step": 216 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00011526104417670683, | |
| "loss": 0.3711, | |
| "step": 217 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.00011485943775100403, | |
| "loss": 0.6118, | |
| "step": 218 | |
| }, | |
| { | |
| "epoch": 0.44, | |
| "learning_rate": 0.0001144578313253012, | |
| "loss": 0.4701, | |
| "step": 219 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.0001140562248995984, | |
| "loss": 0.6464, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00011365461847389558, | |
| "loss": 0.5584, | |
| "step": 221 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00011325301204819279, | |
| "loss": 0.4335, | |
| "step": 222 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00011285140562248996, | |
| "loss": 0.4801, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 0.45, | |
| "learning_rate": 0.00011244979919678715, | |
| "loss": 0.3407, | |
| "step": 224 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011204819277108434, | |
| "loss": 0.6034, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011164658634538152, | |
| "loss": 0.5506, | |
| "step": 226 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011124497991967872, | |
| "loss": 0.481, | |
| "step": 227 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.00011084337349397591, | |
| "loss": 0.4276, | |
| "step": 228 | |
| }, | |
| { | |
| "epoch": 0.46, | |
| "learning_rate": 0.0001104417670682731, | |
| "loss": 0.4313, | |
| "step": 229 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00011004016064257027, | |
| "loss": 0.4949, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00010963855421686749, | |
| "loss": 0.6185, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00010923694779116467, | |
| "loss": 0.4672, | |
| "step": 232 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00010883534136546186, | |
| "loss": 0.4117, | |
| "step": 233 | |
| }, | |
| { | |
| "epoch": 0.47, | |
| "learning_rate": 0.00010843373493975903, | |
| "loss": 0.4563, | |
| "step": 234 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00010803212851405625, | |
| "loss": 0.4599, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00010763052208835342, | |
| "loss": 0.4727, | |
| "step": 236 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00010722891566265061, | |
| "loss": 0.4977, | |
| "step": 237 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00010682730923694779, | |
| "loss": 0.4998, | |
| "step": 238 | |
| }, | |
| { | |
| "epoch": 0.48, | |
| "learning_rate": 0.00010642570281124498, | |
| "loss": 0.5356, | |
| "step": 239 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010602409638554218, | |
| "loss": 0.682, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010562248995983937, | |
| "loss": 0.4225, | |
| "step": 241 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010522088353413654, | |
| "loss": 0.4163, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010481927710843373, | |
| "loss": 0.5193, | |
| "step": 243 | |
| }, | |
| { | |
| "epoch": 0.49, | |
| "learning_rate": 0.00010441767068273094, | |
| "loss": 0.4881, | |
| "step": 244 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010401606425702813, | |
| "loss": 0.5973, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.0001036144578313253, | |
| "loss": 0.6056, | |
| "step": 246 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010321285140562249, | |
| "loss": 0.4962, | |
| "step": 247 | |
| }, | |
| { | |
| "epoch": 0.5, | |
| "learning_rate": 0.00010281124497991968, | |
| "loss": 0.4292, | |
| "step": 248 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010240963855421688, | |
| "loss": 0.3025, | |
| "step": 249 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010200803212851406, | |
| "loss": 0.5663, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010160642570281125, | |
| "loss": 0.5619, | |
| "step": 251 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010120481927710844, | |
| "loss": 0.3658, | |
| "step": 252 | |
| }, | |
| { | |
| "epoch": 0.51, | |
| "learning_rate": 0.00010080321285140564, | |
| "loss": 0.3896, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 0.00010040160642570282, | |
| "loss": 0.5241, | |
| "step": 254 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 0.0001, | |
| "loss": 0.5142, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.95983935742972e-05, | |
| "loss": 0.4884, | |
| "step": 256 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.919678714859438e-05, | |
| "loss": 0.5735, | |
| "step": 257 | |
| }, | |
| { | |
| "epoch": 0.52, | |
| "learning_rate": 9.879518072289157e-05, | |
| "loss": 0.583, | |
| "step": 258 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.839357429718876e-05, | |
| "loss": 0.4489, | |
| "step": 259 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.799196787148595e-05, | |
| "loss": 0.4264, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.759036144578314e-05, | |
| "loss": 0.4278, | |
| "step": 261 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.718875502008033e-05, | |
| "loss": 0.4702, | |
| "step": 262 | |
| }, | |
| { | |
| "epoch": 0.53, | |
| "learning_rate": 9.678714859437752e-05, | |
| "loss": 0.449, | |
| "step": 263 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.638554216867471e-05, | |
| "loss": 0.4962, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.598393574297188e-05, | |
| "loss": 0.3863, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.558232931726909e-05, | |
| "loss": 0.5568, | |
| "step": 266 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.518072289156626e-05, | |
| "loss": 0.5375, | |
| "step": 267 | |
| }, | |
| { | |
| "epoch": 0.54, | |
| "learning_rate": 9.477911646586346e-05, | |
| "loss": 0.5499, | |
| "step": 268 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 9.437751004016064e-05, | |
| "loss": 0.4131, | |
| "step": 269 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 9.397590361445784e-05, | |
| "loss": 0.5534, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 9.357429718875502e-05, | |
| "loss": 0.4585, | |
| "step": 271 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 9.317269076305222e-05, | |
| "loss": 0.4265, | |
| "step": 272 | |
| }, | |
| { | |
| "epoch": 0.55, | |
| "learning_rate": 9.27710843373494e-05, | |
| "loss": 0.5282, | |
| "step": 273 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 9.23694779116466e-05, | |
| "loss": 0.4547, | |
| "step": 274 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 9.196787148594378e-05, | |
| "loss": 0.4545, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 9.156626506024096e-05, | |
| "loss": 0.5389, | |
| "step": 276 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 9.116465863453815e-05, | |
| "loss": 0.4894, | |
| "step": 277 | |
| }, | |
| { | |
| "epoch": 0.56, | |
| "learning_rate": 9.076305220883534e-05, | |
| "loss": 0.6222, | |
| "step": 278 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 9.036144578313253e-05, | |
| "loss": 0.5307, | |
| "step": 279 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.995983935742972e-05, | |
| "loss": 0.5401, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.955823293172691e-05, | |
| "loss": 0.4126, | |
| "step": 281 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.91566265060241e-05, | |
| "loss": 0.5427, | |
| "step": 282 | |
| }, | |
| { | |
| "epoch": 0.57, | |
| "learning_rate": 8.875502008032129e-05, | |
| "loss": 0.5385, | |
| "step": 283 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 8.835341365461848e-05, | |
| "loss": 0.5756, | |
| "step": 284 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 8.795180722891567e-05, | |
| "loss": 0.5019, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 8.755020080321286e-05, | |
| "loss": 0.5949, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 8.714859437751005e-05, | |
| "loss": 0.4836, | |
| "step": 287 | |
| }, | |
| { | |
| "epoch": 0.58, | |
| "learning_rate": 8.674698795180724e-05, | |
| "loss": 0.4241, | |
| "step": 288 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 8.634538152610442e-05, | |
| "loss": 0.4537, | |
| "step": 289 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 8.594377510040161e-05, | |
| "loss": 0.5246, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 8.55421686746988e-05, | |
| "loss": 0.5373, | |
| "step": 291 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 8.514056224899599e-05, | |
| "loss": 0.3863, | |
| "step": 292 | |
| }, | |
| { | |
| "epoch": 0.59, | |
| "learning_rate": 8.473895582329318e-05, | |
| "loss": 0.5396, | |
| "step": 293 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 8.433734939759037e-05, | |
| "loss": 0.4953, | |
| "step": 294 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 8.393574297188756e-05, | |
| "loss": 0.5412, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 8.353413654618474e-05, | |
| "loss": 0.441, | |
| "step": 296 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 8.313253012048194e-05, | |
| "loss": 0.4736, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 0.6, | |
| "learning_rate": 8.273092369477911e-05, | |
| "loss": 0.502, | |
| "step": 298 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 8.232931726907632e-05, | |
| "loss": 0.459, | |
| "step": 299 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 8.192771084337349e-05, | |
| "loss": 0.6463, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 8.15261044176707e-05, | |
| "loss": 0.476, | |
| "step": 301 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 8.112449799196787e-05, | |
| "loss": 0.3673, | |
| "step": 302 | |
| }, | |
| { | |
| "epoch": 0.61, | |
| "learning_rate": 8.072289156626507e-05, | |
| "loss": 0.4755, | |
| "step": 303 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 8.032128514056225e-05, | |
| "loss": 0.5218, | |
| "step": 304 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 7.991967871485944e-05, | |
| "loss": 0.4963, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 7.951807228915663e-05, | |
| "loss": 0.388, | |
| "step": 306 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 7.911646586345382e-05, | |
| "loss": 0.4418, | |
| "step": 307 | |
| }, | |
| { | |
| "epoch": 0.62, | |
| "learning_rate": 7.8714859437751e-05, | |
| "loss": 0.4558, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 7.83132530120482e-05, | |
| "loss": 0.6408, | |
| "step": 309 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 7.791164658634539e-05, | |
| "loss": 0.4292, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 7.751004016064257e-05, | |
| "loss": 0.5375, | |
| "step": 311 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 7.710843373493976e-05, | |
| "loss": 0.489, | |
| "step": 312 | |
| }, | |
| { | |
| "epoch": 0.63, | |
| "learning_rate": 7.670682730923695e-05, | |
| "loss": 0.4983, | |
| "step": 313 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 7.630522088353414e-05, | |
| "loss": 0.4975, | |
| "step": 314 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 7.590361445783133e-05, | |
| "loss": 0.4147, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 7.550200803212851e-05, | |
| "loss": 0.454, | |
| "step": 316 | |
| }, | |
| { | |
| "epoch": 0.64, | |
| "learning_rate": 7.510040160642571e-05, | |
| "loss": 0.4512, | |
| "step": 317 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 7.469879518072289e-05, | |
| "loss": 0.4948, | |
| "step": 318 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 7.429718875502009e-05, | |
| "loss": 0.4972, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 7.389558232931726e-05, | |
| "loss": 0.3688, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 7.349397590361447e-05, | |
| "loss": 0.652, | |
| "step": 321 | |
| }, | |
| { | |
| "epoch": 0.65, | |
| "learning_rate": 7.309236947791164e-05, | |
| "loss": 0.4209, | |
| "step": 322 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 7.269076305220885e-05, | |
| "loss": 0.482, | |
| "step": 323 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 7.228915662650602e-05, | |
| "loss": 0.5904, | |
| "step": 324 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 7.188755020080321e-05, | |
| "loss": 0.4019, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 7.14859437751004e-05, | |
| "loss": 0.5322, | |
| "step": 326 | |
| }, | |
| { | |
| "epoch": 0.66, | |
| "learning_rate": 7.108433734939759e-05, | |
| "loss": 0.5597, | |
| "step": 327 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 7.068273092369478e-05, | |
| "loss": 0.4894, | |
| "step": 328 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 7.028112449799197e-05, | |
| "loss": 0.4797, | |
| "step": 329 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 6.987951807228917e-05, | |
| "loss": 0.395, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 6.947791164658635e-05, | |
| "loss": 0.3612, | |
| "step": 331 | |
| }, | |
| { | |
| "epoch": 0.67, | |
| "learning_rate": 6.907630522088355e-05, | |
| "loss": 0.423, | |
| "step": 332 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 6.867469879518072e-05, | |
| "loss": 0.6258, | |
| "step": 333 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 6.827309236947793e-05, | |
| "loss": 0.4617, | |
| "step": 334 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 6.78714859437751e-05, | |
| "loss": 0.4518, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 6.746987951807229e-05, | |
| "loss": 0.4168, | |
| "step": 336 | |
| }, | |
| { | |
| "epoch": 0.68, | |
| "learning_rate": 6.706827309236948e-05, | |
| "loss": 0.4555, | |
| "step": 337 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 6.666666666666667e-05, | |
| "loss": 0.4744, | |
| "step": 338 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 6.626506024096386e-05, | |
| "loss": 0.453, | |
| "step": 339 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 6.586345381526105e-05, | |
| "loss": 0.4896, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 6.546184738955824e-05, | |
| "loss": 0.4241, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 0.69, | |
| "learning_rate": 6.506024096385543e-05, | |
| "loss": 0.4516, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 6.465863453815262e-05, | |
| "loss": 0.4736, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 6.42570281124498e-05, | |
| "loss": 0.4267, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 6.385542168674698e-05, | |
| "loss": 0.448, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 6.345381526104418e-05, | |
| "loss": 0.4851, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 0.7, | |
| "learning_rate": 6.305220883534136e-05, | |
| "loss": 0.5561, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 6.265060240963856e-05, | |
| "loss": 0.4359, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 6.224899598393574e-05, | |
| "loss": 0.4507, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 6.184738955823294e-05, | |
| "loss": 0.4253, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 6.144578313253012e-05, | |
| "loss": 0.4921, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 0.71, | |
| "learning_rate": 6.104417670682732e-05, | |
| "loss": 0.469, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 6.06425702811245e-05, | |
| "loss": 0.5165, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 6.02409638554217e-05, | |
| "loss": 0.4878, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 5.983935742971888e-05, | |
| "loss": 0.4495, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 5.943775100401606e-05, | |
| "loss": 0.4517, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 0.72, | |
| "learning_rate": 5.903614457831326e-05, | |
| "loss": 0.4733, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 5.863453815261044e-05, | |
| "loss": 0.4265, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 5.823293172690764e-05, | |
| "loss": 0.5215, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 5.783132530120482e-05, | |
| "loss": 0.4302, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 5.7429718875502015e-05, | |
| "loss": 0.4549, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 0.73, | |
| "learning_rate": 5.70281124497992e-05, | |
| "loss": 0.5584, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 5.6626506024096394e-05, | |
| "loss": 0.5008, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 5.6224899598393576e-05, | |
| "loss": 0.5518, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 5.582329317269076e-05, | |
| "loss": 0.4294, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 5.5421686746987955e-05, | |
| "loss": 0.4513, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 0.74, | |
| "learning_rate": 5.502008032128514e-05, | |
| "loss": 0.4221, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 5.461847389558233e-05, | |
| "loss": 0.4586, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 5.4216867469879516e-05, | |
| "loss": 0.5993, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 5.381526104417671e-05, | |
| "loss": 0.4284, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 5.3413654618473894e-05, | |
| "loss": 0.3946, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 0.75, | |
| "learning_rate": 5.301204819277109e-05, | |
| "loss": 0.526, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 5.261044176706827e-05, | |
| "loss": 0.5814, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 5.220883534136547e-05, | |
| "loss": 0.4208, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 5.180722891566265e-05, | |
| "loss": 0.5794, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 5.140562248995984e-05, | |
| "loss": 0.5465, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 0.76, | |
| "learning_rate": 5.100401606425703e-05, | |
| "loss": 0.5736, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 5.060240963855422e-05, | |
| "loss": 0.3972, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 5.020080321285141e-05, | |
| "loss": 0.4275, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 4.97991967871486e-05, | |
| "loss": 0.4918, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 4.9397590361445786e-05, | |
| "loss": 0.3725, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 0.77, | |
| "learning_rate": 4.8995983935742975e-05, | |
| "loss": 0.4689, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 4.8594377510040165e-05, | |
| "loss": 0.4357, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 4.8192771084337354e-05, | |
| "loss": 0.5619, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 4.779116465863454e-05, | |
| "loss": 0.422, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 4.738955823293173e-05, | |
| "loss": 0.517, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 0.78, | |
| "learning_rate": 4.698795180722892e-05, | |
| "loss": 0.38, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 4.658634538152611e-05, | |
| "loss": 0.5408, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 4.61847389558233e-05, | |
| "loss": 0.4982, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 4.578313253012048e-05, | |
| "loss": 0.5085, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.79, | |
| "learning_rate": 4.538152610441767e-05, | |
| "loss": 0.5275, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.497991967871486e-05, | |
| "loss": 0.583, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.457831325301205e-05, | |
| "loss": 0.4223, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.417670682730924e-05, | |
| "loss": 0.3978, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.377510040160643e-05, | |
| "loss": 0.46, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.8, | |
| "learning_rate": 4.337349397590362e-05, | |
| "loss": 0.4327, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.297188755020081e-05, | |
| "loss": 0.4635, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.2570281124497996e-05, | |
| "loss": 0.4616, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.2168674698795186e-05, | |
| "loss": 0.493, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.176706827309237e-05, | |
| "loss": 0.4173, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.81, | |
| "learning_rate": 4.136546184738956e-05, | |
| "loss": 0.3762, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.0963855421686746e-05, | |
| "loss": 0.6041, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.0562248995983936e-05, | |
| "loss": 0.3642, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 4.0160642570281125e-05, | |
| "loss": 0.4811, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 3.9759036144578314e-05, | |
| "loss": 0.5519, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.82, | |
| "learning_rate": 3.93574297188755e-05, | |
| "loss": 0.4675, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.895582329317269e-05, | |
| "loss": 0.3397, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.855421686746988e-05, | |
| "loss": 0.4192, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.815261044176707e-05, | |
| "loss": 0.507, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.7751004016064253e-05, | |
| "loss": 0.4024, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.83, | |
| "learning_rate": 3.734939759036144e-05, | |
| "loss": 0.4753, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.694779116465863e-05, | |
| "loss": 0.489, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.654618473895582e-05, | |
| "loss": 0.5496, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.614457831325301e-05, | |
| "loss": 0.3261, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.57429718875502e-05, | |
| "loss": 0.3958, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.84, | |
| "learning_rate": 3.534136546184739e-05, | |
| "loss": 0.5015, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 3.4939759036144585e-05, | |
| "loss": 0.4502, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 3.4538152610441774e-05, | |
| "loss": 0.5057, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 3.413654618473896e-05, | |
| "loss": 0.405, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 3.3734939759036146e-05, | |
| "loss": 0.3629, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.85, | |
| "learning_rate": 3.3333333333333335e-05, | |
| "loss": 0.5095, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 3.2931726907630524e-05, | |
| "loss": 0.4017, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 3.253012048192771e-05, | |
| "loss": 0.5238, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 3.21285140562249e-05, | |
| "loss": 0.5054, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 3.172690763052209e-05, | |
| "loss": 0.5104, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.86, | |
| "learning_rate": 3.132530120481928e-05, | |
| "loss": 0.5683, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 3.092369477911647e-05, | |
| "loss": 0.4381, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 3.052208835341366e-05, | |
| "loss": 0.3411, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 3.012048192771085e-05, | |
| "loss": 0.4983, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.971887550200803e-05, | |
| "loss": 0.5935, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.87, | |
| "learning_rate": 2.931726907630522e-05, | |
| "loss": 0.4499, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 2.891566265060241e-05, | |
| "loss": 0.4653, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 2.85140562248996e-05, | |
| "loss": 0.4165, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 2.8112449799196788e-05, | |
| "loss": 0.4763, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 2.7710843373493977e-05, | |
| "loss": 0.548, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.88, | |
| "learning_rate": 2.7309236947791167e-05, | |
| "loss": 0.5539, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 2.6907630522088356e-05, | |
| "loss": 0.49, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 2.6506024096385545e-05, | |
| "loss": 0.5338, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 2.6104417670682734e-05, | |
| "loss": 0.3611, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 2.570281124497992e-05, | |
| "loss": 0.4707, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.89, | |
| "learning_rate": 2.530120481927711e-05, | |
| "loss": 0.5268, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 2.48995983935743e-05, | |
| "loss": 0.5854, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 2.4497991967871488e-05, | |
| "loss": 0.5126, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 2.4096385542168677e-05, | |
| "loss": 0.4192, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 2.3694779116465866e-05, | |
| "loss": 0.3792, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.9, | |
| "learning_rate": 2.3293172690763055e-05, | |
| "loss": 0.3348, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 2.289156626506024e-05, | |
| "loss": 0.5458, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 2.248995983935743e-05, | |
| "loss": 0.5158, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 2.208835341365462e-05, | |
| "loss": 0.4395, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 2.168674698795181e-05, | |
| "loss": 0.4672, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.91, | |
| "learning_rate": 2.1285140562248998e-05, | |
| "loss": 0.4106, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 2.0883534136546184e-05, | |
| "loss": 0.4594, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 2.0481927710843373e-05, | |
| "loss": 0.4819, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 2.0080321285140562e-05, | |
| "loss": 0.4699, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 1.967871485943775e-05, | |
| "loss": 0.5291, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.92, | |
| "learning_rate": 1.927710843373494e-05, | |
| "loss": 0.3946, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 1.8875502008032127e-05, | |
| "loss": 0.4366, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 1.8473895582329316e-05, | |
| "loss": 0.405, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 1.8072289156626505e-05, | |
| "loss": 0.4309, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 0.93, | |
| "learning_rate": 1.7670682730923694e-05, | |
| "loss": 0.4395, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.7269076305220887e-05, | |
| "loss": 0.3887, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.6867469879518073e-05, | |
| "loss": 0.4208, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.6465863453815262e-05, | |
| "loss": 0.4555, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.606425702811245e-05, | |
| "loss": 0.4111, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 0.94, | |
| "learning_rate": 1.566265060240964e-05, | |
| "loss": 0.527, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.526104417670683e-05, | |
| "loss": 0.5546, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.4859437751004016e-05, | |
| "loss": 0.4826, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.4457831325301205e-05, | |
| "loss": 0.4615, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.4056224899598394e-05, | |
| "loss": 0.3488, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 0.95, | |
| "learning_rate": 1.3654618473895583e-05, | |
| "loss": 0.4609, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.3253012048192772e-05, | |
| "loss": 0.5662, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.285140562248996e-05, | |
| "loss": 0.4465, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.244979919678715e-05, | |
| "loss": 0.4034, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.2048192771084338e-05, | |
| "loss": 0.5641, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 0.96, | |
| "learning_rate": 1.1646586345381528e-05, | |
| "loss": 0.3677, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.1244979919678715e-05, | |
| "loss": 0.4641, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.0843373493975904e-05, | |
| "loss": 0.418, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.0441767068273092e-05, | |
| "loss": 0.6101, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 1.0040160642570281e-05, | |
| "loss": 0.3983, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 0.97, | |
| "learning_rate": 9.63855421686747e-06, | |
| "loss": 0.483, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 9.236947791164658e-06, | |
| "loss": 0.5455, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 8.835341365461847e-06, | |
| "loss": 0.4421, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 8.433734939759036e-06, | |
| "loss": 0.4869, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 8.032128514056226e-06, | |
| "loss": 0.4239, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 0.98, | |
| "learning_rate": 7.630522088353415e-06, | |
| "loss": 0.368, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 7.228915662650602e-06, | |
| "loss": 0.4092, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 6.827309236947792e-06, | |
| "loss": 0.4514, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 6.42570281124498e-06, | |
| "loss": 0.4038, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 6.024096385542169e-06, | |
| "loss": 0.4729, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 0.99, | |
| "learning_rate": 5.622489959839358e-06, | |
| "loss": 0.4279, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 5.220883534136546e-06, | |
| "loss": 0.503, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.819277108433735e-06, | |
| "loss": 0.5054, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "learning_rate": 4.417670682730924e-06, | |
| "loss": 0.4004, | |
| "step": 493 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 500, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 2, | |
| "save_steps": 500, | |
| "total_flos": 418467312304128.0, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
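
The dump above appears to be the `trainer_state.json` that the Hugging Face Transformers `Trainer` writes into a checkpoint directory: `log_history` holds one entry per logged step (here `logging_steps: 1`), and the top-level fields record the run configuration and progress. A minimal sketch for inspecting such a file is shown below; the file path and the smoothing window are illustrative assumptions, not values taken from the run above.

# Minimal sketch: load a Trainer state file and summarize the logged
# training loss and learning-rate schedule.
# Assumptions: the JSON above is saved as "checkpoint-493/trainer_state.json";
# the 10-step smoothing window is arbitrary.
import json

with open("checkpoint-493/trainer_state.json") as f:
    state = json.load(f)

history = state["log_history"]
losses = [entry["loss"] for entry in history if "loss" in entry]
lrs = [entry["learning_rate"] for entry in history if "learning_rate" in entry]

print(f"global_step={state['global_step']}, epoch={state['epoch']}")
print(f"first/last logged loss: {losses[0]:.4f} -> {losses[-1]:.4f}")
print(f"peak learning rate: {max(lrs):.6f}")

# Optional: simple moving average of the loss, e.g. for plotting a smoother curve.
window = 10
smoothed = [
    sum(losses[max(0, i - window + 1): i + 1]) / (i - max(0, i - window + 1) + 1)
    for i in range(len(losses))
]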