Training in progress, step 39800

- adapter_model.safetensors +1 -1
- trainer_log.jsonl +41 -0
adapter_model.safetensors CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:eb097045d48da2b207cd2a3390fd3df957c91fdb58c597a5e731af42193705ac
 size 1638528
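The changed file is a Git LFS pointer, so only the oid and size are versioned here; the adapter weights themselves live in LFS storage. As a minimal sketch (not part of this commit), assuming the checkpoint has already been fetched with `git lfs pull`, the downloaded blob can be checked against the oid recorded in the new pointer:

```python
# Sketch: verify a downloaded LFS object against the pointer's oid.
# The local path is an assumption; the expected hash is the oid from the diff above.
import hashlib

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Hash the file in chunks so large checkpoints never sit fully in memory."""
    digest = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            digest.update(chunk)
    return digest.hexdigest()

EXPECTED_OID = "eb097045d48da2b207cd2a3390fd3df957c91fdb58c597a5e731af42193705ac"
print(sha256_of("adapter_model.safetensors") == EXPECTED_OID)
```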
trainer_log.jsonl CHANGED

@@ -8116,3 +8116,44 @@
 {"current_steps": 39595, "total_steps": 40000, "loss": 0.2598, "lr": 7.625288752117209e-05, "epoch": 82.31808731808732, "percentage": 98.99, "elapsed_time": "6:42:35", "remaining_time": "0:04:07", "throughput": 1250.2, "total_tokens": 30199768}
 {"current_steps": 39600, "total_steps": 40000, "loss": 0.2635, "lr": 7.4386456836667e-05, "epoch": 82.32848232848232, "percentage": 99.0, "elapsed_time": "6:42:38", "remaining_time": "0:04:04", "throughput": 1250.22, "total_tokens": 30203576}
 {"current_steps": 39600, "total_steps": 40000, "eval_loss": 0.2492460459470749, "epoch": 82.32848232848232, "percentage": 99.0, "elapsed_time": "6:42:51", "remaining_time": "0:04:04", "throughput": 1249.53, "total_tokens": 30203576}
+{"current_steps": 39605, "total_steps": 40000, "loss": 0.272, "lr": 7.254314656586214e-05, "epoch": 82.33887733887734, "percentage": 99.01, "elapsed_time": "6:42:55", "remaining_time": "0:04:01", "throughput": 1249.5, "total_tokens": 30207512}
+{"current_steps": 39610, "total_steps": 40000, "loss": 0.3037, "lr": 7.07229569929968e-05, "epoch": 82.34927234927235, "percentage": 99.02, "elapsed_time": "6:42:58", "remaining_time": "0:03:58", "throughput": 1249.53, "total_tokens": 30211384}
+{"current_steps": 39615, "total_steps": 40000, "loss": 0.2513, "lr": 6.892588839879643e-05, "epoch": 82.35966735966736, "percentage": 99.04, "elapsed_time": "6:43:00", "remaining_time": "0:03:55", "throughput": 1249.55, "total_tokens": 30215320}
+{"current_steps": 39620, "total_steps": 40000, "loss": 0.2784, "lr": 6.71519410603727e-05, "epoch": 82.37006237006237, "percentage": 99.05, "elapsed_time": "6:43:03", "remaining_time": "0:03:51", "throughput": 1249.57, "total_tokens": 30219096}
+{"current_steps": 39625, "total_steps": 40000, "loss": 0.2416, "lr": 6.540111525129011e-05, "epoch": 82.38045738045739, "percentage": 99.06, "elapsed_time": "6:43:06", "remaining_time": "0:03:48", "throughput": 1249.59, "total_tokens": 30222872}
+{"current_steps": 39630, "total_steps": 40000, "loss": 0.2776, "lr": 6.367341124154934e-05, "epoch": 82.39085239085239, "percentage": 99.08, "elapsed_time": "6:43:08", "remaining_time": "0:03:45", "throughput": 1249.61, "total_tokens": 30226680}
+{"current_steps": 39635, "total_steps": 40000, "loss": 0.2416, "lr": 6.19688292975873e-05, "epoch": 82.4012474012474, "percentage": 99.09, "elapsed_time": "6:43:11", "remaining_time": "0:03:42", "throughput": 1249.63, "total_tokens": 30230616}
+{"current_steps": 39640, "total_steps": 40000, "loss": 0.2419, "lr": 6.0287369682260336e-05, "epoch": 82.41164241164242, "percentage": 99.1, "elapsed_time": "6:43:14", "remaining_time": "0:03:39", "throughput": 1249.65, "total_tokens": 30234360}
+{"current_steps": 39645, "total_steps": 40000, "loss": 0.2723, "lr": 5.8629032654894384e-05, "epoch": 82.42203742203742, "percentage": 99.11, "elapsed_time": "6:43:17", "remaining_time": "0:03:36", "throughput": 1249.66, "total_tokens": 30238040}
+{"current_steps": 39650, "total_steps": 40000, "loss": 0.2567, "lr": 5.699381847120155e-05, "epoch": 82.43243243243244, "percentage": 99.12, "elapsed_time": "6:43:19", "remaining_time": "0:03:33", "throughput": 1249.68, "total_tokens": 30242008}
+{"current_steps": 39655, "total_steps": 40000, "loss": 0.2591, "lr": 5.5381727383380094e-05, "epoch": 82.44282744282744, "percentage": 99.14, "elapsed_time": "6:43:22", "remaining_time": "0:03:30", "throughput": 1249.7, "total_tokens": 30245912}
+{"current_steps": 39660, "total_steps": 40000, "loss": 0.27, "lr": 5.379275964001451e-05, "epoch": 82.45322245322245, "percentage": 99.15, "elapsed_time": "6:43:25", "remaining_time": "0:03:27", "throughput": 1249.73, "total_tokens": 30249752}
+{"current_steps": 39665, "total_steps": 40000, "loss": 0.2403, "lr": 5.222691548614211e-05, "epoch": 82.46361746361747, "percentage": 99.16, "elapsed_time": "6:43:27", "remaining_time": "0:03:24", "throughput": 1249.74, "total_tokens": 30253496}
+{"current_steps": 39670, "total_steps": 40000, "loss": 0.2419, "lr": 5.068419516323641e-05, "epoch": 82.47401247401247, "percentage": 99.17, "elapsed_time": "6:43:30", "remaining_time": "0:03:21", "throughput": 1249.77, "total_tokens": 30257560}
+{"current_steps": 39675, "total_steps": 40000, "loss": 0.2806, "lr": 4.91645989092071e-05, "epoch": 82.48440748440748, "percentage": 99.19, "elapsed_time": "6:43:33", "remaining_time": "0:03:18", "throughput": 1249.79, "total_tokens": 30261432}
+{"current_steps": 39680, "total_steps": 40000, "loss": 0.277, "lr": 4.7668126958400056e-05, "epoch": 82.4948024948025, "percentage": 99.2, "elapsed_time": "6:43:35", "remaining_time": "0:03:15", "throughput": 1249.81, "total_tokens": 30265144}
+{"current_steps": 39685, "total_steps": 40000, "loss": 0.2432, "lr": 4.619477954159734e-05, "epoch": 82.5051975051975, "percentage": 99.21, "elapsed_time": "6:43:38", "remaining_time": "0:03:12", "throughput": 1249.83, "total_tokens": 30269016}
+{"current_steps": 39690, "total_steps": 40000, "loss": 0.2662, "lr": 4.4744556885983884e-05, "epoch": 82.51559251559252, "percentage": 99.22, "elapsed_time": "6:43:41", "remaining_time": "0:03:09", "throughput": 1249.84, "total_tokens": 30272728}
+{"current_steps": 39695, "total_steps": 40000, "loss": 0.2748, "lr": 4.331745921523078e-05, "epoch": 82.52598752598753, "percentage": 99.24, "elapsed_time": "6:43:43", "remaining_time": "0:03:06", "throughput": 1249.86, "total_tokens": 30276504}
+{"current_steps": 39700, "total_steps": 40000, "loss": 0.2641, "lr": 4.191348674937867e-05, "epoch": 82.53638253638253, "percentage": 99.25, "elapsed_time": "6:43:46", "remaining_time": "0:03:03", "throughput": 1249.87, "total_tokens": 30280120}
+{"current_steps": 39705, "total_steps": 40000, "loss": 0.2635, "lr": 4.0532639704971006e-05, "epoch": 82.54677754677755, "percentage": 99.26, "elapsed_time": "6:43:49", "remaining_time": "0:03:00", "throughput": 1249.89, "total_tokens": 30283832}
+{"current_steps": 39710, "total_steps": 40000, "loss": 0.2658, "lr": 3.917491829493747e-05, "epoch": 82.55717255717256, "percentage": 99.28, "elapsed_time": "6:43:51", "remaining_time": "0:02:56", "throughput": 1249.91, "total_tokens": 30287736}
+{"current_steps": 39715, "total_steps": 40000, "loss": 0.2508, "lr": 3.78403227286439e-05, "epoch": 82.56756756756756, "percentage": 99.29, "elapsed_time": "6:43:54", "remaining_time": "0:02:53", "throughput": 1249.93, "total_tokens": 30291576}
+{"current_steps": 39720, "total_steps": 40000, "loss": 0.2736, "lr": 3.652885321192567e-05, "epoch": 82.57796257796258, "percentage": 99.3, "elapsed_time": "6:43:57", "remaining_time": "0:02:50", "throughput": 1249.95, "total_tokens": 30295416}
+{"current_steps": 39725, "total_steps": 40000, "loss": 0.2641, "lr": 3.524050994702099e-05, "epoch": 82.58835758835758, "percentage": 99.31, "elapsed_time": "6:44:00", "remaining_time": "0:02:47", "throughput": 1249.97, "total_tokens": 30299352}
+{"current_steps": 39730, "total_steps": 40000, "loss": 0.272, "lr": 3.3975293132604276e-05, "epoch": 82.5987525987526, "percentage": 99.33, "elapsed_time": "6:44:02", "remaining_time": "0:02:44", "throughput": 1249.99, "total_tokens": 30303192}
+{"current_steps": 39735, "total_steps": 40000, "loss": 0.2671, "lr": 3.2733202963786125e-05, "epoch": 82.60914760914761, "percentage": 99.34, "elapsed_time": "6:44:05", "remaining_time": "0:02:41", "throughput": 1250.01, "total_tokens": 30306904}
+{"current_steps": 39740, "total_steps": 40000, "loss": 0.2594, "lr": 3.15142396321133e-05, "epoch": 82.61954261954261, "percentage": 99.35, "elapsed_time": "6:44:08", "remaining_time": "0:02:38", "throughput": 1250.03, "total_tokens": 30310648}
+{"current_steps": 39745, "total_steps": 40000, "loss": 0.2457, "lr": 3.0318403325552132e-05, "epoch": 82.62993762993763, "percentage": 99.36, "elapsed_time": "6:44:10", "remaining_time": "0:02:35", "throughput": 1250.05, "total_tokens": 30314488}
+{"current_steps": 39750, "total_steps": 40000, "loss": 0.2755, "lr": 2.914569422855506e-05, "epoch": 82.64033264033264, "percentage": 99.38, "elapsed_time": "6:44:13", "remaining_time": "0:02:32", "throughput": 1250.07, "total_tokens": 30318296}
+{"current_steps": 39755, "total_steps": 40000, "loss": 0.2716, "lr": 2.7996112521927462e-05, "epoch": 82.65072765072765, "percentage": 99.39, "elapsed_time": "6:44:16", "remaining_time": "0:02:29", "throughput": 1250.09, "total_tokens": 30322232}
+{"current_steps": 39760, "total_steps": 40000, "loss": 0.2587, "lr": 2.68696583829775e-05, "epoch": 82.66112266112266, "percentage": 99.4, "elapsed_time": "6:44:18", "remaining_time": "0:02:26", "throughput": 1250.11, "total_tokens": 30326040}
+{"current_steps": 39765, "total_steps": 40000, "loss": 0.268, "lr": 2.576633198539957e-05, "epoch": 82.67151767151768, "percentage": 99.41, "elapsed_time": "6:44:21", "remaining_time": "0:02:23", "throughput": 1250.12, "total_tokens": 30329784}
+{"current_steps": 39770, "total_steps": 40000, "loss": 0.2656, "lr": 2.46861334993409e-05, "epoch": 82.68191268191268, "percentage": 99.42, "elapsed_time": "6:44:24", "remaining_time": "0:02:20", "throughput": 1250.15, "total_tokens": 30333624}
+{"current_steps": 39775, "total_steps": 40000, "loss": 0.2624, "lr": 2.3629063091384903e-05, "epoch": 82.6923076923077, "percentage": 99.44, "elapsed_time": "6:44:26", "remaining_time": "0:02:17", "throughput": 1250.17, "total_tokens": 30337464}
+{"current_steps": 39780, "total_steps": 40000, "loss": 0.2537, "lr": 2.2595120924567834e-05, "epoch": 82.70270270270271, "percentage": 99.45, "elapsed_time": "6:44:29", "remaining_time": "0:02:14", "throughput": 1250.18, "total_tokens": 30341336}
+{"current_steps": 39785, "total_steps": 40000, "loss": 0.2268, "lr": 2.158430715829551e-05, "epoch": 82.71309771309771, "percentage": 99.46, "elapsed_time": "6:44:32", "remaining_time": "0:02:11", "throughput": 1250.2, "total_tokens": 30345144}
+{"current_steps": 39790, "total_steps": 40000, "loss": 0.2772, "lr": 2.059662194849321e-05, "epoch": 82.72349272349273, "percentage": 99.48, "elapsed_time": "6:44:34", "remaining_time": "0:02:08", "throughput": 1250.22, "total_tokens": 30348888}
+{"current_steps": 39795, "total_steps": 40000, "loss": 0.2722, "lr": 1.9632065447422463e-05, "epoch": 82.73388773388774, "percentage": 99.49, "elapsed_time": "6:44:37", "remaining_time": "0:02:05", "throughput": 1250.23, "total_tokens": 30352568}
+{"current_steps": 39800, "total_steps": 40000, "loss": 0.2449, "lr": 1.8690637803880916e-05, "epoch": 82.74428274428274, "percentage": 99.5, "elapsed_time": "6:44:40", "remaining_time": "0:02:02", "throughput": 1250.25, "total_tokens": 30356408}
+{"current_steps": 39800, "total_steps": 40000, "eval_loss": 0.2485324889421463, "epoch": 82.74428274428274, "percentage": 99.5, "elapsed_time": "6:44:53", "remaining_time": "0:02:02", "throughput": 1249.57, "total_tokens": 30356408}
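Each line of trainer_log.jsonl is an independent JSON object: training steps carry "loss", "lr", throughput, and token counts, while evaluation steps carry "eval_loss" instead. A minimal sketch (not part of the log) of reading the file back into separate curves, assuming it has been downloaded locally:

```python
# Sketch: split the JSONL training log into training and evaluation curves.
# The file path is an assumption; the keys match the entries shown above.
import json

train_curve, eval_curve = [], []
with open("trainer_log.jsonl") as log:
    for line in log:
        entry = json.loads(line)
        step = entry["current_steps"]
        if "eval_loss" in entry:
            eval_curve.append((step, entry["eval_loss"]))
        elif "loss" in entry:
            train_curve.append((step, entry["loss"]))

# Per the entries above, around step 39800 the last points are
# loss 0.2449 and eval_loss ~0.2485.
print("last train point:", train_curve[-1] if train_curve else None)
print("last eval point:", eval_curve[-1] if eval_curve else None)
```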