{
  "best_metric": 0.9883040935672515,
  "best_model_checkpoint": "./outputs/convnextv2-nano-22k-384-boulderspot-vN/checkpoint-1015",
  "epoch": 4.993849938499385,
  "eval_steps": 500,
  "global_step": 1015,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05,
      "grad_norm": 26.665199279785156,
      "learning_rate": 3.92156862745098e-06,
      "loss": 0.6334,
      "step": 10
    },
    {
      "epoch": 0.1,
      "grad_norm": 7.943151473999023,
      "learning_rate": 7.84313725490196e-06,
      "loss": 0.365,
      "step": 20
    },
    {
      "epoch": 0.15,
      "grad_norm": 16.875411987304688,
      "learning_rate": 1.1764705882352942e-05,
      "loss": 0.2382,
      "step": 30
    },
    {
      "epoch": 0.2,
      "grad_norm": 5.587479114532471,
      "learning_rate": 1.568627450980392e-05,
      "loss": 0.1425,
      "step": 40
    },
    {
      "epoch": 0.25,
      "grad_norm": 7.946935176849365,
      "learning_rate": 1.9607843137254903e-05,
      "loss": 0.1257,
      "step": 50
    },
    {
      "epoch": 0.3,
      "grad_norm": 4.481125831604004,
      "learning_rate": 1.9995698998770955e-05,
      "loss": 0.0967,
      "step": 60
    },
    {
      "epoch": 0.34,
      "grad_norm": 9.453625679016113,
      "learning_rate": 1.998083609002402e-05,
      "loss": 0.0829,
      "step": 70
    },
    {
      "epoch": 0.39,
      "grad_norm": 13.032307624816895,
      "learning_rate": 1.995537395500004e-05,
      "loss": 0.1073,
      "step": 80
    },
    {
      "epoch": 0.44,
      "grad_norm": 5.036722183227539,
      "learning_rate": 1.9919339633410737e-05,
      "loss": 0.0788,
      "step": 90
    },
    {
      "epoch": 0.49,
      "grad_norm": 10.13275146484375,
      "learning_rate": 1.9872771392183334e-05,
      "loss": 0.1421,
      "step": 100
    },
    {
      "epoch": 0.54,
      "grad_norm": 10.442366600036621,
      "learning_rate": 1.981571868482269e-05,
      "loss": 0.1035,
      "step": 110
    },
    {
      "epoch": 0.59,
      "grad_norm": 7.753129959106445,
      "learning_rate": 1.974824209889377e-05,
      "loss": 0.1001,
      "step": 120
    },
    {
      "epoch": 0.64,
      "grad_norm": 10.481592178344727,
      "learning_rate": 1.9670413291680223e-05,
      "loss": 0.1007,
      "step": 130
    },
    {
      "epoch": 0.69,
      "grad_norm": 7.881091594696045,
      "learning_rate": 1.9582314914087344e-05,
      "loss": 0.0669,
      "step": 140
    },
    {
      "epoch": 0.74,
      "grad_norm": 12.763803482055664,
      "learning_rate": 1.9484040522870333e-05,
      "loss": 0.0723,
      "step": 150
    },
    {
      "epoch": 0.79,
      "grad_norm": 6.890717029571533,
      "learning_rate": 1.9375694481280965e-05,
      "loss": 0.0804,
      "step": 160
    },
    {
      "epoch": 0.84,
      "grad_norm": 6.086670398712158,
      "learning_rate": 1.9257391848238212e-05,
      "loss": 0.1073,
      "step": 170
    },
    {
      "epoch": 0.89,
      "grad_norm": 4.475172996520996,
      "learning_rate": 1.9129258256140556e-05,
      "loss": 0.0779,
      "step": 180
    },
    {
      "epoch": 0.93,
      "grad_norm": 4.19266414642334,
      "learning_rate": 1.8991429777449674e-05,
      "loss": 0.0934,
      "step": 190
    },
    {
      "epoch": 0.98,
      "grad_norm": 14.975569725036621,
      "learning_rate": 1.884405278018722e-05,
      "loss": 0.1102,
      "step": 200
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.9839181286549707,
      "eval_f1": 0.9840087498605248,
      "eval_loss": 0.0431428886950016,
      "eval_matthews_correlation": 0.8589815510372486,
      "eval_precision": 0.9841155979252388,
      "eval_recall": 0.9839181286549707,
      "eval_runtime": 4.9799,
      "eval_samples_per_second": 137.351,
      "eval_steps_per_second": 8.635,
      "step": 203
    },
    {
      "epoch": 1.03,
      "grad_norm": 14.823522567749023,
      "learning_rate": 1.8687283772498205e-05,
      "loss": 0.0567,
      "step": 210
    },
    {
      "epoch": 1.08,
      "grad_norm": 2.4599366188049316,
      "learning_rate": 1.852128923644593e-05,
      "loss": 0.08,
      "step": 220
    },
    {
      "epoch": 1.13,
      "grad_norm": 1.6709442138671875,
      "learning_rate": 1.8346245451215068e-05,
      "loss": 0.056,
      "step": 230
    },
    {
      "epoch": 1.18,
      "grad_norm": 6.953076362609863,
      "learning_rate": 1.8162338305910636e-05,
      "loss": 0.0683,
      "step": 240
    },
    {
      "epoch": 1.23,
      "grad_norm": 3.510495901107788,
      "learning_rate": 1.79697631021516e-05,
      "loss": 0.0825,
      "step": 250
    },
    {
      "epoch": 1.28,
      "grad_norm": 8.399980545043945,
      "learning_rate": 1.776872434666882e-05,
      "loss": 0.0584,
      "step": 260
    },
    {
      "epoch": 1.33,
      "grad_norm": 14.62066650390625,
      "learning_rate": 1.7559435534127534e-05,
      "loss": 0.0998,
      "step": 270
    },
    {
      "epoch": 1.38,
      "grad_norm": 6.287339210510254,
      "learning_rate": 1.7342118920405035e-05,
      "loss": 0.0813,
      "step": 280
    },
    {
      "epoch": 1.43,
      "grad_norm": 7.23698091506958,
      "learning_rate": 1.7117005286564344e-05,
      "loss": 0.0973,
      "step": 290
    },
    {
      "epoch": 1.48,
      "grad_norm": 12.766968727111816,
      "learning_rate": 1.688433369377444e-05,
      "loss": 0.0928,
      "step": 300
    },
    {
      "epoch": 1.53,
      "grad_norm": 16.611492156982422,
      "learning_rate": 1.6644351229437416e-05,
      "loss": 0.08,
      "step": 310
    },
    {
      "epoch": 1.57,
      "grad_norm": 1.000801920890808,
      "learning_rate": 1.63973127447921e-05,
      "loss": 0.0582,
      "step": 320
    },
    {
      "epoch": 1.62,
      "grad_norm": 10.745514869689941,
      "learning_rate": 1.6143480584272794e-05,
      "loss": 0.0915,
      "step": 330
    },
    {
      "epoch": 1.67,
      "grad_norm": 10.188589096069336,
      "learning_rate": 1.5883124306910563e-05,
      "loss": 0.0869,
      "step": 340
    },
    {
      "epoch": 1.72,
      "grad_norm": 7.215929985046387,
      "learning_rate": 1.5616520400072963e-05,
      "loss": 0.1329,
      "step": 350
    },
    {
      "epoch": 1.77,
      "grad_norm": 4.967738151550293,
      "learning_rate": 1.5343951985846096e-05,
      "loss": 0.0781,
      "step": 360
    },
    {
      "epoch": 1.82,
      "grad_norm": 2.9467406272888184,
      "learning_rate": 1.5065708520370943e-05,
      "loss": 0.0582,
      "step": 370
    },
    {
      "epoch": 1.87,
      "grad_norm": 6.54543399810791,
      "learning_rate": 1.4782085486453155e-05,
      "loss": 0.0626,
      "step": 380
    },
    {
      "epoch": 1.92,
      "grad_norm": 7.8247833251953125,
      "learning_rate": 1.4493384079772815e-05,
      "loss": 0.081,
      "step": 390
    },
    {
      "epoch": 1.97,
      "grad_norm": 8.889460563659668,
      "learning_rate": 1.4199910889027335e-05,
      "loss": 0.0559,
      "step": 400
    },
    {
      "epoch": 2.0,
      "eval_accuracy": 0.9839181286549707,
      "eval_f1": 0.9845065535493497,
      "eval_loss": 0.04758350923657417,
      "eval_matthews_correlation": 0.8709455436036694,
      "eval_precision": 0.9858048061716136,
      "eval_recall": 0.9839181286549707,
      "eval_runtime": 5.1397,
      "eval_samples_per_second": 133.081,
      "eval_steps_per_second": 8.366,
      "step": 406
    },
    {
      "epoch": 2.02,
      "grad_norm": 11.741949081420898,
      "learning_rate": 1.390197757034721e-05,
      "loss": 0.0806,
      "step": 410
    },
    {
      "epoch": 2.07,
      "grad_norm": 8.677542686462402,
      "learning_rate": 1.3599900516330382e-05,
      "loss": 0.1033,
      "step": 420
    },
    {
      "epoch": 2.12,
      "grad_norm": 1.3803766965866089,
      "learning_rate": 1.3294000520046666e-05,
      "loss": 0.0433,
      "step": 430
    },
    {
      "epoch": 2.16,
      "grad_norm": 8.467944145202637,
      "learning_rate": 1.2984602434369058e-05,
      "loss": 0.0691,
      "step": 440
    },
    {
      "epoch": 2.21,
      "grad_norm": 6.276478290557861,
      "learning_rate": 1.2672034826993716e-05,
      "loss": 0.0678,
      "step": 450
    },
    {
      "epoch": 2.26,
      "grad_norm": 11.57728385925293,
      "learning_rate": 1.235662963151493e-05,
      "loss": 0.0804,
      "step": 460
    },
    {
      "epoch": 2.31,
      "grad_norm": 0.9122611880302429,
      "learning_rate": 1.2038721794925689e-05,
      "loss": 0.0476,
      "step": 470
    },
    {
      "epoch": 2.36,
      "grad_norm": 1.983340859413147,
      "learning_rate": 1.1718648921918112e-05,
      "loss": 0.0545,
      "step": 480
    },
    {
      "epoch": 2.41,
      "grad_norm": 17.866886138916016,
      "learning_rate": 1.1396750916361526e-05,
      "loss": 0.0512,
      "step": 490
    },
    {
      "epoch": 2.46,
      "grad_norm": 9.374113082885742,
      "learning_rate": 1.1073369620338928e-05,
      "loss": 0.0604,
      "step": 500
    },
    {
      "epoch": 2.51,
      "grad_norm": 4.585148811340332,
      "learning_rate": 1.074884845112512e-05,
      "loss": 0.0629,
      "step": 510
    },
    {
      "epoch": 2.56,
      "grad_norm": 9.917503356933594,
      "learning_rate": 1.0423532036492077e-05,
      "loss": 0.054,
      "step": 520
    },
    {
      "epoch": 2.61,
      "grad_norm": 1.8329569101333618,
      "learning_rate": 1.0097765848728825e-05,
      "loss": 0.071,
      "step": 530
    },
    {
      "epoch": 2.66,
      "grad_norm": 10.834183692932129,
      "learning_rate": 9.771895837764438e-06,
      "loss": 0.0642,
      "step": 540
    },
    {
      "epoch": 2.71,
      "grad_norm": 5.503483295440674,
      "learning_rate": 9.446268063783853e-06,
      "loss": 0.0654,
      "step": 550
    },
    {
      "epoch": 2.76,
      "grad_norm": 6.054934024810791,
      "learning_rate": 9.121228329726563e-06,
      "loss": 0.061,
      "step": 560
    },
    {
      "epoch": 2.8,
      "grad_norm": 8.71143627166748,
      "learning_rate": 8.797121814058502e-06,
      "loss": 0.0759,
      "step": 570
    },
    {
      "epoch": 2.85,
      "grad_norm": 4.225427150726318,
      "learning_rate": 8.474292704207095e-06,
      "loss": 0.0662,
      "step": 580
    },
    {
      "epoch": 2.9,
      "grad_norm": 6.926836967468262,
      "learning_rate": 8.153083831048772e-06,
      "loss": 0.07,
      "step": 590
    },
    {
      "epoch": 2.95,
      "grad_norm": 1.132318377494812,
      "learning_rate": 7.833836304837022e-06,
      "loss": 0.0402,
      "step": 600
    },
    {
      "epoch": 3.0,
      "eval_accuracy": 0.9809941520467836,
      "eval_f1": 0.9816895632855951,
      "eval_loss": 0.04637432098388672,
      "eval_matthews_correlation": 0.8468348607550005,
      "eval_precision": 0.9830779451248668,
      "eval_recall": 0.9809941520467836,
      "eval_runtime": 5.1222,
      "eval_samples_per_second": 133.536,
      "eval_steps_per_second": 8.395,
      "step": 609
    },
    {
      "epoch": 3.0,
      "grad_norm": 2.548271656036377,
      "learning_rate": 7.516889152957744e-06,
      "loss": 0.0706,
      "step": 610
    },
    {
      "epoch": 3.05,
      "grad_norm": 17.59124755859375,
      "learning_rate": 7.202578959896491e-06,
      "loss": 0.0777,
      "step": 620
    },
    {
      "epoch": 3.1,
      "grad_norm": 10.620137214660645,
      "learning_rate": 6.891239509799932e-06,
      "loss": 0.0443,
      "step": 630
    },
    {
      "epoch": 3.15,
      "grad_norm": 20.15471839904785,
      "learning_rate": 6.583201432011217e-06,
      "loss": 0.0746,
      "step": 640
    },
    {
      "epoch": 3.2,
      "grad_norm": 7.445899486541748,
      "learning_rate": 6.278791849955583e-06,
      "loss": 0.0468,
      "step": 650
    },
    {
      "epoch": 3.25,
      "grad_norm": 10.309353828430176,
      "learning_rate": 5.978334033749076e-06,
      "loss": 0.0698,
      "step": 660
    },
    {
      "epoch": 3.3,
      "grad_norm": 4.898565769195557,
      "learning_rate": 5.682147056899361e-06,
      "loss": 0.058,
      "step": 670
    },
    {
      "epoch": 3.35,
      "grad_norm": 8.0520658493042,
      "learning_rate": 5.390545457463134e-06,
      "loss": 0.0468,
      "step": 680
    },
    {
      "epoch": 3.39,
      "grad_norm": 3.444305419921875,
      "learning_rate": 5.103838904019993e-06,
      "loss": 0.0381,
      "step": 690
    },
    {
      "epoch": 3.44,
      "grad_norm": 11.149181365966797,
      "learning_rate": 4.822331866817478e-06,
      "loss": 0.0693,
      "step": 700
    },
    {
      "epoch": 3.49,
      "grad_norm": 12.743651390075684,
      "learning_rate": 4.546323294436556e-06,
      "loss": 0.0739,
      "step": 710
    },
    {
      "epoch": 3.54,
      "grad_norm": 9.802193641662598,
      "learning_rate": 4.276106296320828e-06,
      "loss": 0.0655,
      "step": 720
    },
    {
      "epoch": 3.59,
      "grad_norm": 6.496978759765625,
      "learning_rate": 4.0119678315067025e-06,
      "loss": 0.066,
      "step": 730
    },
    {
      "epoch": 3.64,
      "grad_norm": 12.13724422454834,
      "learning_rate": 3.754188403885013e-06,
      "loss": 0.0483,
      "step": 740
    },
    {
      "epoch": 3.69,
      "grad_norm": 11.302549362182617,
      "learning_rate": 3.5030417643177416e-06,
      "loss": 0.0727,
      "step": 750
    },
    {
      "epoch": 3.74,
      "grad_norm": 3.291935443878174,
      "learning_rate": 3.258794619926159e-06,
      "loss": 0.0685,
      "step": 760
    },
    {
      "epoch": 3.79,
      "grad_norm": 7.776196479797363,
      "learning_rate": 3.021706350859147e-06,
      "loss": 0.0482,
      "step": 770
    },
    {
      "epoch": 3.84,
      "grad_norm": 7.8189568519592285,
      "learning_rate": 2.792028734842418e-06,
      "loss": 0.0504,
      "step": 780
    },
    {
      "epoch": 3.89,
      "grad_norm": 1.9077569246292114,
      "learning_rate": 2.5700056798012164e-06,
      "loss": 0.0578,
      "step": 790
    },
    {
      "epoch": 3.94,
      "grad_norm": 5.7739667892456055,
      "learning_rate": 2.3558729648404065e-06,
      "loss": 0.0712,
      "step": 800
    },
    {
      "epoch": 3.99,
      "grad_norm": 6.1386027336120605,
      "learning_rate": 2.1498579898570228e-06,
      "loss": 0.0334,
      "step": 810
    },
    {
      "epoch": 4.0,
      "eval_accuracy": 0.9868421052631579,
      "eval_f1": 0.986916249885884,
      "eval_loss": 0.03484980762004852,
      "eval_matthews_correlation": 0.884636226651146,
      "eval_precision": 0.9870070425284395,
      "eval_recall": 0.9868421052631579,
      "eval_runtime": 5.1774,
      "eval_samples_per_second": 132.112,
      "eval_steps_per_second": 8.305,
      "step": 813
    },
    {
      "epoch": 4.03,
      "grad_norm": 2.534424304962158,
      "learning_rate": 1.952179534051183e-06,
      "loss": 0.0613,
      "step": 820
    },
    {
      "epoch": 4.08,
      "grad_norm": 3.645782947540283,
      "learning_rate": 1.763047523591831e-06,
      "loss": 0.0817,
      "step": 830
    },
    {
      "epoch": 4.13,
      "grad_norm": 10.930766105651855,
      "learning_rate": 1.5826628086839968e-06,
      "loss": 0.081,
      "step": 840
    },
    {
      "epoch": 4.18,
      "grad_norm": 6.021533489227295,
      "learning_rate": 1.41121695027438e-06,
      "loss": 0.0522,
      "step": 850
    },
    {
      "epoch": 4.23,
      "grad_norm": 6.584732532501221,
      "learning_rate": 1.2488920166217034e-06,
      "loss": 0.054,
      "step": 860
    },
    {
      "epoch": 4.28,
      "grad_norm": 5.629753112792969,
      "learning_rate": 1.095860389947928e-06,
      "loss": 0.0475,
      "step": 870
    },
    {
      "epoch": 4.33,
      "grad_norm": 4.272614479064941,
      "learning_rate": 9.522845833756001e-07,
      "loss": 0.0458,
      "step": 880
    },
    {
      "epoch": 4.38,
      "grad_norm": 9.34067153930664,
      "learning_rate": 8.183170683457986e-07,
      "loss": 0.0746,
      "step": 890
    },
    {
      "epoch": 4.43,
      "grad_norm": 2.2390189170837402,
      "learning_rate": 6.941001126998892e-07,
      "loss": 0.0509,
      "step": 900
    },
    {
      "epoch": 4.48,
      "grad_norm": 1.508949637413025,
      "learning_rate": 5.797656295970955e-07,
      "loss": 0.0329,
      "step": 910
    },
    {
      "epoch": 4.53,
      "grad_norm": 7.0009355545043945,
      "learning_rate": 4.754350374283001e-07,
      "loss": 0.0709,
      "step": 920
    },
    {
      "epoch": 4.58,
      "grad_norm": 6.07660436630249,
      "learning_rate": 3.8121913087483033e-07,
      "loss": 0.0393,
      "step": 930
    },
    {
      "epoch": 4.62,
      "grad_norm": 6.966567039489746,
      "learning_rate": 2.972179632491989e-07,
      "loss": 0.0464,
      "step": 940
    },
    {
      "epoch": 4.67,
      "grad_norm": 3.8328962326049805,
      "learning_rate": 2.23520740242712e-07,
      "loss": 0.0457,
      "step": 950
    },
    {
      "epoch": 4.72,
      "grad_norm": 3.535372018814087,
      "learning_rate": 1.602057251927891e-07,
      "loss": 0.0599,
      "step": 960
    },
    {
      "epoch": 4.77,
      "grad_norm": 11.542516708374023,
      "learning_rate": 1.0734015597060222e-07,
      "loss": 0.0557,
      "step": 970
    },
    {
      "epoch": 4.82,
      "grad_norm": 7.840158939361572,
      "learning_rate": 6.498017357731035e-08,
      "loss": 0.0564,
      "step": 980
    },
    {
      "epoch": 4.87,
      "grad_norm": 3.7357852458953857,
      "learning_rate": 3.317076252467133e-08,
      "loss": 0.033,
      "step": 990
    },
    {
      "epoch": 4.92,
      "grad_norm": 4.429032802581787,
      "learning_rate": 1.1945703063402925e-08,
      "loss": 0.0496,
      "step": 1000
    },
    {
      "epoch": 4.97,
      "grad_norm": 4.065526485443115,
      "learning_rate": 1.327535309979533e-09,
      "loss": 0.0445,
      "step": 1010
    },
    {
      "epoch": 4.99,
      "eval_accuracy": 0.9883040935672515,
      "eval_f1": 0.9883040935672515,
      "eval_loss": 0.03396734222769737,
      "eval_matthews_correlation": 0.8962181845768691,
      "eval_precision": 0.9883040935672515,
      "eval_recall": 0.9883040935672515,
      "eval_runtime": 5.2898,
      "eval_samples_per_second": 129.305,
      "eval_steps_per_second": 8.129,
      "step": 1015
    },
    {
      "epoch": 4.99,
      "step": 1015,
      "total_flos": 2.581447650539471e+18,
      "train_loss": 0.08031210996247277,
      "train_runtime": 701.9998,
      "train_samples_per_second": 92.557,
      "train_steps_per_second": 1.446
    }
  ],
  "logging_steps": 10,
  "max_steps": 1015,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 500,
  "total_flos": 2.581447650539471e+18,
  "train_batch_size": 16,
  "trial_name": null,
  "trial_params": null
}