{
  "best_metric": 0.20642544329166412,
  "best_model_checkpoint": "/leonardo_work/IscrC_AGENT/PROFES2025/results/balanced/oss_similar/checkpoint-617",
  "epoch": 6.0,
  "eval_steps": 500,
  "global_step": 3702,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.02,
      "learning_rate": 1.9967585089141006e-05,
      "loss": 0.6266,
      "step": 10
    },
    {
      "epoch": 0.03,
      "learning_rate": 1.993517017828201e-05,
      "loss": 0.5164,
      "step": 20
    },
    {
      "epoch": 0.05,
      "learning_rate": 1.9902755267423015e-05,
      "loss": 0.4395,
      "step": 30
    },
    {
      "epoch": 0.06,
      "learning_rate": 1.9870340356564023e-05,
      "loss": 0.3996,
      "step": 40
    },
    {
      "epoch": 0.08,
      "learning_rate": 1.9837925445705027e-05,
      "loss": 0.4822,
      "step": 50
    },
    {
      "epoch": 0.1,
      "learning_rate": 1.980551053484603e-05,
      "loss": 0.3884,
      "step": 60
    },
    {
      "epoch": 0.11,
      "learning_rate": 1.9773095623987036e-05,
      "loss": 0.4001,
      "step": 70
    },
    {
      "epoch": 0.13,
      "learning_rate": 1.974068071312804e-05,
      "loss": 0.4072,
      "step": 80
    },
    {
      "epoch": 0.15,
      "learning_rate": 1.9708265802269045e-05,
      "loss": 0.3381,
      "step": 90
    },
    {
      "epoch": 0.16,
      "learning_rate": 1.9675850891410053e-05,
      "loss": 0.4163,
      "step": 100
    },
    {
      "epoch": 0.18,
      "learning_rate": 1.9643435980551057e-05,
      "loss": 0.4844,
      "step": 110
    },
    {
      "epoch": 0.19,
      "learning_rate": 1.961102106969206e-05,
      "loss": 0.3406,
      "step": 120
    },
    {
      "epoch": 0.21,
      "learning_rate": 1.9578606158833066e-05,
      "loss": 0.4334,
      "step": 130
    },
    {
      "epoch": 0.23,
      "learning_rate": 1.954619124797407e-05,
      "loss": 0.3949,
      "step": 140
    },
    {
      "epoch": 0.24,
      "learning_rate": 1.9513776337115075e-05,
      "loss": 0.4326,
      "step": 150
    },
    {
      "epoch": 0.26,
      "learning_rate": 1.948136142625608e-05,
      "loss": 0.4364,
      "step": 160
    },
    {
      "epoch": 0.28,
      "learning_rate": 1.9448946515397084e-05,
      "loss": 0.3108,
      "step": 170
    },
    {
      "epoch": 0.29,
      "learning_rate": 1.9416531604538088e-05,
      "loss": 0.3492,
      "step": 180
    },
    {
      "epoch": 0.31,
      "learning_rate": 1.9384116693679093e-05,
      "loss": 0.4007,
      "step": 190
    },
    {
      "epoch": 0.32,
      "learning_rate": 1.9351701782820097e-05,
      "loss": 0.3187,
      "step": 200
    },
    {
      "epoch": 0.34,
      "learning_rate": 1.93192868719611e-05,
      "loss": 0.3055,
      "step": 210
    },
    {
      "epoch": 0.36,
      "learning_rate": 1.928687196110211e-05,
      "loss": 0.338,
      "step": 220
    },
    {
      "epoch": 0.37,
      "learning_rate": 1.9254457050243114e-05,
      "loss": 0.3226,
      "step": 230
    },
    {
      "epoch": 0.39,
      "learning_rate": 1.9222042139384118e-05,
      "loss": 0.4158,
      "step": 240
    },
    {
      "epoch": 0.41,
      "learning_rate": 1.9189627228525122e-05,
      "loss": 0.3931,
      "step": 250
    },
    {
      "epoch": 0.42,
      "learning_rate": 1.9157212317666127e-05,
      "loss": 0.4295,
      "step": 260
    },
    {
      "epoch": 0.44,
      "learning_rate": 1.912479740680713e-05,
      "loss": 0.278,
      "step": 270
    },
    {
      "epoch": 0.45,
      "learning_rate": 1.909238249594814e-05,
      "loss": 0.2717,
      "step": 280
    },
    {
      "epoch": 0.47,
      "learning_rate": 1.9059967585089144e-05,
      "loss": 0.2201,
      "step": 290
    },
    {
      "epoch": 0.49,
      "learning_rate": 1.9027552674230148e-05,
      "loss": 0.3366,
      "step": 300
    },
    {
      "epoch": 0.5,
      "learning_rate": 1.8995137763371152e-05,
      "loss": 0.3817,
      "step": 310
    },
    {
      "epoch": 0.52,
      "learning_rate": 1.8962722852512157e-05,
      "loss": 0.2543,
      "step": 320
    },
    {
      "epoch": 0.53,
      "learning_rate": 1.893030794165316e-05,
      "loss": 0.3394,
      "step": 330
    },
    {
      "epoch": 0.55,
      "learning_rate": 1.889789303079417e-05,
      "loss": 0.3671,
      "step": 340
    },
    {
      "epoch": 0.57,
      "learning_rate": 1.8865478119935173e-05,
      "loss": 0.3203,
      "step": 350
    },
    {
      "epoch": 0.58,
      "learning_rate": 1.8833063209076178e-05,
      "loss": 0.3554,
      "step": 360
    },
    {
      "epoch": 0.6,
      "learning_rate": 1.8800648298217182e-05,
      "loss": 0.447,
      "step": 370
    },
    {
      "epoch": 0.62,
      "learning_rate": 1.8768233387358187e-05,
      "loss": 0.2841,
      "step": 380
    },
    {
      "epoch": 0.63,
      "learning_rate": 1.873581847649919e-05,
      "loss": 0.2836,
      "step": 390
    },
    {
      "epoch": 0.65,
      "learning_rate": 1.8703403565640196e-05,
      "loss": 0.2695,
      "step": 400
    },
    {
      "epoch": 0.66,
      "learning_rate": 1.86709886547812e-05,
      "loss": 0.2575,
      "step": 410
    },
    {
      "epoch": 0.68,
      "learning_rate": 1.8638573743922204e-05,
      "loss": 0.3278,
      "step": 420
    },
    {
      "epoch": 0.7,
      "learning_rate": 1.860615883306321e-05,
      "loss": 0.3364,
      "step": 430
    },
    {
      "epoch": 0.71,
      "learning_rate": 1.8573743922204213e-05,
      "loss": 0.3943,
      "step": 440
    },
    {
      "epoch": 0.73,
      "learning_rate": 1.8541329011345218e-05,
      "loss": 0.2842,
      "step": 450
    },
    {
      "epoch": 0.75,
      "learning_rate": 1.8508914100486226e-05,
      "loss": 0.3015,
      "step": 460
    },
    {
      "epoch": 0.76,
      "learning_rate": 1.847649918962723e-05,
      "loss": 0.4142,
      "step": 470
    },
    {
      "epoch": 0.78,
      "learning_rate": 1.8444084278768234e-05,
      "loss": 0.2241,
      "step": 480
    },
    {
      "epoch": 0.79,
      "learning_rate": 1.841166936790924e-05,
      "loss": 0.2813,
      "step": 490
    },
    {
      "epoch": 0.81,
      "learning_rate": 1.8379254457050243e-05,
      "loss": 0.4838,
      "step": 500
    },
    {
      "epoch": 0.83,
      "learning_rate": 1.8346839546191248e-05,
      "loss": 0.3666,
      "step": 510
    },
    {
      "epoch": 0.84,
      "learning_rate": 1.8314424635332255e-05,
      "loss": 0.3057,
      "step": 520
    },
    {
      "epoch": 0.86,
      "learning_rate": 1.828200972447326e-05,
      "loss": 0.2827,
      "step": 530
    },
    {
      "epoch": 0.88,
      "learning_rate": 1.8249594813614264e-05,
      "loss": 0.3023,
      "step": 540
    },
    {
      "epoch": 0.89,
      "learning_rate": 1.821717990275527e-05,
      "loss": 0.3193,
      "step": 550
    },
    {
      "epoch": 0.91,
      "learning_rate": 1.8184764991896273e-05,
      "loss": 0.2723,
      "step": 560
    },
    {
      "epoch": 0.92,
      "learning_rate": 1.8152350081037278e-05,
      "loss": 0.3176,
      "step": 570
    },
    {
      "epoch": 0.94,
      "learning_rate": 1.8119935170178285e-05,
      "loss": 0.3287,
      "step": 580
    },
    {
      "epoch": 0.96,
      "learning_rate": 1.808752025931929e-05,
      "loss": 0.3411,
      "step": 590
    },
    {
      "epoch": 0.97,
      "learning_rate": 1.8055105348460294e-05,
      "loss": 0.258,
      "step": 600
    },
    {
      "epoch": 0.99,
      "learning_rate": 1.80226904376013e-05,
      "loss": 0.2652,
      "step": 610
    },
    {
      "epoch": 1.0,
      "eval_FN": 482,
      "eval_FP": 1632,
      "eval_TN": 20082,
      "eval_TP": 1620,
      "eval_accuracy": 0.9112361437688948,
      "eval_f1": 0.6051550242809115,
      "eval_loss": 0.20642544329166412,
      "eval_precision": 0.4981549815498155,
      "eval_recall": 0.7706945765937203,
      "eval_runtime": 163.4555,
      "eval_samples_per_second": 145.703,
      "eval_steps_per_second": 9.11,
      "step": 617
    },
    {
      "epoch": 1.0,
      "learning_rate": 1.7990275526742303e-05,
      "loss": 0.2846,
      "step": 620
    },
    {
      "epoch": 1.02,
      "learning_rate": 1.7957860615883308e-05,
      "loss": 0.2579,
      "step": 630
    },
    {
      "epoch": 1.04,
      "learning_rate": 1.7925445705024312e-05,
      "loss": 0.2258,
      "step": 640
    },
    {
      "epoch": 1.05,
      "learning_rate": 1.7893030794165316e-05,
      "loss": 0.2329,
      "step": 650
    },
    {
      "epoch": 1.07,
      "learning_rate": 1.7860615883306324e-05,
      "loss": 0.3005,
      "step": 660
    },
    {
      "epoch": 1.09,
      "learning_rate": 1.782820097244733e-05,
      "loss": 0.4518,
      "step": 670
    },
    {
      "epoch": 1.1,
      "learning_rate": 1.7795786061588333e-05,
      "loss": 0.3148,
      "step": 680
    },
    {
      "epoch": 1.12,
      "learning_rate": 1.7763371150729338e-05,
      "loss": 0.2639,
      "step": 690
    },
    {
      "epoch": 1.13,
      "learning_rate": 1.7730956239870342e-05,
      "loss": 0.3315,
      "step": 700
    },
    {
      "epoch": 1.15,
      "learning_rate": 1.7698541329011346e-05,
      "loss": 0.2185,
      "step": 710
    },
    {
      "epoch": 1.17,
      "learning_rate": 1.766612641815235e-05,
      "loss": 0.2925,
      "step": 720
    },
    {
      "epoch": 1.18,
      "learning_rate": 1.7633711507293355e-05,
      "loss": 0.3322,
      "step": 730
    },
    {
      "epoch": 1.2,
      "learning_rate": 1.760129659643436e-05,
      "loss": 0.2494,
      "step": 740
    },
    {
      "epoch": 1.22,
      "learning_rate": 1.7568881685575364e-05,
      "loss": 0.3229,
      "step": 750
    },
    {
      "epoch": 1.23,
      "learning_rate": 1.7536466774716372e-05,
      "loss": 0.2811,
      "step": 760
    },
    {
      "epoch": 1.25,
      "learning_rate": 1.7504051863857376e-05,
      "loss": 0.2889,
      "step": 770
    },
    {
      "epoch": 1.26,
      "learning_rate": 1.747163695299838e-05,
      "loss": 0.2616,
      "step": 780
    },
    {
      "epoch": 1.28,
      "learning_rate": 1.7439222042139385e-05,
      "loss": 0.1848,
      "step": 790
    },
    {
      "epoch": 1.3,
      "learning_rate": 1.740680713128039e-05,
      "loss": 0.2647,
      "step": 800
    },
    {
      "epoch": 1.31,
      "learning_rate": 1.7374392220421394e-05,
      "loss": 0.3515,
      "step": 810
    },
    {
      "epoch": 1.33,
      "learning_rate": 1.7341977309562402e-05,
      "loss": 0.1854,
      "step": 820
    },
    {
      "epoch": 1.35,
      "learning_rate": 1.7309562398703406e-05,
      "loss": 0.2333,
      "step": 830
    },
    {
      "epoch": 1.36,
      "learning_rate": 1.727714748784441e-05,
      "loss": 0.2139,
      "step": 840
    },
    {
      "epoch": 1.38,
      "learning_rate": 1.7244732576985415e-05,
      "loss": 0.1914,
      "step": 850
    },
    {
      "epoch": 1.39,
      "learning_rate": 1.721231766612642e-05,
      "loss": 0.2545,
      "step": 860
    },
    {
      "epoch": 1.41,
      "learning_rate": 1.7179902755267424e-05,
      "loss": 0.291,
      "step": 870
    },
    {
      "epoch": 1.43,
      "learning_rate": 1.7147487844408432e-05,
      "loss": 0.1979,
      "step": 880
    },
    {
      "epoch": 1.44,
      "learning_rate": 1.7115072933549436e-05,
      "loss": 0.2861,
      "step": 890
    },
    {
      "epoch": 1.46,
      "learning_rate": 1.708265802269044e-05,
      "loss": 0.2521,
      "step": 900
    },
    {
      "epoch": 1.47,
      "learning_rate": 1.7050243111831445e-05,
      "loss": 0.2698,
      "step": 910
    },
    {
      "epoch": 1.49,
      "learning_rate": 1.701782820097245e-05,
      "loss": 0.2292,
      "step": 920
    },
    {
      "epoch": 1.51,
      "learning_rate": 1.6985413290113454e-05,
      "loss": 0.2355,
      "step": 930
    },
    {
      "epoch": 1.52,
      "learning_rate": 1.695299837925446e-05,
      "loss": 0.3023,
      "step": 940
    },
    {
      "epoch": 1.54,
      "learning_rate": 1.6920583468395463e-05,
      "loss": 0.2353,
      "step": 950
    },
    {
      "epoch": 1.56,
      "learning_rate": 1.6888168557536467e-05,
      "loss": 0.1852,
      "step": 960
    },
    {
      "epoch": 1.57,
      "learning_rate": 1.685575364667747e-05,
      "loss": 0.2002,
      "step": 970
    },
    {
      "epoch": 1.59,
      "learning_rate": 1.6823338735818476e-05,
      "loss": 0.2244,
      "step": 980
    },
    {
      "epoch": 1.6,
      "learning_rate": 1.679092382495948e-05,
      "loss": 0.2472,
      "step": 990
    },
    {
      "epoch": 1.62,
      "learning_rate": 1.6758508914100488e-05,
      "loss": 0.2106,
      "step": 1000
    },
    {
      "epoch": 1.64,
      "learning_rate": 1.6726094003241493e-05,
      "loss": 0.2613,
      "step": 1010
    },
    {
      "epoch": 1.65,
      "learning_rate": 1.6693679092382497e-05,
      "loss": 0.1889,
      "step": 1020
    },
    {
      "epoch": 1.67,
      "learning_rate": 1.66612641815235e-05,
      "loss": 0.2468,
      "step": 1030
    },
    {
      "epoch": 1.69,
      "learning_rate": 1.6628849270664506e-05,
      "loss": 0.233,
      "step": 1040
    },
    {
      "epoch": 1.7,
      "learning_rate": 1.659643435980551e-05,
      "loss": 0.2879,
      "step": 1050
    },
    {
      "epoch": 1.72,
      "learning_rate": 1.6564019448946518e-05,
      "loss": 0.2971,
      "step": 1060
    },
    {
      "epoch": 1.73,
      "learning_rate": 1.6531604538087523e-05,
      "loss": 0.1761,
      "step": 1070
    },
    {
      "epoch": 1.75,
      "learning_rate": 1.6499189627228527e-05,
      "loss": 0.2463,
      "step": 1080
    },
    {
      "epoch": 1.77,
      "learning_rate": 1.646677471636953e-05,
      "loss": 0.3134,
      "step": 1090
    },
    {
      "epoch": 1.78,
      "learning_rate": 1.6434359805510536e-05,
      "loss": 0.2656,
      "step": 1100
    },
    {
      "epoch": 1.8,
      "learning_rate": 1.640194489465154e-05,
      "loss": 0.2192,
      "step": 1110
    },
    {
      "epoch": 1.82,
      "learning_rate": 1.6369529983792548e-05,
      "loss": 0.2498,
      "step": 1120
    },
    {
      "epoch": 1.83,
      "learning_rate": 1.6337115072933553e-05,
      "loss": 0.3333,
      "step": 1130
    },
    {
      "epoch": 1.85,
      "learning_rate": 1.6304700162074557e-05,
      "loss": 0.1494,
      "step": 1140
    },
    {
      "epoch": 1.86,
      "learning_rate": 1.627228525121556e-05,
      "loss": 0.2732,
      "step": 1150
    },
    {
      "epoch": 1.88,
      "learning_rate": 1.6239870340356566e-05,
      "loss": 0.1968,
      "step": 1160
    },
    {
      "epoch": 1.9,
      "learning_rate": 1.620745542949757e-05,
      "loss": 0.2895,
      "step": 1170
    },
    {
      "epoch": 1.91,
      "learning_rate": 1.6175040518638575e-05,
      "loss": 0.2978,
      "step": 1180
    },
    {
      "epoch": 1.93,
      "learning_rate": 1.614262560777958e-05,
      "loss": 0.154,
      "step": 1190
    },
    {
      "epoch": 1.94,
      "learning_rate": 1.6110210696920584e-05,
      "loss": 0.2967,
      "step": 1200
    },
    {
      "epoch": 1.96,
      "learning_rate": 1.6077795786061588e-05,
      "loss": 0.1759,
      "step": 1210
    },
    {
      "epoch": 1.98,
      "learning_rate": 1.6045380875202596e-05,
      "loss": 0.2615,
      "step": 1220
    },
    {
      "epoch": 1.99,
      "learning_rate": 1.60129659643436e-05,
      "loss": 0.1546,
      "step": 1230
    },
    {
      "epoch": 2.0,
      "eval_FN": 157,
      "eval_FP": 2909,
      "eval_TN": 18805,
      "eval_TP": 1945,
      "eval_accuracy": 0.871263016459523,
      "eval_f1": 0.5592294422081656,
      "eval_loss": 0.3451124131679535,
      "eval_precision": 0.4007004532344458,
      "eval_recall": 0.9253092293054234,
      "eval_runtime": 163.5121,
      "eval_samples_per_second": 145.653,
      "eval_steps_per_second": 9.106,
      "step": 1234
    },
    {
      "epoch": 2.01,
      "learning_rate": 1.5980551053484605e-05,
      "loss": 0.168,
      "step": 1240
    },
    {
      "epoch": 2.03,
      "learning_rate": 1.594813614262561e-05,
      "loss": 0.2176,
      "step": 1250
    },
    {
      "epoch": 2.04,
      "learning_rate": 1.5915721231766613e-05,
      "loss": 0.2096,
      "step": 1260
    },
    {
      "epoch": 2.06,
      "learning_rate": 1.5883306320907618e-05,
      "loss": 0.2449,
      "step": 1270
    },
    {
      "epoch": 2.07,
      "learning_rate": 1.5850891410048622e-05,
      "loss": 0.192,
      "step": 1280
    },
    {
      "epoch": 2.09,
      "learning_rate": 1.5818476499189627e-05,
      "loss": 0.1588,
      "step": 1290
    },
    {
      "epoch": 2.11,
      "learning_rate": 1.578606158833063e-05,
      "loss": 0.1574,
      "step": 1300
    },
    {
      "epoch": 2.12,
      "learning_rate": 1.575364667747164e-05,
      "loss": 0.1314,
      "step": 1310
    },
    {
      "epoch": 2.14,
      "learning_rate": 1.5721231766612643e-05,
      "loss": 0.1042,
      "step": 1320
    },
    {
      "epoch": 2.16,
      "learning_rate": 1.5688816855753648e-05,
      "loss": 0.2451,
      "step": 1330
    },
    {
      "epoch": 2.17,
      "learning_rate": 1.5656401944894652e-05,
      "loss": 0.2176,
      "step": 1340
    },
    {
      "epoch": 2.19,
      "learning_rate": 1.5623987034035657e-05,
      "loss": 0.2368,
      "step": 1350
    },
    {
      "epoch": 2.2,
      "learning_rate": 1.559157212317666e-05,
      "loss": 0.2696,
      "step": 1360
    },
    {
      "epoch": 2.22,
      "learning_rate": 1.555915721231767e-05,
      "loss": 0.1635,
      "step": 1370
    },
    {
      "epoch": 2.24,
      "learning_rate": 1.5526742301458673e-05,
      "loss": 0.2513,
      "step": 1380
    },
    {
      "epoch": 2.25,
      "learning_rate": 1.5494327390599678e-05,
      "loss": 0.2009,
      "step": 1390
    },
    {
      "epoch": 2.27,
      "learning_rate": 1.5461912479740682e-05,
      "loss": 0.2145,
      "step": 1400
    },
    {
      "epoch": 2.29,
      "learning_rate": 1.5429497568881687e-05,
      "loss": 0.1785,
      "step": 1410
    },
    {
      "epoch": 2.3,
      "learning_rate": 1.539708265802269e-05,
      "loss": 0.2542,
      "step": 1420
    },
    {
      "epoch": 2.32,
      "learning_rate": 1.53646677471637e-05,
      "loss": 0.1935,
      "step": 1430
    },
    {
      "epoch": 2.33,
      "learning_rate": 1.5332252836304703e-05,
      "loss": 0.2062,
      "step": 1440
    },
    {
      "epoch": 2.35,
      "learning_rate": 1.5299837925445708e-05,
      "loss": 0.1961,
      "step": 1450
    },
    {
      "epoch": 2.37,
      "learning_rate": 1.5267423014586712e-05,
      "loss": 0.2316,
      "step": 1460
    },
    {
      "epoch": 2.38,
      "learning_rate": 1.5235008103727715e-05,
      "loss": 0.2016,
      "step": 1470
    },
    {
      "epoch": 2.4,
      "learning_rate": 1.520259319286872e-05,
      "loss": 0.259,
      "step": 1480
    },
    {
      "epoch": 2.41,
      "learning_rate": 1.5170178282009727e-05,
      "loss": 0.2153,
      "step": 1490
    },
    {
      "epoch": 2.43,
      "learning_rate": 1.5137763371150732e-05,
      "loss": 0.2561,
      "step": 1500
    },
    {
      "epoch": 2.45,
      "learning_rate": 1.5105348460291736e-05,
      "loss": 0.1776,
      "step": 1510
    },
    {
      "epoch": 2.46,
      "learning_rate": 1.507293354943274e-05,
      "loss": 0.1333,
      "step": 1520
    },
    {
      "epoch": 2.48,
      "learning_rate": 1.5040518638573745e-05,
      "loss": 0.1771,
      "step": 1530
    },
    {
      "epoch": 2.5,
      "learning_rate": 1.500810372771475e-05,
      "loss": 0.2519,
      "step": 1540
    },
    {
      "epoch": 2.51,
      "learning_rate": 1.4975688816855755e-05,
      "loss": 0.2557,
      "step": 1550
    },
    {
      "epoch": 2.53,
      "learning_rate": 1.494327390599676e-05,
      "loss": 0.1668,
      "step": 1560
    },
    {
      "epoch": 2.54,
      "learning_rate": 1.4910858995137764e-05,
      "loss": 0.2402,
      "step": 1570
    },
    {
      "epoch": 2.56,
      "learning_rate": 1.4878444084278769e-05,
      "loss": 0.1755,
      "step": 1580
    },
    {
      "epoch": 2.58,
      "learning_rate": 1.4846029173419773e-05,
      "loss": 0.2089,
      "step": 1590
    },
    {
      "epoch": 2.59,
      "learning_rate": 1.4813614262560778e-05,
      "loss": 0.1058,
      "step": 1600
    },
    {
      "epoch": 2.61,
      "learning_rate": 1.4781199351701785e-05,
      "loss": 0.1285,
      "step": 1610
    },
    {
      "epoch": 2.63,
      "learning_rate": 1.474878444084279e-05,
      "loss": 0.1526,
      "step": 1620
    },
    {
      "epoch": 2.64,
      "learning_rate": 1.4716369529983794e-05,
      "loss": 0.1623,
      "step": 1630
    },
    {
      "epoch": 2.66,
      "learning_rate": 1.4683954619124799e-05,
      "loss": 0.0889,
      "step": 1640
    },
    {
      "epoch": 2.67,
      "learning_rate": 1.4651539708265803e-05,
      "loss": 0.1593,
      "step": 1650
    },
    {
      "epoch": 2.69,
      "learning_rate": 1.4619124797406807e-05,
      "loss": 0.2728,
      "step": 1660
    },
    {
      "epoch": 2.71,
      "learning_rate": 1.4586709886547814e-05,
      "loss": 0.2498,
      "step": 1670
    },
    {
      "epoch": 2.72,
      "learning_rate": 1.4554294975688818e-05,
      "loss": 0.2153,
      "step": 1680
    },
    {
      "epoch": 2.74,
      "learning_rate": 1.4521880064829822e-05,
      "loss": 0.1796,
      "step": 1690
    },
    {
      "epoch": 2.76,
      "learning_rate": 1.4489465153970827e-05,
      "loss": 0.2013,
      "step": 1700
    },
    {
      "epoch": 2.77,
      "learning_rate": 1.4457050243111831e-05,
      "loss": 0.1228,
      "step": 1710
    },
    {
      "epoch": 2.79,
      "learning_rate": 1.4424635332252836e-05,
      "loss": 0.199,
      "step": 1720
    },
    {
      "epoch": 2.8,
      "learning_rate": 1.4392220421393844e-05,
      "loss": 0.1922,
      "step": 1730
    },
    {
      "epoch": 2.82,
      "learning_rate": 1.4359805510534848e-05,
      "loss": 0.2059,
      "step": 1740
    },
    {
      "epoch": 2.84,
      "learning_rate": 1.4327390599675852e-05,
      "loss": 0.2216,
      "step": 1750
    },
    {
      "epoch": 2.85,
      "learning_rate": 1.4294975688816857e-05,
      "loss": 0.1289,
      "step": 1760
    },
    {
      "epoch": 2.87,
      "learning_rate": 1.4262560777957861e-05,
      "loss": 0.2393,
      "step": 1770
    },
    {
      "epoch": 2.88,
      "learning_rate": 1.4230145867098866e-05,
      "loss": 0.2273,
      "step": 1780
    },
    {
      "epoch": 2.9,
      "learning_rate": 1.4197730956239872e-05,
      "loss": 0.2158,
      "step": 1790
    },
    {
      "epoch": 2.92,
      "learning_rate": 1.4165316045380876e-05,
      "loss": 0.1251,
      "step": 1800
    },
    {
      "epoch": 2.93,
      "learning_rate": 1.413290113452188e-05,
      "loss": 0.225,
      "step": 1810
    },
    {
      "epoch": 2.95,
      "learning_rate": 1.4100486223662885e-05,
      "loss": 0.1223,
      "step": 1820
    },
    {
      "epoch": 2.97,
      "learning_rate": 1.4068071312803891e-05,
      "loss": 0.284,
      "step": 1830
    },
    {
      "epoch": 2.98,
      "learning_rate": 1.4035656401944896e-05,
      "loss": 0.2243,
      "step": 1840
    },
    {
      "epoch": 3.0,
      "learning_rate": 1.4003241491085902e-05,
      "loss": 0.1072,
      "step": 1850
    },
    {
      "epoch": 3.0,
      "eval_FN": 156,
      "eval_FP": 2712,
      "eval_TN": 19002,
      "eval_TP": 1946,
      "eval_accuracy": 0.8795767551226067,
      "eval_f1": 0.5757396449704142,
      "eval_loss": 0.35726749897003174,
      "eval_precision": 0.41777586947187634,
      "eval_recall": 0.9257849666983825,
      "eval_runtime": 163.4686,
      "eval_samples_per_second": 145.692,
      "eval_steps_per_second": 9.109,
      "step": 1851
    },
    {
      "epoch": 3.01,
      "learning_rate": 1.3970826580226906e-05,
      "loss": 0.1103,
      "step": 1860
    },
    {
      "epoch": 3.03,
      "learning_rate": 1.393841166936791e-05,
      "loss": 0.1909,
      "step": 1870
    },
    {
      "epoch": 3.05,
      "learning_rate": 1.3905996758508915e-05,
      "loss": 0.1369,
      "step": 1880
    },
    {
      "epoch": 3.06,
      "learning_rate": 1.387358184764992e-05,
      "loss": 0.0592,
      "step": 1890
    },
    {
      "epoch": 3.08,
      "learning_rate": 1.3841166936790924e-05,
      "loss": 0.1428,
      "step": 1900
    },
    {
      "epoch": 3.1,
      "learning_rate": 1.380875202593193e-05,
      "loss": 0.1818,
      "step": 1910
    },
    {
      "epoch": 3.11,
      "learning_rate": 1.3776337115072936e-05,
      "loss": 0.1445,
      "step": 1920
    },
    {
      "epoch": 3.13,
      "learning_rate": 1.374392220421394e-05,
      "loss": 0.1359,
      "step": 1930
    },
    {
      "epoch": 3.14,
      "learning_rate": 1.3711507293354945e-05,
      "loss": 0.1422,
      "step": 1940
    },
    {
      "epoch": 3.16,
      "learning_rate": 1.367909238249595e-05,
      "loss": 0.1126,
      "step": 1950
    },
    {
      "epoch": 3.18,
      "learning_rate": 1.3646677471636954e-05,
      "loss": 0.114,
      "step": 1960
    },
    {
      "epoch": 3.19,
      "learning_rate": 1.361426256077796e-05,
      "loss": 0.1249,
      "step": 1970
    },
    {
      "epoch": 3.21,
      "learning_rate": 1.3581847649918964e-05,
      "loss": 0.1207,
      "step": 1980
    },
    {
      "epoch": 3.23,
      "learning_rate": 1.3549432739059969e-05,
      "loss": 0.0807,
      "step": 1990
    },
    {
      "epoch": 3.24,
      "learning_rate": 1.3517017828200973e-05,
      "loss": 0.0677,
      "step": 2000
    },
    {
      "epoch": 3.26,
      "learning_rate": 1.3484602917341978e-05,
      "loss": 0.1993,
      "step": 2010
    },
    {
      "epoch": 3.27,
      "learning_rate": 1.3452188006482982e-05,
      "loss": 0.1866,
      "step": 2020
    },
    {
      "epoch": 3.29,
      "learning_rate": 1.341977309562399e-05,
      "loss": 0.1439,
      "step": 2030
    },
    {
      "epoch": 3.31,
      "learning_rate": 1.3387358184764994e-05,
      "loss": 0.262,
      "step": 2040
    },
    {
      "epoch": 3.32,
      "learning_rate": 1.3354943273905999e-05,
      "loss": 0.1777,
      "step": 2050
    },
    {
      "epoch": 3.34,
      "learning_rate": 1.3322528363047003e-05,
      "loss": 0.0708,
      "step": 2060
    },
    {
      "epoch": 3.35,
      "learning_rate": 1.3290113452188008e-05,
      "loss": 0.2216,
      "step": 2070
    },
    {
      "epoch": 3.37,
      "learning_rate": 1.3257698541329012e-05,
      "loss": 0.1717,
      "step": 2080
    },
    {
      "epoch": 3.39,
      "learning_rate": 1.3225283630470018e-05,
      "loss": 0.1828,
      "step": 2090
    },
    {
      "epoch": 3.4,
      "learning_rate": 1.3192868719611023e-05,
      "loss": 0.0833,
      "step": 2100
    },
    {
      "epoch": 3.42,
      "learning_rate": 1.3160453808752027e-05,
      "loss": 0.1837,
      "step": 2110
    },
    {
      "epoch": 3.44,
      "learning_rate": 1.3128038897893031e-05,
      "loss": 0.1991,
      "step": 2120
    },
    {
      "epoch": 3.45,
      "learning_rate": 1.3095623987034036e-05,
      "loss": 0.142,
      "step": 2130
    },
    {
      "epoch": 3.47,
      "learning_rate": 1.306320907617504e-05,
      "loss": 0.0458,
      "step": 2140
    },
    {
      "epoch": 3.48,
      "learning_rate": 1.3030794165316048e-05,
      "loss": 0.0546,
      "step": 2150
    },
    {
      "epoch": 3.5,
      "learning_rate": 1.2998379254457052e-05,
      "loss": 0.0612,
      "step": 2160
    },
    {
      "epoch": 3.52,
      "learning_rate": 1.2965964343598057e-05,
      "loss": 0.1679,
      "step": 2170
    },
    {
      "epoch": 3.53,
      "learning_rate": 1.2933549432739061e-05,
      "loss": 0.1088,
      "step": 2180
    },
    {
      "epoch": 3.55,
      "learning_rate": 1.2901134521880066e-05,
      "loss": 0.1309,
      "step": 2190
    },
    {
      "epoch": 3.57,
      "learning_rate": 1.286871961102107e-05,
      "loss": 0.186,
      "step": 2200
    },
    {
      "epoch": 3.58,
      "learning_rate": 1.2836304700162076e-05,
      "loss": 0.1601,
      "step": 2210
    },
    {
      "epoch": 3.6,
      "learning_rate": 1.280388978930308e-05,
      "loss": 0.1139,
      "step": 2220
    },
    {
      "epoch": 3.61,
      "learning_rate": 1.2771474878444085e-05,
      "loss": 0.2198,
      "step": 2230
    },
    {
      "epoch": 3.63,
      "learning_rate": 1.273905996758509e-05,
      "loss": 0.0883,
      "step": 2240
    },
    {
      "epoch": 3.65,
      "learning_rate": 1.2706645056726094e-05,
      "loss": 0.1959,
      "step": 2250
    },
    {
      "epoch": 3.66,
      "learning_rate": 1.2674230145867098e-05,
      "loss": 0.1163,
      "step": 2260
    },
    {
      "epoch": 3.68,
      "learning_rate": 1.2641815235008106e-05,
      "loss": 0.0485,
      "step": 2270
    },
    {
      "epoch": 3.7,
      "learning_rate": 1.260940032414911e-05,
      "loss": 0.1146,
      "step": 2280
    },
    {
      "epoch": 3.71,
      "learning_rate": 1.2576985413290115e-05,
      "loss": 0.1108,
      "step": 2290
    },
    {
      "epoch": 3.73,
      "learning_rate": 1.254457050243112e-05,
      "loss": 0.19,
      "step": 2300
    },
    {
      "epoch": 3.74,
      "learning_rate": 1.2512155591572124e-05,
      "loss": 0.1567,
      "step": 2310
    },
    {
      "epoch": 3.76,
      "learning_rate": 1.2479740680713128e-05,
      "loss": 0.2529,
      "step": 2320
    },
    {
      "epoch": 3.78,
      "learning_rate": 1.2447325769854134e-05,
      "loss": 0.138,
      "step": 2330
    },
    {
      "epoch": 3.79,
      "learning_rate": 1.2414910858995139e-05,
      "loss": 0.0951,
      "step": 2340
    },
    {
      "epoch": 3.81,
      "learning_rate": 1.2382495948136143e-05,
      "loss": 0.1542,
      "step": 2350
    },
    {
      "epoch": 3.82,
      "learning_rate": 1.2350081037277148e-05,
      "loss": 0.1228,
      "step": 2360
    },
    {
      "epoch": 3.84,
      "learning_rate": 1.2317666126418152e-05,
      "loss": 0.2143,
      "step": 2370
    },
    {
      "epoch": 3.86,
      "learning_rate": 1.2285251215559157e-05,
      "loss": 0.173,
      "step": 2380
    },
    {
      "epoch": 3.87,
      "learning_rate": 1.2252836304700164e-05,
      "loss": 0.1822,
      "step": 2390
    },
    {
      "epoch": 3.89,
      "learning_rate": 1.2220421393841169e-05,
      "loss": 0.0713,
      "step": 2400
    },
    {
      "epoch": 3.91,
      "learning_rate": 1.2188006482982173e-05,
      "loss": 0.1723,
      "step": 2410
    },
    {
      "epoch": 3.92,
      "learning_rate": 1.2155591572123178e-05,
      "loss": 0.081,
      "step": 2420
    },
    {
      "epoch": 3.94,
      "learning_rate": 1.2123176661264182e-05,
      "loss": 0.2095,
      "step": 2430
    },
    {
      "epoch": 3.95,
      "learning_rate": 1.2090761750405187e-05,
      "loss": 0.0885,
      "step": 2440
    },
    {
      "epoch": 3.97,
      "learning_rate": 1.2058346839546193e-05,
      "loss": 0.1621,
      "step": 2450
    },
    {
      "epoch": 3.99,
      "learning_rate": 1.2025931928687197e-05,
      "loss": 0.1174,
      "step": 2460
    },
    {
      "epoch": 4.0,
      "eval_FN": 248,
      "eval_FP": 2228,
      "eval_TN": 19486,
      "eval_TP": 1854,
      "eval_accuracy": 0.896036278132348,
      "eval_f1": 0.5996119016817594,
      "eval_loss": 0.40582790970802307,
      "eval_precision": 0.4541891229789319,
      "eval_recall": 0.8820171265461465,
      "eval_runtime": 163.4648,
      "eval_samples_per_second": 145.695,
      "eval_steps_per_second": 9.109,
      "step": 2468
    },
    {
      "epoch": 4.0,
      "learning_rate": 1.1993517017828202e-05,
      "loss": 0.0838,
      "step": 2470
    },
    {
      "epoch": 4.02,
      "learning_rate": 1.1961102106969208e-05,
      "loss": 0.2068,
      "step": 2480
    },
    {
      "epoch": 4.04,
      "learning_rate": 1.1928687196110212e-05,
      "loss": 0.0945,
      "step": 2490
    },
    {
      "epoch": 4.05,
      "learning_rate": 1.1896272285251216e-05,
      "loss": 0.1064,
      "step": 2500
    },
    {
      "epoch": 4.07,
      "learning_rate": 1.1863857374392221e-05,
      "loss": 0.0534,
      "step": 2510
    },
    {
      "epoch": 4.08,
      "learning_rate": 1.1831442463533227e-05,
      "loss": 0.0908,
      "step": 2520
    },
    {
      "epoch": 4.1,
      "learning_rate": 1.1799027552674231e-05,
      "loss": 0.0668,
      "step": 2530
    },
    {
      "epoch": 4.12,
      "learning_rate": 1.1766612641815236e-05,
      "loss": 0.0302,
      "step": 2540
    },
    {
      "epoch": 4.13,
      "learning_rate": 1.173419773095624e-05,
      "loss": 0.2408,
      "step": 2550
    },
    {
      "epoch": 4.15,
      "learning_rate": 1.1701782820097245e-05,
      "loss": 0.0738,
      "step": 2560
    },
    {
      "epoch": 4.17,
      "learning_rate": 1.166936790923825e-05,
      "loss": 0.0726,
      "step": 2570
    },
    {
      "epoch": 4.18,
      "learning_rate": 1.1636952998379257e-05,
      "loss": 0.0425,
      "step": 2580
    },
    {
      "epoch": 4.2,
      "learning_rate": 1.1604538087520261e-05,
      "loss": 0.1253,
      "step": 2590
    },
    {
      "epoch": 4.21,
      "learning_rate": 1.1572123176661266e-05,
      "loss": 0.0968,
      "step": 2600
    },
    {
      "epoch": 4.23,
      "learning_rate": 1.153970826580227e-05,
      "loss": 0.0373,
      "step": 2610
    },
    {
      "epoch": 4.25,
      "learning_rate": 1.1507293354943275e-05,
      "loss": 0.119,
      "step": 2620
    },
    {
      "epoch": 4.26,
      "learning_rate": 1.1474878444084279e-05,
      "loss": 0.0756,
      "step": 2630
    },
    {
      "epoch": 4.28,
      "learning_rate": 1.1442463533225285e-05,
      "loss": 0.0924,
      "step": 2640
    },
    {
      "epoch": 4.29,
      "learning_rate": 1.141004862236629e-05,
      "loss": 0.0425,
      "step": 2650
    },
    {
      "epoch": 4.31,
      "learning_rate": 1.1377633711507294e-05,
      "loss": 0.1172,
      "step": 2660
    },
    {
      "epoch": 4.33,
      "learning_rate": 1.1345218800648299e-05,
      "loss": 0.088,
      "step": 2670
    },
    {
      "epoch": 4.34,
      "learning_rate": 1.1312803889789303e-05,
      "loss": 0.2053,
      "step": 2680
    },
    {
      "epoch": 4.36,
      "learning_rate": 1.1280388978930307e-05,
      "loss": 0.071,
      "step": 2690
    },
    {
      "epoch": 4.38,
      "learning_rate": 1.1247974068071315e-05,
      "loss": 0.1104,
      "step": 2700
    },
    {
      "epoch": 4.39,
      "learning_rate": 1.121555915721232e-05,
      "loss": 0.0138,
      "step": 2710
    },
    {
      "epoch": 4.41,
      "learning_rate": 1.1183144246353324e-05,
      "loss": 0.2031,
      "step": 2720
    },
    {
      "epoch": 4.42,
      "learning_rate": 1.1150729335494328e-05,
      "loss": 0.0964,
      "step": 2730
    },
    {
      "epoch": 4.44,
      "learning_rate": 1.1118314424635333e-05,
      "loss": 0.1935,
      "step": 2740
    },
    {
      "epoch": 4.46,
      "learning_rate": 1.1085899513776337e-05,
      "loss": 0.0557,
      "step": 2750
    },
    {
      "epoch": 4.47,
      "learning_rate": 1.1053484602917343e-05,
      "loss": 0.0468,
      "step": 2760
    },
    {
      "epoch": 4.49,
      "learning_rate": 1.1021069692058348e-05,
      "loss": 0.2173,
      "step": 2770
    },
    {
      "epoch": 4.51,
      "learning_rate": 1.0988654781199352e-05,
      "loss": 0.0768,
      "step": 2780
    },
    {
      "epoch": 4.52,
      "learning_rate": 1.0956239870340357e-05,
      "loss": 0.0247,
      "step": 2790
    },
    {
      "epoch": 4.54,
      "learning_rate": 1.0923824959481361e-05,
      "loss": 0.1874,
      "step": 2800
    },
    {
      "epoch": 4.55,
      "learning_rate": 1.0891410048622366e-05,
      "loss": 0.0641,
      "step": 2810
    },
    {
      "epoch": 4.57,
      "learning_rate": 1.0858995137763373e-05,
      "loss": 0.0508,
      "step": 2820
    },
    {
      "epoch": 4.59,
      "learning_rate": 1.0826580226904378e-05,
      "loss": 0.1219,
      "step": 2830
    },
    {
      "epoch": 4.6,
      "learning_rate": 1.0794165316045382e-05,
      "loss": 0.1631,
      "step": 2840
    },
    {
      "epoch": 4.62,
      "learning_rate": 1.0761750405186387e-05,
      "loss": 0.0482,
      "step": 2850
    },
    {
      "epoch": 4.64,
      "learning_rate": 1.0729335494327391e-05,
      "loss": 0.074,
      "step": 2860
    },
    {
      "epoch": 4.65,
      "learning_rate": 1.0696920583468395e-05,
      "loss": 0.0962,
      "step": 2870
    },
    {
      "epoch": 4.67,
      "learning_rate": 1.0664505672609402e-05,
      "loss": 0.0345,
      "step": 2880
    },
    {
      "epoch": 4.68,
      "learning_rate": 1.0632090761750406e-05,
      "loss": 0.1194,
      "step": 2890
    },
    {
      "epoch": 4.7,
      "learning_rate": 1.059967585089141e-05,
      "loss": 0.214,
      "step": 2900
    },
    {
      "epoch": 4.72,
      "learning_rate": 1.0567260940032415e-05,
      "loss": 0.1247,
      "step": 2910
    },
    {
      "epoch": 4.73,
      "learning_rate": 1.053484602917342e-05,
      "loss": 0.1379,
      "step": 2920
    },
    {
      "epoch": 4.75,
      "learning_rate": 1.0502431118314424e-05,
      "loss": 0.1062,
      "step": 2930
    },
    {
      "epoch": 4.76,
      "learning_rate": 1.0470016207455432e-05,
      "loss": 0.0877,
      "step": 2940
    },
    {
      "epoch": 4.78,
      "learning_rate": 1.0437601296596436e-05,
      "loss": 0.0496,
      "step": 2950
    },
    {
      "epoch": 4.8,
      "learning_rate": 1.040518638573744e-05,
      "loss": 0.1037,
      "step": 2960
    },
    {
      "epoch": 4.81,
      "learning_rate": 1.0372771474878445e-05,
      "loss": 0.246,
      "step": 2970
    },
    {
      "epoch": 4.83,
      "learning_rate": 1.034035656401945e-05,
      "loss": 0.1238,
      "step": 2980
    },
    {
      "epoch": 4.85,
      "learning_rate": 1.0307941653160454e-05,
      "loss": 0.1542,
      "step": 2990
    },
    {
      "epoch": 4.86,
      "learning_rate": 1.027552674230146e-05,
      "loss": 0.0263,
      "step": 3000
    },
    {
      "epoch": 4.88,
      "learning_rate": 1.0243111831442464e-05,
      "loss": 0.1362,
      "step": 3010
    },
    {
      "epoch": 4.89,
      "learning_rate": 1.0210696920583469e-05,
      "loss": 0.1256,
      "step": 3020
    },
    {
      "epoch": 4.91,
      "learning_rate": 1.0178282009724473e-05,
      "loss": 0.1255,
      "step": 3030
    },
    {
      "epoch": 4.93,
      "learning_rate": 1.014586709886548e-05,
      "loss": 0.1026,
      "step": 3040
    },
    {
      "epoch": 4.94,
      "learning_rate": 1.0113452188006484e-05,
      "loss": 0.1085,
      "step": 3050
    },
    {
      "epoch": 4.96,
      "learning_rate": 1.008103727714749e-05,
      "loss": 0.089,
      "step": 3060
    },
    {
      "epoch": 4.98,
      "learning_rate": 1.0048622366288494e-05,
      "loss": 0.0874,
      "step": 3070
    },
    {
      "epoch": 4.99,
      "learning_rate": 1.0016207455429499e-05,
      "loss": 0.0332,
      "step": 3080
    },
    {
      "epoch": 5.0,
      "eval_FN": 265,
      "eval_FP": 2027,
      "eval_TN": 19687,
      "eval_TP": 1837,
      "eval_accuracy": 0.9037621766879409,
      "eval_f1": 0.6158229969829031,
      "eval_loss": 0.44811293482780457,
      "eval_precision": 0.4754140786749482,
      "eval_recall": 0.8739295908658421,
      "eval_runtime": 163.4388,
      "eval_samples_per_second": 145.718,
      "eval_steps_per_second": 9.11,
      "step": 3085
    },
    {
      "epoch": 5.01,
      "learning_rate": 9.983792544570503e-06,
      "loss": 0.1044,
      "step": 3090
    },
    {
      "epoch": 5.02,
      "learning_rate": 9.951377633711507e-06,
      "loss": 0.0295,
      "step": 3100
    },
    {
      "epoch": 5.04,
      "learning_rate": 9.918962722852514e-06,
      "loss": 0.1118,
      "step": 3110
    },
    {
      "epoch": 5.06,
      "learning_rate": 9.886547811993518e-06,
      "loss": 0.1137,
      "step": 3120
    },
    {
      "epoch": 5.07,
      "learning_rate": 9.854132901134522e-06,
      "loss": 0.0962,
      "step": 3130
    },
    {
      "epoch": 5.09,
      "learning_rate": 9.821717990275529e-06,
      "loss": 0.0406,
      "step": 3140
    },
    {
      "epoch": 5.11,
      "learning_rate": 9.789303079416533e-06,
      "loss": 0.0423,
      "step": 3150
    },
    {
      "epoch": 5.12,
      "learning_rate": 9.756888168557537e-06,
      "loss": 0.0911,
      "step": 3160
    },
    {
      "epoch": 5.14,
      "learning_rate": 9.724473257698542e-06,
      "loss": 0.0596,
      "step": 3170
    },
    {
      "epoch": 5.15,
      "learning_rate": 9.692058346839546e-06,
      "loss": 0.0669,
      "step": 3180
    },
    {
      "epoch": 5.17,
      "learning_rate": 9.65964343598055e-06,
      "loss": 0.1184,
      "step": 3190
    },
    {
      "epoch": 5.19,
      "learning_rate": 9.627228525121557e-06,
      "loss": 0.0717,
      "step": 3200
    },
    {
      "epoch": 5.2,
      "learning_rate": 9.594813614262561e-06,
      "loss": 0.0908,
      "step": 3210
    },
    {
      "epoch": 5.22,
      "learning_rate": 9.562398703403566e-06,
      "loss": 0.0396,
      "step": 3220
    },
    {
      "epoch": 5.24,
      "learning_rate": 9.529983792544572e-06,
      "loss": 0.1159,
      "step": 3230
    },
    {
      "epoch": 5.25,
      "learning_rate": 9.497568881685576e-06,
      "loss": 0.0859,
      "step": 3240
    },
    {
      "epoch": 5.27,
      "learning_rate": 9.46515397082658e-06,
      "loss": 0.1926,
      "step": 3250
    },
    {
      "epoch": 5.28,
      "learning_rate": 9.432739059967587e-06,
      "loss": 0.063,
      "step": 3260
    },
    {
      "epoch": 5.3,
      "learning_rate": 9.400324149108591e-06,
      "loss": 0.0484,
      "step": 3270
    },
    {
      "epoch": 5.32,
      "learning_rate": 9.367909238249596e-06,
      "loss": 0.0662,
      "step": 3280
    },
    {
      "epoch": 5.33,
      "learning_rate": 9.3354943273906e-06,
      "loss": 0.0176,
      "step": 3290
    },
    {
      "epoch": 5.35,
      "learning_rate": 9.303079416531604e-06,
      "loss": 0.0706,
      "step": 3300
    },
    {
      "epoch": 5.36,
      "learning_rate": 9.270664505672609e-06,
      "loss": 0.0137,
      "step": 3310
    },
    {
      "epoch": 5.38,
      "learning_rate": 9.238249594813615e-06,
      "loss": 0.0692,
      "step": 3320
    },
    {
      "epoch": 5.4,
      "learning_rate": 9.20583468395462e-06,
      "loss": 0.1163,
      "step": 3330
    },
    {
      "epoch": 5.41,
      "learning_rate": 9.173419773095624e-06,
      "loss": 0.0309,
      "step": 3340
    },
    {
      "epoch": 5.43,
      "learning_rate": 9.14100486223663e-06,
      "loss": 0.1162,
      "step": 3350
    },
    {
      "epoch": 5.45,
      "learning_rate": 9.108589951377634e-06,
      "loss": 0.0535,
      "step": 3360
    },
    {
      "epoch": 5.46,
      "learning_rate": 9.076175040518639e-06,
      "loss": 0.0639,
      "step": 3370
    },
    {
      "epoch": 5.48,
      "learning_rate": 9.043760129659645e-06,
      "loss": 0.0373,
      "step": 3380
    },
    {
      "epoch": 5.49,
      "learning_rate": 9.01134521880065e-06,
      "loss": 0.029,
      "step": 3390
    },
    {
      "epoch": 5.51,
      "learning_rate": 8.978930307941654e-06,
      "loss": 0.0226,
      "step": 3400
    },
    {
      "epoch": 5.53,
      "learning_rate": 8.946515397082658e-06,
      "loss": 0.0424,
      "step": 3410
    },
    {
      "epoch": 5.54,
      "learning_rate": 8.914100486223664e-06,
      "loss": 0.0916,
      "step": 3420
    },
    {
      "epoch": 5.56,
      "learning_rate": 8.881685575364669e-06,
      "loss": 0.0545,
      "step": 3430
    },
    {
      "epoch": 5.58,
      "learning_rate": 8.849270664505673e-06,
      "loss": 0.0816,
      "step": 3440
    },
    {
      "epoch": 5.59,
      "learning_rate": 8.816855753646678e-06,
      "loss": 0.0476,
      "step": 3450
    },
    {
      "epoch": 5.61,
      "learning_rate": 8.784440842787682e-06,
      "loss": 0.0787,
      "step": 3460
    },
    {
      "epoch": 5.62,
      "learning_rate": 8.752025931928688e-06,
      "loss": 0.1371,
      "step": 3470
    },
    {
      "epoch": 5.64,
      "learning_rate": 8.719611021069693e-06,
      "loss": 0.0987,
      "step": 3480
    },
    {
      "epoch": 5.66,
      "learning_rate": 8.687196110210697e-06,
      "loss": 0.033,
      "step": 3490
    },
    {
      "epoch": 5.67,
      "learning_rate": 8.654781199351703e-06,
      "loss": 0.033,
      "step": 3500
    },
    {
      "epoch": 5.69,
      "learning_rate": 8.622366288492708e-06,
      "loss": 0.0422,
      "step": 3510
    },
    {
      "epoch": 5.71,
      "learning_rate": 8.589951377633712e-06,
      "loss": 0.0528,
      "step": 3520
    },
    {
      "epoch": 5.72,
      "learning_rate": 8.557536466774718e-06,
      "loss": 0.0403,
      "step": 3530
    },
    {
      "epoch": 5.74,
      "learning_rate": 8.525121555915723e-06,
      "loss": 0.0983,
      "step": 3540
    },
    {
      "epoch": 5.75,
      "learning_rate": 8.492706645056727e-06,
      "loss": 0.0734,
      "step": 3550
    },
    {
      "epoch": 5.77,
      "learning_rate": 8.460291734197731e-06,
      "loss": 0.055,
      "step": 3560
    },
    {
      "epoch": 5.79,
      "learning_rate": 8.427876823338736e-06,
      "loss": 0.0899,
      "step": 3570
    },
    {
      "epoch": 5.8,
      "learning_rate": 8.39546191247974e-06,
      "loss": 0.1225,
      "step": 3580
    },
    {
      "epoch": 5.82,
      "learning_rate": 8.363047001620746e-06,
      "loss": 0.0634,
      "step": 3590
    },
    {
      "epoch": 5.83,
      "learning_rate": 8.33063209076175e-06,
      "loss": 0.106,
      "step": 3600
    },
    {
      "epoch": 5.85,
      "learning_rate": 8.298217179902755e-06,
      "loss": 0.0207,
      "step": 3610
    },
    {
      "epoch": 5.87,
      "learning_rate": 8.265802269043761e-06,
      "loss": 0.0404,
      "step": 3620
    },
    {
      "epoch": 5.88,
      "learning_rate": 8.233387358184766e-06,
      "loss": 0.1048,
      "step": 3630
    },
    {
      "epoch": 5.9,
      "learning_rate": 8.20097244732577e-06,
      "loss": 0.002,
      "step": 3640
    },
    {
      "epoch": 5.92,
      "learning_rate": 8.168557536466776e-06,
      "loss": 0.1558,
      "step": 3650
    },
    {
      "epoch": 5.93,
      "learning_rate": 8.13614262560778e-06,
      "loss": 0.0639,
      "step": 3660
    },
    {
      "epoch": 5.95,
      "learning_rate": 8.103727714748785e-06,
      "loss": 0.1332,
      "step": 3670
    },
    {
      "epoch": 5.96,
      "learning_rate": 8.07131280388979e-06,
      "loss": 0.0599,
      "step": 3680
    },
    {
      "epoch": 5.98,
      "learning_rate": 8.038897893030794e-06,
      "loss": 0.1027,
      "step": 3690
    },
    {
      "epoch": 6.0,
      "learning_rate": 8.0064829821718e-06,
      "loss": 0.1128,
      "step": 3700
    },
    {
      "epoch": 6.0,
      "eval_FN": 302,
      "eval_FP": 1834,
      "eval_TN": 19880,
      "eval_TP": 1800,
      "eval_accuracy": 0.9103123950285522,
      "eval_f1": 0.6276150627615064,
      "eval_loss": 0.4452013671398163,
      "eval_precision": 0.4953219592735278,
      "eval_recall": 0.8563273073263559,
      "eval_runtime": 163.472,
      "eval_samples_per_second": 145.689,
      "eval_steps_per_second": 9.109,
      "step": 3702
    }
  ],
  "logging_steps": 10,
  "max_steps": 6170,
  "num_train_epochs": 10,
  "save_steps": 500,
  "total_flos": 1.557827936575488e+16,
  "trial_name": null,
  "trial_params": null
}