{
  "best_metric": 0.4437299035369775,
  "best_model_checkpoint": "../save/jtrans-malware/checkpoint-3000",
  "epoch": 9.043927648578812,
  "global_step": 3500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {"epoch": 0.03, "learning_rate": 9.974160206718347e-05, "loss": 0.434, "step": 10},
    {"epoch": 0.05, "learning_rate": 9.948320413436693e-05, "loss": 0.222, "step": 20},
    {"epoch": 0.08, "learning_rate": 9.92248062015504e-05, "loss": 0.1538, "step": 30},
    {"epoch": 0.1, "learning_rate": 9.896640826873386e-05, "loss": 0.1393, "step": 40},
    {"epoch": 0.13, "learning_rate": 9.870801033591732e-05, "loss": 0.1326, "step": 50},
    {"epoch": 0.16, "learning_rate": 9.844961240310078e-05, "loss": 0.1337, "step": 60},
    {"epoch": 0.18, "learning_rate": 9.819121447028425e-05, "loss": 0.1312, "step": 70},
    {"epoch": 0.21, "learning_rate": 9.793281653746771e-05, "loss": 0.1239, "step": 80},
    {"epoch": 0.23, "learning_rate": 9.767441860465116e-05, "loss": 0.1261, "step": 90},
    {"epoch": 0.26, "learning_rate": 9.741602067183462e-05, "loss": 0.1246, "step": 100},
    {"epoch": 0.26, "eval_accuracy": 0.9733742331288343, "eval_f1": 0.09958506224066391, "eval_loss": 0.13609381020069122, "eval_precision": 0.9230769230769231, "eval_recall": 0.05263157894736842, "eval_runtime": 11.6719, "eval_samples_per_second": 13.965, "eval_steps_per_second": 0.942, "step": 100},
    {"epoch": 0.28, "learning_rate": 9.71576227390181e-05, "loss": 0.1192, "step": 110},
    {"epoch": 0.31, "learning_rate": 9.689922480620155e-05, "loss": 0.1341, "step": 120},
    {"epoch": 0.34, "learning_rate": 9.664082687338501e-05, "loss": 0.1231, "step": 130},
    {"epoch": 0.36, "learning_rate": 9.638242894056848e-05, "loss": 0.1399, "step": 140},
    {"epoch": 0.39, "learning_rate": 9.612403100775195e-05, "loss": 0.1262, "step": 150},
    {"epoch": 0.41, "learning_rate": 9.58656330749354e-05, "loss": 0.133, "step": 160},
    {"epoch": 0.44, "learning_rate": 9.560723514211886e-05, "loss": 0.1247, "step": 170},
    {"epoch": 0.47, "learning_rate": 9.534883720930233e-05, "loss": 0.129, "step": 180},
    {"epoch": 0.49, "learning_rate": 9.509043927648579e-05, "loss": 0.1275, "step": 190},
    {"epoch": 0.52, "learning_rate": 9.483204134366925e-05, "loss": 0.1267, "step": 200},
    {"epoch": 0.52, "eval_accuracy": 0.9720245398773006, "eval_f1": 0.0, "eval_loss": 0.12009111791849136, "eval_precision": 0.0, "eval_recall": 0.0, "eval_runtime": 11.6034, "eval_samples_per_second": 14.048, "eval_steps_per_second": 0.948, "step": 200},
    {"epoch": 0.54, "learning_rate": 9.457364341085272e-05, "loss": 0.1257, "step": 210},
    {"epoch": 0.57, "learning_rate": 9.431524547803618e-05, "loss": 0.121, "step": 220},
    {"epoch": 0.59, "learning_rate": 9.405684754521964e-05, "loss": 0.1238, "step": 230},
    {"epoch": 0.62, "learning_rate": 9.379844961240311e-05, "loss": 0.1284, "step": 240},
    {"epoch": 0.65, "learning_rate": 9.354005167958657e-05, "loss": 0.1206, "step": 250},
    {"epoch": 0.67, "learning_rate": 9.328165374677002e-05, "loss": 0.1288, "step": 260},
    {"epoch": 0.7, "learning_rate": 9.30232558139535e-05, "loss": 0.1151, "step": 270},
    {"epoch": 0.72, "learning_rate": 9.276485788113696e-05, "loss": 0.1144, "step": 280},
    {"epoch": 0.75, "learning_rate": 9.250645994832042e-05, "loss": 0.13, "step": 290},
    {"epoch": 0.78, "learning_rate": 9.224806201550387e-05, "loss": 0.1202, "step": 300},
    {"epoch": 0.78, "eval_accuracy": 0.9734969325153374, "eval_f1": 0.10743801652892561, "eval_loss": 0.11869814246892929, "eval_precision": 0.9285714285714286, "eval_recall": 0.05701754385964912, "eval_runtime": 11.8735, "eval_samples_per_second": 13.728, "eval_steps_per_second": 0.926, "step": 300},
    {"epoch": 0.8, "learning_rate": 9.198966408268735e-05, "loss": 0.1218, "step": 310},
    {"epoch": 0.83, "learning_rate": 9.173126614987081e-05, "loss": 0.1218, "step": 320},
    {"epoch": 0.85, "learning_rate": 9.147286821705426e-05, "loss": 0.1197, "step": 330},
    {"epoch": 0.88, "learning_rate": 9.121447028423772e-05, "loss": 0.1221, "step": 340},
    {"epoch": 0.9, "learning_rate": 9.09560723514212e-05, "loss": 0.1223, "step": 350},
    {"epoch": 0.93, "learning_rate": 9.069767441860465e-05, "loss": 0.12, "step": 360},
    {"epoch": 0.96, "learning_rate": 9.043927648578811e-05, "loss": 0.1113, "step": 370},
    {"epoch": 0.98, "learning_rate": 9.018087855297158e-05, "loss": 0.121, "step": 380},
    {"epoch": 1.01, "learning_rate": 8.992248062015505e-05, "loss": 0.1195, "step": 390},
    {"epoch": 1.03, "learning_rate": 8.96640826873385e-05, "loss": 0.1227, "step": 400},
    {"epoch": 1.03, "eval_accuracy": 0.9734969325153374, "eval_f1": 0.10743801652892561, "eval_loss": 0.11648325622081757, "eval_precision": 0.9285714285714286, "eval_recall": 0.05701754385964912, "eval_runtime": 11.6629, "eval_samples_per_second": 13.976, "eval_steps_per_second": 0.943, "step": 400},
    {"epoch": 1.06, "learning_rate": 8.940568475452197e-05, "loss": 0.118, "step": 410},
    {"epoch": 1.09, "learning_rate": 8.914728682170543e-05, "loss": 0.12, "step": 420},
    {"epoch": 1.11, "learning_rate": 8.888888888888889e-05, "loss": 0.1223, "step": 430},
    {"epoch": 1.14, "learning_rate": 8.863049095607236e-05, "loss": 0.1154, "step": 440},
    {"epoch": 1.16, "learning_rate": 8.837209302325582e-05, "loss": 0.1124, "step": 450},
    {"epoch": 1.19, "learning_rate": 8.811369509043928e-05, "loss": 0.1095, "step": 460},
    {"epoch": 1.21, "learning_rate": 8.785529715762275e-05, "loss": 0.1159, "step": 470},
    {"epoch": 1.24, "learning_rate": 8.759689922480621e-05, "loss": 0.1229, "step": 480},
    {"epoch": 1.27, "learning_rate": 8.733850129198967e-05, "loss": 0.1185, "step": 490},
    {"epoch": 1.29, "learning_rate": 8.708010335917312e-05, "loss": 0.1139, "step": 500},
    {"epoch": 1.29, "eval_accuracy": 0.9734969325153374, "eval_f1": 0.10743801652892561, "eval_loss": 0.11325845867395401, "eval_precision": 0.9285714285714286, "eval_recall": 0.05701754385964912, "eval_runtime": 11.9509, "eval_samples_per_second": 13.639, "eval_steps_per_second": 0.92, "step": 500},
    {"epoch": 1.32, "learning_rate": 8.68217054263566e-05, "loss": 0.1167, "step": 510},
    {"epoch": 1.34, "learning_rate": 8.656330749354006e-05, "loss": 0.119, "step": 520},
    {"epoch": 1.37, "learning_rate": 8.630490956072352e-05, "loss": 0.1115, "step": 530},
    {"epoch": 1.4, "learning_rate": 8.604651162790697e-05, "loss": 0.1108, "step": 540},
    {"epoch": 1.42, "learning_rate": 8.578811369509044e-05, "loss": 0.1194, "step": 550},
    {"epoch": 1.45, "learning_rate": 8.552971576227391e-05, "loss": 0.1147, "step": 560},
    {"epoch": 1.47, "learning_rate": 8.527131782945736e-05, "loss": 0.1171, "step": 570},
    {"epoch": 1.5, "learning_rate": 8.501291989664083e-05, "loss": 0.1218, "step": 580},
    {"epoch": 1.52, "learning_rate": 8.475452196382429e-05, "loss": 0.1211, "step": 590},
    {"epoch": 1.55, "learning_rate": 8.449612403100775e-05, "loss": 0.1102, "step": 600},
    {"epoch": 1.55, "eval_accuracy": 0.9734969325153374, "eval_f1": 0.10743801652892561, "eval_loss": 0.10692530870437622, "eval_precision": 0.9285714285714286, "eval_recall": 0.05701754385964912, "eval_runtime": 11.9673, "eval_samples_per_second": 13.62, "eval_steps_per_second": 0.919, "step": 600},
    {"epoch": 1.58, "learning_rate": 8.423772609819122e-05, "loss": 0.1061, "step": 610},
    {"epoch": 1.6, "learning_rate": 8.397932816537468e-05, "loss": 0.1062, "step": 620},
    {"epoch": 1.63, "learning_rate": 8.372093023255814e-05, "loss": 0.1148, "step": 630},
    {"epoch": 1.65, "learning_rate": 8.34625322997416e-05, "loss": 0.1114, "step": 640},
    {"epoch": 1.68, "learning_rate": 8.320413436692507e-05, "loss": 0.1117, "step": 650},
    {"epoch": 1.71, "learning_rate": 8.294573643410853e-05, "loss": 0.109, "step": 660},
    {"epoch": 1.73, "learning_rate": 8.2687338501292e-05, "loss": 0.1042, "step": 670},
    {"epoch": 1.76, "learning_rate": 8.242894056847546e-05, "loss": 0.1153, "step": 680},
    {"epoch": 1.78, "learning_rate": 8.217054263565892e-05, "loss": 0.1026, "step": 690},
    {"epoch": 1.81, "learning_rate": 8.191214470284238e-05, "loss": 0.1102, "step": 700},
    {"epoch": 1.81, "eval_accuracy": 0.9734969325153374, "eval_f1": 0.10743801652892561, "eval_loss": 0.10341155529022217, "eval_precision": 0.9285714285714286, "eval_recall": 0.05701754385964912, "eval_runtime": 11.7838, "eval_samples_per_second": 13.833, "eval_steps_per_second": 0.933, "step": 700},
    {"epoch": 1.83, "learning_rate": 8.165374677002583e-05, "loss": 0.1002, "step": 710},
    {"epoch": 1.86, "learning_rate": 8.139534883720931e-05, "loss": 0.1159, "step": 720},
    {"epoch": 1.89, "learning_rate": 8.113695090439277e-05, "loss": 0.1098, "step": 730},
    {"epoch": 1.91, "learning_rate": 8.087855297157622e-05, "loss": 0.1057, "step": 740},
    {"epoch": 1.94, "learning_rate": 8.062015503875969e-05, "loss": 0.1024, "step": 750},
    {"epoch": 1.96, "learning_rate": 8.036175710594316e-05, "loss": 0.1013, "step": 760},
    {"epoch": 1.99, "learning_rate": 8.010335917312663e-05, "loss": 0.0976, "step": 770},
    {"epoch": 2.02, "learning_rate": 7.984496124031008e-05, "loss": 0.1043, "step": 780},
    {"epoch": 2.04, "learning_rate": 7.958656330749354e-05, "loss": 0.1005, "step": 790},
    {"epoch": 2.07, "learning_rate": 7.932816537467702e-05, "loss": 0.0986, "step": 800},
    {"epoch": 2.07, "eval_accuracy": 0.9738650306748466, "eval_f1": 0.13061224489795917, "eval_loss": 0.10085967928171158, "eval_precision": 0.9411764705882353, "eval_recall": 0.07017543859649122, "eval_runtime": 11.946, "eval_samples_per_second": 13.645, "eval_steps_per_second": 0.921, "step": 800},
    {"epoch": 2.09, "learning_rate": 7.906976744186047e-05, "loss": 0.0978, "step": 810},
    {"epoch": 2.12, "learning_rate": 7.881136950904393e-05, "loss": 0.0943, "step": 820},
    {"epoch": 2.14, "learning_rate": 7.855297157622739e-05, "loss": 0.0916, "step": 830},
    {"epoch": 2.17, "learning_rate": 7.829457364341086e-05, "loss": 0.1007, "step": 840},
    {"epoch": 2.2, "learning_rate": 7.803617571059432e-05, "loss": 0.1053, "step": 850},
    {"epoch": 2.22, "learning_rate": 7.777777777777778e-05, "loss": 0.0992, "step": 860},
    {"epoch": 2.25, "learning_rate": 7.751937984496124e-05, "loss": 0.1038, "step": 870},
    {"epoch": 2.27, "learning_rate": 7.726098191214471e-05, "loss": 0.0982, "step": 880},
    {"epoch": 2.3, "learning_rate": 7.700258397932817e-05, "loss": 0.1031, "step": 890},
    {"epoch": 2.33, "learning_rate": 7.674418604651163e-05, "loss": 0.0964, "step": 900},
    {"epoch": 2.33, "eval_accuracy": 0.9749693251533742, "eval_f1": 0.20930232558139536, "eval_loss": 0.09663821756839752, "eval_precision": 0.9, "eval_recall": 0.11842105263157894, "eval_runtime": 11.6402, "eval_samples_per_second": 14.003, "eval_steps_per_second": 0.945, "step": 900},
    {"epoch": 2.35, "learning_rate": 7.648578811369508e-05, "loss": 0.0991, "step": 910},
    {"epoch": 2.38, "learning_rate": 7.622739018087856e-05, "loss": 0.0971, "step": 920},
    {"epoch": 2.4, "learning_rate": 7.596899224806202e-05, "loss": 0.093, "step": 930},
    {"epoch": 2.43, "learning_rate": 7.571059431524549e-05, "loss": 0.0965, "step": 940},
    {"epoch": 2.45, "learning_rate": 7.545219638242894e-05, "loss": 0.0949, "step": 950},
    {"epoch": 2.48, "learning_rate": 7.519379844961241e-05, "loss": 0.0941, "step": 960},
    {"epoch": 2.51, "learning_rate": 7.493540051679588e-05, "loss": 0.0985, "step": 970},
    {"epoch": 2.53, "learning_rate": 7.467700258397933e-05, "loss": 0.0999, "step": 980},
    {"epoch": 2.56, "learning_rate": 7.441860465116279e-05, "loss": 0.1096, "step": 990},
    {"epoch": 2.58, "learning_rate": 7.416020671834627e-05, "loss": 0.0994, "step": 1000},
    {"epoch": 2.58, "eval_accuracy": 0.9750920245398773, "eval_f1": 0.20392156862745098, "eval_loss": 0.094399094581604, "eval_precision": 0.9629629629629629, "eval_recall": 0.11403508771929824, "eval_runtime": 11.5607, "eval_samples_per_second": 14.1, "eval_steps_per_second": 0.952, "step": 1000},
    {"epoch": 2.61, "learning_rate": 7.390180878552973e-05, "loss": 0.1052, "step": 1010},
    {"epoch": 2.64, "learning_rate": 7.364341085271318e-05, "loss": 0.0974, "step": 1020},
    {"epoch": 2.66, "learning_rate": 7.338501291989664e-05, "loss": 0.099, "step": 1030},
    {"epoch": 2.69, "learning_rate": 7.31266149870801e-05, "loss": 0.0999, "step": 1040},
    {"epoch": 2.71, "learning_rate": 7.286821705426357e-05, "loss": 0.1039, "step": 1050},
    {"epoch": 2.74, "learning_rate": 7.260981912144703e-05, "loss": 0.0935, "step": 1060},
    {"epoch": 2.76, "learning_rate": 7.23514211886305e-05, "loss": 0.1, "step": 1070},
    {"epoch": 2.79, "learning_rate": 7.209302325581396e-05, "loss": 0.0921, "step": 1080},
    {"epoch": 2.82, "learning_rate": 7.183462532299742e-05, "loss": 0.0941, "step": 1090},
    {"epoch": 2.84, "learning_rate": 7.157622739018088e-05, "loss": 0.0913, "step": 1100},
    {"epoch": 2.84, "eval_accuracy": 0.9757055214723926, "eval_f1": 0.23846153846153845, "eval_loss": 0.09476516395807266, "eval_precision": 0.96875, "eval_recall": 0.13596491228070176, "eval_runtime": 11.7524, "eval_samples_per_second": 13.87, "eval_steps_per_second": 0.936, "step": 1100},
    {"epoch": 2.87, "learning_rate": 7.131782945736435e-05, "loss": 0.0944, "step": 1110},
    {"epoch": 2.89, "learning_rate": 7.105943152454781e-05, "loss": 0.0946, "step": 1120},
    {"epoch": 2.92, "learning_rate": 7.080103359173127e-05, "loss": 0.0842, "step": 1130},
    {"epoch": 2.95, "learning_rate": 7.054263565891474e-05, "loss": 0.0896, "step": 1140},
    {"epoch": 2.97, "learning_rate": 7.028423772609819e-05, "loss": 0.0977, "step": 1150},
    {"epoch": 3.0, "learning_rate": 7.002583979328165e-05, "loss": 0.1078, "step": 1160},
    {"epoch": 3.02, "learning_rate": 6.976744186046513e-05, "loss": 0.0854, "step": 1170},
    {"epoch": 3.05, "learning_rate": 6.950904392764859e-05, "loss": 0.0872, "step": 1180},
    {"epoch": 3.07, "learning_rate": 6.925064599483204e-05, "loss": 0.1017, "step": 1190},
    {"epoch": 3.1, "learning_rate": 6.89922480620155e-05, "loss": 0.0964, "step": 1200},
    {"epoch": 3.1, "eval_accuracy": 0.9737423312883435, "eval_f1": 0.2014925373134328, "eval_loss": 0.09359467029571533, "eval_precision": 0.675, "eval_recall": 0.11842105263157894, "eval_runtime": 11.5484, "eval_samples_per_second": 14.115, "eval_steps_per_second": 0.953, "step": 1200},
    {"epoch": 3.13, "learning_rate": 6.873385012919898e-05, "loss": 0.092, "step": 1210},
    {"epoch": 3.15, "learning_rate": 6.847545219638243e-05, "loss": 0.0919, "step": 1220},
    {"epoch": 3.18, "learning_rate": 6.821705426356589e-05, "loss": 0.0872, "step": 1230},
    {"epoch": 3.2, "learning_rate": 6.795865633074935e-05, "loss": 0.0822, "step": 1240},
    {"epoch": 3.23, "learning_rate": 6.770025839793283e-05, "loss": 0.0931, "step": 1250},
    {"epoch": 3.26, "learning_rate": 6.744186046511628e-05, "loss": 0.0878, "step": 1260},
    {"epoch": 3.28, "learning_rate": 6.718346253229974e-05, "loss": 0.0896, "step": 1270},
    {"epoch": 3.31, "learning_rate": 6.69250645994832e-05, "loss": 0.0917, "step": 1280},
    {"epoch": 3.33, "learning_rate": 6.666666666666667e-05, "loss": 0.0949, "step": 1290},
    {"epoch": 3.36, "learning_rate": 6.640826873385013e-05, "loss": 0.1, "step": 1300},
    {"epoch": 3.36, "eval_accuracy": 0.9759509202453988, "eval_f1": 0.2740740740740741, "eval_loss": 0.08845529705286026, "eval_precision": 0.8809523809523809, "eval_recall": 0.16228070175438597, "eval_runtime": 11.5355, "eval_samples_per_second": 14.13, "eval_steps_per_second": 0.954, "step": 1300},
    {"epoch": 3.39, "learning_rate": 6.61498708010336e-05, "loss": 0.092, "step": 1310},
    {"epoch": 3.41, "learning_rate": 6.589147286821705e-05, "loss": 0.09, "step": 1320},
    {"epoch": 3.44, "learning_rate": 6.563307493540052e-05, "loss": 0.0844, "step": 1330},
    {"epoch": 3.46, "learning_rate": 6.537467700258399e-05, "loss": 0.087, "step": 1340},
    {"epoch": 3.49, "learning_rate": 6.511627906976745e-05, "loss": 0.0792, "step": 1350},
    {"epoch": 3.51, "learning_rate": 6.48578811369509e-05, "loss": 0.0868, "step": 1360},
    {"epoch": 3.54, "learning_rate": 6.459948320413438e-05, "loss": 0.0892, "step": 1370},
    {"epoch": 3.57, "learning_rate": 6.434108527131784e-05, "loss": 0.0792, "step": 1380},
    {"epoch": 3.59, "learning_rate": 6.408268733850129e-05, "loss": 0.0876, "step": 1390},
    {"epoch": 3.62, "learning_rate": 6.382428940568475e-05, "loss": 0.0982, "step": 1400},
    {"epoch": 3.62, "eval_accuracy": 0.9753374233128834, "eval_f1": 0.22988505747126434, "eval_loss": 0.09349281340837479, "eval_precision": 0.9090909090909091, "eval_recall": 0.13157894736842105, "eval_runtime": 11.6178, "eval_samples_per_second": 14.03, "eval_steps_per_second": 0.947, "step": 1400},
    {"epoch": 3.64, "learning_rate": 6.356589147286823e-05, "loss": 0.0905, "step": 1410},
    {"epoch": 3.67, "learning_rate": 6.330749354005169e-05, "loss": 0.0881, "step": 1420},
    {"epoch": 3.7, "learning_rate": 6.304909560723514e-05, "loss": 0.0849, "step": 1430},
    {"epoch": 3.72, "learning_rate": 6.27906976744186e-05, "loss": 0.0829, "step": 1440},
    {"epoch": 3.75, "learning_rate": 6.253229974160208e-05, "loss": 0.0992, "step": 1450},
    {"epoch": 3.77, "learning_rate": 6.227390180878553e-05, "loss": 0.0998, "step": 1460},
    {"epoch": 3.8, "learning_rate": 6.201550387596899e-05, "loss": 0.0966, "step": 1470},
    {"epoch": 3.82, "learning_rate": 6.175710594315246e-05, "loss": 0.0833, "step": 1480},
    {"epoch": 3.85, "learning_rate": 6.149870801033592e-05, "loss": 0.0914, "step": 1490},
    {"epoch": 3.88, "learning_rate": 6.124031007751938e-05, "loss": 0.0785, "step": 1500},
    {"epoch": 3.88, "eval_accuracy": 0.9759509202453988, "eval_f1": 0.2949640287769784, "eval_loss": 0.08881861716508865, "eval_precision": 0.82, "eval_recall": 0.17982456140350878, "eval_runtime": 11.8266, "eval_samples_per_second": 13.783, "eval_steps_per_second": 0.93, "step": 1500},
    {"epoch": 3.9, "learning_rate": 6.0981912144702846e-05, "loss": 0.0993, "step": 1510},
    {"epoch": 3.93, "learning_rate": 6.072351421188631e-05, "loss": 0.0951, "step": 1520},
    {"epoch": 3.95, "learning_rate": 6.0465116279069765e-05, "loss": 0.0937, "step": 1530},
    {"epoch": 3.98, "learning_rate": 6.0206718346253235e-05, "loss": 0.089, "step": 1540},
    {"epoch": 4.01, "learning_rate": 5.99483204134367e-05, "loss": 0.0891, "step": 1550},
    {"epoch": 4.03, "learning_rate": 5.9689922480620155e-05, "loss": 0.0856, "step": 1560},
    {"epoch": 4.06, "learning_rate": 5.943152454780362e-05, "loss": 0.0832, "step": 1570},
    {"epoch": 4.08, "learning_rate": 5.917312661498709e-05, "loss": 0.088, "step": 1580},
    {"epoch": 4.11, "learning_rate": 5.891472868217055e-05, "loss": 0.0829, "step": 1590},
    {"epoch": 4.13, "learning_rate": 5.865633074935401e-05, "loss": 0.0859, "step": 1600},
    {"epoch": 4.13, "eval_accuracy": 0.9759509202453988, "eval_f1": 0.3, "eval_loss": 0.08691960573196411, "eval_precision": 0.8076923076923077, "eval_recall": 0.18421052631578946, "eval_runtime": 11.8328, "eval_samples_per_second": 13.775, "eval_steps_per_second": 0.93, "step": 1600},
    {"epoch": 4.16, "learning_rate": 5.839793281653747e-05, "loss": 0.0927, "step": 1610},
    {"epoch": 4.19, "learning_rate": 5.8139534883720933e-05, "loss": 0.0904, "step": 1620},
    {"epoch": 4.21, "learning_rate": 5.788113695090439e-05, "loss": 0.0891, "step": 1630},
    {"epoch": 4.24, "learning_rate": 5.762273901808786e-05, "loss": 0.0819, "step": 1640},
    {"epoch": 4.26, "learning_rate": 5.736434108527132e-05, "loss": 0.0868, "step": 1650},
    {"epoch": 4.29, "learning_rate": 5.7105943152454786e-05, "loss": 0.0909, "step": 1660},
    {"epoch": 4.32, "learning_rate": 5.684754521963824e-05, "loss": 0.0797, "step": 1670},
    {"epoch": 4.34, "learning_rate": 5.6589147286821706e-05, "loss": 0.0863, "step": 1680},
    {"epoch": 4.37, "learning_rate": 5.6330749354005176e-05, "loss": 0.0849, "step": 1690},
    {"epoch": 4.39, "learning_rate": 5.607235142118863e-05, "loss": 0.0794, "step": 1700},
    {"epoch": 4.39, "eval_accuracy": 0.976319018404908, "eval_f1": 0.3228070175438596, "eval_loss": 0.08520764857530594, "eval_precision": 0.8070175438596491, "eval_recall": 0.20175438596491227, "eval_runtime": 11.7131, "eval_samples_per_second": 13.916, "eval_steps_per_second": 0.939, "step": 1700},
    {"epoch": 4.42, "learning_rate": 5.5813953488372095e-05, "loss": 0.0857, "step": 1710},
    {"epoch": 4.44, "learning_rate": 5.555555555555556e-05, "loss": 0.0876, "step": 1720},
    {"epoch": 4.47, "learning_rate": 5.5297157622739015e-05, "loss": 0.0807, "step": 1730},
    {"epoch": 4.5, "learning_rate": 5.503875968992248e-05, "loss": 0.0903, "step": 1740},
    {"epoch": 4.52, "learning_rate": 5.478036175710595e-05, "loss": 0.0804, "step": 1750},
    {"epoch": 4.55, "learning_rate": 5.452196382428941e-05, "loss": 0.0921, "step": 1760},
    {"epoch": 4.57, "learning_rate": 5.426356589147287e-05, "loss": 0.0734, "step": 1770},
    {"epoch": 4.6, "learning_rate": 5.400516795865633e-05, "loss": 0.0778, "step": 1780},
    {"epoch": 4.63, "learning_rate": 5.37467700258398e-05, "loss": 0.0833, "step": 1790},
    {"epoch": 4.65, "learning_rate": 5.348837209302326e-05, "loss": 0.0835, "step": 1800},
    {"epoch": 4.65, "eval_accuracy": 0.9773006134969325, "eval_f1": 0.37288135593220334, "eval_loss": 0.08421062678098679, "eval_precision": 0.8208955223880597, "eval_recall": 0.2412280701754386, "eval_runtime": 11.9185, "eval_samples_per_second": 13.676, "eval_steps_per_second": 0.923, "step": 1800},
    {"epoch": 4.68, "learning_rate": 5.322997416020672e-05, "loss": 0.0774, "step": 1810},
    {"epoch": 4.7, "learning_rate": 5.297157622739018e-05, "loss": 0.0829, "step": 1820},
    {"epoch": 4.73, "learning_rate": 5.271317829457365e-05, "loss": 0.0821, "step": 1830},
    {"epoch": 4.75, "learning_rate": 5.24547803617571e-05, "loss": 0.0827, "step": 1840},
    {"epoch": 4.78, "learning_rate": 5.219638242894057e-05, "loss": 0.0836, "step": 1850},
    {"epoch": 4.81, "learning_rate": 5.1937984496124036e-05, "loss": 0.0767, "step": 1860},
    {"epoch": 4.83, "learning_rate": 5.167958656330749e-05, "loss": 0.0769, "step": 1870},
    {"epoch": 4.86, "learning_rate": 5.1421188630490955e-05, "loss": 0.0823, "step": 1880},
    {"epoch": 4.88, "learning_rate": 5.1162790697674425e-05, "loss": 0.0833, "step": 1890},
    {"epoch": 4.91, "learning_rate": 5.090439276485789e-05, "loss": 0.0831, "step": 1900},
    {"epoch": 4.91, "eval_accuracy": 0.9775460122699386, "eval_f1": 0.37542662116040953, "eval_loss": 0.0831713080406189, "eval_precision": 0.8461538461538461, "eval_recall": 0.2412280701754386, "eval_runtime": 11.6419, "eval_samples_per_second": 14.001, "eval_steps_per_second": 0.945, "step": 1900},
    {"epoch": 4.94, "learning_rate": 5.0645994832041345e-05, "loss": 0.0885, "step": 1910},
    {"epoch": 4.96, "learning_rate": 5.038759689922481e-05, "loss": 0.0838, "step": 1920},
    {"epoch": 4.99, "learning_rate": 5.012919896640828e-05, "loss": 0.0797, "step": 1930},
    {"epoch": 5.01, "learning_rate": 4.9870801033591734e-05, "loss": 0.0734, "step": 1940},
    {"epoch": 5.04, "learning_rate": 4.96124031007752e-05, "loss": 0.075, "step": 1950},
    {"epoch": 5.06, "learning_rate": 4.935400516795866e-05, "loss": 0.0721, "step": 1960},
    {"epoch": 5.09, "learning_rate": 4.9095607235142123e-05, "loss": 0.0746, "step": 1970},
    {"epoch": 5.12, "learning_rate": 4.883720930232558e-05, "loss": 0.0678, "step": 1980},
    {"epoch": 5.14, "learning_rate": 4.857881136950905e-05, "loss": 0.077, "step": 1990},
    {"epoch": 5.17, "learning_rate": 4.8320413436692506e-05, "loss": 0.0763, "step": 2000},
    {"epoch": 5.17, "eval_accuracy": 0.9771779141104294, "eval_f1": 0.3673469387755102, "eval_loss": 0.08154629170894623, "eval_precision": 0.8181818181818182, "eval_recall": 0.23684210526315788, "eval_runtime": 11.9474, "eval_samples_per_second": 13.643, "eval_steps_per_second": 0.921, "step": 2000},
    {"epoch": 5.19, "learning_rate": 4.8062015503875976e-05, "loss": 0.0803, "step": 2010},
    {"epoch": 5.22, "learning_rate": 4.780361757105943e-05, "loss": 0.086, "step": 2020},
    {"epoch": 5.25, "learning_rate": 4.7545219638242896e-05, "loss": 0.0766, "step": 2030},
    {"epoch": 5.27, "learning_rate": 4.728682170542636e-05, "loss": 0.0832, "step": 2040},
    {"epoch": 5.3, "learning_rate": 4.702842377260982e-05, "loss": 0.0817, "step": 2050},
    {"epoch": 5.32, "learning_rate": 4.6770025839793285e-05, "loss": 0.0825, "step": 2060},
    {"epoch": 5.35, "learning_rate": 4.651162790697675e-05, "loss": 0.0751, "step": 2070},
    {"epoch": 5.37, "learning_rate": 4.625322997416021e-05, "loss": 0.0719, "step": 2080},
    {"epoch": 5.4, "learning_rate": 4.5994832041343674e-05, "loss": 0.0826, "step": 2090},
    {"epoch": 5.43, "learning_rate": 4.573643410852713e-05, "loss": 0.0784, "step": 2100},
    {"epoch": 5.43, "eval_accuracy": 0.9773006134969325, "eval_f1": 0.3642611683848797, "eval_loss": 0.08145377784967422, "eval_precision": 0.8412698412698413, "eval_recall": 0.2324561403508772, "eval_runtime": 11.7404, "eval_samples_per_second": 13.884, "eval_steps_per_second": 0.937, "step": 2100},
    {"epoch": 5.45, "learning_rate": 4.54780361757106e-05, "loss": 0.0888, "step": 2110},
    {"epoch": 5.48, "learning_rate": 4.521963824289406e-05, "loss": 0.0743, "step": 2120},
    {"epoch": 5.5, "learning_rate": 4.496124031007753e-05, "loss": 0.0678, "step": 2130},
    {"epoch": 5.53, "learning_rate": 4.4702842377260983e-05, "loss": 0.0737, "step": 2140},
    {"epoch": 5.56, "learning_rate": 4.4444444444444447e-05, "loss": 0.0747, "step": 2150},
    {"epoch": 5.58, "learning_rate": 4.418604651162791e-05, "loss": 0.0735, "step": 2160},
    {"epoch": 5.61, "learning_rate": 4.392764857881137e-05, "loss": 0.075, "step": 2170},
    {"epoch": 5.63, "learning_rate": 4.3669250645994836e-05, "loss": 0.0726, "step": 2180},
    {"epoch": 5.66, "learning_rate": 4.34108527131783e-05, "loss": 0.0701, "step": 2190},
    {"epoch": 5.68, "learning_rate": 4.315245478036176e-05, "loss": 0.073, "step": 2200},
    {"epoch": 5.68, "eval_accuracy": 0.9769325153374233, "eval_f1": 0.38562091503267976, "eval_loss": 0.08116535097360611, "eval_precision": 0.7564102564102564, "eval_recall": 0.25877192982456143, "eval_runtime": 11.6668, "eval_samples_per_second": 13.971, "eval_steps_per_second": 0.943, "step": 2200},
    {"epoch": 5.71, "learning_rate": 4.289405684754522e-05, "loss": 0.0725, "step": 2210},
    {"epoch": 5.74, "learning_rate": 4.263565891472868e-05, "loss": 0.087, "step": 2220},
    {"epoch": 5.76, "learning_rate": 4.2377260981912145e-05, "loss": 0.0849, "step": 2230},
    {"epoch": 5.79, "learning_rate": 4.211886304909561e-05, "loss": 0.0696, "step": 2240},
    {"epoch": 5.81, "learning_rate": 4.186046511627907e-05, "loss": 0.0848, "step": 2250},
    {"epoch": 5.84, "learning_rate": 4.1602067183462534e-05, "loss": 0.0798, "step": 2260},
    {"epoch": 5.87, "learning_rate": 4.1343669250646e-05, "loss": 0.0751, "step": 2270},
    {"epoch": 5.89, "learning_rate": 4.108527131782946e-05, "loss": 0.0773, "step": 2280},
    {"epoch": 5.92, "learning_rate": 4.082687338501292e-05, "loss": 0.0848, "step": 2290},
    {"epoch": 5.94, "learning_rate": 4.056847545219639e-05, "loss": 0.0801, "step": 2300},
    {"epoch": 5.94, "eval_accuracy": 0.9776687116564418, "eval_f1": 0.3591549295774648, "eval_loss": 0.07930972427129745, "eval_precision": 0.9107142857142857, "eval_recall": 0.2236842105263158, "eval_runtime": 11.5705, "eval_samples_per_second": 14.088, "eval_steps_per_second": 0.951, "step": 2300},
    {"epoch": 5.97, "learning_rate": 4.0310077519379843e-05, "loss": 0.0853, "step": 2310},
    {"epoch": 5.99, "learning_rate": 4.005167958656331e-05, "loss": 0.0789, "step": 2320},
    {"epoch": 6.02, "learning_rate": 3.979328165374677e-05, "loss": 0.0724, "step": 2330},
    {"epoch": 6.05, "learning_rate": 3.953488372093023e-05, "loss": 0.0754, "step": 2340},
    {"epoch": 6.07, "learning_rate": 3.9276485788113696e-05, "loss": 0.0769, "step": 2350},
    {"epoch": 6.1, "learning_rate": 3.901808785529716e-05, "loss": 0.0665, "step": 2360},
    {"epoch": 6.12, "learning_rate": 3.875968992248062e-05, "loss": 0.067, "step": 2370},
    {"epoch": 6.15, "learning_rate": 3.8501291989664085e-05, "loss": 0.0682, "step": 2380},
    {"epoch": 6.18, "learning_rate": 3.824289405684754e-05, "loss": 0.0741, "step": 2390},
    {"epoch": 6.2, "learning_rate": 3.798449612403101e-05, "loss": 0.077, "step": 2400},
    {"epoch": 6.2, "eval_accuracy": 0.9774233128834355, "eval_f1": 0.3825503355704698, "eval_loss": 0.07888315618038177, "eval_precision": 0.8142857142857143, "eval_recall": 0.25, "eval_runtime": 11.8196, "eval_samples_per_second": 13.791, "eval_steps_per_second": 0.931, "step": 2400},
    {"epoch": 6.23, "learning_rate": 3.772609819121447e-05, "loss": 0.0685, "step": 2410},
    {"epoch": 6.25, "learning_rate": 3.746770025839794e-05, "loss": 0.0752, "step": 2420},
    {"epoch": 6.28, "learning_rate": 3.7209302325581394e-05, "loss": 0.0704, "step": 2430},
    {"epoch": 6.3, "learning_rate": 3.6950904392764864e-05, "loss": 0.0755, "step": 2440},
    {"epoch": 6.33, "learning_rate": 3.669250645994832e-05, "loss": 0.0726, "step": 2450},
    {"epoch": 6.36, "learning_rate": 3.6434108527131784e-05, "loss": 0.0622, "step": 2460},
    {"epoch": 6.38, "learning_rate": 3.617571059431525e-05, "loss": 0.0742, "step": 2470},
    {"epoch": 6.41, "learning_rate": 3.591731266149871e-05, "loss": 0.0813, "step": 2480},
    {"epoch": 6.43, "learning_rate": 3.565891472868217e-05, "loss": 0.0724, "step": 2490},
    {"epoch": 6.46, "learning_rate": 3.5400516795865637e-05, "loss": 0.0749, "step": 2500},
    {"epoch": 6.46, "eval_accuracy": 0.9779141104294479, "eval_f1": 0.41558441558441556, "eval_loss": 0.077651746571064, "eval_precision": 0.8, "eval_recall": 0.2807017543859649, "eval_runtime": 12.0214, "eval_samples_per_second": 13.559, "eval_steps_per_second": 0.915, "step": 2500},
    {"epoch": 6.49, "learning_rate": 3.514211886304909e-05, "loss": 0.0667, "step": 2510},
    {"epoch": 6.51, "learning_rate": 3.488372093023256e-05, "loss": 0.0646, "step": 2520},
    {"epoch": 6.54, "learning_rate": 3.462532299741602e-05, "loss": 0.0744, "step": 2530},
    {"epoch": 6.56, "learning_rate": 3.436692506459949e-05, "loss": 0.0815, "step": 2540},
    {"epoch": 6.59, "learning_rate": 3.4108527131782945e-05, "loss": 0.0629, "step": 2550},
    {"epoch": 6.61, "learning_rate": 3.3850129198966415e-05, "loss": 0.0729, "step": 2560},
    {"epoch": 6.64, "learning_rate": 3.359173126614987e-05, "loss": 0.0874, "step": 2570},
    {"epoch": 6.67, "learning_rate": 3.3333333333333335e-05, "loss": 0.0725, "step": 2580},
    {"epoch": 6.69, "learning_rate": 3.30749354005168e-05, "loss": 0.0693, "step": 2590},
    {"epoch": 6.72, "learning_rate": 3.281653746770026e-05, "loss": 0.0704, "step": 2600},
    {"epoch": 6.72, "eval_accuracy": 0.9786503067484662, "eval_f1": 0.423841059602649, "eval_loss": 0.07763181626796722, "eval_precision": 0.8648648648648649, "eval_recall": 0.2807017543859649, "eval_runtime": 11.5898, "eval_samples_per_second": 14.064, "eval_steps_per_second": 0.949, "step": 2600},
    {"epoch": 6.74, "learning_rate": 3.2558139534883724e-05, "loss": 0.0812, "step": 2610},
    {"epoch": 6.77, "learning_rate": 3.229974160206719e-05, "loss": 0.0693, "step": 2620},
    {"epoch": 6.8, "learning_rate": 3.2041343669250644e-05, "loss": 0.0687, "step": 2630},
    {"epoch": 6.82, "learning_rate": 3.1782945736434114e-05, "loss": 0.0698, "step": 2640},
    {"epoch": 6.85, "learning_rate": 3.152454780361757e-05, "loss": 0.0688, "step": 2650},
    {"epoch": 6.87, "learning_rate": 3.126614987080104e-05, "loss": 0.0742, "step": 2660},
    {"epoch": 6.9, "learning_rate": 3.1007751937984497e-05, "loss": 0.0773, "step": 2670},
    {"epoch": 6.93, "learning_rate": 3.074935400516796e-05, "loss": 0.0714, "step": 2680},
    {"epoch": 6.95, "learning_rate": 3.0490956072351423e-05, "loss": 0.0678, "step": 2690},
    {"epoch": 6.98, "learning_rate": 3.0232558139534883e-05, "loss": 0.0718, "step": 2700},
    {"epoch": 6.98, "eval_accuracy": 0.9776687116564418, "eval_f1": 0.389261744966443, "eval_loss": 0.07732577621936798, "eval_precision": 0.8285714285714286, "eval_recall": 0.2543859649122807, "eval_runtime": 11.7112, "eval_samples_per_second": 13.918, "eval_steps_per_second": 0.939, "step": 2700},
    {"epoch": 7.0, "learning_rate": 2.997416020671835e-05, "loss": 0.0723, "step": 2710},
    {"epoch": 7.03, "learning_rate": 2.971576227390181e-05, "loss": 0.0669, "step": 2720},
    {"epoch": 7.05, "learning_rate": 2.9457364341085275e-05, "loss": 0.0705, "step": 2730},
    {"epoch": 7.08, "learning_rate": 2.9198966408268735e-05, "loss": 0.0697, "step": 2740},
    {"epoch": 7.11, "learning_rate": 2.8940568475452195e-05, "loss": 0.0723, "step": 2750},
    {"epoch": 7.13, "learning_rate": 2.868217054263566e-05, "loss": 0.0693, "step": 2760},
    {"epoch": 7.16, "learning_rate": 2.842377260981912e-05, "loss": 0.0653, "step": 2770},
    {"epoch": 7.18, "learning_rate": 2.8165374677002588e-05, "loss": 0.0651, "step": 2780},
    {"epoch": 7.21, "learning_rate": 2.7906976744186048e-05, "loss": 0.0714, "step": 2790},
    {"epoch": 7.24, "learning_rate": 2.7648578811369507e-05, "loss": 0.0624, "step": 2800},
    {"epoch": 7.24, "eval_accuracy": 0.978159509202454, "eval_f1": 0.3945578231292517, "eval_loss": 0.0775144025683403, "eval_precision": 0.8787878787878788, "eval_recall": 0.2543859649122807, "eval_runtime": 11.801, "eval_samples_per_second": 13.812, "eval_steps_per_second": 0.932, "step": 2800},
    {"epoch": 7.26, "learning_rate": 2.7390180878552974e-05, "loss": 0.0596, "step": 2810},
    {"epoch": 7.29, "learning_rate": 2.7131782945736434e-05, "loss": 0.0712, "step": 2820},
    {"epoch": 7.31, "learning_rate": 2.68733850129199e-05, "loss": 0.0605, "step": 2830},
    {"epoch": 7.34, "learning_rate": 2.661498708010336e-05, "loss": 0.0665, "step": 2840},
    {"epoch": 7.36, "learning_rate": 2.6356589147286826e-05, "loss": 0.0651, "step": 2850},
    {"epoch": 7.39, "learning_rate": 2.6098191214470286e-05, "loss": 0.0677, "step": 2860},
    {"epoch": 7.42, "learning_rate": 2.5839793281653746e-05, "loss": 0.0715, "step": 2870},
    {"epoch": 7.44, "learning_rate": 2.5581395348837212e-05, "loss": 0.0691, "step": 2880},
    {"epoch": 7.47, "learning_rate": 2.5322997416020672e-05, "loss": 0.0689, "step": 2890},
    {"epoch": 7.49, "learning_rate": 2.506459948320414e-05, "loss": 0.0718, "step": 2900},
    {"epoch": 7.49, "eval_accuracy": 0.9780368098159509, "eval_f1": 0.4013377926421405, "eval_loss": 0.07592039555311203, "eval_precision": 0.8450704225352113, "eval_recall": 0.2631578947368421, "eval_runtime": 11.7263, "eval_samples_per_second": 13.9, "eval_steps_per_second": 0.938, "step": 2900},
    {"epoch": 7.52, "learning_rate": 2.48062015503876e-05, "loss": 0.0668, "step": 2910},
    {"epoch": 7.55, "learning_rate": 2.4547803617571062e-05, "loss": 0.0664, "step": 2920},
    {"epoch": 7.57, "learning_rate": 2.4289405684754525e-05, "loss": 0.0645, "step": 2930},
    {"epoch": 7.6, "learning_rate": 2.4031007751937988e-05, "loss": 0.0763, "step": 2940},
    {"epoch": 7.62, "learning_rate": 2.3772609819121448e-05, "loss": 0.0677, "step": 2950},
    {"epoch": 7.65, "learning_rate": 2.351421188630491e-05, "loss": 0.0679, "step": 2960},
    {"epoch": 7.67, "learning_rate": 2.3255813953488374e-05, "loss": 0.0722, "step": 2970},
    {"epoch": 7.7, "learning_rate": 2.2997416020671837e-05, "loss": 0.0655, "step": 2980},
    {"epoch": 7.73, "learning_rate": 2.27390180878553e-05, "loss": 0.0702, "step": 2990},
    {"epoch": 7.75, "learning_rate": 2.2480620155038764e-05, "loss": 0.0671, "step": 3000},
    {"epoch": 7.75, "eval_accuracy": 0.9787730061349693, "eval_f1": 0.4437299035369775, "eval_loss": 0.07497038692235947, "eval_precision": 0.8313253012048193, "eval_recall": 0.3026315789473684, "eval_runtime": 11.8994, "eval_samples_per_second": 13.698, "eval_steps_per_second": 0.924, "step": 3000},
    {"epoch": 7.78, "learning_rate": 2.2222222222222223e-05, "loss": 0.0681, "step": 3010},
    {"epoch": 7.8, "learning_rate": 2.1963824289405686e-05, "loss": 0.0687, "step": 3020},
    {"epoch": 7.83, "learning_rate": 2.170542635658915e-05, "loss": 0.07, "step": 3030},
    {"epoch": 7.86, "learning_rate": 2.144702842377261e-05, "loss": 0.0702, "step": 3040},
    {"epoch": 7.88, "learning_rate": 2.1188630490956073e-05, "loss": 0.0717, "step": 3050},
    {"epoch": 7.91, "learning_rate": 2.0930232558139536e-05, "loss": 0.0646, "step": 3060},
    {"epoch": 7.93, "learning_rate": 2.0671834625323e-05, "loss": 0.0706, "step": 3070},
    {"epoch": 7.96, "learning_rate": 2.041343669250646e-05, "loss": 0.0649, "step": 3080},
    {"epoch": 7.98, "learning_rate": 2.0155038759689922e-05, "loss": 0.0663, "step": 3090},
    {"epoch": 8.01, "learning_rate": 1.9896640826873385e-05, "loss": 0.0554, "step": 3100},
    {"epoch": 8.01, "eval_accuracy": 0.9790184049079754, "eval_f1": 0.45714285714285713, "eval_loss": 0.07580507546663284, "eval_precision": 0.8275862068965517, "eval_recall": 0.3157894736842105, "eval_runtime": 11.6291, "eval_samples_per_second": 14.017, "eval_steps_per_second": 0.946, "step": 3100},
    {"epoch": 8.04, "learning_rate": 1.9638242894056848e-05, "loss": 0.0562, "step": 3110},
    {"epoch": 8.06, "learning_rate": 1.937984496124031e-05, "loss": 0.061, "step": 3120},
    {"epoch": 8.09, "learning_rate": 1.912144702842377e-05, "loss": 0.0642, "step": 3130},
    {"epoch": 8.11, "learning_rate": 1.8863049095607234e-05, "loss": 0.0581, "step": 3140},
    {"epoch": 8.14, "learning_rate": 1.8604651162790697e-05, "loss": 0.0611, "step": 3150},
    {"epoch": 8.17, "learning_rate": 1.834625322997416e-05, "loss": 0.0683, "step": 3160},
    {"epoch": 8.19, "learning_rate": 1.8087855297157624e-05, "loss": 0.0635, "step": 3170},
    {"epoch": 8.22, "learning_rate": 1.7829457364341087e-05, "loss": 0.0617, "step": 3180},
    {"epoch": 8.24, "learning_rate": 1.7571059431524546e-05, "loss": 0.0656, "step": 3190},
    {"epoch": 8.27, "learning_rate": 1.731266149870801e-05, "loss": 0.0533, "step": 3200},
    {"epoch": 8.27, "eval_accuracy": 0.9787730061349693, "eval_f1": 0.4507936507936508, "eval_loss": 0.07549890875816345, "eval_precision": 0.8160919540229885, "eval_recall": 0.31140350877192985, "eval_runtime": 11.6344, "eval_samples_per_second": 14.01, "eval_steps_per_second": 0.945, "step": 3200},
    {"epoch": 8.29, "learning_rate": 1.7054263565891473e-05, "loss": 0.061, "step": 3210},
    {"epoch": 8.32, "learning_rate": 1.6795865633074936e-05, "loss": 0.0727, "step": 3220},
    {"epoch": 8.35, "learning_rate": 1.65374677002584e-05, "loss": 0.057, "step": 3230},
    {"epoch": 8.37, "learning_rate": 1.6279069767441862e-05, "loss": 0.0692, "step": 3240},
    {"epoch": 8.4, "learning_rate": 1.6020671834625322e-05, "loss": 0.0709, "step": 3250},
    {"epoch": 8.42, "learning_rate": 1.5762273901808785e-05, "loss": 0.055, "step": 3260},
    {"epoch": 8.45, "learning_rate": 1.5503875968992248e-05, "loss": 0.0626, "step": 3270},
    {"epoch": 8.48, "learning_rate": 1.5245478036175711e-05, "loss": 0.0696, "step": 3280},
    {"epoch": 8.5, "learning_rate": 1.4987080103359175e-05, "loss": 0.063, "step": 3290},
    {"epoch": 8.53, "learning_rate": 1.4728682170542638e-05, "loss": 0.063, "step": 3300},
    {"epoch": 8.53, "eval_accuracy": 0.9790184049079754, "eval_f1": 0.4605678233438486, "eval_loss": 0.07512389868497849, "eval_precision": 0.8202247191011236, "eval_recall": 0.3201754385964912, "eval_runtime": 11.9167, "eval_samples_per_second": 13.678, "eval_steps_per_second": 0.923, "step": 3300},
    {"epoch": 8.55, "learning_rate": 1.4470284237726097e-05, "loss": 0.0664, "step": 3310},
    {"epoch": 8.58, "learning_rate": 1.421188630490956e-05, "loss": 0.0717, "step": 3320},
    {"epoch": 8.6, "learning_rate": 1.3953488372093024e-05, "loss": 0.0632, "step": 3330},
    {"epoch": 8.63, "learning_rate": 1.3695090439276487e-05, "loss": 0.0645, "step": 3340},
    {"epoch": 8.66, "learning_rate": 1.343669250645995e-05, "loss": 0.062, "step": 3350},
    {"epoch": 8.68, "learning_rate": 1.3178294573643413e-05, "loss": 0.0628, "step": 3360},
    {"epoch": 8.71, "learning_rate": 1.2919896640826873e-05, "loss": 0.0647, "step": 3370},
    {"epoch": 8.73, "learning_rate": 1.2661498708010336e-05, "loss": 0.0675, "step": 3380},
    {"epoch": 8.76, "learning_rate": 1.24031007751938e-05, "loss": 0.0563, "step": 3390},
    {"epoch": 8.79, "learning_rate": 1.2144702842377262e-05, "loss": 0.0605, "step": 3400},
    {"epoch": 8.79, "eval_accuracy": 0.9790184049079754, "eval_f1": 0.4605678233438486, "eval_loss": 0.07484734058380127, "eval_precision": 0.8202247191011236, "eval_recall": 0.3201754385964912, "eval_runtime": 12.0314, "eval_samples_per_second": 13.548, "eval_steps_per_second": 0.914, "step": 3400},
    {"epoch": 8.81, "learning_rate": 1.1886304909560724e-05, "loss": 0.0623, "step": 3410},
    {"epoch": 8.84, "learning_rate": 1.1627906976744187e-05, "loss": 0.0666, "step": 3420},
    {"epoch": 8.86, "learning_rate": 1.136950904392765e-05, "loss": 0.0601, "step": 3430},
    {"epoch": 8.89, "learning_rate": 1.1111111111111112e-05, "loss": 0.0758, "step": 3440},
    {"epoch": 8.91, "learning_rate": 1.0852713178294575e-05, "loss": 0.0687, "step": 3450},
    {"epoch": 8.94, "learning_rate": 1.0594315245478036e-05, "loss": 0.0569, "step": 3460},
    {"epoch": 8.97, "learning_rate": 1.03359173126615e-05, "loss": 0.0714, "step": 3470},
    {"epoch": 8.99, "learning_rate": 1.0077519379844961e-05, "loss": 0.0742, "step": 3480},
    {"epoch": 9.02, "learning_rate": 9.819121447028424e-06, "loss": 0.0665, "step": 3490},
    {"epoch": 9.04, "learning_rate": 9.560723514211885e-06, "loss": 0.0592, "step": 3500},
    {"epoch": 9.04, "eval_accuracy": 0.9787730061349693, "eval_f1": 0.4327868852459017, "eval_loss": 0.07429111003875732, "eval_precision": 0.8571428571428571, "eval_recall": 0.2894736842105263, "eval_runtime": 11.4554, "eval_samples_per_second": 14.229, "eval_steps_per_second": 0.96, "step": 3500}
  ],
  "max_steps": 3870,
  "num_train_epochs": 10,
  "total_flos": 1.1850942195223757e+17,
  "trial_name": null,
  "trial_params": null
}