{ |
|
"best_metric": 0.9001329288116611, |
|
"best_model_checkpoint": "../save/jtrans-malware-3f-100c/checkpoint-4000", |
|
"epoch": 9.029345372460497, |
|
"global_step": 4000, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.02, |
|
"learning_rate": 9.97742663656885e-05, |
|
"loss": 0.4773, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.05, |
|
"learning_rate": 9.954853273137697e-05, |
|
"loss": 0.2494, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.07, |
|
"learning_rate": 9.932279909706546e-05, |
|
"loss": 0.1534, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.09, |
|
"learning_rate": 9.909706546275395e-05, |
|
"loss": 0.11, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.11, |
|
"learning_rate": 9.887133182844243e-05, |
|
"loss": 0.0966, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.14, |
|
"learning_rate": 9.864559819413092e-05, |
|
"loss": 0.0899, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.16, |
|
"learning_rate": 9.841986455981941e-05, |
|
"loss": 0.0873, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.18, |
|
"learning_rate": 9.81941309255079e-05, |
|
"loss": 0.0868, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.2, |
|
"learning_rate": 9.79683972911964e-05, |
|
"loss": 0.0829, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"learning_rate": 9.774266365688489e-05, |
|
"loss": 0.087, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.23, |
|
"eval_accuracy": 0.9824731182795698, |
|
"eval_f1": 0.0, |
|
"eval_loss": 0.08580606430768967, |
|
"eval_precision": 0.0, |
|
"eval_recall": 0.0, |
|
"eval_roc_auc_score": 0.6281820495242495, |
|
"eval_runtime": 2.8582, |
|
"eval_samples_per_second": 65.075, |
|
"eval_steps_per_second": 4.198, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.25, |
|
"learning_rate": 9.751693002257338e-05, |
|
"loss": 0.0794, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.27, |
|
"learning_rate": 9.729119638826185e-05, |
|
"loss": 0.0855, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.29, |
|
"learning_rate": 9.706546275395035e-05, |
|
"loss": 0.0887, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.32, |
|
"learning_rate": 9.683972911963884e-05, |
|
"loss": 0.0833, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.34, |
|
"learning_rate": 9.661399548532731e-05, |
|
"loss": 0.086, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.36, |
|
"learning_rate": 9.63882618510158e-05, |
|
"loss": 0.0794, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.38, |
|
"learning_rate": 9.61625282167043e-05, |
|
"loss": 0.083, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.41, |
|
"learning_rate": 9.593679458239279e-05, |
|
"loss": 0.082, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.43, |
|
"learning_rate": 9.571106094808126e-05, |
|
"loss": 0.0815, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"learning_rate": 9.548532731376975e-05, |
|
"loss": 0.0842, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.45, |
|
"eval_accuracy": 0.9824731182795698, |
|
"eval_f1": 0.0, |
|
"eval_loss": 0.08476322144269943, |
|
"eval_precision": 0.0, |
|
"eval_recall": 0.0, |
|
"eval_roc_auc_score": 0.651179875393717, |
|
"eval_runtime": 2.8338, |
|
"eval_samples_per_second": 65.637, |
|
"eval_steps_per_second": 4.235, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.47, |
|
"learning_rate": 9.525959367945825e-05, |
|
"loss": 0.0811, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"learning_rate": 9.503386004514672e-05, |
|
"loss": 0.0839, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 0.52, |
|
"learning_rate": 9.480812641083521e-05, |
|
"loss": 0.0919, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 0.54, |
|
"learning_rate": 9.45823927765237e-05, |
|
"loss": 0.0815, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 0.56, |
|
"learning_rate": 9.43566591422122e-05, |
|
"loss": 0.0858, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 0.59, |
|
"learning_rate": 9.413092550790069e-05, |
|
"loss": 0.0814, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 0.61, |
|
"learning_rate": 9.390519187358918e-05, |
|
"loss": 0.0822, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 0.63, |
|
"learning_rate": 9.367945823927767e-05, |
|
"loss": 0.0845, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 0.65, |
|
"learning_rate": 9.345372460496615e-05, |
|
"loss": 0.0824, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"learning_rate": 9.322799097065464e-05, |
|
"loss": 0.0764, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.68, |
|
"eval_accuracy": 0.9824731182795698, |
|
"eval_f1": 0.0, |
|
"eval_loss": 0.08410274237394333, |
|
"eval_precision": 0.0, |
|
"eval_recall": 0.0, |
|
"eval_roc_auc_score": 0.6459055273810859, |
|
"eval_runtime": 2.8678, |
|
"eval_samples_per_second": 64.858, |
|
"eval_steps_per_second": 4.184, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 0.7, |
|
"learning_rate": 9.300225733634313e-05, |
|
"loss": 0.0882, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 0.72, |
|
"learning_rate": 9.27765237020316e-05, |
|
"loss": 0.0886, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 0.74, |
|
"learning_rate": 9.25507900677201e-05, |
|
"loss": 0.0769, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 0.77, |
|
"learning_rate": 9.232505643340859e-05, |
|
"loss": 0.0794, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 0.79, |
|
"learning_rate": 9.209932279909706e-05, |
|
"loss": 0.0862, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 0.81, |
|
"learning_rate": 9.187358916478555e-05, |
|
"loss": 0.0838, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 0.84, |
|
"learning_rate": 9.164785553047405e-05, |
|
"loss": 0.0788, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.86, |
|
"learning_rate": 9.142212189616254e-05, |
|
"loss": 0.0837, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.88, |
|
"learning_rate": 9.119638826185101e-05, |
|
"loss": 0.0828, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"learning_rate": 9.09706546275395e-05, |
|
"loss": 0.0848, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.9, |
|
"eval_accuracy": 0.9824731182795698, |
|
"eval_f1": 0.0, |
|
"eval_loss": 0.0832749530673027, |
|
"eval_precision": 0.0, |
|
"eval_recall": 0.0, |
|
"eval_roc_auc_score": 0.6494154925936545, |
|
"eval_runtime": 2.9377, |
|
"eval_samples_per_second": 63.315, |
|
"eval_steps_per_second": 4.085, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.93, |
|
"learning_rate": 9.0744920993228e-05, |
|
"loss": 0.0801, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.95, |
|
"learning_rate": 9.051918735891649e-05, |
|
"loss": 0.0797, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.97, |
|
"learning_rate": 9.029345372460498e-05, |
|
"loss": 0.0843, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.99, |
|
"learning_rate": 9.006772009029347e-05, |
|
"loss": 0.0809, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 1.02, |
|
"learning_rate": 8.984198645598195e-05, |
|
"loss": 0.0852, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 1.04, |
|
"learning_rate": 8.961625282167044e-05, |
|
"loss": 0.0844, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 1.06, |
|
"learning_rate": 8.939051918735893e-05, |
|
"loss": 0.0772, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 1.08, |
|
"learning_rate": 8.91647855530474e-05, |
|
"loss": 0.0838, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 1.11, |
|
"learning_rate": 8.89390519187359e-05, |
|
"loss": 0.0813, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"learning_rate": 8.871331828442439e-05, |
|
"loss": 0.0844, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.13, |
|
"eval_accuracy": 0.9831182795698925, |
|
"eval_f1": 0.07647058823529411, |
|
"eval_loss": 0.08306349813938141, |
|
"eval_precision": 0.9285714285714286, |
|
"eval_recall": 0.03987730061349693, |
|
"eval_roc_auc_score": 0.6617425877793452, |
|
"eval_runtime": 2.9724, |
|
"eval_samples_per_second": 62.576, |
|
"eval_steps_per_second": 4.037, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 1.15, |
|
"learning_rate": 8.848758465011288e-05, |
|
"loss": 0.0828, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 1.17, |
|
"learning_rate": 8.826185101580135e-05, |
|
"loss": 0.075, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 1.2, |
|
"learning_rate": 8.803611738148985e-05, |
|
"loss": 0.0821, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 1.22, |
|
"learning_rate": 8.781038374717834e-05, |
|
"loss": 0.0808, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 1.24, |
|
"learning_rate": 8.758465011286681e-05, |
|
"loss": 0.0827, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 1.26, |
|
"learning_rate": 8.73589164785553e-05, |
|
"loss": 0.0845, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 1.29, |
|
"learning_rate": 8.71331828442438e-05, |
|
"loss": 0.0865, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 1.31, |
|
"learning_rate": 8.690744920993227e-05, |
|
"loss": 0.081, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 1.33, |
|
"learning_rate": 8.668171557562076e-05, |
|
"loss": 0.0778, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"learning_rate": 8.645598194130925e-05, |
|
"loss": 0.0806, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.35, |
|
"eval_accuracy": 0.9831182795698925, |
|
"eval_f1": 0.07647058823529411, |
|
"eval_loss": 0.0819987803697586, |
|
"eval_precision": 0.9285714285714286, |
|
"eval_recall": 0.03987730061349693, |
|
"eval_roc_auc_score": 0.6814540387596847, |
|
"eval_runtime": 2.9546, |
|
"eval_samples_per_second": 62.953, |
|
"eval_steps_per_second": 4.061, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 1.38, |
|
"learning_rate": 8.623024830699775e-05, |
|
"loss": 0.0805, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 1.4, |
|
"learning_rate": 8.600451467268624e-05, |
|
"loss": 0.0883, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 1.42, |
|
"learning_rate": 8.577878103837473e-05, |
|
"loss": 0.0791, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 1.44, |
|
"learning_rate": 8.555304740406322e-05, |
|
"loss": 0.0821, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 1.47, |
|
"learning_rate": 8.53273137697517e-05, |
|
"loss": 0.0809, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 1.49, |
|
"learning_rate": 8.510158013544019e-05, |
|
"loss": 0.0815, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 1.51, |
|
"learning_rate": 8.487584650112868e-05, |
|
"loss": 0.0815, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.53, |
|
"learning_rate": 8.465011286681715e-05, |
|
"loss": 0.0796, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.56, |
|
"learning_rate": 8.442437923250564e-05, |
|
"loss": 0.0859, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"learning_rate": 8.419864559819414e-05, |
|
"loss": 0.0781, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.58, |
|
"eval_accuracy": 0.9831182795698925, |
|
"eval_f1": 0.07647058823529411, |
|
"eval_loss": 0.08179940283298492, |
|
"eval_precision": 0.9285714285714286, |
|
"eval_recall": 0.03987730061349693, |
|
"eval_roc_auc_score": 0.6900263440430636, |
|
"eval_runtime": 3.0126, |
|
"eval_samples_per_second": 61.741, |
|
"eval_steps_per_second": 3.983, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.6, |
|
"learning_rate": 8.397291196388263e-05, |
|
"loss": 0.0795, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.63, |
|
"learning_rate": 8.37471783295711e-05, |
|
"loss": 0.0823, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.65, |
|
"learning_rate": 8.35214446952596e-05, |
|
"loss": 0.0805, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.67, |
|
"learning_rate": 8.329571106094809e-05, |
|
"loss": 0.0816, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.69, |
|
"learning_rate": 8.306997742663656e-05, |
|
"loss": 0.0815, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.72, |
|
"learning_rate": 8.284424379232505e-05, |
|
"loss": 0.075, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.74, |
|
"learning_rate": 8.261851015801354e-05, |
|
"loss": 0.0816, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.76, |
|
"learning_rate": 8.239277652370204e-05, |
|
"loss": 0.0735, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.78, |
|
"learning_rate": 8.216704288939053e-05, |
|
"loss": 0.0842, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"learning_rate": 8.194130925507902e-05, |
|
"loss": 0.0802, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.81, |
|
"eval_accuracy": 0.9831182795698925, |
|
"eval_f1": 0.07647058823529411, |
|
"eval_loss": 0.08180813491344452, |
|
"eval_precision": 0.9285714285714286, |
|
"eval_recall": 0.03987730061349693, |
|
"eval_roc_auc_score": 0.6852675798731107, |
|
"eval_runtime": 2.8449, |
|
"eval_samples_per_second": 65.38, |
|
"eval_steps_per_second": 4.218, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.83, |
|
"learning_rate": 8.17155756207675e-05, |
|
"loss": 0.0732, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.85, |
|
"learning_rate": 8.148984198645599e-05, |
|
"loss": 0.0798, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.87, |
|
"learning_rate": 8.126410835214448e-05, |
|
"loss": 0.0809, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.9, |
|
"learning_rate": 8.103837471783297e-05, |
|
"loss": 0.0777, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.92, |
|
"learning_rate": 8.081264108352144e-05, |
|
"loss": 0.0736, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.94, |
|
"learning_rate": 8.058690744920994e-05, |
|
"loss": 0.0831, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.96, |
|
"learning_rate": 8.036117381489843e-05, |
|
"loss": 0.0829, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.99, |
|
"learning_rate": 8.01354401805869e-05, |
|
"loss": 0.0824, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 2.01, |
|
"learning_rate": 7.99097065462754e-05, |
|
"loss": 0.0769, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"learning_rate": 7.968397291196389e-05, |
|
"loss": 0.0831, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.03, |
|
"eval_accuracy": 0.9831182795698925, |
|
"eval_f1": 0.07647058823529411, |
|
"eval_loss": 0.08205953240394592, |
|
"eval_precision": 0.9285714285714286, |
|
"eval_recall": 0.03987730061349693, |
|
"eval_roc_auc_score": 0.684445314708416, |
|
"eval_runtime": 2.9199, |
|
"eval_samples_per_second": 63.701, |
|
"eval_steps_per_second": 4.11, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 2.05, |
|
"learning_rate": 7.945823927765236e-05, |
|
"loss": 0.076, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 2.08, |
|
"learning_rate": 7.923250564334085e-05, |
|
"loss": 0.0832, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 2.1, |
|
"learning_rate": 7.900677200902934e-05, |
|
"loss": 0.0805, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 2.12, |
|
"learning_rate": 7.878103837471784e-05, |
|
"loss": 0.0763, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 2.14, |
|
"learning_rate": 7.855530474040633e-05, |
|
"loss": 0.0805, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 2.17, |
|
"learning_rate": 7.832957110609482e-05, |
|
"loss": 0.079, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 2.19, |
|
"learning_rate": 7.810383747178331e-05, |
|
"loss": 0.081, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 2.21, |
|
"learning_rate": 7.787810383747179e-05, |
|
"loss": 0.0858, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 2.23, |
|
"learning_rate": 7.765237020316028e-05, |
|
"loss": 0.0809, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"learning_rate": 7.742663656884877e-05, |
|
"loss": 0.0778, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.26, |
|
"eval_accuracy": 0.9830645161290322, |
|
"eval_f1": 0.07624633431085044, |
|
"eval_loss": 0.08076750487089157, |
|
"eval_precision": 0.8666666666666667, |
|
"eval_recall": 0.03987730061349693, |
|
"eval_roc_auc_score": 0.7127667556775492, |
|
"eval_runtime": 2.9944, |
|
"eval_samples_per_second": 62.116, |
|
"eval_steps_per_second": 4.008, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 2.28, |
|
"learning_rate": 7.720090293453724e-05, |
|
"loss": 0.0806, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 2.3, |
|
"learning_rate": 7.697516930022574e-05, |
|
"loss": 0.0785, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 2.33, |
|
"learning_rate": 7.674943566591423e-05, |
|
"loss": 0.0759, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 2.35, |
|
"learning_rate": 7.652370203160272e-05, |
|
"loss": 0.0717, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 2.37, |
|
"learning_rate": 7.62979683972912e-05, |
|
"loss": 0.0828, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 2.39, |
|
"learning_rate": 7.607223476297969e-05, |
|
"loss": 0.0747, |
|
"step": 1060 |
|
}, |
|
{ |
|
"epoch": 2.42, |
|
"learning_rate": 7.584650112866818e-05, |
|
"loss": 0.0803, |
|
"step": 1070 |
|
}, |
|
{ |
|
"epoch": 2.44, |
|
"learning_rate": 7.562076749435665e-05, |
|
"loss": 0.0767, |
|
"step": 1080 |
|
}, |
|
{ |
|
"epoch": 2.46, |
|
"learning_rate": 7.539503386004514e-05, |
|
"loss": 0.079, |
|
"step": 1090 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"learning_rate": 7.516930022573364e-05, |
|
"loss": 0.0851, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.48, |
|
"eval_accuracy": 0.9830645161290322, |
|
"eval_f1": 0.07079646017699115, |
|
"eval_loss": 0.07936662435531616, |
|
"eval_precision": 0.9230769230769231, |
|
"eval_recall": 0.03680981595092025, |
|
"eval_roc_auc_score": 0.7414136783562553, |
|
"eval_runtime": 2.9647, |
|
"eval_samples_per_second": 62.738, |
|
"eval_steps_per_second": 4.048, |
|
"step": 1100 |
|
}, |
|
{ |
|
"epoch": 2.51, |
|
"learning_rate": 7.494356659142213e-05, |
|
"loss": 0.0801, |
|
"step": 1110 |
|
}, |
|
{ |
|
"epoch": 2.53, |
|
"learning_rate": 7.471783295711062e-05, |
|
"loss": 0.0747, |
|
"step": 1120 |
|
}, |
|
{ |
|
"epoch": 2.55, |
|
"learning_rate": 7.449209932279911e-05, |
|
"loss": 0.0764, |
|
"step": 1130 |
|
}, |
|
{ |
|
"epoch": 2.57, |
|
"learning_rate": 7.42663656884876e-05, |
|
"loss": 0.0767, |
|
"step": 1140 |
|
}, |
|
{ |
|
"epoch": 2.6, |
|
"learning_rate": 7.404063205417608e-05, |
|
"loss": 0.0785, |
|
"step": 1150 |
|
}, |
|
{ |
|
"epoch": 2.62, |
|
"learning_rate": 7.381489841986457e-05, |
|
"loss": 0.08, |
|
"step": 1160 |
|
}, |
|
{ |
|
"epoch": 2.64, |
|
"learning_rate": 7.358916478555306e-05, |
|
"loss": 0.0797, |
|
"step": 1170 |
|
}, |
|
{ |
|
"epoch": 2.66, |
|
"learning_rate": 7.336343115124154e-05, |
|
"loss": 0.0791, |
|
"step": 1180 |
|
}, |
|
{ |
|
"epoch": 2.69, |
|
"learning_rate": 7.313769751693003e-05, |
|
"loss": 0.0711, |
|
"step": 1190 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"learning_rate": 7.291196388261852e-05, |
|
"loss": 0.0721, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.71, |
|
"eval_accuracy": 0.9831182795698925, |
|
"eval_f1": 0.07647058823529411, |
|
"eval_loss": 0.07887007296085358, |
|
"eval_precision": 0.9285714285714286, |
|
"eval_recall": 0.03987730061349693, |
|
"eval_roc_auc_score": 0.747081239831844, |
|
"eval_runtime": 2.9555, |
|
"eval_samples_per_second": 62.934, |
|
"eval_steps_per_second": 4.06, |
|
"step": 1200 |
|
}, |
|
{ |
|
"epoch": 2.73, |
|
"learning_rate": 7.2686230248307e-05, |
|
"loss": 0.0734, |
|
"step": 1210 |
|
}, |
|
{ |
|
"epoch": 2.75, |
|
"learning_rate": 7.246049661399549e-05, |
|
"loss": 0.0834, |
|
"step": 1220 |
|
}, |
|
{ |
|
"epoch": 2.78, |
|
"learning_rate": 7.223476297968398e-05, |
|
"loss": 0.0799, |
|
"step": 1230 |
|
}, |
|
{ |
|
"epoch": 2.8, |
|
"learning_rate": 7.200902934537245e-05, |
|
"loss": 0.0784, |
|
"step": 1240 |
|
}, |
|
{ |
|
"epoch": 2.82, |
|
"learning_rate": 7.178329571106094e-05, |
|
"loss": 0.0759, |
|
"step": 1250 |
|
}, |
|
{ |
|
"epoch": 2.84, |
|
"learning_rate": 7.155756207674944e-05, |
|
"loss": 0.0771, |
|
"step": 1260 |
|
}, |
|
{ |
|
"epoch": 2.87, |
|
"learning_rate": 7.133182844243793e-05, |
|
"loss": 0.0785, |
|
"step": 1270 |
|
}, |
|
{ |
|
"epoch": 2.89, |
|
"learning_rate": 7.110609480812642e-05, |
|
"loss": 0.0823, |
|
"step": 1280 |
|
}, |
|
{ |
|
"epoch": 2.91, |
|
"learning_rate": 7.088036117381491e-05, |
|
"loss": 0.0812, |
|
"step": 1290 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"learning_rate": 7.06546275395034e-05, |
|
"loss": 0.0729, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.93, |
|
"eval_accuracy": 0.9830107526881721, |
|
"eval_f1": 0.07602339181286549, |
|
"eval_loss": 0.07854945212602615, |
|
"eval_precision": 0.8125, |
|
"eval_recall": 0.03987730061349693, |
|
"eval_roc_auc_score": 0.7547807539089698, |
|
"eval_runtime": 2.9838, |
|
"eval_samples_per_second": 62.337, |
|
"eval_steps_per_second": 4.022, |
|
"step": 1300 |
|
}, |
|
{ |
|
"epoch": 2.96, |
|
"learning_rate": 7.042889390519188e-05, |
|
"loss": 0.0764, |
|
"step": 1310 |
|
}, |
|
{ |
|
"epoch": 2.98, |
|
"learning_rate": 7.020316027088037e-05, |
|
"loss": 0.0777, |
|
"step": 1320 |
|
}, |
|
{ |
|
"epoch": 3.0, |
|
"learning_rate": 6.997742663656886e-05, |
|
"loss": 0.0726, |
|
"step": 1330 |
|
}, |
|
{ |
|
"epoch": 3.02, |
|
"learning_rate": 6.975169300225734e-05, |
|
"loss": 0.0753, |
|
"step": 1340 |
|
}, |
|
{ |
|
"epoch": 3.05, |
|
"learning_rate": 6.952595936794583e-05, |
|
"loss": 0.0838, |
|
"step": 1350 |
|
}, |
|
{ |
|
"epoch": 3.07, |
|
"learning_rate": 6.930022573363432e-05, |
|
"loss": 0.0784, |
|
"step": 1360 |
|
}, |
|
{ |
|
"epoch": 3.09, |
|
"learning_rate": 6.907449209932281e-05, |
|
"loss": 0.0771, |
|
"step": 1370 |
|
}, |
|
{ |
|
"epoch": 3.12, |
|
"learning_rate": 6.884875846501129e-05, |
|
"loss": 0.0775, |
|
"step": 1380 |
|
}, |
|
{ |
|
"epoch": 3.14, |
|
"learning_rate": 6.862302483069978e-05, |
|
"loss": 0.0748, |
|
"step": 1390 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"learning_rate": 6.839729119638827e-05, |
|
"loss": 0.0777, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 3.16, |
|
"eval_accuracy": 0.9824731182795698, |
|
"eval_f1": 0.0, |
|
"eval_loss": 0.07838458567857742, |
|
"eval_precision": 0.0, |
|
"eval_recall": 0.0, |
|
"eval_roc_auc_score": 0.7442572369741851, |
|
"eval_runtime": 3.0168, |
|
"eval_samples_per_second": 61.656, |
|
"eval_steps_per_second": 3.978, |
|
"step": 1400 |
|
}, |
|
{ |
|
"epoch": 3.18, |
|
"learning_rate": 6.817155756207674e-05, |
|
"loss": 0.0786, |
|
"step": 1410 |
|
}, |
|
{ |
|
"epoch": 3.21, |
|
"learning_rate": 6.794582392776524e-05, |
|
"loss": 0.0741, |
|
"step": 1420 |
|
}, |
|
{ |
|
"epoch": 3.23, |
|
"learning_rate": 6.772009029345373e-05, |
|
"loss": 0.0768, |
|
"step": 1430 |
|
}, |
|
{ |
|
"epoch": 3.25, |
|
"learning_rate": 6.749435665914222e-05, |
|
"loss": 0.0706, |
|
"step": 1440 |
|
}, |
|
{ |
|
"epoch": 3.27, |
|
"learning_rate": 6.726862302483071e-05, |
|
"loss": 0.0705, |
|
"step": 1450 |
|
}, |
|
{ |
|
"epoch": 3.3, |
|
"learning_rate": 6.70428893905192e-05, |
|
"loss": 0.0728, |
|
"step": 1460 |
|
}, |
|
{ |
|
"epoch": 3.32, |
|
"learning_rate": 6.681715575620769e-05, |
|
"loss": 0.0734, |
|
"step": 1470 |
|
}, |
|
{ |
|
"epoch": 3.34, |
|
"learning_rate": 6.659142212189617e-05, |
|
"loss": 0.0794, |
|
"step": 1480 |
|
}, |
|
{ |
|
"epoch": 3.36, |
|
"learning_rate": 6.636568848758466e-05, |
|
"loss": 0.0737, |
|
"step": 1490 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"learning_rate": 6.613995485327315e-05, |
|
"loss": 0.0752, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 3.39, |
|
"eval_accuracy": 0.9831182795698925, |
|
"eval_f1": 0.07647058823529411, |
|
"eval_loss": 0.07648065686225891, |
|
"eval_precision": 0.9285714285714286, |
|
"eval_recall": 0.03987730061349693, |
|
"eval_roc_auc_score": 0.7748898666582513, |
|
"eval_runtime": 2.9841, |
|
"eval_samples_per_second": 62.33, |
|
"eval_steps_per_second": 4.021, |
|
"step": 1500 |
|
}, |
|
{ |
|
"epoch": 3.41, |
|
"learning_rate": 6.591422121896163e-05, |
|
"loss": 0.076, |
|
"step": 1510 |
|
}, |
|
{ |
|
"epoch": 3.43, |
|
"learning_rate": 6.568848758465012e-05, |
|
"loss": 0.0718, |
|
"step": 1520 |
|
}, |
|
{ |
|
"epoch": 3.45, |
|
"learning_rate": 6.546275395033861e-05, |
|
"loss": 0.0732, |
|
"step": 1530 |
|
}, |
|
{ |
|
"epoch": 3.48, |
|
"learning_rate": 6.523702031602708e-05, |
|
"loss": 0.0738, |
|
"step": 1540 |
|
}, |
|
{ |
|
"epoch": 3.5, |
|
"learning_rate": 6.501128668171558e-05, |
|
"loss": 0.0803, |
|
"step": 1550 |
|
}, |
|
{ |
|
"epoch": 3.52, |
|
"learning_rate": 6.478555304740407e-05, |
|
"loss": 0.0761, |
|
"step": 1560 |
|
}, |
|
{ |
|
"epoch": 3.54, |
|
"learning_rate": 6.455981941309256e-05, |
|
"loss": 0.0801, |
|
"step": 1570 |
|
}, |
|
{ |
|
"epoch": 3.57, |
|
"learning_rate": 6.433408577878103e-05, |
|
"loss": 0.07, |
|
"step": 1580 |
|
}, |
|
{ |
|
"epoch": 3.59, |
|
"learning_rate": 6.410835214446953e-05, |
|
"loss": 0.072, |
|
"step": 1590 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"learning_rate": 6.388261851015802e-05, |
|
"loss": 0.074, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 3.61, |
|
"eval_accuracy": 0.9831182795698925, |
|
"eval_f1": 0.07647058823529411, |
|
"eval_loss": 0.07616131007671356, |
|
"eval_precision": 0.9285714285714286, |
|
"eval_recall": 0.03987730061349693, |
|
"eval_roc_auc_score": 0.7751983105166012, |
|
"eval_runtime": 2.9678, |
|
"eval_samples_per_second": 62.673, |
|
"eval_steps_per_second": 4.043, |
|
"step": 1600 |
|
}, |
|
{ |
|
"epoch": 3.63, |
|
"learning_rate": 6.365688487584651e-05, |
|
"loss": 0.0727, |
|
"step": 1610 |
|
}, |
|
{ |
|
"epoch": 3.66, |
|
"learning_rate": 6.3431151241535e-05, |
|
"loss": 0.0682, |
|
"step": 1620 |
|
}, |
|
{ |
|
"epoch": 3.68, |
|
"learning_rate": 6.320541760722349e-05, |
|
"loss": 0.0768, |
|
"step": 1630 |
|
}, |
|
{ |
|
"epoch": 3.7, |
|
"learning_rate": 6.297968397291197e-05, |
|
"loss": 0.0716, |
|
"step": 1640 |
|
}, |
|
{ |
|
"epoch": 3.72, |
|
"learning_rate": 6.275395033860046e-05, |
|
"loss": 0.0748, |
|
"step": 1650 |
|
}, |
|
{ |
|
"epoch": 3.75, |
|
"learning_rate": 6.252821670428895e-05, |
|
"loss": 0.0743, |
|
"step": 1660 |
|
}, |
|
{ |
|
"epoch": 3.77, |
|
"learning_rate": 6.230248306997743e-05, |
|
"loss": 0.0748, |
|
"step": 1670 |
|
}, |
|
{ |
|
"epoch": 3.79, |
|
"learning_rate": 6.207674943566592e-05, |
|
"loss": 0.0692, |
|
"step": 1680 |
|
}, |
|
{ |
|
"epoch": 3.81, |
|
"learning_rate": 6.185101580135441e-05, |
|
"loss": 0.0803, |
|
"step": 1690 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"learning_rate": 6.16252821670429e-05, |
|
"loss": 0.0731, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 3.84, |
|
"eval_accuracy": 0.9820430107526882, |
|
"eval_f1": 0.07222222222222223, |
|
"eval_loss": 0.07798729836940765, |
|
"eval_precision": 0.38235294117647056, |
|
"eval_recall": 0.03987730061349693, |
|
"eval_roc_auc_score": 0.7578447134988798, |
|
"eval_runtime": 3.0028, |
|
"eval_samples_per_second": 61.942, |
|
"eval_steps_per_second": 3.996, |
|
"step": 1700 |
|
}, |
|
{ |
|
"epoch": 3.86, |
|
"learning_rate": 6.139954853273138e-05, |
|
"loss": 0.0827, |
|
"step": 1710 |
|
}, |
|
{ |
|
"epoch": 3.88, |
|
"learning_rate": 6.117381489841987e-05, |
|
"loss": 0.072, |
|
"step": 1720 |
|
}, |
|
{ |
|
"epoch": 3.91, |
|
"learning_rate": 6.094808126410836e-05, |
|
"loss": 0.0712, |
|
"step": 1730 |
|
}, |
|
{ |
|
"epoch": 3.93, |
|
"learning_rate": 6.072234762979684e-05, |
|
"loss": 0.0755, |
|
"step": 1740 |
|
}, |
|
{ |
|
"epoch": 3.95, |
|
"learning_rate": 6.049661399548533e-05, |
|
"loss": 0.0719, |
|
"step": 1750 |
|
}, |
|
{ |
|
"epoch": 3.97, |
|
"learning_rate": 6.027088036117382e-05, |
|
"loss": 0.074, |
|
"step": 1760 |
|
}, |
|
{ |
|
"epoch": 4.0, |
|
"learning_rate": 6.00451467268623e-05, |
|
"loss": 0.0735, |
|
"step": 1770 |
|
}, |
|
{ |
|
"epoch": 4.02, |
|
"learning_rate": 5.981941309255079e-05, |
|
"loss": 0.0678, |
|
"step": 1780 |
|
}, |
|
{ |
|
"epoch": 4.04, |
|
"learning_rate": 5.959367945823928e-05, |
|
"loss": 0.071, |
|
"step": 1790 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"learning_rate": 5.936794582392777e-05, |
|
"loss": 0.074, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 4.06, |
|
"eval_accuracy": 0.9831182795698925, |
|
"eval_f1": 0.07647058823529411, |
|
"eval_loss": 0.07262326031923294, |
|
"eval_precision": 0.9285714285714286, |
|
"eval_recall": 0.03987730061349693, |
|
"eval_roc_auc_score": 0.8143374441275982, |
|
"eval_runtime": 2.9291, |
|
"eval_samples_per_second": 63.501, |
|
"eval_steps_per_second": 4.097, |
|
"step": 1800 |
|
}, |
|
{ |
|
"epoch": 4.09, |
|
"learning_rate": 5.914221218961625e-05, |
|
"loss": 0.0744, |
|
"step": 1810 |
|
}, |
|
{ |
|
"epoch": 4.11, |
|
"learning_rate": 5.891647855530474e-05, |
|
"loss": 0.0711, |
|
"step": 1820 |
|
}, |
|
{ |
|
"epoch": 4.13, |
|
"learning_rate": 5.869074492099323e-05, |
|
"loss": 0.0753, |
|
"step": 1830 |
|
}, |
|
{ |
|
"epoch": 4.15, |
|
"learning_rate": 5.8465011286681716e-05, |
|
"loss": 0.0713, |
|
"step": 1840 |
|
}, |
|
{ |
|
"epoch": 4.18, |
|
"learning_rate": 5.823927765237021e-05, |
|
"loss": 0.0688, |
|
"step": 1850 |
|
}, |
|
{ |
|
"epoch": 4.2, |
|
"learning_rate": 5.80135440180587e-05, |
|
"loss": 0.0714, |
|
"step": 1860 |
|
}, |
|
{ |
|
"epoch": 4.22, |
|
"learning_rate": 5.7787810383747176e-05, |
|
"loss": 0.0673, |
|
"step": 1870 |
|
}, |
|
{ |
|
"epoch": 4.24, |
|
"learning_rate": 5.7562076749435666e-05, |
|
"loss": 0.0683, |
|
"step": 1880 |
|
}, |
|
{ |
|
"epoch": 4.27, |
|
"learning_rate": 5.733634311512416e-05, |
|
"loss": 0.0754, |
|
"step": 1890 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"learning_rate": 5.711060948081265e-05, |
|
"loss": 0.0663, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 4.29, |
|
"eval_accuracy": 0.9834408602150537, |
|
"eval_f1": 0.11494252873563218, |
|
"eval_loss": 0.07110567390918732, |
|
"eval_precision": 0.9090909090909091, |
|
"eval_recall": 0.06134969325153374, |
|
"eval_roc_auc_score": 0.8266475853923675, |
|
"eval_runtime": 2.9864, |
|
"eval_samples_per_second": 62.282, |
|
"eval_steps_per_second": 4.018, |
|
"step": 1900 |
|
}, |
|
{ |
|
"epoch": 4.31, |
|
"learning_rate": 5.688487584650113e-05, |
|
"loss": 0.0668, |
|
"step": 1910 |
|
}, |
|
{ |
|
"epoch": 4.33, |
|
"learning_rate": 5.665914221218962e-05, |
|
"loss": 0.0726, |
|
"step": 1920 |
|
}, |
|
{ |
|
"epoch": 4.36, |
|
"learning_rate": 5.6433408577878114e-05, |
|
"loss": 0.0695, |
|
"step": 1930 |
|
}, |
|
{ |
|
"epoch": 4.38, |
|
"learning_rate": 5.620767494356659e-05, |
|
"loss": 0.0713, |
|
"step": 1940 |
|
}, |
|
{ |
|
"epoch": 4.4, |
|
"learning_rate": 5.598194130925508e-05, |
|
"loss": 0.0676, |
|
"step": 1950 |
|
}, |
|
{ |
|
"epoch": 4.42, |
|
"learning_rate": 5.575620767494357e-05, |
|
"loss": 0.0673, |
|
"step": 1960 |
|
}, |
|
{ |
|
"epoch": 4.45, |
|
"learning_rate": 5.553047404063205e-05, |
|
"loss": 0.0663, |
|
"step": 1970 |
|
}, |
|
{ |
|
"epoch": 4.47, |
|
"learning_rate": 5.530474040632054e-05, |
|
"loss": 0.0714, |
|
"step": 1980 |
|
}, |
|
{ |
|
"epoch": 4.49, |
|
"learning_rate": 5.507900677200903e-05, |
|
"loss": 0.0712, |
|
"step": 1990 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"learning_rate": 5.485327313769752e-05, |
|
"loss": 0.0665, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 4.51, |
|
"eval_accuracy": 0.9835483870967742, |
|
"eval_f1": 0.12571428571428572, |
|
"eval_loss": 0.07043366134166718, |
|
"eval_precision": 0.9166666666666666, |
|
"eval_recall": 0.06748466257668712, |
|
"eval_roc_auc_score": 0.8280574465985064, |
|
"eval_runtime": 2.8389, |
|
"eval_samples_per_second": 65.518, |
|
"eval_steps_per_second": 4.227, |
|
"step": 2000 |
|
}, |
|
{ |
|
"epoch": 4.54, |
|
"learning_rate": 5.462753950338601e-05, |
|
"loss": 0.073, |
|
"step": 2010 |
|
}, |
|
{ |
|
"epoch": 4.56, |
|
"learning_rate": 5.44018058690745e-05, |
|
"loss": 0.0745, |
|
"step": 2020 |
|
}, |
|
{ |
|
"epoch": 4.58, |
|
"learning_rate": 5.417607223476299e-05, |
|
"loss": 0.0726, |
|
"step": 2030 |
|
}, |
|
{ |
|
"epoch": 4.6, |
|
"learning_rate": 5.3950338600451466e-05, |
|
"loss": 0.0661, |
|
"step": 2040 |
|
}, |
|
{ |
|
"epoch": 4.63, |
|
"learning_rate": 5.372460496613996e-05, |
|
"loss": 0.0686, |
|
"step": 2050 |
|
}, |
|
{ |
|
"epoch": 4.65, |
|
"learning_rate": 5.349887133182845e-05, |
|
"loss": 0.0684, |
|
"step": 2060 |
|
}, |
|
{ |
|
"epoch": 4.67, |
|
"learning_rate": 5.3273137697516925e-05, |
|
"loss": 0.0688, |
|
"step": 2070 |
|
}, |
|
{ |
|
"epoch": 4.7, |
|
"learning_rate": 5.3047404063205416e-05, |
|
"loss": 0.0657, |
|
"step": 2080 |
|
}, |
|
{ |
|
"epoch": 4.72, |
|
"learning_rate": 5.282167042889391e-05, |
|
"loss": 0.0669, |
|
"step": 2090 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"learning_rate": 5.259593679458239e-05, |
|
"loss": 0.0678, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 4.74, |
|
"eval_accuracy": 0.9834408602150537, |
|
"eval_f1": 0.1098265895953757, |
|
"eval_loss": 0.06957084685564041, |
|
"eval_precision": 0.95, |
|
"eval_recall": 0.05828220858895705, |
|
"eval_roc_auc_score": 0.8349440957047158, |
|
"eval_runtime": 2.8855, |
|
"eval_samples_per_second": 64.461, |
|
"eval_steps_per_second": 4.159, |
|
"step": 2100 |
|
}, |
|
{ |
|
"epoch": 4.76, |
|
"learning_rate": 5.237020316027088e-05, |
|
"loss": 0.0712, |
|
"step": 2110 |
|
}, |
|
{ |
|
"epoch": 4.79, |
|
"learning_rate": 5.214446952595937e-05, |
|
"loss": 0.0649, |
|
"step": 2120 |
|
}, |
|
{ |
|
"epoch": 4.81, |
|
"learning_rate": 5.1918735891647864e-05, |
|
"loss": 0.068, |
|
"step": 2130 |
|
}, |
|
{ |
|
"epoch": 4.83, |
|
"learning_rate": 5.169300225733634e-05, |
|
"loss": 0.073, |
|
"step": 2140 |
|
}, |
|
{ |
|
"epoch": 4.85, |
|
"learning_rate": 5.146726862302483e-05, |
|
"loss": 0.0724, |
|
"step": 2150 |
|
}, |
|
{ |
|
"epoch": 4.88, |
|
"learning_rate": 5.124153498871332e-05, |
|
"loss": 0.0686, |
|
"step": 2160 |
|
}, |
|
{ |
|
"epoch": 4.9, |
|
"learning_rate": 5.101580135440181e-05, |
|
"loss": 0.0712, |
|
"step": 2170 |
|
}, |
|
{ |
|
"epoch": 4.92, |
|
"learning_rate": 5.07900677200903e-05, |
|
"loss": 0.069, |
|
"step": 2180 |
|
}, |
|
{ |
|
"epoch": 4.94, |
|
"learning_rate": 5.056433408577879e-05, |
|
"loss": 0.0712, |
|
"step": 2190 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"learning_rate": 5.0338600451467266e-05, |
|
"loss": 0.0655, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 4.97, |
|
"eval_accuracy": 0.9836021505376344, |
|
"eval_f1": 0.13105413105413105, |
|
"eval_loss": 0.06820350885391235, |
|
"eval_precision": 0.92, |
|
"eval_recall": 0.0705521472392638, |
|
"eval_roc_auc_score": 0.8508380608474544, |
|
"eval_runtime": 2.9822, |
|
"eval_samples_per_second": 62.37, |
|
"eval_steps_per_second": 4.024, |
|
"step": 2200 |
|
}, |
|
{ |
|
"epoch": 4.99, |
|
"learning_rate": 5.011286681715576e-05, |
|
"loss": 0.0678, |
|
"step": 2210 |
|
}, |
|
{ |
|
"epoch": 5.01, |
|
"learning_rate": 4.988713318284425e-05, |
|
"loss": 0.0643, |
|
"step": 2220 |
|
}, |
|
{ |
|
"epoch": 5.03, |
|
"learning_rate": 4.966139954853273e-05, |
|
"loss": 0.0636, |
|
"step": 2230 |
|
}, |
|
{ |
|
"epoch": 5.06, |
|
"learning_rate": 4.9435665914221216e-05, |
|
"loss": 0.0635, |
|
"step": 2240 |
|
}, |
|
{ |
|
"epoch": 5.08, |
|
"learning_rate": 4.920993227990971e-05, |
|
"loss": 0.0711, |
|
"step": 2250 |
|
}, |
|
{ |
|
"epoch": 5.1, |
|
"learning_rate": 4.89841986455982e-05, |
|
"loss": 0.0655, |
|
"step": 2260 |
|
}, |
|
{ |
|
"epoch": 5.12, |
|
"learning_rate": 4.875846501128669e-05, |
|
"loss": 0.0651, |
|
"step": 2270 |
|
}, |
|
{ |
|
"epoch": 5.15, |
|
"learning_rate": 4.853273137697517e-05, |
|
"loss": 0.0637, |
|
"step": 2280 |
|
}, |
|
{ |
|
"epoch": 5.17, |
|
"learning_rate": 4.830699774266366e-05, |
|
"loss": 0.0641, |
|
"step": 2290 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"learning_rate": 4.808126410835215e-05, |
|
"loss": 0.0685, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 5.19, |
|
"eval_accuracy": 0.9836559139784946, |
|
"eval_f1": 0.13636363636363635, |
|
"eval_loss": 0.06742916256189346, |
|
"eval_precision": 0.9230769230769231, |
|
"eval_recall": 0.0736196319018405, |
|
"eval_roc_auc_score": 0.8499358101053426, |
|
"eval_runtime": 3.0707, |
|
"eval_samples_per_second": 60.572, |
|
"eval_steps_per_second": 3.908, |
|
"step": 2300 |
|
}, |
|
{ |
|
"epoch": 5.21, |
|
"learning_rate": 4.785553047404063e-05, |
|
"loss": 0.0625, |
|
"step": 2310 |
|
}, |
|
{ |
|
"epoch": 5.24, |
|
"learning_rate": 4.762979683972912e-05, |
|
"loss": 0.0656, |
|
"step": 2320 |
|
}, |
|
{ |
|
"epoch": 5.26, |
|
"learning_rate": 4.740406320541761e-05, |
|
"loss": 0.062, |
|
"step": 2330 |
|
}, |
|
{ |
|
"epoch": 5.28, |
|
"learning_rate": 4.71783295711061e-05, |
|
"loss": 0.071, |
|
"step": 2340 |
|
}, |
|
{ |
|
"epoch": 5.3, |
|
"learning_rate": 4.695259593679459e-05, |
|
"loss": 0.06, |
|
"step": 2350 |
|
}, |
|
{ |
|
"epoch": 5.33, |
|
"learning_rate": 4.672686230248307e-05, |
|
"loss": 0.0683, |
|
"step": 2360 |
|
}, |
|
{ |
|
"epoch": 5.35, |
|
"learning_rate": 4.6501128668171564e-05, |
|
"loss": 0.0639, |
|
"step": 2370 |
|
}, |
|
{ |
|
"epoch": 5.37, |
|
"learning_rate": 4.627539503386005e-05, |
|
"loss": 0.0662, |
|
"step": 2380 |
|
}, |
|
{ |
|
"epoch": 5.4, |
|
"learning_rate": 4.604966139954853e-05, |
|
"loss": 0.0709, |
|
"step": 2390 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"learning_rate": 4.582392776523702e-05, |
|
"loss": 0.0682, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 5.42, |
|
"eval_accuracy": 0.9834408602150537, |
|
"eval_f1": 0.11494252873563218, |
|
"eval_loss": 0.06691926717758179, |
|
"eval_precision": 0.9090909090909091, |
|
"eval_recall": 0.06134969325153374, |
|
"eval_roc_auc_score": 0.8571847023932222, |
|
"eval_runtime": 2.9736, |
|
"eval_samples_per_second": 62.551, |
|
"eval_steps_per_second": 4.036, |
|
"step": 2400 |
|
}, |
|
{ |
|
"epoch": 5.44, |
|
"learning_rate": 4.559819413092551e-05, |
|
"loss": 0.0666, |
|
"step": 2410 |
|
}, |
|
{ |
|
"epoch": 5.46, |
|
"learning_rate": 4.5372460496614e-05, |
|
"loss": 0.0637, |
|
"step": 2420 |
|
}, |
|
{ |
|
"epoch": 5.49, |
|
"learning_rate": 4.514672686230249e-05, |
|
"loss": 0.0668, |
|
"step": 2430 |
|
}, |
|
{ |
|
"epoch": 5.51, |
|
"learning_rate": 4.492099322799097e-05, |
|
"loss": 0.0616, |
|
"step": 2440 |
|
}, |
|
{ |
|
"epoch": 5.53, |
|
"learning_rate": 4.4695259593679463e-05, |
|
"loss": 0.0665, |
|
"step": 2450 |
|
}, |
|
{ |
|
"epoch": 5.55, |
|
"learning_rate": 4.446952595936795e-05, |
|
"loss": 0.065, |
|
"step": 2460 |
|
}, |
|
{ |
|
"epoch": 5.58, |
|
"learning_rate": 4.424379232505644e-05, |
|
"loss": 0.0632, |
|
"step": 2470 |
|
}, |
|
{ |
|
"epoch": 5.6, |
|
"learning_rate": 4.401805869074492e-05, |
|
"loss": 0.0693, |
|
"step": 2480 |
|
}, |
|
{ |
|
"epoch": 5.62, |
|
"learning_rate": 4.379232505643341e-05, |
|
"loss": 0.0656, |
|
"step": 2490 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"learning_rate": 4.35665914221219e-05, |
|
"loss": 0.0664, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 5.64, |
|
"eval_accuracy": 0.9834408602150537, |
|
"eval_f1": 0.11494252873563218, |
|
"eval_loss": 0.06578636169433594, |
|
"eval_precision": 0.9090909090909091, |
|
"eval_recall": 0.06134969325153374, |
|
"eval_roc_auc_score": 0.8651269429025514, |
|
"eval_runtime": 2.999, |
|
"eval_samples_per_second": 62.02, |
|
"eval_steps_per_second": 4.001, |
|
"step": 2500 |
|
}, |
|
{ |
|
"epoch": 5.67, |
|
"learning_rate": 4.334085778781038e-05, |
|
"loss": 0.0708, |
|
"step": 2510 |
|
}, |
|
{ |
|
"epoch": 5.69, |
|
"learning_rate": 4.311512415349887e-05, |
|
"loss": 0.0611, |
|
"step": 2520 |
|
}, |
|
{ |
|
"epoch": 5.71, |
|
"learning_rate": 4.2889390519187363e-05, |
|
"loss": 0.0643, |
|
"step": 2530 |
|
}, |
|
{ |
|
"epoch": 5.73, |
|
"learning_rate": 4.266365688487585e-05, |
|
"loss": 0.0627, |
|
"step": 2540 |
|
}, |
|
{ |
|
"epoch": 5.76, |
|
"learning_rate": 4.243792325056434e-05, |
|
"loss": 0.0686, |
|
"step": 2550 |
|
}, |
|
{ |
|
"epoch": 5.78, |
|
"learning_rate": 4.221218961625282e-05, |
|
"loss": 0.066, |
|
"step": 2560 |
|
}, |
|
{ |
|
"epoch": 5.8, |
|
"learning_rate": 4.198645598194131e-05, |
|
"loss": 0.0621, |
|
"step": 2570 |
|
}, |
|
{ |
|
"epoch": 5.82, |
|
"learning_rate": 4.17607223476298e-05, |
|
"loss": 0.0656, |
|
"step": 2580 |
|
}, |
|
{ |
|
"epoch": 5.85, |
|
"learning_rate": 4.153498871331828e-05, |
|
"loss": 0.0639, |
|
"step": 2590 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"learning_rate": 4.130925507900677e-05, |
|
"loss": 0.0646, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 5.87, |
|
"eval_accuracy": 0.9832795698924731, |
|
"eval_f1": 0.09855072463768115, |
|
"eval_loss": 0.06509752571582794, |
|
"eval_precision": 0.8947368421052632, |
|
"eval_recall": 0.05214723926380368, |
|
"eval_roc_auc_score": 0.8666736944305866, |
|
"eval_runtime": 2.8751, |
|
"eval_samples_per_second": 64.693, |
|
"eval_steps_per_second": 4.174, |
|
"step": 2600 |
|
}, |
|
{ |
|
"epoch": 5.89, |
|
"learning_rate": 4.108352144469526e-05, |
|
"loss": 0.0617, |
|
"step": 2610 |
|
}, |
|
{ |
|
"epoch": 5.91, |
|
"learning_rate": 4.085778781038375e-05, |
|
"loss": 0.0644, |
|
"step": 2620 |
|
}, |
|
{ |
|
"epoch": 5.94, |
|
"learning_rate": 4.063205417607224e-05, |
|
"loss": 0.0602, |
|
"step": 2630 |
|
}, |
|
{ |
|
"epoch": 5.96, |
|
"learning_rate": 4.040632054176072e-05, |
|
"loss": 0.0591, |
|
"step": 2640 |
|
}, |
|
{ |
|
"epoch": 5.98, |
|
"learning_rate": 4.018058690744921e-05, |
|
"loss": 0.063, |
|
"step": 2650 |
|
}, |
|
{ |
|
"epoch": 6.0, |
|
"learning_rate": 3.99548532731377e-05, |
|
"loss": 0.0637, |
|
"step": 2660 |
|
}, |
|
{ |
|
"epoch": 6.03, |
|
"learning_rate": 3.972911963882618e-05, |
|
"loss": 0.0625, |
|
"step": 2670 |
|
}, |
|
{ |
|
"epoch": 6.05, |
|
"learning_rate": 3.950338600451467e-05, |
|
"loss": 0.067, |
|
"step": 2680 |
|
}, |
|
{ |
|
"epoch": 6.07, |
|
"learning_rate": 3.927765237020316e-05, |
|
"loss": 0.0584, |
|
"step": 2690 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"learning_rate": 3.9051918735891654e-05, |
|
"loss": 0.0645, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 6.09, |
|
"eval_accuracy": 0.9837096774193549, |
|
"eval_f1": 0.14647887323943662, |
|
"eval_loss": 0.06549175083637238, |
|
"eval_precision": 0.896551724137931, |
|
"eval_recall": 0.07975460122699386, |
|
"eval_roc_auc_score": 0.8645642741606803, |
|
"eval_runtime": 2.9585, |
|
"eval_samples_per_second": 62.87, |
|
"eval_steps_per_second": 4.056, |
|
"step": 2700 |
|
}, |
|
{ |
|
"epoch": 6.12, |
|
"learning_rate": 3.882618510158014e-05, |
|
"loss": 0.0594, |
|
"step": 2710 |
|
}, |
|
{ |
|
"epoch": 6.14, |
|
"learning_rate": 3.860045146726862e-05, |
|
"loss": 0.0588, |
|
"step": 2720 |
|
}, |
|
{ |
|
"epoch": 6.16, |
|
"learning_rate": 3.837471783295711e-05, |
|
"loss": 0.0615, |
|
"step": 2730 |
|
}, |
|
{ |
|
"epoch": 6.19, |
|
"learning_rate": 3.81489841986456e-05, |
|
"loss": 0.0633, |
|
"step": 2740 |
|
}, |
|
{ |
|
"epoch": 6.21, |
|
"learning_rate": 3.792325056433409e-05, |
|
"loss": 0.0661, |
|
"step": 2750 |
|
}, |
|
{ |
|
"epoch": 6.23, |
|
"learning_rate": 3.769751693002257e-05, |
|
"loss": 0.0651, |
|
"step": 2760 |
|
}, |
|
{ |
|
"epoch": 6.25, |
|
"learning_rate": 3.747178329571106e-05, |
|
"loss": 0.0612, |
|
"step": 2770 |
|
}, |
|
{ |
|
"epoch": 6.28, |
|
"learning_rate": 3.7246049661399554e-05, |
|
"loss": 0.0632, |
|
"step": 2780 |
|
}, |
|
{ |
|
"epoch": 6.3, |
|
"learning_rate": 3.702031602708804e-05, |
|
"loss": 0.0572, |
|
"step": 2790 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"learning_rate": 3.679458239277653e-05, |
|
"loss": 0.0666, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 6.32, |
|
"eval_accuracy": 0.9836559139784946, |
|
"eval_f1": 0.14606741573033707, |
|
"eval_loss": 0.0642370954155922, |
|
"eval_precision": 0.8666666666666667, |
|
"eval_recall": 0.07975460122699386, |
|
"eval_roc_auc_score": 0.873464327271775, |
|
"eval_runtime": 2.9767, |
|
"eval_samples_per_second": 62.485, |
|
"eval_steps_per_second": 4.031, |
|
"step": 2800 |
|
}, |
|
{ |
|
"epoch": 6.34, |
|
"learning_rate": 3.656884875846501e-05, |
|
"loss": 0.0576, |
|
"step": 2810 |
|
}, |
|
{ |
|
"epoch": 6.37, |
|
"learning_rate": 3.63431151241535e-05, |
|
"loss": 0.0639, |
|
"step": 2820 |
|
}, |
|
{ |
|
"epoch": 6.39, |
|
"learning_rate": 3.611738148984199e-05, |
|
"loss": 0.0621, |
|
"step": 2830 |
|
}, |
|
{ |
|
"epoch": 6.41, |
|
"learning_rate": 3.589164785553047e-05, |
|
"loss": 0.0632, |
|
"step": 2840 |
|
}, |
|
{ |
|
"epoch": 6.43, |
|
"learning_rate": 3.566591422121896e-05, |
|
"loss": 0.0587, |
|
"step": 2850 |
|
}, |
|
{ |
|
"epoch": 6.46, |
|
"learning_rate": 3.5440180586907454e-05, |
|
"loss": 0.0593, |
|
"step": 2860 |
|
}, |
|
{ |
|
"epoch": 6.48, |
|
"learning_rate": 3.521444695259594e-05, |
|
"loss": 0.0629, |
|
"step": 2870 |
|
}, |
|
{ |
|
"epoch": 6.5, |
|
"learning_rate": 3.498871331828443e-05, |
|
"loss": 0.064, |
|
"step": 2880 |
|
}, |
|
{ |
|
"epoch": 6.52, |
|
"learning_rate": 3.476297968397291e-05, |
|
"loss": 0.0657, |
|
"step": 2890 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"learning_rate": 3.4537246049661404e-05, |
|
"loss": 0.0589, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 6.55, |
|
"eval_accuracy": 0.9837096774193549, |
|
"eval_f1": 0.14647887323943662, |
|
"eval_loss": 0.06352133303880692, |
|
"eval_precision": 0.896551724137931, |
|
"eval_recall": 0.07975460122699386, |
|
"eval_roc_auc_score": 0.8781696278396138, |
|
"eval_runtime": 2.9622, |
|
"eval_samples_per_second": 62.792, |
|
"eval_steps_per_second": 4.051, |
|
"step": 2900 |
|
}, |
|
{ |
|
"epoch": 6.57, |
|
"learning_rate": 3.431151241534989e-05, |
|
"loss": 0.0613, |
|
"step": 2910 |
|
}, |
|
{ |
|
"epoch": 6.59, |
|
"learning_rate": 3.408577878103837e-05, |
|
"loss": 0.0629, |
|
"step": 2920 |
|
}, |
|
{ |
|
"epoch": 6.61, |
|
"learning_rate": 3.386004514672686e-05, |
|
"loss": 0.0628, |
|
"step": 2930 |
|
}, |
|
{ |
|
"epoch": 6.64, |
|
"learning_rate": 3.3634311512415354e-05, |
|
"loss": 0.0619, |
|
"step": 2940 |
|
}, |
|
{ |
|
"epoch": 6.66, |
|
"learning_rate": 3.3408577878103845e-05, |
|
"loss": 0.0605, |
|
"step": 2950 |
|
}, |
|
{ |
|
"epoch": 6.68, |
|
"learning_rate": 3.318284424379233e-05, |
|
"loss": 0.0556, |
|
"step": 2960 |
|
}, |
|
{ |
|
"epoch": 6.7, |
|
"learning_rate": 3.295711060948081e-05, |
|
"loss": 0.0582, |
|
"step": 2970 |
|
}, |
|
{ |
|
"epoch": 6.73, |
|
"learning_rate": 3.2731376975169304e-05, |
|
"loss": 0.0554, |
|
"step": 2980 |
|
}, |
|
{ |
|
"epoch": 6.75, |
|
"learning_rate": 3.250564334085779e-05, |
|
"loss": 0.0634, |
|
"step": 2990 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"learning_rate": 3.227990970654628e-05, |
|
"loss": 0.06, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 6.77, |
|
"eval_accuracy": 0.9836559139784946, |
|
"eval_f1": 0.15083798882681562, |
|
"eval_loss": 0.06312836706638336, |
|
"eval_precision": 0.84375, |
|
"eval_recall": 0.08282208588957055, |
|
"eval_roc_auc_score": 0.8836481782760178, |
|
"eval_runtime": 3.0134, |
|
"eval_samples_per_second": 61.725, |
|
"eval_steps_per_second": 3.982, |
|
"step": 3000 |
|
}, |
|
{ |
|
"epoch": 6.79, |
|
"learning_rate": 3.205417607223476e-05, |
|
"loss": 0.0619, |
|
"step": 3010 |
|
}, |
|
{ |
|
"epoch": 6.82, |
|
"learning_rate": 3.1828442437923254e-05, |
|
"loss": 0.0583, |
|
"step": 3020 |
|
}, |
|
{ |
|
"epoch": 6.84, |
|
"learning_rate": 3.1602708803611745e-05, |
|
"loss": 0.0579, |
|
"step": 3030 |
|
}, |
|
{ |
|
"epoch": 6.86, |
|
"learning_rate": 3.137697516930023e-05, |
|
"loss": 0.0589, |
|
"step": 3040 |
|
}, |
|
{ |
|
"epoch": 6.88, |
|
"learning_rate": 3.115124153498871e-05, |
|
"loss": 0.057, |
|
"step": 3050 |
|
}, |
|
{ |
|
"epoch": 6.91, |
|
"learning_rate": 3.0925507900677204e-05, |
|
"loss": 0.0616, |
|
"step": 3060 |
|
}, |
|
{ |
|
"epoch": 6.93, |
|
"learning_rate": 3.069977426636569e-05, |
|
"loss": 0.0598, |
|
"step": 3070 |
|
}, |
|
{ |
|
"epoch": 6.95, |
|
"learning_rate": 3.047404063205418e-05, |
|
"loss": 0.0584, |
|
"step": 3080 |
|
}, |
|
{ |
|
"epoch": 6.98, |
|
"learning_rate": 3.0248306997742666e-05, |
|
"loss": 0.0541, |
|
"step": 3090 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"learning_rate": 3.002257336343115e-05, |
|
"loss": 0.0644, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 7.0, |
|
"eval_accuracy": 0.9840860215053764, |
|
"eval_f1": 0.18232044198895028, |
|
"eval_loss": 0.06328887492418289, |
|
"eval_precision": 0.9166666666666666, |
|
"eval_recall": 0.10122699386503067, |
|
"eval_roc_auc_score": 0.8779561091523644, |
|
"eval_runtime": 2.8494, |
|
"eval_samples_per_second": 65.278, |
|
"eval_steps_per_second": 4.211, |
|
"step": 3100 |
|
}, |
|
{ |
|
"epoch": 7.02, |
|
"learning_rate": 2.979683972911964e-05, |
|
"loss": 0.0581, |
|
"step": 3110 |
|
}, |
|
{ |
|
"epoch": 7.04, |
|
"learning_rate": 2.9571106094808125e-05, |
|
"loss": 0.0614, |
|
"step": 3120 |
|
}, |
|
{ |
|
"epoch": 7.07, |
|
"learning_rate": 2.9345372460496616e-05, |
|
"loss": 0.056, |
|
"step": 3130 |
|
}, |
|
{ |
|
"epoch": 7.09, |
|
"learning_rate": 2.9119638826185104e-05, |
|
"loss": 0.0583, |
|
"step": 3140 |
|
}, |
|
{ |
|
"epoch": 7.11, |
|
"learning_rate": 2.8893905191873588e-05, |
|
"loss": 0.0582, |
|
"step": 3150 |
|
}, |
|
{ |
|
"epoch": 7.13, |
|
"learning_rate": 2.866817155756208e-05, |
|
"loss": 0.0562, |
|
"step": 3160 |
|
}, |
|
{ |
|
"epoch": 7.16, |
|
"learning_rate": 2.8442437923250566e-05, |
|
"loss": 0.0598, |
|
"step": 3170 |
|
}, |
|
{ |
|
"epoch": 7.18, |
|
"learning_rate": 2.8216704288939057e-05, |
|
"loss": 0.0611, |
|
"step": 3180 |
|
}, |
|
{ |
|
"epoch": 7.2, |
|
"learning_rate": 2.799097065462754e-05, |
|
"loss": 0.0606, |
|
"step": 3190 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"learning_rate": 2.7765237020316025e-05, |
|
"loss": 0.0613, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 7.22, |
|
"eval_accuracy": 0.9840322580645161, |
|
"eval_f1": 0.1772853185595568, |
|
"eval_loss": 0.0627669170498848, |
|
"eval_precision": 0.9142857142857143, |
|
"eval_recall": 0.09815950920245399, |
|
"eval_roc_auc_score": 0.8836935845691789, |
|
"eval_runtime": 2.8418, |
|
"eval_samples_per_second": 65.453, |
|
"eval_steps_per_second": 4.223, |
|
"step": 3200 |
|
}, |
|
{ |
|
"epoch": 7.25, |
|
"learning_rate": 2.7539503386004516e-05, |
|
"loss": 0.0568, |
|
"step": 3210 |
|
}, |
|
{ |
|
"epoch": 7.27, |
|
"learning_rate": 2.7313769751693004e-05, |
|
"loss": 0.0561, |
|
"step": 3220 |
|
}, |
|
{ |
|
"epoch": 7.29, |
|
"learning_rate": 2.7088036117381494e-05, |
|
"loss": 0.0569, |
|
"step": 3230 |
|
}, |
|
{ |
|
"epoch": 7.31, |
|
"learning_rate": 2.686230248306998e-05, |
|
"loss": 0.0601, |
|
"step": 3240 |
|
}, |
|
{ |
|
"epoch": 7.34, |
|
"learning_rate": 2.6636568848758463e-05, |
|
"loss": 0.0591, |
|
"step": 3250 |
|
}, |
|
{ |
|
"epoch": 7.36, |
|
"learning_rate": 2.6410835214446954e-05, |
|
"loss": 0.0573, |
|
"step": 3260 |
|
}, |
|
{ |
|
"epoch": 7.38, |
|
"learning_rate": 2.618510158013544e-05, |
|
"loss": 0.0534, |
|
"step": 3270 |
|
}, |
|
{ |
|
"epoch": 7.4, |
|
"learning_rate": 2.5959367945823932e-05, |
|
"loss": 0.0596, |
|
"step": 3280 |
|
}, |
|
{ |
|
"epoch": 7.43, |
|
"learning_rate": 2.5733634311512416e-05, |
|
"loss": 0.0563, |
|
"step": 3290 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"learning_rate": 2.5507900677200903e-05, |
|
"loss": 0.0544, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 7.45, |
|
"eval_accuracy": 0.9844086021505376, |
|
"eval_f1": 0.21195652173913043, |
|
"eval_loss": 0.0618860088288784, |
|
"eval_precision": 0.9285714285714286, |
|
"eval_recall": 0.1196319018404908, |
|
"eval_roc_auc_score": 0.8914865970022781, |
|
"eval_runtime": 3.0409, |
|
"eval_samples_per_second": 61.167, |
|
"eval_steps_per_second": 3.946, |
|
"step": 3300 |
|
}, |
|
{ |
|
"epoch": 7.47, |
|
"learning_rate": 2.5282167042889394e-05, |
|
"loss": 0.0606, |
|
"step": 3310 |
|
}, |
|
{ |
|
"epoch": 7.49, |
|
"learning_rate": 2.505643340857788e-05, |
|
"loss": 0.0618, |
|
"step": 3320 |
|
}, |
|
{ |
|
"epoch": 7.52, |
|
"learning_rate": 2.4830699774266366e-05, |
|
"loss": 0.0576, |
|
"step": 3330 |
|
}, |
|
{ |
|
"epoch": 7.54, |
|
"learning_rate": 2.4604966139954853e-05, |
|
"loss": 0.0587, |
|
"step": 3340 |
|
}, |
|
{ |
|
"epoch": 7.56, |
|
"learning_rate": 2.4379232505643344e-05, |
|
"loss": 0.0558, |
|
"step": 3350 |
|
}, |
|
{ |
|
"epoch": 7.58, |
|
"learning_rate": 2.415349887133183e-05, |
|
"loss": 0.0591, |
|
"step": 3360 |
|
}, |
|
{ |
|
"epoch": 7.61, |
|
"learning_rate": 2.3927765237020316e-05, |
|
"loss": 0.0578, |
|
"step": 3370 |
|
}, |
|
{ |
|
"epoch": 7.63, |
|
"learning_rate": 2.3702031602708803e-05, |
|
"loss": 0.0603, |
|
"step": 3380 |
|
}, |
|
{ |
|
"epoch": 7.65, |
|
"learning_rate": 2.3476297968397294e-05, |
|
"loss": 0.0623, |
|
"step": 3390 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"learning_rate": 2.3250564334085782e-05, |
|
"loss": 0.0528, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 7.67, |
|
"eval_accuracy": 0.9840860215053764, |
|
"eval_f1": 0.18232044198895028, |
|
"eval_loss": 0.06114303320646286, |
|
"eval_precision": 0.9166666666666666, |
|
"eval_recall": 0.10122699386503067, |
|
"eval_roc_auc_score": 0.8927644358440132, |
|
"eval_runtime": 2.9925, |
|
"eval_samples_per_second": 62.156, |
|
"eval_steps_per_second": 4.01, |
|
"step": 3400 |
|
}, |
|
{ |
|
"epoch": 7.7, |
|
"learning_rate": 2.3024830699774266e-05, |
|
"loss": 0.0545, |
|
"step": 3410 |
|
}, |
|
{ |
|
"epoch": 7.72, |
|
"learning_rate": 2.2799097065462753e-05, |
|
"loss": 0.058, |
|
"step": 3420 |
|
}, |
|
{ |
|
"epoch": 7.74, |
|
"learning_rate": 2.2573363431151244e-05, |
|
"loss": 0.0537, |
|
"step": 3430 |
|
}, |
|
{ |
|
"epoch": 7.77, |
|
"learning_rate": 2.2347629796839732e-05, |
|
"loss": 0.057, |
|
"step": 3440 |
|
}, |
|
{ |
|
"epoch": 7.79, |
|
"learning_rate": 2.212189616252822e-05, |
|
"loss": 0.0545, |
|
"step": 3450 |
|
}, |
|
{ |
|
"epoch": 7.81, |
|
"learning_rate": 2.1896162528216703e-05, |
|
"loss": 0.0536, |
|
"step": 3460 |
|
}, |
|
{ |
|
"epoch": 7.83, |
|
"learning_rate": 2.167042889390519e-05, |
|
"loss": 0.0536, |
|
"step": 3470 |
|
}, |
|
{ |
|
"epoch": 7.86, |
|
"learning_rate": 2.1444695259593682e-05, |
|
"loss": 0.0594, |
|
"step": 3480 |
|
}, |
|
{ |
|
"epoch": 7.88, |
|
"learning_rate": 2.121896162528217e-05, |
|
"loss": 0.0555, |
|
"step": 3490 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"learning_rate": 2.0993227990970657e-05, |
|
"loss": 0.0554, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 7.9, |
|
"eval_accuracy": 0.9841935483870968, |
|
"eval_f1": 0.20540540540540542, |
|
"eval_loss": 0.06044604256749153, |
|
"eval_precision": 0.8636363636363636, |
|
"eval_recall": 0.1165644171779141, |
|
"eval_roc_auc_score": 0.8967183923520023, |
|
"eval_runtime": 2.9873, |
|
"eval_samples_per_second": 62.264, |
|
"eval_steps_per_second": 4.017, |
|
"step": 3500 |
|
}, |
|
{ |
|
"epoch": 7.92, |
|
"learning_rate": 2.076749435665914e-05, |
|
"loss": 0.0565, |
|
"step": 3510 |
|
}, |
|
{ |
|
"epoch": 7.95, |
|
"learning_rate": 2.054176072234763e-05, |
|
"loss": 0.0578, |
|
"step": 3520 |
|
}, |
|
{ |
|
"epoch": 7.97, |
|
"learning_rate": 2.031602708803612e-05, |
|
"loss": 0.0583, |
|
"step": 3530 |
|
}, |
|
{ |
|
"epoch": 7.99, |
|
"learning_rate": 2.0090293453724607e-05, |
|
"loss": 0.056, |
|
"step": 3540 |
|
}, |
|
{ |
|
"epoch": 8.01, |
|
"learning_rate": 1.986455981941309e-05, |
|
"loss": 0.048, |
|
"step": 3550 |
|
}, |
|
{ |
|
"epoch": 8.04, |
|
"learning_rate": 1.963882618510158e-05, |
|
"loss": 0.0586, |
|
"step": 3560 |
|
}, |
|
{ |
|
"epoch": 8.06, |
|
"learning_rate": 1.941309255079007e-05, |
|
"loss": 0.0566, |
|
"step": 3570 |
|
}, |
|
{ |
|
"epoch": 8.08, |
|
"learning_rate": 1.9187358916478557e-05, |
|
"loss": 0.0574, |
|
"step": 3580 |
|
}, |
|
{ |
|
"epoch": 8.1, |
|
"learning_rate": 1.8961625282167044e-05, |
|
"loss": 0.056, |
|
"step": 3590 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"learning_rate": 1.873589164785553e-05, |
|
"loss": 0.0522, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 8.13, |
|
"eval_accuracy": 0.9843010752688172, |
|
"eval_f1": 0.2108108108108108, |
|
"eval_loss": 0.06024692952632904, |
|
"eval_precision": 0.8863636363636364, |
|
"eval_recall": 0.1196319018404908, |
|
"eval_roc_auc_score": 0.8948983637619845, |
|
"eval_runtime": 2.8994, |
|
"eval_samples_per_second": 64.151, |
|
"eval_steps_per_second": 4.139, |
|
"step": 3600 |
|
}, |
|
{ |
|
"epoch": 8.15, |
|
"learning_rate": 1.851015801354402e-05, |
|
"loss": 0.0535, |
|
"step": 3610 |
|
}, |
|
{ |
|
"epoch": 8.17, |
|
"learning_rate": 1.8284424379232507e-05, |
|
"loss": 0.0524, |
|
"step": 3620 |
|
}, |
|
{ |
|
"epoch": 8.19, |
|
"learning_rate": 1.8058690744920994e-05, |
|
"loss": 0.055, |
|
"step": 3630 |
|
}, |
|
{ |
|
"epoch": 8.22, |
|
"learning_rate": 1.783295711060948e-05, |
|
"loss": 0.0548, |
|
"step": 3640 |
|
}, |
|
{ |
|
"epoch": 8.24, |
|
"learning_rate": 1.760722347629797e-05, |
|
"loss": 0.0589, |
|
"step": 3650 |
|
}, |
|
{ |
|
"epoch": 8.26, |
|
"learning_rate": 1.7381489841986457e-05, |
|
"loss": 0.0517, |
|
"step": 3660 |
|
}, |
|
{ |
|
"epoch": 8.28, |
|
"learning_rate": 1.7155756207674944e-05, |
|
"loss": 0.0546, |
|
"step": 3670 |
|
}, |
|
{ |
|
"epoch": 8.31, |
|
"learning_rate": 1.693002257336343e-05, |
|
"loss": 0.0522, |
|
"step": 3680 |
|
}, |
|
{ |
|
"epoch": 8.33, |
|
"learning_rate": 1.6704288939051922e-05, |
|
"loss": 0.0536, |
|
"step": 3690 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"learning_rate": 1.6478555304740406e-05, |
|
"loss": 0.0518, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 8.35, |
|
"eval_accuracy": 0.984516129032258, |
|
"eval_f1": 0.22580645161290322, |
|
"eval_loss": 0.060264650732278824, |
|
"eval_precision": 0.9130434782608695, |
|
"eval_recall": 0.12883435582822086, |
|
"eval_roc_auc_score": 0.8884129014973838, |
|
"eval_runtime": 2.852, |
|
"eval_samples_per_second": 65.217, |
|
"eval_steps_per_second": 4.208, |
|
"step": 3700 |
|
}, |
|
{ |
|
"epoch": 8.37, |
|
"learning_rate": 1.6252821670428894e-05, |
|
"loss": 0.0581, |
|
"step": 3710 |
|
}, |
|
{ |
|
"epoch": 8.4, |
|
"learning_rate": 1.602708803611738e-05, |
|
"loss": 0.0525, |
|
"step": 3720 |
|
}, |
|
{ |
|
"epoch": 8.42, |
|
"learning_rate": 1.5801354401805872e-05, |
|
"loss": 0.0621, |
|
"step": 3730 |
|
}, |
|
{ |
|
"epoch": 8.44, |
|
"learning_rate": 1.5575620767494356e-05, |
|
"loss": 0.0526, |
|
"step": 3740 |
|
}, |
|
{ |
|
"epoch": 8.47, |
|
"learning_rate": 1.5349887133182844e-05, |
|
"loss": 0.0544, |
|
"step": 3750 |
|
}, |
|
{ |
|
"epoch": 8.49, |
|
"learning_rate": 1.5124153498871333e-05, |
|
"loss": 0.0538, |
|
"step": 3760 |
|
}, |
|
{ |
|
"epoch": 8.51, |
|
"learning_rate": 1.489841986455982e-05, |
|
"loss": 0.0554, |
|
"step": 3770 |
|
}, |
|
{ |
|
"epoch": 8.53, |
|
"learning_rate": 1.4672686230248308e-05, |
|
"loss": 0.0525, |
|
"step": 3780 |
|
}, |
|
{ |
|
"epoch": 8.56, |
|
"learning_rate": 1.4446952595936794e-05, |
|
"loss": 0.0522, |
|
"step": 3790 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"learning_rate": 1.4221218961625283e-05, |
|
"loss": 0.048, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 8.58, |
|
"eval_accuracy": 0.984247311827957, |
|
"eval_f1": 0.2059620596205962, |
|
"eval_loss": 0.059634629637002945, |
|
"eval_precision": 0.8837209302325582, |
|
"eval_recall": 0.1165644171779141, |
|
"eval_roc_auc_score": 0.89597376271628, |
|
"eval_runtime": 2.8398, |
|
"eval_samples_per_second": 65.497, |
|
"eval_steps_per_second": 4.226, |
|
"step": 3800 |
|
}, |
|
{ |
|
"epoch": 8.6, |
|
"learning_rate": 1.399548532731377e-05, |
|
"loss": 0.0561, |
|
"step": 3810 |
|
}, |
|
{ |
|
"epoch": 8.62, |
|
"learning_rate": 1.3769751693002258e-05, |
|
"loss": 0.057, |
|
"step": 3820 |
|
}, |
|
{ |
|
"epoch": 8.65, |
|
"learning_rate": 1.3544018058690747e-05, |
|
"loss": 0.0568, |
|
"step": 3830 |
|
}, |
|
{ |
|
"epoch": 8.67, |
|
"learning_rate": 1.3318284424379231e-05, |
|
"loss": 0.0523, |
|
"step": 3840 |
|
}, |
|
{ |
|
"epoch": 8.69, |
|
"learning_rate": 1.309255079006772e-05, |
|
"loss": 0.0569, |
|
"step": 3850 |
|
}, |
|
{ |
|
"epoch": 8.71, |
|
"learning_rate": 1.2866817155756208e-05, |
|
"loss": 0.0506, |
|
"step": 3860 |
|
}, |
|
{ |
|
"epoch": 8.74, |
|
"learning_rate": 1.2641083521444697e-05, |
|
"loss": 0.0537, |
|
"step": 3870 |
|
}, |
|
{ |
|
"epoch": 8.76, |
|
"learning_rate": 1.2415349887133183e-05, |
|
"loss": 0.0547, |
|
"step": 3880 |
|
}, |
|
{ |
|
"epoch": 8.78, |
|
"learning_rate": 1.2189616252821672e-05, |
|
"loss": 0.0535, |
|
"step": 3890 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"learning_rate": 1.1963882618510158e-05, |
|
"loss": 0.056, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 8.8, |
|
"eval_accuracy": 0.9843548387096774, |
|
"eval_f1": 0.2113821138211382, |
|
"eval_loss": 0.05938936397433281, |
|
"eval_precision": 0.9069767441860465, |
|
"eval_recall": 0.1196319018404908, |
|
"eval_roc_auc_score": 0.8990816178539223, |
|
"eval_runtime": 3.0308, |
|
"eval_samples_per_second": 61.371, |
|
"eval_steps_per_second": 3.959, |
|
"step": 3900 |
|
}, |
|
{ |
|
"epoch": 8.83, |
|
"learning_rate": 1.1738148984198647e-05, |
|
"loss": 0.0582, |
|
"step": 3910 |
|
}, |
|
{ |
|
"epoch": 8.85, |
|
"learning_rate": 1.1512415349887133e-05, |
|
"loss": 0.0548, |
|
"step": 3920 |
|
}, |
|
{ |
|
"epoch": 8.87, |
|
"learning_rate": 1.1286681715575622e-05, |
|
"loss": 0.0555, |
|
"step": 3930 |
|
}, |
|
{ |
|
"epoch": 8.89, |
|
"learning_rate": 1.106094808126411e-05, |
|
"loss": 0.0544, |
|
"step": 3940 |
|
}, |
|
{ |
|
"epoch": 8.92, |
|
"learning_rate": 1.0835214446952595e-05, |
|
"loss": 0.055, |
|
"step": 3950 |
|
}, |
|
{ |
|
"epoch": 8.94, |
|
"learning_rate": 1.0609480812641085e-05, |
|
"loss": 0.0527, |
|
"step": 3960 |
|
}, |
|
{ |
|
"epoch": 8.96, |
|
"learning_rate": 1.038374717832957e-05, |
|
"loss": 0.0526, |
|
"step": 3970 |
|
}, |
|
{ |
|
"epoch": 8.98, |
|
"learning_rate": 1.015801354401806e-05, |
|
"loss": 0.0561, |
|
"step": 3980 |
|
}, |
|
{ |
|
"epoch": 9.01, |
|
"learning_rate": 9.932279909706545e-06, |
|
"loss": 0.0506, |
|
"step": 3990 |
|
}, |
|
{ |
|
"epoch": 9.03, |
|
"learning_rate": 9.706546275395035e-06, |
|
"loss": 0.0516, |
|
"step": 4000 |
|
}, |
|
{ |
|
"epoch": 9.03, |
|
"eval_accuracy": 0.9843010752688172, |
|
"eval_f1": 0.2108108108108108, |
|
"eval_loss": 0.059149596840143204, |
|
"eval_precision": 0.8863636363636364, |
|
"eval_recall": 0.1196319018404908, |
|
"eval_roc_auc_score": 0.9001329288116611, |
|
"eval_runtime": 2.9531, |
|
"eval_samples_per_second": 62.986, |
|
"eval_steps_per_second": 4.064, |
|
"step": 4000 |
|
} |
|
], |
|
"max_steps": 4430, |
|
"num_train_epochs": 10, |
|
"total_flos": 2.5440967716765696e+16, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |