|
{
  "best_metric": 1.047837495803833,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 1.322314049586777,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.006611570247933884, "grad_norm": 0.9148093461990356, "learning_rate": 5e-06, "loss": 4.3726, "step": 1 },
    { "epoch": 0.006611570247933884, "eval_loss": 1.6257771253585815, "eval_runtime": 14.0331, "eval_samples_per_second": 18.171, "eval_steps_per_second": 9.121, "step": 1 },
    { "epoch": 0.013223140495867768, "grad_norm": 0.842186689376831, "learning_rate": 1e-05, "loss": 4.5082, "step": 2 },
    { "epoch": 0.019834710743801654, "grad_norm": 0.7833890318870544, "learning_rate": 1.5e-05, "loss": 4.707, "step": 3 },
    { "epoch": 0.026446280991735537, "grad_norm": 1.024601697921753, "learning_rate": 2e-05, "loss": 4.7761, "step": 4 },
    { "epoch": 0.03305785123966942, "grad_norm": 0.9558944702148438, "learning_rate": 2.5e-05, "loss": 5.4576, "step": 5 },
    { "epoch": 0.03966942148760331, "grad_norm": 0.9868462681770325, "learning_rate": 3e-05, "loss": 4.9784, "step": 6 },
    { "epoch": 0.04628099173553719, "grad_norm": 1.144509196281433, "learning_rate": 3.5e-05, "loss": 4.9578, "step": 7 },
    { "epoch": 0.05289256198347107, "grad_norm": 1.2102956771850586, "learning_rate": 4e-05, "loss": 5.301, "step": 8 },
    { "epoch": 0.05950413223140496, "grad_norm": 1.0776772499084473, "learning_rate": 4.5e-05, "loss": 5.2147, "step": 9 },
    { "epoch": 0.06611570247933884, "grad_norm": 1.1019762754440308, "learning_rate": 5e-05, "loss": 5.2903, "step": 10 },
    { "epoch": 0.07272727272727272, "grad_norm": 1.2564432621002197, "learning_rate": 5.500000000000001e-05, "loss": 5.5129, "step": 11 },
    { "epoch": 0.07933884297520662, "grad_norm": 1.0597630739212036, "learning_rate": 6e-05, "loss": 5.3251, "step": 12 },
    { "epoch": 0.0859504132231405, "grad_norm": 1.242934226989746, "learning_rate": 6.500000000000001e-05, "loss": 4.8806, "step": 13 },
    { "epoch": 0.09256198347107437, "grad_norm": 1.4951207637786865, "learning_rate": 7e-05, "loss": 5.2546, "step": 14 },
    { "epoch": 0.09917355371900827, "grad_norm": 1.3901413679122925, "learning_rate": 7.500000000000001e-05, "loss": 5.3295, "step": 15 },
    { "epoch": 0.10578512396694215, "grad_norm": 1.412446141242981, "learning_rate": 8e-05, "loss": 5.5863, "step": 16 },
    { "epoch": 0.11239669421487604, "grad_norm": 1.4029113054275513, "learning_rate": 8.5e-05, "loss": 5.4345, "step": 17 },
    { "epoch": 0.11900826446280992, "grad_norm": 1.4717167615890503, "learning_rate": 9e-05, "loss": 5.2476, "step": 18 },
    { "epoch": 0.1256198347107438, "grad_norm": 1.39309823513031, "learning_rate": 9.5e-05, "loss": 5.0354, "step": 19 },
    { "epoch": 0.1322314049586777, "grad_norm": 1.5791093111038208, "learning_rate": 0.0001, "loss": 5.2546, "step": 20 },
    { "epoch": 0.13884297520661157, "grad_norm": 1.4860973358154297, "learning_rate": 9.999238475781957e-05, "loss": 5.047, "step": 21 },
    { "epoch": 0.14545454545454545, "grad_norm": 1.7063889503479004, "learning_rate": 9.99695413509548e-05, "loss": 5.1362, "step": 22 },
    { "epoch": 0.15206611570247933, "grad_norm": 1.6598552465438843, "learning_rate": 9.99314767377287e-05, "loss": 5.7295, "step": 23 },
    { "epoch": 0.15867768595041323, "grad_norm": 1.758675456047058, "learning_rate": 9.987820251299122e-05, "loss": 5.0831, "step": 24 },
    { "epoch": 0.1652892561983471, "grad_norm": 1.7304877042770386, "learning_rate": 9.980973490458728e-05, "loss": 5.3635, "step": 25 },
    { "epoch": 0.171900826446281, "grad_norm": 1.9273478984832764, "learning_rate": 9.972609476841367e-05, "loss": 5.7037, "step": 26 },
    { "epoch": 0.17851239669421487, "grad_norm": 2.0695853233337402, "learning_rate": 9.962730758206611e-05, "loss": 4.8804, "step": 27 },
    { "epoch": 0.18512396694214875, "grad_norm": 1.8753983974456787, "learning_rate": 9.951340343707852e-05, "loss": 5.7339, "step": 28 },
    { "epoch": 0.19173553719008266, "grad_norm": 1.8280308246612549, "learning_rate": 9.938441702975689e-05, "loss": 5.7124, "step": 29 },
    { "epoch": 0.19834710743801653, "grad_norm": 2.103041172027588, "learning_rate": 9.924038765061042e-05, "loss": 6.043, "step": 30 },
    { "epoch": 0.2049586776859504, "grad_norm": 2.314613103866577, "learning_rate": 9.908135917238321e-05, "loss": 6.096, "step": 31 },
    { "epoch": 0.2115702479338843, "grad_norm": 2.1027143001556396, "learning_rate": 9.890738003669029e-05, "loss": 5.7764, "step": 32 },
    { "epoch": 0.21818181818181817, "grad_norm": 2.3914480209350586, "learning_rate": 9.871850323926177e-05, "loss": 5.6048, "step": 33 },
    { "epoch": 0.22479338842975208, "grad_norm": 3.5736582279205322, "learning_rate": 9.851478631379982e-05, "loss": 4.9323, "step": 34 },
    { "epoch": 0.23140495867768596, "grad_norm": 2.4157803058624268, "learning_rate": 9.829629131445342e-05, "loss": 5.3481, "step": 35 },
    { "epoch": 0.23801652892561984, "grad_norm": 3.495314121246338, "learning_rate": 9.806308479691595e-05, "loss": 6.1046, "step": 36 },
    { "epoch": 0.24462809917355371, "grad_norm": 3.9649362564086914, "learning_rate": 9.781523779815179e-05, "loss": 5.6288, "step": 37 },
    { "epoch": 0.2512396694214876, "grad_norm": 1.5807100534439087, "learning_rate": 9.755282581475769e-05, "loss": 4.219, "step": 38 },
    { "epoch": 0.2578512396694215, "grad_norm": 1.2698018550872803, "learning_rate": 9.727592877996585e-05, "loss": 4.1076, "step": 39 },
    { "epoch": 0.2644628099173554, "grad_norm": 1.089763879776001, "learning_rate": 9.698463103929542e-05, "loss": 3.9567, "step": 40 },
    { "epoch": 0.27107438016528923, "grad_norm": 0.9421190023422241, "learning_rate": 9.667902132486009e-05, "loss": 4.3975, "step": 41 },
    { "epoch": 0.27768595041322314, "grad_norm": 0.7669892907142639, "learning_rate": 9.635919272833938e-05, "loss": 4.5829, "step": 42 },
    { "epoch": 0.28429752066115704, "grad_norm": 0.7751649022102356, "learning_rate": 9.602524267262203e-05, "loss": 4.4129, "step": 43 },
    { "epoch": 0.2909090909090909, "grad_norm": 0.9909483194351196, "learning_rate": 9.567727288213005e-05, "loss": 4.7576, "step": 44 },
    { "epoch": 0.2975206611570248, "grad_norm": 0.7943483591079712, "learning_rate": 9.53153893518325e-05, "loss": 4.4143, "step": 45 },
    { "epoch": 0.30413223140495865, "grad_norm": 0.8910554051399231, "learning_rate": 9.493970231495835e-05, "loss": 4.8309, "step": 46 },
    { "epoch": 0.31074380165289256, "grad_norm": 0.8945802450180054, "learning_rate": 9.45503262094184e-05, "loss": 4.3729, "step": 47 },
    { "epoch": 0.31735537190082647, "grad_norm": 0.9367103576660156, "learning_rate": 9.414737964294636e-05, "loss": 4.7947, "step": 48 },
    { "epoch": 0.3239669421487603, "grad_norm": 0.9118549227714539, "learning_rate": 9.373098535696979e-05, "loss": 4.9757, "step": 49 },
    { "epoch": 0.3305785123966942, "grad_norm": 0.9432209134101868, "learning_rate": 9.330127018922194e-05, "loss": 4.6552, "step": 50 },
    { "epoch": 0.3305785123966942, "eval_loss": 1.1729106903076172, "eval_runtime": 14.1768, "eval_samples_per_second": 17.987, "eval_steps_per_second": 9.029, "step": 50 },
    { "epoch": 0.3371900826446281, "grad_norm": 0.9313036203384399, "learning_rate": 9.285836503510562e-05, "loss": 4.8954, "step": 51 },
    { "epoch": 0.343801652892562, "grad_norm": 0.8967971205711365, "learning_rate": 9.24024048078213e-05, "loss": 4.2944, "step": 52 },
    { "epoch": 0.3504132231404959, "grad_norm": 0.9805819392204285, "learning_rate": 9.193352839727121e-05, "loss": 5.1855, "step": 53 },
    { "epoch": 0.35702479338842974, "grad_norm": 1.110192060470581, "learning_rate": 9.145187862775209e-05, "loss": 5.0659, "step": 54 },
    { "epoch": 0.36363636363636365, "grad_norm": 1.0864770412445068, "learning_rate": 9.09576022144496e-05, "loss": 4.6465, "step": 55 },
    { "epoch": 0.3702479338842975, "grad_norm": 1.6741044521331787, "learning_rate": 9.045084971874738e-05, "loss": 4.5788, "step": 56 },
    { "epoch": 0.3768595041322314, "grad_norm": 1.2498528957366943, "learning_rate": 8.993177550236464e-05, "loss": 4.8934, "step": 57 },
    { "epoch": 0.3834710743801653, "grad_norm": 1.2537343502044678, "learning_rate": 8.940053768033609e-05, "loss": 4.7502, "step": 58 },
    { "epoch": 0.39008264462809916, "grad_norm": 1.2770891189575195, "learning_rate": 8.885729807284856e-05, "loss": 4.8936, "step": 59 },
    { "epoch": 0.39669421487603307, "grad_norm": 1.4430948495864868, "learning_rate": 8.83022221559489e-05, "loss": 4.8503, "step": 60 },
    { "epoch": 0.4033057851239669, "grad_norm": 1.5071772336959839, "learning_rate": 8.773547901113862e-05, "loss": 5.3403, "step": 61 },
    { "epoch": 0.4099173553719008, "grad_norm": 1.624732494354248, "learning_rate": 8.715724127386972e-05, "loss": 5.3317, "step": 62 },
    { "epoch": 0.41652892561983473, "grad_norm": 1.6274948120117188, "learning_rate": 8.656768508095853e-05, "loss": 4.4587, "step": 63 },
    { "epoch": 0.4231404958677686, "grad_norm": 1.6330101490020752, "learning_rate": 8.596699001693255e-05, "loss": 4.1029, "step": 64 },
    { "epoch": 0.4297520661157025, "grad_norm": 1.6284996271133423, "learning_rate": 8.535533905932738e-05, "loss": 5.1398, "step": 65 },
    { "epoch": 0.43636363636363634, "grad_norm": 1.741026520729065, "learning_rate": 8.473291852294987e-05, "loss": 5.2479, "step": 66 },
    { "epoch": 0.44297520661157025, "grad_norm": 1.8886727094650269, "learning_rate": 8.409991800312493e-05, "loss": 5.6305, "step": 67 },
    { "epoch": 0.44958677685950416, "grad_norm": 1.9955488443374634, "learning_rate": 8.345653031794292e-05, "loss": 5.6926, "step": 68 },
    { "epoch": 0.456198347107438, "grad_norm": 2.239786386489868, "learning_rate": 8.280295144952536e-05, "loss": 5.9947, "step": 69 },
    { "epoch": 0.4628099173553719, "grad_norm": 2.239755630493164, "learning_rate": 8.213938048432697e-05, "loss": 4.4617, "step": 70 },
    { "epoch": 0.46942148760330576, "grad_norm": 2.663750648498535, "learning_rate": 8.146601955249188e-05, "loss": 4.2143, "step": 71 },
    { "epoch": 0.47603305785123967, "grad_norm": 2.472973108291626, "learning_rate": 8.07830737662829e-05, "loss": 4.8717, "step": 72 },
    { "epoch": 0.4826446280991736, "grad_norm": 3.02223801612854, "learning_rate": 8.009075115760243e-05, "loss": 5.2497, "step": 73 },
    { "epoch": 0.48925619834710743, "grad_norm": 4.158394813537598, "learning_rate": 7.938926261462366e-05, "loss": 4.8917, "step": 74 },
    { "epoch": 0.49586776859504134, "grad_norm": 1.0667253732681274, "learning_rate": 7.86788218175523e-05, "loss": 3.4973, "step": 75 },
    { "epoch": 0.5024793388429752, "grad_norm": 1.0640274286270142, "learning_rate": 7.795964517353735e-05, "loss": 3.6, "step": 76 },
    { "epoch": 0.509090909090909, "grad_norm": 0.8343002200126648, "learning_rate": 7.723195175075136e-05, "loss": 3.9431, "step": 77 },
    { "epoch": 0.515702479338843, "grad_norm": 0.8102278709411621, "learning_rate": 7.649596321166024e-05, "loss": 4.2592, "step": 78 },
    { "epoch": 0.5223140495867769, "grad_norm": 0.7733540534973145, "learning_rate": 7.575190374550272e-05, "loss": 4.4856, "step": 79 },
    { "epoch": 0.5289256198347108, "grad_norm": 0.6886125802993774, "learning_rate": 7.500000000000001e-05, "loss": 4.4077, "step": 80 },
    { "epoch": 0.5355371900826447, "grad_norm": 0.7023859620094299, "learning_rate": 7.424048101231686e-05, "loss": 4.4471, "step": 81 },
    { "epoch": 0.5421487603305785, "grad_norm": 0.733121931552887, "learning_rate": 7.347357813929454e-05, "loss": 4.2295, "step": 82 },
    { "epoch": 0.5487603305785124, "grad_norm": 1.4499454498291016, "learning_rate": 7.269952498697734e-05, "loss": 4.4026, "step": 83 },
    { "epoch": 0.5553719008264463, "grad_norm": 0.795872151851654, "learning_rate": 7.191855733945387e-05, "loss": 4.5716, "step": 84 },
    { "epoch": 0.5619834710743802, "grad_norm": 0.8914481997489929, "learning_rate": 7.113091308703498e-05, "loss": 4.6541, "step": 85 },
    { "epoch": 0.5685950413223141, "grad_norm": 0.9090842008590698, "learning_rate": 7.033683215379002e-05, "loss": 4.3894, "step": 86 },
    { "epoch": 0.5752066115702479, "grad_norm": 0.9541610479354858, "learning_rate": 6.953655642446368e-05, "loss": 4.1603, "step": 87 },
    { "epoch": 0.5818181818181818, "grad_norm": 1.0280675888061523, "learning_rate": 6.873032967079561e-05, "loss": 4.4232, "step": 88 },
    { "epoch": 0.5884297520661157, "grad_norm": 1.1509857177734375, "learning_rate": 6.7918397477265e-05, "loss": 4.442, "step": 89 },
    { "epoch": 0.5950413223140496, "grad_norm": 1.0895159244537354, "learning_rate": 6.710100716628344e-05, "loss": 4.802, "step": 90 },
    { "epoch": 0.6016528925619835, "grad_norm": 1.1312400102615356, "learning_rate": 6.627840772285784e-05, "loss": 4.6077, "step": 91 },
    { "epoch": 0.6082644628099173, "grad_norm": 1.2083485126495361, "learning_rate": 6.545084971874738e-05, "loss": 4.686, "step": 92 },
    { "epoch": 0.6148760330578512, "grad_norm": 1.1744060516357422, "learning_rate": 6.461858523613684e-05, "loss": 4.7755, "step": 93 },
    { "epoch": 0.6214876033057851, "grad_norm": 1.1983288526535034, "learning_rate": 6.378186779084995e-05, "loss": 4.5161, "step": 94 },
    { "epoch": 0.628099173553719, "grad_norm": 1.2709901332855225, "learning_rate": 6.294095225512603e-05, "loss": 4.6903, "step": 95 },
    { "epoch": 0.6347107438016529, "grad_norm": 1.2853686809539795, "learning_rate": 6.209609477998338e-05, "loss": 4.8628, "step": 96 },
    { "epoch": 0.6413223140495867, "grad_norm": 1.4955391883850098, "learning_rate": 6.124755271719325e-05, "loss": 5.4237, "step": 97 },
    { "epoch": 0.6479338842975206, "grad_norm": 1.4081076383590698, "learning_rate": 6.0395584540887963e-05, "loss": 5.2527, "step": 98 },
    { "epoch": 0.6545454545454545, "grad_norm": 1.5048182010650635, "learning_rate": 5.9540449768827246e-05, "loss": 4.8745, "step": 99 },
    { "epoch": 0.6611570247933884, "grad_norm": 1.615033507347107, "learning_rate": 5.868240888334653e-05, "loss": 4.8292, "step": 100 },
    { "epoch": 0.6611570247933884, "eval_loss": 1.0880825519561768, "eval_runtime": 14.1578, "eval_samples_per_second": 18.011, "eval_steps_per_second": 9.041, "step": 100 },
    { "epoch": 0.6677685950413224, "grad_norm": 1.748067021369934, "learning_rate": 5.782172325201155e-05, "loss": 4.4367, "step": 101 },
    { "epoch": 0.6743801652892562, "grad_norm": 1.8786406517028809, "learning_rate": 5.695865504800327e-05, "loss": 5.3412, "step": 102 },
    { "epoch": 0.6809917355371901, "grad_norm": 1.8346900939941406, "learning_rate": 5.6093467170257374e-05, "loss": 4.9618, "step": 103 },
    { "epoch": 0.687603305785124, "grad_norm": 2.015852928161621, "learning_rate": 5.522642316338268e-05, "loss": 5.5926, "step": 104 },
    { "epoch": 0.6942148760330579, "grad_norm": 2.249979019165039, "learning_rate": 5.435778713738292e-05, "loss": 4.8259, "step": 105 },
    { "epoch": 0.7008264462809918, "grad_norm": 2.263035774230957, "learning_rate": 5.348782368720626e-05, "loss": 5.104, "step": 106 },
    { "epoch": 0.7074380165289256, "grad_norm": 2.640855550765991, "learning_rate": 5.26167978121472e-05, "loss": 5.146, "step": 107 },
    { "epoch": 0.7140495867768595, "grad_norm": 3.1351351737976074, "learning_rate": 5.174497483512506e-05, "loss": 4.2835, "step": 108 },
    { "epoch": 0.7206611570247934, "grad_norm": 2.632106304168701, "learning_rate": 5.0872620321864185e-05, "loss": 4.8549, "step": 109 },
    { "epoch": 0.7272727272727273, "grad_norm": 2.945842742919922, "learning_rate": 5e-05, "loss": 4.8564, "step": 110 },
    { "epoch": 0.7338842975206612, "grad_norm": 3.851283550262451, "learning_rate": 4.912737967813583e-05, "loss": 5.1433, "step": 111 },
    { "epoch": 0.740495867768595, "grad_norm": 0.6680141687393188, "learning_rate": 4.825502516487497e-05, "loss": 3.4089, "step": 112 },
    { "epoch": 0.7471074380165289, "grad_norm": 0.7174019813537598, "learning_rate": 4.738320218785281e-05, "loss": 3.8666, "step": 113 },
    { "epoch": 0.7537190082644628, "grad_norm": 0.7039559483528137, "learning_rate": 4.6512176312793736e-05, "loss": 3.7843, "step": 114 },
    { "epoch": 0.7603305785123967, "grad_norm": 0.7135390043258667, "learning_rate": 4.564221286261709e-05, "loss": 3.8314, "step": 115 },
    { "epoch": 0.7669421487603306, "grad_norm": 0.7308880686759949, "learning_rate": 4.477357683661734e-05, "loss": 4.1674, "step": 116 },
    { "epoch": 0.7735537190082644, "grad_norm": 0.7223635315895081, "learning_rate": 4.390653282974264e-05, "loss": 3.8675, "step": 117 },
    { "epoch": 0.7801652892561983, "grad_norm": 0.7319607138633728, "learning_rate": 4.3041344951996746e-05, "loss": 3.9047, "step": 118 },
    { "epoch": 0.7867768595041322, "grad_norm": 0.7675497531890869, "learning_rate": 4.2178276747988446e-05, "loss": 4.1523, "step": 119 },
    { "epoch": 0.7933884297520661, "grad_norm": 0.8165543079376221, "learning_rate": 4.131759111665349e-05, "loss": 4.1521, "step": 120 },
    { "epoch": 0.8, "grad_norm": 0.8304678797721863, "learning_rate": 4.045955023117276e-05, "loss": 4.1744, "step": 121 },
    { "epoch": 0.8066115702479338, "grad_norm": 0.8759053945541382, "learning_rate": 3.960441545911204e-05, "loss": 4.2122, "step": 122 },
    { "epoch": 0.8132231404958677, "grad_norm": 0.8702043890953064, "learning_rate": 3.875244728280676e-05, "loss": 4.427, "step": 123 },
    { "epoch": 0.8198347107438017, "grad_norm": 0.9653429985046387, "learning_rate": 3.790390522001662e-05, "loss": 4.2492, "step": 124 },
    { "epoch": 0.8264462809917356, "grad_norm": 0.9605560302734375, "learning_rate": 3.705904774487396e-05, "loss": 4.6257, "step": 125 },
    { "epoch": 0.8330578512396695, "grad_norm": 0.9582360982894897, "learning_rate": 3.6218132209150045e-05, "loss": 3.8951, "step": 126 },
    { "epoch": 0.8396694214876033, "grad_norm": 1.1498273611068726, "learning_rate": 3.5381414763863166e-05, "loss": 4.4283, "step": 127 },
    { "epoch": 0.8462809917355372, "grad_norm": 1.1258846521377563, "learning_rate": 3.4549150281252636e-05, "loss": 4.792, "step": 128 },
    { "epoch": 0.8528925619834711, "grad_norm": 1.4514033794403076, "learning_rate": 3.372159227714218e-05, "loss": 4.5196, "step": 129 },
    { "epoch": 0.859504132231405, "grad_norm": 1.3290873765945435, "learning_rate": 3.289899283371657e-05, "loss": 4.5567, "step": 130 },
    { "epoch": 0.8661157024793389, "grad_norm": 1.4877480268478394, "learning_rate": 3.2081602522734986e-05, "loss": 4.5017, "step": 131 },
    { "epoch": 0.8727272727272727, "grad_norm": 1.3383150100708008, "learning_rate": 3.12696703292044e-05, "loss": 4.966, "step": 132 },
    { "epoch": 0.8793388429752066, "grad_norm": 1.3537639379501343, "learning_rate": 3.046344357553632e-05, "loss": 4.9792, "step": 133 },
    { "epoch": 0.8859504132231405, "grad_norm": 1.4388115406036377, "learning_rate": 2.9663167846209998e-05, "loss": 4.5168, "step": 134 },
    { "epoch": 0.8925619834710744, "grad_norm": 1.5659295320510864, "learning_rate": 2.886908691296504e-05, "loss": 4.675, "step": 135 },
    { "epoch": 0.8991735537190083, "grad_norm": 1.614781141281128, "learning_rate": 2.8081442660546125e-05, "loss": 4.4682, "step": 136 },
    { "epoch": 0.9057851239669421, "grad_norm": 1.750377893447876, "learning_rate": 2.7300475013022663e-05, "loss": 4.9327, "step": 137 },
    { "epoch": 0.912396694214876, "grad_norm": 2.194397449493408, "learning_rate": 2.6526421860705473e-05, "loss": 5.1882, "step": 138 },
    { "epoch": 0.9190082644628099, "grad_norm": 1.799832820892334, "learning_rate": 2.575951898768315e-05, "loss": 3.5735, "step": 139 },
    { "epoch": 0.9256198347107438, "grad_norm": 1.9374184608459473, "learning_rate": 2.500000000000001e-05, "loss": 5.4029, "step": 140 },
    { "epoch": 0.9322314049586777, "grad_norm": 2.2784645557403564, "learning_rate": 2.4248096254497288e-05, "loss": 5.2612, "step": 141 },
    { "epoch": 0.9388429752066115, "grad_norm": 2.151506185531616, "learning_rate": 2.350403678833976e-05, "loss": 5.2387, "step": 142 },
    { "epoch": 0.9454545454545454, "grad_norm": 2.211778163909912, "learning_rate": 2.2768048249248648e-05, "loss": 5.3567, "step": 143 },
    { "epoch": 0.9520661157024793, "grad_norm": 2.3867406845092773, "learning_rate": 2.2040354826462668e-05, "loss": 4.5479, "step": 144 },
    { "epoch": 0.9586776859504132, "grad_norm": 2.5046918392181396, "learning_rate": 2.132117818244771e-05, "loss": 4.0853, "step": 145 },
    { "epoch": 0.9652892561983472, "grad_norm": 2.8745017051696777, "learning_rate": 2.061073738537635e-05, "loss": 4.4699, "step": 146 },
    { "epoch": 0.971900826446281, "grad_norm": 3.2906479835510254, "learning_rate": 1.9909248842397584e-05, "loss": 5.3134, "step": 147 },
    { "epoch": 0.9785123966942149, "grad_norm": 3.933941125869751, "learning_rate": 1.9216926233717085e-05, "loss": 5.3385, "step": 148 },
    { "epoch": 0.9851239669421488, "grad_norm": 0.7648929953575134, "learning_rate": 1.8533980447508137e-05, "loss": 3.8574, "step": 149 },
    { "epoch": 0.9917355371900827, "grad_norm": 1.3944673538208008, "learning_rate": 1.7860619515673033e-05, "loss": 4.5333, "step": 150 },
    { "epoch": 0.9917355371900827, "eval_loss": 1.0583877563476562, "eval_runtime": 14.1642, "eval_samples_per_second": 18.003, "eval_steps_per_second": 9.037, "step": 150 },
    { "epoch": 0.9983471074380166, "grad_norm": 2.0663795471191406, "learning_rate": 1.7197048550474643e-05, "loss": 4.4136, "step": 151 },
    { "epoch": 1.0049586776859505, "grad_norm": 2.9669547080993652, "learning_rate": 1.6543469682057106e-05, "loss": 3.9654, "step": 152 },
    { "epoch": 1.0115702479338844, "grad_norm": 0.582588255405426, "learning_rate": 1.5900081996875083e-05, "loss": 3.8065, "step": 153 },
    { "epoch": 1.018181818181818, "grad_norm": 0.5635128021240234, "learning_rate": 1.526708147705013e-05, "loss": 3.506, "step": 154 },
    { "epoch": 1.024793388429752, "grad_norm": 0.643647313117981, "learning_rate": 1.4644660940672627e-05, "loss": 3.4885, "step": 155 },
    { "epoch": 1.031404958677686, "grad_norm": 0.7142451405525208, "learning_rate": 1.4033009983067452e-05, "loss": 3.9004, "step": 156 },
    { "epoch": 1.0380165289256198, "grad_norm": 0.7921721339225769, "learning_rate": 1.3432314919041478e-05, "loss": 3.6416, "step": 157 },
    { "epoch": 1.0446280991735537, "grad_norm": 1.8580390214920044, "learning_rate": 1.2842758726130283e-05, "loss": 4.0421, "step": 158 },
    { "epoch": 1.0512396694214876, "grad_norm": 0.788405179977417, "learning_rate": 1.22645209888614e-05, "loss": 4.1674, "step": 159 },
    { "epoch": 1.0578512396694215, "grad_norm": 0.8433780670166016, "learning_rate": 1.1697777844051105e-05, "loss": 3.8614, "step": 160 },
    { "epoch": 1.0644628099173554, "grad_norm": 1.0130237340927124, "learning_rate": 1.1142701927151456e-05, "loss": 4.057, "step": 161 },
    { "epoch": 1.0710743801652893, "grad_norm": 0.9251139760017395, "learning_rate": 1.0599462319663905e-05, "loss": 4.121, "step": 162 },
    { "epoch": 1.0776859504132232, "grad_norm": 1.0490461587905884, "learning_rate": 1.006822449763537e-05, "loss": 3.7495, "step": 163 },
    { "epoch": 1.084297520661157, "grad_norm": 0.962730884552002, "learning_rate": 9.549150281252633e-06, "loss": 3.7831, "step": 164 },
    { "epoch": 1.0909090909090908, "grad_norm": 1.0215270519256592, "learning_rate": 9.042397785550405e-06, "loss": 3.8681, "step": 165 },
    { "epoch": 1.0975206611570247, "grad_norm": 1.097220778465271, "learning_rate": 8.548121372247918e-06, "loss": 4.1963, "step": 166 },
    { "epoch": 1.1041322314049586, "grad_norm": 1.1427369117736816, "learning_rate": 8.066471602728803e-06, "loss": 4.4926, "step": 167 },
    { "epoch": 1.1107438016528925, "grad_norm": 1.1889334917068481, "learning_rate": 7.597595192178702e-06, "loss": 4.1799, "step": 168 },
    { "epoch": 1.1173553719008265, "grad_norm": 1.2882307767868042, "learning_rate": 7.1416349648943894e-06, "loss": 4.7466, "step": 169 },
    { "epoch": 1.1239669421487604, "grad_norm": 1.3270764350891113, "learning_rate": 6.698729810778065e-06, "loss": 4.0636, "step": 170 },
    { "epoch": 1.1305785123966943, "grad_norm": 1.4401865005493164, "learning_rate": 6.269014643030213e-06, "loss": 4.5355, "step": 171 },
    { "epoch": 1.1371900826446282, "grad_norm": 1.280537486076355, "learning_rate": 5.852620357053651e-06, "loss": 4.0943, "step": 172 },
    { "epoch": 1.143801652892562, "grad_norm": 1.436613917350769, "learning_rate": 5.449673790581611e-06, "loss": 4.197, "step": 173 },
    { "epoch": 1.1504132231404958, "grad_norm": 1.4517698287963867, "learning_rate": 5.060297685041659e-06, "loss": 4.1342, "step": 174 },
    { "epoch": 1.1570247933884297, "grad_norm": 1.565581202507019, "learning_rate": 4.684610648167503e-06, "loss": 4.4184, "step": 175 },
    { "epoch": 1.1636363636363636, "grad_norm": 1.619643211364746, "learning_rate": 4.322727117869951e-06, "loss": 4.4695, "step": 176 },
    { "epoch": 1.1702479338842975, "grad_norm": 1.635844111442566, "learning_rate": 3.974757327377981e-06, "loss": 4.2089, "step": 177 },
    { "epoch": 1.1768595041322314, "grad_norm": 1.7811866998672485, "learning_rate": 3.6408072716606346e-06, "loss": 4.0089, "step": 178 },
    { "epoch": 1.1834710743801653, "grad_norm": 1.8893351554870605, "learning_rate": 3.3209786751399187e-06, "loss": 3.1828, "step": 179 },
    { "epoch": 1.1900826446280992, "grad_norm": 1.8557778596878052, "learning_rate": 3.0153689607045845e-06, "loss": 4.2522, "step": 180 },
    { "epoch": 1.1966942148760331, "grad_norm": 1.9851038455963135, "learning_rate": 2.724071220034158e-06, "loss": 4.7269, "step": 181 },
    { "epoch": 1.203305785123967, "grad_norm": 2.1339123249053955, "learning_rate": 2.4471741852423237e-06, "loss": 5.0067, "step": 182 },
    { "epoch": 1.2099173553719007, "grad_norm": 2.1520884037017822, "learning_rate": 2.1847622018482283e-06, "loss": 4.8541, "step": 183 },
    { "epoch": 1.2165289256198348, "grad_norm": 2.4593212604522705, "learning_rate": 1.9369152030840556e-06, "loss": 4.4116, "step": 184 },
    { "epoch": 1.2231404958677685, "grad_norm": 2.4656741619110107, "learning_rate": 1.70370868554659e-06, "loss": 3.4118, "step": 185 },
    { "epoch": 1.2297520661157024, "grad_norm": 2.7001984119415283, "learning_rate": 1.4852136862001764e-06, "loss": 3.9351, "step": 186 },
    { "epoch": 1.2363636363636363, "grad_norm": 3.269486904144287, "learning_rate": 1.2814967607382432e-06, "loss": 4.4956, "step": 187 },
    { "epoch": 1.2429752066115702, "grad_norm": 3.6578261852264404, "learning_rate": 1.0926199633097157e-06, "loss": 3.9351, "step": 188 },
    { "epoch": 1.2495867768595041, "grad_norm": 2.344083547592163, "learning_rate": 9.186408276168013e-07, "loss": 3.9633, "step": 189 },
    { "epoch": 1.256198347107438, "grad_norm": 0.6168360710144043, "learning_rate": 7.596123493895991e-07, "loss": 3.5821, "step": 190 },
    { "epoch": 1.262809917355372, "grad_norm": 0.6475507616996765, "learning_rate": 6.15582970243117e-07, "loss": 2.811, "step": 191 },
    { "epoch": 1.2694214876033059, "grad_norm": 0.6254196166992188, "learning_rate": 4.865965629214819e-07, "loss": 3.8433, "step": 192 },
    { "epoch": 1.2760330578512398, "grad_norm": 0.6778838038444519, "learning_rate": 3.7269241793390085e-07, "loss": 4.0597, "step": 193 },
    { "epoch": 1.2826446280991735, "grad_norm": 0.8316207528114319, "learning_rate": 2.7390523158633554e-07, "loss": 3.8344, "step": 194 },
    { "epoch": 1.2892561983471074, "grad_norm": 1.414699673652649, "learning_rate": 1.9026509541272275e-07, "loss": 4.2579, "step": 195 },
    { "epoch": 1.2958677685950413, "grad_norm": 0.8437047004699707, "learning_rate": 1.2179748700879012e-07, "loss": 3.9387, "step": 196 },
    { "epoch": 1.3024793388429752, "grad_norm": 1.736149787902832, "learning_rate": 6.852326227130834e-08, "loss": 4.1031, "step": 197 },
    { "epoch": 1.309090909090909, "grad_norm": 0.9786373376846313, "learning_rate": 3.04586490452119e-08, "loss": 3.9919, "step": 198 },
    { "epoch": 1.315702479338843, "grad_norm": 0.9801681041717529, "learning_rate": 7.615242180436522e-09, "loss": 3.9937, "step": 199 },
    { "epoch": 1.322314049586777, "grad_norm": 0.9714708924293518, "learning_rate": 0.0, "loss": 4.4521, "step": 200 },
    { "epoch": 1.322314049586777, "eval_loss": 1.047837495803833, "eval_runtime": 14.1574, "eval_samples_per_second": 18.012, "eval_steps_per_second": 9.041, "step": 200 }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.6229100552192e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}