|
{ |
|
"best_metric": null, |
|
"best_model_checkpoint": null, |
|
"epoch": 2.0, |
|
"eval_steps": 500, |
|
"global_step": 412, |
|
"is_hyper_param_search": false, |
|
"is_local_process_zero": true, |
|
"is_world_process_zero": true, |
|
"log_history": [ |
|
{ |
|
"epoch": 0.0048543689320388345, |
|
"grad_norm": 5.378143407634127, |
|
"learning_rate": 9.999854640567861e-06, |
|
"loss": 0.3464, |
|
"step": 1 |
|
}, |
|
{ |
|
"epoch": 0.009708737864077669, |
|
"grad_norm": 4.894296929528875, |
|
"learning_rate": 9.999418570723189e-06, |
|
"loss": 0.3255, |
|
"step": 2 |
|
}, |
|
{ |
|
"epoch": 0.014563106796116505, |
|
"grad_norm": 7.351759486789586, |
|
"learning_rate": 9.998691815820732e-06, |
|
"loss": 0.4611, |
|
"step": 3 |
|
}, |
|
{ |
|
"epoch": 0.019417475728155338, |
|
"grad_norm": 5.271446077550997, |
|
"learning_rate": 9.997674418116759e-06, |
|
"loss": 0.4408, |
|
"step": 4 |
|
}, |
|
{ |
|
"epoch": 0.024271844660194174, |
|
"grad_norm": 3.612464756117011, |
|
"learning_rate": 9.996366436766612e-06, |
|
"loss": 0.3053, |
|
"step": 5 |
|
}, |
|
{ |
|
"epoch": 0.02912621359223301, |
|
"grad_norm": 4.572942668158758, |
|
"learning_rate": 9.994767947821261e-06, |
|
"loss": 0.3871, |
|
"step": 6 |
|
}, |
|
{ |
|
"epoch": 0.03398058252427184, |
|
"grad_norm": 4.011088042824444, |
|
"learning_rate": 9.992879044222887e-06, |
|
"loss": 0.2683, |
|
"step": 7 |
|
}, |
|
{ |
|
"epoch": 0.038834951456310676, |
|
"grad_norm": 3.770696527596648, |
|
"learning_rate": 9.99069983579947e-06, |
|
"loss": 0.299, |
|
"step": 8 |
|
}, |
|
{ |
|
"epoch": 0.043689320388349516, |
|
"grad_norm": 3.99445466882808, |
|
"learning_rate": 9.988230449258409e-06, |
|
"loss": 0.2953, |
|
"step": 9 |
|
}, |
|
{ |
|
"epoch": 0.04854368932038835, |
|
"grad_norm": 3.305857445393283, |
|
"learning_rate": 9.985471028179155e-06, |
|
"loss": 0.2231, |
|
"step": 10 |
|
}, |
|
{ |
|
"epoch": 0.05339805825242718, |
|
"grad_norm": 4.4807917263811765, |
|
"learning_rate": 9.982421733004857e-06, |
|
"loss": 0.3227, |
|
"step": 11 |
|
}, |
|
{ |
|
"epoch": 0.05825242718446602, |
|
"grad_norm": 3.8723393421519368, |
|
"learning_rate": 9.979082741033047e-06, |
|
"loss": 0.2654, |
|
"step": 12 |
|
}, |
|
{ |
|
"epoch": 0.06310679611650485, |
|
"grad_norm": 3.972191046860092, |
|
"learning_rate": 9.975454246405312e-06, |
|
"loss": 0.2567, |
|
"step": 13 |
|
}, |
|
{ |
|
"epoch": 0.06796116504854369, |
|
"grad_norm": 5.326392755632004, |
|
"learning_rate": 9.971536460096021e-06, |
|
"loss": 0.4939, |
|
"step": 14 |
|
}, |
|
{ |
|
"epoch": 0.07281553398058252, |
|
"grad_norm": 3.911942426386476, |
|
"learning_rate": 9.96732960990005e-06, |
|
"loss": 0.2872, |
|
"step": 15 |
|
}, |
|
{ |
|
"epoch": 0.07766990291262135, |
|
"grad_norm": 3.8227075161218576, |
|
"learning_rate": 9.96283394041954e-06, |
|
"loss": 0.2827, |
|
"step": 16 |
|
}, |
|
{ |
|
"epoch": 0.0825242718446602, |
|
"grad_norm": 4.917122259212616, |
|
"learning_rate": 9.95804971304968e-06, |
|
"loss": 0.3229, |
|
"step": 17 |
|
}, |
|
{ |
|
"epoch": 0.08737864077669903, |
|
"grad_norm": 3.5881520748906803, |
|
"learning_rate": 9.952977205963496e-06, |
|
"loss": 0.2462, |
|
"step": 18 |
|
}, |
|
{ |
|
"epoch": 0.09223300970873786, |
|
"grad_norm": 3.2615635933708305, |
|
"learning_rate": 9.94761671409569e-06, |
|
"loss": 0.2877, |
|
"step": 19 |
|
}, |
|
{ |
|
"epoch": 0.0970873786407767, |
|
"grad_norm": 3.580768122545938, |
|
"learning_rate": 9.941968549125481e-06, |
|
"loss": 0.3244, |
|
"step": 20 |
|
}, |
|
{ |
|
"epoch": 0.10194174757281553, |
|
"grad_norm": 3.5694832101684346, |
|
"learning_rate": 9.936033039458494e-06, |
|
"loss": 0.3041, |
|
"step": 21 |
|
}, |
|
{ |
|
"epoch": 0.10679611650485436, |
|
"grad_norm": 3.7251131637258914, |
|
"learning_rate": 9.929810530207651e-06, |
|
"loss": 0.3085, |
|
"step": 22 |
|
}, |
|
{ |
|
"epoch": 0.11165048543689321, |
|
"grad_norm": 3.1294125258938346, |
|
"learning_rate": 9.923301383173119e-06, |
|
"loss": 0.2735, |
|
"step": 23 |
|
}, |
|
{ |
|
"epoch": 0.11650485436893204, |
|
"grad_norm": 3.498762097373417, |
|
"learning_rate": 9.916505976821262e-06, |
|
"loss": 0.2977, |
|
"step": 24 |
|
}, |
|
{ |
|
"epoch": 0.12135922330097088, |
|
"grad_norm": 3.4347462789997016, |
|
"learning_rate": 9.909424706262647e-06, |
|
"loss": 0.2773, |
|
"step": 25 |
|
}, |
|
{ |
|
"epoch": 0.1262135922330097, |
|
"grad_norm": 4.042734295116432, |
|
"learning_rate": 9.902057983229059e-06, |
|
"loss": 0.3298, |
|
"step": 26 |
|
}, |
|
{ |
|
"epoch": 0.13106796116504854, |
|
"grad_norm": 3.402243403849008, |
|
"learning_rate": 9.894406236049569e-06, |
|
"loss": 0.2786, |
|
"step": 27 |
|
}, |
|
{ |
|
"epoch": 0.13592233009708737, |
|
"grad_norm": 3.932743924163005, |
|
"learning_rate": 9.886469909625624e-06, |
|
"loss": 0.3188, |
|
"step": 28 |
|
}, |
|
{ |
|
"epoch": 0.1407766990291262, |
|
"grad_norm": 3.905197044703144, |
|
"learning_rate": 9.87824946540519e-06, |
|
"loss": 0.3535, |
|
"step": 29 |
|
}, |
|
{ |
|
"epoch": 0.14563106796116504, |
|
"grad_norm": 3.609206552249489, |
|
"learning_rate": 9.869745381355906e-06, |
|
"loss": 0.2909, |
|
"step": 30 |
|
}, |
|
{ |
|
"epoch": 0.15048543689320387, |
|
"grad_norm": 3.414831784710911, |
|
"learning_rate": 9.860958151937303e-06, |
|
"loss": 0.2574, |
|
"step": 31 |
|
}, |
|
{ |
|
"epoch": 0.1553398058252427, |
|
"grad_norm": 3.5579321291929116, |
|
"learning_rate": 9.851888288072053e-06, |
|
"loss": 0.2459, |
|
"step": 32 |
|
}, |
|
{ |
|
"epoch": 0.16019417475728157, |
|
"grad_norm": 4.162641219147627, |
|
"learning_rate": 9.842536317116262e-06, |
|
"loss": 0.3588, |
|
"step": 33 |
|
}, |
|
{ |
|
"epoch": 0.1650485436893204, |
|
"grad_norm": 4.68580370285394, |
|
"learning_rate": 9.832902782828801e-06, |
|
"loss": 0.3702, |
|
"step": 34 |
|
}, |
|
{ |
|
"epoch": 0.16990291262135923, |
|
"grad_norm": 4.193004613941464, |
|
"learning_rate": 9.822988245339701e-06, |
|
"loss": 0.381, |
|
"step": 35 |
|
}, |
|
{ |
|
"epoch": 0.17475728155339806, |
|
"grad_norm": 4.355494846026111, |
|
"learning_rate": 9.81279328111758e-06, |
|
"loss": 0.3122, |
|
"step": 36 |
|
}, |
|
{ |
|
"epoch": 0.1796116504854369, |
|
"grad_norm": 3.9624635452437906, |
|
"learning_rate": 9.802318482936121e-06, |
|
"loss": 0.3228, |
|
"step": 37 |
|
}, |
|
{ |
|
"epoch": 0.18446601941747573, |
|
"grad_norm": 4.016385482613762, |
|
"learning_rate": 9.791564459839609e-06, |
|
"loss": 0.3008, |
|
"step": 38 |
|
}, |
|
{ |
|
"epoch": 0.18932038834951456, |
|
"grad_norm": 4.060884878117307, |
|
"learning_rate": 9.780531837107519e-06, |
|
"loss": 0.3423, |
|
"step": 39 |
|
}, |
|
{ |
|
"epoch": 0.1941747572815534, |
|
"grad_norm": 3.9972439902223775, |
|
"learning_rate": 9.769221256218165e-06, |
|
"loss": 0.3306, |
|
"step": 40 |
|
}, |
|
{ |
|
"epoch": 0.19902912621359223, |
|
"grad_norm": 3.792327487947346, |
|
"learning_rate": 9.75763337481139e-06, |
|
"loss": 0.3476, |
|
"step": 41 |
|
}, |
|
{ |
|
"epoch": 0.20388349514563106, |
|
"grad_norm": 3.6416741800021715, |
|
"learning_rate": 9.745768866650339e-06, |
|
"loss": 0.3449, |
|
"step": 42 |
|
}, |
|
{ |
|
"epoch": 0.2087378640776699, |
|
"grad_norm": 3.4964559729684797, |
|
"learning_rate": 9.73362842158228e-06, |
|
"loss": 0.2669, |
|
"step": 43 |
|
}, |
|
{ |
|
"epoch": 0.21359223300970873, |
|
"grad_norm": 3.441263103573181, |
|
"learning_rate": 9.721212745498493e-06, |
|
"loss": 0.2742, |
|
"step": 44 |
|
}, |
|
{ |
|
"epoch": 0.21844660194174756, |
|
"grad_norm": 3.917275363205342, |
|
"learning_rate": 9.70852256029323e-06, |
|
"loss": 0.2954, |
|
"step": 45 |
|
}, |
|
{ |
|
"epoch": 0.22330097087378642, |
|
"grad_norm": 4.181842792733697, |
|
"learning_rate": 9.695558603821735e-06, |
|
"loss": 0.3589, |
|
"step": 46 |
|
}, |
|
{ |
|
"epoch": 0.22815533980582525, |
|
"grad_norm": 3.4511140959192277, |
|
"learning_rate": 9.682321629857348e-06, |
|
"loss": 0.324, |
|
"step": 47 |
|
}, |
|
{ |
|
"epoch": 0.23300970873786409, |
|
"grad_norm": 3.789785520542004, |
|
"learning_rate": 9.66881240804768e-06, |
|
"loss": 0.308, |
|
"step": 48 |
|
}, |
|
{ |
|
"epoch": 0.23786407766990292, |
|
"grad_norm": 4.598754296184572, |
|
"learning_rate": 9.655031723869848e-06, |
|
"loss": 0.3861, |
|
"step": 49 |
|
}, |
|
{ |
|
"epoch": 0.24271844660194175, |
|
"grad_norm": 5.1330458881850225, |
|
"learning_rate": 9.64098037858483e-06, |
|
"loss": 0.3893, |
|
"step": 50 |
|
}, |
|
{ |
|
"epoch": 0.24757281553398058, |
|
"grad_norm": 3.3657145440506264, |
|
"learning_rate": 9.626659189190852e-06, |
|
"loss": 0.263, |
|
"step": 51 |
|
}, |
|
{ |
|
"epoch": 0.2524271844660194, |
|
"grad_norm": 3.554083897905739, |
|
"learning_rate": 9.612068988375898e-06, |
|
"loss": 0.3167, |
|
"step": 52 |
|
}, |
|
{ |
|
"epoch": 0.25728155339805825, |
|
"grad_norm": 4.500513509448257, |
|
"learning_rate": 9.597210624469288e-06, |
|
"loss": 0.3181, |
|
"step": 53 |
|
}, |
|
{ |
|
"epoch": 0.2621359223300971, |
|
"grad_norm": 3.34099829795081, |
|
"learning_rate": 9.582084961392358e-06, |
|
"loss": 0.2965, |
|
"step": 54 |
|
}, |
|
{ |
|
"epoch": 0.2669902912621359, |
|
"grad_norm": 3.466575422333381, |
|
"learning_rate": 9.566692878608229e-06, |
|
"loss": 0.3219, |
|
"step": 55 |
|
}, |
|
{ |
|
"epoch": 0.27184466019417475, |
|
"grad_norm": 4.250464552549045, |
|
"learning_rate": 9.551035271070665e-06, |
|
"loss": 0.3541, |
|
"step": 56 |
|
}, |
|
{ |
|
"epoch": 0.2766990291262136, |
|
"grad_norm": 3.6232412587871043, |
|
"learning_rate": 9.53511304917204e-06, |
|
"loss": 0.3005, |
|
"step": 57 |
|
}, |
|
{ |
|
"epoch": 0.2815533980582524, |
|
"grad_norm": 3.571200047447214, |
|
"learning_rate": 9.51892713869041e-06, |
|
"loss": 0.2616, |
|
"step": 58 |
|
}, |
|
{ |
|
"epoch": 0.28640776699029125, |
|
"grad_norm": 4.347354232094379, |
|
"learning_rate": 9.502478480735678e-06, |
|
"loss": 0.3722, |
|
"step": 59 |
|
}, |
|
{ |
|
"epoch": 0.2912621359223301, |
|
"grad_norm": 3.87730169839573, |
|
"learning_rate": 9.485768031694872e-06, |
|
"loss": 0.3117, |
|
"step": 60 |
|
}, |
|
{ |
|
"epoch": 0.2961165048543689, |
|
"grad_norm": 4.274501416159858, |
|
"learning_rate": 9.468796763176549e-06, |
|
"loss": 0.3849, |
|
"step": 61 |
|
}, |
|
{ |
|
"epoch": 0.30097087378640774, |
|
"grad_norm": 3.246374985365425, |
|
"learning_rate": 9.45156566195429e-06, |
|
"loss": 0.2522, |
|
"step": 62 |
|
}, |
|
{ |
|
"epoch": 0.3058252427184466, |
|
"grad_norm": 3.696747350462723, |
|
"learning_rate": 9.43407572990933e-06, |
|
"loss": 0.293, |
|
"step": 63 |
|
}, |
|
{ |
|
"epoch": 0.3106796116504854, |
|
"grad_norm": 3.9338713829165584, |
|
"learning_rate": 9.416327983972304e-06, |
|
"loss": 0.3201, |
|
"step": 64 |
|
}, |
|
{ |
|
"epoch": 0.3155339805825243, |
|
"grad_norm": 4.0017810616141425, |
|
"learning_rate": 9.398323456064124e-06, |
|
"loss": 0.3799, |
|
"step": 65 |
|
}, |
|
{ |
|
"epoch": 0.32038834951456313, |
|
"grad_norm": 3.8342637859675444, |
|
"learning_rate": 9.380063193035968e-06, |
|
"loss": 0.292, |
|
"step": 66 |
|
}, |
|
{ |
|
"epoch": 0.32524271844660196, |
|
"grad_norm": 3.827573439920934, |
|
"learning_rate": 9.361548256608421e-06, |
|
"loss": 0.2921, |
|
"step": 67 |
|
}, |
|
{ |
|
"epoch": 0.3300970873786408, |
|
"grad_norm": 3.9845442576869092, |
|
"learning_rate": 9.342779723309746e-06, |
|
"loss": 0.4174, |
|
"step": 68 |
|
}, |
|
{ |
|
"epoch": 0.33495145631067963, |
|
"grad_norm": 3.1026015714842954, |
|
"learning_rate": 9.323758684413272e-06, |
|
"loss": 0.235, |
|
"step": 69 |
|
}, |
|
{ |
|
"epoch": 0.33980582524271846, |
|
"grad_norm": 3.7750036783126393, |
|
"learning_rate": 9.304486245873973e-06, |
|
"loss": 0.3227, |
|
"step": 70 |
|
}, |
|
{ |
|
"epoch": 0.3446601941747573, |
|
"grad_norm": 3.943387424903264, |
|
"learning_rate": 9.284963528264133e-06, |
|
"loss": 0.3371, |
|
"step": 71 |
|
}, |
|
{ |
|
"epoch": 0.34951456310679613, |
|
"grad_norm": 3.8381884344953825, |
|
"learning_rate": 9.26519166670821e-06, |
|
"loss": 0.2894, |
|
"step": 72 |
|
}, |
|
{ |
|
"epoch": 0.35436893203883496, |
|
"grad_norm": 3.544434059167168, |
|
"learning_rate": 9.24517181081683e-06, |
|
"loss": 0.2685, |
|
"step": 73 |
|
}, |
|
{ |
|
"epoch": 0.3592233009708738, |
|
"grad_norm": 3.378961777033687, |
|
"learning_rate": 9.22490512461995e-06, |
|
"loss": 0.2781, |
|
"step": 74 |
|
}, |
|
{ |
|
"epoch": 0.3640776699029126, |
|
"grad_norm": 4.331788711997835, |
|
"learning_rate": 9.204392786499168e-06, |
|
"loss": 0.3451, |
|
"step": 75 |
|
}, |
|
{ |
|
"epoch": 0.36893203883495146, |
|
"grad_norm": 3.4925860733450476, |
|
"learning_rate": 9.183635989119211e-06, |
|
"loss": 0.2746, |
|
"step": 76 |
|
}, |
|
{ |
|
"epoch": 0.3737864077669903, |
|
"grad_norm": 3.794077796789663, |
|
"learning_rate": 9.162635939358593e-06, |
|
"loss": 0.279, |
|
"step": 77 |
|
}, |
|
{ |
|
"epoch": 0.3786407766990291, |
|
"grad_norm": 4.069630918475093, |
|
"learning_rate": 9.141393858239435e-06, |
|
"loss": 0.3196, |
|
"step": 78 |
|
}, |
|
{ |
|
"epoch": 0.38349514563106796, |
|
"grad_norm": 3.7846293915199527, |
|
"learning_rate": 9.119910980856477e-06, |
|
"loss": 0.3348, |
|
"step": 79 |
|
}, |
|
{ |
|
"epoch": 0.3883495145631068, |
|
"grad_norm": 3.267156149708633, |
|
"learning_rate": 9.098188556305262e-06, |
|
"loss": 0.3272, |
|
"step": 80 |
|
}, |
|
{ |
|
"epoch": 0.3932038834951456, |
|
"grad_norm": 3.3968613462591133, |
|
"learning_rate": 9.076227847609513e-06, |
|
"loss": 0.2808, |
|
"step": 81 |
|
}, |
|
{ |
|
"epoch": 0.39805825242718446, |
|
"grad_norm": 3.011208559175575, |
|
"learning_rate": 9.054030131647682e-06, |
|
"loss": 0.2372, |
|
"step": 82 |
|
}, |
|
{ |
|
"epoch": 0.4029126213592233, |
|
"grad_norm": 4.051616797054234, |
|
"learning_rate": 9.031596699078727e-06, |
|
"loss": 0.4596, |
|
"step": 83 |
|
}, |
|
{ |
|
"epoch": 0.4077669902912621, |
|
"grad_norm": 3.6075025150149225, |
|
"learning_rate": 9.008928854267054e-06, |
|
"loss": 0.2939, |
|
"step": 84 |
|
}, |
|
{ |
|
"epoch": 0.41262135922330095, |
|
"grad_norm": 4.135509053101431, |
|
"learning_rate": 8.986027915206686e-06, |
|
"loss": 0.2967, |
|
"step": 85 |
|
}, |
|
{ |
|
"epoch": 0.4174757281553398, |
|
"grad_norm": 3.7862374826186826, |
|
"learning_rate": 8.962895213444618e-06, |
|
"loss": 0.2671, |
|
"step": 86 |
|
}, |
|
{ |
|
"epoch": 0.4223300970873786, |
|
"grad_norm": 3.0878348574553676, |
|
"learning_rate": 8.939532094003409e-06, |
|
"loss": 0.2706, |
|
"step": 87 |
|
}, |
|
{ |
|
"epoch": 0.42718446601941745, |
|
"grad_norm": 3.7265938482807037, |
|
"learning_rate": 8.91593991530297e-06, |
|
"loss": 0.316, |
|
"step": 88 |
|
}, |
|
{ |
|
"epoch": 0.4320388349514563, |
|
"grad_norm": 4.630457617713916, |
|
"learning_rate": 8.892120049081577e-06, |
|
"loss": 0.3866, |
|
"step": 89 |
|
}, |
|
{ |
|
"epoch": 0.4368932038834951, |
|
"grad_norm": 3.9288103295473937, |
|
"learning_rate": 8.868073880316125e-06, |
|
"loss": 0.3674, |
|
"step": 90 |
|
}, |
|
{ |
|
"epoch": 0.441747572815534, |
|
"grad_norm": 3.6634831334676816, |
|
"learning_rate": 8.843802807141584e-06, |
|
"loss": 0.2437, |
|
"step": 91 |
|
}, |
|
{ |
|
"epoch": 0.44660194174757284, |
|
"grad_norm": 3.9345830082103834, |
|
"learning_rate": 8.819308240769726e-06, |
|
"loss": 0.3072, |
|
"step": 92 |
|
}, |
|
{ |
|
"epoch": 0.45145631067961167, |
|
"grad_norm": 3.696277061427197, |
|
"learning_rate": 8.794591605407047e-06, |
|
"loss": 0.3546, |
|
"step": 93 |
|
}, |
|
{ |
|
"epoch": 0.4563106796116505, |
|
"grad_norm": 4.6325972388154035, |
|
"learning_rate": 8.769654338171986e-06, |
|
"loss": 0.3703, |
|
"step": 94 |
|
}, |
|
{ |
|
"epoch": 0.46116504854368934, |
|
"grad_norm": 3.720951674003812, |
|
"learning_rate": 8.744497889011344e-06, |
|
"loss": 0.3023, |
|
"step": 95 |
|
}, |
|
{ |
|
"epoch": 0.46601941747572817, |
|
"grad_norm": 3.4941267371150158, |
|
"learning_rate": 8.71912372061598e-06, |
|
"loss": 0.3681, |
|
"step": 96 |
|
}, |
|
{ |
|
"epoch": 0.470873786407767, |
|
"grad_norm": 3.59440313295267, |
|
"learning_rate": 8.693533308335786e-06, |
|
"loss": 0.2995, |
|
"step": 97 |
|
}, |
|
{ |
|
"epoch": 0.47572815533980584, |
|
"grad_norm": 3.699310247730822, |
|
"learning_rate": 8.667728140093876e-06, |
|
"loss": 0.3316, |
|
"step": 98 |
|
}, |
|
{ |
|
"epoch": 0.48058252427184467, |
|
"grad_norm": 4.114605116349415, |
|
"learning_rate": 8.641709716300092e-06, |
|
"loss": 0.3059, |
|
"step": 99 |
|
}, |
|
{ |
|
"epoch": 0.4854368932038835, |
|
"grad_norm": 3.656975878650179, |
|
"learning_rate": 8.615479549763756e-06, |
|
"loss": 0.3385, |
|
"step": 100 |
|
}, |
|
{ |
|
"epoch": 0.49029126213592233, |
|
"grad_norm": 3.3815288817303313, |
|
"learning_rate": 8.589039165605716e-06, |
|
"loss": 0.2678, |
|
"step": 101 |
|
}, |
|
{ |
|
"epoch": 0.49514563106796117, |
|
"grad_norm": 3.638304857763173, |
|
"learning_rate": 8.56239010116966e-06, |
|
"loss": 0.3167, |
|
"step": 102 |
|
}, |
|
{ |
|
"epoch": 0.5, |
|
"grad_norm": 3.6323757635548635, |
|
"learning_rate": 8.535533905932739e-06, |
|
"loss": 0.2918, |
|
"step": 103 |
|
}, |
|
{ |
|
"epoch": 0.5048543689320388, |
|
"grad_norm": 4.183379717690887, |
|
"learning_rate": 8.508472141415468e-06, |
|
"loss": 0.3186, |
|
"step": 104 |
|
}, |
|
{ |
|
"epoch": 0.5097087378640777, |
|
"grad_norm": 4.154343055084759, |
|
"learning_rate": 8.481206381090934e-06, |
|
"loss": 0.2626, |
|
"step": 105 |
|
}, |
|
{ |
|
"epoch": 0.5145631067961165, |
|
"grad_norm": 3.5786841796945685, |
|
"learning_rate": 8.453738210293316e-06, |
|
"loss": 0.2824, |
|
"step": 106 |
|
}, |
|
{ |
|
"epoch": 0.5194174757281553, |
|
"grad_norm": 4.168143564101458, |
|
"learning_rate": 8.426069226125695e-06, |
|
"loss": 0.3458, |
|
"step": 107 |
|
}, |
|
{ |
|
"epoch": 0.5242718446601942, |
|
"grad_norm": 4.126139154772882, |
|
"learning_rate": 8.398201037367202e-06, |
|
"loss": 0.3052, |
|
"step": 108 |
|
}, |
|
{ |
|
"epoch": 0.529126213592233, |
|
"grad_norm": 3.237923851122716, |
|
"learning_rate": 8.370135264379475e-06, |
|
"loss": 0.2874, |
|
"step": 109 |
|
}, |
|
{ |
|
"epoch": 0.5339805825242718, |
|
"grad_norm": 3.1249087177275308, |
|
"learning_rate": 8.341873539012443e-06, |
|
"loss": 0.2394, |
|
"step": 110 |
|
}, |
|
{ |
|
"epoch": 0.5388349514563107, |
|
"grad_norm": 5.074364305711858, |
|
"learning_rate": 8.313417504509446e-06, |
|
"loss": 0.2302, |
|
"step": 111 |
|
}, |
|
{ |
|
"epoch": 0.5436893203883495, |
|
"grad_norm": 3.3253330575964495, |
|
"learning_rate": 8.284768815411693e-06, |
|
"loss": 0.2861, |
|
"step": 112 |
|
}, |
|
{ |
|
"epoch": 0.5485436893203883, |
|
"grad_norm": 4.360168763243382, |
|
"learning_rate": 8.255929137462049e-06, |
|
"loss": 0.3128, |
|
"step": 113 |
|
}, |
|
{ |
|
"epoch": 0.5533980582524272, |
|
"grad_norm": 4.27399461394767, |
|
"learning_rate": 8.226900147508205e-06, |
|
"loss": 0.3279, |
|
"step": 114 |
|
}, |
|
{ |
|
"epoch": 0.558252427184466, |
|
"grad_norm": 3.3681041810207755, |
|
"learning_rate": 8.197683533405156e-06, |
|
"loss": 0.2547, |
|
"step": 115 |
|
}, |
|
{ |
|
"epoch": 0.5631067961165048, |
|
"grad_norm": 4.055671521702825, |
|
"learning_rate": 8.168280993917078e-06, |
|
"loss": 0.3092, |
|
"step": 116 |
|
}, |
|
{ |
|
"epoch": 0.5679611650485437, |
|
"grad_norm": 4.248189965027739, |
|
"learning_rate": 8.138694238618543e-06, |
|
"loss": 0.3236, |
|
"step": 117 |
|
}, |
|
{ |
|
"epoch": 0.5728155339805825, |
|
"grad_norm": 3.612343188062059, |
|
"learning_rate": 8.108924987795137e-06, |
|
"loss": 0.3403, |
|
"step": 118 |
|
}, |
|
{ |
|
"epoch": 0.5776699029126213, |
|
"grad_norm": 4.111961208705483, |
|
"learning_rate": 8.078974972343414e-06, |
|
"loss": 0.3044, |
|
"step": 119 |
|
}, |
|
{ |
|
"epoch": 0.5825242718446602, |
|
"grad_norm": 4.120026736966224, |
|
"learning_rate": 8.048845933670274e-06, |
|
"loss": 0.2845, |
|
"step": 120 |
|
}, |
|
{ |
|
"epoch": 0.587378640776699, |
|
"grad_norm": 3.4954482548295878, |
|
"learning_rate": 8.01853962359169e-06, |
|
"loss": 0.2441, |
|
"step": 121 |
|
}, |
|
{ |
|
"epoch": 0.5922330097087378, |
|
"grad_norm": 3.9071335972909744, |
|
"learning_rate": 7.988057804230878e-06, |
|
"loss": 0.2879, |
|
"step": 122 |
|
}, |
|
{ |
|
"epoch": 0.5970873786407767, |
|
"grad_norm": 2.957098675744294, |
|
"learning_rate": 7.957402247915817e-06, |
|
"loss": 0.2138, |
|
"step": 123 |
|
}, |
|
{ |
|
"epoch": 0.6019417475728155, |
|
"grad_norm": 3.140479418310476, |
|
"learning_rate": 7.92657473707621e-06, |
|
"loss": 0.2111, |
|
"step": 124 |
|
}, |
|
{ |
|
"epoch": 0.6067961165048543, |
|
"grad_norm": 3.0799814234459526, |
|
"learning_rate": 7.895577064139847e-06, |
|
"loss": 0.2861, |
|
"step": 125 |
|
}, |
|
{ |
|
"epoch": 0.6116504854368932, |
|
"grad_norm": 3.627717678753021, |
|
"learning_rate": 7.864411031428379e-06, |
|
"loss": 0.2902, |
|
"step": 126 |
|
}, |
|
{ |
|
"epoch": 0.616504854368932, |
|
"grad_norm": 3.2781356561377173, |
|
"learning_rate": 7.833078451052537e-06, |
|
"loss": 0.2605, |
|
"step": 127 |
|
}, |
|
{ |
|
"epoch": 0.6213592233009708, |
|
"grad_norm": 3.4215884348866066, |
|
"learning_rate": 7.801581144806752e-06, |
|
"loss": 0.3316, |
|
"step": 128 |
|
}, |
|
{ |
|
"epoch": 0.6262135922330098, |
|
"grad_norm": 3.8424343189726944, |
|
"learning_rate": 7.769920944063244e-06, |
|
"loss": 0.2814, |
|
"step": 129 |
|
}, |
|
{ |
|
"epoch": 0.6310679611650486, |
|
"grad_norm": 3.675388135381745, |
|
"learning_rate": 7.73809968966554e-06, |
|
"loss": 0.2795, |
|
"step": 130 |
|
}, |
|
{ |
|
"epoch": 0.6359223300970874, |
|
"grad_norm": 3.374157334994161, |
|
"learning_rate": 7.706119231821423e-06, |
|
"loss": 0.2682, |
|
"step": 131 |
|
}, |
|
{ |
|
"epoch": 0.6407766990291263, |
|
"grad_norm": 3.6605583885523747, |
|
"learning_rate": 7.673981429995372e-06, |
|
"loss": 0.2905, |
|
"step": 132 |
|
}, |
|
{ |
|
"epoch": 0.6456310679611651, |
|
"grad_norm": 3.5259822612580742, |
|
"learning_rate": 7.641688152800433e-06, |
|
"loss": 0.2976, |
|
"step": 133 |
|
}, |
|
{ |
|
"epoch": 0.6504854368932039, |
|
"grad_norm": 4.18102995202913, |
|
"learning_rate": 7.609241277889583e-06, |
|
"loss": 0.3087, |
|
"step": 134 |
|
}, |
|
{ |
|
"epoch": 0.6553398058252428, |
|
"grad_norm": 4.0348888803242655, |
|
"learning_rate": 7.5766426918465455e-06, |
|
"loss": 0.333, |
|
"step": 135 |
|
}, |
|
{ |
|
"epoch": 0.6601941747572816, |
|
"grad_norm": 3.4205146965325266, |
|
"learning_rate": 7.5438942900761035e-06, |
|
"loss": 0.2855, |
|
"step": 136 |
|
}, |
|
{ |
|
"epoch": 0.6650485436893204, |
|
"grad_norm": 3.412779284207671, |
|
"learning_rate": 7.51099797669389e-06, |
|
"loss": 0.3524, |
|
"step": 137 |
|
}, |
|
{ |
|
"epoch": 0.6699029126213593, |
|
"grad_norm": 4.100430036338693, |
|
"learning_rate": 7.477955664415678e-06, |
|
"loss": 0.3284, |
|
"step": 138 |
|
}, |
|
{ |
|
"epoch": 0.6747572815533981, |
|
"grad_norm": 3.720199127017926, |
|
"learning_rate": 7.444769274446168e-06, |
|
"loss": 0.3126, |
|
"step": 139 |
|
}, |
|
{ |
|
"epoch": 0.6796116504854369, |
|
"grad_norm": 3.8004784956588096, |
|
"learning_rate": 7.411440736367281e-06, |
|
"loss": 0.2948, |
|
"step": 140 |
|
}, |
|
{ |
|
"epoch": 0.6844660194174758, |
|
"grad_norm": 3.5169102335864197, |
|
"learning_rate": 7.377971988025964e-06, |
|
"loss": 0.2755, |
|
"step": 141 |
|
}, |
|
{ |
|
"epoch": 0.6893203883495146, |
|
"grad_norm": 3.633867939521945, |
|
"learning_rate": 7.3443649754215175e-06, |
|
"loss": 0.2561, |
|
"step": 142 |
|
}, |
|
{ |
|
"epoch": 0.6941747572815534, |
|
"grad_norm": 4.117823557197984, |
|
"learning_rate": 7.310621652592449e-06, |
|
"loss": 0.3183, |
|
"step": 143 |
|
}, |
|
{ |
|
"epoch": 0.6990291262135923, |
|
"grad_norm": 3.6453109102828125, |
|
"learning_rate": 7.276743981502856e-06, |
|
"loss": 0.2784, |
|
"step": 144 |
|
}, |
|
{ |
|
"epoch": 0.7038834951456311, |
|
"grad_norm": 3.7103243970413438, |
|
"learning_rate": 7.242733931928352e-06, |
|
"loss": 0.2566, |
|
"step": 145 |
|
}, |
|
{ |
|
"epoch": 0.7087378640776699, |
|
"grad_norm": 2.8634018006912907, |
|
"learning_rate": 7.208593481341536e-06, |
|
"loss": 0.263, |
|
"step": 146 |
|
}, |
|
{ |
|
"epoch": 0.7135922330097088, |
|
"grad_norm": 3.264744989210774, |
|
"learning_rate": 7.1743246147970095e-06, |
|
"loss": 0.2541, |
|
"step": 147 |
|
}, |
|
{ |
|
"epoch": 0.7184466019417476, |
|
"grad_norm": 3.8329899318886365, |
|
"learning_rate": 7.139929324815965e-06, |
|
"loss": 0.2955, |
|
"step": 148 |
|
}, |
|
{ |
|
"epoch": 0.7233009708737864, |
|
"grad_norm": 3.057133605338264, |
|
"learning_rate": 7.105409611270332e-06, |
|
"loss": 0.2343, |
|
"step": 149 |
|
}, |
|
{ |
|
"epoch": 0.7281553398058253, |
|
"grad_norm": 3.0782458045158125, |
|
"learning_rate": 7.070767481266493e-06, |
|
"loss": 0.2848, |
|
"step": 150 |
|
}, |
|
{ |
|
"epoch": 0.7330097087378641, |
|
"grad_norm": 4.5134038568086785, |
|
"learning_rate": 7.036004949028587e-06, |
|
"loss": 0.3047, |
|
"step": 151 |
|
}, |
|
{ |
|
"epoch": 0.7378640776699029, |
|
"grad_norm": 3.4495413151855634, |
|
"learning_rate": 7.00112403578139e-06, |
|
"loss": 0.3213, |
|
"step": 152 |
|
}, |
|
{ |
|
"epoch": 0.7427184466019418, |
|
"grad_norm": 3.6153705645026473, |
|
"learning_rate": 6.9661267696328015e-06, |
|
"loss": 0.3286, |
|
"step": 153 |
|
}, |
|
{ |
|
"epoch": 0.7475728155339806, |
|
"grad_norm": 3.5409135769472484, |
|
"learning_rate": 6.931015185455915e-06, |
|
"loss": 0.2804, |
|
"step": 154 |
|
}, |
|
{ |
|
"epoch": 0.7524271844660194, |
|
"grad_norm": 4.081839122757467, |
|
"learning_rate": 6.895791324770702e-06, |
|
"loss": 0.3751, |
|
"step": 155 |
|
}, |
|
{ |
|
"epoch": 0.7572815533980582, |
|
"grad_norm": 3.700678050097594, |
|
"learning_rate": 6.860457235625322e-06, |
|
"loss": 0.3016, |
|
"step": 156 |
|
}, |
|
{ |
|
"epoch": 0.7621359223300971, |
|
"grad_norm": 3.3810443516637267, |
|
"learning_rate": 6.825014972477024e-06, |
|
"loss": 0.3056, |
|
"step": 157 |
|
}, |
|
{ |
|
"epoch": 0.7669902912621359, |
|
"grad_norm": 3.500935431083706, |
|
"learning_rate": 6.7894665960727105e-06, |
|
"loss": 0.2945, |
|
"step": 158 |
|
}, |
|
{ |
|
"epoch": 0.7718446601941747, |
|
"grad_norm": 3.434907597687763, |
|
"learning_rate": 6.7538141733291e-06, |
|
"loss": 0.2763, |
|
"step": 159 |
|
}, |
|
{ |
|
"epoch": 0.7766990291262136, |
|
"grad_norm": 3.493356091277342, |
|
"learning_rate": 6.7180597772125665e-06, |
|
"loss": 0.2843, |
|
"step": 160 |
|
}, |
|
{ |
|
"epoch": 0.7815533980582524, |
|
"grad_norm": 3.2671065559456505, |
|
"learning_rate": 6.682205486618592e-06, |
|
"loss": 0.2955, |
|
"step": 161 |
|
}, |
|
{ |
|
"epoch": 0.7864077669902912, |
|
"grad_norm": 3.572930865165925, |
|
"learning_rate": 6.646253386250909e-06, |
|
"loss": 0.3555, |
|
"step": 162 |
|
}, |
|
{ |
|
"epoch": 0.7912621359223301, |
|
"grad_norm": 3.353967102879005, |
|
"learning_rate": 6.610205566500272e-06, |
|
"loss": 0.3002, |
|
"step": 163 |
|
}, |
|
{ |
|
"epoch": 0.7961165048543689, |
|
"grad_norm": 3.621050618749983, |
|
"learning_rate": 6.574064123322925e-06, |
|
"loss": 0.32, |
|
"step": 164 |
|
}, |
|
{ |
|
"epoch": 0.8009708737864077, |
|
"grad_norm": 3.1954385846083224, |
|
"learning_rate": 6.537831158118733e-06, |
|
"loss": 0.2386, |
|
"step": 165 |
|
}, |
|
{ |
|
"epoch": 0.8058252427184466, |
|
"grad_norm": 3.498291968306301, |
|
"learning_rate": 6.50150877760899e-06, |
|
"loss": 0.3214, |
|
"step": 166 |
|
}, |
|
{ |
|
"epoch": 0.8106796116504854, |
|
"grad_norm": 3.6931182322968876, |
|
"learning_rate": 6.465099093713944e-06, |
|
"loss": 0.3249, |
|
"step": 167 |
|
}, |
|
{ |
|
"epoch": 0.8155339805825242, |
|
"grad_norm": 2.8313942704171597, |
|
"learning_rate": 6.42860422342998e-06, |
|
"loss": 0.277, |
|
"step": 168 |
|
}, |
|
{ |
|
"epoch": 0.8203883495145631, |
|
"grad_norm": 3.5716383992818224, |
|
"learning_rate": 6.392026288706549e-06, |
|
"loss": 0.2689, |
|
"step": 169 |
|
}, |
|
{ |
|
"epoch": 0.8252427184466019, |
|
"grad_norm": 3.176200152955033, |
|
"learning_rate": 6.3553674163227786e-06, |
|
"loss": 0.2275, |
|
"step": 170 |
|
}, |
|
{ |
|
"epoch": 0.8300970873786407, |
|
"grad_norm": 3.2229277647359478, |
|
"learning_rate": 6.318629737763818e-06, |
|
"loss": 0.3031, |
|
"step": 171 |
|
}, |
|
{ |
|
"epoch": 0.8349514563106796, |
|
"grad_norm": 4.096738874622841, |
|
"learning_rate": 6.281815389096903e-06, |
|
"loss": 0.2688, |
|
"step": 172 |
|
}, |
|
{ |
|
"epoch": 0.8398058252427184, |
|
"grad_norm": 3.832793824430796, |
|
"learning_rate": 6.244926510847162e-06, |
|
"loss": 0.3124, |
|
"step": 173 |
|
}, |
|
{ |
|
"epoch": 0.8446601941747572, |
|
"grad_norm": 3.842229798125395, |
|
"learning_rate": 6.207965247873151e-06, |
|
"loss": 0.3541, |
|
"step": 174 |
|
}, |
|
{ |
|
"epoch": 0.8495145631067961, |
|
"grad_norm": 3.3136829491923647, |
|
"learning_rate": 6.1709337492421515e-06, |
|
"loss": 0.2365, |
|
"step": 175 |
|
}, |
|
{ |
|
"epoch": 0.8543689320388349, |
|
"grad_norm": 3.4725652481004587, |
|
"learning_rate": 6.133834168105206e-06, |
|
"loss": 0.2936, |
|
"step": 176 |
|
}, |
|
{ |
|
"epoch": 0.8592233009708737, |
|
"grad_norm": 3.504250626880649, |
|
"learning_rate": 6.096668661571934e-06, |
|
"loss": 0.3113, |
|
"step": 177 |
|
}, |
|
{ |
|
"epoch": 0.8640776699029126, |
|
"grad_norm": 3.672219050586378, |
|
"learning_rate": 6.0594393905851065e-06, |
|
"loss": 0.3333, |
|
"step": 178 |
|
}, |
|
{ |
|
"epoch": 0.8689320388349514, |
|
"grad_norm": 3.9044739145181255, |
|
"learning_rate": 6.0221485197949995e-06, |
|
"loss": 0.2887, |
|
"step": 179 |
|
}, |
|
{ |
|
"epoch": 0.8737864077669902, |
|
"grad_norm": 3.8629510412843744, |
|
"learning_rate": 5.9847982174335314e-06, |
|
"loss": 0.3359, |
|
"step": 180 |
|
}, |
|
{ |
|
"epoch": 0.8786407766990292, |
|
"grad_norm": 4.0714916209398035, |
|
"learning_rate": 5.9473906551881985e-06, |
|
"loss": 0.3581, |
|
"step": 181 |
|
}, |
|
{ |
|
"epoch": 0.883495145631068, |
|
"grad_norm": 3.4522804075936957, |
|
"learning_rate": 5.9099280080758085e-06, |
|
"loss": 0.2594, |
|
"step": 182 |
|
}, |
|
{ |
|
"epoch": 0.8883495145631068, |
|
"grad_norm": 4.288129870849474, |
|
"learning_rate": 5.872412454315999e-06, |
|
"loss": 0.3649, |
|
"step": 183 |
|
}, |
|
{ |
|
"epoch": 0.8932038834951457, |
|
"grad_norm": 3.9685985589287602, |
|
"learning_rate": 5.834846175204612e-06, |
|
"loss": 0.3596, |
|
"step": 184 |
|
}, |
|
{ |
|
"epoch": 0.8980582524271845, |
|
"grad_norm": 3.8055775375801986, |
|
"learning_rate": 5.797231354986842e-06, |
|
"loss": 0.3317, |
|
"step": 185 |
|
}, |
|
{ |
|
"epoch": 0.9029126213592233, |
|
"grad_norm": 3.3371096323112766, |
|
"learning_rate": 5.759570180730255e-06, |
|
"loss": 0.3251, |
|
"step": 186 |
|
}, |
|
{ |
|
"epoch": 0.9077669902912622, |
|
"grad_norm": 3.118923832267533, |
|
"learning_rate": 5.721864842197612e-06, |
|
"loss": 0.2382, |
|
"step": 187 |
|
}, |
|
{ |
|
"epoch": 0.912621359223301, |
|
"grad_norm": 4.13009064895878, |
|
"learning_rate": 5.684117531719552e-06, |
|
"loss": 0.4122, |
|
"step": 188 |
|
}, |
|
{ |
|
"epoch": 0.9174757281553398, |
|
"grad_norm": 4.589591211779461, |
|
"learning_rate": 5.646330444067121e-06, |
|
"loss": 0.3073, |
|
"step": 189 |
|
}, |
|
{ |
|
"epoch": 0.9223300970873787, |
|
"grad_norm": 3.113626275612425, |
|
"learning_rate": 5.608505776324158e-06, |
|
"loss": 0.25, |
|
"step": 190 |
|
}, |
|
{ |
|
"epoch": 0.9271844660194175, |
|
"grad_norm": 3.9416392350920835, |
|
"learning_rate": 5.570645727759558e-06, |
|
"loss": 0.3232, |
|
"step": 191 |
|
}, |
|
{ |
|
"epoch": 0.9320388349514563, |
|
"grad_norm": 3.489820470468405, |
|
"learning_rate": 5.532752499699381e-06, |
|
"loss": 0.355, |
|
"step": 192 |
|
}, |
|
{ |
|
"epoch": 0.9368932038834952, |
|
"grad_norm": 3.3456720028459177, |
|
"learning_rate": 5.494828295398874e-06, |
|
"loss": 0.2735, |
|
"step": 193 |
|
}, |
|
{ |
|
"epoch": 0.941747572815534, |
|
"grad_norm": 3.1645574613781116, |
|
"learning_rate": 5.456875319914355e-06, |
|
"loss": 0.2552, |
|
"step": 194 |
|
}, |
|
{ |
|
"epoch": 0.9466019417475728, |
|
"grad_norm": 3.310670471294068, |
|
"learning_rate": 5.4188957799750145e-06, |
|
"loss": 0.2289, |
|
"step": 195 |
|
}, |
|
{ |
|
"epoch": 0.9514563106796117, |
|
"grad_norm": 2.9162726270426353, |
|
"learning_rate": 5.380891883854591e-06, |
|
"loss": 0.2711, |
|
"step": 196 |
|
}, |
|
{ |
|
"epoch": 0.9563106796116505, |
|
"grad_norm": 2.980226316434736, |
|
"learning_rate": 5.34286584124299e-06, |
|
"loss": 0.231, |
|
"step": 197 |
|
}, |
|
{ |
|
"epoch": 0.9611650485436893, |
|
"grad_norm": 2.98131376385244, |
|
"learning_rate": 5.304819863117796e-06, |
|
"loss": 0.2436, |
|
"step": 198 |
|
}, |
|
{ |
|
"epoch": 0.9660194174757282, |
|
"grad_norm": 3.067413002199315, |
|
"learning_rate": 5.266756161615719e-06, |
|
"loss": 0.3038, |
|
"step": 199 |
|
}, |
|
{ |
|
"epoch": 0.970873786407767, |
|
"grad_norm": 3.7508267167237856, |
|
"learning_rate": 5.228676949903974e-06, |
|
"loss": 0.3191, |
|
"step": 200 |
|
}, |
|
{ |
|
"epoch": 0.9757281553398058, |
|
"grad_norm": 3.551674655703027, |
|
"learning_rate": 5.190584442051594e-06, |
|
"loss": 0.3213, |
|
"step": 201 |
|
}, |
|
{ |
|
"epoch": 0.9805825242718447, |
|
"grad_norm": 3.4741442297689504, |
|
"learning_rate": 5.1524808529007075e-06, |
|
"loss": 0.3261, |
|
"step": 202 |
|
}, |
|
{ |
|
"epoch": 0.9854368932038835, |
|
"grad_norm": 3.1977123160337, |
|
"learning_rate": 5.114368397937744e-06, |
|
"loss": 0.3034, |
|
"step": 203 |
|
}, |
|
{ |
|
"epoch": 0.9902912621359223, |
|
"grad_norm": 2.749801273765845, |
|
"learning_rate": 5.07624929316463e-06, |
|
"loss": 0.2442, |
|
"step": 204 |
|
}, |
|
{ |
|
"epoch": 0.9951456310679612, |
|
"grad_norm": 3.555611580765495, |
|
"learning_rate": 5.038125754969933e-06, |
|
"loss": 0.2291, |
|
"step": 205 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 2.750179427306159, |
|
"learning_rate": 5e-06, |
|
"loss": 0.1469, |
|
"step": 206 |
|
}, |
|
{ |
|
"epoch": 1.0048543689320388, |
|
"grad_norm": 3.6074292657758336, |
|
"learning_rate": 4.9618742450300675e-06, |
|
"loss": 0.2411, |
|
"step": 207 |
|
}, |
|
{ |
|
"epoch": 1.0097087378640777, |
|
"grad_norm": 3.2982860816255175, |
|
"learning_rate": 4.923750706835371e-06, |
|
"loss": 0.2252, |
|
"step": 208 |
|
}, |
|
{ |
|
"epoch": 1.0145631067961165, |
|
"grad_norm": 2.751687078673673, |
|
"learning_rate": 4.8856316020622564e-06, |
|
"loss": 0.1935, |
|
"step": 209 |
|
}, |
|
{ |
|
"epoch": 1.0194174757281553, |
|
"grad_norm": 2.131324360315531, |
|
"learning_rate": 4.847519147099294e-06, |
|
"loss": 0.1307, |
|
"step": 210 |
|
}, |
|
{ |
|
"epoch": 1.0242718446601942, |
|
"grad_norm": 2.9525937881413524, |
|
"learning_rate": 4.809415557948407e-06, |
|
"loss": 0.201, |
|
"step": 211 |
|
}, |
|
{ |
|
"epoch": 1.029126213592233, |
|
"grad_norm": 2.800908040363257, |
|
"learning_rate": 4.771323050096028e-06, |
|
"loss": 0.1956, |
|
"step": 212 |
|
}, |
|
{ |
|
"epoch": 1.0339805825242718, |
|
"grad_norm": 3.896512675233651, |
|
"learning_rate": 4.733243838384282e-06, |
|
"loss": 0.2521, |
|
"step": 213 |
|
}, |
|
{ |
|
"epoch": 1.0388349514563107, |
|
"grad_norm": 3.3243568354090476, |
|
"learning_rate": 4.6951801368822055e-06, |
|
"loss": 0.1846, |
|
"step": 214 |
|
}, |
|
{ |
|
"epoch": 1.0436893203883495, |
|
"grad_norm": 3.345967739972327, |
|
"learning_rate": 4.6571341587570114e-06, |
|
"loss": 0.1885, |
|
"step": 215 |
|
}, |
|
{ |
|
"epoch": 1.0485436893203883, |
|
"grad_norm": 2.6990059522214787, |
|
"learning_rate": 4.619108116145411e-06, |
|
"loss": 0.2062, |
|
"step": 216 |
|
}, |
|
{ |
|
"epoch": 1.0533980582524272, |
|
"grad_norm": 2.8034632634279855, |
|
"learning_rate": 4.581104220024988e-06, |
|
"loss": 0.1962, |
|
"step": 217 |
|
}, |
|
{ |
|
"epoch": 1.058252427184466, |
|
"grad_norm": 3.096371883439816, |
|
"learning_rate": 4.5431246800856455e-06, |
|
"loss": 0.2176, |
|
"step": 218 |
|
}, |
|
{ |
|
"epoch": 1.0631067961165048, |
|
"grad_norm": 3.234301301503053, |
|
"learning_rate": 4.505171704601128e-06, |
|
"loss": 0.1573, |
|
"step": 219 |
|
}, |
|
{ |
|
"epoch": 1.0679611650485437, |
|
"grad_norm": 2.7827553596014325, |
|
"learning_rate": 4.467247500300621e-06, |
|
"loss": 0.1558, |
|
"step": 220 |
|
}, |
|
{ |
|
"epoch": 1.0728155339805825, |
|
"grad_norm": 4.596075537757511, |
|
"learning_rate": 4.4293542722404435e-06, |
|
"loss": 0.1895, |
|
"step": 221 |
|
}, |
|
{ |
|
"epoch": 1.0776699029126213, |
|
"grad_norm": 3.4313974912911025, |
|
"learning_rate": 4.391494223675843e-06, |
|
"loss": 0.2125, |
|
"step": 222 |
|
}, |
|
{ |
|
"epoch": 1.0825242718446602, |
|
"grad_norm": 3.1698085004641663, |
|
"learning_rate": 4.3536695559328816e-06, |
|
"loss": 0.1564, |
|
"step": 223 |
|
}, |
|
{ |
|
"epoch": 1.087378640776699, |
|
"grad_norm": 3.721202767416967, |
|
"learning_rate": 4.31588246828045e-06, |
|
"loss": 0.1878, |
|
"step": 224 |
|
}, |
|
{ |
|
"epoch": 1.0922330097087378, |
|
"grad_norm": 2.952333401393123, |
|
"learning_rate": 4.278135157802389e-06, |
|
"loss": 0.1717, |
|
"step": 225 |
|
}, |
|
{ |
|
"epoch": 1.0970873786407767, |
|
"grad_norm": 2.971294433009444, |
|
"learning_rate": 4.240429819269746e-06, |
|
"loss": 0.1363, |
|
"step": 226 |
|
}, |
|
{ |
|
"epoch": 1.1019417475728155, |
|
"grad_norm": 3.126492959068957, |
|
"learning_rate": 4.20276864501316e-06, |
|
"loss": 0.1567, |
|
"step": 227 |
|
}, |
|
{ |
|
"epoch": 1.1067961165048543, |
|
"grad_norm": 3.453777131016959, |
|
"learning_rate": 4.165153824795391e-06, |
|
"loss": 0.1705, |
|
"step": 228 |
|
}, |
|
{ |
|
"epoch": 1.1116504854368932, |
|
"grad_norm": 2.9032949757849646, |
|
"learning_rate": 4.127587545684002e-06, |
|
"loss": 0.1557, |
|
"step": 229 |
|
}, |
|
{ |
|
"epoch": 1.116504854368932, |
|
"grad_norm": 3.921098873254642, |
|
"learning_rate": 4.090071991924194e-06, |
|
"loss": 0.1543, |
|
"step": 230 |
|
}, |
|
{ |
|
"epoch": 1.1213592233009708, |
|
"grad_norm": 3.270393778248402, |
|
"learning_rate": 4.052609344811802e-06, |
|
"loss": 0.159, |
|
"step": 231 |
|
}, |
|
{ |
|
"epoch": 1.1262135922330097, |
|
"grad_norm": 3.1626074130907393, |
|
"learning_rate": 4.015201782566471e-06, |
|
"loss": 0.1811, |
|
"step": 232 |
|
}, |
|
{ |
|
"epoch": 1.1310679611650485, |
|
"grad_norm": 3.3647207921675664, |
|
"learning_rate": 3.977851480205003e-06, |
|
"loss": 0.1842, |
|
"step": 233 |
|
}, |
|
{ |
|
"epoch": 1.1359223300970873, |
|
"grad_norm": 2.8874044173439906, |
|
"learning_rate": 3.940560609414894e-06, |
|
"loss": 0.1381, |
|
"step": 234 |
|
}, |
|
{ |
|
"epoch": 1.1407766990291262, |
|
"grad_norm": 3.279861092848255, |
|
"learning_rate": 3.903331338428067e-06, |
|
"loss": 0.1665, |
|
"step": 235 |
|
}, |
|
{ |
|
"epoch": 1.145631067961165, |
|
"grad_norm": 3.1603391725100702, |
|
"learning_rate": 3.866165831894796e-06, |
|
"loss": 0.1401, |
|
"step": 236 |
|
}, |
|
{ |
|
"epoch": 1.1504854368932038, |
|
"grad_norm": 3.392088063739412, |
|
"learning_rate": 3.829066250757851e-06, |
|
"loss": 0.1821, |
|
"step": 237 |
|
}, |
|
{ |
|
"epoch": 1.1553398058252426, |
|
"grad_norm": 4.192296653671298, |
|
"learning_rate": 3.7920347521268514e-06, |
|
"loss": 0.1728, |
|
"step": 238 |
|
}, |
|
{ |
|
"epoch": 1.1601941747572815, |
|
"grad_norm": 2.86078515364689, |
|
"learning_rate": 3.7550734891528413e-06, |
|
"loss": 0.1662, |
|
"step": 239 |
|
}, |
|
{ |
|
"epoch": 1.1650485436893203, |
|
"grad_norm": 3.363908060650987, |
|
"learning_rate": 3.7181846109031007e-06, |
|
"loss": 0.169, |
|
"step": 240 |
|
}, |
|
{ |
|
"epoch": 1.1699029126213591, |
|
"grad_norm": 2.939951981128744, |
|
"learning_rate": 3.6813702622361858e-06, |
|
"loss": 0.1335, |
|
"step": 241 |
|
}, |
|
{ |
|
"epoch": 1.174757281553398, |
|
"grad_norm": 3.5426747856829905, |
|
"learning_rate": 3.6446325836772244e-06, |
|
"loss": 0.193, |
|
"step": 242 |
|
}, |
|
{ |
|
"epoch": 1.1796116504854368, |
|
"grad_norm": 3.085063003996511, |
|
"learning_rate": 3.6079737112934533e-06, |
|
"loss": 0.1775, |
|
"step": 243 |
|
}, |
|
{ |
|
"epoch": 1.1844660194174756, |
|
"grad_norm": 3.5078991938473894, |
|
"learning_rate": 3.5713957765700224e-06, |
|
"loss": 0.164, |
|
"step": 244 |
|
}, |
|
{ |
|
"epoch": 1.1893203883495145, |
|
"grad_norm": 2.615217282840924, |
|
"learning_rate": 3.5349009062860586e-06, |
|
"loss": 0.1591, |
|
"step": 245 |
|
}, |
|
{ |
|
"epoch": 1.1941747572815533, |
|
"grad_norm": 3.6803453651452793, |
|
"learning_rate": 3.4984912223910105e-06, |
|
"loss": 0.1413, |
|
"step": 246 |
|
}, |
|
{ |
|
"epoch": 1.1990291262135921, |
|
"grad_norm": 3.4449942839520555, |
|
"learning_rate": 3.46216884188127e-06, |
|
"loss": 0.163, |
|
"step": 247 |
|
}, |
|
{ |
|
"epoch": 1.203883495145631, |
|
"grad_norm": 2.7954187431215565, |
|
"learning_rate": 3.425935876677077e-06, |
|
"loss": 0.1653, |
|
"step": 248 |
|
}, |
|
{ |
|
"epoch": 1.2087378640776698, |
|
"grad_norm": 3.3379500940316094, |
|
"learning_rate": 3.38979443349973e-06, |
|
"loss": 0.1498, |
|
"step": 249 |
|
}, |
|
{ |
|
"epoch": 1.2135922330097086, |
|
"grad_norm": 3.7339988958250427, |
|
"learning_rate": 3.3537466137490937e-06, |
|
"loss": 0.1635, |
|
"step": 250 |
|
}, |
|
{ |
|
"epoch": 1.2184466019417475, |
|
"grad_norm": 3.1712814085947807, |
|
"learning_rate": 3.3177945133814093e-06, |
|
"loss": 0.2052, |
|
"step": 251 |
|
}, |
|
{ |
|
"epoch": 1.2233009708737863, |
|
"grad_norm": 3.785681030228593, |
|
"learning_rate": 3.2819402227874364e-06, |
|
"loss": 0.1841, |
|
"step": 252 |
|
}, |
|
{ |
|
"epoch": 1.2281553398058254, |
|
"grad_norm": 3.380080489299224, |
|
"learning_rate": 3.2461858266709017e-06, |
|
"loss": 0.2052, |
|
"step": 253 |
|
}, |
|
{ |
|
"epoch": 1.233009708737864, |
|
"grad_norm": 2.5462863024797766, |
|
"learning_rate": 3.2105334039272924e-06, |
|
"loss": 0.1693, |
|
"step": 254 |
|
}, |
|
{ |
|
"epoch": 1.237864077669903, |
|
"grad_norm": 2.751706147069006, |
|
"learning_rate": 3.1749850275229777e-06, |
|
"loss": 0.1716, |
|
"step": 255 |
|
}, |
|
{ |
|
"epoch": 1.2427184466019416, |
|
"grad_norm": 2.9259614329694346, |
|
"learning_rate": 3.1395427643746802e-06, |
|
"loss": 0.1665, |
|
"step": 256 |
|
}, |
|
{ |
|
"epoch": 1.2475728155339807, |
|
"grad_norm": 2.9294906505296137, |
|
"learning_rate": 3.1042086752292995e-06, |
|
"loss": 0.1389, |
|
"step": 257 |
|
}, |
|
{ |
|
"epoch": 1.2524271844660193, |
|
"grad_norm": 3.3865475928641207, |
|
"learning_rate": 3.068984814544087e-06, |
|
"loss": 0.2233, |
|
"step": 258 |
|
}, |
|
{ |
|
"epoch": 1.2572815533980584, |
|
"grad_norm": 3.725741013397862, |
|
"learning_rate": 3.0338732303671993e-06, |
|
"loss": 0.1465, |
|
"step": 259 |
|
}, |
|
{ |
|
"epoch": 1.262135922330097, |
|
"grad_norm": 3.0795015825477137, |
|
"learning_rate": 2.99887596421861e-06, |
|
"loss": 0.1747, |
|
"step": 260 |
|
}, |
|
{ |
|
"epoch": 1.266990291262136, |
|
"grad_norm": 3.611562608462532, |
|
"learning_rate": 2.9639950509714138e-06, |
|
"loss": 0.1867, |
|
"step": 261 |
|
}, |
|
{ |
|
"epoch": 1.2718446601941746, |
|
"grad_norm": 3.084400701369303, |
|
"learning_rate": 2.929232518733507e-06, |
|
"loss": 0.2018, |
|
"step": 262 |
|
}, |
|
{ |
|
"epoch": 1.2766990291262137, |
|
"grad_norm": 3.1946687798944713, |
|
"learning_rate": 2.8945903887296686e-06, |
|
"loss": 0.1715, |
|
"step": 263 |
|
}, |
|
{ |
|
"epoch": 1.2815533980582523, |
|
"grad_norm": 3.5486237317517695, |
|
"learning_rate": 2.860070675184036e-06, |
|
"loss": 0.1851, |
|
"step": 264 |
|
}, |
|
{ |
|
"epoch": 1.2864077669902914, |
|
"grad_norm": 3.501162227913844, |
|
"learning_rate": 2.8256753852029917e-06, |
|
"loss": 0.181, |
|
"step": 265 |
|
}, |
|
{ |
|
"epoch": 1.29126213592233, |
|
"grad_norm": 3.6998358923196752, |
|
"learning_rate": 2.7914065186584637e-06, |
|
"loss": 0.1855, |
|
"step": 266 |
|
}, |
|
{ |
|
"epoch": 1.296116504854369, |
|
"grad_norm": 3.533496753991655, |
|
"learning_rate": 2.757266068071648e-06, |
|
"loss": 0.1686, |
|
"step": 267 |
|
}, |
|
{ |
|
"epoch": 1.3009708737864076, |
|
"grad_norm": 4.0820367314540364, |
|
"learning_rate": 2.7232560184971437e-06, |
|
"loss": 0.2041, |
|
"step": 268 |
|
}, |
|
{ |
|
"epoch": 1.3058252427184467, |
|
"grad_norm": 2.9637906965920973, |
|
"learning_rate": 2.689378347407553e-06, |
|
"loss": 0.1769, |
|
"step": 269 |
|
}, |
|
{ |
|
"epoch": 1.3106796116504853, |
|
"grad_norm": 3.28408303596329, |
|
"learning_rate": 2.6556350245784833e-06, |
|
"loss": 0.1478, |
|
"step": 270 |
|
}, |
|
{ |
|
"epoch": 1.3155339805825244, |
|
"grad_norm": 3.4932538693112005, |
|
"learning_rate": 2.6220280119740376e-06, |
|
"loss": 0.1784, |
|
"step": 271 |
|
}, |
|
{ |
|
"epoch": 1.3203883495145632, |
|
"grad_norm": 2.9444515487006915, |
|
"learning_rate": 2.588559263632719e-06, |
|
"loss": 0.1742, |
|
"step": 272 |
|
}, |
|
{ |
|
"epoch": 1.325242718446602, |
|
"grad_norm": 3.485379112038064, |
|
"learning_rate": 2.555230725553832e-06, |
|
"loss": 0.1452, |
|
"step": 273 |
|
}, |
|
{ |
|
"epoch": 1.3300970873786409, |
|
"grad_norm": 3.9882951570278484, |
|
"learning_rate": 2.522044335584322e-06, |
|
"loss": 0.2052, |
|
"step": 274 |
|
}, |
|
{ |
|
"epoch": 1.3349514563106797, |
|
"grad_norm": 4.110332411155784, |
|
"learning_rate": 2.489002023306112e-06, |
|
"loss": 0.2437, |
|
"step": 275 |
|
}, |
|
{ |
|
"epoch": 1.3398058252427185, |
|
"grad_norm": 3.0399564213447783, |
|
"learning_rate": 2.4561057099238973e-06, |
|
"loss": 0.1818, |
|
"step": 276 |
|
}, |
|
{ |
|
"epoch": 1.3446601941747574, |
|
"grad_norm": 4.446329888375363, |
|
"learning_rate": 2.423357308153454e-06, |
|
"loss": 0.204, |
|
"step": 277 |
|
}, |
|
{ |
|
"epoch": 1.3495145631067962, |
|
"grad_norm": 2.748301776472504, |
|
"learning_rate": 2.390758722110418e-06, |
|
"loss": 0.1495, |
|
"step": 278 |
|
}, |
|
{ |
|
"epoch": 1.354368932038835, |
|
"grad_norm": 3.1032791062681646, |
|
"learning_rate": 2.358311847199567e-06, |
|
"loss": 0.1958, |
|
"step": 279 |
|
}, |
|
{ |
|
"epoch": 1.3592233009708738, |
|
"grad_norm": 2.9234204018047594, |
|
"learning_rate": 2.3260185700046295e-06, |
|
"loss": 0.1332, |
|
"step": 280 |
|
}, |
|
{ |
|
"epoch": 1.3640776699029127, |
|
"grad_norm": 2.8010316743774832, |
|
"learning_rate": 2.2938807681785764e-06, |
|
"loss": 0.1638, |
|
"step": 281 |
|
}, |
|
{ |
|
"epoch": 1.3689320388349515, |
|
"grad_norm": 3.1777341991793406, |
|
"learning_rate": 2.2619003103344607e-06, |
|
"loss": 0.1726, |
|
"step": 282 |
|
}, |
|
{ |
|
"epoch": 1.3737864077669903, |
|
"grad_norm": 3.3059600212196347, |
|
"learning_rate": 2.2300790559367553e-06, |
|
"loss": 0.1628, |
|
"step": 283 |
|
}, |
|
{ |
|
"epoch": 1.3786407766990292, |
|
"grad_norm": 3.0674105340701687, |
|
"learning_rate": 2.1984188551932513e-06, |
|
"loss": 0.1453, |
|
"step": 284 |
|
}, |
|
{ |
|
"epoch": 1.383495145631068, |
|
"grad_norm": 3.9514680456604148, |
|
"learning_rate": 2.166921548947466e-06, |
|
"loss": 0.2181, |
|
"step": 285 |
|
}, |
|
{ |
|
"epoch": 1.3883495145631068, |
|
"grad_norm": 2.9964673443034773, |
|
"learning_rate": 2.1355889685716225e-06, |
|
"loss": 0.1539, |
|
"step": 286 |
|
}, |
|
{ |
|
"epoch": 1.3932038834951457, |
|
"grad_norm": 3.1345212878687363, |
|
"learning_rate": 2.1044229358601543e-06, |
|
"loss": 0.1716, |
|
"step": 287 |
|
}, |
|
{ |
|
"epoch": 1.3980582524271845, |
|
"grad_norm": 3.072100411386996, |
|
"learning_rate": 2.0734252629237892e-06, |
|
"loss": 0.155, |
|
"step": 288 |
|
}, |
|
{ |
|
"epoch": 1.4029126213592233, |
|
"grad_norm": 3.12310671799428, |
|
"learning_rate": 2.0425977520841837e-06, |
|
"loss": 0.1471, |
|
"step": 289 |
|
}, |
|
{ |
|
"epoch": 1.4077669902912622, |
|
"grad_norm": 2.9614677816599273, |
|
"learning_rate": 2.011942195769122e-06, |
|
"loss": 0.154, |
|
"step": 290 |
|
}, |
|
{ |
|
"epoch": 1.412621359223301, |
|
"grad_norm": 2.711481098825533, |
|
"learning_rate": 1.9814603764083112e-06, |
|
"loss": 0.1469, |
|
"step": 291 |
|
}, |
|
{ |
|
"epoch": 1.4174757281553398, |
|
"grad_norm": 3.6914419930416904, |
|
"learning_rate": 1.9511540663297284e-06, |
|
"loss": 0.1843, |
|
"step": 292 |
|
}, |
|
{ |
|
"epoch": 1.4223300970873787, |
|
"grad_norm": 2.715597600156834, |
|
"learning_rate": 1.921025027656587e-06, |
|
"loss": 0.1454, |
|
"step": 293 |
|
}, |
|
{ |
|
"epoch": 1.4271844660194175, |
|
"grad_norm": 3.342565246233355, |
|
"learning_rate": 1.8910750122048638e-06, |
|
"loss": 0.1728, |
|
"step": 294 |
|
}, |
|
{ |
|
"epoch": 1.4320388349514563, |
|
"grad_norm": 3.1790483488750163, |
|
"learning_rate": 1.8613057613814584e-06, |
|
"loss": 0.1501, |
|
"step": 295 |
|
}, |
|
{ |
|
"epoch": 1.4368932038834952, |
|
"grad_norm": 2.9576338390873245, |
|
"learning_rate": 1.8317190060829242e-06, |
|
"loss": 0.1574, |
|
"step": 296 |
|
}, |
|
{ |
|
"epoch": 1.441747572815534, |
|
"grad_norm": 3.193861736304933, |
|
"learning_rate": 1.8023164665948455e-06, |
|
"loss": 0.1804, |
|
"step": 297 |
|
}, |
|
{ |
|
"epoch": 1.4466019417475728, |
|
"grad_norm": 3.4043347296850635, |
|
"learning_rate": 1.773099852491796e-06, |
|
"loss": 0.1843, |
|
"step": 298 |
|
}, |
|
{ |
|
"epoch": 1.4514563106796117, |
|
"grad_norm": 2.9147696115237043, |
|
"learning_rate": 1.7440708625379503e-06, |
|
"loss": 0.1448, |
|
"step": 299 |
|
}, |
|
{ |
|
"epoch": 1.4563106796116505, |
|
"grad_norm": 3.67343773463864, |
|
"learning_rate": 1.7152311845883096e-06, |
|
"loss": 0.1968, |
|
"step": 300 |
|
}, |
|
{ |
|
"epoch": 1.4611650485436893, |
|
"grad_norm": 3.2061340304430224, |
|
"learning_rate": 1.686582495490554e-06, |
|
"loss": 0.2155, |
|
"step": 301 |
|
}, |
|
{ |
|
"epoch": 1.4660194174757282, |
|
"grad_norm": 3.1289793948282103, |
|
"learning_rate": 1.658126460987558e-06, |
|
"loss": 0.1748, |
|
"step": 302 |
|
}, |
|
{ |
|
"epoch": 1.470873786407767, |
|
"grad_norm": 3.4296643260484427, |
|
"learning_rate": 1.6298647356205255e-06, |
|
"loss": 0.1805, |
|
"step": 303 |
|
}, |
|
{ |
|
"epoch": 1.4757281553398058, |
|
"grad_norm": 2.6569119305504505, |
|
"learning_rate": 1.601798962632799e-06, |
|
"loss": 0.16, |
|
"step": 304 |
|
}, |
|
{ |
|
"epoch": 1.4805825242718447, |
|
"grad_norm": 3.737755757012023, |
|
"learning_rate": 1.573930773874306e-06, |
|
"loss": 0.1898, |
|
"step": 305 |
|
}, |
|
{ |
|
"epoch": 1.4854368932038835, |
|
"grad_norm": 3.285818119573095, |
|
"learning_rate": 1.5462617897066863e-06, |
|
"loss": 0.1655, |
|
"step": 306 |
|
}, |
|
{ |
|
"epoch": 1.4902912621359223, |
|
"grad_norm": 2.5935396134988675, |
|
"learning_rate": 1.5187936189090668e-06, |
|
"loss": 0.1618, |
|
"step": 307 |
|
}, |
|
{ |
|
"epoch": 1.4951456310679612, |
|
"grad_norm": 3.3197078655403596, |
|
"learning_rate": 1.491527858584535e-06, |
|
"loss": 0.1826, |
|
"step": 308 |
|
}, |
|
{ |
|
"epoch": 1.5, |
|
"grad_norm": 2.805570770413659, |
|
"learning_rate": 1.4644660940672628e-06, |
|
"loss": 0.1652, |
|
"step": 309 |
|
}, |
|
{ |
|
"epoch": 1.5048543689320388, |
|
"grad_norm": 3.87545230747002, |
|
"learning_rate": 1.4376098988303406e-06, |
|
"loss": 0.1326, |
|
"step": 310 |
|
}, |
|
{ |
|
"epoch": 1.5097087378640777, |
|
"grad_norm": 2.97370482259783, |
|
"learning_rate": 1.4109608343942855e-06, |
|
"loss": 0.1612, |
|
"step": 311 |
|
}, |
|
{ |
|
"epoch": 1.5145631067961165, |
|
"grad_norm": 2.923170481056903, |
|
"learning_rate": 1.3845204502362442e-06, |
|
"loss": 0.1735, |
|
"step": 312 |
|
}, |
|
{ |
|
"epoch": 1.5194174757281553, |
|
"grad_norm": 3.755722029546262, |
|
"learning_rate": 1.35829028369991e-06, |
|
"loss": 0.1738, |
|
"step": 313 |
|
}, |
|
{ |
|
"epoch": 1.5242718446601942, |
|
"grad_norm": 3.073418135837225, |
|
"learning_rate": 1.3322718599061252e-06, |
|
"loss": 0.1626, |
|
"step": 314 |
|
}, |
|
{ |
|
"epoch": 1.529126213592233, |
|
"grad_norm": 3.3545932991866856, |
|
"learning_rate": 1.306466691664216e-06, |
|
"loss": 0.1852, |
|
"step": 315 |
|
}, |
|
{ |
|
"epoch": 1.5339805825242718, |
|
"grad_norm": 3.0420631745569264, |
|
"learning_rate": 1.28087627938402e-06, |
|
"loss": 0.2058, |
|
"step": 316 |
|
}, |
|
{ |
|
"epoch": 1.5388349514563107, |
|
"grad_norm": 3.288084006190745, |
|
"learning_rate": 1.2555021109886589e-06, |
|
"loss": 0.1782, |
|
"step": 317 |
|
}, |
|
{ |
|
"epoch": 1.5436893203883495, |
|
"grad_norm": 3.4669766167403604, |
|
"learning_rate": 1.2303456618280141e-06, |
|
"loss": 0.1534, |
|
"step": 318 |
|
}, |
|
{ |
|
"epoch": 1.5485436893203883, |
|
"grad_norm": 2.8546758802857943, |
|
"learning_rate": 1.2054083945929534e-06, |
|
"loss": 0.1504, |
|
"step": 319 |
|
}, |
|
{ |
|
"epoch": 1.5533980582524272, |
|
"grad_norm": 2.5744717120278082, |
|
"learning_rate": 1.1806917592302763e-06, |
|
"loss": 0.1528, |
|
"step": 320 |
|
}, |
|
{ |
|
"epoch": 1.558252427184466, |
|
"grad_norm": 3.307605123935886, |
|
"learning_rate": 1.1561971928584158e-06, |
|
"loss": 0.1587, |
|
"step": 321 |
|
}, |
|
{ |
|
"epoch": 1.5631067961165048, |
|
"grad_norm": 3.1062458626679987, |
|
"learning_rate": 1.1319261196838782e-06, |
|
"loss": 0.1916, |
|
"step": 322 |
|
}, |
|
{ |
|
"epoch": 1.5679611650485437, |
|
"grad_norm": 3.806330561607627, |
|
"learning_rate": 1.1078799509184246e-06, |
|
"loss": 0.1676, |
|
"step": 323 |
|
}, |
|
{ |
|
"epoch": 1.5728155339805825, |
|
"grad_norm": 3.701532006854811, |
|
"learning_rate": 1.0840600846970333e-06, |
|
"loss": 0.2029, |
|
"step": 324 |
|
}, |
|
{ |
|
"epoch": 1.5776699029126213, |
|
"grad_norm": 3.1259651978100718, |
|
"learning_rate": 1.0604679059965923e-06, |
|
"loss": 0.2124, |
|
"step": 325 |
|
}, |
|
{ |
|
"epoch": 1.5825242718446602, |
|
"grad_norm": 3.22550450415583, |
|
"learning_rate": 1.0371047865553847e-06, |
|
"loss": 0.1994, |
|
"step": 326 |
|
}, |
|
{ |
|
"epoch": 1.587378640776699, |
|
"grad_norm": 3.2766712984535236, |
|
"learning_rate": 1.0139720847933166e-06, |
|
"loss": 0.197, |
|
"step": 327 |
|
}, |
|
{ |
|
"epoch": 1.5922330097087378, |
|
"grad_norm": 3.7019747182049834, |
|
"learning_rate": 9.91071145732948e-07, |
|
"loss": 0.175, |
|
"step": 328 |
|
}, |
|
{ |
|
"epoch": 1.5970873786407767, |
|
"grad_norm": 3.794491022818783, |
|
"learning_rate": 9.684033009212752e-07, |
|
"loss": 0.2009, |
|
"step": 329 |
|
}, |
|
{ |
|
"epoch": 1.6019417475728155, |
|
"grad_norm": 3.428558902601033, |
|
"learning_rate": 9.459698683523205e-07, |
|
"loss": 0.189, |
|
"step": 330 |
|
}, |
|
{ |
|
"epoch": 1.6067961165048543, |
|
"grad_norm": 3.744875862238987, |
|
"learning_rate": 9.237721523904891e-07, |
|
"loss": 0.2289, |
|
"step": 331 |
|
}, |
|
{ |
|
"epoch": 1.6116504854368932, |
|
"grad_norm": 2.4674345418991823, |
|
"learning_rate": 9.018114436947373e-07, |
|
"loss": 0.1579, |
|
"step": 332 |
|
}, |
|
{ |
|
"epoch": 1.616504854368932, |
|
"grad_norm": 3.058578338654439, |
|
"learning_rate": 8.80089019143524e-07, |
|
"loss": 0.1542, |
|
"step": 333 |
|
}, |
|
{ |
|
"epoch": 1.6213592233009708, |
|
"grad_norm": 2.825721990993473, |
|
"learning_rate": 8.586061417605668e-07, |
|
"loss": 0.1411, |
|
"step": 334 |
|
}, |
|
{ |
|
"epoch": 1.6262135922330097, |
|
"grad_norm": 3.450065248410591, |
|
"learning_rate": 8.373640606414097e-07, |
|
"loss": 0.1621, |
|
"step": 335 |
|
}, |
|
{ |
|
"epoch": 1.6310679611650487, |
|
"grad_norm": 3.6691308256662456, |
|
"learning_rate": 8.163640108807897e-07, |
|
"loss": 0.2316, |
|
"step": 336 |
|
}, |
|
{ |
|
"epoch": 1.6359223300970873, |
|
"grad_norm": 2.9947021116141754, |
|
"learning_rate": 7.956072135008336e-07, |
|
"loss": 0.1574, |
|
"step": 337 |
|
}, |
|
{ |
|
"epoch": 1.6407766990291264, |
|
"grad_norm": 3.278695868417599, |
|
"learning_rate": 7.750948753800508e-07, |
|
"loss": 0.1481, |
|
"step": 338 |
|
}, |
|
{ |
|
"epoch": 1.645631067961165, |
|
"grad_norm": 3.703013731780411, |
|
"learning_rate": 7.548281891831715e-07, |
|
"loss": 0.203, |
|
"step": 339 |
|
}, |
|
{ |
|
"epoch": 1.650485436893204, |
|
"grad_norm": 3.3722154741779953, |
|
"learning_rate": 7.348083332917927e-07, |
|
"loss": 0.1677, |
|
"step": 340 |
|
}, |
|
{ |
|
"epoch": 1.6553398058252426, |
|
"grad_norm": 5.7178253424321746, |
|
"learning_rate": 7.150364717358699e-07, |
|
"loss": 0.1978, |
|
"step": 341 |
|
}, |
|
{ |
|
"epoch": 1.6601941747572817, |
|
"grad_norm": 2.9025833998160206, |
|
"learning_rate": 6.955137541260287e-07, |
|
"loss": 0.1671, |
|
"step": 342 |
|
}, |
|
{ |
|
"epoch": 1.6650485436893203, |
|
"grad_norm": 3.2608625687251656, |
|
"learning_rate": 6.762413155867276e-07, |
|
"loss": 0.1748, |
|
"step": 343 |
|
}, |
|
{ |
|
"epoch": 1.6699029126213594, |
|
"grad_norm": 3.810077269631539, |
|
"learning_rate": 6.572202766902569e-07, |
|
"loss": 0.2342, |
|
"step": 344 |
|
}, |
|
{ |
|
"epoch": 1.674757281553398, |
|
"grad_norm": 3.119269749383546, |
|
"learning_rate": 6.384517433915794e-07, |
|
"loss": 0.1598, |
|
"step": 345 |
|
}, |
|
{ |
|
"epoch": 1.679611650485437, |
|
"grad_norm": 2.9692473587980013, |
|
"learning_rate": 6.199368069640343e-07, |
|
"loss": 0.1663, |
|
"step": 346 |
|
}, |
|
{ |
|
"epoch": 1.6844660194174756, |
|
"grad_norm": 3.41232556605209, |
|
"learning_rate": 6.016765439358774e-07, |
|
"loss": 0.2109, |
|
"step": 347 |
|
}, |
|
{ |
|
"epoch": 1.6893203883495147, |
|
"grad_norm": 2.970773165693587, |
|
"learning_rate": 5.836720160276971e-07, |
|
"loss": 0.183, |
|
"step": 348 |
|
}, |
|
{ |
|
"epoch": 1.6941747572815533, |
|
"grad_norm": 3.1209834435239054, |
|
"learning_rate": 5.659242700906719e-07, |
|
"loss": 0.1703, |
|
"step": 349 |
|
}, |
|
{ |
|
"epoch": 1.6990291262135924, |
|
"grad_norm": 2.706946357159076, |
|
"learning_rate": 5.484343380457124e-07, |
|
"loss": 0.1603, |
|
"step": 350 |
|
}, |
|
{ |
|
"epoch": 1.703883495145631, |
|
"grad_norm": 3.085681096910252, |
|
"learning_rate": 5.312032368234527e-07, |
|
"loss": 0.1809, |
|
"step": 351 |
|
}, |
|
{ |
|
"epoch": 1.70873786407767, |
|
"grad_norm": 3.0908332326767227, |
|
"learning_rate": 5.1423196830513e-07, |
|
"loss": 0.183, |
|
"step": 352 |
|
}, |
|
{ |
|
"epoch": 1.7135922330097086, |
|
"grad_norm": 3.0274657389189654, |
|
"learning_rate": 4.975215192643246e-07, |
|
"loss": 0.1634, |
|
"step": 353 |
|
}, |
|
{ |
|
"epoch": 1.7184466019417477, |
|
"grad_norm": 2.5910174115174067, |
|
"learning_rate": 4.81072861309591e-07, |
|
"loss": 0.1224, |
|
"step": 354 |
|
}, |
|
{ |
|
"epoch": 1.7233009708737863, |
|
"grad_norm": 2.823969566040542, |
|
"learning_rate": 4.648869508279613e-07, |
|
"loss": 0.157, |
|
"step": 355 |
|
}, |
|
{ |
|
"epoch": 1.7281553398058254, |
|
"grad_norm": 2.9595443993378625, |
|
"learning_rate": 4.4896472892933693e-07, |
|
"loss": 0.1448, |
|
"step": 356 |
|
}, |
|
{ |
|
"epoch": 1.733009708737864, |
|
"grad_norm": 2.9674587181707066, |
|
"learning_rate": 4.333071213917722e-07, |
|
"loss": 0.1561, |
|
"step": 357 |
|
}, |
|
{ |
|
"epoch": 1.737864077669903, |
|
"grad_norm": 2.971975487243502, |
|
"learning_rate": 4.179150386076425e-07, |
|
"loss": 0.1807, |
|
"step": 358 |
|
}, |
|
{ |
|
"epoch": 1.7427184466019416, |
|
"grad_norm": 3.5592330828269465, |
|
"learning_rate": 4.027893755307144e-07, |
|
"loss": 0.1957, |
|
"step": 359 |
|
}, |
|
{ |
|
"epoch": 1.7475728155339807, |
|
"grad_norm": 3.4457487102374276, |
|
"learning_rate": 3.8793101162410417e-07, |
|
"loss": 0.1815, |
|
"step": 360 |
|
}, |
|
{ |
|
"epoch": 1.7524271844660193, |
|
"grad_norm": 3.134881639795192, |
|
"learning_rate": 3.733408108091485e-07, |
|
"loss": 0.1673, |
|
"step": 361 |
|
}, |
|
{ |
|
"epoch": 1.7572815533980584, |
|
"grad_norm": 3.678352034117385, |
|
"learning_rate": 3.5901962141516975e-07, |
|
"loss": 0.1868, |
|
"step": 362 |
|
}, |
|
{ |
|
"epoch": 1.762135922330097, |
|
"grad_norm": 3.122016578093726, |
|
"learning_rate": 3.4496827613015206e-07, |
|
"loss": 0.1693, |
|
"step": 363 |
|
}, |
|
{ |
|
"epoch": 1.766990291262136, |
|
"grad_norm": 3.058903089372746, |
|
"learning_rate": 3.3118759195232273e-07, |
|
"loss": 0.1581, |
|
"step": 364 |
|
}, |
|
{ |
|
"epoch": 1.7718446601941746, |
|
"grad_norm": 3.59995245371114, |
|
"learning_rate": 3.176783701426528e-07, |
|
"loss": 0.2394, |
|
"step": 365 |
|
}, |
|
{ |
|
"epoch": 1.7766990291262137, |
|
"grad_norm": 3.459079793100832, |
|
"learning_rate": 3.0444139617826605e-07, |
|
"loss": 0.1932, |
|
"step": 366 |
|
}, |
|
{ |
|
"epoch": 1.7815533980582523, |
|
"grad_norm": 3.7129334593953818, |
|
"learning_rate": 2.91477439706771e-07, |
|
"loss": 0.2044, |
|
"step": 367 |
|
}, |
|
{ |
|
"epoch": 1.7864077669902914, |
|
"grad_norm": 3.604631619699467, |
|
"learning_rate": 2.787872545015069e-07, |
|
"loss": 0.1796, |
|
"step": 368 |
|
}, |
|
{ |
|
"epoch": 1.79126213592233, |
|
"grad_norm": 3.160625540613814, |
|
"learning_rate": 2.663715784177201e-07, |
|
"loss": 0.1859, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 1.796116504854369, |
|
"grad_norm": 3.330868483974817, |
|
"learning_rate": 2.542311333496622e-07, |
|
"loss": 0.1366, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 1.8009708737864076, |
|
"grad_norm": 3.312629290688821, |
|
"learning_rate": 2.423666251886114e-07, |
|
"loss": 0.1682, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 1.8058252427184467, |
|
"grad_norm": 3.2912055141193104, |
|
"learning_rate": 2.307787437818365e-07, |
|
"loss": 0.1658, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 1.8106796116504853, |
|
"grad_norm": 3.813237415315296, |
|
"learning_rate": 2.1946816289248163e-07, |
|
"loss": 0.1761, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 1.8155339805825244, |
|
"grad_norm": 3.46072257123938, |
|
"learning_rate": 2.0843554016039326e-07, |
|
"loss": 0.1773, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 1.820388349514563, |
|
"grad_norm": 3.1090270180019077, |
|
"learning_rate": 1.9768151706388016e-07, |
|
"loss": 0.156, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 1.825242718446602, |
|
"grad_norm": 3.7606812831013468, |
|
"learning_rate": 1.8720671888242058e-07, |
|
"loss": 0.1659, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 1.8300970873786406, |
|
"grad_norm": 3.630180348742183, |
|
"learning_rate": 1.7701175466029895e-07, |
|
"loss": 0.2017, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 1.8349514563106797, |
|
"grad_norm": 3.548564346588287, |
|
"learning_rate": 1.6709721717120042e-07, |
|
"loss": 0.2736, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 1.8398058252427183, |
|
"grad_norm": 2.58820624396961, |
|
"learning_rate": 1.574636828837395e-07, |
|
"loss": 0.1636, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 1.8446601941747574, |
|
"grad_norm": 3.2009268622796507, |
|
"learning_rate": 1.4811171192794628e-07, |
|
"loss": 0.1477, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 1.849514563106796, |
|
"grad_norm": 3.668850842502455, |
|
"learning_rate": 1.3904184806269705e-07, |
|
"loss": 0.208, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 1.854368932038835, |
|
"grad_norm": 2.8566670252001605, |
|
"learning_rate": 1.3025461864409395e-07, |
|
"loss": 0.1333, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 1.8592233009708736, |
|
"grad_norm": 3.769624398617395, |
|
"learning_rate": 1.2175053459481e-07, |
|
"loss": 0.2162, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 1.8640776699029127, |
|
"grad_norm": 3.5203837282760717, |
|
"learning_rate": 1.1353009037437523e-07, |
|
"loss": 0.1783, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 1.8689320388349513, |
|
"grad_norm": 3.0533590437151257, |
|
"learning_rate": 1.0559376395043285e-07, |
|
"loss": 0.1636, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 1.8737864077669903, |
|
"grad_norm": 3.244251275203498, |
|
"learning_rate": 9.794201677094162e-08, |
|
"loss": 0.1659, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 1.8786407766990292, |
|
"grad_norm": 3.5161837604256796, |
|
"learning_rate": 9.05752937373533e-08, |
|
"loss": 0.1707, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 1.883495145631068, |
|
"grad_norm": 2.8369908420705965, |
|
"learning_rate": 8.34940231787379e-08, |
|
"loss": 0.1493, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 1.8883495145631068, |
|
"grad_norm": 2.9716609378210834, |
|
"learning_rate": 7.66986168268824e-08, |
|
"loss": 0.1417, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 1.8932038834951457, |
|
"grad_norm": 3.542879526364103, |
|
"learning_rate": 7.018946979234997e-08, |
|
"loss": 0.2002, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 1.8980582524271845, |
|
"grad_norm": 3.0956905601973057, |
|
"learning_rate": 6.396696054150719e-08, |
|
"loss": 0.1513, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 1.9029126213592233, |
|
"grad_norm": 3.4395097945742426, |
|
"learning_rate": 5.803145087451945e-08, |
|
"loss": 0.1856, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 1.9077669902912622, |
|
"grad_norm": 3.5337908722933937, |
|
"learning_rate": 5.238328590431163e-08, |
|
"loss": 0.1943, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 1.912621359223301, |
|
"grad_norm": 2.7737787495552304, |
|
"learning_rate": 4.702279403650534e-08, |
|
"loss": 0.1496, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 1.9174757281553398, |
|
"grad_norm": 3.8937710005570234, |
|
"learning_rate": 4.195028695032133e-08, |
|
"loss": 0.2289, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 1.9223300970873787, |
|
"grad_norm": 3.3826066398740853, |
|
"learning_rate": 3.716605958046071e-08, |
|
"loss": 0.179, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 1.9271844660194175, |
|
"grad_norm": 3.3574202573717886, |
|
"learning_rate": 3.2670390099951985e-08, |
|
"loss": 0.1842, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 1.9320388349514563, |
|
"grad_norm": 3.4211377835157464, |
|
"learning_rate": 2.846353990398065e-08, |
|
"loss": 0.1794, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 1.9368932038834952, |
|
"grad_norm": 3.2237719120411232, |
|
"learning_rate": 2.4545753594688582e-08, |
|
"loss": 0.1531, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 1.941747572815534, |
|
"grad_norm": 3.6660882822246132, |
|
"learning_rate": 2.0917258966953735e-08, |
|
"loss": 0.1851, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 1.9466019417475728, |
|
"grad_norm": 3.384308263205741, |
|
"learning_rate": 1.757826699514298e-08, |
|
"loss": 0.2262, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 1.9514563106796117, |
|
"grad_norm": 3.0941560464813507, |
|
"learning_rate": 1.4528971820846894e-08, |
|
"loss": 0.1645, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 1.9563106796116505, |
|
"grad_norm": 2.8371139141201667, |
|
"learning_rate": 1.176955074159214e-08, |
|
"loss": 0.1382, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 1.9611650485436893, |
|
"grad_norm": 3.2651439167859735, |
|
"learning_rate": 9.300164200530815e-09, |
|
"loss": 0.1556, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 1.9660194174757282, |
|
"grad_norm": 3.223736788971525, |
|
"learning_rate": 7.120955777112915e-09, |
|
"loss": 0.1552, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 1.970873786407767, |
|
"grad_norm": 3.297969705359936, |
|
"learning_rate": 5.232052178738567e-09, |
|
"loss": 0.1517, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 1.9757281553398058, |
|
"grad_norm": 3.3209447677111235, |
|
"learning_rate": 3.633563233388926e-09, |
|
"loss": 0.1755, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 1.9805825242718447, |
|
"grad_norm": 3.5781779376616685, |
|
"learning_rate": 2.3255818832423894e-09, |
|
"loss": 0.1723, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 1.9854368932038835, |
|
"grad_norm": 3.17071653312937, |
|
"learning_rate": 1.3081841792694783e-09, |
|
"loss": 0.1544, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 1.9902912621359223, |
|
"grad_norm": 3.3777328087738194, |
|
"learning_rate": 5.814292768108187e-10, |
|
"loss": 0.1555, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 1.9951456310679612, |
|
"grad_norm": 3.2676077636224314, |
|
"learning_rate": 1.453594321393359e-10, |
|
"loss": 0.178, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"grad_norm": 2.731489227558426, |
|
"learning_rate": 0.0, |
|
"loss": 0.1683, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 2.0, |
|
"step": 412, |
|
"total_flos": 956555218944.0, |
|
"train_loss": 0.23991625754216922, |
|
"train_runtime": 257.1054, |
|
"train_samples_per_second": 12.765, |
|
"train_steps_per_second": 1.602 |
|
} |
|
], |
|
"logging_steps": 1, |
|
"max_steps": 412, |
|
"num_input_tokens_seen": 0, |
|
"num_train_epochs": 2, |
|
"save_steps": 50000000, |
|
"stateful_callbacks": { |
|
"TrainerControl": { |
|
"args": { |
|
"should_epoch_stop": false, |
|
"should_evaluate": false, |
|
"should_log": false, |
|
"should_save": true, |
|
"should_training_stop": true |
|
}, |
|
"attributes": {} |
|
} |
|
}, |
|
"total_flos": 956555218944.0, |
|
"train_batch_size": 2, |
|
"trial_name": null, |
|
"trial_params": null |
|
} |
|