{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 200,
  "global_step": 1334,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0014992503748125937,
      "grad_norm": 5.832151292802266,
      "learning_rate": 9.999986134743949e-06,
      "loss": 0.5922,
      "step": 1
    },
    {
      "epoch": 0.0029985007496251873,
      "grad_norm": 3.313562711862917,
      "learning_rate": 9.99994453905269e-06,
      "loss": 0.5035,
      "step": 2
    },
    {
      "epoch": 0.004497751124437781,
      "grad_norm": 3.8053612317508647,
      "learning_rate": 9.999875213156919e-06,
      "loss": 0.4852,
      "step": 3
    },
    {
      "epoch": 0.005997001499250375,
      "grad_norm": 2.975780829326707,
      "learning_rate": 9.999778157441126e-06,
      "loss": 0.448,
      "step": 4
    },
    {
      "epoch": 0.0074962518740629685,
      "grad_norm": 3.4745434712688645,
      "learning_rate": 9.99965337244359e-06,
      "loss": 0.4017,
      "step": 5
    },
    {
      "epoch": 0.008995502248875561,
      "grad_norm": 2.766265131790075,
      "learning_rate": 9.999500858856382e-06,
      "loss": 0.444,
      "step": 6
    },
    {
      "epoch": 0.010494752623688156,
      "grad_norm": 2.87369120054187,
      "learning_rate": 9.999320617525356e-06,
      "loss": 0.4082,
      "step": 7
    },
    {
      "epoch": 0.01199400299850075,
      "grad_norm": 2.5779601980173705,
      "learning_rate": 9.999112649450154e-06,
      "loss": 0.424,
      "step": 8
    },
    {
      "epoch": 0.013493253373313344,
      "grad_norm": 2.535823912227413,
      "learning_rate": 9.998876955784183e-06,
      "loss": 0.3131,
      "step": 9
    },
    {
      "epoch": 0.014992503748125937,
      "grad_norm": 2.4492135913471715,
      "learning_rate": 9.998613537834625e-06,
      "loss": 0.3823,
      "step": 10
    },
    {
      "epoch": 0.01649175412293853,
      "grad_norm": 2.4404067828272082,
      "learning_rate": 9.998322397062426e-06,
      "loss": 0.4054,
      "step": 11
    },
    {
      "epoch": 0.017991004497751123,
      "grad_norm": 2.0448103433799005,
      "learning_rate": 9.99800353508228e-06,
      "loss": 0.3606,
      "step": 12
    },
    {
      "epoch": 0.019490254872563718,
      "grad_norm": 2.0734295930755224,
      "learning_rate": 9.997656953662627e-06,
      "loss": 0.3122,
      "step": 13
    },
    {
      "epoch": 0.020989505247376312,
      "grad_norm": 2.643785230749524,
      "learning_rate": 9.997282654725645e-06,
      "loss": 0.4157,
      "step": 14
    },
    {
      "epoch": 0.022488755622188907,
      "grad_norm": 2.146621645771915,
      "learning_rate": 9.996880640347234e-06,
      "loss": 0.3606,
      "step": 15
    },
    {
      "epoch": 0.0239880059970015,
      "grad_norm": 1.826987060952611,
      "learning_rate": 9.99645091275701e-06,
      "loss": 0.3106,
      "step": 16
    },
    {
      "epoch": 0.025487256371814093,
      "grad_norm": 2.2986446834557355,
      "learning_rate": 9.99599347433828e-06,
      "loss": 0.3963,
      "step": 17
    },
    {
      "epoch": 0.026986506746626688,
      "grad_norm": 2.3216100116981404,
      "learning_rate": 9.99550832762805e-06,
      "loss": 0.3858,
      "step": 18
    },
    {
      "epoch": 0.02848575712143928,
      "grad_norm": 1.6900482709514875,
      "learning_rate": 9.99499547531699e-06,
      "loss": 0.3069,
      "step": 19
    },
    {
      "epoch": 0.029985007496251874,
      "grad_norm": 2.310364169428319,
      "learning_rate": 9.994454920249433e-06,
      "loss": 0.3772,
      "step": 20
    },
    {
      "epoch": 0.031484257871064465,
      "grad_norm": 2.530397424165797,
      "learning_rate": 9.993886665423348e-06,
      "loss": 0.3945,
      "step": 21
    },
    {
      "epoch": 0.03298350824587706,
      "grad_norm": 2.1745390028353384,
      "learning_rate": 9.993290713990343e-06,
      "loss": 0.3738,
      "step": 22
    },
    {
      "epoch": 0.034482758620689655,
      "grad_norm": 1.8943278042394813,
      "learning_rate": 9.99266706925562e-06,
      "loss": 0.3807,
      "step": 23
    },
    {
      "epoch": 0.035982008995502246,
      "grad_norm": 2.187296861936023,
      "learning_rate": 9.992015734677979e-06,
      "loss": 0.3541,
      "step": 24
    },
    {
      "epoch": 0.037481259370314844,
      "grad_norm": 2.0547900805990653,
      "learning_rate": 9.991336713869785e-06,
      "loss": 0.3704,
      "step": 25
    },
    {
      "epoch": 0.038980509745127435,
      "grad_norm": 1.6612216696005873,
      "learning_rate": 9.99063001059696e-06,
      "loss": 0.3086,
      "step": 26
    },
    {
      "epoch": 0.04047976011994003,
      "grad_norm": 2.0346781109199727,
      "learning_rate": 9.989895628778952e-06,
      "loss": 0.3637,
      "step": 27
    },
    {
      "epoch": 0.041979010494752625,
      "grad_norm": 1.9918388801867348,
      "learning_rate": 9.989133572488716e-06,
      "loss": 0.2998,
      "step": 28
    },
    {
      "epoch": 0.043478260869565216,
      "grad_norm": 2.090041625605034,
      "learning_rate": 9.988343845952697e-06,
      "loss": 0.3417,
      "step": 29
    },
    {
      "epoch": 0.044977511244377814,
      "grad_norm": 1.6921443817720527,
      "learning_rate": 9.987526453550798e-06,
      "loss": 0.3008,
      "step": 30
    },
    {
      "epoch": 0.046476761619190406,
      "grad_norm": 1.9721881389502063,
      "learning_rate": 9.98668139981636e-06,
      "loss": 0.3079,
      "step": 31
    },
    {
      "epoch": 0.047976011994003,
      "grad_norm": 2.034680286190636,
      "learning_rate": 9.98580868943614e-06,
      "loss": 0.3908,
      "step": 32
    },
    {
      "epoch": 0.049475262368815595,
      "grad_norm": 1.814883175859987,
      "learning_rate": 9.984908327250278e-06,
      "loss": 0.2917,
      "step": 33
    },
    {
      "epoch": 0.050974512743628186,
      "grad_norm": 1.5888369679528016,
      "learning_rate": 9.983980318252274e-06,
      "loss": 0.2904,
      "step": 34
    },
    {
      "epoch": 0.05247376311844078,
      "grad_norm": 1.6870813628116277,
      "learning_rate": 9.983024667588961e-06,
      "loss": 0.3447,
      "step": 35
    },
    {
      "epoch": 0.053973013493253376,
      "grad_norm": 1.659882359445096,
      "learning_rate": 9.982041380560476e-06,
      "loss": 0.3281,
      "step": 36
    },
    {
      "epoch": 0.05547226386806597,
      "grad_norm": 1.7935822097403369,
      "learning_rate": 9.98103046262023e-06,
      "loss": 0.317,
      "step": 37
    },
    {
      "epoch": 0.05697151424287856,
      "grad_norm": 1.8433431515779493,
      "learning_rate": 9.979991919374877e-06,
      "loss": 0.3631,
      "step": 38
    },
    {
      "epoch": 0.05847076461769116,
      "grad_norm": 1.7345926587775797,
      "learning_rate": 9.978925756584284e-06,
      "loss": 0.3836,
      "step": 39
    },
    {
      "epoch": 0.05997001499250375,
      "grad_norm": 1.9585133082499135,
      "learning_rate": 9.9778319801615e-06,
      "loss": 0.294,
      "step": 40
    },
    {
      "epoch": 0.06146926536731634,
      "grad_norm": 1.9811955579650895,
      "learning_rate": 9.976710596172721e-06,
      "loss": 0.3655,
      "step": 41
    },
    {
      "epoch": 0.06296851574212893,
      "grad_norm": 2.160259859236746,
      "learning_rate": 9.975561610837254e-06,
      "loss": 0.3146,
      "step": 42
    },
    {
      "epoch": 0.06446776611694154,
      "grad_norm": 1.7207751114701895,
      "learning_rate": 9.974385030527496e-06,
      "loss": 0.3133,
      "step": 43
    },
    {
      "epoch": 0.06596701649175413,
      "grad_norm": 1.7855443710047219,
      "learning_rate": 9.973180861768874e-06,
      "loss": 0.309,
      "step": 44
    },
    {
      "epoch": 0.06746626686656672,
      "grad_norm": 1.8337376722089804,
      "learning_rate": 9.971949111239838e-06,
      "loss": 0.3603,
      "step": 45
    },
    {
      "epoch": 0.06896551724137931,
      "grad_norm": 1.7686355552580748,
      "learning_rate": 9.970689785771798e-06,
      "loss": 0.3572,
      "step": 46
    },
    {
      "epoch": 0.0704647676161919,
      "grad_norm": 1.78765448122615,
      "learning_rate": 9.969402892349105e-06,
      "loss": 0.347,
      "step": 47
    },
    {
      "epoch": 0.07196401799100449,
      "grad_norm": 1.4875519550906255,
      "learning_rate": 9.968088438109002e-06,
      "loss": 0.2704,
      "step": 48
    },
    {
      "epoch": 0.0734632683658171,
      "grad_norm": 1.9732499378428838,
      "learning_rate": 9.966746430341584e-06,
      "loss": 0.3503,
      "step": 49
    },
    {
      "epoch": 0.07496251874062969,
      "grad_norm": 2.146905541628959,
      "learning_rate": 9.965376876489765e-06,
      "loss": 0.3623,
      "step": 50
    },
    {
      "epoch": 0.07646176911544228,
      "grad_norm": 1.614927659858716,
      "learning_rate": 9.963979784149232e-06,
      "loss": 0.3578,
      "step": 51
    },
    {
      "epoch": 0.07796101949025487,
      "grad_norm": 1.8675950039847051,
      "learning_rate": 9.962555161068401e-06,
      "loss": 0.3507,
      "step": 52
    },
    {
      "epoch": 0.07946026986506746,
      "grad_norm": 1.7892287490203982,
      "learning_rate": 9.961103015148376e-06,
      "loss": 0.3334,
      "step": 53
    },
    {
      "epoch": 0.08095952023988005,
      "grad_norm": 1.5076155316622697,
      "learning_rate": 9.95962335444291e-06,
      "loss": 0.283,
      "step": 54
    },
    {
      "epoch": 0.08245877061469266,
      "grad_norm": 1.8423707803571059,
      "learning_rate": 9.958116187158351e-06,
      "loss": 0.3306,
      "step": 55
    },
    {
      "epoch": 0.08395802098950525,
      "grad_norm": 1.8659065819141958,
      "learning_rate": 9.956581521653604e-06,
      "loss": 0.3626,
      "step": 56
    },
    {
      "epoch": 0.08545727136431784,
      "grad_norm": 1.9384444327564434,
      "learning_rate": 9.955019366440082e-06,
      "loss": 0.3544,
      "step": 57
    },
    {
      "epoch": 0.08695652173913043,
      "grad_norm": 1.9518720846770397,
      "learning_rate": 9.953429730181653e-06,
      "loss": 0.3983,
      "step": 58
    },
    {
      "epoch": 0.08845577211394302,
      "grad_norm": 1.315338496772981,
      "learning_rate": 9.95181262169461e-06,
      "loss": 0.2553,
      "step": 59
    },
    {
      "epoch": 0.08995502248875563,
      "grad_norm": 1.6617025672603152,
      "learning_rate": 9.950168049947597e-06,
      "loss": 0.3174,
      "step": 60
    },
    {
      "epoch": 0.09145427286356822,
      "grad_norm": 1.737723627580491,
      "learning_rate": 9.948496024061577e-06,
      "loss": 0.3343,
      "step": 61
    },
    {
      "epoch": 0.09295352323838081,
      "grad_norm": 1.7785564379947172,
      "learning_rate": 9.94679655330978e-06,
      "loss": 0.3968,
      "step": 62
    },
    {
      "epoch": 0.0944527736131934,
      "grad_norm": 1.803424330169469,
      "learning_rate": 9.945069647117645e-06,
      "loss": 0.3156,
      "step": 63
    },
    {
      "epoch": 0.095952023988006,
      "grad_norm": 1.9221269098368197,
      "learning_rate": 9.943315315062766e-06,
      "loss": 0.3868,
      "step": 64
    },
    {
      "epoch": 0.09745127436281859,
      "grad_norm": 1.4963224653122351,
      "learning_rate": 9.941533566874852e-06,
      "loss": 0.2264,
      "step": 65
    },
    {
      "epoch": 0.09895052473763119,
      "grad_norm": 1.8089627331199123,
      "learning_rate": 9.939724412435661e-06,
      "loss": 0.3716,
      "step": 66
    },
    {
      "epoch": 0.10044977511244378,
      "grad_norm": 1.3393929513761427,
      "learning_rate": 9.937887861778947e-06,
      "loss": 0.2538,
      "step": 67
    },
    {
      "epoch": 0.10194902548725637,
      "grad_norm": 1.772529252578735,
      "learning_rate": 9.93602392509041e-06,
      "loss": 0.3334,
      "step": 68
    },
    {
      "epoch": 0.10344827586206896,
      "grad_norm": 1.6891607331667922,
      "learning_rate": 9.934132612707631e-06,
      "loss": 0.3189,
      "step": 69
    },
    {
      "epoch": 0.10494752623688156,
      "grad_norm": 1.711007340717859,
      "learning_rate": 9.932213935120025e-06,
      "loss": 0.3096,
      "step": 70
    },
    {
      "epoch": 0.10644677661169415,
      "grad_norm": 1.3911686059632464,
      "learning_rate": 9.930267902968774e-06,
      "loss": 0.2781,
      "step": 71
    },
    {
      "epoch": 0.10794602698650675,
      "grad_norm": 2.094529457781407,
      "learning_rate": 9.928294527046771e-06,
      "loss": 0.4166,
      "step": 72
    },
    {
      "epoch": 0.10944527736131934,
      "grad_norm": 1.927455298606518,
      "learning_rate": 9.92629381829856e-06,
      "loss": 0.3369,
      "step": 73
    },
    {
      "epoch": 0.11094452773613193,
      "grad_norm": 2.0073308363605498,
      "learning_rate": 9.924265787820279e-06,
      "loss": 0.4219,
      "step": 74
    },
    {
      "epoch": 0.11244377811094453,
      "grad_norm": 1.7638340255071605,
      "learning_rate": 9.92221044685959e-06,
      "loss": 0.3296,
      "step": 75
    },
    {
      "epoch": 0.11394302848575712,
      "grad_norm": 1.7951654413405462,
      "learning_rate": 9.920127806815627e-06,
      "loss": 0.3512,
      "step": 76
    },
    {
      "epoch": 0.11544227886056972,
      "grad_norm": 1.3789136508692579,
      "learning_rate": 9.918017879238922e-06,
      "loss": 0.3192,
      "step": 77
    },
    {
      "epoch": 0.11694152923538231,
      "grad_norm": 1.5745019923018957,
      "learning_rate": 9.915880675831352e-06,
      "loss": 0.2941,
      "step": 78
    },
    {
      "epoch": 0.1184407796101949,
      "grad_norm": 1.8636959024858308,
      "learning_rate": 9.913716208446067e-06,
      "loss": 0.4112,
      "step": 79
    },
    {
      "epoch": 0.1199400299850075,
      "grad_norm": 1.7872534396339679,
      "learning_rate": 9.91152448908742e-06,
      "loss": 0.3337,
      "step": 80
    },
    {
      "epoch": 0.12143928035982009,
      "grad_norm": 1.5591473506124205,
      "learning_rate": 9.909305529910917e-06,
      "loss": 0.2813,
      "step": 81
    },
    {
      "epoch": 0.12293853073463268,
      "grad_norm": 1.877826476516608,
      "learning_rate": 9.907059343223129e-06,
      "loss": 0.3691,
      "step": 82
    },
    {
      "epoch": 0.12443778110944528,
      "grad_norm": 1.3956477717435558,
      "learning_rate": 9.904785941481638e-06,
      "loss": 0.2706,
      "step": 83
    },
    {
      "epoch": 0.12593703148425786,
      "grad_norm": 1.4855070209720886,
      "learning_rate": 9.902485337294965e-06,
      "loss": 0.2799,
      "step": 84
    },
    {
      "epoch": 0.12743628185907047,
      "grad_norm": 1.58176239758115,
      "learning_rate": 9.900157543422493e-06,
      "loss": 0.2709,
      "step": 85
    },
    {
      "epoch": 0.12893553223388307,
      "grad_norm": 1.775459849416814,
      "learning_rate": 9.897802572774407e-06,
      "loss": 0.3234,
      "step": 86
    },
    {
      "epoch": 0.13043478260869565,
      "grad_norm": 1.3990606844842433,
      "learning_rate": 9.895420438411616e-06,
      "loss": 0.2972,
      "step": 87
    },
    {
      "epoch": 0.13193403298350825,
      "grad_norm": 1.5166812129501177,
      "learning_rate": 9.893011153545679e-06,
      "loss": 0.2719,
      "step": 88
    },
    {
      "epoch": 0.13343328335832083,
      "grad_norm": 1.616277925191225,
      "learning_rate": 9.89057473153874e-06,
      "loss": 0.3291,
      "step": 89
    },
    {
      "epoch": 0.13493253373313344,
      "grad_norm": 1.8018886039796154,
      "learning_rate": 9.888111185903442e-06,
      "loss": 0.3504,
      "step": 90
    },
    {
      "epoch": 0.136431784107946,
      "grad_norm": 1.9006962566663286,
      "learning_rate": 9.885620530302865e-06,
      "loss": 0.3651,
      "step": 91
    },
    {
      "epoch": 0.13793103448275862,
      "grad_norm": 1.7392477925879741,
      "learning_rate": 9.883102778550434e-06,
      "loss": 0.3203,
      "step": 92
    },
    {
      "epoch": 0.13943028485757122,
      "grad_norm": 1.5542968522475287,
      "learning_rate": 9.880557944609863e-06,
      "loss": 0.2724,
      "step": 93
    },
    {
      "epoch": 0.1409295352323838,
      "grad_norm": 1.7350274457709831,
      "learning_rate": 9.877986042595062e-06,
      "loss": 0.297,
      "step": 94
    },
    {
      "epoch": 0.1424287856071964,
      "grad_norm": 1.7460497372438528,
      "learning_rate": 9.87538708677006e-06,
      "loss": 0.3302,
      "step": 95
    },
    {
      "epoch": 0.14392803598200898,
      "grad_norm": 1.8124484372389353,
      "learning_rate": 9.872761091548933e-06,
      "loss": 0.3909,
      "step": 96
    },
    {
      "epoch": 0.1454272863568216,
      "grad_norm": 1.7120552903315012,
      "learning_rate": 9.870108071495721e-06,
      "loss": 0.3512,
      "step": 97
    },
    {
      "epoch": 0.1469265367316342,
      "grad_norm": 1.5110515378145573,
      "learning_rate": 9.867428041324345e-06,
      "loss": 0.3439,
      "step": 98
    },
    {
      "epoch": 0.14842578710644677,
      "grad_norm": 1.575901576032094,
      "learning_rate": 9.864721015898524e-06,
      "loss": 0.2743,
      "step": 99
    },
    {
      "epoch": 0.14992503748125938,
      "grad_norm": 1.72771796682727,
      "learning_rate": 9.861987010231701e-06,
      "loss": 0.3792,
      "step": 100
    },
    {
      "epoch": 0.15142428785607195,
      "grad_norm": 1.677067090009642,
      "learning_rate": 9.85922603948695e-06,
      "loss": 0.3108,
      "step": 101
    },
    {
      "epoch": 0.15292353823088456,
      "grad_norm": 1.636594343381385,
      "learning_rate": 9.856438118976899e-06,
      "loss": 0.3234,
      "step": 102
    },
    {
      "epoch": 0.15442278860569716,
      "grad_norm": 1.5690756819318563,
      "learning_rate": 9.853623264163638e-06,
      "loss": 0.2662,
      "step": 103
    },
    {
      "epoch": 0.15592203898050974,
      "grad_norm": 1.5003127598504868,
      "learning_rate": 9.850781490658643e-06,
      "loss": 0.3091,
      "step": 104
    },
    {
      "epoch": 0.15742128935532235,
      "grad_norm": 1.5517017459745142,
      "learning_rate": 9.84791281422268e-06,
      "loss": 0.3002,
      "step": 105
    },
    {
      "epoch": 0.15892053973013492,
      "grad_norm": 1.6513578763089052,
      "learning_rate": 9.845017250765721e-06,
      "loss": 0.3715,
      "step": 106
    },
    {
      "epoch": 0.16041979010494753,
      "grad_norm": 1.4661710458363477,
      "learning_rate": 9.84209481634686e-06,
      "loss": 0.3269,
      "step": 107
    },
    {
      "epoch": 0.1619190404797601,
      "grad_norm": 1.4280739519111159,
      "learning_rate": 9.839145527174216e-06,
      "loss": 0.2853,
      "step": 108
    },
    {
      "epoch": 0.1634182908545727,
      "grad_norm": 1.4966618108676135,
      "learning_rate": 9.836169399604846e-06,
      "loss": 0.2783,
      "step": 109
    },
    {
      "epoch": 0.16491754122938532,
      "grad_norm": 1.820622143024208,
      "learning_rate": 9.833166450144665e-06,
      "loss": 0.304,
      "step": 110
    },
    {
      "epoch": 0.1664167916041979,
      "grad_norm": 1.7348469895307392,
      "learning_rate": 9.830136695448334e-06,
      "loss": 0.307,
      "step": 111
    },
    {
      "epoch": 0.1679160419790105,
      "grad_norm": 1.6477380424797046,
      "learning_rate": 9.827080152319182e-06,
      "loss": 0.3428,
      "step": 112
    },
    {
      "epoch": 0.16941529235382308,
      "grad_norm": 1.797808391190219,
      "learning_rate": 9.823996837709114e-06,
      "loss": 0.3205,
      "step": 113
    },
    {
      "epoch": 0.17091454272863568,
      "grad_norm": 1.5866152873541874,
      "learning_rate": 9.820886768718503e-06,
      "loss": 0.34,
      "step": 114
    },
    {
      "epoch": 0.1724137931034483,
      "grad_norm": 1.5259306986626615,
      "learning_rate": 9.817749962596115e-06,
      "loss": 0.3371,
      "step": 115
    },
    {
      "epoch": 0.17391304347826086,
      "grad_norm": 1.727311329625642,
      "learning_rate": 9.814586436738998e-06,
      "loss": 0.3481,
      "step": 116
    },
    {
      "epoch": 0.17541229385307347,
      "grad_norm": 1.768431243352721,
      "learning_rate": 9.811396208692387e-06,
      "loss": 0.3234,
      "step": 117
    },
    {
      "epoch": 0.17691154422788605,
      "grad_norm": 1.7910143368656124,
      "learning_rate": 9.808179296149616e-06,
      "loss": 0.3131,
      "step": 118
    },
    {
      "epoch": 0.17841079460269865,
      "grad_norm": 1.6989000545542607,
      "learning_rate": 9.804935716952011e-06,
      "loss": 0.3384,
      "step": 119
    },
    {
      "epoch": 0.17991004497751126,
      "grad_norm": 1.6517482708330093,
      "learning_rate": 9.801665489088795e-06,
      "loss": 0.3314,
      "step": 120
    },
    {
      "epoch": 0.18140929535232383,
      "grad_norm": 1.9814538682881786,
      "learning_rate": 9.798368630696984e-06,
      "loss": 0.3509,
      "step": 121
    },
    {
      "epoch": 0.18290854572713644,
      "grad_norm": 1.6355141459388147,
      "learning_rate": 9.795045160061295e-06,
      "loss": 0.2916,
      "step": 122
    },
    {
      "epoch": 0.18440779610194902,
      "grad_norm": 1.589896307882102,
      "learning_rate": 9.791695095614036e-06,
      "loss": 0.3304,
      "step": 123
    },
    {
      "epoch": 0.18590704647676162,
      "grad_norm": 1.509494539611863,
      "learning_rate": 9.788318455935008e-06,
      "loss": 0.2895,
      "step": 124
    },
    {
      "epoch": 0.1874062968515742,
      "grad_norm": 1.5391621689007515,
      "learning_rate": 9.7849152597514e-06,
      "loss": 0.2971,
      "step": 125
    },
    {
      "epoch": 0.1889055472263868,
      "grad_norm": 1.470357069635157,
      "learning_rate": 9.781485525937683e-06,
      "loss": 0.2687,
      "step": 126
    },
    {
      "epoch": 0.1904047976011994,
      "grad_norm": 1.6457208798187986,
      "learning_rate": 9.778029273515519e-06,
      "loss": 0.2895,
      "step": 127
    },
    {
      "epoch": 0.191904047976012,
      "grad_norm": 1.8946516116602614,
      "learning_rate": 9.774546521653633e-06,
      "loss": 0.3733,
      "step": 128
    },
    {
      "epoch": 0.1934032983508246,
      "grad_norm": 1.6233385445584763,
      "learning_rate": 9.771037289667726e-06,
      "loss": 0.2899,
      "step": 129
    },
    {
      "epoch": 0.19490254872563717,
      "grad_norm": 1.7229580627088865,
      "learning_rate": 9.767501597020357e-06,
      "loss": 0.314,
      "step": 130
    },
    {
      "epoch": 0.19640179910044978,
      "grad_norm": 1.7220790561420378,
      "learning_rate": 9.76393946332084e-06,
      "loss": 0.3255,
      "step": 131
    },
    {
      "epoch": 0.19790104947526238,
      "grad_norm": 1.8855841747799418,
      "learning_rate": 9.760350908325131e-06,
      "loss": 0.3063,
      "step": 132
    },
    {
      "epoch": 0.19940029985007496,
      "grad_norm": 1.6461515144331846,
      "learning_rate": 9.756735951935725e-06,
      "loss": 0.3018,
      "step": 133
    },
    {
      "epoch": 0.20089955022488756,
      "grad_norm": 1.6275566076097705,
      "learning_rate": 9.753094614201542e-06,
      "loss": 0.2734,
      "step": 134
    },
    {
      "epoch": 0.20239880059970014,
      "grad_norm": 1.8047506186915083,
      "learning_rate": 9.749426915317812e-06,
      "loss": 0.3349,
      "step": 135
    },
    {
      "epoch": 0.20389805097451275,
      "grad_norm": 1.7150541048121033,
      "learning_rate": 9.74573287562597e-06,
      "loss": 0.3814,
      "step": 136
    },
    {
      "epoch": 0.20539730134932535,
      "grad_norm": 1.3746356899516263,
      "learning_rate": 9.742012515613536e-06,
      "loss": 0.2716,
      "step": 137
    },
    {
      "epoch": 0.20689655172413793,
      "grad_norm": 1.3615297932269776,
      "learning_rate": 9.738265855914014e-06,
      "loss": 0.2969,
      "step": 138
    },
    {
      "epoch": 0.20839580209895053,
      "grad_norm": 1.5967914555260672,
      "learning_rate": 9.734492917306754e-06,
      "loss": 0.3593,
      "step": 139
    },
    {
      "epoch": 0.2098950524737631,
      "grad_norm": 1.3926414012145243,
      "learning_rate": 9.730693720716866e-06,
      "loss": 0.3249,
      "step": 140
    },
    {
      "epoch": 0.21139430284857572,
      "grad_norm": 1.6300854417692394,
      "learning_rate": 9.72686828721508e-06,
      "loss": 0.3249,
      "step": 141
    },
    {
      "epoch": 0.2128935532233883,
      "grad_norm": 1.6503747755051488,
      "learning_rate": 9.723016638017644e-06,
      "loss": 0.3485,
      "step": 142
    },
    {
      "epoch": 0.2143928035982009,
      "grad_norm": 1.6424580489660745,
      "learning_rate": 9.719138794486198e-06,
      "loss": 0.3037,
      "step": 143
    },
    {
      "epoch": 0.2158920539730135,
      "grad_norm": 1.4679460728212463,
      "learning_rate": 9.715234778127658e-06,
      "loss": 0.2478,
      "step": 144
    },
    {
      "epoch": 0.21739130434782608,
      "grad_norm": 1.6694203271135328,
      "learning_rate": 9.711304610594104e-06,
      "loss": 0.3046,
      "step": 145
    },
    {
      "epoch": 0.21889055472263869,
      "grad_norm": 1.7953365930318501,
      "learning_rate": 9.70734831368264e-06,
      "loss": 0.3774,
      "step": 146
    },
    {
      "epoch": 0.22038980509745126,
      "grad_norm": 1.5605081251877446,
      "learning_rate": 9.7033659093353e-06,
      "loss": 0.2699,
      "step": 147
    },
    {
      "epoch": 0.22188905547226387,
      "grad_norm": 2.1619847536998766,
      "learning_rate": 9.699357419638904e-06,
      "loss": 0.3967,
      "step": 148
    },
    {
      "epoch": 0.22338830584707647,
      "grad_norm": 1.4670650567878318,
      "learning_rate": 9.695322866824948e-06,
      "loss": 0.2689,
      "step": 149
    },
    {
      "epoch": 0.22488755622188905,
      "grad_norm": 1.7468328982463712,
      "learning_rate": 9.691262273269472e-06,
      "loss": 0.3513,
      "step": 150
    },
    {
      "epoch": 0.22638680659670166,
      "grad_norm": 1.5327358567242733,
      "learning_rate": 9.687175661492944e-06,
      "loss": 0.302,
      "step": 151
    },
    {
      "epoch": 0.22788605697151423,
      "grad_norm": 1.2492938609432371,
      "learning_rate": 9.683063054160136e-06,
      "loss": 0.2079,
      "step": 152
    },
    {
      "epoch": 0.22938530734632684,
      "grad_norm": 1.801480350197766,
      "learning_rate": 9.678924474079986e-06,
      "loss": 0.3353,
      "step": 153
    },
    {
      "epoch": 0.23088455772113944,
      "grad_norm": 1.3819349277061614,
      "learning_rate": 9.67475994420548e-06,
      "loss": 0.3082,
      "step": 154
    },
    {
      "epoch": 0.23238380809595202,
      "grad_norm": 1.4854125018298305,
      "learning_rate": 9.670569487633534e-06,
      "loss": 0.3296,
      "step": 155
    },
    {
      "epoch": 0.23388305847076463,
      "grad_norm": 1.591721543394339,
      "learning_rate": 9.666353127604845e-06,
      "loss": 0.3695,
      "step": 156
    },
    {
      "epoch": 0.2353823088455772,
      "grad_norm": 1.3623438969280577,
      "learning_rate": 9.66211088750378e-06,
      "loss": 0.2687,
      "step": 157
    },
    {
      "epoch": 0.2368815592203898,
      "grad_norm": 1.59662021079487,
      "learning_rate": 9.657842790858235e-06,
      "loss": 0.3131,
      "step": 158
    },
    {
      "epoch": 0.2383808095952024,
      "grad_norm": 2.103772556104017,
      "learning_rate": 9.65354886133951e-06,
      "loss": 0.3974,
      "step": 159
    },
    {
      "epoch": 0.239880059970015,
      "grad_norm": 1.5390373761351863,
      "learning_rate": 9.64922912276218e-06,
      "loss": 0.3089,
      "step": 160
    },
    {
      "epoch": 0.2413793103448276,
      "grad_norm": 1.608215555985029,
      "learning_rate": 9.644883599083959e-06,
      "loss": 0.3104,
      "step": 161
    },
    {
      "epoch": 0.24287856071964017,
      "grad_norm": 1.6370107777138407,
      "learning_rate": 9.640512314405563e-06,
      "loss": 0.3083,
      "step": 162
    },
    {
      "epoch": 0.24437781109445278,
      "grad_norm": 1.720253344356971,
      "learning_rate": 9.636115292970587e-06,
      "loss": 0.3549,
      "step": 163
    },
    {
      "epoch": 0.24587706146926536,
      "grad_norm": 1.651830367850379,
      "learning_rate": 9.63169255916536e-06,
      "loss": 0.2953,
      "step": 164
    },
    {
      "epoch": 0.24737631184407796,
      "grad_norm": 1.4108283122209617,
      "learning_rate": 9.627244137518821e-06,
      "loss": 0.2691,
      "step": 165
    },
    {
      "epoch": 0.24887556221889057,
      "grad_norm": 1.6335828982320395,
      "learning_rate": 9.622770052702366e-06,
      "loss": 0.3137,
      "step": 166
    },
    {
      "epoch": 0.25037481259370314,
      "grad_norm": 1.4429157887428632,
      "learning_rate": 9.618270329529734e-06,
      "loss": 0.2653,
      "step": 167
    },
    {
      "epoch": 0.2518740629685157,
      "grad_norm": 1.5669347774146625,
      "learning_rate": 9.613744992956844e-06,
      "loss": 0.2818,
      "step": 168
    },
    {
      "epoch": 0.25337331334332835,
      "grad_norm": 1.3176035507431232,
      "learning_rate": 9.609194068081682e-06,
      "loss": 0.2748,
      "step": 169
    },
    {
      "epoch": 0.25487256371814093,
      "grad_norm": 1.7360622452211882,
      "learning_rate": 9.60461758014414e-06,
      "loss": 0.3223,
      "step": 170
    },
    {
      "epoch": 0.2563718140929535,
      "grad_norm": 1.450919912803998,
      "learning_rate": 9.60001555452589e-06,
      "loss": 0.3199,
      "step": 171
    },
    {
      "epoch": 0.25787106446776614,
      "grad_norm": 1.5835132895230042,
      "learning_rate": 9.595388016750236e-06,
      "loss": 0.298,
      "step": 172
    },
    {
      "epoch": 0.2593703148425787,
      "grad_norm": 1.8234775881833296,
      "learning_rate": 9.590734992481978e-06,
      "loss": 0.2865,
      "step": 173
    },
    {
      "epoch": 0.2608695652173913,
      "grad_norm": 1.606177227731814,
      "learning_rate": 9.586056507527266e-06,
      "loss": 0.3566,
      "step": 174
    },
    {
      "epoch": 0.2623688155922039,
      "grad_norm": 1.864558112262121,
      "learning_rate": 9.581352587833455e-06,
      "loss": 0.3244,
      "step": 175
    },
    {
      "epoch": 0.2638680659670165,
      "grad_norm": 1.8906574630017996,
      "learning_rate": 9.576623259488966e-06,
      "loss": 0.3849,
      "step": 176
    },
    {
      "epoch": 0.2653673163418291,
      "grad_norm": 2.102559816803484,
      "learning_rate": 9.571868548723137e-06,
      "loss": 0.4755,
      "step": 177
    },
    {
      "epoch": 0.26686656671664166,
      "grad_norm": 1.5774518141152993,
      "learning_rate": 9.567088481906084e-06,
      "loss": 0.301,
      "step": 178
    },
    {
      "epoch": 0.2683658170914543,
      "grad_norm": 1.4230003632405575,
      "learning_rate": 9.562283085548546e-06,
      "loss": 0.2292,
      "step": 179
    },
    {
      "epoch": 0.2698650674662669,
      "grad_norm": 1.9278458923026665,
      "learning_rate": 9.55745238630174e-06,
      "loss": 0.3696,
      "step": 180
    },
    {
      "epoch": 0.27136431784107945,
      "grad_norm": 1.5235483685662072,
      "learning_rate": 9.552596410957224e-06,
      "loss": 0.3119,
      "step": 181
    },
    {
      "epoch": 0.272863568215892,
      "grad_norm": 1.4491233510197186,
      "learning_rate": 9.547715186446732e-06,
      "loss": 0.2845,
      "step": 182
    },
    {
      "epoch": 0.27436281859070466,
      "grad_norm": 1.5556059068218815,
      "learning_rate": 9.542808739842034e-06,
      "loss": 0.3402,
      "step": 183
    },
    {
      "epoch": 0.27586206896551724,
      "grad_norm": 1.6386610105907913,
      "learning_rate": 9.537877098354787e-06,
      "loss": 0.314,
      "step": 184
    },
    {
      "epoch": 0.2773613193403298,
      "grad_norm": 1.7958204027789255,
      "learning_rate": 9.532920289336378e-06,
      "loss": 0.3177,
      "step": 185
    },
    {
      "epoch": 0.27886056971514245,
      "grad_norm": 1.5112411816887386,
      "learning_rate": 9.52793834027778e-06,
      "loss": 0.3123,
      "step": 186
    },
    {
      "epoch": 0.280359820089955,
      "grad_norm": 1.5467545255177493,
      "learning_rate": 9.522931278809393e-06,
      "loss": 0.348,
      "step": 187
    },
    {
      "epoch": 0.2818590704647676,
      "grad_norm": 1.5182824026116655,
      "learning_rate": 9.517899132700889e-06,
      "loss": 0.3076,
      "step": 188
    },
    {
      "epoch": 0.28335832083958024,
      "grad_norm": 1.4551124701666456,
      "learning_rate": 9.512841929861069e-06,
      "loss": 0.2654,
      "step": 189
    },
    {
      "epoch": 0.2848575712143928,
      "grad_norm": 1.726905433944032,
      "learning_rate": 9.507759698337698e-06,
      "loss": 0.3768,
      "step": 190
    },
    {
      "epoch": 0.2863568215892054,
      "grad_norm": 1.5251935151222835,
      "learning_rate": 9.50265246631735e-06,
      "loss": 0.3417,
      "step": 191
    },
    {
      "epoch": 0.28785607196401797,
      "grad_norm": 1.3344343517454702,
      "learning_rate": 9.49752026212526e-06,
      "loss": 0.3046,
      "step": 192
    },
    {
      "epoch": 0.2893553223388306,
      "grad_norm": 1.667291769433292,
      "learning_rate": 9.492363114225156e-06,
      "loss": 0.4019,
      "step": 193
    },
    {
      "epoch": 0.2908545727136432,
      "grad_norm": 1.3462565282632641,
      "learning_rate": 9.487181051219107e-06,
      "loss": 0.2833,
      "step": 194
    },
    {
      "epoch": 0.29235382308845576,
      "grad_norm": 1.708949142760287,
      "learning_rate": 9.481974101847371e-06,
      "loss": 0.339,
      "step": 195
    },
    {
      "epoch": 0.2938530734632684,
      "grad_norm": 1.579905077082289,
      "learning_rate": 9.476742294988214e-06,
      "loss": 0.2755,
      "step": 196
    },
    {
      "epoch": 0.29535232383808097,
      "grad_norm": 1.6988933363208567,
      "learning_rate": 9.471485659657782e-06,
      "loss": 0.3112,
      "step": 197
    },
    {
      "epoch": 0.29685157421289354,
      "grad_norm": 1.5692224559179928,
      "learning_rate": 9.466204225009905e-06,
      "loss": 0.2933,
      "step": 198
    },
    {
      "epoch": 0.2983508245877061,
      "grad_norm": 1.4813487646585581,
      "learning_rate": 9.460898020335964e-06,
      "loss": 0.3178,
      "step": 199
    },
    {
      "epoch": 0.29985007496251875,
      "grad_norm": 1.3414890287359442,
      "learning_rate": 9.455567075064715e-06,
      "loss": 0.278,
      "step": 200
    },
    {
      "epoch": 0.29985007496251875,
      "eval_loss": 0.31865087151527405,
      "eval_runtime": 9.5346,
      "eval_samples_per_second": 5.664,
      "eval_steps_per_second": 1.468,
      "step": 200
    },
    {
      "epoch": 0.30134932533733133,
      "grad_norm": 1.2914491820490537,
      "learning_rate": 9.450211418762123e-06,
      "loss": 0.2545,
      "step": 201
    },
    {
      "epoch": 0.3028485757121439,
      "grad_norm": 1.617206879004949,
      "learning_rate": 9.444831081131209e-06,
      "loss": 0.3245,
      "step": 202
    },
    {
      "epoch": 0.30434782608695654,
      "grad_norm": 1.5879038240403078,
      "learning_rate": 9.439426092011877e-06,
      "loss": 0.2672,
      "step": 203
    },
    {
      "epoch": 0.3058470764617691,
      "grad_norm": 1.4721712902504815,
      "learning_rate": 9.433996481380747e-06,
      "loss": 0.2441,
      "step": 204
    },
    {
      "epoch": 0.3073463268365817,
      "grad_norm": 1.6851515330584104,
      "learning_rate": 9.428542279351e-06,
      "loss": 0.3122,
      "step": 205
    },
    {
      "epoch": 0.30884557721139433,
      "grad_norm": 1.551471086948896,
      "learning_rate": 9.423063516172195e-06,
      "loss": 0.3125,
      "step": 206
    },
    {
      "epoch": 0.3103448275862069,
      "grad_norm": 1.541216254933102,
      "learning_rate": 9.417560222230115e-06,
      "loss": 0.3265,
      "step": 207
    },
    {
      "epoch": 0.3118440779610195,
      "grad_norm": 1.8241894939272227,
      "learning_rate": 9.412032428046594e-06,
      "loss": 0.3628,
      "step": 208
    },
    {
      "epoch": 0.31334332833583206,
      "grad_norm": 1.4585076619859787,
      "learning_rate": 9.40648016427934e-06,
      "loss": 0.3172,
      "step": 209
    },
    {
      "epoch": 0.3148425787106447,
      "grad_norm": 1.4947685572204896,
      "learning_rate": 9.400903461721783e-06,
      "loss": 0.3262,
      "step": 210
    },
    {
      "epoch": 0.31634182908545727,
      "grad_norm": 1.710479676870878,
      "learning_rate": 9.395302351302881e-06,
      "loss": 0.3026,
      "step": 211
    },
    {
      "epoch": 0.31784107946026985,
      "grad_norm": 1.5891445921255554,
      "learning_rate": 9.38967686408697e-06,
      "loss": 0.3041,
      "step": 212
    },
    {
      "epoch": 0.3193403298350825,
      "grad_norm": 1.877043820678234,
      "learning_rate": 9.384027031273575e-06,
      "loss": 0.3651,
      "step": 213
    },
    {
      "epoch": 0.32083958020989506,
      "grad_norm": 1.6476737455461057,
      "learning_rate": 9.37835288419725e-06,
      "loss": 0.3274,
      "step": 214
    },
    {
      "epoch": 0.32233883058470764,
      "grad_norm": 1.4048072633025106,
      "learning_rate": 9.372654454327394e-06,
      "loss": 0.2915,
      "step": 215
    },
    {
      "epoch": 0.3238380809595202,
      "grad_norm": 1.5824932612563747,
      "learning_rate": 9.366931773268083e-06,
      "loss": 0.3424,
      "step": 216
    },
    {
      "epoch": 0.32533733133433285,
      "grad_norm": 1.4045621444023597,
      "learning_rate": 9.361184872757894e-06,
      "loss": 0.2994,
      "step": 217
    },
    {
      "epoch": 0.3268365817091454,
      "grad_norm": 1.6783345093583486,
      "learning_rate": 9.355413784669722e-06,
      "loss": 0.316,
      "step": 218
    },
    {
      "epoch": 0.328335832083958,
      "grad_norm": 1.5136120438067788,
      "learning_rate": 9.349618541010616e-06,
      "loss": 0.2875,
      "step": 219
    },
    {
      "epoch": 0.32983508245877063,
      "grad_norm": 1.8347656685110798,
      "learning_rate": 9.343799173921591e-06,
      "loss": 0.3255,
      "step": 220
    },
    {
      "epoch": 0.3313343328335832,
      "grad_norm": 1.6747537616262171,
      "learning_rate": 9.337955715677452e-06,
      "loss": 0.3227,
      "step": 221
    },
    {
      "epoch": 0.3328335832083958,
      "grad_norm": 1.611748954880504,
      "learning_rate": 9.332088198686618e-06,
      "loss": 0.3494,
      "step": 222
    },
    {
      "epoch": 0.3343328335832084,
      "grad_norm": 1.3926543986691533,
      "learning_rate": 9.326196655490935e-06,
      "loss": 0.3003,
      "step": 223
    },
    {
      "epoch": 0.335832083958021,
      "grad_norm": 1.6253583560543892,
      "learning_rate": 9.32028111876551e-06,
      "loss": 0.312,
      "step": 224
    },
    {
      "epoch": 0.3373313343328336,
      "grad_norm": 1.4601075879292786,
      "learning_rate": 9.314341621318512e-06,
      "loss": 0.3017,
      "step": 225
    },
    {
      "epoch": 0.33883058470764615,
      "grad_norm": 1.8018907632171013,
      "learning_rate": 9.308378196091006e-06,
      "loss": 0.394,
      "step": 226
    },
    {
      "epoch": 0.3403298350824588,
      "grad_norm": 1.5853864532070887,
      "learning_rate": 9.302390876156756e-06,
      "loss": 0.2828,
      "step": 227
    },
    {
      "epoch": 0.34182908545727136,
      "grad_norm": 1.3452148779288864,
      "learning_rate": 9.296379694722051e-06,
      "loss": 0.2827,
      "step": 228
    },
    {
      "epoch": 0.34332833583208394,
      "grad_norm": 1.310866650589732,
      "learning_rate": 9.29034468512552e-06,
      "loss": 0.2704,
      "step": 229
    },
    {
      "epoch": 0.3448275862068966,
      "grad_norm": 1.506333918893538,
      "learning_rate": 9.284285880837947e-06,
      "loss": 0.3099,
      "step": 230
    },
    {
      "epoch": 0.34632683658170915,
      "grad_norm": 1.490555616381991,
      "learning_rate": 9.278203315462078e-06,
      "loss": 0.2802,
      "step": 231
    },
    {
      "epoch": 0.34782608695652173,
      "grad_norm": 1.3759927570780872,
      "learning_rate": 9.272097022732444e-06,
      "loss": 0.269,
      "step": 232
    },
    {
      "epoch": 0.3493253373313343,
      "grad_norm": 1.5108818707437988,
      "learning_rate": 9.26596703651517e-06,
      "loss": 0.2963,
      "step": 233
    },
    {
      "epoch": 0.35082458770614694,
      "grad_norm": 1.6364308232242346,
      "learning_rate": 9.259813390807788e-06,
      "loss": 0.3649,
      "step": 234
    },
    {
      "epoch": 0.3523238380809595,
      "grad_norm": 1.6168540956790163,
      "learning_rate": 9.253636119739046e-06,
      "loss": 0.2944,
      "step": 235
    },
    {
      "epoch": 0.3538230884557721,
      "grad_norm": 1.587020700255697,
      "learning_rate": 9.247435257568724e-06,
      "loss": 0.3142,
      "step": 236
    },
    {
      "epoch": 0.3553223388305847,
      "grad_norm": 1.5298114520922985,
      "learning_rate": 9.241210838687438e-06,
      "loss": 0.3315,
      "step": 237
    },
    {
      "epoch": 0.3568215892053973,
      "grad_norm": 1.6335252140407155,
      "learning_rate": 9.23496289761645e-06,
      "loss": 0.3397,
      "step": 238
    },
    {
      "epoch": 0.3583208395802099,
      "grad_norm": 1.674399202898891,
      "learning_rate": 9.228691469007487e-06,
      "loss": 0.3516,
      "step": 239
    },
    {
      "epoch": 0.3598200899550225,
      "grad_norm": 1.7823402906317811,
      "learning_rate": 9.222396587642528e-06,
      "loss": 0.3502,
      "step": 240
    },
    {
      "epoch": 0.3613193403298351,
      "grad_norm": 1.4099094018415357,
      "learning_rate": 9.216078288433632e-06,
      "loss": 0.3187,
      "step": 241
    },
    {
      "epoch": 0.36281859070464767,
      "grad_norm": 1.8182655020498848,
      "learning_rate": 9.209736606422736e-06,
      "loss": 0.3157,
      "step": 242
    },
    {
      "epoch": 0.36431784107946025,
      "grad_norm": 1.7687977513877544,
      "learning_rate": 9.203371576781457e-06,
      "loss": 0.335,
      "step": 243
    },
    {
      "epoch": 0.3658170914542729,
      "grad_norm": 1.483158152454702,
      "learning_rate": 9.1969832348109e-06,
      "loss": 0.298,
      "step": 244
    },
    {
      "epoch": 0.36731634182908546,
      "grad_norm": 1.4043101733884487,
      "learning_rate": 9.190571615941462e-06,
      "loss": 0.3012,
      "step": 245
    },
    {
      "epoch": 0.36881559220389803,
      "grad_norm": 1.4752437511712757,
      "learning_rate": 9.18413675573264e-06,
      "loss": 0.3182,
      "step": 246
    },
    {
      "epoch": 0.37031484257871067,
      "grad_norm": 1.6533133805844158,
      "learning_rate": 9.177678689872831e-06,
      "loss": 0.3766,
      "step": 247
    },
    {
      "epoch": 0.37181409295352325,
      "grad_norm": 1.3475127139058065,
      "learning_rate": 9.171197454179124e-06,
      "loss": 0.3069,
      "step": 248
    },
    {
      "epoch": 0.3733133433283358,
      "grad_norm": 1.4956515930266987,
      "learning_rate": 9.16469308459712e-06,
      "loss": 0.3195,
      "step": 249
    },
    {
      "epoch": 0.3748125937031484,
      "grad_norm": 1.554696970967648,
      "learning_rate": 9.158165617200717e-06,
      "loss": 0.3165,
      "step": 250
    },
    {
      "epoch": 0.37631184407796103,
      "grad_norm": 1.4803392008015672,
      "learning_rate": 9.151615088191918e-06,
      "loss": 0.2986,
      "step": 251
    },
    {
      "epoch": 0.3778110944527736,
      "grad_norm": 1.4012614689308533,
      "learning_rate": 9.14504153390063e-06,
      "loss": 0.2783,
      "step": 252
    },
    {
      "epoch": 0.3793103448275862,
      "grad_norm": 1.5532716690880104,
      "learning_rate": 9.138444990784455e-06,
      "loss": 0.3668,
      "step": 253
    },
    {
      "epoch": 0.3808095952023988,
      "grad_norm": 1.5900316421763145,
      "learning_rate": 9.131825495428496e-06,
      "loss": 0.3149,
      "step": 254
    },
    {
      "epoch": 0.3823088455772114,
      "grad_norm": 1.4731630574906363,
      "learning_rate": 9.125183084545158e-06,
      "loss": 0.3242,
      "step": 255
    },
    {
      "epoch": 0.383808095952024,
      "grad_norm": 1.8078183568590966,
      "learning_rate": 9.118517794973925e-06,
      "loss": 0.3534,
      "step": 256
    },
    {
      "epoch": 0.3853073463268366,
      "grad_norm": 1.3669095909006088,
      "learning_rate": 9.111829663681182e-06,
      "loss": 0.3017,
      "step": 257
    },
    {
      "epoch": 0.3868065967016492,
      "grad_norm": 1.4104333598522873,
      "learning_rate": 9.105118727759984e-06,
      "loss": 0.3089,
      "step": 258
    },
    {
      "epoch": 0.38830584707646176,
      "grad_norm": 1.8586145483699874,
      "learning_rate": 9.098385024429875e-06,
      "loss": 0.3085,
      "step": 259
    },
    {
      "epoch": 0.38980509745127434,
      "grad_norm": 1.4223222044665933,
      "learning_rate": 9.09162859103666e-06,
      "loss": 0.3244,
      "step": 260
    },
    {
      "epoch": 0.391304347826087,
      "grad_norm": 1.884873876770662,
      "learning_rate": 9.08484946505221e-06,
      "loss": 0.3837,
      "step": 261
    },
    {
      "epoch": 0.39280359820089955,
      "grad_norm": 1.7983916583732078,
      "learning_rate": 9.078047684074254e-06,
      "loss": 0.3546,
      "step": 262
    },
    {
      "epoch": 0.39430284857571213,
      "grad_norm": 1.4502714296949408,
      "learning_rate": 9.071223285826166e-06,
      "loss": 0.2613,
      "step": 263
    },
    {
      "epoch": 0.39580209895052476,
      "grad_norm": 1.6815524788277352,
      "learning_rate": 9.064376308156754e-06,
      "loss": 0.3553,
      "step": 264
    },
    {
      "epoch": 0.39730134932533734,
      "grad_norm": 1.6792700035651802,
      "learning_rate": 9.057506789040063e-06,
      "loss": 0.3332,
      "step": 265
    },
    {
      "epoch": 0.3988005997001499,
      "grad_norm": 1.7631646154436653,
      "learning_rate": 9.050614766575147e-06,
      "loss": 0.3745,
      "step": 266
    },
    {
      "epoch": 0.4002998500749625,
      "grad_norm": 1.3326861130696177,
      "learning_rate": 9.043700278985867e-06,
      "loss": 0.2829,
      "step": 267
    },
    {
      "epoch": 0.4017991004497751,
      "grad_norm": 1.5373974839212117,
      "learning_rate": 9.03676336462068e-06,
      "loss": 0.3159,
      "step": 268
    },
    {
      "epoch": 0.4032983508245877,
      "grad_norm": 1.4848523958607267,
      "learning_rate": 9.029804061952426e-06,
      "loss": 0.2817,
      "step": 269
    },
    {
      "epoch": 0.4047976011994003,
      "grad_norm": 1.4186378261482933,
      "learning_rate": 9.022822409578106e-06,
      "loss": 0.3007,
      "step": 270
    },
    {
      "epoch": 0.4062968515742129,
      "grad_norm": 1.3665551190637126,
      "learning_rate": 9.015818446218683e-06,
      "loss": 0.2663,
      "step": 271
    },
    {
      "epoch": 0.4077961019490255,
      "grad_norm": 1.8004513614500592,
      "learning_rate": 9.008792210718854e-06,
      "loss": 0.3166,
      "step": 272
    },
    {
      "epoch": 0.40929535232383807,
      "grad_norm": 1.7271025597363883,
      "learning_rate": 9.00174374204684e-06,
      "loss": 0.3985,
      "step": 273
    },
    {
      "epoch": 0.4107946026986507,
      "grad_norm": 1.7120420253419553,
      "learning_rate": 8.994673079294171e-06,
      "loss": 0.2887,
      "step": 274
    },
    {
      "epoch": 0.4122938530734633,
      "grad_norm": 1.5573334644227972,
      "learning_rate": 8.987580261675466e-06,
      "loss": 0.3118,
      "step": 275
    },
    {
      "epoch": 0.41379310344827586,
      "grad_norm": 1.7532970724016852,
      "learning_rate": 8.98046532852822e-06,
      "loss": 0.3451,
      "step": 276
    },
    {
      "epoch": 0.41529235382308843,
      "grad_norm": 1.7990888104381544,
      "learning_rate": 8.973328319312577e-06,
      "loss": 0.378,
      "step": 277
    },
    {
      "epoch": 0.41679160419790107,
      "grad_norm": 1.3765160345020095,
      "learning_rate": 8.966169273611125e-06,
      "loss": 0.2431,
      "step": 278
    },
    {
      "epoch": 0.41829085457271364,
      "grad_norm": 1.7424289558232744,
      "learning_rate": 8.958988231128665e-06,
      "loss": 0.3395,
      "step": 279
    },
    {
      "epoch": 0.4197901049475262,
      "grad_norm": 1.5102284530060985,
      "learning_rate": 8.95178523169199e-06,
      "loss": 0.3121,
      "step": 280
    },
    {
      "epoch": 0.42128935532233885,
      "grad_norm": 1.423068646962132,
      "learning_rate": 8.944560315249676e-06,
      "loss": 0.2699,
      "step": 281
    },
    {
      "epoch": 0.42278860569715143,
      "grad_norm": 1.2082261571628317,
      "learning_rate": 8.937313521871846e-06,
      "loss": 0.2766,
      "step": 282
    },
    {
      "epoch": 0.424287856071964,
      "grad_norm": 1.6368109030503832,
      "learning_rate": 8.930044891749962e-06,
      "loss": 0.318,
      "step": 283
    },
    {
      "epoch": 0.4257871064467766,
      "grad_norm": 1.6853820955568977,
      "learning_rate": 8.922754465196591e-06,
      "loss": 0.2909,
      "step": 284
    },
    {
      "epoch": 0.4272863568215892,
      "grad_norm": 1.4242235536170476,
      "learning_rate": 8.915442282645183e-06,
      "loss": 0.299,
      "step": 285
    },
    {
      "epoch": 0.4287856071964018,
      "grad_norm": 1.4956901901699613,
      "learning_rate": 8.908108384649856e-06,
      "loss": 0.2931,
      "step": 286
    },
    {
      "epoch": 0.4302848575712144,
      "grad_norm": 1.4522693723774607,
      "learning_rate": 8.900752811885152e-06,
      "loss": 0.2998,
      "step": 287
    },
    {
      "epoch": 0.431784107946027,
      "grad_norm": 1.3862849952507832,
      "learning_rate": 8.893375605145837e-06,
      "loss": 0.2787,
      "step": 288
    },
    {
      "epoch": 0.4332833583208396,
      "grad_norm": 1.3759797800861127,
      "learning_rate": 8.885976805346651e-06,
      "loss": 0.3469,
      "step": 289
    },
    {
      "epoch": 0.43478260869565216,
      "grad_norm": 1.6160672226143369,
      "learning_rate": 8.8785564535221e-06,
      "loss": 0.3484,
      "step": 290
    },
    {
      "epoch": 0.4362818590704648,
      "grad_norm": 1.8236449668128274,
      "learning_rate": 8.871114590826211e-06,
      "loss": 0.3252,
      "step": 291
    },
    {
      "epoch": 0.43778110944527737,
      "grad_norm": 2.0215149861497417,
      "learning_rate": 8.86365125853232e-06,
      "loss": 0.3041,
      "step": 292
    },
    {
      "epoch": 0.43928035982008995,
      "grad_norm": 1.4226330217038914,
      "learning_rate": 8.85616649803283e-06,
      "loss": 0.3237,
      "step": 293
    },
    {
      "epoch": 0.4407796101949025,
      "grad_norm": 1.3243895047996546,
      "learning_rate": 8.84866035083899e-06,
      "loss": 0.2351,
      "step": 294
    },
    {
      "epoch": 0.44227886056971516,
      "grad_norm": 1.338019694505218,
      "learning_rate": 8.841132858580661e-06,
      "loss": 0.2858,
      "step": 295
    },
    {
      "epoch": 0.44377811094452774,
      "grad_norm": 1.541795435962147,
      "learning_rate": 8.833584063006088e-06,
      "loss": 0.3136,
      "step": 296
    },
    {
      "epoch": 0.4452773613193403,
      "grad_norm": 1.8246954956049057,
      "learning_rate": 8.826014005981662e-06,
      "loss": 0.3781,
      "step": 297
    },
    {
      "epoch": 0.44677661169415295,
      "grad_norm": 1.5281229728644417,
      "learning_rate": 8.818422729491693e-06,
      "loss": 0.2583,
      "step": 298
    },
    {
      "epoch": 0.4482758620689655,
      "grad_norm": 1.588026816491082,
      "learning_rate": 8.810810275638183e-06,
      "loss": 0.3067,
      "step": 299
    },
    {
      "epoch": 0.4497751124437781,
      "grad_norm": 1.3942715313521852,
      "learning_rate": 8.803176686640577e-06,
      "loss": 0.2353,
      "step": 300
    },
    {
      "epoch": 0.4512743628185907,
      "grad_norm": 1.4891178081833167,
      "learning_rate": 8.795522004835543e-06,
      "loss": 0.3063,
      "step": 301
    },
    {
      "epoch": 0.4527736131934033,
      "grad_norm": 1.4962818451621016,
      "learning_rate": 8.787846272676728e-06,
      "loss": 0.3081,
      "step": 302
    },
    {
      "epoch": 0.4542728635682159,
      "grad_norm": 1.3436327405802937,
      "learning_rate": 8.780149532734531e-06,
      "loss": 0.2847,
      "step": 303
    },
    {
      "epoch": 0.45577211394302847,
      "grad_norm": 1.4513621866632591,
      "learning_rate": 8.772431827695862e-06,
      "loss": 0.3314,
      "step": 304
    },
    {
      "epoch": 0.4572713643178411,
      "grad_norm": 1.4846215391856483,
      "learning_rate": 8.764693200363897e-06,
      "loss": 0.2956,
      "step": 305
    },
    {
      "epoch": 0.4587706146926537,
      "grad_norm": 1.7345777302006005,
      "learning_rate": 8.756933693657863e-06,
      "loss": 0.2973,
      "step": 306
    },
    {
      "epoch": 0.46026986506746626,
      "grad_norm": 1.5703940224862079,
      "learning_rate": 8.749153350612774e-06,
      "loss": 0.308,
      "step": 307
    },
    {
      "epoch": 0.4617691154422789,
      "grad_norm": 1.404543016239454,
      "learning_rate": 8.74135221437921e-06,
      "loss": 0.2627,
      "step": 308
    },
    {
      "epoch": 0.46326836581709147,
      "grad_norm": 1.622914683273527,
      "learning_rate": 8.733530328223076e-06,
      "loss": 0.3194,
      "step": 309
    },
    {
      "epoch": 0.46476761619190404,
      "grad_norm": 1.5992326369836218,
      "learning_rate": 8.725687735525347e-06,
      "loss": 0.2734,
      "step": 310
    },
    {
      "epoch": 0.4662668665667166,
      "grad_norm": 1.428179973398535,
      "learning_rate": 8.71782447978185e-06,
      "loss": 0.2712,
      "step": 311
    },
    {
      "epoch": 0.46776611694152925,
      "grad_norm": 1.7414623479311544,
      "learning_rate": 8.709940604603006e-06,
      "loss": 0.3752,
      "step": 312
    },
    {
      "epoch": 0.46926536731634183,
      "grad_norm": 1.5254762699797346,
      "learning_rate": 8.702036153713594e-06,
      "loss": 0.2906,
      "step": 313
    },
    {
      "epoch": 0.4707646176911544,
      "grad_norm": 1.7397895016458929,
      "learning_rate": 8.694111170952508e-06,
      "loss": 0.2952,
      "step": 314
    },
    {
      "epoch": 0.47226386806596704,
      "grad_norm": 1.6313154976101212,
      "learning_rate": 8.686165700272513e-06,
      "loss": 0.366,
      "step": 315
    },
    {
      "epoch": 0.4737631184407796,
      "grad_norm": 1.4125903031749254,
      "learning_rate": 8.678199785740003e-06,
      "loss": 0.3001,
      "step": 316
    },
    {
      "epoch": 0.4752623688155922,
      "grad_norm": 1.755982750732745,
      "learning_rate": 8.670213471534759e-06,
      "loss": 0.3552,
      "step": 317
    },
    {
      "epoch": 0.4767616191904048,
      "grad_norm": 1.2975586714056746,
      "learning_rate": 8.662206801949694e-06,
      "loss": 0.2636,
      "step": 318
    },
    {
      "epoch": 0.4782608695652174,
      "grad_norm": 1.479543914420989,
      "learning_rate": 8.65417982139062e-06,
      "loss": 0.2861,
      "step": 319
    },
    {
      "epoch": 0.47976011994003,
      "grad_norm": 1.9072158128288146,
      "learning_rate": 8.646132574375994e-06,
      "loss": 0.3529,
      "step": 320
    },
    {
      "epoch": 0.48125937031484256,
      "grad_norm": 1.6593484800640623,
      "learning_rate": 8.638065105536669e-06,
      "loss": 0.3585,
      "step": 321
    },
    {
      "epoch": 0.4827586206896552,
      "grad_norm": 1.557791975939994,
      "learning_rate": 8.629977459615655e-06,
      "loss": 0.2912,
      "step": 322
    },
    {
      "epoch": 0.48425787106446777,
      "grad_norm": 1.5749646653775289,
      "learning_rate": 8.621869681467865e-06,
      "loss": 0.3086,
      "step": 323
    },
    {
      "epoch": 0.48575712143928035,
      "grad_norm": 1.3922529355839681,
      "learning_rate": 8.613741816059867e-06,
      "loss": 0.2526,
      "step": 324
    },
    {
      "epoch": 0.487256371814093,
      "grad_norm": 1.6418433758391113,
      "learning_rate": 8.605593908469635e-06,
      "loss": 0.3063,
      "step": 325
    },
    {
      "epoch": 0.48875562218890556,
      "grad_norm": 1.4903262726922777,
      "learning_rate": 8.597426003886295e-06,
      "loss": 0.2728,
      "step": 326
    },
    {
      "epoch": 0.49025487256371814,
      "grad_norm": 1.540376386023962,
      "learning_rate": 8.58923814760989e-06,
      "loss": 0.3628,
      "step": 327
    },
    {
      "epoch": 0.4917541229385307,
      "grad_norm": 1.6746744953148947,
      "learning_rate": 8.581030385051105e-06,
      "loss": 0.3481,
      "step": 328
    },
    {
      "epoch": 0.49325337331334335,
      "grad_norm": 1.4618828442301055,
      "learning_rate": 8.572802761731031e-06,
      "loss": 0.3116,
      "step": 329
    },
    {
      "epoch": 0.4947526236881559,
      "grad_norm": 1.61057521662318,
      "learning_rate": 8.564555323280913e-06,
      "loss": 0.3441,
      "step": 330
    },
    {
      "epoch": 0.4962518740629685,
      "grad_norm": 1.5935367638144864,
      "learning_rate": 8.556288115441887e-06,
      "loss": 0.2965,
      "step": 331
    },
    {
      "epoch": 0.49775112443778113,
      "grad_norm": 1.4319307113091793,
      "learning_rate": 8.548001184064733e-06,
      "loss": 0.3142,
      "step": 332
    },
    {
      "epoch": 0.4992503748125937,
      "grad_norm": 1.445981529258563,
      "learning_rate": 8.539694575109626e-06,
      "loss": 0.2888,
      "step": 333
    },
    {
      "epoch": 0.5007496251874063,
      "grad_norm": 1.602359221078496,
      "learning_rate": 8.531368334645865e-06,
      "loss": 0.293,
      "step": 334
    },
    {
      "epoch": 0.5022488755622189,
      "grad_norm": 1.4353041375153546,
      "learning_rate": 8.523022508851634e-06,
      "loss": 0.3208,
      "step": 335
    },
    {
      "epoch": 0.5037481259370314,
      "grad_norm": 1.5579714022068125,
      "learning_rate": 8.514657144013738e-06,
      "loss": 0.321,
      "step": 336
    },
    {
      "epoch": 0.5052473763118441,
      "grad_norm": 1.5996183538119209,
      "learning_rate": 8.506272286527346e-06,
      "loss": 0.3876,
      "step": 337
    },
    {
      "epoch": 0.5067466266866567,
      "grad_norm": 1.331308779665969,
      "learning_rate": 8.497867982895741e-06,
      "loss": 0.2726,
      "step": 338
    },
    {
      "epoch": 0.5082458770614693,
      "grad_norm": 1.285540672848639,
      "learning_rate": 8.489444279730046e-06,
      "loss": 0.2651,
      "step": 339
    },
    {
      "epoch": 0.5097451274362819,
      "grad_norm": 1.648005626003441,
      "learning_rate": 8.481001223748986e-06,
      "loss": 0.2765,
      "step": 340
    },
    {
      "epoch": 0.5112443778110944,
      "grad_norm": 1.8337590942749775,
      "learning_rate": 8.47253886177861e-06,
      "loss": 0.3834,
      "step": 341
    },
    {
      "epoch": 0.512743628185907,
      "grad_norm": 1.5746904364262109,
      "learning_rate": 8.464057240752046e-06,
      "loss": 0.3147,
      "step": 342
    },
    {
      "epoch": 0.5142428785607196,
      "grad_norm": 1.5583080179143252,
      "learning_rate": 8.455556407709235e-06,
      "loss": 0.2502,
      "step": 343
    },
    {
      "epoch": 0.5157421289355323,
      "grad_norm": 1.3784299657729973,
      "learning_rate": 8.447036409796663e-06,
      "loss": 0.3245,
      "step": 344
    },
    {
      "epoch": 0.5172413793103449,
      "grad_norm": 1.421009503935526,
      "learning_rate": 8.438497294267117e-06,
      "loss": 0.3185,
      "step": 345
    },
    {
      "epoch": 0.5187406296851574,
      "grad_norm": 1.734142097530356,
      "learning_rate": 8.429939108479403e-06,
      "loss": 0.4028,
      "step": 346
    },
    {
      "epoch": 0.52023988005997,
      "grad_norm": 1.4283181015193642,
      "learning_rate": 8.421361899898095e-06,
      "loss": 0.3064,
      "step": 347
    },
    {
      "epoch": 0.5217391304347826,
      "grad_norm": 1.48706713874812,
      "learning_rate": 8.412765716093273e-06,
      "loss": 0.2841,
      "step": 348
    },
    {
      "epoch": 0.5232383808095952,
      "grad_norm": 1.5120205939898492,
      "learning_rate": 8.404150604740248e-06,
      "loss": 0.3197,
      "step": 349
    },
    {
      "epoch": 0.5247376311844077,
      "grad_norm": 1.5731350530900636,
      "learning_rate": 8.395516613619315e-06,
      "loss": 0.305,
      "step": 350
    },
    {
      "epoch": 0.5262368815592204,
      "grad_norm": 1.4591781180792678,
      "learning_rate": 8.386863790615472e-06,
      "loss": 0.3305,
      "step": 351
    },
    {
      "epoch": 0.527736131934033,
      "grad_norm": 1.5988860912943819,
      "learning_rate": 8.378192183718158e-06,
      "loss": 0.3358,
      "step": 352
    },
    {
      "epoch": 0.5292353823088456,
      "grad_norm": 1.690987090893082,
      "learning_rate": 8.369501841021e-06,
      "loss": 0.3412,
      "step": 353
    },
    {
      "epoch": 0.5307346326836582,
      "grad_norm": 1.5892248038221097,
      "learning_rate": 8.360792810721522e-06,
      "loss": 0.2828,
      "step": 354
    },
    {
      "epoch": 0.5322338830584707,
      "grad_norm": 1.5179989556837041,
      "learning_rate": 8.352065141120902e-06,
      "loss": 0.3209,
      "step": 355
    },
    {
      "epoch": 0.5337331334332833,
      "grad_norm": 1.5600522653768132,
      "learning_rate": 8.343318880623688e-06,
      "loss": 0.2921,
      "step": 356
    },
    {
      "epoch": 0.5352323838080959,
      "grad_norm": 1.5555612059413912,
      "learning_rate": 8.334554077737535e-06,
      "loss": 0.3502,
      "step": 357
    },
    {
      "epoch": 0.5367316341829086,
      "grad_norm": 1.3576244747622102,
      "learning_rate": 8.325770781072939e-06,
      "loss": 0.2899,
      "step": 358
    },
    {
      "epoch": 0.5382308845577212,
      "grad_norm": 1.255414514400548,
      "learning_rate": 8.316969039342963e-06,
      "loss": 0.301,
      "step": 359
    },
    {
      "epoch": 0.5397301349325337,
      "grad_norm": 1.4403501035341664,
      "learning_rate": 8.30814890136297e-06,
      "loss": 0.2321,
      "step": 360
    },
    {
      "epoch": 0.5412293853073463,
      "grad_norm": 2.0042523749431047,
      "learning_rate": 8.299310416050345e-06,
      "loss": 0.3766,
      "step": 361
    },
    {
      "epoch": 0.5427286356821589,
      "grad_norm": 1.329044305552415,
      "learning_rate": 8.290453632424236e-06,
      "loss": 0.246,
      "step": 362
    },
    {
      "epoch": 0.5442278860569715,
      "grad_norm": 1.8038806480964888,
      "learning_rate": 8.281578599605269e-06,
      "loss": 0.3285,
      "step": 363
    },
    {
      "epoch": 0.545727136431784,
      "grad_norm": 1.6972593164495602,
      "learning_rate": 8.272685366815287e-06,
      "loss": 0.3985,
      "step": 364
    },
    {
      "epoch": 0.5472263868065967,
      "grad_norm": 1.3418317537268136,
      "learning_rate": 8.26377398337707e-06,
      "loss": 0.3338,
      "step": 365
    },
    {
      "epoch": 0.5487256371814093,
      "grad_norm": 1.5696789006175893,
      "learning_rate": 8.254844498714063e-06,
      "loss": 0.2931,
      "step": 366
    },
    {
      "epoch": 0.5502248875562219,
      "grad_norm": 1.4863325632462137,
      "learning_rate": 8.2458969623501e-06,
      "loss": 0.3632,
      "step": 367
    },
    {
      "epoch": 0.5517241379310345,
      "grad_norm": 1.4927122577519016,
      "learning_rate": 8.23693142390914e-06,
      "loss": 0.3526,
      "step": 368
    },
    {
      "epoch": 0.553223388305847,
      "grad_norm": 1.4142612730492767,
      "learning_rate": 8.227947933114971e-06,
|
"loss": 0.2575, |
|
"step": 369 |
|
}, |
|
{ |
|
"epoch": 0.5547226386806596, |
|
"grad_norm": 1.526469855561163, |
|
"learning_rate": 8.218946539790957e-06, |
|
"loss": 0.3258, |
|
"step": 370 |
|
}, |
|
{ |
|
"epoch": 0.5562218890554723, |
|
"grad_norm": 1.3984879591660238, |
|
"learning_rate": 8.209927293859746e-06, |
|
"loss": 0.303, |
|
"step": 371 |
|
}, |
|
{ |
|
"epoch": 0.5577211394302849, |
|
"grad_norm": 1.6745387934507217, |
|
"learning_rate": 8.200890245342999e-06, |
|
"loss": 0.3799, |
|
"step": 372 |
|
}, |
|
{ |
|
"epoch": 0.5592203898050975, |
|
"grad_norm": 1.56228468138885, |
|
"learning_rate": 8.191835444361113e-06, |
|
"loss": 0.3917, |
|
"step": 373 |
|
}, |
|
{ |
|
"epoch": 0.56071964017991, |
|
"grad_norm": 1.3202553723061632, |
|
"learning_rate": 8.182762941132944e-06, |
|
"loss": 0.2632, |
|
"step": 374 |
|
}, |
|
{ |
|
"epoch": 0.5622188905547226, |
|
"grad_norm": 1.4285299922962245, |
|
"learning_rate": 8.173672785975522e-06, |
|
"loss": 0.3207, |
|
"step": 375 |
|
}, |
|
{ |
|
"epoch": 0.5637181409295352, |
|
"grad_norm": 1.7706320948302563, |
|
"learning_rate": 8.16456502930378e-06, |
|
"loss": 0.411, |
|
"step": 376 |
|
}, |
|
{ |
|
"epoch": 0.5652173913043478, |
|
"grad_norm": 1.5555404146846652, |
|
"learning_rate": 8.155439721630265e-06, |
|
"loss": 0.3402, |
|
"step": 377 |
|
}, |
|
{ |
|
"epoch": 0.5667166416791605, |
|
"grad_norm": 1.280491820060774, |
|
"learning_rate": 8.146296913564872e-06, |
|
"loss": 0.2763, |
|
"step": 378 |
|
}, |
|
{ |
|
"epoch": 0.568215892053973, |
|
"grad_norm": 1.498816208536415, |
|
"learning_rate": 8.13713665581455e-06, |
|
"loss": 0.2841, |
|
"step": 379 |
|
}, |
|
{ |
|
"epoch": 0.5697151424287856, |
|
"grad_norm": 1.3741949197493073, |
|
"learning_rate": 8.127958999183027e-06, |
|
"loss": 0.3014, |
|
"step": 380 |
|
}, |
|
{ |
|
"epoch": 0.5712143928035982, |
|
"grad_norm": 1.2879534077401542, |
|
"learning_rate": 8.118763994570528e-06, |
|
"loss": 0.3213, |
|
"step": 381 |
|
}, |
|
{ |
|
"epoch": 0.5727136431784108, |
|
"grad_norm": 1.7415653668034985, |
|
"learning_rate": 8.109551692973487e-06, |
|
"loss": 0.2967, |
|
"step": 382 |
|
}, |
|
{ |
|
"epoch": 0.5742128935532234, |
|
"grad_norm": 1.3504330250600425, |
|
"learning_rate": 8.100322145484275e-06, |
|
"loss": 0.3172, |
|
"step": 383 |
|
}, |
|
{ |
|
"epoch": 0.5757121439280359, |
|
"grad_norm": 1.4398060360351141, |
|
"learning_rate": 8.091075403290905e-06, |
|
"loss": 0.3221, |
|
"step": 384 |
|
}, |
|
{ |
|
"epoch": 0.5772113943028486, |
|
"grad_norm": 1.4685973288174197, |
|
"learning_rate": 8.081811517676759e-06, |
|
"loss": 0.3036, |
|
"step": 385 |
|
}, |
|
{ |
|
"epoch": 0.5787106446776612, |
|
"grad_norm": 1.3757946392665268, |
|
"learning_rate": 8.072530540020294e-06, |
|
"loss": 0.3174, |
|
"step": 386 |
|
}, |
|
{ |
|
"epoch": 0.5802098950524738, |
|
"grad_norm": 1.3207011248499667, |
|
"learning_rate": 8.063232521794762e-06, |
|
"loss": 0.2801, |
|
"step": 387 |
|
}, |
|
{ |
|
"epoch": 0.5817091454272864, |
|
"grad_norm": 1.1766688774818337, |
|
"learning_rate": 8.053917514567927e-06, |
|
"loss": 0.241, |
|
"step": 388 |
|
}, |
|
{ |
|
"epoch": 0.5832083958020989, |
|
"grad_norm": 1.3126653214263113, |
|
"learning_rate": 8.04458557000177e-06, |
|
"loss": 0.2809, |
|
"step": 389 |
|
}, |
|
{ |
|
"epoch": 0.5847076461769115, |
|
"grad_norm": 1.6207249995016972, |
|
"learning_rate": 8.035236739852214e-06, |
|
"loss": 0.3034, |
|
"step": 390 |
|
}, |
|
{ |
|
"epoch": 0.5862068965517241, |
|
"grad_norm": 1.8716872118030787, |
|
"learning_rate": 8.025871075968828e-06, |
|
"loss": 0.3139, |
|
"step": 391 |
|
}, |
|
{ |
|
"epoch": 0.5877061469265368, |
|
"grad_norm": 1.3766598114110888, |
|
"learning_rate": 8.016488630294539e-06, |
|
"loss": 0.2851, |
|
"step": 392 |
|
}, |
|
{ |
|
"epoch": 0.5892053973013494, |
|
"grad_norm": 1.437525290716419, |
|
"learning_rate": 8.007089454865358e-06, |
|
"loss": 0.3209, |
|
"step": 393 |
|
}, |
|
{ |
|
"epoch": 0.5907046476761619, |
|
"grad_norm": 1.45491223433416, |
|
"learning_rate": 7.997673601810071e-06, |
|
"loss": 0.267, |
|
"step": 394 |
|
}, |
|
{ |
|
"epoch": 0.5922038980509745, |
|
"grad_norm": 1.144356936174265, |
|
"learning_rate": 7.988241123349965e-06, |
|
"loss": 0.2979, |
|
"step": 395 |
|
}, |
|
{ |
|
"epoch": 0.5937031484257871, |
|
"grad_norm": 1.6160780957452199, |
|
"learning_rate": 7.97879207179853e-06, |
|
"loss": 0.2878, |
|
"step": 396 |
|
}, |
|
{ |
|
"epoch": 0.5952023988005997, |
|
"grad_norm": 1.4698046543076704, |
|
"learning_rate": 7.969326499561173e-06, |
|
"loss": 0.2815, |
|
"step": 397 |
|
}, |
|
{ |
|
"epoch": 0.5967016491754122, |
|
"grad_norm": 1.5580121554949924, |
|
"learning_rate": 7.95984445913493e-06, |
|
"loss": 0.3351, |
|
"step": 398 |
|
}, |
|
{ |
|
"epoch": 0.5982008995502249, |
|
"grad_norm": 1.2610592076882203, |
|
"learning_rate": 7.950346003108167e-06, |
|
"loss": 0.258, |
|
"step": 399 |
|
}, |
|
{ |
|
"epoch": 0.5997001499250375, |
|
"grad_norm": 1.280154159142727, |
|
"learning_rate": 7.940831184160294e-06, |
|
"loss": 0.2849, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.5997001499250375, |
|
"eval_loss": 0.3021397590637207, |
|
"eval_runtime": 9.5189, |
|
"eval_samples_per_second": 5.673, |
|
"eval_steps_per_second": 1.471, |
|
"step": 400 |
|
}, |
|
{ |
|
"epoch": 0.6011994002998501, |
|
"grad_norm": 1.3919665388781326, |
|
"learning_rate": 7.93130005506147e-06, |
|
"loss": 0.3171, |
|
"step": 401 |
|
}, |
|
{ |
|
"epoch": 0.6026986506746627, |
|
"grad_norm": 1.217918218721182, |
|
"learning_rate": 7.921752668672316e-06, |
|
"loss": 0.2821, |
|
"step": 402 |
|
}, |
|
{ |
|
"epoch": 0.6041979010494752, |
|
"grad_norm": 1.4425838660870236, |
|
"learning_rate": 7.912189077943613e-06, |
|
"loss": 0.2888, |
|
"step": 403 |
|
}, |
|
{ |
|
"epoch": 0.6056971514242878, |
|
"grad_norm": 1.4996475538548084, |
|
"learning_rate": 7.902609335916015e-06, |
|
"loss": 0.2627, |
|
"step": 404 |
|
}, |
|
{ |
|
"epoch": 0.6071964017991005, |
|
"grad_norm": 1.4619821265759678, |
|
"learning_rate": 7.893013495719752e-06, |
|
"loss": 0.302, |
|
"step": 405 |
|
}, |
|
{ |
|
"epoch": 0.6086956521739131, |
|
"grad_norm": 1.529852377257847, |
|
"learning_rate": 7.883401610574338e-06, |
|
"loss": 0.3041, |
|
"step": 406 |
|
}, |
|
{ |
|
"epoch": 0.6101949025487257, |
|
"grad_norm": 1.6245528888965435, |
|
"learning_rate": 7.873773733788268e-06, |
|
"loss": 0.3132, |
|
"step": 407 |
|
}, |
|
{ |
|
"epoch": 0.6116941529235382, |
|
"grad_norm": 1.592388100708721, |
|
"learning_rate": 7.864129918758738e-06, |
|
"loss": 0.3218, |
|
"step": 408 |
|
}, |
|
{ |
|
"epoch": 0.6131934032983508, |
|
"grad_norm": 1.2782270070716457, |
|
"learning_rate": 7.854470218971333e-06, |
|
"loss": 0.3132, |
|
"step": 409 |
|
}, |
|
{ |
|
"epoch": 0.6146926536731634, |
|
"grad_norm": 1.4920393997659043, |
|
"learning_rate": 7.844794687999737e-06, |
|
"loss": 0.3011, |
|
"step": 410 |
|
}, |
|
{ |
|
"epoch": 0.616191904047976, |
|
"grad_norm": 1.7736914495043172, |
|
"learning_rate": 7.835103379505433e-06, |
|
"loss": 0.3163, |
|
"step": 411 |
|
}, |
|
{ |
|
"epoch": 0.6176911544227887, |
|
"grad_norm": 1.7683774569170425, |
|
"learning_rate": 7.825396347237413e-06, |
|
"loss": 0.3077, |
|
"step": 412 |
|
}, |
|
{ |
|
"epoch": 0.6191904047976012, |
|
"grad_norm": 1.4797183745953193, |
|
"learning_rate": 7.815673645031871e-06, |
|
"loss": 0.3246, |
|
"step": 413 |
|
}, |
|
{ |
|
"epoch": 0.6206896551724138, |
|
"grad_norm": 1.4362048583158942, |
|
"learning_rate": 7.805935326811913e-06, |
|
"loss": 0.3035, |
|
"step": 414 |
|
}, |
|
{ |
|
"epoch": 0.6221889055472264, |
|
"grad_norm": 1.5423426053687976, |
|
"learning_rate": 7.796181446587244e-06, |
|
"loss": 0.2674, |
|
"step": 415 |
|
}, |
|
{ |
|
"epoch": 0.623688155922039, |
|
"grad_norm": 1.4145697008017475, |
|
"learning_rate": 7.786412058453886e-06, |
|
"loss": 0.2851, |
|
"step": 416 |
|
}, |
|
{ |
|
"epoch": 0.6251874062968515, |
|
"grad_norm": 1.4263423096837133, |
|
"learning_rate": 7.776627216593863e-06, |
|
"loss": 0.2572, |
|
"step": 417 |
|
}, |
|
{ |
|
"epoch": 0.6266866566716641, |
|
"grad_norm": 1.586549287920147, |
|
"learning_rate": 7.766826975274916e-06, |
|
"loss": 0.2712, |
|
"step": 418 |
|
}, |
|
{ |
|
"epoch": 0.6281859070464768, |
|
"grad_norm": 1.5131788482439936, |
|
"learning_rate": 7.75701138885018e-06, |
|
"loss": 0.2992, |
|
"step": 419 |
|
}, |
|
{ |
|
"epoch": 0.6296851574212894, |
|
"grad_norm": 1.5020750976431931, |
|
"learning_rate": 7.747180511757908e-06, |
|
"loss": 0.3074, |
|
"step": 420 |
|
}, |
|
{ |
|
"epoch": 0.631184407796102, |
|
"grad_norm": 1.3538347735977267, |
|
"learning_rate": 7.737334398521149e-06, |
|
"loss": 0.263, |
|
"step": 421 |
|
}, |
|
{ |
|
"epoch": 0.6326836581709145, |
|
"grad_norm": 1.514413986867279, |
|
"learning_rate": 7.727473103747456e-06, |
|
"loss": 0.3495, |
|
"step": 422 |
|
}, |
|
{ |
|
"epoch": 0.6341829085457271, |
|
"grad_norm": 1.4612722169079506, |
|
"learning_rate": 7.717596682128578e-06, |
|
"loss": 0.2521, |
|
"step": 423 |
|
}, |
|
{ |
|
"epoch": 0.6356821589205397, |
|
"grad_norm": 1.4649249453385167, |
|
"learning_rate": 7.707705188440165e-06, |
|
"loss": 0.2902, |
|
"step": 424 |
|
}, |
|
{ |
|
"epoch": 0.6371814092953523, |
|
"grad_norm": 1.5852062661832558, |
|
"learning_rate": 7.697798677541448e-06, |
|
"loss": 0.3088, |
|
"step": 425 |
|
}, |
|
{ |
|
"epoch": 0.638680659670165, |
|
"grad_norm": 1.3241949061805072, |
|
"learning_rate": 7.687877204374957e-06, |
|
"loss": 0.2478, |
|
"step": 426 |
|
}, |
|
{ |
|
"epoch": 0.6401799100449775, |
|
"grad_norm": 1.4030774313449836, |
|
"learning_rate": 7.677940823966196e-06, |
|
"loss": 0.2558, |
|
"step": 427 |
|
}, |
|
{ |
|
"epoch": 0.6416791604197901, |
|
"grad_norm": 1.402885148784031, |
|
"learning_rate": 7.667989591423349e-06, |
|
"loss": 0.3037, |
|
"step": 428 |
|
}, |
|
{ |
|
"epoch": 0.6431784107946027, |
|
"grad_norm": 1.277487083138602, |
|
"learning_rate": 7.658023561936966e-06, |
|
"loss": 0.2346, |
|
"step": 429 |
|
}, |
|
{ |
|
"epoch": 0.6446776611694153, |
|
"grad_norm": 1.3581505687018312, |
|
"learning_rate": 7.648042790779677e-06, |
|
"loss": 0.293, |
|
"step": 430 |
|
}, |
|
{ |
|
"epoch": 0.6461769115442278, |
|
"grad_norm": 1.5221074822476175, |
|
"learning_rate": 7.638047333305853e-06, |
|
"loss": 0.3228, |
|
"step": 431 |
|
}, |
|
{ |
|
"epoch": 0.6476761619190404, |
|
"grad_norm": 1.4895620088229964, |
|
"learning_rate": 7.628037244951328e-06, |
|
"loss": 0.3148, |
|
"step": 432 |
|
}, |
|
{ |
|
"epoch": 0.6491754122938531, |
|
"grad_norm": 1.514290497632156, |
|
"learning_rate": 7.618012581233076e-06, |
|
"loss": 0.3167, |
|
"step": 433 |
|
}, |
|
{ |
|
"epoch": 0.6506746626686657, |
|
"grad_norm": 1.3716765759657792, |
|
"learning_rate": 7.607973397748909e-06, |
|
"loss": 0.3057, |
|
"step": 434 |
|
}, |
|
{ |
|
"epoch": 0.6521739130434783, |
|
"grad_norm": 1.4837843327419138, |
|
"learning_rate": 7.597919750177168e-06, |
|
"loss": 0.2974, |
|
"step": 435 |
|
}, |
|
{ |
|
"epoch": 0.6536731634182908, |
|
"grad_norm": 1.5261849742576514, |
|
"learning_rate": 7.587851694276412e-06, |
|
"loss": 0.2834, |
|
"step": 436 |
|
}, |
|
{ |
|
"epoch": 0.6551724137931034, |
|
"grad_norm": 1.47832766014606, |
|
"learning_rate": 7.57776928588511e-06, |
|
"loss": 0.2714, |
|
"step": 437 |
|
}, |
|
{ |
|
"epoch": 0.656671664167916, |
|
"grad_norm": 1.2933753299940371, |
|
"learning_rate": 7.56767258092133e-06, |
|
"loss": 0.3007, |
|
"step": 438 |
|
}, |
|
{ |
|
"epoch": 0.6581709145427287, |
|
"grad_norm": 1.5864089772861663, |
|
"learning_rate": 7.557561635382433e-06, |
|
"loss": 0.3543, |
|
"step": 439 |
|
}, |
|
{ |
|
"epoch": 0.6596701649175413, |
|
"grad_norm": 1.2596054376286927, |
|
"learning_rate": 7.54743650534476e-06, |
|
"loss": 0.2657, |
|
"step": 440 |
|
}, |
|
{ |
|
"epoch": 0.6611694152923538, |
|
"grad_norm": 1.4915827024861525, |
|
"learning_rate": 7.537297246963316e-06, |
|
"loss": 0.3082, |
|
"step": 441 |
|
}, |
|
{ |
|
"epoch": 0.6626686656671664, |
|
"grad_norm": 1.447635534551508, |
|
"learning_rate": 7.5271439164714695e-06, |
|
"loss": 0.3202, |
|
"step": 442 |
|
}, |
|
{ |
|
"epoch": 0.664167916041979, |
|
"grad_norm": 1.206525246287725, |
|
"learning_rate": 7.5169765701806295e-06, |
|
"loss": 0.2689, |
|
"step": 443 |
|
}, |
|
{ |
|
"epoch": 0.6656671664167916, |
|
"grad_norm": 1.521284178006273, |
|
"learning_rate": 7.506795264479941e-06, |
|
"loss": 0.2897, |
|
"step": 444 |
|
}, |
|
{ |
|
"epoch": 0.6671664167916042, |
|
"grad_norm": 1.4116064152432382, |
|
"learning_rate": 7.4966000558359675e-06, |
|
"loss": 0.2929, |
|
"step": 445 |
|
}, |
|
{ |
|
"epoch": 0.6686656671664168, |
|
"grad_norm": 1.6492116988043684, |
|
"learning_rate": 7.486391000792379e-06, |
|
"loss": 0.3485, |
|
"step": 446 |
|
}, |
|
{ |
|
"epoch": 0.6701649175412294, |
|
"grad_norm": 1.7455678286882141, |
|
"learning_rate": 7.476168155969643e-06, |
|
"loss": 0.4197, |
|
"step": 447 |
|
}, |
|
{ |
|
"epoch": 0.671664167916042, |
|
"grad_norm": 1.5935825968847936, |
|
"learning_rate": 7.465931578064703e-06, |
|
"loss": 0.2765, |
|
"step": 448 |
|
}, |
|
{ |
|
"epoch": 0.6731634182908546, |
|
"grad_norm": 1.3190269078682864, |
|
"learning_rate": 7.455681323850669e-06, |
|
"loss": 0.2788, |
|
"step": 449 |
|
}, |
|
{ |
|
"epoch": 0.6746626686656672, |
|
"grad_norm": 1.3649129605274712, |
|
"learning_rate": 7.4454174501765e-06, |
|
"loss": 0.3065, |
|
"step": 450 |
|
}, |
|
{ |
|
"epoch": 0.6761619190404797, |
|
"grad_norm": 1.308593989007283, |
|
"learning_rate": 7.4351400139666894e-06, |
|
"loss": 0.2694, |
|
"step": 451 |
|
}, |
|
{ |
|
"epoch": 0.6776611694152923, |
|
"grad_norm": 1.2894750680147766, |
|
"learning_rate": 7.424849072220953e-06, |
|
"loss": 0.3125, |
|
"step": 452 |
|
}, |
|
{ |
|
"epoch": 0.679160419790105, |
|
"grad_norm": 1.4847915599618102, |
|
"learning_rate": 7.414544682013907e-06, |
|
"loss": 0.301, |
|
"step": 453 |
|
}, |
|
{ |
|
"epoch": 0.6806596701649176, |
|
"grad_norm": 1.5232019125166283, |
|
"learning_rate": 7.404226900494753e-06, |
|
"loss": 0.3267, |
|
"step": 454 |
|
}, |
|
{ |
|
"epoch": 0.6821589205397302, |
|
"grad_norm": 1.2345785992963527, |
|
"learning_rate": 7.3938957848869684e-06, |
|
"loss": 0.2745, |
|
"step": 455 |
|
}, |
|
{ |
|
"epoch": 0.6836581709145427, |
|
"grad_norm": 1.635858249949889, |
|
"learning_rate": 7.3835513924879755e-06, |
|
"loss": 0.3125, |
|
"step": 456 |
|
}, |
|
{ |
|
"epoch": 0.6851574212893553, |
|
"grad_norm": 1.149820377473072, |
|
"learning_rate": 7.373193780668835e-06, |
|
"loss": 0.2396, |
|
"step": 457 |
|
}, |
|
{ |
|
"epoch": 0.6866566716641679, |
|
"grad_norm": 1.3222898367566278, |
|
"learning_rate": 7.36282300687392e-06, |
|
"loss": 0.2946, |
|
"step": 458 |
|
}, |
|
{ |
|
"epoch": 0.6881559220389805, |
|
"grad_norm": 1.7971708473069352, |
|
"learning_rate": 7.35243912862061e-06, |
|
"loss": 0.3595, |
|
"step": 459 |
|
}, |
|
{ |
|
"epoch": 0.6896551724137931, |
|
"grad_norm": 1.5140137944873706, |
|
"learning_rate": 7.342042203498952e-06, |
|
"loss": 0.3277, |
|
"step": 460 |
|
}, |
|
{ |
|
"epoch": 0.6911544227886057, |
|
"grad_norm": 1.695383595122999, |
|
"learning_rate": 7.33163228917136e-06, |
|
"loss": 0.3617, |
|
"step": 461 |
|
}, |
|
{ |
|
"epoch": 0.6926536731634183, |
|
"grad_norm": 1.296170215239378, |
|
"learning_rate": 7.321209443372284e-06, |
|
"loss": 0.2577, |
|
"step": 462 |
|
}, |
|
{ |
|
"epoch": 0.6941529235382309, |
|
"grad_norm": 1.5669072802310569, |
|
"learning_rate": 7.310773723907895e-06, |
|
"loss": 0.3807, |
|
"step": 463 |
|
}, |
|
{ |
|
"epoch": 0.6956521739130435, |
|
"grad_norm": 1.3689687819823577, |
|
"learning_rate": 7.300325188655762e-06, |
|
"loss": 0.2598, |
|
"step": 464 |
|
}, |
|
{ |
|
"epoch": 0.697151424287856, |
|
"grad_norm": 1.6055006193320565, |
|
"learning_rate": 7.289863895564531e-06, |
|
"loss": 0.3482, |
|
"step": 465 |
|
}, |
|
{ |
|
"epoch": 0.6986506746626686, |
|
"grad_norm": 1.4514949398414718, |
|
"learning_rate": 7.279389902653606e-06, |
|
"loss": 0.2931, |
|
"step": 466 |
|
}, |
|
{ |
|
"epoch": 0.7001499250374813, |
|
"grad_norm": 1.7002732718419034, |
|
"learning_rate": 7.268903268012823e-06, |
|
"loss": 0.3401, |
|
"step": 467 |
|
}, |
|
{ |
|
"epoch": 0.7016491754122939, |
|
"grad_norm": 1.1984173265549762, |
|
"learning_rate": 7.258404049802135e-06, |
|
"loss": 0.2287, |
|
"step": 468 |
|
}, |
|
{ |
|
"epoch": 0.7031484257871065, |
|
"grad_norm": 1.140211799491912, |
|
"learning_rate": 7.247892306251276e-06, |
|
"loss": 0.226, |
|
"step": 469 |
|
}, |
|
{ |
|
"epoch": 0.704647676161919, |
|
"grad_norm": 1.3027451409213062, |
|
"learning_rate": 7.237368095659459e-06, |
|
"loss": 0.3169, |
|
"step": 470 |
|
}, |
|
{ |
|
"epoch": 0.7061469265367316, |
|
"grad_norm": 1.6413347599296277, |
|
"learning_rate": 7.226831476395028e-06, |
|
"loss": 0.3011, |
|
"step": 471 |
|
}, |
|
{ |
|
"epoch": 0.7076461769115442, |
|
"grad_norm": 1.6722238750016507, |
|
"learning_rate": 7.216282506895155e-06, |
|
"loss": 0.2884, |
|
"step": 472 |
|
}, |
|
{ |
|
"epoch": 0.7091454272863568, |
|
"grad_norm": 1.3820652066622765, |
|
"learning_rate": 7.2057212456655055e-06, |
|
"loss": 0.3051, |
|
"step": 473 |
|
}, |
|
{ |
|
"epoch": 0.7106446776611695, |
|
"grad_norm": 1.4627147047856914, |
|
"learning_rate": 7.195147751279915e-06, |
|
"loss": 0.2925, |
|
"step": 474 |
|
}, |
|
{ |
|
"epoch": 0.712143928035982, |
|
"grad_norm": 1.5615572592798332, |
|
"learning_rate": 7.184562082380069e-06, |
|
"loss": 0.3061, |
|
"step": 475 |
|
}, |
|
{ |
|
"epoch": 0.7136431784107946, |
|
"grad_norm": 1.7127142065683358, |
|
"learning_rate": 7.173964297675168e-06, |
|
"loss": 0.344, |
|
"step": 476 |
|
}, |
|
{ |
|
"epoch": 0.7151424287856072, |
|
"grad_norm": 1.5486907588716836, |
|
"learning_rate": 7.163354455941614e-06, |
|
"loss": 0.3265, |
|
"step": 477 |
|
}, |
|
{ |
|
"epoch": 0.7166416791604198, |
|
"grad_norm": 1.4030585005720235, |
|
"learning_rate": 7.152732616022675e-06, |
|
"loss": 0.2665, |
|
"step": 478 |
|
}, |
|
{ |
|
"epoch": 0.7181409295352323, |
|
"grad_norm": 1.3322057839222532, |
|
"learning_rate": 7.142098836828162e-06, |
|
"loss": 0.3117, |
|
"step": 479 |
|
}, |
|
{ |
|
"epoch": 0.719640179910045, |
|
"grad_norm": 1.3975504878625638, |
|
"learning_rate": 7.131453177334103e-06, |
|
"loss": 0.3069, |
|
"step": 480 |
|
}, |
|
{ |
|
"epoch": 0.7211394302848576, |
|
"grad_norm": 1.0945088603081936, |
|
"learning_rate": 7.120795696582419e-06, |
|
"loss": 0.2516, |
|
"step": 481 |
|
}, |
|
{ |
|
"epoch": 0.7226386806596702, |
|
"grad_norm": 1.4290632578017513, |
|
"learning_rate": 7.1101264536805885e-06, |
|
"loss": 0.2829, |
|
"step": 482 |
|
}, |
|
{ |
|
"epoch": 0.7241379310344828, |
|
"grad_norm": 1.4243890988280488, |
|
"learning_rate": 7.099445507801324e-06, |
|
"loss": 0.281, |
|
"step": 483 |
|
}, |
|
{ |
|
"epoch": 0.7256371814092953, |
|
"grad_norm": 1.4278402465378144, |
|
"learning_rate": 7.088752918182247e-06, |
|
"loss": 0.2676, |
|
"step": 484 |
|
}, |
|
{ |
|
"epoch": 0.7271364317841079, |
|
"grad_norm": 1.5069524803063328, |
|
"learning_rate": 7.078048744125553e-06, |
|
"loss": 0.3263, |
|
"step": 485 |
|
}, |
|
{ |
|
"epoch": 0.7286356821589205, |
|
"grad_norm": 1.1209588755097866, |
|
"learning_rate": 7.067333044997689e-06, |
|
"loss": 0.2555, |
|
"step": 486 |
|
}, |
|
{ |
|
"epoch": 0.7301349325337332, |
|
"grad_norm": 1.4233085489601176, |
|
"learning_rate": 7.0566058802290196e-06, |
|
"loss": 0.3053, |
|
"step": 487 |
|
}, |
|
{ |
|
"epoch": 0.7316341829085458, |
|
"grad_norm": 1.3003998702719686, |
|
"learning_rate": 7.045867309313499e-06, |
|
"loss": 0.3077, |
|
"step": 488 |
|
}, |
|
{ |
|
"epoch": 0.7331334332833583, |
|
"grad_norm": 1.371637304171436, |
|
"learning_rate": 7.035117391808341e-06, |
|
"loss": 0.2882, |
|
"step": 489 |
|
}, |
|
{ |
|
"epoch": 0.7346326836581709, |
|
"grad_norm": 1.3700945109307232, |
|
"learning_rate": 7.024356187333692e-06, |
|
"loss": 0.3535, |
|
"step": 490 |
|
}, |
|
{ |
|
"epoch": 0.7361319340329835, |
|
"grad_norm": 1.372970950040628, |
|
"learning_rate": 7.01358375557229e-06, |
|
"loss": 0.3184, |
|
"step": 491 |
|
}, |
|
{ |
|
"epoch": 0.7376311844077961, |
|
"grad_norm": 1.3825121720318863, |
|
"learning_rate": 7.0028001562691475e-06, |
|
"loss": 0.2722, |
|
"step": 492 |
|
}, |
|
{ |
|
"epoch": 0.7391304347826086, |
|
"grad_norm": 1.3013775517397221, |
|
"learning_rate": 6.9920054492312086e-06, |
|
"loss": 0.3287, |
|
"step": 493 |
|
}, |
|
{ |
|
"epoch": 0.7406296851574213, |
|
"grad_norm": 1.4737084227604176, |
|
"learning_rate": 6.981199694327024e-06, |
|
"loss": 0.3094, |
|
"step": 494 |
|
}, |
|
{ |
|
"epoch": 0.7421289355322339, |
|
"grad_norm": 1.3101411746781326, |
|
"learning_rate": 6.97038295148642e-06, |
|
"loss": 0.2839, |
|
"step": 495 |
|
}, |
|
{ |
|
"epoch": 0.7436281859070465, |
|
"grad_norm": 1.4892593890064303, |
|
"learning_rate": 6.959555280700162e-06, |
|
"loss": 0.2964, |
|
"step": 496 |
|
}, |
|
{ |
|
"epoch": 0.7451274362818591, |
|
"grad_norm": 1.7096190584184843, |
|
"learning_rate": 6.948716742019616e-06, |
|
"loss": 0.294, |
|
"step": 497 |
|
}, |
|
{ |
|
"epoch": 0.7466266866566716, |
|
"grad_norm": 1.3979921629169787, |
|
"learning_rate": 6.937867395556428e-06, |
|
"loss": 0.3193, |
|
"step": 498 |
|
}, |
|
{ |
|
"epoch": 0.7481259370314842, |
|
"grad_norm": 1.44777450491065, |
|
"learning_rate": 6.927007301482187e-06, |
|
"loss": 0.2905, |
|
"step": 499 |
|
}, |
|
{ |
|
"epoch": 0.7496251874062968, |
|
"grad_norm": 1.271551321930749, |
|
"learning_rate": 6.916136520028087e-06, |
|
"loss": 0.297, |
|
"step": 500 |
|
}, |
|
{ |
|
"epoch": 0.7511244377811095, |
|
"grad_norm": 1.5139383519838707, |
|
"learning_rate": 6.905255111484592e-06, |
|
"loss": 0.2959, |
|
"step": 501 |
|
}, |
|
{ |
|
"epoch": 0.7526236881559221, |
|
"grad_norm": 1.6128675870437148, |
|
"learning_rate": 6.894363136201114e-06, |
|
"loss": 0.3524, |
|
"step": 502 |
|
}, |
|
{ |
|
"epoch": 0.7541229385307346, |
|
"grad_norm": 1.4424070702121157, |
|
"learning_rate": 6.88346065458566e-06, |
|
"loss": 0.3102, |
|
"step": 503 |
|
}, |
|
{ |
|
"epoch": 0.7556221889055472, |
|
"grad_norm": 1.3671962301245493, |
|
"learning_rate": 6.8725477271045085e-06, |
|
"loss": 0.3026, |
|
"step": 504 |
|
}, |
|
{ |
|
"epoch": 0.7571214392803598, |
|
"grad_norm": 1.442281151937481, |
|
"learning_rate": 6.861624414281875e-06, |
|
"loss": 0.2777, |
|
"step": 505 |
|
}, |
|
{ |
|
"epoch": 0.7586206896551724, |
|
"grad_norm": 1.3749554454658155, |
|
"learning_rate": 6.850690776699574e-06, |
|
"loss": 0.2678, |
|
"step": 506 |
|
}, |
|
{ |
|
"epoch": 0.760119940029985, |
|
"grad_norm": 1.4900828297443907, |
|
"learning_rate": 6.8397468749966735e-06, |
|
"loss": 0.3366, |
|
"step": 507 |
|
}, |
|
{ |
|
"epoch": 0.7616191904047976, |
|
"grad_norm": 1.4659412454705263, |
|
"learning_rate": 6.8287927698691745e-06, |
|
"loss": 0.318, |
|
"step": 508 |
|
}, |
|
{ |
|
"epoch": 0.7631184407796102, |
|
"grad_norm": 1.3001117219908083, |
|
"learning_rate": 6.8178285220696686e-06, |
|
"loss": 0.315, |
|
"step": 509 |
|
}, |
|
{ |
|
"epoch": 0.7646176911544228, |
|
"grad_norm": 1.2959059727140845, |
|
"learning_rate": 6.806854192406995e-06, |
|
"loss": 0.2421, |
|
"step": 510 |
|
}, |
|
{ |
|
"epoch": 0.7661169415292354, |
|
"grad_norm": 1.5826201560626563, |
|
"learning_rate": 6.795869841745912e-06, |
|
"loss": 0.3146, |
|
"step": 511 |
|
}, |
|
{ |
|
"epoch": 0.767616191904048, |
|
"grad_norm": 1.4578325207942953, |
|
"learning_rate": 6.784875531006751e-06, |
|
"loss": 0.3353, |
|
"step": 512 |
|
}, |
|
{ |
|
"epoch": 0.7691154422788605, |
|
"grad_norm": 1.161682568121232, |
|
"learning_rate": 6.7738713211650885e-06, |
|
"loss": 0.2926, |
|
"step": 513 |
|
}, |
|
{ |
|
"epoch": 0.7706146926536732, |
|
"grad_norm": 1.5305535208266787, |
|
"learning_rate": 6.762857273251396e-06, |
|
"loss": 0.3128, |
|
"step": 514 |
|
}, |
|
{ |
|
"epoch": 0.7721139430284858, |
|
"grad_norm": 1.7063210636875437, |
|
"learning_rate": 6.751833448350713e-06, |
|
"loss": 0.2993, |
|
"step": 515 |
|
}, |
|
{ |
|
"epoch": 0.7736131934032984, |
|
"grad_norm": 1.6456135292610181, |
|
"learning_rate": 6.740799907602302e-06, |
|
"loss": 0.3019, |
|
"step": 516 |
|
}, |
|
{ |
|
"epoch": 0.775112443778111, |
|
"grad_norm": 1.2426157748268456, |
|
"learning_rate": 6.729756712199309e-06, |
|
"loss": 0.2824, |
|
"step": 517 |
|
}, |
|
{ |
|
"epoch": 0.7766116941529235, |
|
"grad_norm": 1.4837539502622794, |
|
"learning_rate": 6.718703923388427e-06, |
|
"loss": 0.2434, |
|
"step": 518 |
|
}, |
|
{ |
|
"epoch": 0.7781109445277361, |
|
"grad_norm": 1.3434614613743725, |
|
"learning_rate": 6.707641602469554e-06, |
|
"loss": 0.2599, |
|
"step": 519 |
|
}, |
|
{ |
|
"epoch": 0.7796101949025487, |
|
"grad_norm": 1.5499562443429653, |
|
"learning_rate": 6.696569810795455e-06, |
|
"loss": 0.3173, |
|
"step": 520 |
|
}, |
|
{ |
|
"epoch": 0.7811094452773614, |
|
"grad_norm": 1.3861012563538573, |
|
"learning_rate": 6.685488609771422e-06, |
|
"loss": 0.2949, |
|
"step": 521 |
|
}, |
|
{ |
|
"epoch": 0.782608695652174, |
|
"grad_norm": 1.6224606096357699, |
|
"learning_rate": 6.674398060854931e-06, |
|
"loss": 0.3209, |
|
"step": 522 |
|
}, |
|
{ |
|
"epoch": 0.7841079460269865, |
|
"grad_norm": 1.3855377694609932, |
|
"learning_rate": 6.6632982255553004e-06, |
|
"loss": 0.2779, |
|
"step": 523 |
|
}, |
|
{ |
|
"epoch": 0.7856071964017991, |
|
"grad_norm": 1.6121761396352565, |
|
"learning_rate": 6.652189165433356e-06, |
|
"loss": 0.3242, |
|
"step": 524 |
|
}, |
|
{ |
|
"epoch": 0.7871064467766117, |
|
"grad_norm": 1.2201563254362011, |
|
"learning_rate": 6.64107094210108e-06, |
|
"loss": 0.2574, |
|
"step": 525 |
|
}, |
|
{ |
|
"epoch": 0.7886056971514243, |
|
"grad_norm": 1.5837106620290757, |
|
"learning_rate": 6.62994361722128e-06, |
|
"loss": 0.3597, |
|
"step": 526 |
|
}, |
|
{ |
|
"epoch": 0.7901049475262368, |
|
"grad_norm": 1.3080007300343965, |
|
"learning_rate": 6.618807252507238e-06, |
|
"loss": 0.2728, |
|
"step": 527 |
|
}, |
|
{ |
|
"epoch": 0.7916041979010495, |
|
"grad_norm": 1.7207174990749512, |
|
"learning_rate": 6.6076619097223735e-06, |
|
"loss": 0.3896, |
|
"step": 528 |
|
}, |
|
{ |
|
"epoch": 0.7931034482758621, |
|
"grad_norm": 1.8410214396008497, |
|
"learning_rate": 6.5965076506799e-06, |
|
"loss": 0.332, |
|
"step": 529 |
|
}, |
|
{ |
|
"epoch": 0.7946026986506747, |
|
"grad_norm": 1.5709803889468719, |
|
"learning_rate": 6.5853445372424805e-06, |
|
"loss": 0.3002, |
|
"step": 530 |
|
}, |
|
{ |
|
"epoch": 0.7961019490254873, |
|
"grad_norm": 1.6453166169425513, |
|
"learning_rate": 6.574172631321885e-06, |
|
"loss": 0.3038, |
|
"step": 531 |
|
}, |
|
{ |
|
"epoch": 0.7976011994002998, |
|
"grad_norm": 1.6454944309779433, |
|
"learning_rate": 6.562991994878649e-06, |
|
"loss": 0.3268, |
|
"step": 532 |
|
}, |
|
{ |
|
"epoch": 0.7991004497751124, |
|
"grad_norm": 1.568436292974634, |
|
"learning_rate": 6.551802689921726e-06, |
|
"loss": 0.3294, |
|
"step": 533 |
|
}, |
|
{ |
|
"epoch": 0.800599700149925, |
|
"grad_norm": 1.228856484817089, |
|
"learning_rate": 6.5406047785081485e-06, |
|
"loss": 0.2951, |
|
"step": 534 |
|
}, |
|
{ |
|
"epoch": 0.8020989505247377, |
|
"grad_norm": 1.150307761731132, |
|
"learning_rate": 6.529398322742677e-06, |
|
"loss": 0.2049, |
|
"step": 535 |
|
}, |
|
{ |
|
"epoch": 0.8035982008995503, |
|
"grad_norm": 1.2687951906666617, |
|
"learning_rate": 6.518183384777468e-06, |
|
"loss": 0.2396, |
|
"step": 536 |
|
}, |
|
{ |
|
"epoch": 0.8050974512743628, |
|
"grad_norm": 1.5860720753499649, |
|
"learning_rate": 6.506960026811712e-06, |
|
"loss": 0.3408, |
|
"step": 537 |
|
}, |
|
{ |
|
"epoch": 0.8065967016491754, |
|
"grad_norm": 1.400324377807205, |
|
"learning_rate": 6.495728311091303e-06, |
|
"loss": 0.2507, |
|
"step": 538 |
|
}, |
|
{ |
|
"epoch": 0.808095952023988, |
|
"grad_norm": 1.3407564762956719, |
|
"learning_rate": 6.484488299908487e-06, |
|
"loss": 0.2526, |
|
"step": 539 |
|
}, |
|
{ |
|
"epoch": 0.8095952023988006, |
|
"grad_norm": 1.694971638513342, |
|
"learning_rate": 6.473240055601517e-06, |
|
"loss": 0.2925, |
|
"step": 540 |
|
}, |
|
{ |
|
"epoch": 0.8110944527736131, |
|
"grad_norm": 1.3051634848999276, |
|
"learning_rate": 6.46198364055431e-06, |
|
"loss": 0.3198, |
|
"step": 541 |
|
}, |
|
{ |
|
"epoch": 0.8125937031484258, |
|
"grad_norm": 1.4209834742780914, |
|
"learning_rate": 6.450719117196094e-06, |
|
"loss": 0.2841, |
|
"step": 542 |
|
}, |
|
{ |
|
"epoch": 0.8140929535232384, |
|
"grad_norm": 1.5456807832456958, |
|
"learning_rate": 6.439446548001069e-06, |
|
"loss": 0.2894, |
|
"step": 543 |
|
}, |
|
{ |
|
"epoch": 0.815592203898051, |
|
"grad_norm": 1.6970824843923287, |
|
"learning_rate": 6.4281659954880605e-06, |
|
"loss": 0.3286, |
|
"step": 544 |
|
}, |
|
{ |
|
"epoch": 0.8170914542728636, |
|
"grad_norm": 1.5394537765295597, |
|
"learning_rate": 6.416877522220167e-06, |
|
"loss": 0.319, |
|
"step": 545 |
|
}, |
|
{ |
|
"epoch": 0.8185907046476761, |
|
"grad_norm": 1.5196056725942917, |
|
"learning_rate": 6.405581190804418e-06, |
|
"loss": 0.3121, |
|
"step": 546 |
|
}, |
|
{ |
|
"epoch": 0.8200899550224887, |
|
"grad_norm": 1.538525170169423, |
|
"learning_rate": 6.394277063891422e-06, |
|
"loss": 0.2982, |
|
"step": 547 |
|
}, |
|
{ |
|
"epoch": 0.8215892053973014, |
|
"grad_norm": 1.420879269740161, |
|
"learning_rate": 6.382965204175027e-06, |
|
"loss": 0.356, |
|
"step": 548 |
|
}, |
|
{ |
|
"epoch": 0.823088455772114, |
|
"grad_norm": 1.3803279644484636, |
|
"learning_rate": 6.371645674391967e-06, |
|
"loss": 0.2689, |
|
"step": 549 |
|
}, |
|
{ |
|
"epoch": 0.8245877061469266, |
|
"grad_norm": 1.5390146778775227, |
|
"learning_rate": 6.3603185373215105e-06, |
|
"loss": 0.2777, |
|
"step": 550 |
|
}, |
|
{ |
|
"epoch": 0.8260869565217391, |
|
"grad_norm": 1.2124994140317764, |
|
"learning_rate": 6.348983855785122e-06, |
|
"loss": 0.2975, |
|
"step": 551 |
|
}, |
|
{ |
|
"epoch": 0.8275862068965517, |
|
"grad_norm": 1.347666739407509, |
|
"learning_rate": 6.337641692646106e-06, |
|
"loss": 0.2931, |
|
"step": 552 |
|
}, |
|
{ |
|
"epoch": 0.8290854572713643, |
|
"grad_norm": 1.0316649456573812, |
|
"learning_rate": 6.326292110809258e-06, |
|
"loss": 0.2591, |
|
"step": 553 |
|
}, |
|
{ |
|
"epoch": 0.8305847076461769, |
|
"grad_norm": 1.1793130273689618, |
|
"learning_rate": 6.314935173220524e-06, |
|
"loss": 0.2472, |
|
"step": 554 |
|
}, |
|
{ |
|
"epoch": 0.8320839580209896, |
|
"grad_norm": 1.3306237638358906, |
|
"learning_rate": 6.303570942866643e-06, |
|
"loss": 0.3037, |
|
"step": 555 |
|
}, |
|
{ |
|
"epoch": 0.8335832083958021, |
|
"grad_norm": 1.2939098473779578, |
|
"learning_rate": 6.2921994827748e-06, |
|
"loss": 0.2318, |
|
"step": 556 |
|
}, |
|
{ |
|
"epoch": 0.8350824587706147, |
|
"grad_norm": 1.3978880053168186, |
|
"learning_rate": 6.280820856012277e-06, |
|
"loss": 0.2977, |
|
"step": 557 |
|
}, |
|
{ |
|
"epoch": 0.8365817091454273, |
|
"grad_norm": 1.2090118832547283, |
|
"learning_rate": 6.269435125686105e-06, |
|
"loss": 0.261, |
|
"step": 558 |
|
}, |
|
{ |
|
"epoch": 0.8380809595202399, |
|
"grad_norm": 1.3985866398226101, |
|
"learning_rate": 6.258042354942708e-06, |
|
"loss": 0.3036, |
|
"step": 559 |
|
}, |
|
{ |
|
"epoch": 0.8395802098950524, |
|
"grad_norm": 1.1606338171512245, |
|
"learning_rate": 6.2466426069675626e-06, |
|
"loss": 0.2477, |
|
"step": 560 |
|
}, |
|
{ |
|
"epoch": 0.841079460269865, |
|
"grad_norm": 1.2752184464154694, |
|
"learning_rate": 6.235235944984835e-06, |
|
"loss": 0.2569, |
|
"step": 561 |
|
}, |
|
{ |
|
"epoch": 0.8425787106446777, |
|
"grad_norm": 1.3606727254695086, |
|
"learning_rate": 6.223822432257043e-06, |
|
"loss": 0.3106, |
|
"step": 562 |
|
}, |
|
{ |
|
"epoch": 0.8440779610194903, |
|
"grad_norm": 1.6737768428007465, |
|
"learning_rate": 6.212402132084697e-06, |
|
"loss": 0.3228, |
|
"step": 563 |
|
}, |
|
{ |
|
"epoch": 0.8455772113943029, |
|
"grad_norm": 1.4872863959750497, |
|
"learning_rate": 6.200975107805951e-06, |
|
"loss": 0.3318, |
|
"step": 564 |
|
}, |
|
{ |
|
"epoch": 0.8470764617691154, |
|
"grad_norm": 1.4190668277366123, |
|
"learning_rate": 6.189541422796254e-06, |
|
"loss": 0.2838, |
|
"step": 565 |
|
}, |
|
{ |
|
"epoch": 0.848575712143928, |
|
"grad_norm": 1.476533938778296, |
|
"learning_rate": 6.1781011404679905e-06, |
|
"loss": 0.345, |
|
"step": 566 |
|
}, |
|
{ |
|
"epoch": 0.8500749625187406, |
|
"grad_norm": 1.4046281735745454, |
|
"learning_rate": 6.16665432427014e-06, |
|
"loss": 0.2948, |
|
"step": 567 |
|
}, |
|
{ |
|
"epoch": 0.8515742128935532, |
|
"grad_norm": 1.3727655219029598, |
|
"learning_rate": 6.155201037687917e-06, |
|
"loss": 0.3222, |
|
"step": 568 |
|
}, |
|
{ |
|
"epoch": 0.8530734632683659, |
|
"grad_norm": 1.1826488043411998, |
|
"learning_rate": 6.1437413442424236e-06, |
|
"loss": 0.2496, |
|
"step": 569 |
|
}, |
|
{ |
|
"epoch": 0.8545727136431784, |
|
"grad_norm": 1.2764505261433423, |
|
"learning_rate": 6.132275307490291e-06, |
|
"loss": 0.2573, |
|
"step": 570 |
|
}, |
|
{ |
|
"epoch": 0.856071964017991, |
|
"grad_norm": 1.4266014279851462, |
|
"learning_rate": 6.120802991023334e-06, |
|
"loss": 0.2758, |
|
"step": 571 |
|
}, |
|
{ |
|
"epoch": 0.8575712143928036, |
|
"grad_norm": 1.3290173029582117, |
|
"learning_rate": 6.109324458468198e-06, |
|
"loss": 0.2859, |
|
"step": 572 |
|
}, |
|
{ |
|
"epoch": 0.8590704647676162, |
|
"grad_norm": 1.4697115370090508, |
|
"learning_rate": 6.097839773485995e-06, |
|
"loss": 0.2775, |
|
"step": 573 |
|
}, |
|
{ |
|
"epoch": 0.8605697151424287, |
|
"grad_norm": 1.5752850012956432, |
|
"learning_rate": 6.086348999771967e-06, |
|
"loss": 0.3346, |
|
"step": 574 |
|
}, |
|
{ |
|
"epoch": 0.8620689655172413, |
|
"grad_norm": 1.36699571040609, |
|
"learning_rate": 6.074852201055121e-06, |
|
"loss": 0.2588, |
|
"step": 575 |
|
}, |
|
{ |
|
"epoch": 0.863568215892054, |
|
"grad_norm": 1.2426740616867487, |
|
"learning_rate": 6.063349441097881e-06, |
|
"loss": 0.2601, |
|
"step": 576 |
|
}, |
|
{ |
|
"epoch": 0.8650674662668666, |
|
"grad_norm": 1.4919809348981423, |
|
"learning_rate": 6.051840783695731e-06, |
|
"loss": 0.2513, |
|
"step": 577 |
|
}, |
|
{ |
|
"epoch": 0.8665667166416792, |
|
"grad_norm": 1.5345577792571532, |
|
"learning_rate": 6.040326292676865e-06, |
|
"loss": 0.2474, |
|
"step": 578 |
|
}, |
|
{ |
|
"epoch": 0.8680659670164917, |
|
"grad_norm": 1.6813509135438214, |
|
"learning_rate": 6.028806031901829e-06, |
|
"loss": 0.3529, |
|
"step": 579 |
|
}, |
|
{ |
|
"epoch": 0.8695652173913043, |
|
"grad_norm": 1.3796283386011006, |
|
"learning_rate": 6.0172800652631706e-06, |
|
"loss": 0.2799, |
|
"step": 580 |
|
}, |
|
{ |
|
"epoch": 0.8710644677661169, |
|
"grad_norm": 1.5983920234793207, |
|
"learning_rate": 6.005748456685077e-06, |
|
"loss": 0.2482, |
|
"step": 581 |
|
}, |
|
{ |
|
"epoch": 0.8725637181409296, |
|
"grad_norm": 1.49492444093455, |
|
"learning_rate": 5.994211270123034e-06, |
|
"loss": 0.34, |
|
"step": 582 |
|
}, |
|
{ |
|
"epoch": 0.8740629685157422, |
|
"grad_norm": 1.5280370609487266, |
|
"learning_rate": 5.9826685695634575e-06, |
|
"loss": 0.2707, |
|
"step": 583 |
|
}, |
|
{ |
|
"epoch": 0.8755622188905547, |
|
"grad_norm": 1.4115095606097392, |
|
"learning_rate": 5.971120419023349e-06, |
|
"loss": 0.3001, |
|
"step": 584 |
|
}, |
|
{ |
|
"epoch": 0.8770614692653673, |
|
"grad_norm": 1.0267416900777009, |
|
"learning_rate": 5.959566882549936e-06, |
|
"loss": 0.2554, |
|
"step": 585 |
|
}, |
|
{ |
|
"epoch": 0.8785607196401799, |
|
"grad_norm": 1.4461370111927105, |
|
"learning_rate": 5.948008024220311e-06, |
|
"loss": 0.2667, |
|
"step": 586 |
|
}, |
|
{ |
|
"epoch": 0.8800599700149925, |
|
"grad_norm": 1.2537964237042767, |
|
"learning_rate": 5.936443908141088e-06, |
|
"loss": 0.2505, |
|
"step": 587 |
|
}, |
|
{ |
|
"epoch": 0.881559220389805, |
|
"grad_norm": 1.210713323399033, |
|
"learning_rate": 5.924874598448038e-06, |
|
"loss": 0.2892, |
|
"step": 588 |
|
}, |
|
{ |
|
"epoch": 0.8830584707646177, |
|
"grad_norm": 1.2982578846962085, |
|
"learning_rate": 5.913300159305741e-06, |
|
"loss": 0.296, |
|
"step": 589 |
|
}, |
|
{ |
|
"epoch": 0.8845577211394303, |
|
"grad_norm": 1.3920112691596835, |
|
"learning_rate": 5.901720654907217e-06, |
|
"loss": 0.2823, |
|
"step": 590 |
|
}, |
|
{ |
|
"epoch": 0.8860569715142429, |
|
"grad_norm": 1.4144356205847997, |
|
"learning_rate": 5.8901361494735874e-06, |
|
"loss": 0.2838, |
|
"step": 591 |
|
}, |
|
{ |
|
"epoch": 0.8875562218890555, |
|
"grad_norm": 1.8116464085465231, |
|
"learning_rate": 5.878546707253704e-06, |
|
"loss": 0.3302, |
|
"step": 592 |
|
}, |
|
{ |
|
"epoch": 0.889055472263868, |
|
"grad_norm": 1.6207641114352942, |
|
"learning_rate": 5.8669523925238e-06, |
|
"loss": 0.326, |
|
"step": 593 |
|
}, |
|
{ |
|
"epoch": 0.8905547226386806, |
|
"grad_norm": 1.2974508907320734, |
|
"learning_rate": 5.855353269587134e-06, |
|
"loss": 0.2569, |
|
"step": 594 |
|
}, |
|
{ |
|
"epoch": 0.8920539730134932, |
|
"grad_norm": 1.3680649334588744, |
|
"learning_rate": 5.843749402773629e-06, |
|
"loss": 0.2908, |
|
"step": 595 |
|
}, |
|
{ |
|
"epoch": 0.8935532233883059, |
|
"grad_norm": 1.4273879867863084, |
|
"learning_rate": 5.8321408564395165e-06, |
|
"loss": 0.2769, |
|
"step": 596 |
|
}, |
|
{ |
|
"epoch": 0.8950524737631185, |
|
"grad_norm": 1.4582034105361414, |
|
"learning_rate": 5.820527694966988e-06, |
|
"loss": 0.2787, |
|
"step": 597 |
|
}, |
|
{ |
|
"epoch": 0.896551724137931, |
|
"grad_norm": 1.402381583836049, |
|
"learning_rate": 5.808909982763825e-06, |
|
"loss": 0.3137, |
|
"step": 598 |
|
}, |
|
{ |
|
"epoch": 0.8980509745127436, |
|
"grad_norm": 1.3452237620825134, |
|
"learning_rate": 5.797287784263047e-06, |
|
"loss": 0.2981, |
|
"step": 599 |
|
}, |
|
{ |
|
"epoch": 0.8995502248875562, |
|
"grad_norm": 1.520231591052473, |
|
"learning_rate": 5.785661163922558e-06, |
|
"loss": 0.3367, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.8995502248875562, |
|
"eval_loss": 0.29206138849258423, |
|
"eval_runtime": 9.5457, |
|
"eval_samples_per_second": 5.657, |
|
"eval_steps_per_second": 1.467, |
|
"step": 600 |
|
}, |
|
{ |
|
"epoch": 0.9010494752623688, |
|
"grad_norm": 1.6572680676451537, |
|
"learning_rate": 5.774030186224786e-06, |
|
"loss": 0.3058, |
|
"step": 601 |
|
}, |
|
{ |
|
"epoch": 0.9025487256371814, |
|
"grad_norm": 1.185723814962086, |
|
"learning_rate": 5.762394915676325e-06, |
|
"loss": 0.2436, |
|
"step": 602 |
|
}, |
|
{ |
|
"epoch": 0.904047976011994, |
|
"grad_norm": 1.39517738031953, |
|
"learning_rate": 5.750755416807575e-06, |
|
"loss": 0.2776, |
|
"step": 603 |
|
}, |
|
{ |
|
"epoch": 0.9055472263868066, |
|
"grad_norm": 1.4495451198548506, |
|
"learning_rate": 5.7391117541723914e-06, |
|
"loss": 0.2422, |
|
"step": 604 |
|
}, |
|
{ |
|
"epoch": 0.9070464767616192, |
|
"grad_norm": 1.6099728259980806, |
|
"learning_rate": 5.727463992347719e-06, |
|
"loss": 0.3282, |
|
"step": 605 |
|
}, |
|
{ |
|
"epoch": 0.9085457271364318, |
|
"grad_norm": 1.2759026133924352, |
|
"learning_rate": 5.715812195933238e-06, |
|
"loss": 0.2487, |
|
"step": 606 |
|
}, |
|
{ |
|
"epoch": 0.9100449775112444, |
|
"grad_norm": 1.5890512567327135, |
|
"learning_rate": 5.704156429551004e-06, |
|
"loss": 0.3463, |
|
"step": 607 |
|
}, |
|
{ |
|
"epoch": 0.9115442278860569, |
|
"grad_norm": 1.2349726824993277, |
|
"learning_rate": 5.692496757845092e-06, |
|
"loss": 0.2183, |
|
"step": 608 |
|
}, |
|
{ |
|
"epoch": 0.9130434782608695, |
|
"grad_norm": 1.5231664312018351, |
|
"learning_rate": 5.680833245481234e-06, |
|
"loss": 0.3024, |
|
"step": 609 |
|
}, |
|
{ |
|
"epoch": 0.9145427286356822, |
|
"grad_norm": 1.7511007657264783, |
|
"learning_rate": 5.6691659571464655e-06, |
|
"loss": 0.3781, |
|
"step": 610 |
|
}, |
|
{ |
|
"epoch": 0.9160419790104948, |
|
"grad_norm": 1.262964038541314, |
|
"learning_rate": 5.657494957548761e-06, |
|
"loss": 0.2968, |
|
"step": 611 |
|
}, |
|
{ |
|
"epoch": 0.9175412293853074, |
|
"grad_norm": 1.5010815792926107, |
|
"learning_rate": 5.645820311416681e-06, |
|
"loss": 0.2283, |
|
"step": 612 |
|
}, |
|
{ |
|
"epoch": 0.9190404797601199, |
|
"grad_norm": 1.4230405823122065, |
|
"learning_rate": 5.63414208349901e-06, |
|
"loss": 0.3058, |
|
"step": 613 |
|
}, |
|
{ |
|
"epoch": 0.9205397301349325, |
|
"grad_norm": 1.5061316729401288, |
|
"learning_rate": 5.622460338564393e-06, |
|
"loss": 0.2733, |
|
"step": 614 |
|
}, |
|
{ |
|
"epoch": 0.9220389805097451, |
|
"grad_norm": 1.3792177007935027, |
|
"learning_rate": 5.610775141400986e-06, |
|
"loss": 0.2475, |
|
"step": 615 |
|
}, |
|
{ |
|
"epoch": 0.9235382308845578, |
|
"grad_norm": 1.3503323435812955, |
|
"learning_rate": 5.599086556816089e-06, |
|
"loss": 0.2866, |
|
"step": 616 |
|
}, |
|
{ |
|
"epoch": 0.9250374812593704, |
|
"grad_norm": 1.2673189794772455, |
|
"learning_rate": 5.587394649635789e-06, |
|
"loss": 0.2957, |
|
"step": 617 |
|
}, |
|
{ |
|
"epoch": 0.9265367316341829, |
|
"grad_norm": 1.4641577524105518, |
|
"learning_rate": 5.575699484704599e-06, |
|
"loss": 0.2157, |
|
"step": 618 |
|
}, |
|
{ |
|
"epoch": 0.9280359820089955, |
|
"grad_norm": 1.3670619366737466, |
|
"learning_rate": 5.564001126885106e-06, |
|
"loss": 0.2519, |
|
"step": 619 |
|
}, |
|
{ |
|
"epoch": 0.9295352323838081, |
|
"grad_norm": 1.3066189579332126, |
|
"learning_rate": 5.552299641057596e-06, |
|
"loss": 0.3019, |
|
"step": 620 |
|
}, |
|
{ |
|
"epoch": 0.9310344827586207, |
|
"grad_norm": 1.6654792134736487, |
|
"learning_rate": 5.540595092119709e-06, |
|
"loss": 0.2605, |
|
"step": 621 |
|
}, |
|
{ |
|
"epoch": 0.9325337331334332, |
|
"grad_norm": 1.6688624360319917, |
|
"learning_rate": 5.5288875449860745e-06, |
|
"loss": 0.2787, |
|
"step": 622 |
|
}, |
|
{ |
|
"epoch": 0.9340329835082459, |
|
"grad_norm": 1.3510953241995245, |
|
"learning_rate": 5.517177064587945e-06, |
|
"loss": 0.2423, |
|
"step": 623 |
|
}, |
|
{ |
|
"epoch": 0.9355322338830585, |
|
"grad_norm": 1.124714667302018, |
|
"learning_rate": 5.505463715872846e-06, |
|
"loss": 0.235, |
|
"step": 624 |
|
}, |
|
{ |
|
"epoch": 0.9370314842578711, |
|
"grad_norm": 1.4801862748049064, |
|
"learning_rate": 5.493747563804211e-06, |
|
"loss": 0.281, |
|
"step": 625 |
|
}, |
|
{ |
|
"epoch": 0.9385307346326837, |
|
"grad_norm": 1.1040049089982813, |
|
"learning_rate": 5.482028673361015e-06, |
|
"loss": 0.2641, |
|
"step": 626 |
|
}, |
|
{ |
|
"epoch": 0.9400299850074962, |
|
"grad_norm": 1.340016529324993, |
|
"learning_rate": 5.470307109537427e-06, |
|
"loss": 0.2946, |
|
"step": 627 |
|
}, |
|
{ |
|
"epoch": 0.9415292353823088, |
|
"grad_norm": 1.430555729700914, |
|
"learning_rate": 5.45858293734244e-06, |
|
"loss": 0.2422, |
|
"step": 628 |
|
}, |
|
{ |
|
"epoch": 0.9430284857571214, |
|
"grad_norm": 1.5769826874632167, |
|
"learning_rate": 5.446856221799515e-06, |
|
"loss": 0.3112, |
|
"step": 629 |
|
}, |
|
{ |
|
"epoch": 0.9445277361319341, |
|
"grad_norm": 1.4620846536099739, |
|
"learning_rate": 5.435127027946215e-06, |
|
"loss": 0.2896, |
|
"step": 630 |
|
}, |
|
{ |
|
"epoch": 0.9460269865067467, |
|
"grad_norm": 1.3737452607442782, |
|
"learning_rate": 5.423395420833853e-06, |
|
"loss": 0.2799, |
|
"step": 631 |
|
}, |
|
{ |
|
"epoch": 0.9475262368815592, |
|
"grad_norm": 1.7102252286344872, |
|
"learning_rate": 5.411661465527123e-06, |
|
"loss": 0.3204, |
|
"step": 632 |
|
}, |
|
{ |
|
"epoch": 0.9490254872563718, |
|
"grad_norm": 1.4271957765913807, |
|
"learning_rate": 5.39992522710374e-06, |
|
"loss": 0.2968, |
|
"step": 633 |
|
}, |
|
{ |
|
"epoch": 0.9505247376311844, |
|
"grad_norm": 1.6668553497999354, |
|
"learning_rate": 5.38818677065409e-06, |
|
"loss": 0.326, |
|
"step": 634 |
|
}, |
|
{ |
|
"epoch": 0.952023988005997, |
|
"grad_norm": 1.3656853429793527, |
|
"learning_rate": 5.376446161280851e-06, |
|
"loss": 0.2679, |
|
"step": 635 |
|
}, |
|
{ |
|
"epoch": 0.9535232383808095, |
|
"grad_norm": 1.3480865579714252, |
|
"learning_rate": 5.364703464098645e-06, |
|
"loss": 0.306, |
|
"step": 636 |
|
}, |
|
{ |
|
"epoch": 0.9550224887556222, |
|
"grad_norm": 1.4812025245339286, |
|
"learning_rate": 5.352958744233673e-06, |
|
"loss": 0.2744, |
|
"step": 637 |
|
}, |
|
{ |
|
"epoch": 0.9565217391304348, |
|
"grad_norm": 1.2805349985156824, |
|
"learning_rate": 5.341212066823356e-06, |
|
"loss": 0.3079, |
|
"step": 638 |
|
}, |
|
{ |
|
"epoch": 0.9580209895052474, |
|
"grad_norm": 1.4695819034171325, |
|
"learning_rate": 5.329463497015969e-06, |
|
"loss": 0.2546, |
|
"step": 639 |
|
}, |
|
{ |
|
"epoch": 0.95952023988006, |
|
"grad_norm": 1.6943571642681556, |
|
"learning_rate": 5.317713099970283e-06, |
|
"loss": 0.2819, |
|
"step": 640 |
|
}, |
|
{ |
|
"epoch": 0.9610194902548725, |
|
"grad_norm": 1.0308594634641264, |
|
"learning_rate": 5.305960940855205e-06, |
|
"loss": 0.2323, |
|
"step": 641 |
|
}, |
|
{ |
|
"epoch": 0.9625187406296851, |
|
"grad_norm": 1.5361826846459423, |
|
"learning_rate": 5.294207084849412e-06, |
|
"loss": 0.2909, |
|
"step": 642 |
|
}, |
|
{ |
|
"epoch": 0.9640179910044977, |
|
"grad_norm": 1.4262667808104355, |
|
"learning_rate": 5.282451597140994e-06, |
|
"loss": 0.28, |
|
"step": 643 |
|
}, |
|
{ |
|
"epoch": 0.9655172413793104, |
|
"grad_norm": 1.726419979057364, |
|
"learning_rate": 5.270694542927089e-06, |
|
"loss": 0.3445, |
|
"step": 644 |
|
}, |
|
{ |
|
"epoch": 0.967016491754123, |
|
"grad_norm": 1.6777590573778045, |
|
"learning_rate": 5.258935987413524e-06, |
|
"loss": 0.312, |
|
"step": 645 |
|
}, |
|
{ |
|
"epoch": 0.9685157421289355, |
|
"grad_norm": 1.2723822863902055, |
|
"learning_rate": 5.247175995814452e-06, |
|
"loss": 0.2866, |
|
"step": 646 |
|
}, |
|
{ |
|
"epoch": 0.9700149925037481, |
|
"grad_norm": 1.4904091995112534, |
|
"learning_rate": 5.235414633351992e-06, |
|
"loss": 0.2914, |
|
"step": 647 |
|
}, |
|
{ |
|
"epoch": 0.9715142428785607, |
|
"grad_norm": 1.292447279724776, |
|
"learning_rate": 5.223651965255864e-06, |
|
"loss": 0.2833, |
|
"step": 648 |
|
}, |
|
{ |
|
"epoch": 0.9730134932533733, |
|
"grad_norm": 1.3044142845118825, |
|
"learning_rate": 5.211888056763029e-06, |
|
"loss": 0.219, |
|
"step": 649 |
|
}, |
|
{ |
|
"epoch": 0.974512743628186, |
|
"grad_norm": 1.442499937194901, |
|
"learning_rate": 5.20012297311733e-06, |
|
"loss": 0.267, |
|
"step": 650 |
|
}, |
|
{ |
|
"epoch": 0.9760119940029985, |
|
"grad_norm": 1.068811635820371, |
|
"learning_rate": 5.188356779569125e-06, |
|
"loss": 0.2681, |
|
"step": 651 |
|
}, |
|
{ |
|
"epoch": 0.9775112443778111, |
|
"grad_norm": 1.4833078434438498, |
|
"learning_rate": 5.176589541374929e-06, |
|
"loss": 0.3169, |
|
"step": 652 |
|
}, |
|
{ |
|
"epoch": 0.9790104947526237, |
|
"grad_norm": 1.588644516704138, |
|
"learning_rate": 5.164821323797051e-06, |
|
"loss": 0.3368, |
|
"step": 653 |
|
}, |
|
{ |
|
"epoch": 0.9805097451274363, |
|
"grad_norm": 1.4226545240627448, |
|
"learning_rate": 5.1530521921032305e-06, |
|
"loss": 0.27, |
|
"step": 654 |
|
}, |
|
{ |
|
"epoch": 0.9820089955022488, |
|
"grad_norm": 1.3572773713708628, |
|
"learning_rate": 5.141282211566276e-06, |
|
"loss": 0.2758, |
|
"step": 655 |
|
}, |
|
{ |
|
"epoch": 0.9835082458770614, |
|
"grad_norm": 1.789911225991221, |
|
"learning_rate": 5.129511447463705e-06, |
|
"loss": 0.2953, |
|
"step": 656 |
|
}, |
|
{ |
|
"epoch": 0.9850074962518741, |
|
"grad_norm": 1.5937283093067185, |
|
"learning_rate": 5.117739965077382e-06, |
|
"loss": 0.2746, |
|
"step": 657 |
|
}, |
|
{ |
|
"epoch": 0.9865067466266867, |
|
"grad_norm": 1.5824857897997608, |
|
"learning_rate": 5.105967829693155e-06, |
|
"loss": 0.3011, |
|
"step": 658 |
|
}, |
|
{ |
|
"epoch": 0.9880059970014993, |
|
"grad_norm": 1.564631220436148, |
|
"learning_rate": 5.0941951066004906e-06, |
|
"loss": 0.3024, |
|
"step": 659 |
|
}, |
|
{ |
|
"epoch": 0.9895052473763118, |
|
"grad_norm": 1.4723213190521782, |
|
"learning_rate": 5.082421861092116e-06, |
|
"loss": 0.3513, |
|
"step": 660 |
|
}, |
|
{ |
|
"epoch": 0.9910044977511244, |
|
"grad_norm": 1.3246585112656946, |
|
"learning_rate": 5.0706481584636605e-06, |
|
"loss": 0.2978, |
|
"step": 661 |
|
}, |
|
{ |
|
"epoch": 0.992503748125937, |
|
"grad_norm": 1.294485905427711, |
|
"learning_rate": 5.0588740640132805e-06, |
|
"loss": 0.3248, |
|
"step": 662 |
|
}, |
|
{ |
|
"epoch": 0.9940029985007496, |
|
"grad_norm": 1.371441396786382, |
|
"learning_rate": 5.047099643041312e-06, |
|
"loss": 0.2343, |
|
"step": 663 |
|
}, |
|
{ |
|
"epoch": 0.9955022488755623, |
|
"grad_norm": 1.232279769034759, |
|
"learning_rate": 5.0353249608499e-06, |
|
"loss": 0.2847, |
|
"step": 664 |
|
}, |
|
{ |
|
"epoch": 0.9970014992503748, |
|
"grad_norm": 1.5408042440355365, |
|
"learning_rate": 5.023550082742637e-06, |
|
"loss": 0.4079, |
|
"step": 665 |
|
}, |
|
{ |
|
"epoch": 0.9985007496251874, |
|
"grad_norm": 1.5975441677311908, |
|
"learning_rate": 5.011775074024202e-06, |
|
"loss": 0.3385, |
|
"step": 666 |
|
}, |
|
{ |
|
"epoch": 1.0, |
|
"grad_norm": 1.1717472846174595, |
|
"learning_rate": 5e-06, |
|
"loss": 0.1856, |
|
"step": 667 |
|
}, |
|
{ |
|
"epoch": 1.0014992503748126, |
|
"grad_norm": 1.2245141748896478, |
|
"learning_rate": 4.988224925975799e-06, |
|
"loss": 0.1454, |
|
"step": 668 |
|
}, |
|
{ |
|
"epoch": 1.0029985007496252, |
|
"grad_norm": 1.113035633567804, |
|
"learning_rate": 4.976449917257365e-06, |
|
"loss": 0.1256, |
|
"step": 669 |
|
}, |
|
{ |
|
"epoch": 1.0044977511244377, |
|
"grad_norm": 0.9840302945774542, |
|
"learning_rate": 4.964675039150102e-06, |
|
"loss": 0.1383, |
|
"step": 670 |
|
}, |
|
{ |
|
"epoch": 1.0059970014992503, |
|
"grad_norm": 1.0108761114865439, |
|
"learning_rate": 4.952900356958689e-06, |
|
"loss": 0.1429, |
|
"step": 671 |
|
}, |
|
{ |
|
"epoch": 1.0074962518740629, |
|
"grad_norm": 1.3303373350497616, |
|
"learning_rate": 4.941125935986721e-06, |
|
"loss": 0.1636, |
|
"step": 672 |
|
}, |
|
{ |
|
"epoch": 1.0089955022488755, |
|
"grad_norm": 1.0618850415016292, |
|
"learning_rate": 4.929351841536342e-06, |
|
"loss": 0.122, |
|
"step": 673 |
|
}, |
|
{ |
|
"epoch": 1.0104947526236883, |
|
"grad_norm": 1.0597281702360088, |
|
"learning_rate": 4.917578138907884e-06, |
|
"loss": 0.1423, |
|
"step": 674 |
|
}, |
|
{ |
|
"epoch": 1.0119940029985008, |
|
"grad_norm": 1.4352697889938788, |
|
"learning_rate": 4.90580489339951e-06, |
|
"loss": 0.1556, |
|
"step": 675 |
|
}, |
|
{ |
|
"epoch": 1.0134932533733134, |
|
"grad_norm": 1.555494683142059, |
|
"learning_rate": 4.894032170306846e-06, |
|
"loss": 0.134, |
|
"step": 676 |
|
}, |
|
{ |
|
"epoch": 1.014992503748126, |
|
"grad_norm": 1.370821379224341, |
|
"learning_rate": 4.882260034922618e-06, |
|
"loss": 0.133, |
|
"step": 677 |
|
}, |
|
{ |
|
"epoch": 1.0164917541229386, |
|
"grad_norm": 1.4335702429296835, |
|
"learning_rate": 4.870488552536296e-06, |
|
"loss": 0.1298, |
|
"step": 678 |
|
}, |
|
{ |
|
"epoch": 1.0179910044977512, |
|
"grad_norm": 1.52947611793239, |
|
"learning_rate": 4.858717788433725e-06, |
|
"loss": 0.1861, |
|
"step": 679 |
|
}, |
|
{ |
|
"epoch": 1.0194902548725637, |
|
"grad_norm": 1.4092638210180686, |
|
"learning_rate": 4.846947807896771e-06, |
|
"loss": 0.1511, |
|
"step": 680 |
|
}, |
|
{ |
|
"epoch": 1.0209895052473763, |
|
"grad_norm": 1.7306575478915662, |
|
"learning_rate": 4.83517867620295e-06, |
|
"loss": 0.1444, |
|
"step": 681 |
|
}, |
|
{ |
|
"epoch": 1.0224887556221889, |
|
"grad_norm": 1.1733329564563357, |
|
"learning_rate": 4.823410458625072e-06, |
|
"loss": 0.1043, |
|
"step": 682 |
|
}, |
|
{ |
|
"epoch": 1.0239880059970015, |
|
"grad_norm": 1.322557279313773, |
|
"learning_rate": 4.811643220430877e-06, |
|
"loss": 0.1202, |
|
"step": 683 |
|
}, |
|
{ |
|
"epoch": 1.025487256371814, |
|
"grad_norm": 1.62584402069839, |
|
"learning_rate": 4.7998770268826726e-06, |
|
"loss": 0.1852, |
|
"step": 684 |
|
}, |
|
{ |
|
"epoch": 1.0269865067466266, |
|
"grad_norm": 1.417400006454122, |
|
"learning_rate": 4.788111943236973e-06, |
|
"loss": 0.1225, |
|
"step": 685 |
|
}, |
|
{ |
|
"epoch": 1.0284857571214392, |
|
"grad_norm": 1.2217123888670718, |
|
"learning_rate": 4.7763480347441395e-06, |
|
"loss": 0.1416, |
|
"step": 686 |
|
}, |
|
{ |
|
"epoch": 1.0299850074962518, |
|
"grad_norm": 1.2785152245174605, |
|
"learning_rate": 4.7645853666480104e-06, |
|
"loss": 0.1338, |
|
"step": 687 |
|
}, |
|
{ |
|
"epoch": 1.0314842578710646, |
|
"grad_norm": 1.3155022956274116, |
|
"learning_rate": 4.752824004185548e-06, |
|
"loss": 0.1599, |
|
"step": 688 |
|
}, |
|
{ |
|
"epoch": 1.0329835082458771, |
|
"grad_norm": 1.0631267096170913, |
|
"learning_rate": 4.7410640125864785e-06, |
|
"loss": 0.0865, |
|
"step": 689 |
|
}, |
|
{ |
|
"epoch": 1.0344827586206897, |
|
"grad_norm": 1.3534102460997244, |
|
"learning_rate": 4.729305457072913e-06, |
|
"loss": 0.1871, |
|
"step": 690 |
|
}, |
|
{ |
|
"epoch": 1.0359820089955023, |
|
"grad_norm": 1.1008608232400132, |
|
"learning_rate": 4.717548402859008e-06, |
|
"loss": 0.1382, |
|
"step": 691 |
|
}, |
|
{ |
|
"epoch": 1.0374812593703149, |
|
"grad_norm": 1.0930736447370444, |
|
"learning_rate": 4.7057929151505895e-06, |
|
"loss": 0.1673, |
|
"step": 692 |
|
}, |
|
{ |
|
"epoch": 1.0389805097451275, |
|
"grad_norm": 1.2562782943155963, |
|
"learning_rate": 4.694039059144797e-06, |
|
"loss": 0.1536, |
|
"step": 693 |
|
}, |
|
{ |
|
"epoch": 1.04047976011994, |
|
"grad_norm": 1.17150972869752, |
|
"learning_rate": 4.6822869000297185e-06, |
|
"loss": 0.1241, |
|
"step": 694 |
|
}, |
|
{ |
|
"epoch": 1.0419790104947526, |
|
"grad_norm": 1.3078257332527907, |
|
"learning_rate": 4.670536502984033e-06, |
|
"loss": 0.127, |
|
"step": 695 |
|
}, |
|
{ |
|
"epoch": 1.0434782608695652, |
|
"grad_norm": 1.20399047700232, |
|
"learning_rate": 4.6587879331766465e-06, |
|
"loss": 0.1649, |
|
"step": 696 |
|
}, |
|
{ |
|
"epoch": 1.0449775112443778, |
|
"grad_norm": 1.2798756895600847, |
|
"learning_rate": 4.647041255766329e-06, |
|
"loss": 0.1569, |
|
"step": 697 |
|
}, |
|
{ |
|
"epoch": 1.0464767616191903, |
|
"grad_norm": 1.2836422968151344, |
|
"learning_rate": 4.6352965359013576e-06, |
|
"loss": 0.1531, |
|
"step": 698 |
|
}, |
|
{ |
|
"epoch": 1.047976011994003, |
|
"grad_norm": 1.2656982740498037, |
|
"learning_rate": 4.623553838719151e-06, |
|
"loss": 0.1382, |
|
"step": 699 |
|
}, |
|
{ |
|
"epoch": 1.0494752623688155, |
|
"grad_norm": 1.1710073058457482, |
|
"learning_rate": 4.611813229345911e-06, |
|
"loss": 0.1284, |
|
"step": 700 |
|
}, |
|
{ |
|
"epoch": 1.050974512743628, |
|
"grad_norm": 1.2173427215843842, |
|
"learning_rate": 4.6000747728962606e-06, |
|
"loss": 0.1451, |
|
"step": 701 |
|
}, |
|
{ |
|
"epoch": 1.0524737631184409, |
|
"grad_norm": 1.3085766303922433, |
|
"learning_rate": 4.588338534472878e-06, |
|
"loss": 0.1634, |
|
"step": 702 |
|
}, |
|
{ |
|
"epoch": 1.0539730134932535, |
|
"grad_norm": 1.294598563303439, |
|
"learning_rate": 4.576604579166147e-06, |
|
"loss": 0.1555, |
|
"step": 703 |
|
}, |
|
{ |
|
"epoch": 1.055472263868066, |
|
"grad_norm": 1.3608846742813256, |
|
"learning_rate": 4.564872972053786e-06, |
|
"loss": 0.137, |
|
"step": 704 |
|
}, |
|
{ |
|
"epoch": 1.0569715142428786, |
|
"grad_norm": 1.3254766227344228, |
|
"learning_rate": 4.553143778200486e-06, |
|
"loss": 0.1928, |
|
"step": 705 |
|
}, |
|
{ |
|
"epoch": 1.0584707646176912, |
|
"grad_norm": 1.4520394103397216, |
|
"learning_rate": 4.541417062657561e-06, |
|
"loss": 0.1295, |
|
"step": 706 |
|
}, |
|
{ |
|
"epoch": 1.0599700149925038, |
|
"grad_norm": 1.4398136951048757, |
|
"learning_rate": 4.529692890462574e-06, |
|
"loss": 0.1554, |
|
"step": 707 |
|
}, |
|
{ |
|
"epoch": 1.0614692653673163, |
|
"grad_norm": 1.1500875677970415, |
|
"learning_rate": 4.5179713266389866e-06, |
|
"loss": 0.1238, |
|
"step": 708 |
|
}, |
|
{ |
|
"epoch": 1.062968515742129, |
|
"grad_norm": 1.264839059509746, |
|
"learning_rate": 4.50625243619579e-06, |
|
"loss": 0.0976, |
|
"step": 709 |
|
}, |
|
{ |
|
"epoch": 1.0644677661169415, |
|
"grad_norm": 1.411189676587246, |
|
"learning_rate": 4.494536284127155e-06, |
|
"loss": 0.157, |
|
"step": 710 |
|
}, |
|
{ |
|
"epoch": 1.065967016491754, |
|
"grad_norm": 1.3419565690016924, |
|
"learning_rate": 4.4828229354120565e-06, |
|
"loss": 0.116, |
|
"step": 711 |
|
}, |
|
{ |
|
"epoch": 1.0674662668665666, |
|
"grad_norm": 1.4683125069720693, |
|
"learning_rate": 4.471112455013928e-06, |
|
"loss": 0.1419, |
|
"step": 712 |
|
}, |
|
{ |
|
"epoch": 1.0689655172413792, |
|
"grad_norm": 1.2795421939766032, |
|
"learning_rate": 4.459404907880293e-06, |
|
"loss": 0.1251, |
|
"step": 713 |
|
}, |
|
{ |
|
"epoch": 1.0704647676161918, |
|
"grad_norm": 1.4847504711631283, |
|
"learning_rate": 4.447700358942407e-06, |
|
"loss": 0.206, |
|
"step": 714 |
|
}, |
|
{ |
|
"epoch": 1.0719640179910046, |
|
"grad_norm": 1.4538062911622482, |
|
"learning_rate": 4.435998873114895e-06, |
|
"loss": 0.1442, |
|
"step": 715 |
|
}, |
|
{ |
|
"epoch": 1.0734632683658172, |
|
"grad_norm": 1.4008873503791266, |
|
"learning_rate": 4.424300515295401e-06, |
|
"loss": 0.1526, |
|
"step": 716 |
|
}, |
|
{ |
|
"epoch": 1.0749625187406298, |
|
"grad_norm": 1.347711770180375, |
|
"learning_rate": 4.412605350364213e-06, |
|
"loss": 0.1427, |
|
"step": 717 |
|
}, |
|
{ |
|
"epoch": 1.0764617691154423, |
|
"grad_norm": 1.3360204372994373, |
|
"learning_rate": 4.400913443183913e-06, |
|
"loss": 0.1669, |
|
"step": 718 |
|
}, |
|
{ |
|
"epoch": 1.077961019490255, |
|
"grad_norm": 1.414970873310588, |
|
"learning_rate": 4.389224858599015e-06, |
|
"loss": 0.1423, |
|
"step": 719 |
|
}, |
|
{ |
|
"epoch": 1.0794602698650675, |
|
"grad_norm": 1.1304666933798686, |
|
"learning_rate": 4.377539661435608e-06, |
|
"loss": 0.1039, |
|
"step": 720 |
|
}, |
|
{ |
|
"epoch": 1.08095952023988, |
|
"grad_norm": 1.290125072577748, |
|
"learning_rate": 4.365857916500991e-06, |
|
"loss": 0.1141, |
|
"step": 721 |
|
}, |
|
{ |
|
"epoch": 1.0824587706146926, |
|
"grad_norm": 1.3637566768919038, |
|
"learning_rate": 4.35417968858332e-06, |
|
"loss": 0.1979, |
|
"step": 722 |
|
}, |
|
{ |
|
"epoch": 1.0839580209895052, |
|
"grad_norm": 1.1222997413654716, |
|
"learning_rate": 4.3425050424512405e-06, |
|
"loss": 0.1318, |
|
"step": 723 |
|
}, |
|
{ |
|
"epoch": 1.0854572713643178, |
|
"grad_norm": 1.3007268919799113, |
|
"learning_rate": 4.330834042853537e-06, |
|
"loss": 0.1541, |
|
"step": 724 |
|
}, |
|
{ |
|
"epoch": 1.0869565217391304, |
|
"grad_norm": 1.5410675815308512, |
|
"learning_rate": 4.319166754518768e-06, |
|
"loss": 0.1427, |
|
"step": 725 |
|
}, |
|
{ |
|
"epoch": 1.088455772113943, |
|
"grad_norm": 1.3800795797956538, |
|
"learning_rate": 4.30750324215491e-06, |
|
"loss": 0.1304, |
|
"step": 726 |
|
}, |
|
{ |
|
"epoch": 1.0899550224887555, |
|
"grad_norm": 1.7373486522086135, |
|
"learning_rate": 4.295843570448998e-06, |
|
"loss": 0.1774, |
|
"step": 727 |
|
}, |
|
{ |
|
"epoch": 1.0914542728635683, |
|
"grad_norm": 1.2144628359769445, |
|
"learning_rate": 4.284187804066764e-06, |
|
"loss": 0.1455, |
|
"step": 728 |
|
}, |
|
{ |
|
"epoch": 1.092953523238381, |
|
"grad_norm": 1.3030437140567384, |
|
"learning_rate": 4.272536007652281e-06, |
|
"loss": 0.1348, |
|
"step": 729 |
|
}, |
|
{ |
|
"epoch": 1.0944527736131935, |
|
"grad_norm": 1.2388238391414266, |
|
"learning_rate": 4.260888245827608e-06, |
|
"loss": 0.1254, |
|
"step": 730 |
|
}, |
|
{ |
|
"epoch": 1.095952023988006, |
|
"grad_norm": 1.3662430679102924, |
|
"learning_rate": 4.249244583192425e-06, |
|
"loss": 0.1819, |
|
"step": 731 |
|
}, |
|
{ |
|
"epoch": 1.0974512743628186, |
|
"grad_norm": 1.222181566924626, |
|
"learning_rate": 4.237605084323676e-06, |
|
"loss": 0.1413, |
|
"step": 732 |
|
}, |
|
{ |
|
"epoch": 1.0989505247376312, |
|
"grad_norm": 1.5381720302103399, |
|
"learning_rate": 4.225969813775215e-06, |
|
"loss": 0.1324, |
|
"step": 733 |
|
}, |
|
{ |
|
"epoch": 1.1004497751124438, |
|
"grad_norm": 1.2618776650323837, |
|
"learning_rate": 4.214338836077444e-06, |
|
"loss": 0.137, |
|
"step": 734 |
|
}, |
|
{ |
|
"epoch": 1.1019490254872564, |
|
"grad_norm": 1.4199292581983365, |
|
"learning_rate": 4.202712215736955e-06, |
|
"loss": 0.1899, |
|
"step": 735 |
|
}, |
|
{ |
|
"epoch": 1.103448275862069, |
|
"grad_norm": 1.4016758647648953, |
|
"learning_rate": 4.191090017236177e-06, |
|
"loss": 0.1501, |
|
"step": 736 |
|
}, |
|
{ |
|
"epoch": 1.1049475262368815, |
|
"grad_norm": 1.2803781801023755, |
|
"learning_rate": 4.1794723050330125e-06, |
|
"loss": 0.1513, |
|
"step": 737 |
|
}, |
|
{ |
|
"epoch": 1.106446776611694, |
|
"grad_norm": 1.2124474695852974, |
|
"learning_rate": 4.167859143560484e-06, |
|
"loss": 0.1434, |
|
"step": 738 |
|
}, |
|
{ |
|
"epoch": 1.1079460269865067, |
|
"grad_norm": 1.1693180350237993, |
|
"learning_rate": 4.1562505972263735e-06, |
|
"loss": 0.105, |
|
"step": 739 |
|
}, |
|
{ |
|
"epoch": 1.1094452773613193, |
|
"grad_norm": 1.266898243587645, |
|
"learning_rate": 4.144646730412868e-06, |
|
"loss": 0.1443, |
|
"step": 740 |
|
}, |
|
{ |
|
"epoch": 1.1109445277361318, |
|
"grad_norm": 1.296462545853387, |
|
"learning_rate": 4.133047607476202e-06, |
|
"loss": 0.1606, |
|
"step": 741 |
|
}, |
|
{ |
|
"epoch": 1.1124437781109444, |
|
"grad_norm": 1.1266501020062736, |
|
"learning_rate": 4.121453292746297e-06, |
|
"loss": 0.1617, |
|
"step": 742 |
|
}, |
|
{ |
|
"epoch": 1.1139430284857572, |
|
"grad_norm": 1.1614340793928641, |
|
"learning_rate": 4.109863850526413e-06, |
|
"loss": 0.1605, |
|
"step": 743 |
|
}, |
|
{ |
|
"epoch": 1.1154422788605698, |
|
"grad_norm": 1.4436812287795313, |
|
"learning_rate": 4.098279345092783e-06, |
|
"loss": 0.1492, |
|
"step": 744 |
|
}, |
|
{ |
|
"epoch": 1.1169415292353824, |
|
"grad_norm": 1.3218417009167271, |
|
"learning_rate": 4.086699840694262e-06, |
|
"loss": 0.1143, |
|
"step": 745 |
|
}, |
|
{ |
|
"epoch": 1.118440779610195, |
|
"grad_norm": 1.3269621462232215, |
|
"learning_rate": 4.075125401551963e-06, |
|
"loss": 0.1546, |
|
"step": 746 |
|
}, |
|
{ |
|
"epoch": 1.1199400299850075, |
|
"grad_norm": 1.2313134265269736, |
|
"learning_rate": 4.063556091858914e-06, |
|
"loss": 0.1164, |
|
"step": 747 |
|
}, |
|
{ |
|
"epoch": 1.12143928035982, |
|
"grad_norm": 1.3666671026310135, |
|
"learning_rate": 4.051991975779691e-06, |
|
"loss": 0.1404, |
|
"step": 748 |
|
}, |
|
{ |
|
"epoch": 1.1229385307346327, |
|
"grad_norm": 1.5009915078741978, |
|
"learning_rate": 4.040433117450066e-06, |
|
"loss": 0.1644, |
|
"step": 749 |
|
}, |
|
{ |
|
"epoch": 1.1244377811094453, |
|
"grad_norm": 1.4273688853548707, |
|
"learning_rate": 4.0288795809766516e-06, |
|
"loss": 0.1543, |
|
"step": 750 |
|
}, |
|
{ |
|
"epoch": 1.1259370314842578, |
|
"grad_norm": 1.2141199766632593, |
|
"learning_rate": 4.017331430436543e-06, |
|
"loss": 0.124, |
|
"step": 751 |
|
}, |
|
{ |
|
"epoch": 1.1274362818590704, |
|
"grad_norm": 1.4237578114436968, |
|
"learning_rate": 4.005788729876968e-06, |
|
"loss": 0.1836, |
|
"step": 752 |
|
}, |
|
{ |
|
"epoch": 1.128935532233883, |
|
"grad_norm": 1.2206638311513194, |
|
"learning_rate": 3.994251543314925e-06, |
|
"loss": 0.13, |
|
"step": 753 |
|
}, |
|
{ |
|
"epoch": 1.1304347826086956, |
|
"grad_norm": 1.4173756422782844, |
|
"learning_rate": 3.982719934736832e-06, |
|
"loss": 0.1342, |
|
"step": 754 |
|
}, |
|
{ |
|
"epoch": 1.1319340329835081, |
|
"grad_norm": 1.4287245590548212, |
|
"learning_rate": 3.971193968098172e-06, |
|
"loss": 0.1729, |
|
"step": 755 |
|
}, |
|
{ |
|
"epoch": 1.133433283358321, |
|
"grad_norm": 1.2489573382268553, |
|
"learning_rate": 3.959673707323135e-06, |
|
"loss": 0.2043, |
|
"step": 756 |
|
}, |
|
{ |
|
"epoch": 1.1349325337331335, |
|
"grad_norm": 1.232882776585709, |
|
"learning_rate": 3.948159216304269e-06, |
|
"loss": 0.1449, |
|
"step": 757 |
|
}, |
|
{ |
|
"epoch": 1.136431784107946, |
|
"grad_norm": 1.5046861484890637, |
|
"learning_rate": 3.93665055890212e-06, |
|
"loss": 0.1359, |
|
"step": 758 |
|
}, |
|
{ |
|
"epoch": 1.1379310344827587, |
|
"grad_norm": 1.4726306281157244, |
|
"learning_rate": 3.92514779894488e-06, |
|
"loss": 0.134, |
|
"step": 759 |
|
}, |
|
{ |
|
"epoch": 1.1394302848575713, |
|
"grad_norm": 1.2979951757761485, |
|
"learning_rate": 3.9136510002280344e-06, |
|
"loss": 0.1884, |
|
"step": 760 |
|
}, |
|
{ |
|
"epoch": 1.1409295352323838, |
|
"grad_norm": 1.2982290747764529, |
|
"learning_rate": 3.902160226514007e-06, |
|
"loss": 0.1605, |
|
"step": 761 |
|
}, |
|
{ |
|
"epoch": 1.1424287856071964, |
|
"grad_norm": 1.2325236048172896, |
|
"learning_rate": 3.8906755415318045e-06, |
|
"loss": 0.1595, |
|
"step": 762 |
|
}, |
|
{ |
|
"epoch": 1.143928035982009, |
|
"grad_norm": 1.1882494094745506, |
|
"learning_rate": 3.8791970089766665e-06, |
|
"loss": 0.129, |
|
"step": 763 |
|
}, |
|
{ |
|
"epoch": 1.1454272863568216, |
|
"grad_norm": 1.7268601633886274, |
|
"learning_rate": 3.86772469250971e-06, |
|
"loss": 0.1698, |
|
"step": 764 |
|
}, |
|
{ |
|
"epoch": 1.1469265367316341, |
|
"grad_norm": 1.2228776128354193, |
|
"learning_rate": 3.856258655757578e-06, |
|
"loss": 0.1007, |
|
"step": 765 |
|
}, |
|
{ |
|
"epoch": 1.1484257871064467, |
|
"grad_norm": 1.383635860339973, |
|
"learning_rate": 3.844798962312085e-06, |
|
"loss": 0.1665, |
|
"step": 766 |
|
}, |
|
{ |
|
"epoch": 1.1499250374812593, |
|
"grad_norm": 1.2720059341262018, |
|
"learning_rate": 3.833345675729863e-06, |
|
"loss": 0.1345, |
|
"step": 767 |
|
}, |
|
{ |
|
"epoch": 1.1514242878560719, |
|
"grad_norm": 1.3231306715546778, |
|
"learning_rate": 3.821898859532013e-06, |
|
"loss": 0.129, |
|
"step": 768 |
|
}, |
|
{ |
|
"epoch": 1.1529235382308847, |
|
"grad_norm": 1.2768309771137538, |
|
"learning_rate": 3.8104585772037493e-06, |
|
"loss": 0.1433, |
|
"step": 769 |
|
}, |
|
{ |
|
"epoch": 1.1544227886056972, |
|
"grad_norm": 1.2978614906648258, |
|
"learning_rate": 3.7990248921940485e-06, |
|
"loss": 0.1528, |
|
"step": 770 |
|
}, |
|
{ |
|
"epoch": 1.1559220389805098, |
|
"grad_norm": 1.1227863019684725, |
|
"learning_rate": 3.787597867915303e-06, |
|
"loss": 0.1352, |
|
"step": 771 |
|
}, |
|
{ |
|
"epoch": 1.1574212893553224, |
|
"grad_norm": 1.4774070389173044, |
|
"learning_rate": 3.7761775677429567e-06, |
|
"loss": 0.1518, |
|
"step": 772 |
|
}, |
|
{ |
|
"epoch": 1.158920539730135, |
|
"grad_norm": 1.3285900564381639, |
|
"learning_rate": 3.7647640550151666e-06, |
|
"loss": 0.1182, |
|
"step": 773 |
|
}, |
|
{ |
|
"epoch": 1.1604197901049476, |
|
"grad_norm": 1.2741394206696637, |
|
"learning_rate": 3.7533573930324395e-06, |
|
"loss": 0.1264, |
|
"step": 774 |
|
}, |
|
{ |
|
"epoch": 1.1619190404797601, |
|
"grad_norm": 1.4724544443053296, |
|
"learning_rate": 3.7419576450572924e-06, |
|
"loss": 0.1437, |
|
"step": 775 |
|
}, |
|
{ |
|
"epoch": 1.1634182908545727, |
|
"grad_norm": 1.2373312789607587, |
|
"learning_rate": 3.7305648743138966e-06, |
|
"loss": 0.1411, |
|
"step": 776 |
|
}, |
|
{ |
|
"epoch": 1.1649175412293853, |
|
"grad_norm": 1.5983303644310531, |
|
"learning_rate": 3.7191791439877236e-06, |
|
"loss": 0.1421, |
|
"step": 777 |
|
}, |
|
{ |
|
"epoch": 1.1664167916041979, |
|
"grad_norm": 1.2040821528942203, |
|
"learning_rate": 3.7078005172252015e-06, |
|
"loss": 0.1414, |
|
"step": 778 |
|
}, |
|
{ |
|
"epoch": 1.1679160419790104, |
|
"grad_norm": 1.3565920153384308, |
|
"learning_rate": 3.6964290571333583e-06, |
|
"loss": 0.1289, |
|
"step": 779 |
|
}, |
|
{ |
|
"epoch": 1.169415292353823, |
|
"grad_norm": 1.494886770034295, |
|
"learning_rate": 3.6850648267794776e-06, |
|
"loss": 0.1493, |
|
"step": 780 |
|
}, |
|
{ |
|
"epoch": 1.1709145427286356, |
|
"grad_norm": 1.2441195402039313, |
|
"learning_rate": 3.673707889190744e-06, |
|
"loss": 0.0992, |
|
"step": 781 |
|
}, |
|
{ |
|
"epoch": 1.1724137931034484, |
|
"grad_norm": 1.3372883034998901, |
|
"learning_rate": 3.662358307353897e-06, |
|
"loss": 0.1191, |
|
"step": 782 |
|
}, |
|
{ |
|
"epoch": 1.1739130434782608, |
|
"grad_norm": 1.2854871350976347, |
|
"learning_rate": 3.6510161442148783e-06, |
|
"loss": 0.1218, |
|
"step": 783 |
|
}, |
|
{ |
|
"epoch": 1.1754122938530736, |
|
"grad_norm": 1.1481541679425193, |
|
"learning_rate": 3.63968146267849e-06, |
|
"loss": 0.1353, |
|
"step": 784 |
|
}, |
|
{ |
|
"epoch": 1.1769115442278861, |
|
"grad_norm": 1.0092289583242011, |
|
"learning_rate": 3.6283543256080334e-06, |
|
"loss": 0.1216, |
|
"step": 785 |
|
}, |
|
{ |
|
"epoch": 1.1784107946026987, |
|
"grad_norm": 1.3898812609035025, |
|
"learning_rate": 3.6170347958249728e-06, |
|
"loss": 0.1583, |
|
"step": 786 |
|
}, |
|
{ |
|
"epoch": 1.1799100449775113, |
|
"grad_norm": 1.2415231843122483, |
|
"learning_rate": 3.605722936108579e-06, |
|
"loss": 0.1418, |
|
"step": 787 |
|
}, |
|
{ |
|
"epoch": 1.1814092953523239, |
|
"grad_norm": 1.465232037061966, |
|
"learning_rate": 3.5944188091955843e-06, |
|
"loss": 0.126, |
|
"step": 788 |
|
}, |
|
{ |
|
"epoch": 1.1829085457271364, |
|
"grad_norm": 1.1131169462327808, |
|
"learning_rate": 3.5831224777798346e-06, |
|
"loss": 0.119, |
|
"step": 789 |
|
}, |
|
{ |
|
"epoch": 1.184407796101949, |
|
"grad_norm": 1.1297393572577206, |
|
"learning_rate": 3.5718340045119416e-06, |
|
"loss": 0.1367, |
|
"step": 790 |
|
}, |
|
{ |
|
"epoch": 1.1859070464767616, |
|
"grad_norm": 1.46635806934035, |
|
"learning_rate": 3.5605534519989327e-06, |
|
"loss": 0.1325, |
|
"step": 791 |
|
}, |
|
{ |
|
"epoch": 1.1874062968515742, |
|
"grad_norm": 1.2573786583650581, |
|
"learning_rate": 3.5492808828039083e-06, |
|
"loss": 0.0993, |
|
"step": 792 |
|
}, |
|
{ |
|
"epoch": 1.1889055472263867, |
|
"grad_norm": 1.3176981497023343, |
|
"learning_rate": 3.538016359445692e-06, |
|
"loss": 0.1317, |
|
"step": 793 |
|
}, |
|
{ |
|
"epoch": 1.1904047976011993, |
|
"grad_norm": 1.1650275353453803, |
|
"learning_rate": 3.5267599443984848e-06, |
|
"loss": 0.1732, |
|
"step": 794 |
|
}, |
|
{ |
|
"epoch": 1.191904047976012, |
|
"grad_norm": 1.3923141810081512, |
|
"learning_rate": 3.5155117000915153e-06, |
|
"loss": 0.171, |
|
"step": 795 |
|
}, |
|
{ |
|
"epoch": 1.1934032983508245, |
|
"grad_norm": 1.1182429512523535, |
|
"learning_rate": 3.5042716889086998e-06, |
|
"loss": 0.1321, |
|
"step": 796 |
|
}, |
|
{ |
|
"epoch": 1.1949025487256373, |
|
"grad_norm": 1.234419843095829, |
|
"learning_rate": 3.493039973188289e-06, |
|
"loss": 0.1052, |
|
"step": 797 |
|
}, |
|
{ |
|
"epoch": 1.1964017991004499, |
|
"grad_norm": 1.406658707936665, |
|
"learning_rate": 3.481816615222533e-06, |
|
"loss": 0.1876, |
|
"step": 798 |
|
}, |
|
{ |
|
"epoch": 1.1979010494752624, |
|
"grad_norm": 1.0328991457308447, |
|
"learning_rate": 3.470601677257323e-06, |
|
"loss": 0.1248, |
|
"step": 799 |
|
}, |
|
{ |
|
"epoch": 1.199400299850075, |
|
"grad_norm": 1.292313377436957, |
|
"learning_rate": 3.459395221491853e-06, |
|
"loss": 0.1591, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.199400299850075, |
|
"eval_loss": 0.30602961778640747, |
|
"eval_runtime": 9.5435, |
|
"eval_samples_per_second": 5.658, |
|
"eval_steps_per_second": 1.467, |
|
"step": 800 |
|
}, |
|
{ |
|
"epoch": 1.2008995502248876, |
|
"grad_norm": 1.3058171475272295, |
|
"learning_rate": 3.4481973100782756e-06, |
|
"loss": 0.1576, |
|
"step": 801 |
|
}, |
|
{ |
|
"epoch": 1.2023988005997002, |
|
"grad_norm": 1.3583892270836624, |
|
"learning_rate": 3.4370080051213527e-06, |
|
"loss": 0.2162, |
|
"step": 802 |
|
}, |
|
{ |
|
"epoch": 1.2038980509745127, |
|
"grad_norm": 1.3674456101935875, |
|
"learning_rate": 3.4258273686781156e-06, |
|
"loss": 0.1272, |
|
"step": 803 |
|
}, |
|
{ |
|
"epoch": 1.2053973013493253, |
|
"grad_norm": 1.275046828519697, |
|
"learning_rate": 3.4146554627575207e-06, |
|
"loss": 0.1525, |
|
"step": 804 |
|
}, |
|
{ |
|
"epoch": 1.206896551724138, |
|
"grad_norm": 1.4733582867903916, |
|
"learning_rate": 3.403492349320101e-06, |
|
"loss": 0.1617, |
|
"step": 805 |
|
}, |
|
{ |
|
"epoch": 1.2083958020989505, |
|
"grad_norm": 1.226397831684623, |
|
"learning_rate": 3.392338090277628e-06, |
|
"loss": 0.1026, |
|
"step": 806 |
|
}, |
|
{ |
|
"epoch": 1.209895052473763, |
|
"grad_norm": 1.6112338048507868, |
|
"learning_rate": 3.3811927474927644e-06, |
|
"loss": 0.1653, |
|
"step": 807 |
|
}, |
|
{ |
|
"epoch": 1.2113943028485756, |
|
"grad_norm": 1.3671476703363021, |
|
"learning_rate": 3.3700563827787224e-06, |
|
"loss": 0.1369, |
|
"step": 808 |
|
}, |
|
{ |
|
"epoch": 1.2128935532233882, |
|
"grad_norm": 1.0643585793575752, |
|
"learning_rate": 3.358929057898922e-06, |
|
"loss": 0.1251, |
|
"step": 809 |
|
}, |
|
{ |
|
"epoch": 1.214392803598201, |
|
"grad_norm": 1.1339958530244039, |
|
"learning_rate": 3.3478108345666456e-06, |
|
"loss": 0.1431, |
|
"step": 810 |
|
}, |
|
{ |
|
"epoch": 1.2158920539730136, |
|
"grad_norm": 1.0883168747590415, |
|
"learning_rate": 3.3367017744446995e-06, |
|
"loss": 0.1069, |
|
"step": 811 |
|
}, |
|
{ |
|
"epoch": 1.2173913043478262, |
|
"grad_norm": 1.26438068795269, |
|
"learning_rate": 3.3256019391450696e-06, |
|
"loss": 0.178, |
|
"step": 812 |
|
}, |
|
{ |
|
"epoch": 1.2188905547226387, |
|
"grad_norm": 1.0664372577970027, |
|
"learning_rate": 3.314511390228578e-06, |
|
"loss": 0.1011, |
|
"step": 813 |
|
}, |
|
{ |
|
"epoch": 1.2203898050974513, |
|
"grad_norm": 1.3331137347043034, |
|
"learning_rate": 3.303430189204545e-06, |
|
"loss": 0.117, |
|
"step": 814 |
|
}, |
|
{ |
|
"epoch": 1.221889055472264, |
|
"grad_norm": 1.3304820446438348, |
|
"learning_rate": 3.2923583975304474e-06, |
|
"loss": 0.171, |
|
"step": 815 |
|
}, |
|
{ |
|
"epoch": 1.2233883058470765, |
|
"grad_norm": 1.1062517912626064, |
|
"learning_rate": 3.2812960766115747e-06, |
|
"loss": 0.1195, |
|
"step": 816 |
|
}, |
|
{ |
|
"epoch": 1.224887556221889, |
|
"grad_norm": 1.8558570146937945, |
|
"learning_rate": 3.270243287800693e-06, |
|
"loss": 0.18, |
|
"step": 817 |
|
}, |
|
{ |
|
"epoch": 1.2263868065967016, |
|
"grad_norm": 1.2786157676011234, |
|
"learning_rate": 3.2592000923976997e-06, |
|
"loss": 0.1618, |
|
"step": 818 |
|
}, |
|
{ |
|
"epoch": 1.2278860569715142, |
|
"grad_norm": 1.4796928134938527, |
|
"learning_rate": 3.2481665516492876e-06, |
|
"loss": 0.1317, |
|
"step": 819 |
|
}, |
|
{ |
|
"epoch": 1.2293853073463268, |
|
"grad_norm": 1.1988475918746555, |
|
"learning_rate": 3.2371427267486044e-06, |
|
"loss": 0.1246, |
|
"step": 820 |
|
}, |
|
{ |
|
"epoch": 1.2308845577211394, |
|
"grad_norm": 1.275286074409384, |
|
"learning_rate": 3.2261286788349127e-06, |
|
"loss": 0.1634, |
|
"step": 821 |
|
}, |
|
{ |
|
"epoch": 1.232383808095952, |
|
"grad_norm": 1.3995624280447843, |
|
"learning_rate": 3.2151244689932505e-06, |
|
"loss": 0.1488, |
|
"step": 822 |
|
}, |
|
{ |
|
"epoch": 1.2338830584707647, |
|
"grad_norm": 1.4104751467041678, |
|
"learning_rate": 3.2041301582540903e-06, |
|
"loss": 0.1286, |
|
"step": 823 |
|
}, |
|
{ |
|
"epoch": 1.235382308845577, |
|
"grad_norm": 1.2445006734481485, |
|
"learning_rate": 3.1931458075930046e-06, |
|
"loss": 0.1326, |
|
"step": 824 |
|
}, |
|
{ |
|
"epoch": 1.23688155922039, |
|
"grad_norm": 1.1997914952456805, |
|
"learning_rate": 3.182171477930332e-06, |
|
"loss": 0.1398, |
|
"step": 825 |
|
}, |
|
{ |
|
"epoch": 1.2383808095952025, |
|
"grad_norm": 1.1650965283772017, |
|
"learning_rate": 3.171207230130826e-06, |
|
"loss": 0.1245, |
|
"step": 826 |
|
}, |
|
{ |
|
"epoch": 1.239880059970015, |
|
"grad_norm": 1.4224565886835228, |
|
"learning_rate": 3.1602531250033286e-06, |
|
"loss": 0.1441, |
|
"step": 827 |
|
}, |
|
{ |
|
"epoch": 1.2413793103448276, |
|
"grad_norm": 1.3213332823744537, |
|
"learning_rate": 3.149309223300428e-06, |
|
"loss": 0.1373, |
|
"step": 828 |
|
}, |
|
{ |
|
"epoch": 1.2428785607196402, |
|
"grad_norm": 1.3395941381528507, |
|
"learning_rate": 3.1383755857181253e-06, |
|
"loss": 0.1004, |
|
"step": 829 |
|
}, |
|
{ |
|
"epoch": 1.2443778110944528, |
|
"grad_norm": 1.4343202287074748, |
|
"learning_rate": 3.1274522728954928e-06, |
|
"loss": 0.1444, |
|
"step": 830 |
|
}, |
|
{ |
|
"epoch": 1.2458770614692654, |
|
"grad_norm": 1.3805798033233598, |
|
"learning_rate": 3.1165393454143423e-06, |
|
"loss": 0.1176, |
|
"step": 831 |
|
}, |
|
{ |
|
"epoch": 1.247376311844078, |
|
"grad_norm": 1.429797685337231, |
|
"learning_rate": 3.1056368637988876e-06, |
|
"loss": 0.1376, |
|
"step": 832 |
|
}, |
|
{ |
|
"epoch": 1.2488755622188905, |
|
"grad_norm": 1.3184640779280599, |
|
"learning_rate": 3.0947448885154085e-06, |
|
"loss": 0.1052, |
|
"step": 833 |
|
}, |
|
{ |
|
"epoch": 1.250374812593703, |
|
"grad_norm": 1.4125294862634858, |
|
"learning_rate": 3.0838634799719157e-06, |
|
"loss": 0.1762, |
|
"step": 834 |
|
}, |
|
{ |
|
"epoch": 1.2518740629685157, |
|
"grad_norm": 1.399390144522578, |
|
"learning_rate": 3.072992698517815e-06, |
|
"loss": 0.1143, |
|
"step": 835 |
|
}, |
|
{ |
|
"epoch": 1.2533733133433285, |
|
"grad_norm": 1.4083090255847572, |
|
"learning_rate": 3.0621326044435738e-06, |
|
"loss": 0.1733, |
|
"step": 836 |
|
}, |
|
{ |
|
"epoch": 1.2548725637181408, |
|
"grad_norm": 1.484272414476059, |
|
"learning_rate": 3.0512832579803873e-06, |
|
"loss": 0.1402, |
|
"step": 837 |
|
}, |
|
{ |
|
"epoch": 1.2563718140929536, |
|
"grad_norm": 1.3812662927559538, |
|
"learning_rate": 3.0404447192998398e-06, |
|
"loss": 0.1522, |
|
"step": 838 |
|
}, |
|
{ |
|
"epoch": 1.2578710644677662, |
|
"grad_norm": 1.2373359431123467, |
|
"learning_rate": 3.029617048513579e-06, |
|
"loss": 0.0968, |
|
"step": 839 |
|
}, |
|
{ |
|
"epoch": 1.2593703148425788, |
|
"grad_norm": 1.0532358406683526, |
|
"learning_rate": 3.0188003056729752e-06, |
|
"loss": 0.1136, |
|
"step": 840 |
|
}, |
|
{ |
|
"epoch": 1.2608695652173914, |
|
"grad_norm": 1.3370557887300012, |
|
"learning_rate": 3.007994550768793e-06, |
|
"loss": 0.1194, |
|
"step": 841 |
|
}, |
|
{ |
|
"epoch": 1.262368815592204, |
|
"grad_norm": 1.3888773960912888, |
|
"learning_rate": 2.9971998437308546e-06, |
|
"loss": 0.148, |
|
"step": 842 |
|
}, |
|
{ |
|
"epoch": 1.2638680659670165, |
|
"grad_norm": 1.327843448779607, |
|
"learning_rate": 2.9864162444277118e-06, |
|
"loss": 0.1604, |
|
"step": 843 |
|
}, |
|
{ |
|
"epoch": 1.265367316341829, |
|
"grad_norm": 1.6127561280028084, |
|
"learning_rate": 2.97564381266631e-06, |
|
"loss": 0.1344, |
|
"step": 844 |
|
}, |
|
{ |
|
"epoch": 1.2668665667166417, |
|
"grad_norm": 1.150612273649465, |
|
"learning_rate": 2.964882608191659e-06, |
|
"loss": 0.1065, |
|
"step": 845 |
|
}, |
|
{ |
|
"epoch": 1.2683658170914542, |
|
"grad_norm": 1.3198941904967931, |
|
"learning_rate": 2.954132690686502e-06, |
|
"loss": 0.148, |
|
"step": 846 |
|
}, |
|
{ |
|
"epoch": 1.2698650674662668, |
|
"grad_norm": 1.3171201662076748, |
|
"learning_rate": 2.9433941197709813e-06, |
|
"loss": 0.1582, |
|
"step": 847 |
|
}, |
|
{ |
|
"epoch": 1.2713643178410794, |
|
"grad_norm": 1.6640340192874812, |
|
"learning_rate": 2.9326669550023124e-06, |
|
"loss": 0.1527, |
|
"step": 848 |
|
}, |
|
{ |
|
"epoch": 1.272863568215892, |
|
"grad_norm": 1.191103327823784, |
|
"learning_rate": 2.921951255874449e-06, |
|
"loss": 0.0998, |
|
"step": 849 |
|
}, |
|
{ |
|
"epoch": 1.2743628185907045, |
|
"grad_norm": 1.3329297236987123, |
|
"learning_rate": 2.9112470818177563e-06, |
|
"loss": 0.1391, |
|
"step": 850 |
|
}, |
|
{ |
|
"epoch": 1.2758620689655173, |
|
"grad_norm": 1.3806357776189597, |
|
"learning_rate": 2.9005544921986774e-06, |
|
"loss": 0.1674, |
|
"step": 851 |
|
}, |
|
{ |
|
"epoch": 1.2773613193403297, |
|
"grad_norm": 1.316672981264349, |
|
"learning_rate": 2.8898735463194128e-06, |
|
"loss": 0.1288, |
|
"step": 852 |
|
}, |
|
{ |
|
"epoch": 1.2788605697151425, |
|
"grad_norm": 1.389744271741816, |
|
"learning_rate": 2.8792043034175817e-06, |
|
"loss": 0.1648, |
|
"step": 853 |
|
}, |
|
{ |
|
"epoch": 1.280359820089955, |
|
"grad_norm": 1.2804283857478296, |
|
"learning_rate": 2.8685468226658974e-06, |
|
"loss": 0.1478, |
|
"step": 854 |
|
}, |
|
{ |
|
"epoch": 1.2818590704647677, |
|
"grad_norm": 1.3085879349069323, |
|
"learning_rate": 2.85790116317184e-06, |
|
"loss": 0.1291, |
|
"step": 855 |
|
}, |
|
{ |
|
"epoch": 1.2833583208395802, |
|
"grad_norm": 1.1718560022595523, |
|
"learning_rate": 2.8472673839773267e-06, |
|
"loss": 0.1281, |
|
"step": 856 |
|
}, |
|
{ |
|
"epoch": 1.2848575712143928, |
|
"grad_norm": 1.4174010495436282, |
|
"learning_rate": 2.8366455440583874e-06, |
|
"loss": 0.1479, |
|
"step": 857 |
|
}, |
|
{ |
|
"epoch": 1.2863568215892054, |
|
"grad_norm": 1.3701745201308646, |
|
"learning_rate": 2.8260357023248323e-06, |
|
"loss": 0.18, |
|
"step": 858 |
|
}, |
|
{ |
|
"epoch": 1.287856071964018, |
|
"grad_norm": 1.2164522782730773, |
|
"learning_rate": 2.815437917619932e-06, |
|
"loss": 0.1545, |
|
"step": 859 |
|
}, |
|
{ |
|
"epoch": 1.2893553223388305, |
|
"grad_norm": 1.545266209233553, |
|
"learning_rate": 2.804852248720085e-06, |
|
"loss": 0.1508, |
|
"step": 860 |
|
}, |
|
{ |
|
"epoch": 1.2908545727136431, |
|
"grad_norm": 1.178187836068456, |
|
"learning_rate": 2.7942787543344957e-06, |
|
"loss": 0.1646, |
|
"step": 861 |
|
}, |
|
{ |
|
"epoch": 1.2923538230884557, |
|
"grad_norm": 1.2847407117252163, |
|
"learning_rate": 2.783717493104846e-06, |
|
"loss": 0.1311, |
|
"step": 862 |
|
}, |
|
{ |
|
"epoch": 1.2938530734632683, |
|
"grad_norm": 1.2065593565399568, |
|
"learning_rate": 2.7731685236049745e-06, |
|
"loss": 0.186, |
|
"step": 863 |
|
}, |
|
{ |
|
"epoch": 1.295352323838081, |
|
"grad_norm": 1.2537634563394848, |
|
"learning_rate": 2.762631904340546e-06, |
|
"loss": 0.1176, |
|
"step": 864 |
|
}, |
|
{ |
|
"epoch": 1.2968515742128934, |
|
"grad_norm": 1.4572893072304802, |
|
"learning_rate": 2.7521076937487248e-06, |
|
"loss": 0.1374, |
|
"step": 865 |
|
}, |
|
{ |
|
"epoch": 1.2983508245877062, |
|
"grad_norm": 1.1604095621916721, |
|
"learning_rate": 2.7415959501978674e-06, |
|
"loss": 0.1347, |
|
"step": 866 |
|
}, |
|
{ |
|
"epoch": 1.2998500749625188, |
|
"grad_norm": 1.2797788317906116, |
|
"learning_rate": 2.731096731987177e-06, |
|
"loss": 0.1399, |
|
"step": 867 |
|
}, |
|
{ |
|
"epoch": 1.3013493253373314, |
|
"grad_norm": 1.2600649383977571, |
|
"learning_rate": 2.7206100973463958e-06, |
|
"loss": 0.112, |
|
"step": 868 |
|
}, |
|
{ |
|
"epoch": 1.302848575712144, |
|
"grad_norm": 1.3111389852634707, |
|
"learning_rate": 2.71013610443547e-06, |
|
"loss": 0.1498, |
|
"step": 869 |
|
}, |
|
{ |
|
"epoch": 1.3043478260869565, |
|
"grad_norm": 1.283095079431425, |
|
"learning_rate": 2.6996748113442397e-06, |
|
"loss": 0.1495, |
|
"step": 870 |
|
}, |
|
{ |
|
"epoch": 1.3058470764617691, |
|
"grad_norm": 1.2906279291406924, |
|
"learning_rate": 2.689226276092107e-06, |
|
"loss": 0.1546, |
|
"step": 871 |
|
}, |
|
{ |
|
"epoch": 1.3073463268365817, |
|
"grad_norm": 1.37699366581781, |
|
"learning_rate": 2.6787905566277185e-06, |
|
"loss": 0.1963, |
|
"step": 872 |
|
}, |
|
{ |
|
"epoch": 1.3088455772113943, |
|
"grad_norm": 1.267731604106557, |
|
"learning_rate": 2.6683677108286423e-06, |
|
"loss": 0.1407, |
|
"step": 873 |
|
}, |
|
{ |
|
"epoch": 1.3103448275862069, |
|
"grad_norm": 1.3983973615367558, |
|
"learning_rate": 2.65795779650105e-06, |
|
"loss": 0.1407, |
|
"step": 874 |
|
}, |
|
{ |
|
"epoch": 1.3118440779610194, |
|
"grad_norm": 1.2116834320152923, |
|
"learning_rate": 2.6475608713793923e-06, |
|
"loss": 0.1147, |
|
"step": 875 |
|
}, |
|
{ |
|
"epoch": 1.313343328335832, |
|
"grad_norm": 1.2779312117230872, |
|
"learning_rate": 2.6371769931260806e-06, |
|
"loss": 0.1091, |
|
"step": 876 |
|
}, |
|
{ |
|
"epoch": 1.3148425787106448, |
|
"grad_norm": 1.3310728618855927, |
|
"learning_rate": 2.6268062193311672e-06, |
|
"loss": 0.1411, |
|
"step": 877 |
|
}, |
|
{ |
|
"epoch": 1.3163418290854572, |
|
"grad_norm": 1.1705569257005464, |
|
"learning_rate": 2.6164486075120245e-06, |
|
"loss": 0.1245, |
|
"step": 878 |
|
}, |
|
{ |
|
"epoch": 1.31784107946027, |
|
"grad_norm": 1.3341056541286225, |
|
"learning_rate": 2.606104215113033e-06, |
|
"loss": 0.1422, |
|
"step": 879 |
|
}, |
|
{ |
|
"epoch": 1.3193403298350825, |
|
"grad_norm": 1.3024411783557586, |
|
"learning_rate": 2.5957730995052477e-06, |
|
"loss": 0.1339, |
|
"step": 880 |
|
}, |
|
{ |
|
"epoch": 1.3208395802098951, |
|
"grad_norm": 1.0751188596818317, |
|
"learning_rate": 2.585455317986095e-06, |
|
"loss": 0.104, |
|
"step": 881 |
|
}, |
|
{ |
|
"epoch": 1.3223388305847077, |
|
"grad_norm": 1.4384098550039184, |
|
"learning_rate": 2.5751509277790487e-06, |
|
"loss": 0.1949, |
|
"step": 882 |
|
}, |
|
{ |
|
"epoch": 1.3238380809595203, |
|
"grad_norm": 1.3787195214409598, |
|
"learning_rate": 2.5648599860333122e-06, |
|
"loss": 0.1139, |
|
"step": 883 |
|
}, |
|
{ |
|
"epoch": 1.3253373313343328, |
|
"grad_norm": 1.4085530010640945, |
|
"learning_rate": 2.554582549823502e-06, |
|
"loss": 0.1371, |
|
"step": 884 |
|
}, |
|
{ |
|
"epoch": 1.3268365817091454, |
|
"grad_norm": 1.1829924005858234, |
|
"learning_rate": 2.5443186761493327e-06, |
|
"loss": 0.153, |
|
"step": 885 |
|
}, |
|
{ |
|
"epoch": 1.328335832083958, |
|
"grad_norm": 1.2689922518881562, |
|
"learning_rate": 2.5340684219352977e-06, |
|
"loss": 0.143, |
|
"step": 886 |
|
}, |
|
{ |
|
"epoch": 1.3298350824587706, |
|
"grad_norm": 1.4492441219474554, |
|
"learning_rate": 2.523831844030358e-06, |
|
"loss": 0.2237, |
|
"step": 887 |
|
}, |
|
{ |
|
"epoch": 1.3313343328335832, |
|
"grad_norm": 1.3724555732722612, |
|
"learning_rate": 2.513608999207622e-06, |
|
"loss": 0.132, |
|
"step": 888 |
|
}, |
|
{ |
|
"epoch": 1.3328335832083957, |
|
"grad_norm": 1.4409654292658438, |
|
"learning_rate": 2.503399944164035e-06, |
|
"loss": 0.1496, |
|
"step": 889 |
|
}, |
|
{ |
|
"epoch": 1.3343328335832085, |
|
"grad_norm": 1.2353769146697504, |
|
"learning_rate": 2.4932047355200613e-06, |
|
"loss": 0.132, |
|
"step": 890 |
|
}, |
|
{ |
|
"epoch": 1.3358320839580209, |
|
"grad_norm": 1.2911994019209236, |
|
"learning_rate": 2.483023429819372e-06, |
|
"loss": 0.1232, |
|
"step": 891 |
|
}, |
|
{ |
|
"epoch": 1.3373313343328337, |
|
"grad_norm": 1.1296916111789772, |
|
"learning_rate": 2.472856083528531e-06, |
|
"loss": 0.1347, |
|
"step": 892 |
|
}, |
|
{ |
|
"epoch": 1.338830584707646, |
|
"grad_norm": 1.3384892785279934, |
|
"learning_rate": 2.4627027530366836e-06, |
|
"loss": 0.1217, |
|
"step": 893 |
|
}, |
|
{ |
|
"epoch": 1.3403298350824588, |
|
"grad_norm": 1.1990701672887576, |
|
"learning_rate": 2.4525634946552405e-06, |
|
"loss": 0.1367, |
|
"step": 894 |
|
}, |
|
{ |
|
"epoch": 1.3418290854572714, |
|
"grad_norm": 1.6287895885905193, |
|
"learning_rate": 2.442438364617567e-06, |
|
"loss": 0.2167, |
|
"step": 895 |
|
}, |
|
{ |
|
"epoch": 1.343328335832084, |
|
"grad_norm": 1.6320237473460248, |
|
"learning_rate": 2.4323274190786703e-06, |
|
"loss": 0.137, |
|
"step": 896 |
|
}, |
|
{ |
|
"epoch": 1.3448275862068966, |
|
"grad_norm": 1.2070451582386006, |
|
"learning_rate": 2.422230714114891e-06, |
|
"loss": 0.15, |
|
"step": 897 |
|
}, |
|
{ |
|
"epoch": 1.3463268365817092, |
|
"grad_norm": 1.0813044182068166, |
|
"learning_rate": 2.4121483057235884e-06, |
|
"loss": 0.1673, |
|
"step": 898 |
|
}, |
|
{ |
|
"epoch": 1.3478260869565217, |
|
"grad_norm": 1.3765592410946605, |
|
"learning_rate": 2.4020802498228333e-06, |
|
"loss": 0.1423, |
|
"step": 899 |
|
}, |
|
{ |
|
"epoch": 1.3493253373313343, |
|
"grad_norm": 1.0216979685188097, |
|
"learning_rate": 2.392026602251093e-06, |
|
"loss": 0.1097, |
|
"step": 900 |
|
}, |
|
{ |
|
"epoch": 1.3508245877061469, |
|
"grad_norm": 1.395082211672787, |
|
"learning_rate": 2.3819874187669266e-06, |
|
"loss": 0.1498, |
|
"step": 901 |
|
}, |
|
{ |
|
"epoch": 1.3523238380809595, |
|
"grad_norm": 1.2772714877694418, |
|
"learning_rate": 2.371962755048675e-06, |
|
"loss": 0.1439, |
|
"step": 902 |
|
}, |
|
{ |
|
"epoch": 1.353823088455772, |
|
"grad_norm": 1.2230414487038161, |
|
"learning_rate": 2.36195266669415e-06, |
|
"loss": 0.1498, |
|
"step": 903 |
|
}, |
|
{ |
|
"epoch": 1.3553223388305846, |
|
"grad_norm": 1.2133735297762844, |
|
"learning_rate": 2.351957209220326e-06, |
|
"loss": 0.0944, |
|
"step": 904 |
|
}, |
|
{ |
|
"epoch": 1.3568215892053974, |
|
"grad_norm": 1.2730569058592784, |
|
"learning_rate": 2.341976438063035e-06, |
|
"loss": 0.1699, |
|
"step": 905 |
|
}, |
|
{ |
|
"epoch": 1.3583208395802098, |
|
"grad_norm": 1.235232987317895, |
|
"learning_rate": 2.332010408576653e-06, |
|
"loss": 0.1164, |
|
"step": 906 |
|
}, |
|
{ |
|
"epoch": 1.3598200899550226, |
|
"grad_norm": 1.22888641011766, |
|
"learning_rate": 2.3220591760338046e-06, |
|
"loss": 0.1243, |
|
"step": 907 |
|
}, |
|
{ |
|
"epoch": 1.3613193403298351, |
|
"grad_norm": 1.3520510496586944, |
|
"learning_rate": 2.3121227956250435e-06, |
|
"loss": 0.1295, |
|
"step": 908 |
|
}, |
|
{ |
|
"epoch": 1.3628185907046477, |
|
"grad_norm": 1.3900456962266945, |
|
"learning_rate": 2.302201322458552e-06, |
|
"loss": 0.127, |
|
"step": 909 |
|
}, |
|
{ |
|
"epoch": 1.3643178410794603, |
|
"grad_norm": 1.2949158294246113, |
|
"learning_rate": 2.292294811559837e-06, |
|
"loss": 0.135, |
|
"step": 910 |
|
}, |
|
{ |
|
"epoch": 1.3658170914542729, |
|
"grad_norm": 1.2139697429774938, |
|
"learning_rate": 2.282403317871422e-06, |
|
"loss": 0.1253, |
|
"step": 911 |
|
}, |
|
{ |
|
"epoch": 1.3673163418290855, |
|
"grad_norm": 1.31250625327868, |
|
"learning_rate": 2.2725268962525454e-06, |
|
"loss": 0.1663, |
|
"step": 912 |
|
}, |
|
{ |
|
"epoch": 1.368815592203898, |
|
"grad_norm": 1.2681561846101566, |
|
"learning_rate": 2.262665601478852e-06, |
|
"loss": 0.1495, |
|
"step": 913 |
|
}, |
|
{ |
|
"epoch": 1.3703148425787106, |
|
"grad_norm": 1.286221926294528, |
|
"learning_rate": 2.252819488242093e-06, |
|
"loss": 0.1389, |
|
"step": 914 |
|
}, |
|
{ |
|
"epoch": 1.3718140929535232, |
|
"grad_norm": 1.242105773688379, |
|
"learning_rate": 2.24298861114982e-06, |
|
"loss": 0.1304, |
|
"step": 915 |
|
}, |
|
{ |
|
"epoch": 1.3733133433283358, |
|
"grad_norm": 1.1667895987481969, |
|
"learning_rate": 2.2331730247250857e-06, |
|
"loss": 0.1061, |
|
"step": 916 |
|
}, |
|
{ |
|
"epoch": 1.3748125937031483, |
|
"grad_norm": 1.5650235156411083, |
|
"learning_rate": 2.223372783406137e-06, |
|
"loss": 0.1256, |
|
"step": 917 |
|
}, |
|
{ |
|
"epoch": 1.3763118440779611, |
|
"grad_norm": 1.3344433479675109, |
|
"learning_rate": 2.2135879415461152e-06, |
|
"loss": 0.1191, |
|
"step": 918 |
|
}, |
|
{ |
|
"epoch": 1.3778110944527735, |
|
"grad_norm": 1.540334497517668, |
|
"learning_rate": 2.203818553412757e-06, |
|
"loss": 0.1574, |
|
"step": 919 |
|
}, |
|
{ |
|
"epoch": 1.3793103448275863, |
|
"grad_norm": 1.3647654710975659, |
|
"learning_rate": 2.1940646731880887e-06, |
|
"loss": 0.1362, |
|
"step": 920 |
|
}, |
|
{ |
|
"epoch": 1.3808095952023989, |
|
"grad_norm": 1.1167831376180046, |
|
"learning_rate": 2.1843263549681287e-06, |
|
"loss": 0.1465, |
|
"step": 921 |
|
}, |
|
{ |
|
"epoch": 1.3823088455772115, |
|
"grad_norm": 1.2857228281515627, |
|
"learning_rate": 2.174603652762588e-06, |
|
"loss": 0.1224, |
|
"step": 922 |
|
}, |
|
{ |
|
"epoch": 1.383808095952024, |
|
"grad_norm": 1.422505378785925, |
|
"learning_rate": 2.164896620494569e-06, |
|
"loss": 0.1378, |
|
"step": 923 |
|
}, |
|
{ |
|
"epoch": 1.3853073463268366, |
|
"grad_norm": 1.5005035345217537, |
|
"learning_rate": 2.1552053120002655e-06, |
|
"loss": 0.132, |
|
"step": 924 |
|
}, |
|
{ |
|
"epoch": 1.3868065967016492, |
|
"grad_norm": 1.407570214282245, |
|
"learning_rate": 2.145529781028668e-06, |
|
"loss": 0.1133, |
|
"step": 925 |
|
}, |
|
{ |
|
"epoch": 1.3883058470764618, |
|
"grad_norm": 1.0514670390858363, |
|
"learning_rate": 2.1358700812412625e-06, |
|
"loss": 0.1089, |
|
"step": 926 |
|
}, |
|
{ |
|
"epoch": 1.3898050974512743, |
|
"grad_norm": 1.083129668862111, |
|
"learning_rate": 2.1262262662117327e-06, |
|
"loss": 0.1231, |
|
"step": 927 |
|
}, |
|
{ |
|
"epoch": 1.391304347826087, |
|
"grad_norm": 1.299774010899149, |
|
"learning_rate": 2.1165983894256647e-06, |
|
"loss": 0.1235, |
|
"step": 928 |
|
}, |
|
{ |
|
"epoch": 1.3928035982008995, |
|
"grad_norm": 1.6625102293551262, |
|
"learning_rate": 2.1069865042802502e-06, |
|
"loss": 0.1633, |
|
"step": 929 |
|
}, |
|
{ |
|
"epoch": 1.394302848575712, |
|
"grad_norm": 1.4135663597043977, |
|
"learning_rate": 2.0973906640839867e-06, |
|
"loss": 0.1027, |
|
"step": 930 |
|
}, |
|
{ |
|
"epoch": 1.3958020989505249, |
|
"grad_norm": 1.0803337138904263, |
|
"learning_rate": 2.0878109220563884e-06, |
|
"loss": 0.0994, |
|
"step": 931 |
|
}, |
|
{ |
|
"epoch": 1.3973013493253372, |
|
"grad_norm": 1.2267937559800803, |
|
"learning_rate": 2.078247331327685e-06, |
|
"loss": 0.1573, |
|
"step": 932 |
|
}, |
|
{ |
|
"epoch": 1.39880059970015, |
|
"grad_norm": 1.3902432370089108, |
|
"learning_rate": 2.0686999449385286e-06, |
|
"loss": 0.1289, |
|
"step": 933 |
|
}, |
|
{ |
|
"epoch": 1.4002998500749624, |
|
"grad_norm": 1.0727372587178268, |
|
"learning_rate": 2.0591688158397054e-06, |
|
"loss": 0.0882, |
|
"step": 934 |
|
}, |
|
{ |
|
"epoch": 1.4017991004497752, |
|
"grad_norm": 1.2763440022815213, |
|
"learning_rate": 2.0496539968918342e-06, |
|
"loss": 0.1217, |
|
"step": 935 |
|
}, |
|
{ |
|
"epoch": 1.4032983508245878, |
|
"grad_norm": 1.2014108208253573, |
|
"learning_rate": 2.0401555408650714e-06, |
|
"loss": 0.1362, |
|
"step": 936 |
|
}, |
|
{ |
|
"epoch": 1.4047976011994003, |
|
"grad_norm": 1.483112482396683, |
|
"learning_rate": 2.030673500438828e-06, |
|
"loss": 0.1488, |
|
"step": 937 |
|
}, |
|
{ |
|
"epoch": 1.406296851574213, |
|
"grad_norm": 1.1754077343561806, |
|
"learning_rate": 2.0212079282014725e-06, |
|
"loss": 0.1, |
|
"step": 938 |
|
}, |
|
{ |
|
"epoch": 1.4077961019490255, |
|
"grad_norm": 1.2495489419422312, |
|
"learning_rate": 2.0117588766500375e-06, |
|
"loss": 0.1476, |
|
"step": 939 |
|
}, |
|
{ |
|
"epoch": 1.409295352323838, |
|
"grad_norm": 1.4585345367189024, |
|
"learning_rate": 2.002326398189931e-06, |
|
"loss": 0.173, |
|
"step": 940 |
|
}, |
|
{ |
|
"epoch": 1.4107946026986506, |
|
"grad_norm": 1.404097214903437, |
|
"learning_rate": 1.9929105451346436e-06, |
|
"loss": 0.1197, |
|
"step": 941 |
|
}, |
|
{ |
|
"epoch": 1.4122938530734632, |
|
"grad_norm": 1.4538612436978717, |
|
"learning_rate": 1.983511369705462e-06, |
|
"loss": 0.1472, |
|
"step": 942 |
|
}, |
|
{ |
|
"epoch": 1.4137931034482758, |
|
"grad_norm": 1.3822857891996712, |
|
"learning_rate": 1.9741289240311757e-06, |
|
"loss": 0.1259, |
|
"step": 943 |
|
}, |
|
{ |
|
"epoch": 1.4152923538230884, |
|
"grad_norm": 1.274351641557688, |
|
"learning_rate": 1.9647632601477877e-06, |
|
"loss": 0.1524, |
|
"step": 944 |
|
}, |
|
{ |
|
"epoch": 1.416791604197901, |
|
"grad_norm": 1.3817398089545339, |
|
"learning_rate": 1.9554144299982314e-06, |
|
"loss": 0.1502, |
|
"step": 945 |
|
}, |
|
{ |
|
"epoch": 1.4182908545727138, |
|
"grad_norm": 1.301939773768075, |
|
"learning_rate": 1.9460824854320755e-06, |
|
"loss": 0.1301, |
|
"step": 946 |
|
}, |
|
{ |
|
"epoch": 1.419790104947526, |
|
"grad_norm": 1.341452577174406, |
|
"learning_rate": 1.9367674782052376e-06, |
|
"loss": 0.1629, |
|
"step": 947 |
|
}, |
|
{ |
|
"epoch": 1.421289355322339, |
|
"grad_norm": 1.2897360641073317, |
|
"learning_rate": 1.9274694599797067e-06, |
|
"loss": 0.1464, |
|
"step": 948 |
|
}, |
|
{ |
|
"epoch": 1.4227886056971515, |
|
"grad_norm": 1.217713411594433, |
|
"learning_rate": 1.918188482323242e-06, |
|
"loss": 0.1657, |
|
"step": 949 |
|
}, |
|
{ |
|
"epoch": 1.424287856071964, |
|
"grad_norm": 1.265335158791293, |
|
"learning_rate": 1.9089245967090952e-06, |
|
"loss": 0.1304, |
|
"step": 950 |
|
}, |
|
{ |
|
"epoch": 1.4257871064467766, |
|
"grad_norm": 1.2800428877170134, |
|
"learning_rate": 1.8996778545157263e-06, |
|
"loss": 0.1234, |
|
"step": 951 |
|
}, |
|
{ |
|
"epoch": 1.4272863568215892, |
|
"grad_norm": 1.2608530620594376, |
|
"learning_rate": 1.8904483070265133e-06, |
|
"loss": 0.1486, |
|
"step": 952 |
|
}, |
|
{ |
|
"epoch": 1.4287856071964018, |
|
"grad_norm": 1.4554065467824044, |
|
"learning_rate": 1.8812360054294725e-06, |
|
"loss": 0.1381, |
|
"step": 953 |
|
}, |
|
{ |
|
"epoch": 1.4302848575712144, |
|
"grad_norm": 1.3961840817217093, |
|
"learning_rate": 1.8720410008169727e-06, |
|
"loss": 0.1677, |
|
"step": 954 |
|
}, |
|
{ |
|
"epoch": 1.431784107946027, |
|
"grad_norm": 1.4460236341555863, |
|
"learning_rate": 1.8628633441854515e-06, |
|
"loss": 0.239, |
|
"step": 955 |
|
}, |
|
{ |
|
"epoch": 1.4332833583208395, |
|
"grad_norm": 1.4796942546972116, |
|
"learning_rate": 1.8537030864351303e-06, |
|
"loss": 0.1566, |
|
"step": 956 |
|
}, |
|
{ |
|
"epoch": 1.434782608695652, |
|
"grad_norm": 1.1462880365324495, |
|
"learning_rate": 1.8445602783697375e-06, |
|
"loss": 0.1321, |
|
"step": 957 |
|
}, |
|
{ |
|
"epoch": 1.4362818590704647, |
|
"grad_norm": 1.585283738421672, |
|
"learning_rate": 1.8354349706962243e-06, |
|
"loss": 0.1272, |
|
"step": 958 |
|
}, |
|
{ |
|
"epoch": 1.4377811094452775, |
|
"grad_norm": 1.0494214298183657, |
|
"learning_rate": 1.8263272140244803e-06, |
|
"loss": 0.1376, |
|
"step": 959 |
|
}, |
|
{ |
|
"epoch": 1.4392803598200898, |
|
"grad_norm": 1.2543649534654184, |
|
"learning_rate": 1.8172370588670563e-06, |
|
"loss": 0.1329, |
|
"step": 960 |
|
}, |
|
{ |
|
"epoch": 1.4407796101949026, |
|
"grad_norm": 1.4256311901440206, |
|
"learning_rate": 1.8081645556388866e-06, |
|
"loss": 0.1269, |
|
"step": 961 |
|
}, |
|
{ |
|
"epoch": 1.4422788605697152, |
|
"grad_norm": 1.2745005154484748, |
|
"learning_rate": 1.7991097546570018e-06, |
|
"loss": 0.1372, |
|
"step": 962 |
|
}, |
|
{ |
|
"epoch": 1.4437781109445278, |
|
"grad_norm": 1.3767502833369671, |
|
"learning_rate": 1.7900727061402556e-06, |
|
"loss": 0.1623, |
|
"step": 963 |
|
}, |
|
{ |
|
"epoch": 1.4452773613193404, |
|
"grad_norm": 1.473223666102311, |
|
"learning_rate": 1.7810534602090445e-06, |
|
"loss": 0.1244, |
|
"step": 964 |
|
}, |
|
{ |
|
"epoch": 1.446776611694153, |
|
"grad_norm": 1.5980102374273573, |
|
"learning_rate": 1.77205206688503e-06, |
|
"loss": 0.1418, |
|
"step": 965 |
|
}, |
|
{ |
|
"epoch": 1.4482758620689655, |
|
"grad_norm": 1.436451939783491, |
|
"learning_rate": 1.7630685760908623e-06, |
|
"loss": 0.1617, |
|
"step": 966 |
|
}, |
|
{ |
|
"epoch": 1.449775112443778, |
|
"grad_norm": 1.1622192875909254, |
|
"learning_rate": 1.7541030376499002e-06, |
|
"loss": 0.1203, |
|
"step": 967 |
|
}, |
|
{ |
|
"epoch": 1.4512743628185907, |
|
"grad_norm": 1.242253196158236, |
|
"learning_rate": 1.745155501285939e-06, |
|
"loss": 0.1481, |
|
"step": 968 |
|
}, |
|
{ |
|
"epoch": 1.4527736131934033, |
|
"grad_norm": 0.9974964391775575, |
|
"learning_rate": 1.736226016622931e-06, |
|
"loss": 0.1195, |
|
"step": 969 |
|
}, |
|
{ |
|
"epoch": 1.4542728635682158, |
|
"grad_norm": 1.5144900300867414, |
|
"learning_rate": 1.727314633184714e-06, |
|
"loss": 0.1838, |
|
"step": 970 |
|
}, |
|
{ |
|
"epoch": 1.4557721139430284, |
|
"grad_norm": 1.5131114078926966, |
|
"learning_rate": 1.718421400394732e-06, |
|
"loss": 0.1545, |
|
"step": 971 |
|
}, |
|
{ |
|
"epoch": 1.4572713643178412, |
|
"grad_norm": 1.379064276190616, |
|
"learning_rate": 1.7095463675757656e-06, |
|
"loss": 0.1468, |
|
"step": 972 |
|
}, |
|
{ |
|
"epoch": 1.4587706146926536, |
|
"grad_norm": 1.2722713450492789, |
|
"learning_rate": 1.7006895839496557e-06, |
|
"loss": 0.112, |
|
"step": 973 |
|
}, |
|
{ |
|
"epoch": 1.4602698650674664, |
|
"grad_norm": 1.3462576427240196, |
|
"learning_rate": 1.6918510986370312e-06, |
|
"loss": 0.1277, |
|
"step": 974 |
|
}, |
|
{ |
|
"epoch": 1.461769115442279, |
|
"grad_norm": 1.2903840705144083, |
|
"learning_rate": 1.6830309606570372e-06, |
|
"loss": 0.1272, |
|
"step": 975 |
|
}, |
|
{ |
|
"epoch": 1.4632683658170915, |
|
"grad_norm": 1.1343447342988313, |
|
"learning_rate": 1.674229218927062e-06, |
|
"loss": 0.0901, |
|
"step": 976 |
|
}, |
|
{ |
|
"epoch": 1.464767616191904, |
|
"grad_norm": 1.311434924925221, |
|
"learning_rate": 1.665445922262467e-06, |
|
"loss": 0.1323, |
|
"step": 977 |
|
}, |
|
{ |
|
"epoch": 1.4662668665667167, |
|
"grad_norm": 1.526283168070267, |
|
"learning_rate": 1.6566811193763149e-06, |
|
"loss": 0.1317, |
|
"step": 978 |
|
}, |
|
{ |
|
"epoch": 1.4677661169415293, |
|
"grad_norm": 1.189720713308383, |
|
"learning_rate": 1.6479348588791e-06, |
|
"loss": 0.1391, |
|
"step": 979 |
|
}, |
|
{ |
|
"epoch": 1.4692653673163418, |
|
"grad_norm": 1.3157965259486535, |
|
"learning_rate": 1.6392071892784789e-06, |
|
"loss": 0.1479, |
|
"step": 980 |
|
}, |
|
{ |
|
"epoch": 1.4707646176911544, |
|
"grad_norm": 1.0767318615446795, |
|
"learning_rate": 1.6304981589790015e-06, |
|
"loss": 0.1084, |
|
"step": 981 |
|
}, |
|
{ |
|
"epoch": 1.472263868065967, |
|
"grad_norm": 1.3638725536330354, |
|
"learning_rate": 1.6218078162818418e-06, |
|
"loss": 0.1587, |
|
"step": 982 |
|
}, |
|
{ |
|
"epoch": 1.4737631184407796, |
|
"grad_norm": 1.1551897396850164, |
|
"learning_rate": 1.6131362093845299e-06, |
|
"loss": 0.1418, |
|
"step": 983 |
|
}, |
|
{ |
|
"epoch": 1.4752623688155921, |
|
"grad_norm": 1.35859763625055, |
|
"learning_rate": 1.6044833863806864e-06, |
|
"loss": 0.1794, |
|
"step": 984 |
|
}, |
|
{ |
|
"epoch": 1.4767616191904047, |
|
"grad_norm": 1.0886322531605184, |
|
"learning_rate": 1.5958493952597536e-06, |
|
"loss": 0.1328, |
|
"step": 985 |
|
}, |
|
{ |
|
"epoch": 1.4782608695652173, |
|
"grad_norm": 1.1042469830014725, |
|
"learning_rate": 1.5872342839067305e-06, |
|
"loss": 0.1304, |
|
"step": 986 |
|
}, |
|
{ |
|
"epoch": 1.47976011994003, |
|
"grad_norm": 1.4521417610192422, |
|
"learning_rate": 1.5786381001019052e-06, |
|
"loss": 0.1316, |
|
"step": 987 |
|
}, |
|
{ |
|
"epoch": 1.4812593703148424, |
|
"grad_norm": 1.302024189367588, |
|
"learning_rate": 1.5700608915205978e-06, |
|
"loss": 0.1179, |
|
"step": 988 |
|
}, |
|
{ |
|
"epoch": 1.4827586206896552, |
|
"grad_norm": 1.2796658409150223, |
|
"learning_rate": 1.561502705732883e-06, |
|
"loss": 0.1312, |
|
"step": 989 |
|
}, |
|
{ |
|
"epoch": 1.4842578710644678, |
|
"grad_norm": 1.0892145988605106, |
|
"learning_rate": 1.5529635902033358e-06, |
|
"loss": 0.1061, |
|
"step": 990 |
|
}, |
|
{ |
|
"epoch": 1.4857571214392804, |
|
"grad_norm": 1.6080688773455982, |
|
"learning_rate": 1.5444435922907669e-06, |
|
"loss": 0.1238, |
|
"step": 991 |
|
}, |
|
{ |
|
"epoch": 1.487256371814093, |
|
"grad_norm": 1.2356347617084416, |
|
"learning_rate": 1.5359427592479553e-06, |
|
"loss": 0.1576, |
|
"step": 992 |
|
}, |
|
{ |
|
"epoch": 1.4887556221889056, |
|
"grad_norm": 1.4590865482376132, |
|
"learning_rate": 1.5274611382213922e-06, |
|
"loss": 0.1399, |
|
"step": 993 |
|
}, |
|
{ |
|
"epoch": 1.4902548725637181, |
|
"grad_norm": 1.473630114777543, |
|
"learning_rate": 1.5189987762510167e-06, |
|
"loss": 0.1204, |
|
"step": 994 |
|
}, |
|
{ |
|
"epoch": 1.4917541229385307, |
|
"grad_norm": 1.118155590669407, |
|
"learning_rate": 1.510555720269955e-06, |
|
"loss": 0.0941, |
|
"step": 995 |
|
}, |
|
{ |
|
"epoch": 1.4932533733133433, |
|
"grad_norm": 1.3062420399627814, |
|
"learning_rate": 1.5021320171042608e-06, |
|
"loss": 0.1606, |
|
"step": 996 |
|
}, |
|
{ |
|
"epoch": 1.4947526236881559, |
|
"grad_norm": 1.4905042557752983, |
|
"learning_rate": 1.4937277134726542e-06, |
|
"loss": 0.1319, |
|
"step": 997 |
|
}, |
|
{ |
|
"epoch": 1.4962518740629684, |
|
"grad_norm": 1.1349672110630278, |
|
"learning_rate": 1.4853428559862637e-06, |
|
"loss": 0.1272, |
|
"step": 998 |
|
}, |
|
{ |
|
"epoch": 1.497751124437781, |
|
"grad_norm": 1.3077408881003898, |
|
"learning_rate": 1.4769774911483686e-06, |
|
"loss": 0.1553, |
|
"step": 999 |
|
}, |
|
{ |
|
"epoch": 1.4992503748125938, |
|
"grad_norm": 1.3585683981092074, |
|
"learning_rate": 1.4686316653541377e-06, |
|
"loss": 0.1367, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.4992503748125938, |
|
"eval_loss": 0.3032159209251404, |
|
"eval_runtime": 9.5349, |
|
"eval_samples_per_second": 5.663, |
|
"eval_steps_per_second": 1.468, |
|
"step": 1000 |
|
}, |
|
{ |
|
"epoch": 1.5007496251874062, |
|
"grad_norm": 1.2661008032372718, |
|
"learning_rate": 1.4603054248903752e-06, |
|
"loss": 0.1626, |
|
"step": 1001 |
|
}, |
|
{ |
|
"epoch": 1.502248875562219, |
|
"grad_norm": 1.2527278060860814, |
|
"learning_rate": 1.4519988159352665e-06, |
|
"loss": 0.1285, |
|
"step": 1002 |
|
}, |
|
{ |
|
"epoch": 1.5037481259370313, |
|
"grad_norm": 1.421893532069344, |
|
"learning_rate": 1.4437118845581138e-06, |
|
"loss": 0.1347, |
|
"step": 1003 |
|
}, |
|
{ |
|
"epoch": 1.5052473763118441, |
|
"grad_norm": 1.2658713398673795, |
|
"learning_rate": 1.4354446767190873e-06, |
|
"loss": 0.1332, |
|
"step": 1004 |
|
}, |
|
{ |
|
"epoch": 1.5067466266866567, |
|
"grad_norm": 1.4000848731838256, |
|
"learning_rate": 1.4271972382689685e-06, |
|
"loss": 0.1238, |
|
"step": 1005 |
|
}, |
|
{ |
|
"epoch": 1.5082458770614693, |
|
"grad_norm": 1.2285414640269432, |
|
"learning_rate": 1.4189696149488956e-06, |
|
"loss": 0.1363, |
|
"step": 1006 |
|
}, |
|
{ |
|
"epoch": 1.5097451274362819, |
|
"grad_norm": 1.2773344151550985, |
|
"learning_rate": 1.4107618523901101e-06, |
|
"loss": 0.106, |
|
"step": 1007 |
|
}, |
|
{ |
|
"epoch": 1.5112443778110944, |
|
"grad_norm": 1.2601199100453082, |
|
"learning_rate": 1.4025739961137043e-06, |
|
"loss": 0.1426, |
|
"step": 1008 |
|
}, |
|
{ |
|
"epoch": 1.512743628185907, |
|
"grad_norm": 1.3727548879157994, |
|
"learning_rate": 1.394406091530367e-06, |
|
"loss": 0.1121, |
|
"step": 1009 |
|
}, |
|
{ |
|
"epoch": 1.5142428785607196, |
|
"grad_norm": 1.608607894401084, |
|
"learning_rate": 1.3862581839401346e-06, |
|
"loss": 0.1828, |
|
"step": 1010 |
|
}, |
|
{ |
|
"epoch": 1.5157421289355324, |
|
"grad_norm": 1.124663845902527, |
|
"learning_rate": 1.3781303185321377e-06, |
|
"loss": 0.0926, |
|
"step": 1011 |
|
}, |
|
{ |
|
"epoch": 1.5172413793103448, |
|
"grad_norm": 1.4143041849227191, |
|
"learning_rate": 1.370022540384347e-06, |
|
"loss": 0.1198, |
|
"step": 1012 |
|
}, |
|
{ |
|
"epoch": 1.5187406296851576, |
|
"grad_norm": 1.465343721555682, |
|
"learning_rate": 1.3619348944633331e-06, |
|
"loss": 0.13, |
|
"step": 1013 |
|
}, |
|
{ |
|
"epoch": 1.52023988005997, |
|
"grad_norm": 1.2447895534989184, |
|
"learning_rate": 1.3538674256240087e-06, |
|
"loss": 0.1598, |
|
"step": 1014 |
|
}, |
|
{ |
|
"epoch": 1.5217391304347827, |
|
"grad_norm": 1.4549885776820828, |
|
"learning_rate": 1.3458201786093795e-06, |
|
"loss": 0.1311, |
|
"step": 1015 |
|
}, |
|
{ |
|
"epoch": 1.523238380809595, |
|
"grad_norm": 1.6439292468977473, |
|
"learning_rate": 1.3377931980503055e-06, |
|
"loss": 0.2007, |
|
"step": 1016 |
|
}, |
|
{ |
|
"epoch": 1.5247376311844079, |
|
"grad_norm": 0.962127176393376, |
|
"learning_rate": 1.3297865284652417e-06, |
|
"loss": 0.1067, |
|
"step": 1017 |
|
}, |
|
{ |
|
"epoch": 1.5262368815592204, |
|
"grad_norm": 1.3137271098928553, |
|
"learning_rate": 1.3218002142599973e-06, |
|
"loss": 0.1387, |
|
"step": 1018 |
|
}, |
|
{ |
|
"epoch": 1.527736131934033, |
|
"grad_norm": 1.251568110368441, |
|
"learning_rate": 1.3138342997274883e-06, |
|
"loss": 0.1381, |
|
"step": 1019 |
|
}, |
|
{ |
|
"epoch": 1.5292353823088456, |
|
"grad_norm": 1.133675448250836, |
|
"learning_rate": 1.3058888290474937e-06, |
|
"loss": 0.1027, |
|
"step": 1020 |
|
}, |
|
{ |
|
"epoch": 1.5307346326836582, |
|
"grad_norm": 1.427729760250351, |
|
"learning_rate": 1.2979638462864069e-06, |
|
"loss": 0.1216, |
|
"step": 1021 |
|
}, |
|
{ |
|
"epoch": 1.5322338830584707, |
|
"grad_norm": 1.4613681322730085, |
|
"learning_rate": 1.2900593953969947e-06, |
|
"loss": 0.1243, |
|
"step": 1022 |
|
}, |
|
{ |
|
"epoch": 1.5337331334332833, |
|
"grad_norm": 1.3020393066845384, |
|
"learning_rate": 1.2821755202181503e-06, |
|
"loss": 0.1196, |
|
"step": 1023 |
|
}, |
|
{ |
|
"epoch": 1.535232383808096, |
|
"grad_norm": 1.4126438396486742, |
|
"learning_rate": 1.2743122644746536e-06, |
|
"loss": 0.1717, |
|
"step": 1024 |
|
}, |
|
{ |
|
"epoch": 1.5367316341829085, |
|
"grad_norm": 1.3487856526588313, |
|
"learning_rate": 1.266469671776926e-06, |
|
"loss": 0.1239, |
|
"step": 1025 |
|
}, |
|
{ |
|
"epoch": 1.5382308845577213, |
|
"grad_norm": 1.6582962090055022, |
|
"learning_rate": 1.2586477856207902e-06, |
|
"loss": 0.1776, |
|
"step": 1026 |
|
}, |
|
{ |
|
"epoch": 1.5397301349325336, |
|
"grad_norm": 1.3304439720565826, |
|
"learning_rate": 1.2508466493872273e-06, |
|
"loss": 0.1339, |
|
"step": 1027 |
|
}, |
|
{ |
|
"epoch": 1.5412293853073464, |
|
"grad_norm": 1.3715694584706903, |
|
"learning_rate": 1.2430663063421388e-06, |
|
"loss": 0.1274, |
|
"step": 1028 |
|
}, |
|
{ |
|
"epoch": 1.5427286356821588, |
|
"grad_norm": 1.352084074067662, |
|
"learning_rate": 1.2353067996361034e-06, |
|
"loss": 0.1363, |
|
"step": 1029 |
|
}, |
|
{ |
|
"epoch": 1.5442278860569716, |
|
"grad_norm": 1.3830141716551954, |
|
"learning_rate": 1.2275681723041406e-06, |
|
"loss": 0.1137, |
|
"step": 1030 |
|
}, |
|
{ |
|
"epoch": 1.545727136431784, |
|
"grad_norm": 1.2865850156180865, |
|
"learning_rate": 1.2198504672654694e-06, |
|
"loss": 0.1134, |
|
"step": 1031 |
|
}, |
|
{ |
|
"epoch": 1.5472263868065967, |
|
"grad_norm": 1.0607156398556805, |
|
"learning_rate": 1.212153727323273e-06, |
|
"loss": 0.1133, |
|
"step": 1032 |
|
}, |
|
{ |
|
"epoch": 1.5487256371814093, |
|
"grad_norm": 1.1884021672187197, |
|
"learning_rate": 1.2044779951644586e-06, |
|
"loss": 0.1321, |
|
"step": 1033 |
|
}, |
|
{ |
|
"epoch": 1.550224887556222, |
|
"grad_norm": 1.5194385658530858, |
|
"learning_rate": 1.1968233133594243e-06, |
|
"loss": 0.1243, |
|
"step": 1034 |
|
}, |
|
{ |
|
"epoch": 1.5517241379310345, |
|
"grad_norm": 1.1891807656263866, |
|
"learning_rate": 1.1891897243618184e-06, |
|
"loss": 0.122, |
|
"step": 1035 |
|
}, |
|
{ |
|
"epoch": 1.553223388305847, |
|
"grad_norm": 1.4329929737293103, |
|
"learning_rate": 1.1815772705083072e-06, |
|
"loss": 0.116, |
|
"step": 1036 |
|
}, |
|
{ |
|
"epoch": 1.5547226386806596, |
|
"grad_norm": 1.3786502166630517, |
|
"learning_rate": 1.17398599401834e-06, |
|
"loss": 0.1462, |
|
"step": 1037 |
|
}, |
|
{ |
|
"epoch": 1.5562218890554722, |
|
"grad_norm": 1.2776143515250926, |
|
"learning_rate": 1.1664159369939137e-06, |
|
"loss": 0.1343, |
|
"step": 1038 |
|
}, |
|
{ |
|
"epoch": 1.557721139430285, |
|
"grad_norm": 1.4095443161123418, |
|
"learning_rate": 1.1588671414193397e-06, |
|
"loss": 0.14, |
|
"step": 1039 |
|
}, |
|
{ |
|
"epoch": 1.5592203898050974, |
|
"grad_norm": 1.4162615793126077, |
|
"learning_rate": 1.1513396491610113e-06, |
|
"loss": 0.1125, |
|
"step": 1040 |
|
}, |
|
{ |
|
"epoch": 1.5607196401799102, |
|
"grad_norm": 1.2350782947306302, |
|
"learning_rate": 1.1438335019671715e-06, |
|
"loss": 0.0998, |
|
"step": 1041 |
|
}, |
|
{ |
|
"epoch": 1.5622188905547225, |
|
"grad_norm": 1.4531486845257424, |
|
"learning_rate": 1.1363487414676805e-06, |
|
"loss": 0.13, |
|
"step": 1042 |
|
}, |
|
{ |
|
"epoch": 1.5637181409295353, |
|
"grad_norm": 1.4173618779997708, |
|
"learning_rate": 1.128885409173789e-06, |
|
"loss": 0.1394, |
|
"step": 1043 |
|
}, |
|
{ |
|
"epoch": 1.5652173913043477, |
|
"grad_norm": 1.2755844414820892, |
|
"learning_rate": 1.1214435464779006e-06, |
|
"loss": 0.1396, |
|
"step": 1044 |
|
}, |
|
{ |
|
"epoch": 1.5667166416791605, |
|
"grad_norm": 1.3708803915954113, |
|
"learning_rate": 1.1140231946533486e-06, |
|
"loss": 0.1215, |
|
"step": 1045 |
|
}, |
|
{ |
|
"epoch": 1.568215892053973, |
|
"grad_norm": 1.381903217425136, |
|
"learning_rate": 1.1066243948541638e-06, |
|
"loss": 0.132, |
|
"step": 1046 |
|
}, |
|
{ |
|
"epoch": 1.5697151424287856, |
|
"grad_norm": 1.3791377921791983, |
|
"learning_rate": 1.0992471881148497e-06, |
|
"loss": 0.1597, |
|
"step": 1047 |
|
}, |
|
{ |
|
"epoch": 1.5712143928035982, |
|
"grad_norm": 1.4041721910528315, |
|
"learning_rate": 1.091891615350147e-06, |
|
"loss": 0.1397, |
|
"step": 1048 |
|
}, |
|
{ |
|
"epoch": 1.5727136431784108, |
|
"grad_norm": 1.3682651227412774, |
|
"learning_rate": 1.0845577173548172e-06, |
|
"loss": 0.136, |
|
"step": 1049 |
|
}, |
|
{ |
|
"epoch": 1.5742128935532234, |
|
"grad_norm": 1.2250049124228923, |
|
"learning_rate": 1.07724553480341e-06, |
|
"loss": 0.121, |
|
"step": 1050 |
|
}, |
|
{ |
|
"epoch": 1.575712143928036, |
|
"grad_norm": 1.3605511160908002, |
|
"learning_rate": 1.0699551082500387e-06, |
|
"loss": 0.104, |
|
"step": 1051 |
|
}, |
|
{ |
|
"epoch": 1.5772113943028487, |
|
"grad_norm": 1.481884448847055, |
|
"learning_rate": 1.0626864781281553e-06, |
|
"loss": 0.1182, |
|
"step": 1052 |
|
}, |
|
{ |
|
"epoch": 1.578710644677661, |
|
"grad_norm": 1.3519687795217226, |
|
"learning_rate": 1.0554396847503272e-06, |
|
"loss": 0.1148, |
|
"step": 1053 |
|
}, |
|
{ |
|
"epoch": 1.580209895052474, |
|
"grad_norm": 1.3696262077208985, |
|
"learning_rate": 1.0482147683080125e-06, |
|
"loss": 0.1666, |
|
"step": 1054 |
|
}, |
|
{ |
|
"epoch": 1.5817091454272862, |
|
"grad_norm": 1.205147099427604, |
|
"learning_rate": 1.0410117688713366e-06, |
|
"loss": 0.1447, |
|
"step": 1055 |
|
}, |
|
{ |
|
"epoch": 1.583208395802099, |
|
"grad_norm": 1.4584035383495035, |
|
"learning_rate": 1.0338307263888748e-06, |
|
"loss": 0.126, |
|
"step": 1056 |
|
}, |
|
{ |
|
"epoch": 1.5847076461769114, |
|
"grad_norm": 1.2806511365390885, |
|
"learning_rate": 1.0266716806874227e-06, |
|
"loss": 0.1706, |
|
"step": 1057 |
|
}, |
|
{ |
|
"epoch": 1.5862068965517242, |
|
"grad_norm": 1.2292034261665328, |
|
"learning_rate": 1.0195346714717813e-06, |
|
"loss": 0.1351, |
|
"step": 1058 |
|
}, |
|
{ |
|
"epoch": 1.5877061469265368, |
|
"grad_norm": 1.2746750602503882, |
|
"learning_rate": 1.0124197383245344e-06, |
|
"loss": 0.1262, |
|
"step": 1059 |
|
}, |
|
{ |
|
"epoch": 1.5892053973013494, |
|
"grad_norm": 1.1466214924152243, |
|
"learning_rate": 1.0053269207058298e-06, |
|
"loss": 0.1016, |
|
"step": 1060 |
|
}, |
|
{
"epoch": 1.590704647676162,
"grad_norm": 1.2990048736912905,
"learning_rate": 9.982562579531607e-07,
"loss": 0.1346,
"step": 1061
},
{
"epoch": 1.5922038980509745,
"grad_norm": 1.2500168676615038,
"learning_rate": 9.912077892811473e-07,
"loss": 0.086,
"step": 1062
},
{
"epoch": 1.593703148425787,
"grad_norm": 1.3221455765209738,
"learning_rate": 9.841815537813177e-07,
"loss": 0.1057,
"step": 1063
},
{
"epoch": 1.5952023988005997,
"grad_norm": 1.6581000106094772,
"learning_rate": 9.77177590421895e-07,
"loss": 0.1193,
"step": 1064
},
{
"epoch": 1.5967016491754122,
"grad_norm": 1.5238839036341836,
"learning_rate": 9.70195938047576e-07,
"loss": 0.181,
"step": 1065
},
{
"epoch": 1.5982008995502248,
"grad_norm": 1.369641755583762,
"learning_rate": 9.63236635379321e-07,
"loss": 0.1193,
"step": 1066
},
{
"epoch": 1.5997001499250376,
"grad_norm": 1.5315504387546468,
"learning_rate": 9.562997210141355e-07,
"loss": 0.1229,
"step": 1067
},
{
"epoch": 1.60119940029985,
"grad_norm": 1.395503826008592,
"learning_rate": 9.49385233424856e-07,
"loss": 0.1275,
"step": 1068
},
{
"epoch": 1.6026986506746628,
"grad_norm": 1.2596162129099466,
"learning_rate": 9.424932109599372e-07,
"loss": 0.1301,
"step": 1069
},
{
"epoch": 1.6041979010494751,
"grad_norm": 1.3401982239935306,
"learning_rate": 9.356236918432454e-07,
"loss": 0.1389,
"step": 1070
},
{
"epoch": 1.605697151424288,
"grad_norm": 1.3134889012190543,
"learning_rate": 9.287767141738352e-07,
"loss": 0.1498,
"step": 1071
},
{
"epoch": 1.6071964017991005,
"grad_norm": 1.283193424293543,
"learning_rate": 9.21952315925746e-07,
"loss": 0.1265,
"step": 1072
},
{
"epoch": 1.608695652173913,
"grad_norm": 1.465380366009575,
"learning_rate": 9.151505349477901e-07,
"loss": 0.1305,
"step": 1073
},
{
"epoch": 1.6101949025487257,
"grad_norm": 1.1719550376703987,
"learning_rate": 9.08371408963341e-07,
"loss": 0.1288,
"step": 1074
},
{
"epoch": 1.6116941529235382,
"grad_norm": 1.3625101567419093,
"learning_rate": 9.016149755701259e-07,
"loss": 0.1471,
"step": 1075
},
{
"epoch": 1.6131934032983508,
"grad_norm": 1.2156667519976243,
"learning_rate": 8.948812722400157e-07,
"loss": 0.1249,
"step": 1076
},
{
"epoch": 1.6146926536731634,
"grad_norm": 1.278558660591185,
"learning_rate": 8.881703363188199e-07,
"loss": 0.1266,
"step": 1077
},
{
"epoch": 1.616191904047976,
"grad_norm": 1.0173628486181656,
"learning_rate": 8.814822050260758e-07,
"loss": 0.1079,
"step": 1078
},
{
"epoch": 1.6176911544227885,
"grad_norm": 1.101628767399371,
"learning_rate": 8.748169154548448e-07,
"loss": 0.0958,
"step": 1079
},
{
"epoch": 1.6191904047976013,
"grad_norm": 1.182084523674803,
"learning_rate": 8.681745045715045e-07,
"loss": 0.1461,
"step": 1080
},
{
"epoch": 1.6206896551724137,
"grad_norm": 1.0002020828857712,
"learning_rate": 8.615550092155478e-07,
"loss": 0.1115,
"step": 1081
},
{
"epoch": 1.6221889055472265,
"grad_norm": 1.4678927852677748,
"learning_rate": 8.549584660993726e-07,
"loss": 0.1203,
"step": 1082
},
{
"epoch": 1.6236881559220389,
"grad_norm": 1.6501694564018676,
"learning_rate": 8.483849118080828e-07,
"loss": 0.1274,
"step": 1083
},
{
"epoch": 1.6251874062968517,
"grad_norm": 1.267755170361984,
"learning_rate": 8.418343827992842e-07,
"loss": 0.1276,
"step": 1084
},
{
"epoch": 1.626686656671664,
"grad_norm": 1.272826311386756,
"learning_rate": 8.353069154028814e-07,
"loss": 0.115,
"step": 1085
},
{
"epoch": 1.6281859070464768,
"grad_norm": 1.4121584145458737,
"learning_rate": 8.28802545820877e-07,
"loss": 0.1449,
"step": 1086
},
{
"epoch": 1.6296851574212894,
"grad_norm": 1.7533318445061865,
"learning_rate": 8.223213101271709e-07,
"loss": 0.1563,
"step": 1087
},
{
"epoch": 1.631184407796102,
"grad_norm": 1.3578821404795234,
"learning_rate": 8.158632442673603e-07,
"loss": 0.1418,
"step": 1088
},
{
"epoch": 1.6326836581709145,
"grad_norm": 1.3372173188024898,
"learning_rate": 8.094283840585398e-07,
"loss": 0.16,
"step": 1089
},
{
"epoch": 1.6341829085457271,
"grad_norm": 1.076968453210384,
"learning_rate": 8.03016765189103e-07,
"loss": 0.1023,
"step": 1090
},
{
"epoch": 1.6356821589205397,
"grad_norm": 1.2173029006222529,
"learning_rate": 7.966284232185451e-07,
"loss": 0.1052,
"step": 1091
},
{
"epoch": 1.6371814092953523,
"grad_norm": 1.4378038164667921,
"learning_rate": 7.902633935772647e-07,
"loss": 0.1468,
"step": 1092
},
{
"epoch": 1.638680659670165,
"grad_norm": 1.513512766949661,
"learning_rate": 7.839217115663683e-07,
"loss": 0.1228,
"step": 1093
},
{
"epoch": 1.6401799100449774,
"grad_norm": 1.6017858779664627,
"learning_rate": 7.776034123574738e-07,
"loss": 0.1795,
"step": 1094
},
{
"epoch": 1.6416791604197902,
"grad_norm": 1.353882475612432,
"learning_rate": 7.713085309925156e-07,
"loss": 0.1381,
"step": 1095
},
{
"epoch": 1.6431784107946026,
"grad_norm": 1.2860764537774483,
"learning_rate": 7.650371023835495e-07,
"loss": 0.1109,
"step": 1096
},
{
"epoch": 1.6446776611694154,
"grad_norm": 1.371159100025601,
"learning_rate": 7.587891613125631e-07,
"loss": 0.155,
"step": 1097
},
{
"epoch": 1.6461769115442277,
"grad_norm": 1.3046289248555103,
"learning_rate": 7.525647424312766e-07,
"loss": 0.116,
"step": 1098
},
{
"epoch": 1.6476761619190405,
"grad_norm": 1.4562702726888381,
"learning_rate": 7.46363880260954e-07,
"loss": 0.1475,
"step": 1099
},
{
"epoch": 1.6491754122938531,
"grad_norm": 1.4285731527144614,
"learning_rate": 7.401866091922133e-07,
"loss": 0.1265,
"step": 1100
},
{
"epoch": 1.6506746626686657,
"grad_norm": 1.1683215451669453,
"learning_rate": 7.340329634848309e-07,
"loss": 0.15,
"step": 1101
},
{
"epoch": 1.6521739130434783,
"grad_norm": 1.320313350640812,
"learning_rate": 7.279029772675572e-07,
"loss": 0.1448,
"step": 1102
},
{
"epoch": 1.6536731634182908,
"grad_norm": 1.188034434280782,
"learning_rate": 7.217966845379243e-07,
"loss": 0.1052,
"step": 1103
},
{
"epoch": 1.6551724137931034,
"grad_norm": 1.3829494559931743,
"learning_rate": 7.157141191620548e-07,
"loss": 0.156,
"step": 1104
},
{
"epoch": 1.656671664167916,
"grad_norm": 1.2764316757400505,
"learning_rate": 7.096553148744806e-07,
"loss": 0.1318,
"step": 1105
},
{
"epoch": 1.6581709145427288,
"grad_norm": 1.400209913228982,
"learning_rate": 7.036203052779506e-07,
"loss": 0.1435,
"step": 1106
},
{
"epoch": 1.6596701649175412,
"grad_norm": 1.3513499085792917,
"learning_rate": 6.97609123843247e-07,
"loss": 0.1202,
"step": 1107
},
{
"epoch": 1.661169415292354,
"grad_norm": 1.3267222518322797,
"learning_rate": 6.916218039089961e-07,
"loss": 0.1279,
"step": 1108
},
{
"epoch": 1.6626686656671663,
"grad_norm": 1.4957467373452389,
"learning_rate": 6.856583786814891e-07,
"loss": 0.1496,
"step": 1109
},
{
"epoch": 1.6641679160419791,
"grad_norm": 1.2849640190193239,
"learning_rate": 6.797188812344907e-07,
"loss": 0.1854,
"step": 1110
},
{
"epoch": 1.6656671664167915,
"grad_norm": 1.2863856936453388,
"learning_rate": 6.738033445090653e-07,
"loss": 0.1224,
"step": 1111
},
{
"epoch": 1.6671664167916043,
"grad_norm": 1.116065369198763,
"learning_rate": 6.67911801313384e-07,
"loss": 0.0949,
"step": 1112
},
{
"epoch": 1.6686656671664168,
"grad_norm": 1.2505299798028895,
"learning_rate": 6.620442843225483e-07,
"loss": 0.141,
"step": 1113
},
{
"epoch": 1.6701649175412294,
"grad_norm": 1.3823124383673955,
"learning_rate": 6.562008260784092e-07,
"loss": 0.0966,
"step": 1114
},
{
"epoch": 1.671664167916042,
"grad_norm": 1.4570716508838586,
"learning_rate": 6.503814589893836e-07,
"loss": 0.1669,
"step": 1115
},
{
"epoch": 1.6731634182908546,
"grad_norm": 1.3226759106524308,
"learning_rate": 6.445862153302784e-07,
"loss": 0.1132,
"step": 1116
},
{
"epoch": 1.6746626686656672,
"grad_norm": 1.2470088426875263,
"learning_rate": 6.388151272421078e-07,
"loss": 0.1208,
"step": 1117
},
{
"epoch": 1.6761619190404797,
"grad_norm": 1.125981550475647,
"learning_rate": 6.330682267319177e-07,
"loss": 0.1513,
"step": 1118
},
{
"epoch": 1.6776611694152923,
"grad_norm": 1.3156942222993357,
"learning_rate": 6.273455456726074e-07,
"loss": 0.1332,
"step": 1119
},
{
"epoch": 1.6791604197901049,
"grad_norm": 1.2148234797505764,
"learning_rate": 6.216471158027515e-07,
"loss": 0.1642,
"step": 1120
},
{
"epoch": 1.6806596701649177,
"grad_norm": 1.2628856491538,
"learning_rate": 6.159729687264254e-07,
"loss": 0.1057,
"step": 1121
},
{
"epoch": 1.68215892053973,
"grad_norm": 1.347516602837082,
"learning_rate": 6.103231359130308e-07,
"loss": 0.1365,
"step": 1122
},
{
"epoch": 1.6836581709145428,
"grad_norm": 1.2777682806438089,
"learning_rate": 6.046976486971201e-07,
"loss": 0.133,
"step": 1123
},
{
"epoch": 1.6851574212893552,
"grad_norm": 1.3288086131013543,
"learning_rate": 5.990965382782177e-07,
"loss": 0.0921,
"step": 1124
},
{
"epoch": 1.686656671664168,
"grad_norm": 1.3733724757973855,
"learning_rate": 5.935198357206595e-07,
"loss": 0.1308,
"step": 1125
},
{
"epoch": 1.6881559220389803,
"grad_norm": 1.3339521765297506,
"learning_rate": 5.879675719534078e-07,
"loss": 0.1465,
"step": 1126
},
{
"epoch": 1.6896551724137931,
"grad_norm": 1.292382820504901,
"learning_rate": 5.824397777698859e-07,
"loss": 0.125,
"step": 1127
},
{
"epoch": 1.6911544227886057,
"grad_norm": 1.3236571388254093,
"learning_rate": 5.769364838278063e-07,
"loss": 0.0991,
"step": 1128
},
{
"epoch": 1.6926536731634183,
"grad_norm": 1.5554875035316833,
"learning_rate": 5.714577206490018e-07,
"loss": 0.1242,
"step": 1129
},
{
"epoch": 1.6941529235382309,
"grad_norm": 1.3881173923805235,
"learning_rate": 5.660035186192531e-07,
"loss": 0.1407,
"step": 1130
},
{
"epoch": 1.6956521739130435,
"grad_norm": 1.313045644845594,
"learning_rate": 5.60573907988124e-07,
"loss": 0.127,
"step": 1131
},
{
"epoch": 1.697151424287856,
"grad_norm": 1.2847529308733094,
"learning_rate": 5.551689188687909e-07,
"loss": 0.1258,
"step": 1132
},
{
"epoch": 1.6986506746626686,
"grad_norm": 1.1567921694342085,
"learning_rate": 5.497885812378772e-07,
"loss": 0.1231,
"step": 1133
},
{
"epoch": 1.7001499250374814,
"grad_norm": 1.0569442318571898,
"learning_rate": 5.444329249352859e-07,
"loss": 0.1302,
"step": 1134
},
{
"epoch": 1.7016491754122938,
"grad_norm": 1.4537474571069005,
"learning_rate": 5.391019796640362e-07,
"loss": 0.0996,
"step": 1135
},
{
"epoch": 1.7031484257871066,
"grad_norm": 1.272211223666709,
"learning_rate": 5.337957749900958e-07,
"loss": 0.1307,
"step": 1136
},
{
"epoch": 1.704647676161919,
"grad_norm": 1.5860743854589014,
"learning_rate": 5.285143403422188e-07,
"loss": 0.176,
"step": 1137
},
{
"epoch": 1.7061469265367317,
"grad_norm": 1.2536957174595447,
"learning_rate": 5.23257705011786e-07,
"loss": 0.1407,
"step": 1138
},
{
"epoch": 1.707646176911544,
"grad_norm": 1.3218941411911886,
"learning_rate": 5.18025898152631e-07,
"loss": 0.1323,
"step": 1139
},
{
"epoch": 1.7091454272863569,
"grad_norm": 1.099502884099189,
"learning_rate": 5.128189487808927e-07,
"loss": 0.0775,
"step": 1140
},
{
"epoch": 1.7106446776611695,
"grad_norm": 1.3524965441592864,
"learning_rate": 5.076368857748454e-07,
"loss": 0.1393,
"step": 1141
},
{
"epoch": 1.712143928035982,
"grad_norm": 1.1455115884594511,
"learning_rate": 5.024797378747414e-07,
"loss": 0.1277,
"step": 1142
},
{
"epoch": 1.7136431784107946,
"grad_norm": 1.4565444265315184,
"learning_rate": 4.973475336826506e-07,
"loss": 0.1618,
"step": 1143
},
{
"epoch": 1.7151424287856072,
"grad_norm": 1.1728745490338242,
"learning_rate": 4.922403016623034e-07,
"loss": 0.1111,
"step": 1144
},
{
"epoch": 1.7166416791604198,
"grad_norm": 1.2809261476992506,
"learning_rate": 4.871580701389316e-07,
"loss": 0.1023,
"step": 1145
},
{
"epoch": 1.7181409295352323,
"grad_norm": 1.304906550454027,
"learning_rate": 4.821008672991118e-07,
"loss": 0.1384,
"step": 1146
},
{
"epoch": 1.7196401799100451,
"grad_norm": 1.4735356913064952,
"learning_rate": 4.770687211906089e-07,
"loss": 0.1393,
"step": 1147
},
{
"epoch": 1.7211394302848575,
"grad_norm": 1.3026231966975248,
"learning_rate": 4.720616597222205e-07,
"loss": 0.1079,
"step": 1148
},
{
"epoch": 1.7226386806596703,
"grad_norm": 1.1975061796660293,
"learning_rate": 4.6707971066362324e-07,
"loss": 0.1478,
"step": 1149
},
{
"epoch": 1.7241379310344827,
"grad_norm": 1.3052713668217368,
"learning_rate": 4.6212290164521554e-07,
"loss": 0.0999,
"step": 1150
},
{
"epoch": 1.7256371814092955,
"grad_norm": 1.2829200350492116,
"learning_rate": 4.5719126015796757e-07,
"loss": 0.1134,
"step": 1151
},
{
"epoch": 1.7271364317841078,
"grad_norm": 1.1951762394742782,
"learning_rate": 4.522848135532698e-07,
"loss": 0.1219,
"step": 1152
},
{
"epoch": 1.7286356821589206,
"grad_norm": 1.3374337951755992,
"learning_rate": 4.474035890427769e-07,
"loss": 0.1258,
"step": 1153
},
{
"epoch": 1.7301349325337332,
"grad_norm": 1.32898947924815,
"learning_rate": 4.4254761369825984e-07,
"loss": 0.1345,
"step": 1154
},
{
"epoch": 1.7316341829085458,
"grad_norm": 1.070105003018245,
"learning_rate": 4.377169144514554e-07,
"loss": 0.1047,
"step": 1155
},
{
"epoch": 1.7331334332833583,
"grad_norm": 1.1682636360227918,
"learning_rate": 4.329115180939164e-07,
"loss": 0.1316,
"step": 1156
},
{
"epoch": 1.734632683658171,
"grad_norm": 1.0730222956223425,
"learning_rate": 4.281314512768625e-07,
"loss": 0.0733,
"step": 1157
},
{
"epoch": 1.7361319340329835,
"grad_norm": 1.5804182103141131,
"learning_rate": 4.2337674051103504e-07,
"loss": 0.1602,
"step": 1158
},
{
"epoch": 1.737631184407796,
"grad_norm": 1.2969717611999283,
"learning_rate": 4.186474121665468e-07,
"loss": 0.1239,
"step": 1159
},
{
"epoch": 1.7391304347826086,
"grad_norm": 1.1976197818859535,
"learning_rate": 4.139434924727359e-07,
"loss": 0.0965,
"step": 1160
},
{
"epoch": 1.7406296851574212,
"grad_norm": 1.1648706115112097,
"learning_rate": 4.092650075180232e-07,
"loss": 0.1332,
"step": 1161
},
{
"epoch": 1.742128935532234,
"grad_norm": 1.1483385365618108,
"learning_rate": 4.046119832497658e-07,
"loss": 0.1285,
"step": 1162
},
{
"epoch": 1.7436281859070464,
"grad_norm": 1.3131861334486878,
"learning_rate": 3.9998444547411255e-07,
"loss": 0.1161,
"step": 1163
},
{
"epoch": 1.7451274362818592,
"grad_norm": 1.1768484897582416,
"learning_rate": 3.9538241985586144e-07,
"loss": 0.0885,
"step": 1164
},
{
"epoch": 1.7466266866566715,
"grad_norm": 1.4521716297121547,
"learning_rate": 3.908059319183194e-07,
"loss": 0.1409,
"step": 1165
},
{
"epoch": 1.7481259370314843,
"grad_norm": 1.1865511424057595,
"learning_rate": 3.8625500704315645e-07,
"loss": 0.1146,
"step": 1166
},
{
"epoch": 1.7496251874062967,
"grad_norm": 1.3202950022306146,
"learning_rate": 3.8172967047026834e-07,
"loss": 0.1349,
"step": 1167
},
{
"epoch": 1.7511244377811095,
"grad_norm": 1.1629942164700995,
"learning_rate": 3.7722994729763427e-07,
"loss": 0.1424,
"step": 1168
},
{
"epoch": 1.752623688155922,
"grad_norm": 1.6394312143624425,
"learning_rate": 3.7275586248118114e-07,
"loss": 0.1696,
"step": 1169
},
{
"epoch": 1.7541229385307346,
"grad_norm": 1.3545142591470036,
"learning_rate": 3.683074408346404e-07,
"loss": 0.123,
"step": 1170
},
{
"epoch": 1.7556221889055472,
"grad_norm": 1.3704498094678754,
"learning_rate": 3.6388470702941436e-07,
"loss": 0.1229,
"step": 1171
},
{
"epoch": 1.7571214392803598,
"grad_norm": 1.3884693498795073,
"learning_rate": 3.594876855944385e-07,
"loss": 0.1246,
"step": 1172
},
{
"epoch": 1.7586206896551724,
"grad_norm": 1.2837215251778884,
"learning_rate": 3.5511640091604293e-07,
"loss": 0.1102,
"step": 1173
},
{
"epoch": 1.760119940029985,
"grad_norm": 1.1124723835788903,
"learning_rate": 3.50770877237821e-07,
"loss": 0.1328,
"step": 1174
},
{
"epoch": 1.7616191904047978,
"grad_norm": 1.3075248966429014,
"learning_rate": 3.4645113866049187e-07,
"loss": 0.1125,
"step": 1175
},
{
"epoch": 1.76311844077961,
"grad_norm": 1.1844091810957376,
"learning_rate": 3.42157209141768e-07,
"loss": 0.1115,
"step": 1176
},
{
"epoch": 1.764617691154423,
"grad_norm": 1.3020603364870487,
"learning_rate": 3.3788911249622194e-07,
"loss": 0.1083,
"step": 1177
},
{
"epoch": 1.7661169415292353,
"grad_norm": 1.3699853063123961,
"learning_rate": 3.336468723951558e-07,
"loss": 0.1108,
"step": 1178
},
{
"epoch": 1.767616191904048,
"grad_norm": 1.318514604562699,
"learning_rate": 3.294305123664665e-07,
"loss": 0.1192,
"step": 1179
},
{
"epoch": 1.7691154422788604,
"grad_norm": 1.2849230237090068,
"learning_rate": 3.2524005579452014e-07,
"loss": 0.1462,
"step": 1180
},
{
"epoch": 1.7706146926536732,
"grad_norm": 1.362630355556451,
"learning_rate": 3.2107552592001657e-07,
"loss": 0.1347,
"step": 1181
},
{
"epoch": 1.7721139430284858,
"grad_norm": 1.0899083745180387,
"learning_rate": 3.169369458398652e-07,
"loss": 0.1022,
"step": 1182
},
{
"epoch": 1.7736131934032984,
"grad_norm": 1.1474712141157715,
"learning_rate": 3.128243385070562e-07,
"loss": 0.1482,
"step": 1183
},
{
"epoch": 1.775112443778111,
"grad_norm": 1.3327556649923809,
"learning_rate": 3.087377267305297e-07,
"loss": 0.1199,
"step": 1184
},
{
"epoch": 1.7766116941529235,
"grad_norm": 1.2030608973878787,
"learning_rate": 3.0467713317505363e-07,
"loss": 0.1154,
"step": 1185
},
{
"epoch": 1.778110944527736,
"grad_norm": 1.2289757672880781,
"learning_rate": 3.006425803610963e-07,
"loss": 0.1471,
"step": 1186
},
{
"epoch": 1.7796101949025487,
"grad_norm": 1.3848742717676306,
"learning_rate": 2.9663409066470025e-07,
"loss": 0.14,
"step": 1187
},
{
"epoch": 1.7811094452773615,
"grad_norm": 1.2995838193504912,
"learning_rate": 2.9265168631736005e-07,
"loss": 0.1807,
"step": 1188
},
{
"epoch": 1.7826086956521738,
"grad_norm": 1.2992697361485803,
"learning_rate": 2.88695389405898e-07,
"loss": 0.1135,
"step": 1189
},
{
"epoch": 1.7841079460269866,
"grad_norm": 1.2182466150680802,
"learning_rate": 2.8476522187234177e-07,
"loss": 0.1033,
"step": 1190
},
{
"epoch": 1.785607196401799,
"grad_norm": 1.3597433992401198,
"learning_rate": 2.808612055138038e-07,
"loss": 0.1485,
"step": 1191
},
{
"epoch": 1.7871064467766118,
"grad_norm": 1.1444722566417072,
"learning_rate": 2.76983361982357e-07,
"loss": 0.084,
"step": 1192
},
{
"epoch": 1.7886056971514241,
"grad_norm": 1.3140225303819648,
"learning_rate": 2.731317127849209e-07,
"loss": 0.1319,
"step": 1193
},
{
"epoch": 1.790104947526237,
"grad_norm": 1.216553485628249,
"learning_rate": 2.693062792831358e-07,
"loss": 0.1359,
"step": 1194
},
{
"epoch": 1.7916041979010495,
"grad_norm": 1.1938471721227324,
"learning_rate": 2.655070826932471e-07,
"loss": 0.138,
"step": 1195
},
{
"epoch": 1.793103448275862,
"grad_norm": 1.4341104020897726,
"learning_rate": 2.617341440859883e-07,
"loss": 0.1279,
"step": 1196
},
{
"epoch": 1.7946026986506747,
"grad_norm": 1.125892799179141,
"learning_rate": 2.5798748438646326e-07,
"loss": 0.1115,
"step": 1197
},
{
"epoch": 1.7961019490254873,
"grad_norm": 1.2093194389466178,
"learning_rate": 2.5426712437403134e-07,
"loss": 0.1551,
"step": 1198
},
{
"epoch": 1.7976011994002998,
"grad_norm": 1.3263745745983149,
"learning_rate": 2.5057308468218913e-07,
"loss": 0.1344,
"step": 1199
},
{
"epoch": 1.7991004497751124,
"grad_norm": 1.0679960865391493,
"learning_rate": 2.4690538579845933e-07,
"loss": 0.0979,
"step": 1200
},
{
"epoch": 1.7991004497751124,
"eval_loss": 0.30179548263549805,
"eval_runtime": 9.527,
"eval_samples_per_second": 5.668,
"eval_steps_per_second": 1.47,
"step": 1200
},
{
"epoch": 1.800599700149925,
"grad_norm": 1.2548465774808946,
"learning_rate": 2.432640480642756e-07,
"loss": 0.141,
"step": 1201
},
{
"epoch": 1.8020989505247376,
"grad_norm": 1.3193477541908551,
"learning_rate": 2.396490916748706e-07,
"loss": 0.1287,
"step": 1202
},
{
"epoch": 1.8035982008995504,
"grad_norm": 1.6176845762647734,
"learning_rate": 2.360605366791624e-07,
"loss": 0.1889,
"step": 1203
},
{
"epoch": 1.8050974512743627,
"grad_norm": 1.4269444651213539,
"learning_rate": 2.32498402979644e-07,
"loss": 0.1881,
"step": 1204
},
{
"epoch": 1.8065967016491755,
"grad_norm": 1.2071099836238353,
"learning_rate": 2.2896271033227392e-07,
"loss": 0.131,
"step": 1205
},
{
"epoch": 1.8080959520239879,
"grad_norm": 1.1514881094574225,
"learning_rate": 2.2545347834636632e-07,
"loss": 0.1304,
"step": 1206
},
{
"epoch": 1.8095952023988007,
"grad_norm": 1.3177309667368113,
"learning_rate": 2.219707264844806e-07,
"loss": 0.1271,
"step": 1207
},
{
"epoch": 1.811094452773613,
"grad_norm": 1.0853052450972838,
"learning_rate": 2.1851447406231573e-07,
"loss": 0.0952,
"step": 1208
},
{
"epoch": 1.8125937031484258,
"grad_norm": 1.2367474251829673,
"learning_rate": 2.1508474024860171e-07,
"loss": 0.1622,
"step": 1209
},
{
"epoch": 1.8140929535232384,
"grad_norm": 1.2014308008359469,
"learning_rate": 2.1168154406499275e-07,
"loss": 0.0989,
"step": 1210
},
{
"epoch": 1.815592203898051,
"grad_norm": 1.3664210948620326,
"learning_rate": 2.0830490438596418e-07,
"loss": 0.0989,
"step": 1211
},
{
"epoch": 1.8170914542728636,
"grad_norm": 1.2102328949823467,
"learning_rate": 2.0495483993870578e-07,
"loss": 0.1065,
"step": 1212
},
{
"epoch": 1.8185907046476761,
"grad_norm": 1.2603909868780268,
"learning_rate": 2.0163136930301696e-07,
"loss": 0.1115,
"step": 1213
},
{
"epoch": 1.8200899550224887,
"grad_norm": 1.456216277877769,
"learning_rate": 1.9833451091120727e-07,
"loss": 0.1177,
"step": 1214
},
{
"epoch": 1.8215892053973013,
"grad_norm": 1.2505048564143828,
"learning_rate": 1.9506428304799095e-07,
"loss": 0.141,
"step": 1215
},
{
"epoch": 1.823088455772114,
"grad_norm": 1.3694848424018637,
"learning_rate": 1.9182070385038555e-07,
"loss": 0.1405,
"step": 1216
},
{
"epoch": 1.8245877061469264,
"grad_norm": 1.048987366714346,
"learning_rate": 1.886037913076144e-07,
"loss": 0.1086,
"step": 1217
},
{
"epoch": 1.8260869565217392,
"grad_norm": 1.2653363492592689,
"learning_rate": 1.8541356326100436e-07,
"loss": 0.1382,
"step": 1218
},
{
"epoch": 1.8275862068965516,
"grad_norm": 1.4400532322750428,
"learning_rate": 1.8225003740388546e-07,
"loss": 0.1255,
"step": 1219
},
{
"epoch": 1.8290854572713644,
"grad_norm": 1.2654798569923909,
"learning_rate": 1.791132312814975e-07,
"loss": 0.1337,
"step": 1220
},
{
"epoch": 1.8305847076461768,
"grad_norm": 1.272505866713752,
"learning_rate": 1.760031622908881e-07,
"loss": 0.1336,
"step": 1221
},
{
"epoch": 1.8320839580209896,
"grad_norm": 1.3768727400043463,
"learning_rate": 1.729198476808186e-07,
"loss": 0.1116,
"step": 1222
},
{
"epoch": 1.8335832083958021,
"grad_norm": 1.4327873874160153,
"learning_rate": 1.6986330455166733e-07,
"loss": 0.1401,
"step": 1223
},
{
"epoch": 1.8350824587706147,
"grad_norm": 1.5102663690913205,
"learning_rate": 1.6683354985533583e-07,
"loss": 0.1238,
"step": 1224
},
{
"epoch": 1.8365817091454273,
"grad_norm": 1.3851371852522554,
"learning_rate": 1.6383060039515343e-07,
"loss": 0.0965,
"step": 1225
},
{
"epoch": 1.8380809595202399,
"grad_norm": 1.4159707749484591,
"learning_rate": 1.6085447282578548e-07,
"loss": 0.1411,
"step": 1226
},
{
"epoch": 1.8395802098950524,
"grad_norm": 1.287975739004606,
"learning_rate": 1.579051836531409e-07,
"loss": 0.1176,
"step": 1227
},
{
"epoch": 1.841079460269865,
"grad_norm": 1.3107136778886062,
"learning_rate": 1.5498274923427925e-07,
"loss": 0.1102,
"step": 1228
},
{
"epoch": 1.8425787106446778,
"grad_norm": 1.1592300505162423,
"learning_rate": 1.5208718577732096e-07,
"loss": 0.1475,
"step": 1229
},
{
"epoch": 1.8440779610194902,
"grad_norm": 1.2527400216216011,
"learning_rate": 1.4921850934135785e-07,
"loss": 0.1372,
"step": 1230
},
{
"epoch": 1.845577211394303,
"grad_norm": 1.5762265876963002,
"learning_rate": 1.463767358363627e-07,
"loss": 0.1481,
"step": 1231
},
{
"epoch": 1.8470764617691153,
"grad_norm": 1.4664669932819818,
"learning_rate": 1.4356188102310266e-07,
"loss": 0.1357,
"step": 1232
},
{
"epoch": 1.8485757121439281,
"grad_norm": 1.1474255099409505,
"learning_rate": 1.4077396051305093e-07,
"loss": 0.1248,
"step": 1233
},
{
"epoch": 1.8500749625187405,
"grad_norm": 1.1751469367983083,
"learning_rate": 1.3801298976830025e-07,
"loss": 0.1021,
"step": 1234
},
{
"epoch": 1.8515742128935533,
"grad_norm": 1.3128571811676382,
"learning_rate": 1.3527898410147677e-07,
"loss": 0.12,
"step": 1235
},
{
"epoch": 1.8530734632683659,
"grad_norm": 1.2988630632224651,
"learning_rate": 1.325719586756563e-07,
"loss": 0.1141,
"step": 1236
},
{
"epoch": 1.8545727136431784,
"grad_norm": 1.1587056530261566,
"learning_rate": 1.2989192850427933e-07,
"loss": 0.1245,
"step": 1237
},
{
"epoch": 1.856071964017991,
"grad_norm": 1.327393545077868,
"learning_rate": 1.2723890845106723e-07,
"loss": 0.1399,
"step": 1238
},
{
"epoch": 1.8575712143928036,
"grad_norm": 1.3572133675205014,
"learning_rate": 1.2461291322994118e-07,
"loss": 0.1415,
"step": 1239
},
{
"epoch": 1.8590704647676162,
"grad_norm": 1.3310189409688409,
"learning_rate": 1.2201395740493948e-07,
"loss": 0.1379,
"step": 1240
},
{
"epoch": 1.8605697151424287,
"grad_norm": 1.3442947125295035,
"learning_rate": 1.1944205539013708e-07,
"loss": 0.0987,
"step": 1241
},
{
"epoch": 1.8620689655172413,
"grad_norm": 1.0798614239003894,
"learning_rate": 1.1689722144956672e-07,
"loss": 0.1254,
"step": 1242
},
{
"epoch": 1.863568215892054,
"grad_norm": 1.3849846333193567,
"learning_rate": 1.1437946969713731e-07,
"loss": 0.1307,
"step": 1243
},
{
"epoch": 1.8650674662668667,
"grad_norm": 1.2406963107705151,
"learning_rate": 1.1188881409655849e-07,
"loss": 0.122,
"step": 1244
},
{
"epoch": 1.866566716641679,
"grad_norm": 1.7565277555942336,
"learning_rate": 1.0942526846126122e-07,
"loss": 0.1617,
"step": 1245
},
{
"epoch": 1.8680659670164919,
"grad_norm": 1.2480658553182324,
"learning_rate": 1.0698884645432117e-07,
"loss": 0.1274,
"step": 1246
},
{
"epoch": 1.8695652173913042,
"grad_norm": 1.1819736586926606,
"learning_rate": 1.0457956158838545e-07,
"loss": 0.0818,
"step": 1247
},
{
"epoch": 1.871064467766117,
"grad_norm": 1.2563892910427237,
"learning_rate": 1.0219742722559433e-07,
"loss": 0.1312,
"step": 1248
},
{
"epoch": 1.8725637181409296,
"grad_norm": 1.22752437876397,
"learning_rate": 9.984245657750857e-08,
"loss": 0.1162,
"step": 1249
},
{
"epoch": 1.8740629685157422,
"grad_norm": 1.9104186055248886,
"learning_rate": 9.751466270503718e-08,
"loss": 0.1544,
"step": 1250
},
{
"epoch": 1.8755622188905547,
"grad_norm": 1.3662734745770317,
"learning_rate": 9.521405851836252e-08,
"loss": 0.1422,
"step": 1251
},
{
"epoch": 1.8770614692653673,
"grad_norm": 1.5428546343885159,
"learning_rate": 9.294065677687202e-08,
"loss": 0.1372,
"step": 1252
},
{
"epoch": 1.87856071964018,
"grad_norm": 1.3205546017262328,
"learning_rate": 9.069447008908383e-08,
"loss": 0.1567,
"step": 1253
},
{
"epoch": 1.8800599700149925,
"grad_norm": 1.1364342529541331,
"learning_rate": 8.847551091257956e-08,
"loss": 0.1362,
"step": 1254
},
{
"epoch": 1.881559220389805,
"grad_norm": 1.3084944855439842,
"learning_rate": 8.62837915539344e-08,
"loss": 0.1198,
"step": 1255
},
{
"epoch": 1.8830584707646176,
"grad_norm": 1.1852869544003721,
"learning_rate": 8.411932416864832e-08,
"loss": 0.1116,
"step": 1256
},
{
"epoch": 1.8845577211394304,
"grad_norm": 1.3425131194386866,
"learning_rate": 8.198212076107881e-08,
"loss": 0.1502,
"step": 1257
},
{
"epoch": 1.8860569715142428,
"grad_norm": 1.2585048914618981,
"learning_rate": 7.987219318437489e-08,
"loss": 0.1159,
"step": 1258
},
{
"epoch": 1.8875562218890556,
"grad_norm": 1.4671143201623167,
"learning_rate": 7.778955314041103e-08,
"loss": 0.136,
"step": 1259
},
{
"epoch": 1.889055472263868,
"grad_norm": 1.4222474463490127,
"learning_rate": 7.573421217972222e-08,
"loss": 0.1425,
"step": 1260
},
{
"epoch": 1.8905547226386807,
"grad_norm": 1.282048667403304,
"learning_rate": 7.370618170144062e-08,
"loss": 0.1247,
"step": 1261
},
{
"epoch": 1.892053973013493,
"grad_norm": 1.2302405421730362,
"learning_rate": 7.170547295323016e-08,
"loss": 0.1145,
"step": 1262
},
{
"epoch": 1.893553223388306,
"grad_norm": 1.146980393796333,
"learning_rate": 6.973209703122652e-08,
"loss": 0.0961,
"step": 1263
},
{
"epoch": 1.8950524737631185,
"grad_norm": 1.335536733481989,
"learning_rate": 6.778606487997496e-08,
"loss": 0.1571,
"step": 1264
},
{
"epoch": 1.896551724137931,
"grad_norm": 1.4027093269543287,
"learning_rate": 6.58673872923693e-08,
"loss": 0.1327,
"step": 1265
},
{
"epoch": 1.8980509745127436,
"grad_norm": 1.2697648842309783,
"learning_rate": 6.397607490959134e-08,
"loss": 0.1139,
"step": 1266
},
{
"epoch": 1.8995502248875562,
"grad_norm": 1.5571037886598633,
"learning_rate": 6.211213822105378e-08,
"loss": 0.1305,
"step": 1267
},
{
"epoch": 1.9010494752623688,
"grad_norm": 1.5640480934714973,
"learning_rate": 6.027558756434015e-08,
"loss": 0.1243,
"step": 1268
},
{
"epoch": 1.9025487256371814,
"grad_norm": 1.2348088893423754,
"learning_rate": 5.846643312514888e-08,
"loss": 0.1102,
"step": 1269
},
{
"epoch": 1.9040479760119942,
"grad_norm": 1.319087946032606,
"learning_rate": 5.668468493723489e-08,
"loss": 0.1034,
"step": 1270
},
{
"epoch": 1.9055472263868065,
"grad_norm": 1.230510294470354,
"learning_rate": 5.4930352882357486e-08,
"loss": 0.1033,
"step": 1271
},
{
"epoch": 1.9070464767616193,
"grad_norm": 1.5146473743987154,
"learning_rate": 5.3203446690220374e-08,
"loss": 0.1583,
"step": 1272
},
{
"epoch": 1.9085457271364317,
"grad_norm": 1.2323703018536982,
"learning_rate": 5.1503975938422824e-08,
"loss": 0.0985,
"step": 1273
},
{
"epoch": 1.9100449775112445,
"grad_norm": 1.2597644982920362,
"learning_rate": 4.983195005240415e-08,
"loss": 0.124,
"step": 1274
},
{
"epoch": 1.9115442278860568,
"grad_norm": 1.3027649075236873,
"learning_rate": 4.8187378305390994e-08,
"loss": 0.1342,
"step": 1275
},
{
"epoch": 1.9130434782608696,
"grad_norm": 1.284494594319255,
"learning_rate": 4.657026981834623e-08,
"loss": 0.1079,
"step": 1276
},
{
"epoch": 1.9145427286356822,
"grad_norm": 1.3563437540114411,
"learning_rate": 4.498063355991955e-08,
"loss": 0.111,
"step": 1277
},
{
"epoch": 1.9160419790104948,
"grad_norm": 1.2218664214432449,
"learning_rate": 4.341847834639645e-08,
"loss": 0.111,
"step": 1278
},
{
"epoch": 1.9175412293853074,
"grad_norm": 1.6304107772874457,
"learning_rate": 4.188381284164933e-08,
"loss": 0.1605,
"step": 1279
},
{
"epoch": 1.91904047976012,
"grad_norm": 1.2895628153813035,
"learning_rate": 4.0376645557090864e-08,
"loss": 0.1126,
"step": 1280
},
{
"epoch": 1.9205397301349325,
"grad_norm": 1.2067891403881137,
"learning_rate": 3.889698485162463e-08,
"loss": 0.1041,
"step": 1281
},
{
"epoch": 1.922038980509745,
"grad_norm": 1.308614088980277,
"learning_rate": 3.744483893160067e-08,
"loss": 0.1636,
"step": 1282
},
{
"epoch": 1.9235382308845579,
"grad_norm": 1.2190048654582166,
"learning_rate": 3.602021585076942e-08,
"loss": 0.1134,
"step": 1283
},
{
"epoch": 1.9250374812593702,
"grad_norm": 1.430236210435848,
"learning_rate": 3.462312351023567e-08,
"loss": 0.1522,
"step": 1284
},
{
"epoch": 1.926536731634183,
"grad_norm": 1.3582666782393593,
"learning_rate": 3.325356965841686e-08,
"loss": 0.0993,
"step": 1285
},
{
"epoch": 1.9280359820089954,
"grad_norm": 1.2141944277630785,
"learning_rate": 3.191156189099931e-08,
"loss": 0.1423,
"step": 1286
},
{
"epoch": 1.9295352323838082,
"grad_norm": 1.2585188964273484,
"learning_rate": 3.0597107650894855e-08,
"loss": 0.1475,
"step": 1287
},
{
"epoch": 1.9310344827586206,
"grad_norm": 1.5025901907220542,
"learning_rate": 2.9310214228202016e-08,
"loss": 0.1357,
"step": 1288
},
{
"epoch": 1.9325337331334334,
"grad_norm": 1.280230397183545,
"learning_rate": 2.8050888760163265e-08,
"loss": 0.1023,
"step": 1289
},
{
"epoch": 1.934032983508246,
"grad_norm": 1.421439477267354,
"learning_rate": 2.6819138231126695e-08,
"loss": 0.1756,
"step": 1290
},
{
"epoch": 1.9355322338830585,
"grad_norm": 1.3176598104321589,
"learning_rate": 2.5614969472506634e-08,
"loss": 0.1499,
"step": 1291
},
{
"epoch": 1.937031484257871,
"grad_norm": 1.174015790686038,
"learning_rate": 2.4438389162746434e-08,
"loss": 0.13,
"step": 1292
},
{
"epoch": 1.9385307346326837,
"grad_norm": 1.114311549268444,
"learning_rate": 2.3289403827281287e-08,
"loss": 0.1021,
"step": 1293
},
{
"epoch": 1.9400299850074962,
"grad_norm": 1.328198982585985,
"learning_rate": 2.2168019838501032e-08,
"loss": 0.1455,
"step": 1294
},
{
"epoch": 1.9415292353823088,
"grad_norm": 1.2450841353115827,
"learning_rate": 2.1074243415716288e-08,
"loss": 0.1004,
"step": 1295
},
{
"epoch": 1.9430284857571214,
"grad_norm": 1.377648309409005,
"learning_rate": 2.0008080625124048e-08,
"loss": 0.1412,
"step": 1296
},
{
"epoch": 1.944527736131934,
"grad_norm": 1.1196841017122345,
"learning_rate": 1.896953737977103e-08,
"loss": 0.0949,
"step": 1297
},
{
"epoch": 1.9460269865067468,
"grad_norm": 1.2728112916715932,
"learning_rate": 1.7958619439524817e-08,
"loss": 0.1464,
"step": 1298
},
{
"epoch": 1.9475262368815591,
"grad_norm": 1.4650226667705646,
"learning_rate": 1.6975332411040547e-08,
"loss": 0.1123,
"step": 1299
},
{
"epoch": 1.949025487256372,
"grad_norm": 1.328935000820398,
"learning_rate": 1.601968174772761e-08,
"loss": 0.1176,
"step": 1300
},
{
"epoch": 1.9505247376311843,
"grad_norm": 1.2616991198048613,
"learning_rate": 1.5091672749723564e-08,
"loss": 0.1184,
"step": 1301
},
{
"epoch": 1.952023988005997,
"grad_norm": 1.4239725782247015,
"learning_rate": 1.4191310563860806e-08,
"loss": 0.1243,
"step": 1302
},
{
"epoch": 1.9535232383808094,
"grad_norm": 1.108235535231577,
"learning_rate": 1.331860018363995e-08,
"loss": 0.0854,
"step": 1303
},
{
"epoch": 1.9550224887556222,
"grad_norm": 1.311555327325946,
"learning_rate": 1.2473546449203178e-08,
"loss": 0.1482,
"step": 1304
},
{
"epoch": 1.9565217391304348,
"grad_norm": 1.0887766055425887,
"learning_rate": 1.1656154047303691e-08,
"loss": 0.1084,
"step": 1305
},
{
"epoch": 1.9580209895052474,
"grad_norm": 1.2437187712441724,
"learning_rate": 1.0866427511285194e-08,
"loss": 0.1065,
"step": 1306
},
{
"epoch": 1.95952023988006,
"grad_norm": 1.2558811609983191,
"learning_rate": 1.0104371221050236e-08,
"loss": 0.0933,
"step": 1307
},
{
"epoch": 1.9610194902548725,
"grad_norm": 1.0793835290254463,
"learning_rate": 9.369989403041347e-09,
"loss": 0.0917,
"step": 1308
},
{
"epoch": 1.9625187406296851,
"grad_norm": 1.3376354462141502,
"learning_rate": 8.663286130216608e-09,
"loss": 0.1588,
"step": 1309
},
{
"epoch": 1.9640179910044977,
"grad_norm": 1.4377579871555195,
"learning_rate": 7.984265322023011e-09,
"loss": 0.1166,
"step": 1310
},
{
"epoch": 1.9655172413793105,
"grad_norm": 1.185812542463257,
"learning_rate": 7.332930744380906e-09,
"loss": 0.1413,
"step": 1311
},
{
"epoch": 1.9670164917541229,
"grad_norm": 1.4281481200200825,
"learning_rate": 6.709286009657368e-09,
"loss": 0.1415,
"step": 1312
},
{
"epoch": 1.9685157421289357,
"grad_norm": 1.2710000977808737,
"learning_rate": 6.1133345766511975e-09,
"loss": 0.1858,
"step": 1313
},
{
"epoch": 1.970014992503748,
"grad_norm": 1.3918057104368156,
"learning_rate": 5.5450797505690605e-09,
"loss": 0.1392,
"step": 1314
},
{
"epoch": 1.9715142428785608,
"grad_norm": 1.5876713361222057,
"learning_rate": 5.004524683011048e-09,
"loss": 0.1403,
"step": 1315
},
{
"epoch": 1.9730134932533732,
"grad_norm": 1.3874352873294997,
"learning_rate": 4.491672371950695e-09,
"loss": 0.1496,
"step": 1316
},
{
"epoch": 1.974512743628186,
"grad_norm": 1.1940229101877402,
"learning_rate": 4.0065256617199954e-09,
"loss": 0.1035,
"step": 1317
},
{
"epoch": 1.9760119940029985,
"grad_norm": 1.3546540706865013,
"learning_rate": 3.5490872429910784e-09,
"loss": 0.1269,
"step": 1318
},
{
"epoch": 1.9775112443778111,
"grad_norm": 1.2421709629976962,
"learning_rate": 3.119359652765108e-09,
"loss": 0.1626,
"step": 1319
},
{
"epoch": 1.9790104947526237,
"grad_norm": 1.2759649380055378,
"learning_rate": 2.7173452743550767e-09,
"loss": 0.1603,
"step": 1320
},
{
"epoch": 1.9805097451274363,
"grad_norm": 1.3034843979031692,
"learning_rate": 2.343046337374144e-09,
"loss": 0.1203,
"step": 1321
},
{
"epoch": 1.9820089955022488,
"grad_norm": 1.1140010277374572,
"learning_rate": 1.9964649177223184e-09,
"loss": 0.1103,
"step": 1322
},
{
"epoch": 1.9835082458770614,
"grad_norm": 1.1570791526634117,
"learning_rate": 1.6776029375759062e-09,
"loss": 0.1017,
"step": 1323
},
{
"epoch": 1.9850074962518742,
"grad_norm": 1.6900883321369187,
"learning_rate": 1.386462165375857e-09,
"loss": 0.1708,
"step": 1324
},
{
"epoch": 1.9865067466266866,
"grad_norm": 1.15203993036366,
"learning_rate": 1.1230442158188804e-09,
"loss": 0.1116,
"step": 1325
},
{
"epoch": 1.9880059970014994,
"grad_norm": 1.4236098143929679,
"learning_rate": 8.873505498474544e-10,
"loss": 0.1164,
"step": 1326
},
{
"epoch": 1.9895052473763117,
"grad_norm": 1.3446422420784052,
"learning_rate": 6.793824746437194e-10,
"loss": 0.1543,
"step": 1327
},
{
"epoch": 1.9910044977511245,
"grad_norm": 1.3285112250078446,
"learning_rate": 4.991411436189308e-10,
"loss": 0.1227,
"step": 1328
},
{
"epoch": 1.992503748125937,
"grad_norm": 1.1267414247092749,
"learning_rate": 3.466275564101285e-10,
"loss": 0.1066,
"step": 1329
},
{
"epoch": 1.9940029985007497,
"grad_norm": 1.3312680039426206,
"learning_rate": 2.2184255887403028e-10,
"loss": 0.1394,
"step": 1330
},
{
"epoch": 1.9955022488755623,
"grad_norm": 1.1117802196396092,
"learning_rate": 1.2478684308037115e-10,
"loss": 0.1133,
"step": 1331
},
{
"epoch": 1.9970014992503748,
"grad_norm": 1.0953889316887029,
"learning_rate": 5.5460947310237435e-11,
"loss": 0.1117,
"step": 1332
},
{
"epoch": 1.9985007496251874,
"grad_norm": 1.1458111655501477,
"learning_rate": 1.3865256052181252e-11,
"loss": 0.1165,
"step": 1333
},
{
"epoch": 2.0,
"grad_norm": 1.0071991154813362,
"learning_rate": 0.0,
"loss": 0.066,
"step": 1334
},
{
"epoch": 2.0,
"step": 1334,
"total_flos": 39377831657472.0,
"train_loss": 0.2224826162912767,
"train_runtime": 4117.8607,
"train_samples_per_second": 2.588,
"train_steps_per_second": 0.324
}
],
"logging_steps": 1,
"max_steps": 1334,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 2000,
"stateful_callbacks": {
"TrainerControl": {
"args": {
"should_epoch_stop": false,
"should_evaluate": false,
"should_log": false,
"should_save": true,
"should_training_stop": true
},
"attributes": {}
}
},
"total_flos": 39377831657472.0,
"train_batch_size": 2,
"trial_name": null,
"trial_params": null
}