{
  "best_metric": 0.5550929307937622,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 1.4184397163120568,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0070921985815602835,
      "grad_norm": 8.05357837677002,
      "learning_rate": 1e-05,
      "loss": 2.5463,
      "step": 1
    },
    {
      "epoch": 0.0070921985815602835,
      "eval_loss": 3.0699386596679688,
      "eval_runtime": 4.7582,
      "eval_samples_per_second": 50.019,
      "eval_steps_per_second": 12.61,
      "step": 1
    },
    {
      "epoch": 0.014184397163120567,
      "grad_norm": 10.838326454162598,
      "learning_rate": 2e-05,
      "loss": 2.79,
      "step": 2
    },
    {
      "epoch": 0.02127659574468085,
      "grad_norm": 8.85204792022705,
      "learning_rate": 3e-05,
      "loss": 2.7062,
      "step": 3
    },
    {
      "epoch": 0.028368794326241134,
      "grad_norm": 4.852919578552246,
      "learning_rate": 4e-05,
      "loss": 2.6851,
      "step": 4
    },
    {
      "epoch": 0.03546099290780142,
      "grad_norm": 4.499677658081055,
      "learning_rate": 5e-05,
      "loss": 2.4067,
      "step": 5
    },
    {
      "epoch": 0.0425531914893617,
      "grad_norm": 4.731116771697998,
      "learning_rate": 6e-05,
      "loss": 2.4972,
      "step": 6
    },
    {
      "epoch": 0.04964539007092199,
      "grad_norm": 3.367246150970459,
      "learning_rate": 7e-05,
      "loss": 2.176,
      "step": 7
    },
    {
      "epoch": 0.05673758865248227,
      "grad_norm": 2.1521012783050537,
      "learning_rate": 8e-05,
      "loss": 1.8984,
      "step": 8
    },
    {
      "epoch": 0.06382978723404255,
      "grad_norm": 2.3264524936676025,
      "learning_rate": 9e-05,
      "loss": 1.8045,
      "step": 9
    },
    {
      "epoch": 0.07092198581560284,
      "grad_norm": 2.074296236038208,
      "learning_rate": 0.0001,
      "loss": 1.5394,
      "step": 10
    },
    {
      "epoch": 0.07801418439716312,
      "grad_norm": 2.067852735519409,
      "learning_rate": 9.999316524962345e-05,
      "loss": 1.4358,
      "step": 11
    },
    {
      "epoch": 0.0851063829787234,
      "grad_norm": 1.765109896659851,
      "learning_rate": 9.997266286704631e-05,
      "loss": 1.2752,
      "step": 12
    },
    {
      "epoch": 0.09219858156028368,
      "grad_norm": 1.8644369840621948,
      "learning_rate": 9.993849845741524e-05,
      "loss": 1.1874,
      "step": 13
    },
    {
      "epoch": 0.09929078014184398,
      "grad_norm": 1.5777784585952759,
      "learning_rate": 9.989068136093873e-05,
      "loss": 1.1035,
      "step": 14
    },
    {
      "epoch": 0.10638297872340426,
      "grad_norm": 1.6741243600845337,
      "learning_rate": 9.98292246503335e-05,
      "loss": 1.04,
      "step": 15
    },
    {
      "epoch": 0.11347517730496454,
      "grad_norm": 1.3801331520080566,
      "learning_rate": 9.975414512725057e-05,
      "loss": 1.0535,
      "step": 16
    },
    {
      "epoch": 0.12056737588652482,
      "grad_norm": 1.2960208654403687,
      "learning_rate": 9.966546331768191e-05,
      "loss": 0.9674,
      "step": 17
    },
    {
      "epoch": 0.1276595744680851,
      "grad_norm": 1.146315097808838,
      "learning_rate": 9.956320346634876e-05,
      "loss": 0.9224,
      "step": 18
    },
    {
      "epoch": 0.1347517730496454,
      "grad_norm": 1.3943123817443848,
      "learning_rate": 9.944739353007344e-05,
      "loss": 0.8943,
      "step": 19
    },
    {
      "epoch": 0.14184397163120568,
      "grad_norm": 1.0280953645706177,
      "learning_rate": 9.931806517013612e-05,
      "loss": 0.779,
      "step": 20
    },
    {
      "epoch": 0.14893617021276595,
      "grad_norm": 1.1355559825897217,
      "learning_rate": 9.917525374361912e-05,
      "loss": 0.8361,
      "step": 21
    },
    {
      "epoch": 0.15602836879432624,
      "grad_norm": 1.0945531129837036,
      "learning_rate": 9.901899829374047e-05,
      "loss": 0.7334,
      "step": 22
    },
    {
      "epoch": 0.16312056737588654,
      "grad_norm": 1.1994026899337769,
      "learning_rate": 9.884934153917997e-05,
      "loss": 0.6122,
      "step": 23
    },
    {
      "epoch": 0.1702127659574468,
      "grad_norm": 0.8883627653121948,
      "learning_rate": 9.86663298624003e-05,
      "loss": 0.5611,
      "step": 24
    },
    {
      "epoch": 0.1773049645390071,
      "grad_norm": 0.9672736525535583,
      "learning_rate": 9.847001329696653e-05,
      "loss": 0.5966,
      "step": 25
    },
    {
      "epoch": 0.18439716312056736,
      "grad_norm": 1.0175507068634033,
      "learning_rate": 9.826044551386744e-05,
      "loss": 0.5375,
      "step": 26
    },
    {
      "epoch": 0.19148936170212766,
      "grad_norm": 0.8878077864646912,
      "learning_rate": 9.803768380684242e-05,
      "loss": 0.4385,
      "step": 27
    },
    {
      "epoch": 0.19858156028368795,
      "grad_norm": 1.0303905010223389,
      "learning_rate": 9.780178907671789e-05,
      "loss": 0.4613,
      "step": 28
    },
    {
      "epoch": 0.20567375886524822,
      "grad_norm": 0.9411659836769104,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.4759,
      "step": 29
    },
    {
      "epoch": 0.2127659574468085,
      "grad_norm": 0.9171887636184692,
      "learning_rate": 9.729086208503174e-05,
      "loss": 0.4398,
      "step": 30
    },
    {
      "epoch": 0.2198581560283688,
      "grad_norm": 0.8607805967330933,
      "learning_rate": 9.701596950580806e-05,
      "loss": 0.4089,
      "step": 31
    },
    {
      "epoch": 0.22695035460992907,
      "grad_norm": 0.9131023287773132,
      "learning_rate": 9.672822322997305e-05,
      "loss": 0.4104,
      "step": 32
    },
    {
      "epoch": 0.23404255319148937,
      "grad_norm": 1.23581063747406,
      "learning_rate": 9.642770192448536e-05,
      "loss": 0.4628,
      "step": 33
    },
    {
      "epoch": 0.24113475177304963,
      "grad_norm": 1.4086484909057617,
      "learning_rate": 9.611448774886924e-05,
      "loss": 0.3627,
      "step": 34
    },
    {
      "epoch": 0.24822695035460993,
      "grad_norm": 6.753032207489014,
      "learning_rate": 9.578866633275288e-05,
      "loss": 0.771,
      "step": 35
    },
    {
      "epoch": 0.2553191489361702,
      "grad_norm": 2.011505365371704,
      "learning_rate": 9.545032675245813e-05,
      "loss": 1.4407,
      "step": 36
    },
    {
      "epoch": 0.2624113475177305,
      "grad_norm": 1.8137774467468262,
      "learning_rate": 9.509956150664796e-05,
      "loss": 1.315,
      "step": 37
    },
    {
      "epoch": 0.2695035460992908,
      "grad_norm": 1.5304665565490723,
      "learning_rate": 9.473646649103818e-05,
      "loss": 1.1503,
      "step": 38
    },
    {
      "epoch": 0.2765957446808511,
      "grad_norm": 1.1797360181808472,
      "learning_rate": 9.43611409721806e-05,
      "loss": 1.1155,
      "step": 39
    },
    {
      "epoch": 0.28368794326241137,
      "grad_norm": 1.039712905883789,
      "learning_rate": 9.397368756032445e-05,
      "loss": 1.0547,
      "step": 40
    },
    {
      "epoch": 0.2907801418439716,
      "grad_norm": 0.9909119606018066,
      "learning_rate": 9.357421218136386e-05,
      "loss": 0.9739,
      "step": 41
    },
    {
      "epoch": 0.2978723404255319,
      "grad_norm": 0.9350584745407104,
      "learning_rate": 9.316282404787871e-05,
      "loss": 0.9576,
      "step": 42
    },
    {
      "epoch": 0.3049645390070922,
      "grad_norm": 0.9247877597808838,
      "learning_rate": 9.273963562927695e-05,
      "loss": 0.8348,
      "step": 43
    },
    {
      "epoch": 0.3120567375886525,
      "grad_norm": 0.8802565932273865,
      "learning_rate": 9.230476262104677e-05,
      "loss": 0.8251,
      "step": 44
    },
    {
      "epoch": 0.3191489361702128,
      "grad_norm": 0.8769276142120361,
      "learning_rate": 9.185832391312644e-05,
      "loss": 0.8402,
      "step": 45
    },
    {
      "epoch": 0.3262411347517731,
      "grad_norm": 0.8504616618156433,
      "learning_rate": 9.140044155740101e-05,
      "loss": 0.7801,
      "step": 46
    },
    {
      "epoch": 0.3333333333333333,
      "grad_norm": 0.8093395829200745,
      "learning_rate": 9.093124073433463e-05,
      "loss": 0.799,
      "step": 47
    },
    {
      "epoch": 0.3404255319148936,
      "grad_norm": 0.8495473265647888,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.8004,
      "step": 48
    },
    {
      "epoch": 0.3475177304964539,
      "grad_norm": 0.8172727227210999,
      "learning_rate": 8.995939984474624e-05,
      "loss": 0.747,
      "step": 49
    },
    {
      "epoch": 0.3546099290780142,
      "grad_norm": 0.8406370282173157,
      "learning_rate": 8.945702546981969e-05,
      "loss": 0.7108,
      "step": 50
    },
    {
      "epoch": 0.3546099290780142,
      "eval_loss": 0.6801980137825012,
      "eval_runtime": 4.7673,
      "eval_samples_per_second": 49.923,
      "eval_steps_per_second": 12.586,
      "step": 50
    },
    {
      "epoch": 0.3617021276595745,
      "grad_norm": 0.764500617980957,
      "learning_rate": 8.894386393810563e-05,
      "loss": 0.5958,
      "step": 51
    },
    {
      "epoch": 0.36879432624113473,
      "grad_norm": 0.8594017624855042,
      "learning_rate": 8.842005554284296e-05,
      "loss": 0.6416,
      "step": 52
    },
    {
      "epoch": 0.375886524822695,
      "grad_norm": 0.8616093397140503,
      "learning_rate": 8.788574348801675e-05,
      "loss": 0.6171,
      "step": 53
    },
    {
      "epoch": 0.3829787234042553,
      "grad_norm": 0.8169087767601013,
      "learning_rate": 8.73410738492077e-05,
      "loss": 0.6634,
      "step": 54
    },
    {
      "epoch": 0.3900709219858156,
      "grad_norm": 0.8082086443901062,
      "learning_rate": 8.678619553365659e-05,
      "loss": 0.5689,
      "step": 55
    },
    {
      "epoch": 0.3971631205673759,
      "grad_norm": 0.7250106930732727,
      "learning_rate": 8.622126023955446e-05,
      "loss": 0.5487,
      "step": 56
    },
    {
      "epoch": 0.40425531914893614,
      "grad_norm": 0.8334801197052002,
      "learning_rate": 8.564642241456986e-05,
      "loss": 0.5422,
      "step": 57
    },
    {
      "epoch": 0.41134751773049644,
      "grad_norm": 0.8466203808784485,
      "learning_rate": 8.506183921362443e-05,
      "loss": 0.5634,
      "step": 58
    },
    {
      "epoch": 0.41843971631205673,
      "grad_norm": 0.6738994121551514,
      "learning_rate": 8.44676704559283e-05,
      "loss": 0.416,
      "step": 59
    },
    {
      "epoch": 0.425531914893617,
      "grad_norm": 0.685225784778595,
      "learning_rate": 8.386407858128706e-05,
      "loss": 0.4565,
      "step": 60
    },
    {
      "epoch": 0.4326241134751773,
      "grad_norm": 0.7276149988174438,
      "learning_rate": 8.32512286056924e-05,
      "loss": 0.4568,
      "step": 61
    },
    {
      "epoch": 0.4397163120567376,
      "grad_norm": 0.7686889171600342,
      "learning_rate": 8.262928807620843e-05,
      "loss": 0.4216,
      "step": 62
    },
    {
      "epoch": 0.44680851063829785,
      "grad_norm": 0.7390680909156799,
      "learning_rate": 8.199842702516583e-05,
      "loss": 0.3804,
      "step": 63
    },
    {
      "epoch": 0.45390070921985815,
      "grad_norm": 0.6119396686553955,
      "learning_rate": 8.135881792367686e-05,
      "loss": 0.3465,
      "step": 64
    },
    {
      "epoch": 0.46099290780141844,
      "grad_norm": 0.7658885717391968,
      "learning_rate": 8.07106356344834e-05,
      "loss": 0.4185,
      "step": 65
    },
    {
      "epoch": 0.46808510638297873,
      "grad_norm": 0.6248570680618286,
      "learning_rate": 8.005405736415126e-05,
      "loss": 0.3154,
      "step": 66
    },
    {
      "epoch": 0.475177304964539,
      "grad_norm": 0.7547570466995239,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.3711,
      "step": 67
    },
    {
      "epoch": 0.48226950354609927,
      "grad_norm": 0.8419597148895264,
      "learning_rate": 7.871643313414718e-05,
      "loss": 0.331,
      "step": 68
    },
    {
      "epoch": 0.48936170212765956,
      "grad_norm": 1.0954856872558594,
      "learning_rate": 7.803575286758364e-05,
      "loss": 0.3504,
      "step": 69
    },
    {
      "epoch": 0.49645390070921985,
      "grad_norm": 4.436848163604736,
      "learning_rate": 7.734740790612136e-05,
      "loss": 0.5475,
      "step": 70
    },
    {
      "epoch": 0.5035460992907801,
      "grad_norm": 1.220984935760498,
      "learning_rate": 7.66515864363997e-05,
      "loss": 1.2138,
      "step": 71
    },
    {
      "epoch": 0.5106382978723404,
      "grad_norm": 1.1256914138793945,
      "learning_rate": 7.594847868906076e-05,
      "loss": 1.031,
      "step": 72
    },
    {
      "epoch": 0.5177304964539007,
      "grad_norm": 1.0466660261154175,
      "learning_rate": 7.52382768867422e-05,
      "loss": 1.0315,
      "step": 73
    },
    {
      "epoch": 0.524822695035461,
      "grad_norm": 0.9186965227127075,
      "learning_rate": 7.452117519152542e-05,
      "loss": 0.9771,
      "step": 74
    },
    {
      "epoch": 0.5319148936170213,
      "grad_norm": 0.78371661901474,
      "learning_rate": 7.379736965185368e-05,
      "loss": 0.8715,
      "step": 75
    },
    {
      "epoch": 0.5390070921985816,
      "grad_norm": 0.7783875465393066,
      "learning_rate": 7.30670581489344e-05,
      "loss": 0.9333,
      "step": 76
    },
    {
      "epoch": 0.5460992907801419,
      "grad_norm": 0.7499712109565735,
      "learning_rate": 7.233044034264034e-05,
      "loss": 0.8841,
      "step": 77
    },
    {
      "epoch": 0.5531914893617021,
      "grad_norm": 0.7532545328140259,
      "learning_rate": 7.158771761692464e-05,
      "loss": 0.8294,
      "step": 78
    },
    {
      "epoch": 0.5602836879432624,
      "grad_norm": 0.6495918035507202,
      "learning_rate": 7.083909302476453e-05,
      "loss": 0.7184,
      "step": 79
    },
    {
      "epoch": 0.5673758865248227,
      "grad_norm": 0.7577716112136841,
      "learning_rate": 7.008477123264848e-05,
      "loss": 0.7317,
      "step": 80
    },
    {
      "epoch": 0.574468085106383,
      "grad_norm": 0.7226397395133972,
      "learning_rate": 6.932495846462261e-05,
      "loss": 0.7158,
      "step": 81
    },
    {
      "epoch": 0.5815602836879432,
      "grad_norm": 0.7062472701072693,
      "learning_rate": 6.855986244591104e-05,
      "loss": 0.7555,
      "step": 82
    },
    {
      "epoch": 0.5886524822695035,
      "grad_norm": 0.7153007984161377,
      "learning_rate": 6.778969234612584e-05,
      "loss": 0.6622,
      "step": 83
    },
    {
      "epoch": 0.5957446808510638,
      "grad_norm": 0.7236975431442261,
      "learning_rate": 6.701465872208216e-05,
      "loss": 0.6507,
      "step": 84
    },
    {
      "epoch": 0.6028368794326241,
      "grad_norm": 0.6651884913444519,
      "learning_rate": 6.623497346023418e-05,
      "loss": 0.6209,
      "step": 85
    },
    {
      "epoch": 0.6099290780141844,
      "grad_norm": 0.6654047966003418,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.5769,
      "step": 86
    },
    {
      "epoch": 0.6170212765957447,
      "grad_norm": 0.6003665924072266,
      "learning_rate": 6.466250186922325e-05,
      "loss": 0.5082,
      "step": 87
    },
    {
      "epoch": 0.624113475177305,
      "grad_norm": 0.7228586673736572,
      "learning_rate": 6.387014543809223e-05,
      "loss": 0.6009,
      "step": 88
    },
    {
      "epoch": 0.6312056737588653,
      "grad_norm": 0.6803051233291626,
      "learning_rate": 6.307399704769099e-05,
      "loss": 0.5428,
      "step": 89
    },
    {
      "epoch": 0.6382978723404256,
      "grad_norm": 0.714916467666626,
      "learning_rate": 6.227427435703997e-05,
      "loss": 0.4958,
      "step": 90
    },
    {
      "epoch": 0.6453900709219859,
      "grad_norm": 0.6927108764648438,
      "learning_rate": 6.147119600233758e-05,
      "loss": 0.5062,
      "step": 91
    },
    {
      "epoch": 0.6524822695035462,
      "grad_norm": 0.7170802354812622,
      "learning_rate": 6.066498153718735e-05,
      "loss": 0.4642,
      "step": 92
    },
    {
      "epoch": 0.6595744680851063,
      "grad_norm": 0.6946068406105042,
      "learning_rate": 5.985585137257401e-05,
      "loss": 0.4154,
      "step": 93
    },
    {
      "epoch": 0.6666666666666666,
      "grad_norm": 0.7123287916183472,
      "learning_rate": 5.90440267166055e-05,
      "loss": 0.4838,
      "step": 94
    },
    {
      "epoch": 0.6737588652482269,
      "grad_norm": 0.6604336500167847,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 0.4114,
      "step": 95
    },
    {
      "epoch": 0.6808510638297872,
      "grad_norm": 0.7255561351776123,
      "learning_rate": 5.74131823855921e-05,
      "loss": 0.3689,
      "step": 96
    },
    {
      "epoch": 0.6879432624113475,
      "grad_norm": 0.7515962719917297,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 0.4193,
      "step": 97
    },
    {
      "epoch": 0.6950354609929078,
      "grad_norm": 0.5581212639808655,
      "learning_rate": 5.577423184847932e-05,
      "loss": 0.3111,
      "step": 98
    },
    {
      "epoch": 0.7021276595744681,
      "grad_norm": 0.6331417560577393,
      "learning_rate": 5.495227651252315e-05,
      "loss": 0.3859,
      "step": 99
    },
    {
      "epoch": 0.7092198581560284,
      "grad_norm": 0.6088494062423706,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 0.3138,
      "step": 100
    },
    {
      "epoch": 0.7092198581560284,
      "eval_loss": 0.6135360598564148,
      "eval_runtime": 4.7671,
      "eval_samples_per_second": 49.926,
      "eval_steps_per_second": 12.586,
      "step": 100
    },
    {
      "epoch": 0.7163120567375887,
      "grad_norm": 0.72311931848526,
      "learning_rate": 5.330452921628497e-05,
      "loss": 0.3679,
      "step": 101
    },
    {
      "epoch": 0.723404255319149,
      "grad_norm": 0.774229884147644,
      "learning_rate": 5.247918773366112e-05,
      "loss": 0.3399,
      "step": 102
    },
    {
      "epoch": 0.7304964539007093,
      "grad_norm": 0.7326410412788391,
      "learning_rate": 5.165316846586541e-05,
      "loss": 0.2171,
      "step": 103
    },
    {
      "epoch": 0.7375886524822695,
      "grad_norm": 1.004658579826355,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 0.2633,
      "step": 104
    },
    {
      "epoch": 0.7446808510638298,
      "grad_norm": 3.270434617996216,
      "learning_rate": 5e-05,
      "loss": 0.4722,
      "step": 105
    },
    {
      "epoch": 0.75177304964539,
      "grad_norm": 1.1170271635055542,
      "learning_rate": 4.917330276168208e-05,
      "loss": 1.1038,
      "step": 106
    },
    {
      "epoch": 0.7588652482269503,
      "grad_norm": 1.0157475471496582,
      "learning_rate": 4.834683153413459e-05,
      "loss": 1.0103,
      "step": 107
    },
    {
      "epoch": 0.7659574468085106,
      "grad_norm": 0.9804872870445251,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 1.0137,
      "step": 108
    },
    {
      "epoch": 0.7730496453900709,
      "grad_norm": 0.8220306634902954,
      "learning_rate": 4.669547078371504e-05,
      "loss": 0.8287,
      "step": 109
    },
    {
      "epoch": 0.7801418439716312,
      "grad_norm": 0.829501748085022,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 0.8722,
      "step": 110
    },
    {
      "epoch": 0.7872340425531915,
      "grad_norm": 0.7647123336791992,
      "learning_rate": 4.504772348747687e-05,
      "loss": 0.8407,
      "step": 111
    },
    {
      "epoch": 0.7943262411347518,
      "grad_norm": 0.7503801584243774,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 0.7584,
      "step": 112
    },
    {
      "epoch": 0.8014184397163121,
      "grad_norm": 0.6786141991615295,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 0.7253,
      "step": 113
    },
    {
      "epoch": 0.8085106382978723,
      "grad_norm": 0.6269477605819702,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 0.6798,
      "step": 114
    },
    {
      "epoch": 0.8156028368794326,
      "grad_norm": 0.6445633769035339,
      "learning_rate": 4.17702704859633e-05,
      "loss": 0.7028,
      "step": 115
    },
    {
      "epoch": 0.8226950354609929,
      "grad_norm": 0.6332740187644958,
      "learning_rate": 4.095597328339452e-05,
      "loss": 0.7253,
      "step": 116
    },
    {
      "epoch": 0.8297872340425532,
      "grad_norm": 0.6345296502113342,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 0.6284,
      "step": 117
    },
    {
      "epoch": 0.8368794326241135,
      "grad_norm": 0.6838743090629578,
      "learning_rate": 3.933501846281267e-05,
      "loss": 0.6435,
      "step": 118
    },
    {
      "epoch": 0.8439716312056738,
      "grad_norm": 0.6241557598114014,
      "learning_rate": 3.852880399766243e-05,
      "loss": 0.6254,
      "step": 119
    },
    {
      "epoch": 0.851063829787234,
      "grad_norm": 0.6874054074287415,
      "learning_rate": 3.772572564296005e-05,
      "loss": 0.6226,
      "step": 120
    },
    {
      "epoch": 0.8581560283687943,
      "grad_norm": 0.650952160358429,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 0.5349,
      "step": 121
    },
    {
      "epoch": 0.8652482269503546,
      "grad_norm": 0.6546323895454407,
      "learning_rate": 3.612985456190778e-05,
      "loss": 0.5715,
      "step": 122
    },
    {
      "epoch": 0.8723404255319149,
      "grad_norm": 0.6813673377037048,
      "learning_rate": 3.533749813077677e-05,
      "loss": 0.5908,
      "step": 123
    },
    {
      "epoch": 0.8794326241134752,
      "grad_norm": 0.6731253266334534,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.5554,
      "step": 124
    },
    {
      "epoch": 0.8865248226950354,
      "grad_norm": 0.6880707144737244,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 0.5198,
      "step": 125
    },
    {
      "epoch": 0.8936170212765957,
      "grad_norm": 0.6165528297424316,
      "learning_rate": 3.298534127791785e-05,
      "loss": 0.5125,
      "step": 126
    },
    {
      "epoch": 0.900709219858156,
      "grad_norm": 0.624283492565155,
      "learning_rate": 3.221030765387417e-05,
      "loss": 0.4478,
      "step": 127
    },
    {
      "epoch": 0.9078014184397163,
      "grad_norm": 0.6110616326332092,
      "learning_rate": 3.144013755408895e-05,
      "loss": 0.4553,
      "step": 128
    },
    {
      "epoch": 0.9148936170212766,
      "grad_norm": 0.6599687337875366,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 0.4373,
      "step": 129
    },
    {
      "epoch": 0.9219858156028369,
      "grad_norm": 0.6878976821899414,
      "learning_rate": 2.991522876735154e-05,
      "loss": 0.4953,
      "step": 130
    },
    {
      "epoch": 0.9290780141843972,
      "grad_norm": 0.5550860166549683,
      "learning_rate": 2.916090697523549e-05,
      "loss": 0.351,
      "step": 131
    },
    {
      "epoch": 0.9361702127659575,
      "grad_norm": 0.6531512141227722,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 0.4004,
      "step": 132
    },
    {
      "epoch": 0.9432624113475178,
      "grad_norm": 0.6485903859138489,
      "learning_rate": 2.766955965735968e-05,
      "loss": 0.3665,
      "step": 133
    },
    {
      "epoch": 0.950354609929078,
      "grad_norm": 0.6129137873649597,
      "learning_rate": 2.693294185106562e-05,
      "loss": 0.3848,
      "step": 134
    },
    {
      "epoch": 0.9574468085106383,
      "grad_norm": 0.6872605681419373,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 0.408,
      "step": 135
    },
    {
      "epoch": 0.9645390070921985,
      "grad_norm": 0.7254231572151184,
      "learning_rate": 2.547882480847461e-05,
      "loss": 0.3731,
      "step": 136
    },
    {
      "epoch": 0.9716312056737588,
      "grad_norm": 0.7933410406112671,
      "learning_rate": 2.476172311325783e-05,
      "loss": 0.3907,
      "step": 137
    },
    {
      "epoch": 0.9787234042553191,
      "grad_norm": 0.6715631484985352,
      "learning_rate": 2.405152131093926e-05,
      "loss": 0.2521,
      "step": 138
    },
    {
      "epoch": 0.9858156028368794,
      "grad_norm": 1.3465372323989868,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 0.3178,
      "step": 139
    },
    {
      "epoch": 0.9929078014184397,
      "grad_norm": 3.0612990856170654,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 0.4534,
      "step": 140
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.7969256043434143,
      "learning_rate": 2.196424713241637e-05,
      "loss": 0.6872,
      "step": 141
    },
    {
      "epoch": 1.0070921985815602,
      "grad_norm": 0.7533031105995178,
      "learning_rate": 2.128356686585282e-05,
      "loss": 0.9698,
      "step": 142
    },
    {
      "epoch": 1.0141843971631206,
      "grad_norm": 0.7495368719100952,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.8849,
      "step": 143
    },
    {
      "epoch": 1.0212765957446808,
      "grad_norm": 0.7248710989952087,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 0.83,
      "step": 144
    },
    {
      "epoch": 1.0283687943262412,
      "grad_norm": 0.6989593505859375,
      "learning_rate": 1.928936436551661e-05,
      "loss": 0.8468,
      "step": 145
    },
    {
      "epoch": 1.0354609929078014,
      "grad_norm": 0.7099794745445251,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 0.7607,
      "step": 146
    },
    {
      "epoch": 1.0425531914893618,
      "grad_norm": 0.6711968183517456,
      "learning_rate": 1.800157297483417e-05,
      "loss": 0.7214,
      "step": 147
    },
    {
      "epoch": 1.049645390070922,
      "grad_norm": 0.6636238694190979,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 0.682,
      "step": 148
    },
    {
      "epoch": 1.0567375886524824,
      "grad_norm": 0.6560536026954651,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 0.6319,
      "step": 149
    },
    {
      "epoch": 1.0638297872340425,
      "grad_norm": 0.6504359245300293,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 0.5816,
      "step": 150
    },
    {
      "epoch": 1.0638297872340425,
      "eval_loss": 0.572798490524292,
      "eval_runtime": 4.77,
      "eval_samples_per_second": 49.896,
      "eval_steps_per_second": 12.579,
      "step": 150
    },
    {
      "epoch": 1.070921985815603,
      "grad_norm": 0.660551905632019,
      "learning_rate": 1.553232954407171e-05,
      "loss": 0.6234,
      "step": 151
    },
    {
      "epoch": 1.0780141843971631,
      "grad_norm": 0.617889940738678,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 0.6388,
      "step": 152
    },
    {
      "epoch": 1.0851063829787233,
      "grad_norm": 0.6906648278236389,
      "learning_rate": 1.435357758543015e-05,
      "loss": 0.6463,
      "step": 153
    },
    {
      "epoch": 1.0921985815602837,
      "grad_norm": 0.6109856963157654,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 0.5092,
      "step": 154
    },
    {
      "epoch": 1.099290780141844,
      "grad_norm": 0.6247379779815674,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 0.5623,
      "step": 155
    },
    {
      "epoch": 1.1063829787234043,
      "grad_norm": 0.6057058572769165,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 0.5638,
      "step": 156
    },
    {
      "epoch": 1.1134751773049645,
      "grad_norm": 0.6146364808082581,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 0.453,
      "step": 157
    },
    {
      "epoch": 1.1205673758865249,
      "grad_norm": 0.6693795323371887,
      "learning_rate": 1.157994445715706e-05,
      "loss": 0.5068,
      "step": 158
    },
    {
      "epoch": 1.127659574468085,
      "grad_norm": 0.6862658858299255,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 0.5096,
      "step": 159
    },
    {
      "epoch": 1.1347517730496455,
      "grad_norm": 0.7163901329040527,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 0.5284,
      "step": 160
    },
    {
      "epoch": 1.1418439716312057,
      "grad_norm": 0.6084697842597961,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 0.5107,
      "step": 161
    },
    {
      "epoch": 1.148936170212766,
      "grad_norm": 0.6437754034996033,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.4771,
      "step": 162
    },
    {
      "epoch": 1.1560283687943262,
      "grad_norm": 0.6459302306175232,
      "learning_rate": 9.068759265665384e-06,
      "loss": 0.4638,
      "step": 163
    },
    {
      "epoch": 1.1631205673758864,
      "grad_norm": 0.5849676728248596,
      "learning_rate": 8.599558442598998e-06,
      "loss": 0.3398,
      "step": 164
    },
    {
      "epoch": 1.1702127659574468,
      "grad_norm": 0.5909267663955688,
      "learning_rate": 8.141676086873572e-06,
      "loss": 0.3214,
      "step": 165
    },
    {
      "epoch": 1.177304964539007,
      "grad_norm": 0.6215970516204834,
      "learning_rate": 7.695237378953223e-06,
      "loss": 0.3948,
      "step": 166
    },
    {
      "epoch": 1.1843971631205674,
      "grad_norm": 0.6223005056381226,
      "learning_rate": 7.260364370723044e-06,
      "loss": 0.3943,
      "step": 167
    },
    {
      "epoch": 1.1914893617021276,
      "grad_norm": 0.591517448425293,
      "learning_rate": 6.837175952121306e-06,
      "loss": 0.3294,
      "step": 168
    },
    {
      "epoch": 1.198581560283688,
      "grad_norm": 0.5986785888671875,
      "learning_rate": 6.425787818636131e-06,
      "loss": 0.3227,
      "step": 169
    },
    {
      "epoch": 1.2056737588652482,
      "grad_norm": 0.6252090334892273,
      "learning_rate": 6.026312439675552e-06,
      "loss": 0.2887,
      "step": 170
    },
    {
      "epoch": 1.2127659574468086,
      "grad_norm": 0.6571121215820312,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 0.3528,
      "step": 171
    },
    {
      "epoch": 1.2198581560283688,
      "grad_norm": 0.6483495235443115,
      "learning_rate": 5.263533508961827e-06,
      "loss": 0.2749,
      "step": 172
    },
    {
      "epoch": 1.226950354609929,
      "grad_norm": 0.6415087580680847,
      "learning_rate": 4.900438493352055e-06,
      "loss": 0.2799,
      "step": 173
    },
    {
      "epoch": 1.2340425531914894,
      "grad_norm": 0.8774486184120178,
      "learning_rate": 4.549673247541875e-06,
      "loss": 0.288,
      "step": 174
    },
    {
      "epoch": 1.2411347517730495,
      "grad_norm": 0.9746853113174438,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 0.2354,
      "step": 175
    },
    {
      "epoch": 1.24822695035461,
      "grad_norm": 2.3162097930908203,
      "learning_rate": 3.885512251130763e-06,
      "loss": 0.3473,
      "step": 176
    },
    {
      "epoch": 1.2553191489361701,
      "grad_norm": 0.6907155513763428,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 0.9727,
      "step": 177
    },
    {
      "epoch": 1.2624113475177305,
      "grad_norm": 0.6453986167907715,
      "learning_rate": 3.271776770026963e-06,
      "loss": 0.8441,
      "step": 178
    },
    {
      "epoch": 1.2695035460992907,
      "grad_norm": 0.6262970566749573,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 0.8015,
      "step": 179
    },
    {
      "epoch": 1.2765957446808511,
      "grad_norm": 0.6407637000083923,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 0.7144,
      "step": 180
    },
    {
      "epoch": 1.2836879432624113,
      "grad_norm": 0.6383804678916931,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 0.7124,
      "step": 181
    },
    {
      "epoch": 1.2907801418439715,
      "grad_norm": 0.6513094902038574,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 0.7273,
      "step": 182
    },
    {
      "epoch": 1.297872340425532,
      "grad_norm": 0.6380339860916138,
      "learning_rate": 1.962316193157593e-06,
      "loss": 0.6975,
      "step": 183
    },
    {
      "epoch": 1.3049645390070923,
      "grad_norm": 0.6610780954360962,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 0.6906,
      "step": 184
    },
    {
      "epoch": 1.3120567375886525,
      "grad_norm": 0.6918430328369141,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 0.677,
      "step": 185
    },
    {
      "epoch": 1.3191489361702127,
      "grad_norm": 0.6196809411048889,
      "learning_rate": 1.333670137599713e-06,
      "loss": 0.6158,
      "step": 186
    },
    {
      "epoch": 1.326241134751773,
      "grad_norm": 0.6588773131370544,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 0.6134,
      "step": 187
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.6725887060165405,
      "learning_rate": 9.810017062595322e-07,
      "loss": 0.6,
      "step": 188
    },
    {
      "epoch": 1.3404255319148937,
      "grad_norm": 0.6330568790435791,
      "learning_rate": 8.247462563808817e-07,
      "loss": 0.6241,
      "step": 189
    },
    {
      "epoch": 1.3475177304964538,
      "grad_norm": 0.6477409601211548,
      "learning_rate": 6.819348298638839e-07,
      "loss": 0.5487,
      "step": 190
    },
    {
      "epoch": 1.3546099290780143,
      "grad_norm": 0.5947543382644653,
      "learning_rate": 5.526064699265753e-07,
      "loss": 0.5163,
      "step": 191
    },
    {
      "epoch": 1.3617021276595744,
      "grad_norm": 0.5868836641311646,
      "learning_rate": 4.367965336512403e-07,
      "loss": 0.4797,
      "step": 192
    },
    {
      "epoch": 1.3687943262411348,
      "grad_norm": 0.6020753979682922,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 0.489,
      "step": 193
    },
    {
      "epoch": 1.375886524822695,
      "grad_norm": 0.6148223280906677,
      "learning_rate": 2.458548727494292e-07,
      "loss": 0.5058,
      "step": 194
    },
    {
      "epoch": 1.3829787234042552,
      "grad_norm": 0.6265669465065002,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 0.4756,
      "step": 195
    },
    {
      "epoch": 1.3900709219858156,
      "grad_norm": 0.5820423364639282,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 0.4349,
      "step": 196
    },
    {
      "epoch": 1.397163120567376,
      "grad_norm": 0.6171536445617676,
      "learning_rate": 6.150154258476315e-08,
      "loss": 0.3995,
      "step": 197
    },
    {
      "epoch": 1.4042553191489362,
      "grad_norm": 0.5911435484886169,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 0.3988,
      "step": 198
    },
    {
      "epoch": 1.4113475177304964,
      "grad_norm": 0.6192638874053955,
      "learning_rate": 6.834750376549792e-09,
      "loss": 0.4081,
      "step": 199
    },
    {
      "epoch": 1.4184397163120568,
      "grad_norm": 0.5591219663619995,
      "learning_rate": 0.0,
      "loss": 0.3524,
      "step": 200
    },
    {
      "epoch": 1.4184397163120568,
      "eval_loss": 0.5550929307937622,
      "eval_runtime": 4.7697,
      "eval_samples_per_second": 49.898,
      "eval_steps_per_second": 12.579,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 4.264962487536845e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}