{
  "best_metric": 0.12807011604309082,
  "best_model_checkpoint": "miner_id_24/checkpoint-200",
  "epoch": 0.27164685908319186,
  "eval_steps": 50,
  "global_step": 200,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0013582342954159593,
      "grad_norm": 1.8889578580856323,
      "learning_rate": 1e-05,
      "loss": 2.2076,
      "step": 1
    },
    {
      "epoch": 0.0013582342954159593,
      "eval_loss": 0.9938458800315857,
      "eval_runtime": 66.1888,
      "eval_samples_per_second": 18.734,
      "eval_steps_per_second": 4.684,
      "step": 1
    },
    {
      "epoch": 0.0027164685908319186,
      "grad_norm": 2.3116564750671387,
      "learning_rate": 2e-05,
      "loss": 3.322,
      "step": 2
    },
    {
      "epoch": 0.0040747028862478775,
      "grad_norm": 3.3473684787750244,
      "learning_rate": 3e-05,
      "loss": 3.6327,
      "step": 3
    },
    {
      "epoch": 0.005432937181663837,
      "grad_norm": 3.8035199642181396,
      "learning_rate": 4e-05,
      "loss": 4.1776,
      "step": 4
    },
    {
      "epoch": 0.006791171477079796,
      "grad_norm": 4.566415309906006,
      "learning_rate": 5e-05,
      "loss": 4.4245,
      "step": 5
    },
    {
      "epoch": 0.008149405772495755,
      "grad_norm": 4.2230682373046875,
      "learning_rate": 6e-05,
      "loss": 4.3489,
      "step": 6
    },
    {
      "epoch": 0.009507640067911714,
      "grad_norm": 3.2478137016296387,
      "learning_rate": 7e-05,
      "loss": 2.3336,
      "step": 7
    },
    {
      "epoch": 0.010865874363327675,
      "grad_norm": 3.2353904247283936,
      "learning_rate": 8e-05,
      "loss": 2.3194,
      "step": 8
    },
    {
      "epoch": 0.012224108658743633,
      "grad_norm": 4.434475421905518,
      "learning_rate": 9e-05,
      "loss": 1.7946,
      "step": 9
    },
    {
      "epoch": 0.013582342954159592,
      "grad_norm": 2.9538373947143555,
      "learning_rate": 0.0001,
      "loss": 1.4935,
      "step": 10
    },
    {
      "epoch": 0.014940577249575551,
      "grad_norm": 3.012892961502075,
      "learning_rate": 9.999316524962345e-05,
      "loss": 1.4069,
      "step": 11
    },
    {
      "epoch": 0.01629881154499151,
      "grad_norm": 3.1674129962921143,
      "learning_rate": 9.997266286704631e-05,
      "loss": 0.967,
      "step": 12
    },
    {
      "epoch": 0.01765704584040747,
      "grad_norm": 3.2145333290100098,
      "learning_rate": 9.993849845741524e-05,
      "loss": 1.0964,
      "step": 13
    },
    {
      "epoch": 0.019015280135823428,
      "grad_norm": 4.188545227050781,
      "learning_rate": 9.989068136093873e-05,
      "loss": 1.8467,
      "step": 14
    },
    {
      "epoch": 0.02037351443123939,
      "grad_norm": 3.6614139080047607,
      "learning_rate": 9.98292246503335e-05,
      "loss": 1.4485,
      "step": 15
    },
    {
      "epoch": 0.02173174872665535,
      "grad_norm": 2.5848684310913086,
      "learning_rate": 9.975414512725057e-05,
      "loss": 1.1819,
      "step": 16
    },
    {
      "epoch": 0.023089983022071308,
      "grad_norm": 2.549119234085083,
      "learning_rate": 9.966546331768191e-05,
      "loss": 1.3204,
      "step": 17
    },
    {
      "epoch": 0.024448217317487267,
      "grad_norm": 1.8019766807556152,
      "learning_rate": 9.956320346634876e-05,
      "loss": 1.04,
      "step": 18
    },
    {
      "epoch": 0.025806451612903226,
      "grad_norm": 2.1322357654571533,
      "learning_rate": 9.944739353007344e-05,
      "loss": 0.9348,
      "step": 19
    },
    {
      "epoch": 0.027164685908319185,
      "grad_norm": 1.5636383295059204,
      "learning_rate": 9.931806517013612e-05,
      "loss": 0.7803,
      "step": 20
    },
    {
      "epoch": 0.028522920203735144,
      "grad_norm": 1.5372105836868286,
      "learning_rate": 9.917525374361912e-05,
      "loss": 0.9237,
      "step": 21
    },
    {
      "epoch": 0.029881154499151102,
      "grad_norm": 1.4937751293182373,
      "learning_rate": 9.901899829374047e-05,
      "loss": 0.7272,
      "step": 22
    },
    {
      "epoch": 0.03123938879456706,
      "grad_norm": 1.1647422313690186,
      "learning_rate": 9.884934153917997e-05,
      "loss": 0.5155,
      "step": 23
    },
    {
      "epoch": 0.03259762308998302,
      "grad_norm": 3.998044490814209,
      "learning_rate": 9.86663298624003e-05,
      "loss": 0.6064,
      "step": 24
    },
    {
      "epoch": 0.03395585738539898,
      "grad_norm": 1.3711620569229126,
      "learning_rate": 9.847001329696653e-05,
      "loss": 0.6581,
      "step": 25
    },
    {
      "epoch": 0.03531409168081494,
      "grad_norm": 1.5123319625854492,
      "learning_rate": 9.826044551386744e-05,
      "loss": 0.6528,
      "step": 26
    },
    {
      "epoch": 0.0366723259762309,
      "grad_norm": 1.4044246673583984,
      "learning_rate": 9.803768380684242e-05,
      "loss": 0.8442,
      "step": 27
    },
    {
      "epoch": 0.038030560271646856,
      "grad_norm": 1.2004684209823608,
      "learning_rate": 9.780178907671789e-05,
      "loss": 0.4215,
      "step": 28
    },
    {
      "epoch": 0.03938879456706282,
      "grad_norm": 1.6459702253341675,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.7792,
      "step": 29
    },
    {
      "epoch": 0.04074702886247878,
      "grad_norm": 1.5842430591583252,
      "learning_rate": 9.729086208503174e-05,
      "loss": 0.6471,
      "step": 30
    },
    {
      "epoch": 0.042105263157894736,
      "grad_norm": 1.0356532335281372,
      "learning_rate": 9.701596950580806e-05,
      "loss": 0.3588,
      "step": 31
    },
    {
      "epoch": 0.0434634974533107,
      "grad_norm": 1.0359015464782715,
      "learning_rate": 9.672822322997305e-05,
      "loss": 0.5072,
      "step": 32
    },
    {
      "epoch": 0.044821731748726654,
      "grad_norm": 1.0919171571731567,
      "learning_rate": 9.642770192448536e-05,
      "loss": 0.4494,
      "step": 33
    },
    {
      "epoch": 0.046179966044142616,
      "grad_norm": 1.2643227577209473,
      "learning_rate": 9.611448774886924e-05,
      "loss": 0.5515,
      "step": 34
    },
    {
      "epoch": 0.04753820033955857,
      "grad_norm": 1.2034884691238403,
      "learning_rate": 9.578866633275288e-05,
      "loss": 0.4593,
      "step": 35
    },
    {
      "epoch": 0.048896434634974534,
      "grad_norm": 5.325469017028809,
      "learning_rate": 9.545032675245813e-05,
      "loss": 0.3904,
      "step": 36
    },
    {
      "epoch": 0.05025466893039049,
      "grad_norm": 1.0181546211242676,
      "learning_rate": 9.509956150664796e-05,
      "loss": 0.3721,
      "step": 37
    },
    {
      "epoch": 0.05161290322580645,
      "grad_norm": 1.0899913311004639,
      "learning_rate": 9.473646649103818e-05,
      "loss": 0.3608,
      "step": 38
    },
    {
      "epoch": 0.052971137521222414,
      "grad_norm": 1.1829785108566284,
      "learning_rate": 9.43611409721806e-05,
      "loss": 0.4572,
      "step": 39
    },
    {
      "epoch": 0.05432937181663837,
      "grad_norm": 0.8924344778060913,
      "learning_rate": 9.397368756032445e-05,
      "loss": 0.3511,
      "step": 40
    },
    {
      "epoch": 0.05568760611205433,
      "grad_norm": 1.0210082530975342,
      "learning_rate": 9.357421218136386e-05,
      "loss": 0.3184,
      "step": 41
    },
    {
      "epoch": 0.05704584040747029,
      "grad_norm": 1.2711970806121826,
      "learning_rate": 9.316282404787871e-05,
      "loss": 0.4089,
      "step": 42
    },
    {
      "epoch": 0.05840407470288625,
      "grad_norm": 1.984538197517395,
      "learning_rate": 9.273963562927695e-05,
      "loss": 0.4726,
      "step": 43
    },
    {
      "epoch": 0.059762308998302205,
      "grad_norm": 1.6680279970169067,
      "learning_rate": 9.230476262104677e-05,
      "loss": 0.3792,
      "step": 44
    },
    {
      "epoch": 0.06112054329371817,
      "grad_norm": 1.7497315406799316,
      "learning_rate": 9.185832391312644e-05,
      "loss": 0.3635,
      "step": 45
    },
    {
      "epoch": 0.06247877758913412,
      "grad_norm": 1.80753755569458,
      "learning_rate": 9.140044155740101e-05,
      "loss": 0.4891,
      "step": 46
    },
    {
      "epoch": 0.06383701188455009,
      "grad_norm": 1.4111545085906982,
      "learning_rate": 9.093124073433463e-05,
      "loss": 0.4598,
      "step": 47
    },
    {
      "epoch": 0.06519524617996604,
      "grad_norm": 1.5910176038742065,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.3751,
      "step": 48
    },
    {
      "epoch": 0.06655348047538201,
      "grad_norm": 1.1267386674880981,
      "learning_rate": 8.995939984474624e-05,
      "loss": 0.2321,
      "step": 49
    },
    {
      "epoch": 0.06791171477079797,
      "grad_norm": 1.0913166999816895,
      "learning_rate": 8.945702546981969e-05,
      "loss": 0.2783,
      "step": 50
    },
    {
      "epoch": 0.06791171477079797,
      "eval_loss": 0.2754749059677124,
      "eval_runtime": 67.1086,
      "eval_samples_per_second": 18.478,
      "eval_steps_per_second": 4.619,
      "step": 50
    },
    {
      "epoch": 0.06926994906621392,
      "grad_norm": 3.659601926803589,
      "learning_rate": 8.894386393810563e-05,
      "loss": 2.3466,
      "step": 51
    },
    {
      "epoch": 0.07062818336162988,
      "grad_norm": 4.295755863189697,
      "learning_rate": 8.842005554284296e-05,
      "loss": 3.091,
      "step": 52
    },
    {
      "epoch": 0.07198641765704585,
      "grad_norm": 3.616570472717285,
      "learning_rate": 8.788574348801675e-05,
      "loss": 3.2909,
      "step": 53
    },
    {
      "epoch": 0.0733446519524618,
      "grad_norm": 3.0673322677612305,
      "learning_rate": 8.73410738492077e-05,
      "loss": 3.7464,
      "step": 54
    },
    {
      "epoch": 0.07470288624787776,
      "grad_norm": 2.9065418243408203,
      "learning_rate": 8.678619553365659e-05,
      "loss": 2.8627,
      "step": 55
    },
    {
      "epoch": 0.07606112054329371,
      "grad_norm": 3.1884777545928955,
      "learning_rate": 8.622126023955446e-05,
      "loss": 2.777,
      "step": 56
    },
    {
      "epoch": 0.07741935483870968,
      "grad_norm": 4.920656681060791,
      "learning_rate": 8.564642241456986e-05,
      "loss": 2.6136,
      "step": 57
    },
    {
      "epoch": 0.07877758913412564,
      "grad_norm": 1.7224739789962769,
      "learning_rate": 8.506183921362443e-05,
      "loss": 1.0914,
      "step": 58
    },
    {
      "epoch": 0.08013582342954159,
      "grad_norm": 1.2613626718521118,
      "learning_rate": 8.44676704559283e-05,
      "loss": 0.9451,
      "step": 59
    },
    {
      "epoch": 0.08149405772495756,
      "grad_norm": 2.074201822280884,
      "learning_rate": 8.386407858128706e-05,
      "loss": 0.9149,
      "step": 60
    },
    {
      "epoch": 0.08285229202037352,
      "grad_norm": 2.2877724170684814,
      "learning_rate": 8.32512286056924e-05,
      "loss": 1.2911,
      "step": 61
    },
    {
      "epoch": 0.08421052631578947,
      "grad_norm": 1.7160199880599976,
      "learning_rate": 8.262928807620843e-05,
      "loss": 0.6396,
      "step": 62
    },
    {
      "epoch": 0.08556876061120543,
      "grad_norm": 1.397225022315979,
      "learning_rate": 8.199842702516583e-05,
      "loss": 0.6306,
      "step": 63
    },
    {
      "epoch": 0.0869269949066214,
      "grad_norm": 1.5045193433761597,
      "learning_rate": 8.135881792367686e-05,
      "loss": 0.808,
      "step": 64
    },
    {
      "epoch": 0.08828522920203735,
      "grad_norm": 2.1164638996124268,
      "learning_rate": 8.07106356344834e-05,
      "loss": 1.0962,
      "step": 65
    },
    {
      "epoch": 0.08964346349745331,
      "grad_norm": 1.5968292951583862,
      "learning_rate": 8.005405736415126e-05,
      "loss": 0.7918,
      "step": 66
    },
    {
      "epoch": 0.09100169779286928,
      "grad_norm": 1.1926372051239014,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.568,
      "step": 67
    },
    {
      "epoch": 0.09235993208828523,
      "grad_norm": 1.2155282497406006,
      "learning_rate": 7.871643313414718e-05,
      "loss": 0.7005,
      "step": 68
    },
    {
      "epoch": 0.09371816638370119,
      "grad_norm": 1.3235710859298706,
      "learning_rate": 7.803575286758364e-05,
      "loss": 0.5867,
      "step": 69
    },
    {
      "epoch": 0.09507640067911714,
      "grad_norm": 1.535868763923645,
      "learning_rate": 7.734740790612136e-05,
      "loss": 0.5601,
      "step": 70
    },
    {
      "epoch": 0.09643463497453311,
      "grad_norm": 1.332809329032898,
      "learning_rate": 7.66515864363997e-05,
      "loss": 0.5128,
      "step": 71
    },
    {
      "epoch": 0.09779286926994907,
      "grad_norm": 1.1184101104736328,
      "learning_rate": 7.594847868906076e-05,
      "loss": 0.6584,
      "step": 72
    },
    {
      "epoch": 0.09915110356536502,
      "grad_norm": 1.3330540657043457,
      "learning_rate": 7.52382768867422e-05,
      "loss": 0.4603,
      "step": 73
    },
    {
      "epoch": 0.10050933786078098,
      "grad_norm": 1.7870532274246216,
      "learning_rate": 7.452117519152542e-05,
      "loss": 0.9252,
      "step": 74
    },
    {
      "epoch": 0.10186757215619695,
      "grad_norm": 1.4110106229782104,
      "learning_rate": 7.379736965185368e-05,
      "loss": 0.532,
      "step": 75
    },
    {
      "epoch": 0.1032258064516129,
      "grad_norm": 1.2677960395812988,
      "learning_rate": 7.30670581489344e-05,
      "loss": 0.369,
      "step": 76
    },
    {
      "epoch": 0.10458404074702886,
      "grad_norm": 1.5324734449386597,
      "learning_rate": 7.233044034264034e-05,
      "loss": 0.4731,
      "step": 77
    },
    {
      "epoch": 0.10594227504244483,
      "grad_norm": 1.3089126348495483,
      "learning_rate": 7.158771761692464e-05,
      "loss": 0.367,
      "step": 78
    },
    {
      "epoch": 0.10730050933786078,
      "grad_norm": 1.065536618232727,
      "learning_rate": 7.083909302476453e-05,
      "loss": 0.5767,
      "step": 79
    },
    {
      "epoch": 0.10865874363327674,
      "grad_norm": 1.0984853506088257,
      "learning_rate": 7.008477123264848e-05,
      "loss": 0.5426,
      "step": 80
    },
    {
      "epoch": 0.1100169779286927,
      "grad_norm": 1.1644034385681152,
      "learning_rate": 6.932495846462261e-05,
      "loss": 0.5205,
      "step": 81
    },
    {
      "epoch": 0.11137521222410866,
      "grad_norm": 0.6923614144325256,
      "learning_rate": 6.855986244591104e-05,
      "loss": 0.2668,
      "step": 82
    },
    {
      "epoch": 0.11273344651952462,
      "grad_norm": 1.040382981300354,
      "learning_rate": 6.778969234612584e-05,
      "loss": 0.4309,
      "step": 83
    },
    {
      "epoch": 0.11409168081494057,
      "grad_norm": 0.6914508938789368,
      "learning_rate": 6.701465872208216e-05,
      "loss": 0.1929,
      "step": 84
    },
    {
      "epoch": 0.11544991511035653,
      "grad_norm": 0.976138710975647,
      "learning_rate": 6.623497346023418e-05,
      "loss": 0.3347,
      "step": 85
    },
    {
      "epoch": 0.1168081494057725,
      "grad_norm": 0.8875433802604675,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.3286,
      "step": 86
    },
    {
      "epoch": 0.11816638370118845,
      "grad_norm": 0.7386589646339417,
      "learning_rate": 6.466250186922325e-05,
      "loss": 0.4692,
      "step": 87
    },
    {
      "epoch": 0.11952461799660441,
      "grad_norm": 1.2174679040908813,
      "learning_rate": 6.387014543809223e-05,
      "loss": 0.4497,
      "step": 88
    },
    {
      "epoch": 0.12088285229202038,
      "grad_norm": 0.8012718558311462,
      "learning_rate": 6.307399704769099e-05,
      "loss": 0.3505,
      "step": 89
    },
    {
      "epoch": 0.12224108658743633,
      "grad_norm": 0.8060907125473022,
      "learning_rate": 6.227427435703997e-05,
      "loss": 0.3401,
      "step": 90
    },
    {
      "epoch": 0.12359932088285229,
      "grad_norm": 0.612433135509491,
      "learning_rate": 6.147119600233758e-05,
      "loss": 0.2253,
      "step": 91
    },
    {
      "epoch": 0.12495755517826825,
      "grad_norm": 0.798200786113739,
      "learning_rate": 6.066498153718735e-05,
      "loss": 0.3208,
      "step": 92
    },
    {
      "epoch": 0.12631578947368421,
      "grad_norm": 0.9495358467102051,
      "learning_rate": 5.985585137257401e-05,
      "loss": 0.5265,
      "step": 93
    },
    {
      "epoch": 0.12767402376910017,
      "grad_norm": 1.0848101377487183,
      "learning_rate": 5.90440267166055e-05,
      "loss": 0.6237,
      "step": 94
    },
    {
      "epoch": 0.12903225806451613,
      "grad_norm": 0.6925517916679382,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 0.3498,
      "step": 95
    },
    {
      "epoch": 0.13039049235993208,
      "grad_norm": 0.6562709808349609,
      "learning_rate": 5.74131823855921e-05,
      "loss": 0.2197,
      "step": 96
    },
    {
      "epoch": 0.13174872665534804,
      "grad_norm": 1.1780856847763062,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 0.449,
      "step": 97
    },
    {
      "epoch": 0.13310696095076402,
      "grad_norm": 0.8803690075874329,
      "learning_rate": 5.577423184847932e-05,
      "loss": 0.2064,
      "step": 98
    },
    {
      "epoch": 0.13446519524617997,
      "grad_norm": 1.1821833848953247,
      "learning_rate": 5.495227651252315e-05,
      "loss": 0.4137,
      "step": 99
    },
    {
      "epoch": 0.13582342954159593,
      "grad_norm": 1.2370933294296265,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 0.312,
      "step": 100
    },
    {
      "epoch": 0.13582342954159593,
      "eval_loss": 0.18762627243995667,
      "eval_runtime": 67.3144,
      "eval_samples_per_second": 18.421,
      "eval_steps_per_second": 4.605,
      "step": 100
    },
    {
      "epoch": 0.13718166383701189,
      "grad_norm": 2.157839775085449,
      "learning_rate": 5.330452921628497e-05,
      "loss": 1.8393,
      "step": 101
    },
    {
      "epoch": 0.13853989813242784,
      "grad_norm": 3.5639190673828125,
      "learning_rate": 5.247918773366112e-05,
      "loss": 2.5582,
      "step": 102
    },
    {
      "epoch": 0.1398981324278438,
      "grad_norm": 2.826010227203369,
      "learning_rate": 5.165316846586541e-05,
      "loss": 2.4372,
      "step": 103
    },
    {
      "epoch": 0.14125636672325975,
      "grad_norm": 2.775473117828369,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 2.3188,
      "step": 104
    },
    {
      "epoch": 0.14261460101867574,
      "grad_norm": 2.6814448833465576,
      "learning_rate": 5e-05,
      "loss": 1.9215,
      "step": 105
    },
    {
      "epoch": 0.1439728353140917,
      "grad_norm": 1.5364346504211426,
      "learning_rate": 4.917330276168208e-05,
      "loss": 0.9285,
      "step": 106
    },
    {
      "epoch": 0.14533106960950765,
      "grad_norm": 1.3806895017623901,
      "learning_rate": 4.834683153413459e-05,
      "loss": 0.7553,
      "step": 107
    },
    {
      "epoch": 0.1466893039049236,
      "grad_norm": 1.292453408241272,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 0.4807,
      "step": 108
    },
    {
      "epoch": 0.14804753820033956,
      "grad_norm": 1.7320868968963623,
      "learning_rate": 4.669547078371504e-05,
      "loss": 0.8362,
      "step": 109
    },
    {
      "epoch": 0.1494057724957555,
      "grad_norm": 1.613324522972107,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 0.5316,
      "step": 110
    },
    {
      "epoch": 0.15076400679117147,
      "grad_norm": 1.395429015159607,
      "learning_rate": 4.504772348747687e-05,
      "loss": 0.4296,
      "step": 111
    },
    {
      "epoch": 0.15212224108658742,
      "grad_norm": 1.6080973148345947,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 0.449,
      "step": 112
    },
    {
      "epoch": 0.1534804753820034,
      "grad_norm": 2.063976764678955,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 0.7207,
      "step": 113
    },
    {
      "epoch": 0.15483870967741936,
      "grad_norm": 1.6148802042007446,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 0.8924,
      "step": 114
    },
    {
      "epoch": 0.15619694397283532,
      "grad_norm": 1.8477810621261597,
      "learning_rate": 4.17702704859633e-05,
      "loss": 0.7061,
      "step": 115
    },
    {
      "epoch": 0.15755517826825127,
      "grad_norm": 1.478446125984192,
      "learning_rate": 4.095597328339452e-05,
      "loss": 0.6664,
      "step": 116
    },
    {
      "epoch": 0.15891341256366723,
      "grad_norm": 1.2406508922576904,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 0.5284,
      "step": 117
    },
    {
      "epoch": 0.16027164685908318,
      "grad_norm": 1.1124491691589355,
      "learning_rate": 3.933501846281267e-05,
      "loss": 0.5255,
      "step": 118
    },
    {
      "epoch": 0.16162988115449914,
      "grad_norm": 0.9964424967765808,
      "learning_rate": 3.852880399766243e-05,
      "loss": 0.3398,
      "step": 119
    },
    {
      "epoch": 0.16298811544991512,
      "grad_norm": 1.0388567447662354,
      "learning_rate": 3.772572564296005e-05,
      "loss": 0.4075,
      "step": 120
    },
    {
      "epoch": 0.16434634974533108,
      "grad_norm": 1.1458494663238525,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 0.4073,
      "step": 121
    },
    {
      "epoch": 0.16570458404074703,
      "grad_norm": 0.9584537148475647,
      "learning_rate": 3.612985456190778e-05,
      "loss": 0.3457,
      "step": 122
    },
    {
      "epoch": 0.167062818336163,
      "grad_norm": 1.1488529443740845,
      "learning_rate": 3.533749813077677e-05,
      "loss": 0.2939,
      "step": 123
    },
    {
      "epoch": 0.16842105263157894,
      "grad_norm": 0.8016781806945801,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.2453,
      "step": 124
    },
    {
      "epoch": 0.1697792869269949,
      "grad_norm": 0.8860985040664673,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 0.2834,
      "step": 125
    },
    {
      "epoch": 0.17113752122241085,
      "grad_norm": 1.3960487842559814,
      "learning_rate": 3.298534127791785e-05,
      "loss": 0.3573,
      "step": 126
    },
    {
      "epoch": 0.17249575551782684,
      "grad_norm": 1.4462261199951172,
      "learning_rate": 3.221030765387417e-05,
      "loss": 0.4583,
      "step": 127
    },
    {
      "epoch": 0.1738539898132428,
      "grad_norm": 0.9451635479927063,
      "learning_rate": 3.144013755408895e-05,
      "loss": 0.2419,
      "step": 128
    },
    {
      "epoch": 0.17521222410865875,
      "grad_norm": 0.9558318257331848,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 0.4274,
      "step": 129
    },
    {
      "epoch": 0.1765704584040747,
      "grad_norm": 1.0216695070266724,
      "learning_rate": 2.991522876735154e-05,
      "loss": 0.413,
      "step": 130
    },
    {
      "epoch": 0.17792869269949066,
      "grad_norm": 0.8177182674407959,
      "learning_rate": 2.916090697523549e-05,
      "loss": 0.2995,
      "step": 131
    },
    {
      "epoch": 0.17928692699490661,
      "grad_norm": 0.8251747488975525,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 0.2619,
      "step": 132
    },
    {
      "epoch": 0.18064516129032257,
      "grad_norm": 0.8147902488708496,
      "learning_rate": 2.766955965735968e-05,
      "loss": 0.328,
      "step": 133
    },
    {
      "epoch": 0.18200339558573855,
      "grad_norm": 1.024512767791748,
      "learning_rate": 2.693294185106562e-05,
      "loss": 0.2928,
      "step": 134
    },
    {
      "epoch": 0.1833616298811545,
      "grad_norm": 0.9274868965148926,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 0.2619,
      "step": 135
    },
    {
      "epoch": 0.18471986417657046,
      "grad_norm": 1.010575294494629,
      "learning_rate": 2.547882480847461e-05,
      "loss": 0.3417,
      "step": 136
    },
    {
      "epoch": 0.18607809847198642,
      "grad_norm": 0.9014824628829956,
      "learning_rate": 2.476172311325783e-05,
      "loss": 0.4541,
      "step": 137
    },
    {
      "epoch": 0.18743633276740237,
      "grad_norm": 0.9789355993270874,
      "learning_rate": 2.405152131093926e-05,
      "loss": 0.3065,
      "step": 138
    },
    {
      "epoch": 0.18879456706281833,
      "grad_norm": 0.7785047888755798,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 0.3614,
      "step": 139
    },
    {
      "epoch": 0.19015280135823429,
      "grad_norm": 0.809662938117981,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 0.2764,
      "step": 140
    },
    {
      "epoch": 0.19151103565365024,
      "grad_norm": 1.2895207405090332,
      "learning_rate": 2.196424713241637e-05,
      "loss": 0.6835,
      "step": 141
    },
    {
      "epoch": 0.19286926994906622,
      "grad_norm": 0.8904023766517639,
      "learning_rate": 2.128356686585282e-05,
      "loss": 0.4472,
      "step": 142
    },
    {
      "epoch": 0.19422750424448218,
      "grad_norm": 0.7771594524383545,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.293,
      "step": 143
    },
    {
      "epoch": 0.19558573853989814,
      "grad_norm": 0.9814310669898987,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 0.2739,
      "step": 144
    },
    {
      "epoch": 0.1969439728353141,
      "grad_norm": 1.117093563079834,
      "learning_rate": 1.928936436551661e-05,
      "loss": 0.379,
      "step": 145
    },
    {
      "epoch": 0.19830220713073005,
      "grad_norm": 0.7254476547241211,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 0.2855,
      "step": 146
    },
    {
      "epoch": 0.199660441426146,
      "grad_norm": 0.716963529586792,
      "learning_rate": 1.800157297483417e-05,
      "loss": 0.2806,
      "step": 147
    },
    {
      "epoch": 0.20101867572156196,
      "grad_norm": 0.7899037003517151,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 0.2052,
      "step": 148
    },
    {
      "epoch": 0.20237691001697794,
      "grad_norm": 0.9633182287216187,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 0.1877,
      "step": 149
    },
    {
      "epoch": 0.2037351443123939,
      "grad_norm": 1.0001466274261475,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 0.2413,
      "step": 150
    },
    {
      "epoch": 0.2037351443123939,
      "eval_loss": 0.14431816339492798,
      "eval_runtime": 67.1884,
      "eval_samples_per_second": 18.456,
      "eval_steps_per_second": 4.614,
      "step": 150
    },
    {
      "epoch": 0.20509337860780985,
      "grad_norm": 1.2680031061172485,
      "learning_rate": 1.553232954407171e-05,
      "loss": 1.549,
      "step": 151
    },
    {
      "epoch": 0.2064516129032258,
      "grad_norm": 1.2180172204971313,
      "learning_rate": 1.4938160786375572e-05,
      "loss": 2.0092,
      "step": 152
    },
    {
      "epoch": 0.20780984719864176,
      "grad_norm": 1.5306624174118042,
      "learning_rate": 1.435357758543015e-05,
      "loss": 2.173,
      "step": 153
    },
    {
      "epoch": 0.20916808149405772,
      "grad_norm": 1.5853345394134521,
      "learning_rate": 1.3778739760445552e-05,
      "loss": 2.1808,
      "step": 154
    },
    {
      "epoch": 0.21052631578947367,
      "grad_norm": 1.7286503314971924,
      "learning_rate": 1.3213804466343421e-05,
      "loss": 1.8567,
      "step": 155
    },
    {
      "epoch": 0.21188455008488966,
      "grad_norm": 1.7660788297653198,
      "learning_rate": 1.2658926150792322e-05,
      "loss": 1.3088,
      "step": 156
    },
    {
      "epoch": 0.2132427843803056,
      "grad_norm": 0.83577561378479,
      "learning_rate": 1.2114256511983274e-05,
      "loss": 0.6152,
      "step": 157
    },
    {
      "epoch": 0.21460101867572157,
      "grad_norm": 1.0349199771881104,
      "learning_rate": 1.157994445715706e-05,
      "loss": 0.525,
      "step": 158
    },
    {
      "epoch": 0.21595925297113752,
      "grad_norm": 1.2438606023788452,
      "learning_rate": 1.1056136061894384e-05,
      "loss": 0.2478,
      "step": 159
    },
    {
      "epoch": 0.21731748726655348,
      "grad_norm": 2.1919026374816895,
      "learning_rate": 1.0542974530180327e-05,
      "loss": 0.9311,
      "step": 160
    },
    {
      "epoch": 0.21867572156196943,
      "grad_norm": 2.6150472164154053,
      "learning_rate": 1.0040600155253765e-05,
      "loss": 0.8594,
      "step": 161
    },
    {
      "epoch": 0.2200339558573854,
      "grad_norm": 2.1280641555786133,
      "learning_rate": 9.549150281252633e-06,
      "loss": 0.3983,
      "step": 162
    },
    {
      "epoch": 0.22139219015280137,
      "grad_norm": 0.9619987607002258,
      "learning_rate": 9.068759265665384e-06,
      "loss": 0.3423,
      "step": 163
    },
    {
      "epoch": 0.22275042444821733,
      "grad_norm": 1.3266946077346802,
      "learning_rate": 8.599558442598998e-06,
      "loss": 0.807,
      "step": 164
    },
    {
      "epoch": 0.22410865874363328,
      "grad_norm": 0.9932201504707336,
      "learning_rate": 8.141676086873572e-06,
      "loss": 0.4736,
      "step": 165
    },
    {
      "epoch": 0.22546689303904924,
      "grad_norm": 1.1188908815383911,
      "learning_rate": 7.695237378953223e-06,
      "loss": 0.4146,
      "step": 166
    },
    {
      "epoch": 0.2268251273344652,
      "grad_norm": 1.243120551109314,
      "learning_rate": 7.260364370723044e-06,
      "loss": 0.645,
      "step": 167
    },
    {
      "epoch": 0.22818336162988115,
      "grad_norm": 1.0500059127807617,
      "learning_rate": 6.837175952121306e-06,
      "loss": 0.3818,
      "step": 168
    },
    {
      "epoch": 0.2295415959252971,
      "grad_norm": 1.0347051620483398,
      "learning_rate": 6.425787818636131e-06,
      "loss": 0.3161,
      "step": 169
    },
    {
      "epoch": 0.23089983022071306,
      "grad_norm": 0.9902946949005127,
      "learning_rate": 6.026312439675552e-06,
      "loss": 0.3734,
      "step": 170
    },
    {
      "epoch": 0.23225806451612904,
      "grad_norm": 1.1293691396713257,
      "learning_rate": 5.6388590278194096e-06,
      "loss": 0.3349,
      "step": 171
    },
    {
      "epoch": 0.233616298811545,
      "grad_norm": 0.8362142443656921,
      "learning_rate": 5.263533508961827e-06,
      "loss": 0.2666,
      "step": 172
    },
    {
      "epoch": 0.23497453310696095,
      "grad_norm": 0.7939189076423645,
      "learning_rate": 4.900438493352055e-06,
      "loss": 0.1747,
      "step": 173
    },
    {
      "epoch": 0.2363327674023769,
      "grad_norm": 1.3641140460968018,
      "learning_rate": 4.549673247541875e-06,
      "loss": 0.6154,
      "step": 174
    },
    {
      "epoch": 0.23769100169779286,
      "grad_norm": 1.0040560960769653,
      "learning_rate": 4.2113336672471245e-06,
      "loss": 0.3224,
      "step": 175
    },
    {
      "epoch": 0.23904923599320882,
      "grad_norm": 0.8438626527786255,
      "learning_rate": 3.885512251130763e-06,
      "loss": 0.2955,
      "step": 176
    },
    {
      "epoch": 0.24040747028862477,
      "grad_norm": 0.7685846090316772,
      "learning_rate": 3.5722980755146517e-06,
      "loss": 0.2529,
      "step": 177
    },
    {
      "epoch": 0.24176570458404076,
      "grad_norm": 0.860312283039093,
      "learning_rate": 3.271776770026963e-06,
      "loss": 0.3214,
      "step": 178
    },
    {
      "epoch": 0.2431239388794567,
      "grad_norm": 1.2312915325164795,
      "learning_rate": 2.9840304941919415e-06,
      "loss": 0.4232,
      "step": 179
    },
    {
      "epoch": 0.24448217317487267,
      "grad_norm": 1.0875355005264282,
      "learning_rate": 2.7091379149682685e-06,
      "loss": 0.3892,
      "step": 180
    },
    {
      "epoch": 0.24584040747028862,
      "grad_norm": 0.8430658578872681,
      "learning_rate": 2.4471741852423237e-06,
      "loss": 0.3758,
      "step": 181
    },
    {
      "epoch": 0.24719864176570458,
      "grad_norm": 1.005794882774353,
      "learning_rate": 2.1982109232821178e-06,
      "loss": 0.3334,
      "step": 182
    },
    {
      "epoch": 0.24855687606112054,
      "grad_norm": 1.1433653831481934,
      "learning_rate": 1.962316193157593e-06,
      "loss": 0.3489,
      "step": 183
    },
    {
      "epoch": 0.2499151103565365,
      "grad_norm": 1.028061866760254,
      "learning_rate": 1.7395544861325718e-06,
      "loss": 0.3125,
      "step": 184
    },
    {
      "epoch": 0.25127334465195245,
      "grad_norm": 0.9788212180137634,
      "learning_rate": 1.5299867030334814e-06,
      "loss": 0.3202,
      "step": 185
    },
    {
      "epoch": 0.25263157894736843,
      "grad_norm": 0.7344953417778015,
      "learning_rate": 1.333670137599713e-06,
      "loss": 0.1761,
      "step": 186
    },
    {
      "epoch": 0.25398981324278436,
      "grad_norm": 0.8415098190307617,
      "learning_rate": 1.1506584608200367e-06,
      "loss": 0.2663,
      "step": 187
    },
    {
      "epoch": 0.25534804753820034,
      "grad_norm": 0.7864528894424438,
      "learning_rate": 9.810017062595322e-07,
      "loss": 0.2404,
      "step": 188
    },
    {
      "epoch": 0.2567062818336163,
      "grad_norm": 0.8776137828826904,
      "learning_rate": 8.247462563808817e-07,
      "loss": 0.5342,
      "step": 189
    },
    {
      "epoch": 0.25806451612903225,
      "grad_norm": 1.2769297361373901,
      "learning_rate": 6.819348298638839e-07,
      "loss": 0.3738,
      "step": 190
    },
    {
      "epoch": 0.25942275042444823,
      "grad_norm": 1.3412901163101196,
      "learning_rate": 5.526064699265753e-07,
      "loss": 0.4223,
      "step": 191
    },
    {
      "epoch": 0.26078098471986416,
      "grad_norm": 1.203078269958496,
      "learning_rate": 4.367965336512403e-07,
      "loss": 0.5816,
      "step": 192
    },
    {
      "epoch": 0.26213921901528014,
      "grad_norm": 0.7777627110481262,
      "learning_rate": 3.3453668231809286e-07,
      "loss": 0.2591,
      "step": 193
    },
    {
      "epoch": 0.2634974533106961,
      "grad_norm": 1.0466378927230835,
      "learning_rate": 2.458548727494292e-07,
      "loss": 0.5292,
      "step": 194
    },
    {
      "epoch": 0.26485568760611206,
      "grad_norm": 0.8496146202087402,
      "learning_rate": 1.7077534966650766e-07,
      "loss": 0.266,
      "step": 195
    },
    {
      "epoch": 0.26621392190152804,
      "grad_norm": 0.8443109393119812,
      "learning_rate": 1.0931863906127327e-07,
      "loss": 0.2021,
      "step": 196
    },
    {
      "epoch": 0.26757215619694397,
      "grad_norm": 0.6287698745727539,
      "learning_rate": 6.150154258476315e-08,
      "loss": 0.1533,
      "step": 197
    },
    {
      "epoch": 0.26893039049235995,
      "grad_norm": 0.7238965630531311,
      "learning_rate": 2.7337132953697554e-08,
      "loss": 0.2349,
      "step": 198
    },
    {
      "epoch": 0.2702886247877759,
      "grad_norm": 0.7158587574958801,
      "learning_rate": 6.834750376549792e-09,
      "loss": 0.2074,
      "step": 199
    },
    {
      "epoch": 0.27164685908319186,
      "grad_norm": 0.7276369333267212,
      "learning_rate": 0.0,
      "loss": 0.149,
      "step": 200
    },
    {
      "epoch": 0.27164685908319186,
      "eval_loss": 0.12807011604309082,
      "eval_runtime": 67.31,
      "eval_samples_per_second": 18.422,
      "eval_steps_per_second": 4.606,
      "step": 200
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.6826331452512666e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}