{
  "best_metric": 0.24746929109096527,
  "best_model_checkpoint": "miner_id_24/checkpoint-150",
  "epoch": 0.012797815839430071,
  "eval_steps": 50,
  "global_step": 150,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 8.531877226286714e-05,
      "grad_norm": 1.2464861869812012,
      "learning_rate": 1e-05,
      "loss": 0.4214,
      "step": 1
    },
    {
      "epoch": 8.531877226286714e-05,
      "eval_loss": 0.7482507228851318,
      "eval_runtime": 1483.5317,
      "eval_samples_per_second": 13.306,
      "eval_steps_per_second": 3.327,
      "step": 1
    },
    {
      "epoch": 0.00017063754452573428,
      "grad_norm": 1.5281243324279785,
      "learning_rate": 2e-05,
      "loss": 0.532,
      "step": 2
    },
    {
      "epoch": 0.00025595631678860143,
      "grad_norm": 1.6495031118392944,
      "learning_rate": 3e-05,
      "loss": 0.5322,
      "step": 3
    },
    {
      "epoch": 0.00034127508905146855,
      "grad_norm": 1.5111128091812134,
      "learning_rate": 4e-05,
      "loss": 0.4668,
      "step": 4
    },
    {
      "epoch": 0.0004265938613143357,
      "grad_norm": 1.3864272832870483,
      "learning_rate": 5e-05,
      "loss": 0.3644,
      "step": 5
    },
    {
      "epoch": 0.0005119126335772029,
      "grad_norm": 1.4793012142181396,
      "learning_rate": 6e-05,
      "loss": 0.3305,
      "step": 6
    },
    {
      "epoch": 0.0005972314058400699,
      "grad_norm": 1.3043208122253418,
      "learning_rate": 7e-05,
      "loss": 0.325,
      "step": 7
    },
    {
      "epoch": 0.0006825501781029371,
      "grad_norm": 1.21058988571167,
      "learning_rate": 8e-05,
      "loss": 0.3634,
      "step": 8
    },
    {
      "epoch": 0.0007678689503658043,
      "grad_norm": 1.0812054872512817,
      "learning_rate": 9e-05,
      "loss": 0.3319,
      "step": 9
    },
    {
      "epoch": 0.0008531877226286714,
      "grad_norm": 1.0187865495681763,
      "learning_rate": 0.0001,
      "loss": 0.3057,
      "step": 10
    },
    {
      "epoch": 0.0009385064948915385,
      "grad_norm": 1.0747543573379517,
      "learning_rate": 9.999316524962345e-05,
      "loss": 0.2975,
      "step": 11
    },
    {
      "epoch": 0.0010238252671544057,
      "grad_norm": 0.8817413449287415,
      "learning_rate": 9.997266286704631e-05,
      "loss": 0.2442,
      "step": 12
    },
    {
      "epoch": 0.0011091440394172729,
      "grad_norm": 0.9227721095085144,
      "learning_rate": 9.993849845741524e-05,
      "loss": 0.2863,
      "step": 13
    },
    {
      "epoch": 0.0011944628116801399,
      "grad_norm": 0.9496636986732483,
      "learning_rate": 9.989068136093873e-05,
      "loss": 0.2912,
      "step": 14
    },
    {
      "epoch": 0.001279781583943007,
      "grad_norm": 0.9581251740455627,
      "learning_rate": 9.98292246503335e-05,
      "loss": 0.3178,
      "step": 15
    },
    {
      "epoch": 0.0013651003562058742,
      "grad_norm": 0.9072206020355225,
      "learning_rate": 9.975414512725057e-05,
      "loss": 0.2662,
      "step": 16
    },
    {
      "epoch": 0.0014504191284687414,
      "grad_norm": 0.9696200489997864,
      "learning_rate": 9.966546331768191e-05,
      "loss": 0.2656,
      "step": 17
    },
    {
      "epoch": 0.0015357379007316086,
      "grad_norm": 0.9928320050239563,
      "learning_rate": 9.956320346634876e-05,
      "loss": 0.3002,
      "step": 18
    },
    {
      "epoch": 0.0016210566729944755,
      "grad_norm": 1.0626753568649292,
      "learning_rate": 9.944739353007344e-05,
      "loss": 0.3232,
      "step": 19
    },
    {
      "epoch": 0.0017063754452573427,
      "grad_norm": 0.9923722147941589,
      "learning_rate": 9.931806517013612e-05,
      "loss": 0.3036,
      "step": 20
    },
    {
      "epoch": 0.0017916942175202099,
      "grad_norm": 1.0356357097625732,
      "learning_rate": 9.917525374361912e-05,
      "loss": 0.3039,
      "step": 21
    },
    {
      "epoch": 0.001877012989783077,
      "grad_norm": 0.9788214564323425,
      "learning_rate": 9.901899829374047e-05,
      "loss": 0.2962,
      "step": 22
    },
    {
      "epoch": 0.001962331762045944,
      "grad_norm": 0.9420165419578552,
      "learning_rate": 9.884934153917997e-05,
      "loss": 0.2941,
      "step": 23
    },
    {
      "epoch": 0.0020476505343088114,
      "grad_norm": 1.023369312286377,
      "learning_rate": 9.86663298624003e-05,
      "loss": 0.3314,
      "step": 24
    },
    {
      "epoch": 0.0021329693065716784,
      "grad_norm": 0.9040637016296387,
      "learning_rate": 9.847001329696653e-05,
      "loss": 0.2702,
      "step": 25
    },
    {
      "epoch": 0.0022182880788345458,
      "grad_norm": 0.9994131922721863,
      "learning_rate": 9.826044551386744e-05,
      "loss": 0.3051,
      "step": 26
    },
    {
      "epoch": 0.0023036068510974127,
      "grad_norm": 0.8970037698745728,
      "learning_rate": 9.803768380684242e-05,
      "loss": 0.2412,
      "step": 27
    },
    {
      "epoch": 0.0023889256233602797,
      "grad_norm": 0.9744396805763245,
      "learning_rate": 9.780178907671789e-05,
      "loss": 0.2832,
      "step": 28
    },
    {
      "epoch": 0.002474244395623147,
      "grad_norm": 0.997397780418396,
      "learning_rate": 9.755282581475769e-05,
      "loss": 0.2371,
      "step": 29
    },
    {
      "epoch": 0.002559563167886014,
      "grad_norm": 1.0509902238845825,
      "learning_rate": 9.729086208503174e-05,
      "loss": 0.3107,
      "step": 30
    },
    {
      "epoch": 0.0026448819401488815,
      "grad_norm": 1.0135645866394043,
      "learning_rate": 9.701596950580806e-05,
      "loss": 0.306,
      "step": 31
    },
    {
      "epoch": 0.0027302007124117484,
      "grad_norm": 1.0272971391677856,
      "learning_rate": 9.672822322997305e-05,
      "loss": 0.2744,
      "step": 32
    },
    {
      "epoch": 0.0028155194846746154,
      "grad_norm": 0.9856159090995789,
      "learning_rate": 9.642770192448536e-05,
      "loss": 0.2923,
      "step": 33
    },
    {
      "epoch": 0.0029008382569374828,
      "grad_norm": 0.9450370073318481,
      "learning_rate": 9.611448774886924e-05,
      "loss": 0.2643,
      "step": 34
    },
    {
      "epoch": 0.0029861570292003497,
      "grad_norm": 1.0237394571304321,
      "learning_rate": 9.578866633275288e-05,
      "loss": 0.314,
      "step": 35
    },
    {
      "epoch": 0.003071475801463217,
      "grad_norm": 1.0201995372772217,
      "learning_rate": 9.545032675245813e-05,
      "loss": 0.2792,
      "step": 36
    },
    {
      "epoch": 0.003156794573726084,
      "grad_norm": 1.0572022199630737,
      "learning_rate": 9.509956150664796e-05,
      "loss": 0.2912,
      "step": 37
    },
    {
      "epoch": 0.003242113345988951,
      "grad_norm": 1.1516457796096802,
      "learning_rate": 9.473646649103818e-05,
      "loss": 0.3039,
      "step": 38
    },
    {
      "epoch": 0.0033274321182518185,
      "grad_norm": 1.0593175888061523,
      "learning_rate": 9.43611409721806e-05,
      "loss": 0.2966,
      "step": 39
    },
    {
      "epoch": 0.0034127508905146854,
      "grad_norm": 1.0439590215682983,
      "learning_rate": 9.397368756032445e-05,
      "loss": 0.2674,
      "step": 40
    },
    {
      "epoch": 0.003498069662777553,
      "grad_norm": 1.1773626804351807,
      "learning_rate": 9.357421218136386e-05,
      "loss": 0.298,
      "step": 41
    },
    {
      "epoch": 0.0035833884350404198,
      "grad_norm": 1.09316086769104,
      "learning_rate": 9.316282404787871e-05,
      "loss": 0.2878,
      "step": 42
    },
    {
      "epoch": 0.0036687072073032867,
      "grad_norm": 1.0136131048202515,
      "learning_rate": 9.273963562927695e-05,
      "loss": 0.2619,
      "step": 43
    },
    {
      "epoch": 0.003754025979566154,
      "grad_norm": 1.061280608177185,
      "learning_rate": 9.230476262104677e-05,
      "loss": 0.3142,
      "step": 44
    },
    {
      "epoch": 0.003839344751829021,
      "grad_norm": 1.0769422054290771,
      "learning_rate": 9.185832391312644e-05,
      "loss": 0.2805,
      "step": 45
    },
    {
      "epoch": 0.003924663524091888,
      "grad_norm": 1.2159357070922852,
      "learning_rate": 9.140044155740101e-05,
      "loss": 0.2998,
      "step": 46
    },
    {
      "epoch": 0.0040099822963547554,
      "grad_norm": 1.0424118041992188,
      "learning_rate": 9.093124073433463e-05,
      "loss": 0.2641,
      "step": 47
    },
    {
      "epoch": 0.004095301068617623,
      "grad_norm": 1.5114845037460327,
      "learning_rate": 9.045084971874738e-05,
      "loss": 0.3071,
      "step": 48
    },
    {
      "epoch": 0.004180619840880489,
      "grad_norm": 1.5022063255310059,
      "learning_rate": 8.995939984474624e-05,
      "loss": 0.3376,
      "step": 49
    },
    {
      "epoch": 0.004265938613143357,
      "grad_norm": 1.8348913192749023,
      "learning_rate": 8.945702546981969e-05,
      "loss": 0.3647,
      "step": 50
    },
    {
      "epoch": 0.004265938613143357,
      "eval_loss": 0.3193605840206146,
      "eval_runtime": 1490.9395,
      "eval_samples_per_second": 13.24,
      "eval_steps_per_second": 3.31,
      "step": 50
    },
    {
      "epoch": 0.004351257385406224,
      "grad_norm": 1.4306588172912598,
      "learning_rate": 8.894386393810563e-05,
      "loss": 0.4202,
      "step": 51
    },
    {
      "epoch": 0.0044365761576690916,
      "grad_norm": 1.0270633697509766,
      "learning_rate": 8.842005554284296e-05,
      "loss": 0.3468,
      "step": 52
    },
    {
      "epoch": 0.004521894929931958,
      "grad_norm": 0.686927080154419,
      "learning_rate": 8.788574348801675e-05,
      "loss": 0.3232,
      "step": 53
    },
    {
      "epoch": 0.0046072137021948255,
      "grad_norm": 0.6519087553024292,
      "learning_rate": 8.73410738492077e-05,
      "loss": 0.2586,
      "step": 54
    },
    {
      "epoch": 0.004692532474457693,
      "grad_norm": 0.7089278697967529,
      "learning_rate": 8.678619553365659e-05,
      "loss": 0.2663,
      "step": 55
    },
    {
      "epoch": 0.004777851246720559,
      "grad_norm": 0.7305288910865784,
      "learning_rate": 8.622126023955446e-05,
      "loss": 0.2777,
      "step": 56
    },
    {
      "epoch": 0.004863170018983427,
      "grad_norm": 0.6100066304206848,
      "learning_rate": 8.564642241456986e-05,
      "loss": 0.2407,
      "step": 57
    },
    {
      "epoch": 0.004948488791246294,
      "grad_norm": 0.6845047473907471,
      "learning_rate": 8.506183921362443e-05,
      "loss": 0.2532,
      "step": 58
    },
    {
      "epoch": 0.005033807563509161,
      "grad_norm": 0.7084606885910034,
      "learning_rate": 8.44676704559283e-05,
      "loss": 0.2842,
      "step": 59
    },
    {
      "epoch": 0.005119126335772028,
      "grad_norm": 0.682884693145752,
      "learning_rate": 8.386407858128706e-05,
      "loss": 0.2672,
      "step": 60
    },
    {
      "epoch": 0.0052044451080348955,
      "grad_norm": 0.5755671262741089,
      "learning_rate": 8.32512286056924e-05,
      "loss": 0.1856,
      "step": 61
    },
    {
      "epoch": 0.005289763880297763,
      "grad_norm": 0.7196089029312134,
      "learning_rate": 8.262928807620843e-05,
      "loss": 0.2408,
      "step": 62
    },
    {
      "epoch": 0.0053750826525606294,
      "grad_norm": 0.7290205359458923,
      "learning_rate": 8.199842702516583e-05,
      "loss": 0.2473,
      "step": 63
    },
    {
      "epoch": 0.005460401424823497,
      "grad_norm": 0.7057019472122192,
      "learning_rate": 8.135881792367686e-05,
      "loss": 0.2628,
      "step": 64
    },
    {
      "epoch": 0.005545720197086364,
      "grad_norm": 0.698653519153595,
      "learning_rate": 8.07106356344834e-05,
      "loss": 0.2402,
      "step": 65
    },
    {
      "epoch": 0.005631038969349231,
      "grad_norm": 0.797401487827301,
      "learning_rate": 8.005405736415126e-05,
      "loss": 0.2975,
      "step": 66
    },
    {
      "epoch": 0.005716357741612098,
      "grad_norm": 0.7598553895950317,
      "learning_rate": 7.938926261462366e-05,
      "loss": 0.2662,
      "step": 67
    },
    {
      "epoch": 0.0058016765138749656,
      "grad_norm": 0.8131743669509888,
      "learning_rate": 7.871643313414718e-05,
      "loss": 0.2826,
      "step": 68
    },
    {
      "epoch": 0.005886995286137832,
      "grad_norm": 0.7249765992164612,
      "learning_rate": 7.803575286758364e-05,
      "loss": 0.266,
      "step": 69
    },
    {
      "epoch": 0.0059723140584006995,
      "grad_norm": 0.758956789970398,
      "learning_rate": 7.734740790612136e-05,
      "loss": 0.275,
      "step": 70
    },
    {
      "epoch": 0.006057632830663567,
      "grad_norm": 0.804485023021698,
      "learning_rate": 7.66515864363997e-05,
      "loss": 0.2477,
      "step": 71
    },
    {
      "epoch": 0.006142951602926434,
      "grad_norm": 0.8069186210632324,
      "learning_rate": 7.594847868906076e-05,
      "loss": 0.3032,
      "step": 72
    },
    {
      "epoch": 0.006228270375189301,
      "grad_norm": 0.8778233528137207,
      "learning_rate": 7.52382768867422e-05,
      "loss": 0.3149,
      "step": 73
    },
    {
      "epoch": 0.006313589147452168,
      "grad_norm": 0.7881379127502441,
      "learning_rate": 7.452117519152542e-05,
      "loss": 0.2377,
      "step": 74
    },
    {
      "epoch": 0.006398907919715036,
      "grad_norm": 1.2000892162322998,
      "learning_rate": 7.379736965185368e-05,
      "loss": 0.2647,
      "step": 75
    },
    {
      "epoch": 0.006484226691977902,
      "grad_norm": 0.7718772888183594,
      "learning_rate": 7.30670581489344e-05,
      "loss": 0.2428,
      "step": 76
    },
    {
      "epoch": 0.0065695454642407695,
      "grad_norm": 0.7269161343574524,
      "learning_rate": 7.233044034264034e-05,
      "loss": 0.2374,
      "step": 77
    },
    {
      "epoch": 0.006654864236503637,
      "grad_norm": 0.8207325339317322,
      "learning_rate": 7.158771761692464e-05,
      "loss": 0.2384,
      "step": 78
    },
    {
      "epoch": 0.006740183008766503,
      "grad_norm": 0.8615207672119141,
      "learning_rate": 7.083909302476453e-05,
      "loss": 0.2582,
      "step": 79
    },
    {
      "epoch": 0.006825501781029371,
      "grad_norm": 0.8644890189170837,
      "learning_rate": 7.008477123264848e-05,
      "loss": 0.2697,
      "step": 80
    },
    {
      "epoch": 0.006910820553292238,
      "grad_norm": 0.8401942253112793,
      "learning_rate": 6.932495846462261e-05,
      "loss": 0.2651,
      "step": 81
    },
    {
      "epoch": 0.006996139325555106,
      "grad_norm": 0.8869915008544922,
      "learning_rate": 6.855986244591104e-05,
      "loss": 0.3013,
      "step": 82
    },
    {
      "epoch": 0.007081458097817972,
      "grad_norm": 0.8247209191322327,
      "learning_rate": 6.778969234612584e-05,
      "loss": 0.2566,
      "step": 83
    },
    {
      "epoch": 0.0071667768700808395,
      "grad_norm": 0.8518325686454773,
      "learning_rate": 6.701465872208216e-05,
      "loss": 0.2569,
      "step": 84
    },
    {
      "epoch": 0.007252095642343707,
      "grad_norm": 0.8171535730361938,
      "learning_rate": 6.623497346023418e-05,
      "loss": 0.2545,
      "step": 85
    },
    {
      "epoch": 0.0073374144146065735,
      "grad_norm": 1.149787425994873,
      "learning_rate": 6.545084971874738e-05,
      "loss": 0.2415,
      "step": 86
    },
    {
      "epoch": 0.007422733186869441,
      "grad_norm": 0.8294546604156494,
      "learning_rate": 6.466250186922325e-05,
      "loss": 0.2572,
      "step": 87
    },
    {
      "epoch": 0.007508051959132308,
      "grad_norm": 0.8672974109649658,
      "learning_rate": 6.387014543809223e-05,
      "loss": 0.2332,
      "step": 88
    },
    {
      "epoch": 0.007593370731395176,
      "grad_norm": 1.1290428638458252,
      "learning_rate": 6.307399704769099e-05,
      "loss": 0.3217,
      "step": 89
    },
    {
      "epoch": 0.007678689503658042,
      "grad_norm": 1.0930269956588745,
      "learning_rate": 6.227427435703997e-05,
      "loss": 0.2989,
      "step": 90
    },
    {
      "epoch": 0.00776400827592091,
      "grad_norm": 1.3688007593154907,
      "learning_rate": 6.147119600233758e-05,
      "loss": 0.2511,
      "step": 91
    },
    {
      "epoch": 0.007849327048183776,
      "grad_norm": 0.9447515606880188,
      "learning_rate": 6.066498153718735e-05,
      "loss": 0.2585,
      "step": 92
    },
    {
      "epoch": 0.007934645820446644,
      "grad_norm": 1.0924570560455322,
      "learning_rate": 5.985585137257401e-05,
      "loss": 0.2975,
      "step": 93
    },
    {
      "epoch": 0.008019964592709511,
      "grad_norm": 0.9857505559921265,
      "learning_rate": 5.90440267166055e-05,
      "loss": 0.2445,
      "step": 94
    },
    {
      "epoch": 0.008105283364972378,
      "grad_norm": 0.9534607529640198,
      "learning_rate": 5.8229729514036705e-05,
      "loss": 0.2287,
      "step": 95
    },
    {
      "epoch": 0.008190602137235246,
      "grad_norm": 1.0266928672790527,
      "learning_rate": 5.74131823855921e-05,
      "loss": 0.2782,
      "step": 96
    },
    {
      "epoch": 0.008275920909498113,
      "grad_norm": 0.9990158677101135,
      "learning_rate": 5.6594608567103456e-05,
      "loss": 0.2623,
      "step": 97
    },
    {
      "epoch": 0.008361239681760979,
      "grad_norm": 1.167478322982788,
      "learning_rate": 5.577423184847932e-05,
      "loss": 0.2732,
      "step": 98
    },
    {
      "epoch": 0.008446558454023846,
      "grad_norm": 1.2563468217849731,
      "learning_rate": 5.495227651252315e-05,
      "loss": 0.2993,
      "step": 99
    },
    {
      "epoch": 0.008531877226286714,
      "grad_norm": 1.479835867881775,
      "learning_rate": 5.4128967273616625e-05,
      "loss": 0.3311,
      "step": 100
    },
    {
      "epoch": 0.008531877226286714,
      "eval_loss": 0.289458304643631,
      "eval_runtime": 1491.6906,
      "eval_samples_per_second": 13.233,
      "eval_steps_per_second": 3.308,
      "step": 100
    },
    {
      "epoch": 0.008617195998549581,
      "grad_norm": 0.9032772183418274,
      "learning_rate": 5.330452921628497e-05,
      "loss": 0.3579,
      "step": 101
    },
    {
      "epoch": 0.008702514770812448,
      "grad_norm": 0.8081871271133423,
      "learning_rate": 5.247918773366112e-05,
      "loss": 0.3355,
      "step": 102
    },
    {
      "epoch": 0.008787833543075316,
      "grad_norm": 0.6597061157226562,
      "learning_rate": 5.165316846586541e-05,
      "loss": 0.2763,
      "step": 103
    },
    {
      "epoch": 0.008873152315338183,
      "grad_norm": 0.604412853717804,
      "learning_rate": 5.0826697238317935e-05,
      "loss": 0.2498,
      "step": 104
    },
    {
      "epoch": 0.008958471087601049,
      "grad_norm": 0.6138935089111328,
      "learning_rate": 5e-05,
      "loss": 0.2511,
      "step": 105
    },
    {
      "epoch": 0.009043789859863916,
      "grad_norm": 0.5212016105651855,
      "learning_rate": 4.917330276168208e-05,
      "loss": 0.2301,
      "step": 106
    },
    {
      "epoch": 0.009129108632126784,
      "grad_norm": 0.5596919655799866,
      "learning_rate": 4.834683153413459e-05,
      "loss": 0.2405,
      "step": 107
    },
    {
      "epoch": 0.009214427404389651,
      "grad_norm": 0.5765125751495361,
      "learning_rate": 4.7520812266338885e-05,
      "loss": 0.2322,
      "step": 108
    },
    {
      "epoch": 0.009299746176652518,
      "grad_norm": 0.6592385172843933,
      "learning_rate": 4.669547078371504e-05,
      "loss": 0.2766,
      "step": 109
    },
    {
      "epoch": 0.009385064948915386,
      "grad_norm": 0.5780729651451111,
      "learning_rate": 4.5871032726383386e-05,
      "loss": 0.2318,
      "step": 110
    },
    {
      "epoch": 0.009470383721178251,
      "grad_norm": 0.5643982291221619,
      "learning_rate": 4.504772348747687e-05,
      "loss": 0.211,
      "step": 111
    },
    {
      "epoch": 0.009555702493441119,
      "grad_norm": 0.620904803276062,
      "learning_rate": 4.4225768151520694e-05,
      "loss": 0.2637,
      "step": 112
    },
    {
      "epoch": 0.009641021265703986,
      "grad_norm": 0.6117725372314453,
      "learning_rate": 4.3405391432896555e-05,
      "loss": 0.2407,
      "step": 113
    },
    {
      "epoch": 0.009726340037966854,
      "grad_norm": 0.6310005187988281,
      "learning_rate": 4.2586817614407895e-05,
      "loss": 0.2699,
      "step": 114
    },
    {
      "epoch": 0.009811658810229721,
      "grad_norm": 0.6059605479240417,
      "learning_rate": 4.17702704859633e-05,
      "loss": 0.2172,
      "step": 115
    },
    {
      "epoch": 0.009896977582492588,
      "grad_norm": 0.7402892112731934,
      "learning_rate": 4.095597328339452e-05,
      "loss": 0.2434,
      "step": 116
    },
    {
      "epoch": 0.009982296354755456,
      "grad_norm": 0.7294708490371704,
      "learning_rate": 4.0144148627425993e-05,
      "loss": 0.235,
      "step": 117
    },
    {
      "epoch": 0.010067615127018321,
      "grad_norm": 0.644306480884552,
      "learning_rate": 3.933501846281267e-05,
      "loss": 0.2424,
      "step": 118
    },
    {
      "epoch": 0.010152933899281189,
      "grad_norm": 0.6743088960647583,
      "learning_rate": 3.852880399766243e-05,
      "loss": 0.2314,
      "step": 119
    },
    {
      "epoch": 0.010238252671544056,
      "grad_norm": 0.7130791544914246,
      "learning_rate": 3.772572564296005e-05,
      "loss": 0.2807,
      "step": 120
    },
    {
      "epoch": 0.010323571443806924,
      "grad_norm": 0.7781491875648499,
      "learning_rate": 3.6926002952309016e-05,
      "loss": 0.2441,
      "step": 121
    },
    {
      "epoch": 0.010408890216069791,
      "grad_norm": 0.7216753363609314,
      "learning_rate": 3.612985456190778e-05,
      "loss": 0.2729,
      "step": 122
    },
    {
      "epoch": 0.010494208988332658,
      "grad_norm": 0.7023508548736572,
      "learning_rate": 3.533749813077677e-05,
      "loss": 0.2646,
      "step": 123
    },
    {
      "epoch": 0.010579527760595526,
      "grad_norm": 0.72576904296875,
      "learning_rate": 3.4549150281252636e-05,
      "loss": 0.2596,
      "step": 124
    },
    {
      "epoch": 0.010664846532858391,
      "grad_norm": 0.7103844285011292,
      "learning_rate": 3.3765026539765834e-05,
      "loss": 0.233,
      "step": 125
    },
    {
      "epoch": 0.010750165305121259,
      "grad_norm": 0.6678680777549744,
      "learning_rate": 3.298534127791785e-05,
      "loss": 0.2312,
      "step": 126
    },
    {
      "epoch": 0.010835484077384126,
      "grad_norm": 0.7078672051429749,
      "learning_rate": 3.221030765387417e-05,
      "loss": 0.2339,
      "step": 127
    },
    {
      "epoch": 0.010920802849646994,
      "grad_norm": 0.8453111052513123,
      "learning_rate": 3.144013755408895e-05,
      "loss": 0.2782,
      "step": 128
    },
    {
      "epoch": 0.011006121621909861,
      "grad_norm": 0.8020609021186829,
      "learning_rate": 3.0675041535377405e-05,
      "loss": 0.2809,
      "step": 129
    },
    {
      "epoch": 0.011091440394172728,
      "grad_norm": 0.6998770236968994,
      "learning_rate": 2.991522876735154e-05,
      "loss": 0.233,
      "step": 130
    },
    {
      "epoch": 0.011176759166435594,
      "grad_norm": 0.758491039276123,
      "learning_rate": 2.916090697523549e-05,
      "loss": 0.2558,
      "step": 131
    },
    {
      "epoch": 0.011262077938698462,
      "grad_norm": 0.8167868852615356,
      "learning_rate": 2.8412282383075363e-05,
      "loss": 0.268,
      "step": 132
    },
    {
      "epoch": 0.011347396710961329,
      "grad_norm": 0.7342557311058044,
      "learning_rate": 2.766955965735968e-05,
      "loss": 0.2178,
      "step": 133
    },
    {
      "epoch": 0.011432715483224196,
      "grad_norm": 0.7934457659721375,
      "learning_rate": 2.693294185106562e-05,
      "loss": 0.2338,
      "step": 134
    },
    {
      "epoch": 0.011518034255487064,
      "grad_norm": 0.8661943078041077,
      "learning_rate": 2.6202630348146324e-05,
      "loss": 0.2701,
      "step": 135
    },
    {
      "epoch": 0.011603353027749931,
      "grad_norm": 0.8646010160446167,
      "learning_rate": 2.547882480847461e-05,
      "loss": 0.2557,
      "step": 136
    },
    {
      "epoch": 0.011688671800012798,
      "grad_norm": 0.8198699951171875,
      "learning_rate": 2.476172311325783e-05,
      "loss": 0.2446,
      "step": 137
    },
    {
      "epoch": 0.011773990572275664,
      "grad_norm": 0.7358266115188599,
      "learning_rate": 2.405152131093926e-05,
      "loss": 0.2028,
      "step": 138
    },
    {
      "epoch": 0.011859309344538532,
      "grad_norm": 0.7880418300628662,
      "learning_rate": 2.3348413563600325e-05,
      "loss": 0.222,
      "step": 139
    },
    {
      "epoch": 0.011944628116801399,
      "grad_norm": 0.7976980805397034,
      "learning_rate": 2.2652592093878666e-05,
      "loss": 0.2108,
      "step": 140
    },
    {
      "epoch": 0.012029946889064266,
      "grad_norm": 0.8414627313613892,
      "learning_rate": 2.196424713241637e-05,
      "loss": 0.2495,
      "step": 141
    },
    {
      "epoch": 0.012115265661327134,
      "grad_norm": 0.7766191363334656,
      "learning_rate": 2.128356686585282e-05,
      "loss": 0.1911,
      "step": 142
    },
    {
      "epoch": 0.012200584433590001,
      "grad_norm": 0.8738869428634644,
      "learning_rate": 2.061073738537635e-05,
      "loss": 0.2331,
      "step": 143
    },
    {
      "epoch": 0.012285903205852869,
      "grad_norm": 0.9872680306434631,
      "learning_rate": 1.9945942635848748e-05,
      "loss": 0.2705,
      "step": 144
    },
    {
      "epoch": 0.012371221978115734,
      "grad_norm": 0.8350234627723694,
      "learning_rate": 1.928936436551661e-05,
      "loss": 0.2319,
      "step": 145
    },
    {
      "epoch": 0.012456540750378602,
      "grad_norm": 1.025537133216858,
      "learning_rate": 1.8641182076323148e-05,
      "loss": 0.2773,
      "step": 146
    },
    {
      "epoch": 0.012541859522641469,
      "grad_norm": 0.9157933592796326,
      "learning_rate": 1.800157297483417e-05,
      "loss": 0.2232,
      "step": 147
    },
    {
      "epoch": 0.012627178294904336,
      "grad_norm": 1.1109967231750488,
      "learning_rate": 1.7370711923791567e-05,
      "loss": 0.2734,
      "step": 148
    },
    {
      "epoch": 0.012712497067167204,
      "grad_norm": 1.05686616897583,
      "learning_rate": 1.6748771394307585e-05,
      "loss": 0.2682,
      "step": 149
    },
    {
      "epoch": 0.012797815839430071,
      "grad_norm": 1.449411392211914,
      "learning_rate": 1.6135921418712956e-05,
      "loss": 0.3932,
      "step": 150
    },
    {
      "epoch": 0.012797815839430071,
      "eval_loss": 0.24746929109096527,
      "eval_runtime": 1491.868,
      "eval_samples_per_second": 13.232,
      "eval_steps_per_second": 3.308,
      "step": 150
    }
  ],
  "logging_steps": 1,
  "max_steps": 200,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 50,
  "stateful_callbacks": {
    "EarlyStoppingCallback": {
      "args": {
        "early_stopping_patience": 5,
        "early_stopping_threshold": 0.0
      },
      "attributes": {
        "early_stopping_patience_counter": 0
      }
    },
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.157230712548229e+17,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}