{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 3.0,
  "eval_steps": 600,
  "global_step": 1770,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.01694915254237288,
      "grad_norm": 0.9151965379714966,
      "learning_rate": 2.824858757062147e-06,
      "loss": 2.0144,
      "step": 10
    },
    {
      "epoch": 0.03389830508474576,
      "grad_norm": 0.3579045534133911,
      "learning_rate": 5.649717514124294e-06,
      "loss": 1.9709,
      "step": 20
    },
    {
      "epoch": 0.05084745762711865,
      "grad_norm": 0.26751402020454407,
      "learning_rate": 8.47457627118644e-06,
      "loss": 1.8976,
      "step": 30
    },
    {
      "epoch": 0.06779661016949153,
      "grad_norm": 0.17199155688285828,
      "learning_rate": 1.1299435028248587e-05,
      "loss": 1.832,
      "step": 40
    },
    {
      "epoch": 0.0847457627118644,
      "grad_norm": 0.15522244572639465,
      "learning_rate": 1.4124293785310736e-05,
      "loss": 1.7838,
      "step": 50
    },
    {
      "epoch": 0.1016949152542373,
      "grad_norm": 0.1401771456003189,
      "learning_rate": 1.694915254237288e-05,
      "loss": 1.7742,
      "step": 60
    },
    {
      "epoch": 0.11864406779661017,
      "grad_norm": 0.13913491368293762,
      "learning_rate": 1.977401129943503e-05,
      "loss": 1.7601,
      "step": 70
    },
    {
      "epoch": 0.13559322033898305,
      "grad_norm": 0.13437530398368835,
      "learning_rate": 2.2598870056497175e-05,
      "loss": 1.7553,
      "step": 80
    },
    {
      "epoch": 0.15254237288135594,
      "grad_norm": 0.14121650159358978,
      "learning_rate": 2.5423728813559322e-05,
      "loss": 1.7318,
      "step": 90
    },
    {
      "epoch": 0.1694915254237288,
      "grad_norm": 0.15103434026241302,
      "learning_rate": 2.8248587570621472e-05,
      "loss": 1.7234,
      "step": 100
    },
    {
      "epoch": 0.1864406779661017,
      "grad_norm": 0.1516140103340149,
      "learning_rate": 3.107344632768362e-05,
      "loss": 1.731,
      "step": 110
    },
    {
      "epoch": 0.2033898305084746,
      "grad_norm": 0.14474578201770782,
      "learning_rate": 3.389830508474576e-05,
      "loss": 1.7048,
      "step": 120
    },
    {
      "epoch": 0.22033898305084745,
      "grad_norm": 0.1441410928964615,
      "learning_rate": 3.672316384180791e-05,
      "loss": 1.7091,
      "step": 130
    },
    {
      "epoch": 0.23728813559322035,
      "grad_norm": 0.14374051988124847,
      "learning_rate": 3.954802259887006e-05,
      "loss": 1.7163,
      "step": 140
    },
    {
      "epoch": 0.2542372881355932,
      "grad_norm": 0.15048211812973022,
      "learning_rate": 4.2372881355932206e-05,
      "loss": 1.7085,
      "step": 150
    },
    {
      "epoch": 0.2711864406779661,
      "grad_norm": 0.13899974524974823,
      "learning_rate": 4.519774011299435e-05,
      "loss": 1.6966,
      "step": 160
    },
    {
      "epoch": 0.288135593220339,
      "grad_norm": 0.14798595011234283,
      "learning_rate": 4.80225988700565e-05,
      "loss": 1.7086,
      "step": 170
    },
    {
      "epoch": 0.3050847457627119,
      "grad_norm": 0.1401553452014923,
      "learning_rate": 4.999956245830044e-05,
      "loss": 1.7086,
      "step": 180
    },
    {
      "epoch": 0.3220338983050847,
      "grad_norm": 0.14466150104999542,
      "learning_rate": 4.9991784365248394e-05,
      "loss": 1.6885,
      "step": 190
    },
    {
      "epoch": 0.3389830508474576,
      "grad_norm": 0.1339222490787506,
      "learning_rate": 4.997428660526443e-05,
      "loss": 1.705,
      "step": 200
    },
    {
      "epoch": 0.3559322033898305,
      "grad_norm": 0.13571041822433472,
      "learning_rate": 4.994707598348085e-05,
      "loss": 1.6889,
      "step": 210
    },
    {
      "epoch": 0.3728813559322034,
      "grad_norm": 0.12768986821174622,
      "learning_rate": 4.991016308250258e-05,
      "loss": 1.6939,
      "step": 220
    },
    {
      "epoch": 0.3898305084745763,
      "grad_norm": 0.1278401017189026,
      "learning_rate": 4.9863562258291494e-05,
      "loss": 1.699,
      "step": 230
    },
    {
      "epoch": 0.4067796610169492,
      "grad_norm": 0.1307385265827179,
      "learning_rate": 4.980729163458312e-05,
      "loss": 1.6969,
      "step": 240
    },
    {
      "epoch": 0.423728813559322,
      "grad_norm": 0.12608782947063446,
      "learning_rate": 4.974137309583804e-05,
      "loss": 1.6931,
      "step": 250
    },
    {
      "epoch": 0.4406779661016949,
      "grad_norm": 0.9130762815475464,
      "learning_rate": 4.966583227873079e-05,
      "loss": 1.7061,
      "step": 260
    },
    {
      "epoch": 0.4576271186440678,
      "grad_norm": 0.12975232303142548,
      "learning_rate": 4.9580698562179297e-05,
      "loss": 1.6956,
      "step": 270
    },
    {
      "epoch": 0.4745762711864407,
      "grad_norm": 0.1291048377752304,
      "learning_rate": 4.948600505591905e-05,
      "loss": 1.7004,
      "step": 280
    },
    {
      "epoch": 0.4915254237288136,
      "grad_norm": 0.12629887461662292,
      "learning_rate": 4.938178858762622e-05,
      "loss": 1.693,
      "step": 290
    },
    {
      "epoch": 0.5084745762711864,
      "grad_norm": 0.12622225284576416,
      "learning_rate": 4.926808968859483e-05,
      "loss": 1.6819,
      "step": 300
    },
    {
      "epoch": 0.5254237288135594,
      "grad_norm": 0.12781433761119843,
      "learning_rate": 4.9144952577973596e-05,
      "loss": 1.6984,
      "step": 310
    },
    {
      "epoch": 0.5423728813559322,
      "grad_norm": 0.1239006370306015,
      "learning_rate": 4.90124251455684e-05,
      "loss": 1.6875,
      "step": 320
    },
    {
      "epoch": 0.559322033898305,
      "grad_norm": 0.1296393871307373,
      "learning_rate": 4.88705589332173e-05,
      "loss": 1.6758,
      "step": 330
    },
    {
      "epoch": 0.576271186440678,
      "grad_norm": 0.12318539619445801,
      "learning_rate": 4.871940911474513e-05,
      "loss": 1.695,
      "step": 340
    },
    {
      "epoch": 0.5932203389830508,
      "grad_norm": 0.12802407145500183,
      "learning_rate": 4.8559034474505614e-05,
      "loss": 1.6862,
      "step": 350
    },
    {
      "epoch": 0.6101694915254238,
      "grad_norm": 0.12415086477994919,
      "learning_rate": 4.838949738451929e-05,
      "loss": 1.6652,
      "step": 360
    },
    {
      "epoch": 0.6271186440677966,
      "grad_norm": 0.12488778680562973,
      "learning_rate": 4.821086378021608e-05,
      "loss": 1.6898,
      "step": 370
    },
    {
      "epoch": 0.6440677966101694,
      "grad_norm": 0.12341786175966263,
      "learning_rate": 4.802320313479214e-05,
      "loss": 1.6766,
      "step": 380
    },
    {
      "epoch": 0.6610169491525424,
      "grad_norm": 0.12222524732351303,
      "learning_rate": 4.7826588432190614e-05,
      "loss": 1.6838,
      "step": 390
    },
    {
      "epoch": 0.6779661016949152,
      "grad_norm": 0.19455482065677643,
      "learning_rate": 4.76210961387172e-05,
      "loss": 1.6702,
      "step": 400
    },
    {
      "epoch": 0.6949152542372882,
      "grad_norm": 0.12282032519578934,
      "learning_rate": 4.7406806173301285e-05,
      "loss": 1.6793,
      "step": 410
    },
    {
      "epoch": 0.711864406779661,
      "grad_norm": 0.12586097419261932,
      "learning_rate": 4.7183801876414294e-05,
      "loss": 1.6611,
      "step": 420
    },
    {
      "epoch": 0.7288135593220338,
      "grad_norm": 0.12690460681915283,
      "learning_rate": 4.695216997765737e-05,
      "loss": 1.6896,
      "step": 430
    },
    {
      "epoch": 0.7457627118644068,
      "grad_norm": 0.1262531876564026,
      "learning_rate": 4.6712000562031e-05,
      "loss": 1.6756,
      "step": 440
    },
    {
      "epoch": 0.7627118644067796,
      "grad_norm": 0.12429999560117722,
      "learning_rate": 4.6463387034899645e-05,
      "loss": 1.6658,
      "step": 450
    },
    {
      "epoch": 0.7796610169491526,
      "grad_norm": 0.12122652679681778,
      "learning_rate": 4.6206426085665046e-05,
      "loss": 1.6841,
      "step": 460
    },
    {
      "epoch": 0.7966101694915254,
      "grad_norm": 0.1276836097240448,
      "learning_rate": 4.594121765016229e-05,
      "loss": 1.6947,
      "step": 470
    },
    {
      "epoch": 0.8135593220338984,
      "grad_norm": 0.12912869453430176,
      "learning_rate": 4.5667864871793345e-05,
      "loss": 1.6888,
      "step": 480
    },
    {
      "epoch": 0.8305084745762712,
      "grad_norm": 0.12462390959262848,
      "learning_rate": 4.538647406141308e-05,
      "loss": 1.6675,
      "step": 490
    },
    {
      "epoch": 0.847457627118644,
      "grad_norm": 0.12297125160694122,
      "learning_rate": 4.509715465598344e-05,
      "loss": 1.6828,
      "step": 500
    },
    {
      "epoch": 0.864406779661017,
      "grad_norm": 0.12475843727588654,
      "learning_rate": 4.480001917601185e-05,
      "loss": 1.6713,
      "step": 510
    },
    {
      "epoch": 0.8813559322033898,
      "grad_norm": 0.1244443878531456,
      "learning_rate": 4.449518318179029e-05,
      "loss": 1.6544,
      "step": 520
    },
    {
      "epoch": 0.8983050847457628,
      "grad_norm": 0.12166355550289154,
      "learning_rate": 4.418276522845231e-05,
      "loss": 1.6679,
      "step": 530
    },
    {
      "epoch": 0.9152542372881356,
      "grad_norm": 0.1233435645699501,
      "learning_rate": 4.386288681986516e-05,
      "loss": 1.6704,
      "step": 540
    },
    {
      "epoch": 0.9322033898305084,
      "grad_norm": 0.12299688160419464,
      "learning_rate": 4.353567236137521e-05,
      "loss": 1.6759,
      "step": 550
    },
    {
      "epoch": 0.9491525423728814,
      "grad_norm": 0.12501336634159088,
      "learning_rate": 4.3201249111424877e-05,
      "loss": 1.6617,
      "step": 560
    },
    {
      "epoch": 0.9661016949152542,
      "grad_norm": 0.1281379759311676,
      "learning_rate": 4.2859747132060006e-05,
      "loss": 1.6632,
      "step": 570
    },
    {
      "epoch": 0.9830508474576272,
      "grad_norm": 0.12356545031070709,
      "learning_rate": 4.251129923834685e-05,
      "loss": 1.6841,
      "step": 580
    },
    {
      "epoch": 1.0,
      "grad_norm": 0.12595489621162415,
      "learning_rate": 4.215604094671835e-05,
      "loss": 1.6448,
      "step": 590
    },
    {
      "epoch": 1.0169491525423728,
      "grad_norm": 0.1292751580476761,
      "learning_rate": 4.1794110422269825e-05,
      "loss": 1.6261,
      "step": 600
    },
    {
      "epoch": 1.0169491525423728,
      "eval_loss": 1.6485204696655273,
      "eval_runtime": 4.4104,
      "eval_samples_per_second": 86.387,
      "eval_steps_per_second": 10.883,
      "step": 600
    },
    {
      "epoch": 1.0338983050847457,
      "grad_norm": 0.13464362919330597,
      "learning_rate": 4.142564842502461e-05,
      "loss": 1.6501,
      "step": 610
    },
    {
      "epoch": 1.0508474576271187,
      "grad_norm": 0.13430601358413696,
      "learning_rate": 4.1050798255190405e-05,
      "loss": 1.6435,
      "step": 620
    },
    {
      "epoch": 1.0677966101694916,
      "grad_norm": 0.1346493363380432,
      "learning_rate": 4.0669705697427754e-05,
      "loss": 1.6567,
      "step": 630
    },
    {
      "epoch": 1.0847457627118644,
      "grad_norm": 0.13532420992851257,
      "learning_rate": 4.0282518964152296e-05,
      "loss": 1.6527,
      "step": 640
    },
    {
      "epoch": 1.1016949152542372,
      "grad_norm": 0.1375868022441864,
      "learning_rate": 3.988938863789278e-05,
      "loss": 1.6408,
      "step": 650
    },
    {
      "epoch": 1.11864406779661,
      "grad_norm": 0.13993752002716064,
      "learning_rate": 3.949046761272736e-05,
      "loss": 1.6441,
      "step": 660
    },
    {
      "epoch": 1.1355932203389831,
      "grad_norm": 0.1394631415605545,
      "learning_rate": 3.908591103482088e-05,
      "loss": 1.6454,
      "step": 670
    },
    {
      "epoch": 1.152542372881356,
      "grad_norm": 0.1412103921175003,
      "learning_rate": 3.867587624208627e-05,
      "loss": 1.6689,
      "step": 680
    },
    {
      "epoch": 1.1694915254237288,
      "grad_norm": 0.1373807042837143,
      "learning_rate": 3.826052270299356e-05,
      "loss": 1.6475,
      "step": 690
    },
    {
      "epoch": 1.1864406779661016,
      "grad_norm": 0.14468206465244293,
      "learning_rate": 3.784001195455027e-05,
      "loss": 1.6469,
      "step": 700
    },
    {
      "epoch": 1.2033898305084745,
      "grad_norm": 0.14381623268127441,
      "learning_rate": 3.7414507539477315e-05,
      "loss": 1.6335,
      "step": 710
    },
    {
      "epoch": 1.2203389830508475,
      "grad_norm": 0.1429220587015152,
      "learning_rate": 3.698417494260494e-05,
      "loss": 1.6289,
      "step": 720
    },
    {
      "epoch": 1.2372881355932204,
      "grad_norm": 0.14810022711753845,
      "learning_rate": 3.654918152651319e-05,
      "loss": 1.6428,
      "step": 730
    },
    {
      "epoch": 1.2542372881355932,
      "grad_norm": 0.14096328616142273,
      "learning_rate": 3.610969646644222e-05,
      "loss": 1.6592,
      "step": 740
    },
    {
      "epoch": 1.271186440677966,
      "grad_norm": 0.14148060977458954,
      "learning_rate": 3.566589068449761e-05,
      "loss": 1.6251,
      "step": 750
    },
    {
      "epoch": 1.288135593220339,
      "grad_norm": 0.14239969849586487,
      "learning_rate": 3.5217936783176216e-05,
      "loss": 1.6708,
      "step": 760
    },
    {
      "epoch": 1.305084745762712,
      "grad_norm": 0.14554665982723236,
      "learning_rate": 3.476600897823864e-05,
      "loss": 1.6334,
      "step": 770
    },
    {
      "epoch": 1.3220338983050848,
      "grad_norm": 0.14560729265213013,
      "learning_rate": 3.431028303095415e-05,
      "loss": 1.6475,
      "step": 780
    },
    {
      "epoch": 1.3389830508474576,
      "grad_norm": 0.14253520965576172,
      "learning_rate": 3.385093617974458e-05,
      "loss": 1.6418,
      "step": 790
    },
    {
      "epoch": 1.3559322033898304,
      "grad_norm": 0.1438617706298828,
      "learning_rate": 3.338814707125377e-05,
      "loss": 1.6296,
      "step": 800
    },
    {
      "epoch": 1.3728813559322033,
      "grad_norm": 0.14633050560951233,
      "learning_rate": 3.2922095690869224e-05,
      "loss": 1.636,
      "step": 810
    },
    {
      "epoch": 1.3898305084745763,
      "grad_norm": 0.1484278440475464,
      "learning_rate": 3.2452963292723305e-05,
      "loss": 1.6156,
      "step": 820
    },
    {
      "epoch": 1.4067796610169492,
      "grad_norm": 0.14433981478214264,
      "learning_rate": 3.1980932329200756e-05,
      "loss": 1.6474,
      "step": 830
    },
    {
      "epoch": 1.423728813559322,
      "grad_norm": 0.1485067456960678,
      "learning_rate": 3.150618637998041e-05,
      "loss": 1.6356,
      "step": 840
    },
    {
      "epoch": 1.4406779661016949,
      "grad_norm": 0.14338724315166473,
      "learning_rate": 3.1028910080638354e-05,
      "loss": 1.6396,
      "step": 850
    },
    {
      "epoch": 1.457627118644068,
      "grad_norm": 0.14848238229751587,
      "learning_rate": 3.054928905084049e-05,
      "loss": 1.6402,
      "step": 860
    },
    {
      "epoch": 1.4745762711864407,
      "grad_norm": 0.14883729815483093,
      "learning_rate": 3.006750982215234e-05,
      "loss": 1.6282,
      "step": 870
    },
    {
      "epoch": 1.4915254237288136,
      "grad_norm": 0.15096916258335114,
      "learning_rate": 2.9583759765494267e-05,
      "loss": 1.6402,
      "step": 880
    },
    {
      "epoch": 1.5084745762711864,
      "grad_norm": 0.1506650447845459,
      "learning_rate": 2.9098227018270134e-05,
      "loss": 1.6392,
      "step": 890
    },
    {
      "epoch": 1.5254237288135593,
      "grad_norm": 0.14937371015548706,
      "learning_rate": 2.8611100411198037e-05,
      "loss": 1.6472,
      "step": 900
    },
    {
      "epoch": 1.542372881355932,
      "grad_norm": 0.14744701981544495,
      "learning_rate": 2.8122569394871225e-05,
      "loss": 1.6215,
      "step": 910
    },
    {
      "epoch": 1.559322033898305,
      "grad_norm": 0.14676077663898468,
      "learning_rate": 2.763282396607803e-05,
      "loss": 1.6313,
      "step": 920
    },
    {
      "epoch": 1.576271186440678,
      "grad_norm": 0.14905640482902527,
      "learning_rate": 2.7142054593909422e-05,
      "loss": 1.6452,
      "step": 930
    },
    {
      "epoch": 1.5932203389830508,
      "grad_norm": 0.14924922585487366,
      "learning_rate": 2.6650452145682762e-05,
      "loss": 1.6386,
      "step": 940
    },
    {
      "epoch": 1.6101694915254239,
      "grad_norm": 0.15311497449874878,
      "learning_rate": 2.615820781271075e-05,
      "loss": 1.6301,
      "step": 950
    },
    {
      "epoch": 1.6271186440677967,
      "grad_norm": 0.15127715468406677,
      "learning_rate": 2.566551303594437e-05,
      "loss": 1.6247,
      "step": 960
    },
    {
      "epoch": 1.6440677966101696,
      "grad_norm": 0.15096339583396912,
      "learning_rate": 2.5172559431518727e-05,
      "loss": 1.6387,
      "step": 970
    },
    {
      "epoch": 1.6610169491525424,
      "grad_norm": 0.15415184199810028,
      "learning_rate": 2.467953871623073e-05,
      "loss": 1.6289,
      "step": 980
    },
    {
      "epoch": 1.6779661016949152,
      "grad_norm": 0.14868630468845367,
      "learning_rate": 2.4186642632977697e-05,
      "loss": 1.6339,
      "step": 990
    },
    {
      "epoch": 1.694915254237288,
      "grad_norm": 0.1516588181257248,
      "learning_rate": 2.3694062876185738e-05,
      "loss": 1.6172,
      "step": 1000
    },
    {
      "epoch": 1.711864406779661,
      "grad_norm": 0.15194956958293915,
      "learning_rate": 2.3201991017257007e-05,
      "loss": 1.6169,
      "step": 1010
    },
    {
      "epoch": 1.7288135593220337,
      "grad_norm": 0.15378808975219727,
      "learning_rate": 2.2710618430064843e-05,
      "loss": 1.6156,
      "step": 1020
    },
    {
      "epoch": 1.7457627118644068,
      "grad_norm": 0.15048706531524658,
      "learning_rate": 2.222013621652565e-05,
      "loss": 1.6348,
      "step": 1030
    },
    {
      "epoch": 1.7627118644067796,
      "grad_norm": 0.1514016091823578,
      "learning_rate": 2.173073513227667e-05,
      "loss": 1.6162,
      "step": 1040
    },
    {
      "epoch": 1.7796610169491527,
      "grad_norm": 0.15484623610973358,
      "learning_rate": 2.1242605512488248e-05,
      "loss": 1.6162,
      "step": 1050
    },
    {
      "epoch": 1.7966101694915255,
      "grad_norm": 0.1544799506664276,
      "learning_rate": 2.0755937197839802e-05,
      "loss": 1.6146,
      "step": 1060
    },
    {
      "epoch": 1.8135593220338984,
      "grad_norm": 0.15184859931468964,
      "learning_rate": 2.0270919460688055e-05,
      "loss": 1.6178,
      "step": 1070
    },
    {
      "epoch": 1.8305084745762712,
      "grad_norm": 0.154934361577034,
      "learning_rate": 1.9787740931456165e-05,
      "loss": 1.6287,
      "step": 1080
    },
    {
      "epoch": 1.847457627118644,
      "grad_norm": 0.15421293675899506,
      "learning_rate": 1.9306589525272756e-05,
      "loss": 1.6348,
      "step": 1090
    },
    {
      "epoch": 1.8644067796610169,
      "grad_norm": 0.15633705258369446,
      "learning_rate": 1.8827652368888888e-05,
      "loss": 1.6295,
      "step": 1100
    },
    {
      "epoch": 1.8813559322033897,
      "grad_norm": 0.15599501132965088,
      "learning_rate": 1.835111572790183e-05,
      "loss": 1.629,
      "step": 1110
    },
    {
      "epoch": 1.8983050847457628,
      "grad_norm": 0.1540609449148178,
      "learning_rate": 1.7877164934313628e-05,
      "loss": 1.6127,
      "step": 1120
    },
    {
      "epoch": 1.9152542372881356,
      "grad_norm": 0.1557774394750595,
      "learning_rate": 1.740598431445282e-05,
      "loss": 1.6345,
      "step": 1130
    },
    {
      "epoch": 1.9322033898305084,
      "grad_norm": 0.15810750424861908,
      "learning_rate": 1.6937757117287278e-05,
      "loss": 1.6291,
      "step": 1140
    },
    {
      "epoch": 1.9491525423728815,
      "grad_norm": 0.15605640411376953,
      "learning_rate": 1.6472665443155983e-05,
      "loss": 1.6311,
      "step": 1150
    },
    {
      "epoch": 1.9661016949152543,
      "grad_norm": 0.15914802253246307,
      "learning_rate": 1.6010890172947606e-05,
      "loss": 1.6082,
      "step": 1160
    },
    {
      "epoch": 1.9830508474576272,
      "grad_norm": 0.15677359700202942,
      "learning_rate": 1.5552610897753292e-05,
      "loss": 1.6266,
      "step": 1170
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.15751083195209503,
      "learning_rate": 1.509800584902108e-05,
      "loss": 1.6265,
      "step": 1180
    },
    {
      "epoch": 2.016949152542373,
      "grad_norm": 0.17009219527244568,
      "learning_rate": 1.4647251829239139e-05,
      "loss": 1.5776,
      "step": 1190
    },
    {
      "epoch": 2.0338983050847457,
      "grad_norm": 0.16682426631450653,
      "learning_rate": 1.4200524143174677e-05,
      "loss": 1.5922,
      "step": 1200
    },
    {
      "epoch": 2.0338983050847457,
      "eval_loss": 1.6305924654006958,
      "eval_runtime": 4.3076,
      "eval_samples_per_second": 88.449,
      "eval_steps_per_second": 11.143,
      "step": 1200
    },
    {
      "epoch": 2.0508474576271185,
      "grad_norm": 0.16748355329036713,
      "learning_rate": 1.3757996529695411e-05,
      "loss": 1.5948,
      "step": 1210
    },
    {
      "epoch": 2.0677966101694913,
      "grad_norm": 0.17169494926929474,
      "learning_rate": 1.331984109420001e-05,
      "loss": 1.5884,
      "step": 1220
    },
    {
      "epoch": 2.084745762711864,
      "grad_norm": 0.17144790291786194,
      "learning_rate": 1.2886228241683749e-05,
      "loss": 1.6067,
      "step": 1230
    },
    {
      "epoch": 2.1016949152542375,
      "grad_norm": 0.16989319026470184,
      "learning_rate": 1.2457326610465642e-05,
      "loss": 1.6103,
      "step": 1240
    },
    {
      "epoch": 2.1186440677966103,
      "grad_norm": 0.1745283454656601,
      "learning_rate": 1.2033303006602444e-05,
      "loss": 1.5979,
      "step": 1250
    },
    {
      "epoch": 2.135593220338983,
      "grad_norm": 0.17248323559761047,
      "learning_rate": 1.1614322339015484e-05,
      "loss": 1.5786,
      "step": 1260
    },
    {
      "epoch": 2.152542372881356,
      "grad_norm": 0.16860243678092957,
      "learning_rate": 1.1200547555355067e-05,
      "loss": 1.5903,
      "step": 1270
    },
    {
      "epoch": 2.169491525423729,
      "grad_norm": 0.17552419006824493,
      "learning_rate": 1.0792139578627865e-05,
      "loss": 1.5913,
      "step": 1280
    },
    {
      "epoch": 2.1864406779661016,
      "grad_norm": 0.17908188700675964,
      "learning_rate": 1.0389257244611602e-05,
      "loss": 1.592,
      "step": 1290
    },
    {
      "epoch": 2.2033898305084745,
      "grad_norm": 0.17985224723815918,
      "learning_rate": 9.992057240081487e-06,
      "loss": 1.5968,
      "step": 1300
    },
    {
      "epoch": 2.2203389830508473,
      "grad_norm": 0.174930602312088,
      "learning_rate": 9.600694041872518e-06,
      "loss": 1.5893,
      "step": 1310
    },
    {
      "epoch": 2.23728813559322,
      "grad_norm": 0.17611059546470642,
      "learning_rate": 9.215319856801158e-06,
      "loss": 1.5861,
      "step": 1320
    },
    {
      "epoch": 2.2542372881355934,
      "grad_norm": 0.1744425892829895,
      "learning_rate": 8.836084562469921e-06,
      "loss": 1.606,
      "step": 1330
    },
    {
      "epoch": 2.2711864406779663,
      "grad_norm": 0.17969445884227753,
      "learning_rate": 8.463135648977773e-06,
      "loss": 1.5911,
      "step": 1340
    },
    {
      "epoch": 2.288135593220339,
      "grad_norm": 0.1790507286787033,
      "learning_rate": 8.096618161559142e-06,
      "loss": 1.5832,
      "step": 1350
    },
    {
      "epoch": 2.305084745762712,
      "grad_norm": 0.17973437905311584,
      "learning_rate": 7.736674644173695e-06,
      "loss": 1.5868,
      "step": 1360
    },
    {
      "epoch": 2.3220338983050848,
      "grad_norm": 0.17950280010700226,
      "learning_rate": 7.383445084068907e-06,
      "loss": 1.5788,
      "step": 1370
    },
    {
      "epoch": 2.3389830508474576,
      "grad_norm": 0.17697365581989288,
      "learning_rate": 7.037066857337058e-06,
      "loss": 1.5769,
      "step": 1380
    },
    {
      "epoch": 2.3559322033898304,
      "grad_norm": 0.17717047035694122,
      "learning_rate": 6.69767467548762e-06,
      "loss": 1.5676,
      "step": 1390
    },
    {
      "epoch": 2.3728813559322033,
      "grad_norm": 0.17924557626247406,
      "learning_rate": 6.365400533056012e-06,
      "loss": 1.5896,
      "step": 1400
    },
    {
      "epoch": 2.389830508474576,
      "grad_norm": 0.17875994741916656,
      "learning_rate": 6.040373656269041e-06,
      "loss": 1.5698,
      "step": 1410
    },
    {
      "epoch": 2.406779661016949,
      "grad_norm": 0.18066538870334625,
      "learning_rate": 5.722720452786928e-06,
      "loss": 1.5881,
      "step": 1420
    },
    {
      "epoch": 2.423728813559322,
      "grad_norm": 0.18073728680610657,
      "learning_rate": 5.412564462541544e-06,
      "loss": 1.5864,
      "step": 1430
    },
    {
      "epoch": 2.440677966101695,
      "grad_norm": 0.18556879460811615,
      "learning_rate": 5.110026309689922e-06,
      "loss": 1.592,
      "step": 1440
    },
    {
      "epoch": 2.457627118644068,
      "grad_norm": 0.17982180416584015,
      "learning_rate": 4.815223655701812e-06,
      "loss": 1.5912,
      "step": 1450
    },
    {
      "epoch": 2.4745762711864407,
      "grad_norm": 0.1827942430973053,
      "learning_rate": 4.5282711535994115e-06,
      "loss": 1.5732,
      "step": 1460
    },
    {
      "epoch": 2.4915254237288136,
      "grad_norm": 0.18152937293052673,
      "learning_rate": 4.2492804033671145e-06,
      "loss": 1.5994,
      "step": 1470
    },
    {
      "epoch": 2.5084745762711864,
      "grad_norm": 0.18349309265613556,
      "learning_rate": 3.978359908548746e-06,
      "loss": 1.5906,
      "step": 1480
    },
    {
      "epoch": 2.5254237288135593,
      "grad_norm": 0.18495327234268188,
      "learning_rate": 3.71561503404885e-06,
      "loss": 1.593,
      "step": 1490
    },
    {
      "epoch": 2.542372881355932,
      "grad_norm": 0.18758471310138702,
      "learning_rate": 3.4611479651548457e-06,
      "loss": 1.6067,
      "step": 1500
    },
    {
      "epoch": 2.559322033898305,
      "grad_norm": 0.18525028228759766,
      "learning_rate": 3.2150576677956458e-06,
      "loss": 1.5926,
      "step": 1510
    },
    {
      "epoch": 2.576271186440678,
      "grad_norm": 0.17536324262619019,
      "learning_rate": 2.977439850052366e-06,
      "loss": 1.5849,
      "step": 1520
    },
    {
      "epoch": 2.593220338983051,
      "grad_norm": 0.18296369910240173,
      "learning_rate": 2.7483869249360915e-06,
      "loss": 1.5828,
      "step": 1530
    },
    {
      "epoch": 2.610169491525424,
      "grad_norm": 0.1857452690601349,
      "learning_rate": 2.527987974447113e-06,
      "loss": 1.5802,
      "step": 1540
    },
    {
      "epoch": 2.6271186440677967,
      "grad_norm": 0.18499261140823364,
      "learning_rate": 2.316328714929633e-06,
      "loss": 1.5869,
      "step": 1550
    },
    {
      "epoch": 2.6440677966101696,
      "grad_norm": 0.18661075830459595,
      "learning_rate": 2.113491463735437e-06,
      "loss": 1.5813,
      "step": 1560
    },
    {
      "epoch": 2.6610169491525424,
      "grad_norm": 0.18289625644683838,
      "learning_rate": 1.9195551072095086e-06,
      "loss": 1.5751,
      "step": 1570
    },
    {
      "epoch": 2.6779661016949152,
      "grad_norm": 0.1815192848443985,
      "learning_rate": 1.7345950700099617e-06,
      "loss": 1.5887,
      "step": 1580
    },
    {
      "epoch": 2.694915254237288,
      "grad_norm": 0.17822951078414917,
      "learning_rate": 1.558683285774304e-06,
      "loss": 1.5868,
      "step": 1590
    },
    {
      "epoch": 2.711864406779661,
      "grad_norm": 0.17891579866409302,
      "learning_rate": 1.3918881691434128e-06,
      "loss": 1.5841,
      "step": 1600
    },
    {
      "epoch": 2.7288135593220337,
      "grad_norm": 0.18717658519744873,
      "learning_rate": 1.2342745891540857e-06,
      "loss": 1.5783,
      "step": 1610
    },
    {
      "epoch": 2.7457627118644066,
      "grad_norm": 0.1843055635690689,
      "learning_rate": 1.0859038440105164e-06,
      "loss": 1.5761,
      "step": 1620
    },
    {
      "epoch": 2.7627118644067794,
      "grad_norm": 0.18406164646148682,
      "learning_rate": 9.46833637244568e-07,
      "loss": 1.5948,
      "step": 1630
    },
    {
      "epoch": 2.7796610169491527,
      "grad_norm": 0.1829424947500229,
      "learning_rate": 8.171180552740048e-07,
      "loss": 1.5791,
      "step": 1640
    },
    {
      "epoch": 2.7966101694915255,
      "grad_norm": 0.18232209980487823,
      "learning_rate": 6.9680754636752e-07,
      "loss": 1.6034,
      "step": 1650
    },
    {
      "epoch": 2.8135593220338984,
      "grad_norm": 0.18290942907333374,
      "learning_rate": 5.859489010246494e-07,
      "loss": 1.5902,
      "step": 1660
    },
    {
      "epoch": 2.830508474576271,
      "grad_norm": 0.17814739048480988,
      "learning_rate": 4.845852337782953e-07,
      "loss": 1.5887,
      "step": 1670
    },
    {
      "epoch": 2.847457627118644,
      "grad_norm": 0.1855529248714447,
      "learning_rate": 3.9275596642685543e-07,
      "loss": 1.5685,
      "step": 1680
    },
    {
      "epoch": 2.864406779661017,
      "grad_norm": 0.1889604926109314,
      "learning_rate": 3.1049681270249044e-07,
      "loss": 1.5738,
      "step": 1690
    },
    {
      "epoch": 2.8813559322033897,
      "grad_norm": 0.18516628444194794,
      "learning_rate": 2.3783976438156442e-07,
      "loss": 1.5753,
      "step": 1700
    },
    {
      "epoch": 2.898305084745763,
      "grad_norm": 0.18402761220932007,
      "learning_rate": 1.7481307884256727e-07,
      "loss": 1.5773,
      "step": 1710
    },
    {
      "epoch": 2.915254237288136,
      "grad_norm": 0.18513433635234833,
      "learning_rate": 1.2144126807641376e-07,
      "loss": 1.5865,
      "step": 1720
    },
    {
      "epoch": 2.9322033898305087,
      "grad_norm": 0.1811605542898178,
      "learning_rate": 7.774508915337209e-08,
      "loss": 1.5831,
      "step": 1730
    },
    {
      "epoch": 2.9491525423728815,
      "grad_norm": 0.17931722104549408,
      "learning_rate": 4.374153615033794e-08,
      "loss": 1.5919,
      "step": 1740
    },
    {
      "epoch": 2.9661016949152543,
      "grad_norm": 0.18402303755283356,
      "learning_rate": 1.9443833541599178e-08,
      "loss": 1.5807,
      "step": 1750
    },
    {
      "epoch": 2.983050847457627,
      "grad_norm": 0.18316687643527985,
      "learning_rate": 4.8614310556388544e-09,
      "loss": 1.5941,
      "step": 1760
    },
    {
      "epoch": 3.0,
      "grad_norm": 0.1795782446861267,
      "learning_rate": 0.0,
      "loss": 1.5941,
      "step": 1770
    },
    {
      "epoch": 3.0,
      "step": 1770,
      "total_flos": 1.0291750314285466e+19,
      "train_loss": 1.6438454094579664,
      "train_runtime": 3711.4901,
      "train_samples_per_second": 30.48,
      "train_steps_per_second": 0.477
    }
  ],
  "logging_steps": 10,
  "max_steps": 1770,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.0291750314285466e+19,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}